WIP: generalize for Gerrit / Floral #3
.gitignore (vendored, 4 changes)
@@ -10,3 +10,7 @@ result
 target
 *.nix.orig
 *.nix.rej
+.rabbitmq-data
+.checkouts
+.ofborg-state
+.direnv
Cargo.lock (generated, 3224 changes)
Diff suppressed because it is too large.
Procfile (new file, 3 lines)
@@ -0,0 +1,3 @@
+amqp-server: rabbitmq-server
+pastebin-worker: cargo run --bin pastebin-worker -- dev.config.json
+stats-worker: cargo run --bin stats -- dev.config.json
default.nix (133 changes)
@@ -1,6 +1,127 @@
-(import
-  (fetchTarball {
-    url = "https://github.com/edolstra/flake-compat/archive/0f9255e01c2351cc7d116c072cb317785dd33b33.tar.gz";
-    sha256 = "0m9grvfsbwmvgwaxvdzv6cmyvjnlww004gfxjvcl806ndqaxzy4j";
-  })
-  { src = ./.; }).defaultNix.packages.${builtins.currentSystem}
+{ sources ? import ./npins, pkgs ? import sources.nixpkgs {} }:
+{
+  overlay = self: super:
+    let
+      inherit (super) lib;
+    in
+    {
+      ofborg = super.rustPlatform.buildRustPackage {
+        name = "ofborg";
+        src = super.nix-gitignore.gitignoreSource [ ] ./.;
+
+        nativeBuildInputs = with pkgs; [
+          pkg-config
+          rustPackages.clippy
+        ];
+
+        buildInputs = lib.optionals super.stdenv.isDarwin (with pkgs; [
+          darwin.apple_sdk.frameworks.Security
+          darwin.apple_sdk.frameworks.CoreFoundation
+        ]);
+
+        preBuild = ''
+          cargo clippy
+        '';
+
+        doCheck = false; # Tests require access to a /nix/ and a nix daemon
+        checkInputs = with super; [
+          lix
+        ];
+
+        cargoLock = {
+          lockFile = ./Cargo.lock;
+          outputHashes = {
+            "hubcaps-0.6.2" = "sha256-yyHOCxUsehvtYfttRY4T9TDrJhSKGpJRa/SX3Sd1TNc=";
+          };
+        };
+      };
+    };
+  packages = {
+    ofborg = pkgs.rustPlatform.buildRustPackage {
+      name = "ofborg";
+      src = pkgs.nix-gitignore.gitignoreSource [ ] ./.;
+
+      nativeBuildInputs = with pkgs; [
+        pkg-config
+        rustPackages.clippy
+      ];
+
+      buildInputs = with pkgs; [
+      ] ++ lib.optionals pkgs.stdenv.isDarwin (with pkgs; [
+        darwin.apple_sdk.frameworks.Security
+        darwin.apple_sdk.frameworks.CoreFoundation
+      ]);
+
+      preBuild = ''
+        cargo clippy
+      '';
+
+      doCheck = false; # Tests require access to a /nix/ and a nix daemon
+      checkInputs = with pkgs; [
+        lix
+      ];
+
+      cargoLock = {
+        lockFile = ./Cargo.lock;
+        outputHashes = {
+          "hubcaps-0.6.2" = "sha256-yyHOCxUsehvtYfttRY4T9TDrJhSKGpJRa/SX3Sd1TNc=";
+        };
+      };
+    };
+  };
+
+  shell = pkgs.mkShell {
+    name = "ofborg-devenv";
+    RABBITMQ_CONFIG_FILE = pkgs.writeText "rabbitmq-dev.conf" ''
+      listeners.tcp.1=:::5672
+    '';
+    TRIVIAL_PASSWORD = pkgs.writeText "trivial-password.txt" "test";
+    RABBITMQ_LOGS = "-";
+    RUST_SRC_PATH = pkgs.rustPlatform.rustLibSrc;
+    packages = with pkgs; [
+      lix
+      gitFull
+      nix-prefetch-git
+      rustc
+      cargo
+      cargo-edit
+      cargo-outdated
+      clippy
+      rustfmt
+      pkg-config
+      rabbitmq-server
+      hivemind
+    ];
+
+    postHook = ''
+      checkPhase() (
+        cd "${builtins.toString ./.}/ofborg"
+        set -x
+        cargo fmt
+        git diff --exit-code
+        cargofmtexit=$?
+
+        cargo clippy
+        cargoclippyexit=$?
+
+        cargo build && cargo test
+        cargotestexit=$?
+
+        sum=$((cargofmtexit + cargoclippyexit + cargotestexit))
+        exit $sum
+      )
+    '';
+
+    shellHook = ''
+      export RABBITMQ_MNESIA_BASE="$(git rev-parse --show-toplevel)/.rabbitmq-data";
+      export STATE_DIRECTORY="$(git rev-parse --show-toplevel)/.ofborg-state"
+      mkdir -p "$STATE_DIRECTORY"
+      mkdir -p "$RABBITMQ_MNESIA_BASE"
+    '';
+
+    RUSTFLAGS = "-D warnings";
+    RUST_BACKTRACE = "1";
+    RUST_LOG = "ofborg=debug";
+    NIX_PATH = "nixpkgs=${pkgs.path}";
+  };
+}
dev.config.json (new file, 35 lines)
@@ -0,0 +1,35 @@
+{
+  "runner": {
+    "identity": "dev"
+  },
+  "checkout": {
+    "root": "$STATE_DIRECTORY/.checkouts"
+  },
+  "vcs": "Gerrit",
+  "gerrit": {
+    "instance_uri": "cl.forkos.org",
+    "ssh_private_key_file": "/dev/null"
+  },
+  "nix": {
+    "system": "x86_64-linux",
+    "remote": "daemon",
+    "build_timeout_seconds": 1800
+  },
+  "rabbitmq": {
+    "ssl": false,
+    "host": "127.0.0.1",
+    "virtualhost": "/",
+    "username": "test",
+    "password_file": "$TRIVIAL_PASSWORD"
+  },
+  "statcheck": {
+    "db": "$STATE_DIRECTORY/statcheck/db"
+  },
+  "feedback": {
+    "full_logs": true
+  },
+  "pastebin": {
+    "root": "$STATE_DIRECTORY/pastebins",
+    "db": "$STATE_DIRECTORY/pastebins/db.json"
+  }
+}
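Note: the $STATE_DIRECTORY and $TRIVIAL_PASSWORD references are expanded by the workers at load time (presumably via the shellexpand dependency added below). Every binary in this change consumes the file the same way; a minimal sketch of that entry point, using only calls visible elsewhere in this diff:

    use std::env;

    use ofborg::config;

    fn main() {
        // Same pattern as the worker binaries below: the first CLI argument
        // is the path to a JSON config such as dev.config.json.
        let arg = env::args().nth(1).expect("usage: <worker> <config>");
        let cfg = config::load(arg.as_ref());

        // cfg.nix() builds the Nix invocation helper from the "nix" section
        // (system, remote, build_timeout_seconds).
        let _nix = cfg.nix();
    }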
flake.lock (deleted, 27 lines)
@@ -1,27 +0,0 @@
-{
-  "nodes": {
-    "nixpkgs": {
-      "locked": {
-        "lastModified": 1720031269,
-        "narHash": "sha256-rwz8NJZV+387rnWpTYcXaRNvzUSnnF9aHONoJIYmiUQ=",
-        "owner": "nixos",
-        "repo": "nixpkgs",
-        "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nixos",
-        "ref": "nixos-unstable",
-        "repo": "nixpkgs",
-        "type": "github"
-      }
-    },
-    "root": {
-      "inputs": {
-        "nixpkgs": "nixpkgs"
-      }
-    }
-  },
-  "root": "root",
-  "version": 7
-}
flake.nix (deleted, 115 lines)
@@ -1,115 +0,0 @@
-{
-  inputs = {
-    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
-  };
-
-  outputs =
-    { self
-    , nixpkgs
-    , ...
-    }@inputs:
-    let
-      supportedSystems = [ "aarch64-darwin" "x86_64-darwin" "x86_64-linux" "aarch64-linux" ];
-      forAllSystems = f: nixpkgs.lib.genAttrs supportedSystems (system: f system);
-    in
-    {
-      devShell = forAllSystems (system: inputs.self.devShells.${system}.default);
-      devShells = forAllSystems
-        (system:
-          let
-            pkgs = import nixpkgs {
-              inherit system;
-            };
-          in
-          {
-            default = pkgs.mkShell {
-              name = "gh-event-forwarder";
-              nativeBuildInputs = with pkgs; [
-                nix # so in --pure mode we actually find the "correct" nix
-                bash
-                nix-prefetch-git
-                rustc
-                cargo
-                clippy
-                rustfmt
-                pkg-config
-                git
-              ];
-              buildInputs = with pkgs; [
-              ] ++ lib.optionals stdenv.isDarwin [ darwin.Security libiconv ];
-
-              postHook = ''
-                checkPhase() (
-                  cd "${builtins.toString ./.}/ofborg"
-                  set -x
-                  cargo fmt
-                  git diff --exit-code
-                  cargofmtexit=$?
-
-                  cargo clippy
-                  cargoclippyexit=$?
-
-                  cargo build && cargo test
-                  cargotestexit=$?
-
-                  sum=$((cargofmtexit + cargoclippyexit + cargotestexit))
-                  exit $sum
-                )
-              '';
-
-              RUSTFLAGS = "-D warnings";
-              RUST_BACKTRACE = "1";
-              RUST_LOG = "ofborg=debug";
-              NIX_PATH = "nixpkgs=${pkgs.path}";
-            };
-          });
-
-      packages = forAllSystems (system:
-        let
-          pkgs = import nixpkgs {
-            inherit system;
-          };
-
-          pkg = pkgs.rustPlatform.buildRustPackage {
-            name = "ofborg";
-            src = pkgs.nix-gitignore.gitignoreSource [ ] ./.;
-
-            nativeBuildInputs = with pkgs; [
-              pkg-config
-              pkgs.rustPackages.clippy
-            ];
-
-            buildInputs = with pkgs; [
-            ] ++ lib.optionals pkgs.stdenv.isDarwin (with pkgs; [
-              darwin.apple_sdk.frameworks.Security
-              darwin.apple_sdk.frameworks.CoreFoundation
-            ]);
-
-            preBuild = ''
-              cargo clippy
-            '';
-
-            doCheck = false; # Tests require access to a /nix/ and a nix daemon
-            checkInputs = with pkgs; [
-              nix
-            ];
-
-            cargoLock = {
-              lockFile = ./Cargo.lock;
-              outputHashes = {
-                "hubcaps-0.6.2" = "sha256-yyHOCxUsehvtYfttRY4T9TDrJhSKGpJRa/SX3Sd1TNc=";
-              };
-            };
-          };
-
-        in
-        {
-          default = pkg;
-          ofborg = pkg;
-        });
-
-      hydraJobs = {
-        buildRs = forAllSystems (system: self.packages.${system}.ofborg);
-      };
-    };
-}
npins/default.nix (new file, 80 lines)
@@ -0,0 +1,80 @@
+# Generated by npins. Do not modify; will be overwritten regularly
+let
+  data = builtins.fromJSON (builtins.readFile ./sources.json);
+  version = data.version;
+
+  mkSource =
+    spec:
+    assert spec ? type;
+    let
+      path =
+        if spec.type == "Git" then
+          mkGitSource spec
+        else if spec.type == "GitRelease" then
+          mkGitSource spec
+        else if spec.type == "PyPi" then
+          mkPyPiSource spec
+        else if spec.type == "Channel" then
+          mkChannelSource spec
+        else
+          builtins.throw "Unknown source type ${spec.type}";
+    in
+    spec // { outPath = path; };
+
+  mkGitSource =
+    {
+      repository,
+      revision,
+      url ? null,
+      hash,
+      branch ? null,
+      ...
+    }:
+    assert repository ? type;
+    # At the moment, either it is a plain git repository (which has an url), or it is a GitHub/GitLab repository
+    # In the latter case, there we will always be an url to the tarball
+    if url != null then
+      (builtins.fetchTarball {
+        inherit url;
+        sha256 = hash;
+      })
+    else
+      assert repository.type == "Git";
+      let
+        urlToName =
+          url: rev:
+          let
+            matched = builtins.match "^.*/([^/]*)(\\.git)?$" repository.url;
+
+            short = builtins.substring 0 7 rev;
+
+            appendShort = if (builtins.match "[a-f0-9]*" rev) != null then "-${short}" else "";
+          in
+          "${if matched == null then "source" else builtins.head matched}${appendShort}";
+        name = urlToName repository.url revision;
+      in
+      builtins.fetchGit {
+        url = repository.url;
+        rev = revision;
+        inherit name;
+        narHash = hash;
+      };
+
+  mkPyPiSource =
+    { url, hash, ... }:
+    builtins.fetchurl {
+      inherit url;
+      sha256 = hash;
+    };
+
+  mkChannelSource =
+    { url, hash, ... }:
+    builtins.fetchTarball {
+      inherit url;
+      sha256 = hash;
+    };
+in
+if version == 4 then
+  builtins.mapAttrs (_: mkSource) data.pins
+else
+  throw "Unsupported format version ${toString version} in sources.json. Try running `npins upgrade`"
npins/sources.json (new file, 11 lines)
@@ -0,0 +1,11 @@
+{
+  "pins": {
+    "nixpkgs": {
+      "type": "Channel",
+      "name": "nixpkgs-unstable",
+      "url": "https://releases.nixos.org/nixpkgs/nixpkgs-24.11pre698484.30c9efeef01e/nixexprs.tar.xz",
+      "hash": "079c53r3wryasv3ghyi2da9ipxh1bfh2pl63yj447bilh5ghjhhz"
+    }
+  },
+  "version": 4
+}
@@ -7,4 +7,5 @@ edition = "2018"
 
 [dependencies]
 ofborg = { path = "../ofborg" }
 
-log = "0.4.17"
+tokio = "*"
+log = "0.4.22"
@@ -8,7 +8,8 @@ use std::path::Path;
 use ofborg::config;
 use ofborg::nix;
 
-fn main() {
+#[tokio::main]
+async fn main() {
     ofborg::setup_log();
 
     log::info!("Loading config...");
@@ -16,11 +17,14 @@ fn main() {
     let nix = cfg.nix();
 
     log::info!("Running build...");
-    match nix.safely_build_attrs(
-        Path::new("./"),
-        nix::File::DefaultNixpkgs,
-        vec![String::from("hello")],
-    ) {
+    match nix
+        .safely_build_attrs(
+            Path::new("./"),
+            nix::File::DefaultNixpkgs,
+            vec![String::from("hello")],
+        )
+        .await
+    {
        Ok(mut out) => {
            print!("{}", file_to_str(&mut out));
        }
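Note: this is the recurring migration pattern throughout the PR: a blocking main becomes a Tokio entry point, and formerly blocking calls grow .await. In isolation (do_work is a stand-in name, not part of this codebase):

    // Before:
    //
    //     fn main() {
    //         do_work();
    //     }

    // After: #[tokio::main] wraps the async body in a Tokio runtime, so async
    // APIs such as nix.safely_build_attrs(...) can be awaited directly.
    #[tokio::main]
    async fn main() {
        do_work().await;
    }

    // Stand-in for any async operation.
    async fn do_work() {}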
@@ -1,37 +1,53 @@
 [package]
 name = "ofborg"
-version = "0.1.9"
-authors = ["Graham Christensen <graham@grahamc.com>"]
+version = "0.90.0"
+authors = [
+  "Graham Christensen <graham@grahamc.com>",
+  "Ryan Lahfa <raito@lix.systems>"
+]
 build = "build.rs"
 edition = "2018"
 
 [dependencies]
-async-std = { version = "=1.12.0", features = ["unstable", "tokio1"] }
+async-stream = "0.3.6"
+async-trait = "0.1.83"
+axum = "0.7.8"
+base64 = "0.22.1"
 brace-expand = "0.1.0"
-chrono = "0.4.22"
-either = "1.8.0"
+chrono = "0.4.38"
+clap = { version = "4.5.21", features = ["derive"] }
+either = "1.13.0"
 fs2 = "0.4.3"
-futures-util = "0.3.25"
-#hubcaps = "0.6"
-# for Conclusion::Skipped which is in master
-hubcaps = { git = "https://github.com/softprops/hubcaps.git", rev = "d60d157b6638760fc725b2e4e4f329a4ec6b901e", default-features = false, features = ["app", "rustls-tls"] }
-# hyper = { version = "0.14", features = ["full"] }
-hyper = "=0.10.*"
-# maybe can be removed when hyper is updated
-http = "0.2"
-lapin = "2.1.1"
+futures = "0.3.31"
+futures-util = "0.3.31"
+http = "1.1"
+hyper = "1.5"
+hyper-server = "0.6.0"
+jfs = "0.9.0"
+lapin = "2.5.0"
 lru-cache = "0.1.2"
 md5 = "0.7.0"
-nom = "4.2.3"
-regex = "1.7.0"
+nom = "4,<5" # FIXME: update
+openssh = { version = "0.11.3", features = ["process-mux"], default-features = false }
+opentelemetry = "0.27.0"
+opentelemetry_sdk = { version = "0.27.0", features = ["rt-tokio"] }
+opentelemetry-otlp = { version = "0.27.0", features = ["http-json", "reqwest-client", "reqwest-rustls"] }
+opentelemetry-semantic-conventions = "0.27.0"
+opentelemetry-stdout = "0.27.0"
+regex = "1.11.1"
+rustls-pemfile = "2.2.0"
 separator = "0.4.1"
 serde = "1.0"
 serde_derive = "1.0"
 serde_json = "1.0"
-sys-info = "0.9.1"
-tempfile = "3.3.0"
-tracing = "0.1.37"
-tracing-subscriber = { version = "0.3.16", features = ["json", "env-filter"] }
-uuid = { version = "1.2", features = ["v4"] }
-rustls-pemfile = "1.0.2"
 shellexpand = "3.1.0"
+sys-info = "0.9.1"
+tempfile = "3.14.0"
+thiserror = "2.0.3"
+tokio = { version = "1.41.1", features = ["rt-multi-thread"] }
+tokio-stream = { version = "0.1.16", features = ["io-util"] }
+tracing = "0.1.40"
+tracing-subscriber = { version = "0.3.18", features = ["json", "env-filter"] }
+tracing-opentelemetry = "0.28.0"
+uuid = { version = "1.11", features = ["v4"] }
+zstd = "0.13.2"
@@ -63,7 +63,7 @@ impl MetricType {
         let fields: Vec<String> = event
             .fields
             .iter()
-            .map(|&(ref _fieldname, ref fieldtype)| fieldtype.clone())
+            .map(|(_fieldname, fieldtype)| fieldtype.clone())
             .collect();
 
         fields
@@ -94,7 +94,7 @@ impl MetricType {
         let fields: Vec<String> = event
             .fields
             .iter()
-            .map(|&(ref fieldname, ref _fieldtype)| fieldname.clone())
+            .map(|(fieldname, _fieldtype)| fieldname.clone())
             .collect();
 
         fields
@@ -139,7 +139,7 @@ fn name_to_parts(name: &str) -> Vec<String> {
             parts.push(buf.to_owned());
             buf = String::from("");
         }
-        buf.push_str(&c.to_string());
+        buf.push(c);
     }
     if !buf.is_empty() {
         parts.push(buf.to_owned());
@@ -224,6 +224,11 @@ fn events() -> Vec<MetricType> {
             "Number of jobs for issues which are already closed",
             None,
         ),
+        Metric::ticker(
+            "CurrentlyWorkInProgress",
+            "Number of jobs which are currently work in progress during analysis time",
+            None,
+        ),
         Metric::ticker(
             "IssueFetchFailed",
             "Number of failed fetches for GitHub issues",
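Note: the closure rewrites above lean on match ergonomics: when iterating by reference, |(fieldname, fieldtype)| matched against an &(A, B) item binds both names as references, so the older |&(ref a, ref b)| spelling is redundant. A standalone illustration:

    fn main() {
        let fields = vec![("name".to_owned(), "type".to_owned())];

        // Old style: destructure the reference and re-borrow explicitly.
        let old: Vec<String> = fields.iter().map(|&(ref _n, ref t)| t.clone()).collect();

        // New style: match ergonomics bind _n and t as &String automatically.
        let new: Vec<String> = fields.iter().map(|(_n, t)| t.clone()).collect();

        assert_eq!(old, new);
    }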
@@ -6,6 +6,7 @@ pub struct Acl {
 }
 
 impl Acl {
+    #[must_use]
     pub fn new(repos: Vec<String>, mut trusted_users: Option<Vec<String>>) -> Acl {
         if let Some(ref mut users) = trusted_users {
             users.iter_mut().map(|x| *x = x.to_lowercase()).last();
@@ -17,10 +18,12 @@ impl Acl {
         }
     }
 
+    #[must_use]
     pub fn is_repo_eligible(&self, name: &str) -> bool {
         self.repos.contains(&name.to_lowercase())
     }
 
+    #[must_use]
     pub fn build_job_architectures_for_user_repo(&self, user: &str, repo: &str) -> Vec<System> {
         if self.can_build_unrestricted(user, repo) {
             vec![
@@ -35,6 +38,7 @@ impl Acl {
         }
     }
 
+    #[must_use]
     pub fn build_job_destinations_for_user_repo(
         &self,
         user: &str,
@@ -42,10 +46,11 @@ impl Acl {
     ) -> Vec<(Option<String>, Option<String>)> {
         self.build_job_architectures_for_user_repo(user, repo)
             .iter()
-            .map(|system| system.as_build_destination())
+            .map(super::systems::System::as_build_destination)
             .collect()
     }
 
+    #[must_use]
     pub fn can_build_unrestricted(&self, user: &str, repo: &str) -> bool {
         if let Some(ref users) = self.trusted_users {
             if repo.to_lowercase() == "nixos/nixpkgs" {
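Note: #[must_use] makes rustc warn when a caller discards the return value of these side-effect-free queries. A small illustration (toy Acl, not the real one):

    struct Acl;

    impl Acl {
        #[must_use]
        fn is_repo_eligible(&self, name: &str) -> bool {
            !name.is_empty()
        }
    }

    fn main() {
        let acl = Acl;
        // Warns: unused return value of `is_repo_eligible` that must be used.
        acl.is_repo_eligible("nixos/nixpkgs");
        // Fine: the result is consumed.
        let _eligible = acl.is_repo_eligible("nixos/nixpkgs");
    }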
@@ -82,10 +82,12 @@ fn child_wait<T: Send + 'static>(
 }
 
 impl AsyncCmd {
+    #[must_use]
     pub fn new(cmd: Command) -> AsyncCmd {
         AsyncCmd { command: cmd }
     }
 
+    #[must_use]
     pub fn spawn(mut self) -> SpawnedAsyncCmd {
         let mut child = self
             .command
@@ -122,7 +124,7 @@ impl AsyncCmd {
         child_wait(WaitTarget::Child, monitor_tx, child),
     );
 
-    let head_waiter = thread::spawn(move || block_on_waiters(monitor_rx, waiters));
+    let head_waiter = thread::spawn(move || block_on_waiters(&monitor_rx, waiters));
 
     SpawnedAsyncCmd {
         waiter: head_waiter,
@@ -152,12 +154,12 @@ impl SpawnedAsyncCmd {
 // FIXME: remove with rust/cargo update
 #[allow(clippy::cognitive_complexity)]
 fn block_on_waiters(
-    monitor_rx: mpsc::Receiver<(WaitTarget, WaitResult<()>)>,
+    monitor_rx: &mpsc::Receiver<(WaitTarget, WaitResult<()>)>,
     mut waiters: HashMap<WaitTarget, thread::JoinHandle<()>>,
 ) -> Option<Result<ExitStatus, io::Error>> {
     let mut status = None;
 
-    for (id, interior_result) in monitor_rx.iter() {
+    for (id, interior_result) in monitor_rx {
         match waiters.remove(&id) {
             Some(handle) => {
                 info!("Received notice that {:?} finished", id);
@@ -215,7 +217,7 @@ mod tests {
         let lines: Vec<String> = spawned.lines().collect();
         assert_eq!(lines, vec!["hi"]);
         let ret = spawned.wait().unwrap().success();
-        assert_eq!(true, ret);
+        assert!(ret);
     }
 
     #[test]
@@ -235,7 +237,7 @@ mod tests {
         let lines: Vec<String> = spawned.lines().collect();
         assert_eq!(lines, vec!["stdout", "stderr", "stdout2", "stderr2"]);
         let ret = spawned.wait().unwrap().success();
-        assert_eq!(true, ret);
+        assert!(ret);
     }
 
     #[test]
@@ -250,7 +252,7 @@ mod tests {
         assert_eq!(lines.len(), 20000);
         let thread_result = spawned.wait();
         let exit_status = thread_result.expect("Thread should exit correctly");
-        assert_eq!(true, exit_status.success());
+        assert!(exit_status.success());
     }
 
     #[test]
@@ -262,10 +264,10 @@ mod tests {
 
         let mut spawned = acmd.spawn();
         let lines: Vec<String> = spawned.lines().collect();
-        assert_eq!(lines.len(), 200000);
+        assert_eq!(lines.len(), 200_000);
         let thread_result = spawned.wait();
         let exit_status = thread_result.expect("Thread should exit correctly");
-        assert_eq!(true, exit_status.success());
+        assert!(exit_status.success());
     }
 
     #[test]
@@ -286,6 +288,6 @@ mod tests {
             vec!["hi", "Non-UTF8 data omitted from the log.", "there"]
         );
         let ret = spawned.wait().unwrap().success();
-        assert_eq!(true, ret);
+        assert!(ret);
     }
 }
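Note: passing &monitor_rx works because &mpsc::Receiver<T> implements IntoIterator with the same blocking semantics as .iter(), and the caller keeps ownership of the receiver. A standalone sketch:

    use std::sync::mpsc;

    fn main() {
        let (tx, rx) = mpsc::channel();
        tx.send(1).unwrap();
        tx.send(2).unwrap();
        drop(tx); // Close the channel so iteration terminates.

        // `for x in &rx` is equivalent to `for x in rx.iter()`, but leaves
        // the receiver usable afterwards.
        for x in &rx {
            println!("got {x}");
        }
    }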
@@ -1,25 +1,25 @@
 use std::env;
 use std::error::Error;
 
-use async_std::task;
 use lapin::message::Delivery;
 use lapin::BasicProperties;
 
 use ofborg::commentparser;
 use ofborg::config;
 use ofborg::easylapin;
-use ofborg::message::{buildjob, Pr, Repo};
+use ofborg::message::{buildjob, Change, Repo};
 use ofborg::notifyworker::NotificationReceiver;
 use ofborg::worker;
 
-fn main() -> Result<(), Box<dyn Error>> {
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
     ofborg::setup_log();
 
     let arg = env::args().nth(1).expect("usage: build-faker <config>");
     let cfg = config::load(arg.as_ref());
 
-    let conn = easylapin::from_config(&cfg.rabbitmq)?;
-    let mut chan = task::block_on(conn.create_channel())?;
+    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
+    let mut chan = conn.create_channel().await?;
 
     let repo_msg = Repo {
         clone_url: "https://github.com/nixos/ofborg.git".to_owned(),
@@ -28,7 +28,7 @@ fn main() -> Result<(), Box<dyn Error>> {
         name: "ofborg".to_owned(),
     };
 
-    let pr_msg = Pr {
+    let pr_msg = Change {
         number: 42,
         head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(),
         target_branch: Some("scratch".to_owned()),
@@ -38,7 +38,7 @@ fn main() -> Result<(), Box<dyn Error>> {
 
     let msg = buildjob::BuildJob {
         repo: repo_msg,
-        pr: pr_msg,
+        change: pr_msg,
         subset: Some(commentparser::Subset::Nixpkgs),
         attrs: vec!["success".to_owned()],
         logs: Some((Some("logs".to_owned()), Some(logbackrk.to_lowercase()))),
@@ -60,10 +60,11 @@ fn main() -> Result<(), Box<dyn Error>> {
 
     for _i in 1..2 {
         recv.tell(worker::publish_serde_action(
-            None,
-            Some("build-inputs-x86_64-darwin".to_owned()),
+            &None,
+            &Some("build-inputs-x86_64-darwin".to_owned()),
             &msg,
-        ));
+        ))
+        .await;
     }
 }
@@ -2,8 +2,8 @@ use std::env;
 use std::error::Error;
 use std::path::Path;
 
-use async_std::task::{self, JoinHandle};
 use futures_util::future;
+use tokio::task::JoinHandle;
 use tracing::{info, warn};
 
 use ofborg::easyamqp::{self, ChannelExt, ConsumerExt};
@@ -12,7 +12,8 @@ use ofborg::{checkout, config, tasks};
 
 // FIXME: remove with rust/cargo update
 #[allow(clippy::cognitive_complexity)]
-fn main() -> Result<(), Box<dyn Error>> {
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
     ofborg::setup_log();
 
     let arg = env::args().nth(1).expect("usage: builder <config>");
@@ -28,27 +29,27 @@ fn main() -> Result<(), Box<dyn Error>> {
         panic!();
     };
 
-    let conn = easylapin::from_config(&cfg.rabbitmq)?;
+    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
     let mut handles = Vec::new();
 
     for system in &cfg.nix.system {
-        let handle_ext = self::create_handle(&conn, &cfg, system.to_string())?;
+        let handle_ext = self::create_handle(&conn, &cfg, system.to_string());
        handles.push(handle_ext);
     }
 
-    task::block_on(future::join_all(handles));
+    future::join_all(handles).await;
 
     drop(conn); // Close connection.
     info!("Closed the session... EOF");
     Ok(())
 }
 
-fn create_handle(
+async fn create_handle(
     conn: &lapin::Connection,
     cfg: &config::Config,
     system: String,
-) -> Result<JoinHandle<()>, Box<dyn Error>> {
-    let mut chan = task::block_on(conn.create_channel())?;
+) -> Result<JoinHandle<Result<(), lapin::Error>>, Box<dyn Error>> {
+    let mut chan = conn.create_channel().await?;
 
     let cloner = checkout::cached_cloner(Path::new(&cfg.checkout.root));
     let nix = cfg.nix().with_system(system.clone());
@@ -61,7 +62,8 @@ fn create_handle(
         auto_delete: false,
         no_wait: false,
         internal: false,
-    })?;
+    })
+    .await?;
 
     let queue_name = if cfg.runner.build_all_jobs != Some(true) {
         let queue_name = format!("build-inputs-{}", system);
@@ -72,7 +74,8 @@ fn create_handle(
         exclusive: false,
         auto_delete: false,
         no_wait: false,
-    })?;
+    })
+    .await?;
         queue_name
     } else {
         warn!("Building all jobs, please don't use this unless you're");
@@ -85,7 +88,8 @@ fn create_handle(
         exclusive: true,
         auto_delete: true,
         no_wait: false,
-    })?;
+    })
+    .await?;
         queue_name
     };
 
@@ -94,9 +98,12 @@ fn create_handle(
         exchange: "build-jobs".to_owned(),
         routing_key: None,
         no_wait: false,
-    })?;
+    })
+    .await?;
 
-    let handle = easylapin::NotifyChannel(chan).consume(
+    info!("Fetching jobs from {}", &queue_name);
+
+    let fut = easylapin::NotifyChannel(chan).consume(
         tasks::build::BuildWorker::new(cloner, nix, system, cfg.runner.identity.clone()),
         easyamqp::ConsumeConfig {
             queue: queue_name.clone(),
@@ -106,8 +113,7 @@ fn create_handle(
             no_wait: false,
             exclusive: false,
         },
-    )?;
-
-    info!("Fetching jobs from {}", &queue_name);
-    Ok(task::spawn(handle))
+    );
+
+    Ok(tokio::spawn(fut))
 }
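Note: create_handle is now async and returns a Tokio JoinHandle per system, which main awaits all at once. A reduced sketch of that fan-out shape, with the AMQP consumer future replaced by a placeholder:

    use futures_util::future;

    #[tokio::main]
    async fn main() {
        let systems = ["x86_64-linux", "aarch64-linux"];

        // One spawned task per system, mirroring the per-queue consumers.
        let handles: Vec<tokio::task::JoinHandle<Result<(), ()>>> = systems
            .iter()
            .map(|system| {
                let system = system.to_string();
                tokio::spawn(async move {
                    println!("consuming build-inputs-{system}");
                    Ok(())
                })
            })
            .collect();

        // Equivalent of `future::join_all(handles).await` in builder.rs.
        future::join_all(handles).await;
    }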
@@ -1,7 +1,6 @@
 use std::env;
 use std::error::Error;
 
-use async_std::task;
 use tracing::info;
 
 use ofborg::config;
@@ -9,7 +8,8 @@ use ofborg::easyamqp::{self, ChannelExt, ConsumerExt};
 use ofborg::easylapin;
 use ofborg::tasks;
 
-fn main() -> Result<(), Box<dyn Error>> {
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
     ofborg::setup_log();
 
     let arg = env::args()
@@ -17,8 +17,8 @@ fn main() -> Result<(), Box<dyn Error>> {
         .expect("usage: evaluation-filter <config>");
     let cfg = config::load(arg.as_ref());
 
-    let conn = easylapin::from_config(&cfg.rabbitmq)?;
-    let mut chan = task::block_on(conn.create_channel())?;
+    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
+    let mut chan = conn.create_channel().await?;
 
     chan.declare_exchange(easyamqp::ExchangeConfig {
         exchange: "github-events".to_owned(),
@@ -28,7 +28,8 @@ fn main() -> Result<(), Box<dyn Error>> {
         auto_delete: false,
         no_wait: false,
         internal: false,
-    })?;
+    })
+    .await?;
 
     chan.declare_queue(easyamqp::QueueConfig {
         queue: "mass-rebuild-check-jobs".to_owned(),
@@ -37,7 +38,8 @@ fn main() -> Result<(), Box<dyn Error>> {
         exclusive: false,
         auto_delete: false,
         no_wait: false,
-    })?;
+    })
+    .await?;
 
     let queue_name = String::from("mass-rebuild-check-inputs");
     chan.declare_queue(easyamqp::QueueConfig {
@@ -47,16 +49,20 @@ fn main() -> Result<(), Box<dyn Error>> {
         exclusive: false,
         auto_delete: false,
         no_wait: false,
-    })?;
+    })
+    .await?;
 
     chan.bind_queue(easyamqp::BindQueueConfig {
         queue: queue_name.clone(),
         exchange: "github-events".to_owned(),
         routing_key: Some("pull_request.nixos/nixpkgs".to_owned()),
         no_wait: false,
-    })?;
+    })
+    .await?;
 
-    let handle = easylapin::WorkerChannel(chan).consume(
+    info!("Fetching jobs from {}", &queue_name);
+    easylapin::WorkerChannel(chan)
+        .consume(
             tasks::evaluationfilter::EvaluationFilterWorker::new(cfg.acl()),
             easyamqp::ConsumeConfig {
                 queue: queue_name.clone(),
@@ -66,10 +72,8 @@ fn main() -> Result<(), Box<dyn Error>> {
                 no_wait: false,
                 exclusive: false,
             },
-    )?;
-
-    info!("Fetching jobs from {}", &queue_name);
-    task::block_on(handle);
+        )
+        .await?;
 
     drop(conn); // Close connection.
     info!("Closed the session... EOF");
ofborg/src/bin/gerrit-event-streamer.rs (new file, 97 lines)
@@ -0,0 +1,97 @@
+/// This is a Gerrit event streamer into AMQP
+/// It declares a `gerrit-events` exchange where events are published per topic (i.e. type).
+/// The list of event type listened to is static.
+use std::env;
+use std::error::Error;
+
+use futures::{pin_mut, StreamExt};
+use lapin::options::BasicPublishOptions;
+use lapin::BasicProperties;
+use ofborg::vcs::gerrit::ssh::GerritSSHApi;
+use ofborg::worker::prepare_queue_message;
+use tracing::info;
+
+use ofborg::config;
+use ofborg::easyamqp::{self, ChannelExt};
+use ofborg::easylapin;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+    ofborg::setup_log();
+
+    let arg = env::args()
+        .nth(1)
+        .expect("usage: gerrit-events-streamer <config>");
+    let cfg = config::load(arg.as_ref());
+
+    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
+    let mut chan = conn.create_channel().await?;
+
+    let exchange_name = "gerrit-events";
+
+    chan.declare_exchange(easyamqp::ExchangeConfig {
+        exchange: exchange_name.to_owned(),
+        exchange_type: easyamqp::ExchangeType::Topic,
+        passive: false,
+        durable: true,
+        auto_delete: false,
+        no_wait: false,
+        internal: false,
+    })
+    .await?;
+
+    info!("Publishing events from Gerrit into {}", &exchange_name);
+
+    let gerrit_cfg = cfg
+        .gerrit
+        .expect("Gerrit event streamer requires Gerrit configuration");
+    let mut gerrit_api = GerritSSHApi::new(
+        gerrit_cfg.ssh_private_key_file,
+        &format!("ssh://{}:{}", gerrit_cfg.instance_uri, gerrit_cfg.ssh_port),
+    )
+    .await;
+
+    let routing_key = "abc";
+
+    let event_stream = gerrit_api.stream_events().await.unwrap();
+    pin_mut!(event_stream);
+    loop {
+        let raw_evt = event_stream.next().await;
+        tracing::debug!("{:?}", raw_evt);
+
+        match raw_evt {
+            Some(Ok(event)) => {
+                println!("{:#?}", event);
+                let queue_message =
+                    prepare_queue_message(Some(exchange_name), Some(routing_key), &event);
+                let props = BasicProperties::default()
+                    .with_delivery_mode(2)
+                    .with_content_type("application/json".into());
+
+                match chan
+                    .basic_publish(
+                        exchange_name,
+                        routing_key,
+                        BasicPublishOptions::default(),
+                        &queue_message.content,
+                        props,
+                    )
+                    .await
+                {
+                    Ok(_confirmation) => {
+                        tracing::debug!("Gerrit event published in the exchange");
+                    }
+                    Err(err) => {
+                        tracing::error!("Failed to publish gerrit event: {}", err);
+                    }
+                }
+            }
+            Some(Err(_err)) => {
+                // notify the event
+            }
+            None => {
+                // notify the event
+            }
+        }
+    }
+}
@@ -1,78 +0,0 @@
-use std::env;
-use std::error::Error;
-
-use async_std::task;
-use tracing::info;
-
-use ofborg::config;
-use ofborg::easyamqp::{self, ChannelExt, ConsumerExt};
-use ofborg::easylapin;
-use ofborg::tasks;
-
-fn main() -> Result<(), Box<dyn Error>> {
-    ofborg::setup_log();
-
-    let arg = env::args()
-        .nth(1)
-        .expect("usage: github-comment-filter <config>");
-    let cfg = config::load(arg.as_ref());
-
-    let conn = easylapin::from_config(&cfg.rabbitmq)?;
-    let mut chan = task::block_on(conn.create_channel())?;
-
-    chan.declare_exchange(easyamqp::ExchangeConfig {
-        exchange: "github-events".to_owned(),
-        exchange_type: easyamqp::ExchangeType::Topic,
-        passive: false,
-        durable: true,
-        auto_delete: false,
-        no_wait: false,
-        internal: false,
-    })?;
-
-    chan.declare_exchange(easyamqp::ExchangeConfig {
-        exchange: "build-jobs".to_owned(),
-        exchange_type: easyamqp::ExchangeType::Fanout,
-        passive: false,
-        durable: true,
-        auto_delete: false,
-        no_wait: false,
-        internal: false,
-    })?;
-
-    let queue_name = "build-inputs";
-    chan.declare_queue(easyamqp::QueueConfig {
-        queue: queue_name.to_owned(),
-        passive: false,
-        durable: true,
-        exclusive: false,
-        auto_delete: false,
-        no_wait: false,
-    })?;
-
-    chan.bind_queue(easyamqp::BindQueueConfig {
-        queue: "build-inputs".to_owned(),
-        exchange: "github-events".to_owned(),
-        routing_key: Some("issue_comment.*".to_owned()),
-        no_wait: false,
-    })?;
-
-    let handle = easylapin::WorkerChannel(chan).consume(
-        tasks::githubcommentfilter::GitHubCommentWorker::new(cfg.acl(), cfg.github()),
-        easyamqp::ConsumeConfig {
-            queue: "build-inputs".to_owned(),
-            consumer_tag: format!("{}-github-comment-filter", cfg.whoami()),
-            no_local: false,
-            no_ack: false,
-            no_wait: false,
-            exclusive: false,
-        },
-    )?;
-
-    info!("Fetching jobs from {}", &queue_name);
-    task::block_on(handle);
-
-    drop(conn); // Close connection.
-    info!("Closed the session... EOF");
-    Ok(())
-}
@@ -1,66 +0,0 @@
-use std::env;
-use std::error::Error;
-
-use async_std::task;
-use tracing::info;
-
-use ofborg::config;
-use ofborg::easyamqp::{self, ChannelExt, ConsumerExt};
-use ofborg::easylapin;
-use ofborg::tasks;
-
-fn main() -> Result<(), Box<dyn Error>> {
-    ofborg::setup_log();
-
-    let arg = env::args()
-        .nth(1)
-        .expect("usage: github-comment-poster <config>");
-    let cfg = config::load(arg.as_ref());
-
-    let conn = easylapin::from_config(&cfg.rabbitmq)?;
-    let mut chan = task::block_on(conn.create_channel())?;
-
-    chan.declare_exchange(easyamqp::ExchangeConfig {
-        exchange: "build-results".to_owned(),
-        exchange_type: easyamqp::ExchangeType::Fanout,
-        passive: false,
-        durable: true,
-        auto_delete: false,
-        no_wait: false,
-        internal: false,
-    })?;
-
-    chan.declare_queue(easyamqp::QueueConfig {
-        queue: "build-results".to_owned(),
-        passive: false,
-        durable: true,
-        exclusive: false,
-        auto_delete: false,
-        no_wait: false,
-    })?;
-
-    chan.bind_queue(easyamqp::BindQueueConfig {
-        queue: "build-results".to_owned(),
-        exchange: "build-results".to_owned(),
-        routing_key: None,
-        no_wait: false,
-    })?;
-
-    let handle = easylapin::WorkerChannel(chan).consume(
-        tasks::githubcommentposter::GitHubCommentPoster::new(cfg.github_app_vendingmachine()),
-        easyamqp::ConsumeConfig {
-            queue: "build-results".to_owned(),
-            consumer_tag: format!("{}-github-comment-poster", cfg.whoami()),
-            no_local: false,
-            no_ack: false,
-            no_wait: false,
-            exclusive: false,
-        },
-    )?;
-
-    task::block_on(handle);
-
-    drop(conn); // Close connection.
-    info!("Closed the session... EOF");
-    Ok(())
-}
ofborg/src/bin/listen-gerrit-events.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
+/// This is a Gerrit listener for events which puts them on stdout for debugging purposes.
+/// The list of event type listened to is static.
+use std::env;
+use std::error::Error;
+
+use futures::{pin_mut, StreamExt};
+use ofborg::vcs::gerrit::ssh::GerritSSHApi;
+use tracing::info;
+
+use ofborg::config;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+    ofborg::setup_log();
+
+    let arg = env::args()
+        .nth(1)
+        .expect("usage: listen-gerrit-events <config>");
+    let cfg = config::load(arg.as_ref());
+
+    let gerrit_cfg = cfg
+        .gerrit
+        .expect("Gerrit event streaming requires a Gerrit configuration");
+    let gerrit_ssh_uri = format!("ssh://{}:{}", gerrit_cfg.instance_uri, gerrit_cfg.ssh_port);
+    info!("Listening events from Gerrit on {}", gerrit_ssh_uri);
+    let mut gerrit_api = GerritSSHApi::new(gerrit_cfg.ssh_private_key_file, &gerrit_ssh_uri).await;
+
+    let event_stream = gerrit_api.stream_events().await.unwrap();
+    pin_mut!(event_stream);
+    loop {
+        let thing = event_stream.next().await;
+        println!("{:?}", thing);
+        if let Some(Ok(event)) = thing {
+            println!("{:#?}", event);
+        }
+    }
+}
@@ -2,7 +2,6 @@ use std::env;
 use std::error::Error;
 use std::path::PathBuf;
 
-use async_std::task;
 use tracing::info;
 
 use ofborg::config;
@@ -10,7 +9,8 @@ use ofborg::easyamqp::{self, ChannelExt, ConsumerExt};
 use ofborg::easylapin;
 use ofborg::tasks;
 
-fn main() -> Result<(), Box<dyn Error>> {
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
     ofborg::setup_log();
 
     let arg = env::args()
@@ -18,8 +18,8 @@ fn main() -> Result<(), Box<dyn Error>> {
         .expect("usage: log-message-collector <config>");
     let cfg = config::load(arg.as_ref());
 
-    let conn = easylapin::from_config(&cfg.rabbitmq)?;
-    let mut chan = task::block_on(conn.create_channel())?;
+    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
+    let mut chan = conn.create_channel().await?;
 
     chan.declare_exchange(easyamqp::ExchangeConfig {
         exchange: "logs".to_owned(),
@@ -29,7 +29,8 @@ fn main() -> Result<(), Box<dyn Error>> {
         auto_delete: false,
         no_wait: false,
         internal: false,
-    })?;
+    })
+    .await?;
 
     let queue_name = "".to_owned();
     chan.declare_queue(easyamqp::QueueConfig {
@@ -39,17 +40,21 @@ fn main() -> Result<(), Box<dyn Error>> {
         exclusive: true,
         auto_delete: true,
         no_wait: false,
-    })?;
+    })
+    .await?;
 
     chan.bind_queue(easyamqp::BindQueueConfig {
         queue: queue_name.clone(),
         exchange: "logs".to_owned(),
         routing_key: Some("*.*".to_owned()),
         no_wait: false,
-    })?;
+    })
+    .await?;
 
+    info!("Fetching jobs from {}", &queue_name);
+
     // Regular channel, we want prefetching here.
-    let handle = chan.consume(
+    chan.consume(
         tasks::log_message_collector::LogMessageCollector::new(
             PathBuf::from(cfg.log_storage.clone().unwrap().path),
             100,
@@ -62,10 +67,8 @@ fn main() -> Result<(), Box<dyn Error>> {
             no_wait: false,
             exclusive: false,
         },
-    )?;
-
-    info!("Fetching jobs from {}", &queue_name);
-    task::block_on(handle);
+    )
+    .await?;
 
     drop(conn); // Close connection.
     info!("Closed the session... EOF");
@@ -3,7 +3,8 @@ use std::error::Error;
 use std::path::Path;
 use std::process;
 
-use async_std::task;
+use ofborg::config::VCSConfig;
+use ofborg::tasks::evaluate::SupportedVCS;
 use tracing::{error, info};
 
 use ofborg::checkout;
@@ -15,7 +16,8 @@ use ofborg::tasks;
 
 // FIXME: remove with rust/cargo update
 #[allow(clippy::cognitive_complexity)]
-fn main() -> Result<(), Box<dyn Error>> {
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
     ofborg::setup_log();
 
     let arg = env::args().nth(1).expect("usage: mass-rebuilder <config>");
@@ -32,14 +34,14 @@ fn main() -> Result<(), Box<dyn Error>> {
         process::exit(1);
     };
 
-    let conn = easylapin::from_config(&cfg.rabbitmq)?;
-    let mut chan = task::block_on(conn.create_channel())?;
+    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
+    let mut chan = conn.create_channel().await?;
 
     let root = Path::new(&cfg.checkout.root);
     let cloner = checkout::cached_cloner(&root.join(cfg.runner.instance.to_string()));
     let nix = cfg.nix();
 
-    let events = stats::RabbitMq::from_lapin(&cfg.whoami(), task::block_on(conn.create_channel())?);
+    let events = stats::RabbitMq::from_lapin(&cfg.whoami(), conn.create_channel().await?);
 
     let queue_name = String::from("mass-rebuild-check-jobs");
     chan.declare_queue(easyamqp::QueueConfig {
@@ -49,14 +51,21 @@ fn main() -> Result<(), Box<dyn Error>> {
         exclusive: false,
         auto_delete: false,
         no_wait: false,
-    })?;
+    })
+    .await?;
 
-    let handle = easylapin::WorkerChannel(chan).consume(
+    let vcs_data = match cfg.vcs {
+        VCSConfig::Gerrit => SupportedVCS::Gerrit,
+    };
+
+    info!("Fetching jobs from {}", queue_name);
+
+    easylapin::WorkerChannel(chan)
+        .consume(
             tasks::evaluate::EvaluationWorker::new(
                 cloner,
                 &nix,
-                cfg.github(),
-                cfg.github_app_vendingmachine(),
+                vcs_data,
                 cfg.acl(),
                 cfg.runner.identity.clone(),
                 events,
@@ -69,10 +78,8 @@ fn main() -> Result<(), Box<dyn Error>> {
                 no_wait: false,
                 exclusive: false,
             },
-    )?;
-
-    info!("Fetching jobs from {}", queue_name);
-    task::block_on(handle);
+        )
+        .await?;
 
     drop(conn); // Close connection.
     info!("Closed the session... EOF");
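Note: VCSConfig comes from the parsed "vcs" key in dev.config.json and is mapped onto the evaluator's SupportedVCS. The enum definitions are not part of this hunk; a plausible shape, assuming serde deserialization and a single supported backend for now:

    use serde::Deserialize;

    // Assumed shape of the config-side enum; the real definition lives in
    // ofborg::config and may differ.
    #[derive(Debug, Deserialize)]
    pub enum VCSConfig {
        Gerrit,
    }

    // Assumed shape of the evaluator-side enum (ofborg::tasks::evaluate).
    #[derive(Debug, Clone)]
    pub enum SupportedVCS {
        Gerrit,
    }

    fn main() {
        // The match in mass-rebuilder.rs: exhaustive today, a natural
        // extension point if more backends are added later.
        let vcs_data = match VCSConfig::Gerrit {
            VCSConfig::Gerrit => SupportedVCS::Gerrit,
        };
        println!("{vcs_data:?}");
    }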
ofborg/src/bin/ofborgctl.rs — new file (151 lines):

```rust
use std::error::Error;
use std::io::Read;

use lapin::message::Delivery;
use lapin::BasicProperties;

use lapin::Channel;
use ofborg::config;
use ofborg::easylapin;
use ofborg::notifyworker::NotificationReceiver;
use ofborg::utils::pastebin::Pastebin;
use ofborg::worker;

use clap::{Parser, Subcommand};
use serde::Serialize;

/// Control interface for OfBorg CI
#[derive(Parser, Debug)]
#[command(name = "ofborgctl")]
struct Cli {
    /// Path to the configuration file
    #[arg(short, long, value_name = "CONFIG")]
    config: String,

    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand, Debug)]
enum Commands {
    /// Commands related to pastebin
    Pastebin {
        #[command(subcommand)]
        action: PastebinCommands,
    },
    /// Commands related to checks
    Checks {
        #[command(subcommand)]
        action: CheckCommands,
    },
    /// Commands related to status
    Status {
        #[command(subcommand)]
        action: StatusCommands,
    },
    /// Re-evaluate a specific change
    ReEvaluate {
        /// Change ID to re-evaluate
        change_id: String,
    },
    /// Relabel a specific change
    Relabel {
        /// Change ID to relabel
        change_id: String,
    },
}

#[derive(Subcommand, Debug)]
enum PastebinCommands {
    /// Create a pastebin
    Create { title: String },
}

#[derive(Subcommand, Debug)]
enum CheckCommands {
    /// Change checks
    Change,
    /// List checks
    List,
}

#[derive(Subcommand, Debug)]
enum StatusCommands {
    /// List statuses
    List,
    /// Change status
    Change,
}

struct Publisher<'a> {
    recv: easylapin::ChannelNotificationReceiver<'a>,
}

impl<'a> Publisher<'a> {
    fn new(chan: &'a mut Channel) -> Self {
        let delivery = Box::new(Delivery {
            delivery_tag: 0,
            exchange: "no-exchange".into(),
            routing_key: "".into(),
            redelivered: false,
            properties: BasicProperties::default(),
            data: vec![],
            acker: Default::default(),
        });

        Self {
            // We don't have the choice, it's quite ridiculous.
            // FIXME: make ChannelNotificationReceiver be split into something that can only
            // publish.
            recv: easylapin::ChannelNotificationReceiver::new(chan, Box::leak(delivery)),
        }
    }

    async fn publish_serde_action<T>(
        &mut self,
        exchange: Option<String>,
        routing_key: Option<String>,
        msg: &T,
    ) where
        T: Serialize + ?Sized,
    {
        self.recv
            .tell(worker::publish_serde_action(&exchange, &routing_key, msg))
            .await;
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    ofborg::setup_log();

    let args = Cli::parse();
    let cfg = config::load(args.config.as_ref());

    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
    let mut chan = conn.create_channel().await?;

    match args.command {
        Commands::Pastebin { action } => match action {
            PastebinCommands::Create { title } => {
                let mut publisher = Publisher::new(&mut chan);
                let mut contents: String = String::new();
                std::io::stdin()
                    .read_to_string(&mut contents)
                    .expect("Failed to read pastebin contents");

                publisher
                    .publish_serde_action(
                        None,
                        Some("pastebin-log".to_owned()),
                        &Pastebin { title, contents },
                    )
                    .await;
            }
        },

        _ => todo!(),
    }

    Ok(())
}
```
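A plausible invocation, going by the clap derive above (`--config`/`-c` comes from the `config` field; `pastebin create` takes a title and reads the paste body from stdin):

```
ofborgctl --config ./config.json pastebin create "eval log" < eval.log
```

Since the action is published with no exchange and the `pastebin-log` routing key, it goes through the default exchange straight to the queue of that name, which the pastebin worker below declares and consumes.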
ofborg/src/bin/pastebin-worker.rs — new file (75 lines):

```rust
use std::env;
use std::error::Error;

use ofborg::config;
use ofborg::easyamqp;
use ofborg::easyamqp::ChannelExt;
use ofborg::easyamqp::ConsumerExt;
use ofborg::easylapin;
use ofborg::tasks;
use tracing::info;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    ofborg::setup_log();

    let arg = env::args().nth(1).expect("usage: pastebin-worker <config>");
    let cfg = config::load(arg.as_ref());

    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
    let mut chan = conn.create_channel().await?;

    let queue_name = "pastebin-log".to_owned();

    chan.declare_exchange(easyamqp::ExchangeConfig {
        exchange: "pastebin-log".to_owned(),
        exchange_type: easyamqp::ExchangeType::Topic,
        passive: false,
        durable: true,
        auto_delete: false,
        no_wait: false,
        internal: false,
    })
    .await?;

    chan.declare_queue(easyamqp::QueueConfig {
        queue: queue_name.clone(),
        passive: false,
        durable: true,
        exclusive: false,
        auto_delete: false,
        no_wait: false,
    })
    .await?;

    chan.bind_queue(easyamqp::BindQueueConfig {
        queue: queue_name.clone(),
        exchange: "pastebin-log".to_owned(),
        routing_key: None,
        no_wait: false,
    })
    .await?;

    info!("Fetching jobs from {}", &queue_name);

    easylapin::WorkerChannel(chan)
        .consume(
            tasks::pastebin_collector::PastebinCollector::new(
                cfg.pastebin.clone().root,
                cfg.pastebin.clone().db,
            ),
            easyamqp::ConsumeConfig {
                queue: queue_name.clone(),
                consumer_tag: format!("{}-pastebin-worker", cfg.whoami()),
                no_local: false,
                no_ack: false,
                no_wait: false,
                exclusive: false,
            },
        )
        .await?;

    drop(conn); // Close connection.
    info!("Closed the session... EOF");
    Ok(())
}
```
ofborg/src/bin/statcheck-worker.rs — new file (83 lines):

```rust
/// Statuses and checks worker
/// - will keep a database of changes
///   - their statuses
///   - their checks
/// - is VCS/CI agnostic
use std::env;
use std::error::Error;

use ofborg::config;
use ofborg::easyamqp;
use ofborg::easyamqp::ChannelExt;
use ofborg::easyamqp::ConsumerExt;
use ofborg::easylapin;
use ofborg::tasks;
use tracing::info;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    ofborg::setup_log();

    let arg = env::args()
        .nth(1)
        .expect("usage: statcheck-worker <config>");
    let cfg = config::load(arg.as_ref());

    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
    let mut chan = conn.create_channel().await?;

    // an RPC queue for verbs
    let api_queue_name = "statcheck-api".to_owned();
    // an event queue to be notified about statuses & checks changes.
    let event_queue_name = "statcheck-events".to_owned();

    chan.declare_exchange(easyamqp::ExchangeConfig {
        exchange: api_queue_name.clone(),
        exchange_type: easyamqp::ExchangeType::Topic,
        passive: false,
        durable: true,
        auto_delete: false,
        no_wait: false,
        internal: false,
    })
    .await?;

    chan.declare_queue(easyamqp::QueueConfig {
        queue: api_queue_name.clone(),
        passive: false,
        durable: true,
        exclusive: false,
        auto_delete: false,
        no_wait: false,
    })
    .await?;

    chan.bind_queue(easyamqp::BindQueueConfig {
        queue: api_queue_name.clone(),
        exchange: api_queue_name.clone(),
        routing_key: None,
        no_wait: false,
    })
    .await?;

    info!("Waiting for API calls on {}", api_queue_name);
    info!("Notifying of new changes on {}", event_queue_name);

    easylapin::WorkerChannel(chan)
        .consume(
            tasks::status_check_collector::StatusCheckCollector::new(cfg.statcheck.clone().db),
            easyamqp::ConsumeConfig {
                queue: api_queue_name.clone(),
                consumer_tag: format!("{}-{}", cfg.whoami(), api_queue_name),
                no_local: false,
                no_ack: false,
                no_wait: false,
                exclusive: false,
            },
        )
        .await?;

    drop(conn); // Close connection.
    info!("Closed the session... EOF");
    Ok(())
}
```
```diff
@@ -1,24 +1,24 @@
 use std::env;
 use std::error::Error;
-use std::thread;

-use async_std::task;
-use hyper::server::{Request, Response, Server};
+use axum::routing::get;
+use axum::Router;
 use tracing::info;

 use ofborg::easyamqp::{ChannelExt, ConsumerExt};
 use ofborg::{config, easyamqp, easylapin, stats, tasks};

-fn main() -> Result<(), Box<dyn Error>> {
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
     ofborg::setup_log();

     let arg = env::args().nth(1).expect("usage: stats <config>");
     let cfg = config::load(arg.as_ref());

-    let conn = easylapin::from_config(&cfg.rabbitmq)?;
-    let mut chan = task::block_on(conn.create_channel())?;
+    let conn = easylapin::from_config(&cfg.rabbitmq).await?;
+    let mut chan = conn.create_channel().await?;

-    let events = stats::RabbitMq::from_lapin(&cfg.whoami(), task::block_on(conn.create_channel())?);
+    let events = stats::RabbitMq::from_lapin(&cfg.whoami(), conn.create_channel().await?);

     let metrics = stats::MetricCollector::new();
     let collector = tasks::statscollector::StatCollectorWorker::new(events, metrics.clone());
@@ -31,7 +31,8 @@ fn main() -> Result<(), Box<dyn Error>> {
         auto_delete: false,
         no_wait: false,
         internal: false,
-    })?;
+    })
+    .await?;

     let queue_name = String::from("stats-events");
     chan.declare_queue(easyamqp::QueueConfig {
@@ -41,16 +42,30 @@ fn main() -> Result<(), Box<dyn Error>> {
         exclusive: false,
         auto_delete: false,
         no_wait: false,
-    })?;
+    })
+    .await?;

     chan.bind_queue(easyamqp::BindQueueConfig {
         queue: queue_name.clone(),
         exchange: "stats".to_owned(),
         routing_key: None,
         no_wait: false,
-    })?;
+    })
+    .await?;

-    let handle = chan.consume(
+    tokio::spawn(async {
+        let addr = "0.0.0.0:9898";
+        info!("listening addr {:?}", addr);
+        let app = Router::new().route(
+            "/metrics",
+            get(|| async move { metrics.prometheus_output() }),
+        );
+        let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
+        axum::serve(listener, app).await.unwrap()
+    });
+
+    info!("Fetching jobs from {}", &queue_name);
+    chan.consume(
         collector,
         easyamqp::ConsumeConfig {
             queue: "stats-events".to_owned(),
@@ -60,19 +75,8 @@ fn main() -> Result<(), Box<dyn Error>> {
             no_wait: false,
             exclusive: false,
         },
-    )?;
+    )
+    .await?;

-    thread::spawn(|| {
-        let addr = "0.0.0.0:9898";
-        info!("listening addr {:?}", addr);
-        Server::http(addr)?.handle(move |_: Request, res: Response| {
-            res.send(metrics.prometheus_output().as_bytes()).unwrap();
-        })?;
-        Ok::<_, Box<dyn Error + Sync + Send + '_>>(())
-    });
-
-    info!("Fetching jobs from {}", &queue_name);
-
-    task::block_on(handle);
-
     drop(conn); // Close connection.
     info!("Closed the session... EOF");
```
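Externally the stats service behaves as before: the metrics endpoint still binds `0.0.0.0:9898` and serves `metrics.prometheus_output()` under `/metrics`, so `curl http://127.0.0.1:9898/metrics` remains the way to scrape it. The structural change is that the HTTP server now runs on a `tokio::spawn`ed task, while the AMQP consume loop, previously driven by `task::block_on`, is awaited directly on the main task.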
```diff
@@ -12,6 +12,7 @@ pub struct CachedCloner {
     root: PathBuf,
 }

+#[must_use]
 pub fn cached_cloner(path: &Path) -> CachedCloner {
     CachedCloner {
         root: path.to_path_buf(),
@@ -31,6 +32,7 @@ pub struct CachedProjectCo {
 }

 impl CachedCloner {
+    #[must_use]
     pub fn project(&self, name: &str, clone_url: String) -> CachedProject {
         // <root>/repo/<hash>/clone
         // <root>/repo/<hash>/clone.lock
@@ -39,7 +41,7 @@ impl CachedCloner {

         let mut new_root = self.root.clone();
         new_root.push("repo");
-        new_root.push(format!("{:x}", md5::compute(&name)));
+        new_root.push(format!("{:x}", md5::compute(name)));

         CachedProject {
             root: new_root,
@@ -101,7 +103,7 @@ impl CachedProjectCo {
         let result = Command::new("git")
             .arg("fetch")
             .arg("origin")
-            .arg(format!("+refs/pull/{}/head:pr", pr_id))
+            .arg(format!("+refs/pull/{pr_id}/head:pr"))
             .current_dir(self.clone_to())
             .stdout(Stdio::null())
             .status()?;
@@ -162,7 +164,7 @@ impl CachedProjectCo {
         let result = Command::new("git")
             .arg("log")
             .arg("--format=format:%s")
-            .arg(format!("HEAD..{}", commit))
+            .arg(format!("HEAD..{commit}"))
             .current_dir(self.clone_to())
             .output()?;

@@ -171,7 +173,7 @@ impl CachedProjectCo {
         if result.status.success() {
             Ok(String::from_utf8_lossy(&result.stdout)
                 .lines()
-                .map(|l| l.to_owned())
+                .map(std::borrow::ToOwned::to_owned)
                 .collect())
         } else {
             Err(Error::new(
@@ -187,7 +189,7 @@ impl CachedProjectCo {
         let result = Command::new("git")
             .arg("diff")
             .arg("--name-only")
-            .arg(format!("HEAD...{}", commit))
+            .arg(format!("HEAD...{commit}"))
             .current_dir(self.clone_to())
             .output()?;

@@ -196,7 +198,7 @@ impl CachedProjectCo {
         if result.status.success() {
             Ok(String::from_utf8_lossy(&result.stdout)
                 .lines()
-                .map(|l| l.to_owned())
+                .map(std::borrow::ToOwned::to_owned)
                 .collect())
         } else {
             Err(Error::new(
@@ -278,8 +280,8 @@ mod tests {
         .expect("building the test PR failed");

         let stderr =
-            String::from_utf8(output.stderr).unwrap_or_else(|err| format!("warning: {}", err));
-        println!("{}", stderr);
+            String::from_utf8(output.stderr).unwrap_or_else(|err| format!("warning: {err}"));
+        println!("{stderr}");

         let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
         return hash.trim().to_owned();
```
```diff
@@ -14,7 +14,7 @@ pub struct Lock {

 impl Lock {
     pub fn unlock(&mut self) {
-        self.lock = None
+        self.lock = None;
     }
 }

@@ -33,20 +33,19 @@ pub trait GitClonable {
                 warn!("Failed to create lock file {:?}: {}", self.lock_path(), e);
                 Err(e)
             }
-            Ok(lock) => match lock.lock_exclusive() {
-                Err(e) => {
+            Ok(lock) => {
+                if let Err(e) = lock.lock_exclusive() {
                     warn!(
                         "Failed to get exclusive lock on file {:?}: {}",
                         self.lock_path(),
                         e
                     );
                     Err(e)
-                }
-                Ok(_) => {
+                } else {
                     debug!("Got lock on {:?}", self.lock_path());
                     Ok(Lock { lock: Some(lock) })
                 }
-            },
+            }
         }
     }

@@ -67,8 +66,8 @@ pub trait GitClonable {
         let result = Command::new("git")
             .arg("clone")
             .args(self.extra_clone_args())
-            .arg(&self.clone_from())
-            .arg(&self.clone_to())
+            .arg(self.clone_from())
+            .arg(self.clone_to())
             .stdout(Stdio::null())
             .status()?;
```
```diff
@@ -1,6 +1,7 @@
 use nom::types::CompleteStr;
 use tracing::warn;

+#[must_use]
 pub fn parse(text: &str) -> Option<Vec<Instruction>> {
     let instructions: Vec<Instruction> = text
         .lines()
```
```diff
@@ -1,102 +0,0 @@
-use futures_util::future::TryFutureExt;
-use tracing::warn;
-
-pub struct CommitStatus {
-    api: hubcaps::statuses::Statuses,
-    sha: String,
-    context: String,
-    description: String,
-    url: String,
-}
-
-impl CommitStatus {
-    pub fn new(
-        api: hubcaps::statuses::Statuses,
-        sha: String,
-        context: String,
-        description: String,
-        url: Option<String>,
-    ) -> CommitStatus {
-        let mut stat = CommitStatus {
-            api,
-            sha,
-            context,
-            description,
-            url: "".to_owned(),
-        };
-
-        stat.set_url(url);
-
-        stat
-    }
-
-    pub fn set_url(&mut self, url: Option<String>) {
-        self.url = url.unwrap_or_else(|| String::from(""))
-    }
-
-    pub fn set_with_description(
-        &mut self,
-        description: &str,
-        state: hubcaps::statuses::State,
-    ) -> Result<(), CommitStatusError> {
-        self.set_description(description.to_owned());
-        self.set(state)
-    }
-
-    pub fn set_description(&mut self, description: String) {
-        self.description = description;
-    }
-
-    pub fn set(&self, state: hubcaps::statuses::State) -> Result<(), CommitStatusError> {
-        let desc = if self.description.len() >= 140 {
-            warn!(
-                "description is over 140 char; truncating: {:?}",
-                &self.description
-            );
-            self.description.chars().take(140).collect()
-        } else {
-            self.description.clone()
-        };
-        async_std::task::block_on(
-            self.api
-                .create(
-                    self.sha.as_ref(),
-                    &hubcaps::statuses::StatusOptions::builder(state)
-                        .context(self.context.clone())
-                        .description(desc)
-                        .target_url(self.url.clone())
-                        .build(),
-                )
-                .map_ok(|_| ())
-                .map_err(|e| CommitStatusError::from(e)),
-        )
-    }
-}
-
-#[derive(Debug)]
-pub enum CommitStatusError {
-    ExpiredCreds(hubcaps::Error),
-    MissingSha(hubcaps::Error),
-    Error(hubcaps::Error),
-}
-
-impl From<hubcaps::Error> for CommitStatusError {
-    fn from(e: hubcaps::Error) -> CommitStatusError {
-        use http::status::StatusCode;
-        use hubcaps::Error;
-        match &e {
-            Error::Fault { code, error }
-                if code == &StatusCode::UNAUTHORIZED && error.message == "Bad credentials" =>
-            {
-                CommitStatusError::ExpiredCreds(e)
-            }
-            Error::Fault { code, error }
-                if code == &StatusCode::UNPROCESSABLE_ENTITY
-                    && error.message.starts_with("No commit found for SHA:") =>
-            {
-                CommitStatusError::MissingSha(e)
-            }
-            _otherwise => CommitStatusError::Error(e),
-        }
-    }
-}
```
```diff
@@ -1,16 +1,19 @@
 use crate::acl;
 use crate::nix::Nix;

-use std::collections::HashMap;
 use std::fmt;
 use std::fs::File;
-use std::io::{BufReader, Read};
+use std::io::Read;
 use std::marker::PhantomData;
 use std::path::{Path, PathBuf};

-use hubcaps::{Credentials, Github, InstallationTokenGenerator, JWTCredentials};
 use serde::de::{self, Deserialize, Deserializer};
-use tracing::{debug, error, info, warn};
+use tracing::error;

+#[derive(Serialize, Deserialize, Debug)]
+pub enum VCSConfig {
+    Gerrit,
+}
+
 #[derive(Serialize, Deserialize, Debug)]
 pub struct Config {
@@ -19,9 +22,13 @@ pub struct Config {
     pub checkout: CheckoutConfig,
     pub nix: NixConfig,
     pub rabbitmq: RabbitMqConfig,
-    pub github: Option<GithubConfig>,
-    pub github_app: Option<GithubAppConfig>,
+    pub vcs: VCSConfig,
+    pub statcheck: StatusCheckConfig,
+    pub pastebin: PastebinConfig,
     pub log_storage: Option<LogStorage>,
+
+    // Gerrit-specific configuration if vcs == Gerrit.
+    pub gerrit: Option<GerritConfig>,
 }

 #[derive(Serialize, Deserialize, Debug)]
@@ -29,13 +36,29 @@ pub struct FeedbackConfig {
     pub full_logs: bool,
 }

+#[derive(Serialize, Deserialize, Debug, Clone)]
+pub struct StatusCheckConfig {
+    #[serde(deserialize_with = "deserialize_and_expand_pathbuf")]
+    pub db: PathBuf,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+pub struct PastebinConfig {
+    #[serde(deserialize_with = "deserialize_and_expand_pathbuf")]
+    pub root: PathBuf,
+    #[serde(deserialize_with = "deserialize_and_expand_pathbuf")]
+    pub db: PathBuf,
+    // max_disk_size?
+    // auto_expiry?
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct RabbitMqConfig {
     pub ssl: bool,
     pub host: String,
     pub virtualhost: Option<String>,
     pub username: String,
-    pub password_file: PathBuf,
+    pub password_file: Option<PathBuf>,
 }

 #[derive(Serialize, Deserialize, Debug)]
@@ -47,15 +70,18 @@ pub struct NixConfig {
     pub initial_heap_size: Option<String>,
 }

-#[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct GithubConfig {
-    pub token_file: PathBuf,
+const fn default_gerrit_ssh_port() -> u16 {
+    29418
 }

 #[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct GithubAppConfig {
-    pub app_id: u64,
-    pub private_key: PathBuf,
+pub struct GerritConfig {
+    // For all requests.
+    #[serde(deserialize_with = "deserialize_and_expand_pathbuf")]
+    pub ssh_private_key_file: PathBuf,
+    pub instance_uri: String,
+    #[serde(default = "default_gerrit_ssh_port")]
+    pub ssh_port: u16,
 }

 #[derive(Serialize, Deserialize, Debug, Clone)]
@@ -88,14 +114,17 @@ pub struct RunnerConfig {

 #[derive(Serialize, Deserialize, Debug)]
 pub struct CheckoutConfig {
+    #[serde(deserialize_with = "deserialize_and_expand_string")]
     pub root: String,
 }

 impl Config {
+    #[must_use]
     pub fn whoami(&self) -> String {
         format!("{}-{}", self.runner.identity, self.nix.system.join(","))
     }

+    #[must_use]
     pub fn acl(&self) -> acl::Acl {
         let repos = self
             .runner
@@ -117,25 +146,6 @@ impl Config {
         acl::Acl::new(repos, trusted_users)
     }

-    pub fn github(&self) -> Github {
-        let token = std::fs::read_to_string(self.github.clone().unwrap().token_file)
-            .expect("Couldn't read from GitHub token file");
-        Github::new(
-            "github.com/grahamc/ofborg",
-            // tls configured hyper client
-            Credentials::Token(token),
-        )
-        .expect("Unable to create a github client instance")
-    }
-
-    pub fn github_app_vendingmachine(&self) -> GithubAppVendingMachine {
-        GithubAppVendingMachine {
-            conf: self.github_app.clone().unwrap(),
-            id_cache: HashMap::new(),
-            client_cache: HashMap::new(),
-        }
-    }
-
     pub fn nix(&self) -> Nix {
         if self.nix.build_timeout_seconds < 1200 {
             error!(?self.nix.build_timeout_seconds, "Please set build_timeout_seconds to at least 1200");
@@ -157,9 +167,15 @@ impl Config {

 impl RabbitMqConfig {
     pub fn as_uri(&self) -> Result<String, std::io::Error> {
-        let password_file_as_str = self.password_file.to_string_lossy();
-        let expanded_password_file = shellexpand::env(&password_file_as_str).expect("Failed to expand the password-file configuration string");
-        let password = std::fs::read_to_string(expanded_password_file.as_ref())?;
+        let password;
+        if let Some(password_file) = &self.password_file {
+            let password_file_as_str = password_file.to_string_lossy();
+            let expanded_password_file = shellexpand::env(&password_file_as_str)
+                .expect("Failed to expand the password-file configuration string");
+            password = std::fs::read_to_string(expanded_password_file.as_ref())?;
+        } else {
+            password = String::new();
+        }
         let uri = format!(
             "{}://{}:{}@{}/{}",
             if self.ssl { "amqps" } else { "amqp" },
@@ -172,6 +188,7 @@ impl RabbitMqConfig {
     }
 }

+#[must_use]
 pub fn load(filename: &Path) -> Config {
     let mut file = File::open(filename).unwrap();
     let mut contents = String::new();
@@ -182,68 +199,6 @@ pub fn load(filename: &Path) -> Config {
     deserialized
 }

-pub struct GithubAppVendingMachine {
-    conf: GithubAppConfig,
-    id_cache: HashMap<(String, String), Option<u64>>,
-    client_cache: HashMap<u64, Github>,
-}
-
-impl GithubAppVendingMachine {
-    fn useragent(&self) -> &'static str {
-        "github.com/grahamc/ofborg (app)"
-    }
-
-    fn jwt(&self) -> JWTCredentials {
-        let private_key_file =
-            File::open(self.conf.private_key.clone()).expect("Unable to read private_key");
-        let mut private_key_reader = BufReader::new(private_key_file);
-        let private_keys = rustls_pemfile::rsa_private_keys(&mut private_key_reader)
-            .expect("Unable to convert private_key to DER format");
-        // We can be reasonably certain that there will only be one private key in this file
-        let private_key = &private_keys[0];
-        JWTCredentials::new(self.conf.app_id, private_key.to_vec())
-            .expect("Unable to create JWTCredentials")
-    }
-
-    fn install_id_for_repo(&mut self, owner: &str, repo: &str) -> Option<u64> {
-        let useragent = self.useragent();
-        let jwt = self.jwt();
-
-        let key = (owner.to_owned(), repo.to_owned());
-
-        *self.id_cache.entry(key).or_insert_with(|| {
-            info!("Looking up install ID for {}/{}", owner, repo);
-
-            let lookup_gh = Github::new(useragent, Credentials::JWT(jwt)).unwrap();
-
-            match async_std::task::block_on(lookup_gh.app().find_repo_installation(owner, repo)) {
-                Ok(install_id) => {
-                    debug!("Received install ID {:?}", install_id);
-                    Some(install_id.id)
-                }
-                Err(e) => {
-                    warn!("Error during install ID lookup: {:?}", e);
-                    None
-                }
-            }
-        })
-    }
-
-    pub fn for_repo<'a>(&'a mut self, owner: &str, repo: &str) -> Option<&'a Github> {
-        let useragent = self.useragent();
-        let jwt = self.jwt();
-        let install_id = self.install_id_for_repo(owner, repo)?;
-
-        Some(self.client_cache.entry(install_id).or_insert_with(|| {
-            Github::new(
-                useragent,
-                Credentials::InstallationToken(InstallationTokenGenerator::new(install_id, jwt)),
-            )
-            .expect("Unable to create a github client instance")
-        }))
-    }
-}
-
 // Copied from https://stackoverflow.com/a/43627388
 fn deserialize_one_or_many<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
 where
@@ -275,3 +230,25 @@ where

     deserializer.deserialize_any(StringOrVec(PhantomData))
 }
+
+fn deserialize_and_expand_string<'de, D>(deserializer: D) -> Result<String, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let raw_literal: String = Deserialize::deserialize(deserializer)?;
+    Ok(shellexpand::env(&raw_literal)
+        .map_err(|_| serde::de::Error::custom("failed to expand the variable in a string"))?
+        .into_owned())
+}
+
+fn deserialize_and_expand_pathbuf<'de, D>(deserializer: D) -> Result<PathBuf, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let raw_literal: PathBuf = Deserialize::deserialize(deserializer)?;
+    Ok(PathBuf::from(
+        shellexpand::env(&raw_literal.to_string_lossy())
+            .map_err(|_| serde::de::Error::custom("failed to expand the variable in a path"))?
+            .into_owned(),
+    ))
+}
```
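For orientation, a sketch of a Gerrit-mode configuration with the fields this diff introduces. This is an assumption-laden example: the on-disk format used by `config::load` is not visible in this hunk (JSON is assumed), the values are placeholders, and the pre-existing sections (`checkout`, `nix`, `runner`, …) keep their current shape and are elided:

```json
{
  "rabbitmq": {
    "ssl": false,
    "host": "localhost",
    "username": "ofborg"
  },
  "vcs": "Gerrit",
  "gerrit": {
    "ssh_private_key_file": "$HOME/.ssh/ofborg_gerrit",
    "instance_uri": "gerrit.example.org"
  },
  "statcheck": { "db": "$STATE_DIR/statcheck.db" },
  "pastebin": { "root": "$STATE_DIR/pastebins", "db": "$STATE_DIR/pastebin.db" }
}
```

`password_file` and `virtualhost` may now be omitted, `ssh_port` defaults to 29418 via `default_gerrit_ssh_port`, and the `$HOME`/`$STATE_DIR` references are expanded by the `deserialize_and_expand_*` helpers above.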
```diff
@@ -1,3 +1,10 @@
+use std::fmt::Debug;
+
+use async_trait::async_trait;
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug)]
+#[allow(clippy::struct_excessive_bools)]
 pub struct ConsumeConfig {
     /// Specifies the name of the queue to consume from.
     pub queue: String,
@@ -38,6 +45,7 @@ pub struct ConsumeConfig {
     pub no_wait: bool,
 }

+#[derive(Debug)]
 pub struct BindQueueConfig {
     /// Specifies the name of the queue to bind.
     ///
@@ -81,6 +89,7 @@ pub struct BindQueueConfig {
     pub no_wait: bool,
 }

+#[derive(Debug)]
 pub enum ExchangeType {
     Topic,
     Headers,
@@ -101,6 +110,8 @@ impl From<ExchangeType> for String {
     }
 }

+#[derive(Debug)]
+#[allow(clippy::struct_excessive_bools)]
 pub struct ExchangeConfig {
     /// Exchange names starting with "amq." are reserved for
     /// pre-declared and standardised exchanges. The client MAY
@@ -181,6 +192,8 @@ pub struct ExchangeConfig {
     pub no_wait: bool,
 }

+#[derive(Debug)]
+#[allow(clippy::struct_excessive_bools)]
 pub struct QueueConfig {
     /// The queue name MAY be empty, in which case the server MUST
     /// create a new queue with a unique generated name and return
@@ -258,15 +271,27 @@ pub struct QueueConfig {
     pub no_wait: bool,
 }

+#[async_trait]
 pub trait ChannelExt {
     type Error;
-    fn declare_exchange(&mut self, config: ExchangeConfig) -> Result<(), Self::Error>;
-    fn declare_queue(&mut self, config: QueueConfig) -> Result<(), Self::Error>;
-    fn bind_queue(&mut self, config: BindQueueConfig) -> Result<(), Self::Error>;
+    async fn declare_exchange(&mut self, config: ExchangeConfig) -> Result<(), Self::Error>;
+    async fn declare_queue(&mut self, config: QueueConfig) -> Result<(), Self::Error>;
+    async fn bind_queue(&mut self, config: BindQueueConfig) -> Result<(), Self::Error>;
+    async fn send_request<
+        T: ?Sized + Serialize + Sync + Send + Debug,
+        U: for<'a> Deserialize<'a> + Debug,
+    >(
+        &mut self,
+        exchange: Option<&str>,
+        routing_key: Option<&str>,
+        msg: &T,
+    ) -> Result<U, Self::Error>;
 }

-pub trait ConsumerExt<'a, C> {
+#[async_trait]
+pub trait ConsumerExt<C> {
     type Error;
-    type Handle;
-    fn consume(self, callback: C, config: ConsumeConfig) -> Result<Self::Handle, Self::Error>;
+    async fn consume(self, callback: C, config: ConsumeConfig) -> Result<(), Self::Error>
+    where
+        C: 'async_trait;
 }
```
```diff
@@ -1,5 +1,3 @@
-use std::pin::Pin;
-
 use crate::config::RabbitMqConfig;
 use crate::easyamqp::{
     BindQueueConfig, ChannelExt, ConsumeConfig, ConsumerExt, ExchangeConfig, ExchangeType,
@@ -7,21 +5,23 @@ use crate::easyamqp::{
 };
 use crate::notifyworker::{NotificationReceiver, SimpleNotifyWorker};
 use crate::ofborg;
-use crate::worker::{Action, SimpleWorker};
+use crate::worker::{prepare_queue_message, Action, SimpleWorker};

-use async_std::future::Future;
-use async_std::stream::StreamExt;
-use async_std::task;
+use std::fmt::Debug;
+
+use async_trait::async_trait;
+use futures::StreamExt;
 use lapin::message::Delivery;
 use lapin::options::{
     BasicAckOptions, BasicConsumeOptions, BasicNackOptions, BasicPublishOptions, BasicQosOptions,
     ExchangeDeclareOptions, QueueBindOptions, QueueDeclareOptions,
 };
-use lapin::types::{AMQPValue, FieldTable};
+use lapin::types::{AMQPValue, FieldTable, ShortString};
 use lapin::{BasicProperties, Channel, Connection, ConnectionProperties, ExchangeKind};
+use serde::{Deserialize, Serialize};
 use tracing::{debug, trace};

-pub fn from_config(cfg: &RabbitMqConfig) -> Result<Connection, lapin::Error> {
+pub async fn from_config(cfg: &RabbitMqConfig) -> Result<Connection, lapin::Error> {
     let mut props = FieldTable::default();
     props.insert(
         "ofborg_version".into(),
@@ -31,13 +31,15 @@ pub fn from_config(cfg: &RabbitMqConfig) -> Result<Connection, lapin::Error> {
         client_properties: props,
         ..Default::default()
     };
-    task::block_on(Connection::connect(&cfg.as_uri()?, opts))
+    Connection::connect(&cfg.as_uri()?, opts).await
 }

+#[async_trait]
 impl ChannelExt for Channel {
     type Error = lapin::Error;

-    fn declare_exchange(&mut self, config: ExchangeConfig) -> Result<(), Self::Error> {
+    #[tracing::instrument(ret)]
+    async fn declare_exchange(&mut self, config: ExchangeConfig) -> Result<(), Self::Error> {
         let opts = ExchangeDeclareOptions {
             passive: config.passive,
             durable: config.durable,
@@ -51,11 +53,13 @@ impl ChannelExt for Channel {
             ExchangeType::Fanout => ExchangeKind::Fanout,
             _ => panic!("exchange kind"),
         };
-        task::block_on(self.exchange_declare(&config.exchange, kind, opts, FieldTable::default()))?;
+        self.exchange_declare(&config.exchange, kind, opts, FieldTable::default())
+            .await?;
         Ok(())
     }

-    fn declare_queue(&mut self, config: QueueConfig) -> Result<(), Self::Error> {
+    #[tracing::instrument(ret)]
+    async fn declare_queue(&mut self, config: QueueConfig) -> Result<(), Self::Error> {
         let opts = QueueDeclareOptions {
             passive: config.passive,
             durable: config.durable,
@@ -64,71 +68,145 @@ impl ChannelExt for Channel {
             nowait: config.no_wait,
         };

-        task::block_on(self.queue_declare(&config.queue, opts, FieldTable::default()))?;
+        self.queue_declare(&config.queue, opts, FieldTable::default())
+            .await?;
         Ok(())
     }

-    fn bind_queue(&mut self, config: BindQueueConfig) -> Result<(), Self::Error> {
+    #[tracing::instrument(ret)]
+    async fn bind_queue(&mut self, config: BindQueueConfig) -> Result<(), Self::Error> {
         let opts = QueueBindOptions {
             nowait: config.no_wait,
         };

-        task::block_on(self.queue_bind(
+        self.queue_bind(
             &config.queue,
             &config.exchange,
-            &config.routing_key.unwrap_or_else(|| "".into()),
+            &config.routing_key.unwrap_or_default(),
             opts,
             FieldTable::default(),
-        ))?;
+        )
+        .await?;
         Ok(())
     }
+
+    #[tracing::instrument(ret)]
+    async fn send_request<
+        T: ?Sized + Serialize + Sync + Send + Debug,
+        U: for<'a> Deserialize<'a> + Debug,
+    >(
+        &mut self,
+        exchange: Option<&str>,
+        routing_key: Option<&str>,
+        msg: &T,
+    ) -> Result<U, Self::Error> {
+        let mut msg = prepare_queue_message(exchange, routing_key, msg);
+
+        let correlation_id: ShortString = format!("{}", uuid::Uuid::new_v4().as_simple()).into();
+        let mut props = BasicProperties::default()
+            .with_reply_to("amq.rabbitmq.reply-to".into())
+            .with_correlation_id(correlation_id.clone())
+            .with_delivery_mode(2); // do not lose pastebins please
+
+        if let Some(s) = msg.content_type {
+            props = props.with_content_type(s.into());
+        }
+
+        trace!(?exchange, ?routing_key, "sending a RPC request");
+        let confirmation = self
+            .basic_publish(
+                &msg.exchange.take().unwrap_or_default(),
+                &msg.routing_key.take().unwrap_or_default(),
+                BasicPublishOptions::default(),
+                &msg.content,
+                props,
+            )
+            .await?
+            .await?;
+
+        trace!(?confirmation, "RPC request sent");
+
+        let mut consumer = self
+            .basic_consume(
+                "amq.rabbitmq.reply-to",
+                "whoami",
+                BasicConsumeOptions::default(),
+                FieldTable::default(),
+            )
+            .await?;
+
+        while let Some(Ok(deliver)) = consumer.next().await {
+            debug!(?deliver.delivery_tag, "received an RPC reply");
+            if let Some(deliver_correlation_id) = deliver.properties.correlation_id().as_ref() {
+                if deliver_correlation_id == &correlation_id {
+                    trace!(?deliver_correlation_id, "received the expected RPC reply");
+                    return Ok(serde_json::from_slice(&deliver.data).unwrap());
+                }
+            }
+        }
+
+        panic!("did not receive an RPC reply");
+    }
 }

-impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for Channel {
+#[async_trait]
+impl<W: SimpleWorker> ConsumerExt<W> for Channel {
     type Error = lapin::Error;
-    type Handle = Pin<Box<dyn Future<Output = ()> + 'a>>;

-    fn consume(self, mut worker: W, config: ConsumeConfig) -> Result<Self::Handle, Self::Error> {
-        let mut consumer = task::block_on(self.basic_consume(
+    #[tracing::instrument(skip(worker), ret)]
+    async fn consume(mut self, mut worker: W, config: ConsumeConfig) -> Result<(), Self::Error>
+    where
+        W: 'async_trait,
+    {
+        let mut consumer = self
+            .basic_consume(
                 &config.queue,
                 &config.consumer_tag,
                 BasicConsumeOptions::default(),
                 FieldTable::default(),
-        ))?;
-        Ok(Box::pin(async move {
+            )
+            .await?;
+
         while let Some(Ok(deliver)) = consumer.next().await {
             debug!(?deliver.delivery_tag, "consumed delivery");
             let content_type = deliver.properties.content_type();
             let job = worker
                 .msg_to_job(
                     deliver.routing_key.as_str(),
-                    &content_type.as_ref().map(|s| s.to_string()),
+                    &content_type.as_ref().map(std::string::ToString::to_string),
                     &deliver.data,
                 )
+                .await
                 .expect("worker unexpected message consumed");

-            for action in worker.consumer(&job) {
+            for action in worker.consumer(&mut self, &job).await {
                 action_deliver(&self, &deliver, action)
                     .await
                     .expect("action deliver failure");
             }
             debug!(?deliver.delivery_tag, "done");
         }
-        }))
+
+        Ok(())
     }
 }

 /// Same as a regular channel, but without prefetching,
 /// used for services with multiple instances.
+#[derive(Debug)]
 pub struct WorkerChannel(pub Channel);

-impl<'a, W: SimpleWorker + 'a> ConsumerExt<'a, W> for WorkerChannel {
+#[async_trait]
+impl<W: SimpleWorker> ConsumerExt<W> for WorkerChannel {
     type Error = lapin::Error;
-    type Handle = Pin<Box<dyn Future<Output = ()> + 'a>>;

-    fn consume(self, worker: W, config: ConsumeConfig) -> Result<Self::Handle, Self::Error> {
-        task::block_on(self.0.basic_qos(1, BasicQosOptions::default()))?;
-        self.0.consume(worker, config)
+    #[tracing::instrument(skip(worker), ret)]
+    async fn consume(self, worker: W, config: ConsumeConfig) -> Result<(), Self::Error>
+    where
+        W: 'async_trait,
+    {
+        self.0.basic_qos(1, BasicQosOptions::default()).await?;
+        Ok(self.0.consume(worker, config).await?)
     }
 }

@@ -143,32 +221,42 @@ impl<'a> ChannelNotificationReceiver<'a> {
     }
 }

+#[async_trait]
 impl<'a> NotificationReceiver for ChannelNotificationReceiver<'a> {
-    fn tell(&mut self, action: Action) {
-        task::block_on(action_deliver(self.channel, self.deliver, action))
+    async fn tell(&mut self, action: Action) {
+        action_deliver(self.channel, self.deliver, action)
+            .await
             .expect("action deliver failure");
     }
 }

 // FIXME the consumer trait for SimpleWorker and SimpleNotifyWorker conflict,
 // but one could probably be implemented in terms of the other instead.
+#[derive(Debug)]
 pub struct NotifyChannel(pub Channel);

-impl<'a, W: SimpleNotifyWorker + 'a + Send> ConsumerExt<'a, W> for NotifyChannel {
+#[async_trait]
+impl<W: SimpleNotifyWorker> ConsumerExt<W> for NotifyChannel {
     type Error = lapin::Error;
-    type Handle = Pin<Box<dyn Future<Output = ()> + 'a + Send>>;

-    fn consume(self, worker: W, config: ConsumeConfig) -> Result<Self::Handle, Self::Error> {
-        task::block_on(self.0.basic_qos(1, BasicQosOptions::default()))?;
+    #[tracing::instrument(skip(worker), ret)]
+    async fn consume(self, worker: W, config: ConsumeConfig) -> Result<(), Self::Error>
+    where
+        W: 'async_trait,
+    {
+        self.0.basic_qos(1, BasicQosOptions::default()).await?;

-        let mut consumer = task::block_on(self.0.basic_consume(
+        let mut consumer = self
+            .0
+            .basic_consume(
                 &config.queue,
                 &config.consumer_tag,
                 BasicConsumeOptions::default(),
                 FieldTable::default(),
-        ))?;
+            )
+            .await?;
         let mut chan = self.0;
-        Ok(Box::pin(async move {
         while let Some(Ok(deliver)) = consumer.next().await {
             debug!(?deliver.delivery_tag, "consumed delivery");
             let mut receiver = ChannelNotificationReceiver {
@@ -180,15 +268,17 @@ impl<'a, W: SimpleNotifyWorker + 'a + Send> ConsumerExt<'a, W> for NotifyChannel
             let job = worker
                 .msg_to_job(
                     deliver.routing_key.as_str(),
-                    &content_type.as_ref().map(|s| s.to_string()),
+                    &content_type.as_ref().map(std::string::ToString::to_string),
                     &deliver.data,
                 )
+                .await
                 .expect("worker unexpected message consumed");

-            worker.consumer(&job, &mut receiver);
+            worker.consumer(&job, &mut receiver).await;
             debug!(?deliver.delivery_tag, "done");
         }
-        }))
+
+        Ok(())
     }
 }

@@ -217,8 +307,8 @@ async fn action_deliver(
                 .await
         }
         Action::Publish(mut msg) => {
-            let exch = msg.exchange.take().unwrap_or_else(|| "".to_owned());
-            let key = msg.routing_key.take().unwrap_or_else(|| "".to_owned());
+            let exch = msg.exchange.take().unwrap_or_else(String::new);
+            let key = msg.routing_key.take().unwrap_or_else(String::new);
             trace!(?exch, ?key, "action publish");

             let mut props = BasicProperties::default().with_delivery_mode(2); // persistent.
@@ -227,7 +317,7 @@ async fn action_deliver(
                 props = props.with_content_type(s.into());
             }

-            let _confirmaton = chan
+            let _confirmation = chan
                 .basic_publish(
                     &exch,
                     &key,
```
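To show how the pieces meet, a sketch of a client-side call into the statcheck API queue through the new `send_request` helper. The request and response types here are hypothetical placeholders; the actual verbs handled by `StatusCheckCollector` are not part of this diff:

```rust
use ofborg::easyamqp::ChannelExt;
use serde::{Deserialize, Serialize};

// Hypothetical wire types, for illustration only.
#[derive(Serialize, Debug)]
struct ListStatuses {
    change_id: String,
}

#[derive(Deserialize, Debug)]
struct StatusList {
    statuses: Vec<String>,
}

async fn list_statuses(
    chan: &mut lapin::Channel,
    change_id: String,
) -> Result<StatusList, lapin::Error> {
    // Publishes with reply_to = "amq.rabbitmq.reply-to" and a fresh
    // correlation id, then waits for the matching direct reply.
    chan.send_request(
        Some("statcheck-api"),
        Some("statcheck-api"),
        &ListStatuses { change_id },
    )
    .await
}
```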
```diff
@@ -11,6 +11,7 @@ pub struct EvalChecker {
 }

 impl EvalChecker {
+    #[must_use]
     pub fn new(name: &str, op: nix::Operation, args: Vec<String>, nix: nix::Nix) -> EvalChecker {
         EvalChecker {
             name: name.to_owned(),
@@ -20,14 +21,18 @@ impl EvalChecker {
         }
     }

+    #[must_use]
     pub fn name(&self) -> &str {
         &self.name
     }

-    pub fn execute(&self, path: &Path) -> Result<File, File> {
-        self.nix.safely(&self.op, path, self.args.clone(), false)
+    pub async fn execute(&self, path: &Path) -> Result<File, File> {
+        self.nix
+            .safely(&self.op, path, self.args.clone(), false)
+            .await
     }

+    #[must_use]
     pub fn cli_cmd(&self) -> String {
         let mut cli = vec![self.op.to_string()];
         cli.append(&mut self.args.clone());
```
@@ -1,9 +1,9 @@
 #![recursion_limit = "512"]
-// Replacing .map(|arch| arch.to_string())
-// with .map(systems::System::to_string)
-//
-// seems much less clear and I just don't like it :)
-#![allow(clippy::redundant_closure)]
+#![deny(clippy::pedantic)]
+#![allow(clippy::missing_errors_doc)]
+#![allow(clippy::missing_panics_doc)]
+#![allow(clippy::module_name_repetitions)]
+#![allow(clippy::similar_names)]

 #[macro_use]
 extern crate serde_derive;
@@ -13,6 +13,13 @@ extern crate nom;

 use std::env;

+use opentelemetry::global;
+use opentelemetry::trace::TracerProvider;
+use opentelemetry::KeyValue;
+use opentelemetry_sdk::Resource;
+use opentelemetry_semantic_conventions::resource::SERVICE_NAME;
+use opentelemetry_semantic_conventions::resource::SERVICE_VERSION;
+use opentelemetry_semantic_conventions::SCHEMA_URL;
 use tracing_subscriber::prelude::*;
 use tracing_subscriber::EnvFilter;

@@ -21,7 +28,6 @@ pub mod asynccmd;
 pub mod checkout;
 pub mod clone;
 pub mod commentparser;
-pub mod commitstatus;
 pub mod config;
 pub mod easyamqp;
 pub mod easylapin;
@@ -41,6 +47,8 @@ pub mod systems;
 pub mod tagger;
 pub mod tasks;
 pub mod test_scratch;
+pub mod utils;
+pub mod vcs;
 pub mod worker;
 pub mod writetoline;

@@ -50,7 +58,6 @@ pub mod ofborg {
     pub use crate::checkout;
     pub use crate::clone;
     pub use crate::commentparser;
-    pub use crate::commitstatus;
     pub use crate::config;
     pub use crate::easyamqp;
     pub use crate::evalchecker;
@@ -66,15 +73,17 @@ pub mod ofborg {
     pub use crate::tagger;
     pub use crate::tasks;
     pub use crate::test_scratch;
+    pub use crate::vcs;
     pub use crate::worker;
     pub use crate::writetoline;

     pub const VERSION: &str = env!("CARGO_PKG_VERSION");

+    #[must_use]
     pub fn partition_result<A, B>(results: Vec<Result<A, B>>) -> (Vec<A>, Vec<B>) {
         let mut ok = Vec::new();
         let mut err = Vec::new();
-        for result in results.into_iter() {
+        for result in results {
             match result {
                 Ok(x) => {
                     ok.push(x);
@@ -89,6 +98,16 @@ pub mod ofborg {
     }
 }

+fn resource() -> Resource {
+    Resource::from_schema_url(
+        [
+            KeyValue::new(SERVICE_NAME, env!("CARGO_PKG_NAME")),
+            KeyValue::new(SERVICE_VERSION, env!("CARGO_PKG_VERSION")),
+        ],
+        SCHEMA_URL,
+    )
+}
+
 pub fn setup_log() {
     let filter_layer = EnvFilter::try_from_default_env()
         .or_else(|_| EnvFilter::try_new("info"))
@@ -96,19 +115,31 @@ pub fn setup_log() {

     let log_json = env::var("RUST_LOG_JSON").map_or(false, |s| s == "1");

-    if log_json {
-        let fmt_layer = tracing_subscriber::fmt::layer().json();
-        tracing_subscriber::registry()
-            .with(filter_layer)
-            .with(fmt_layer)
-            .init();
+    let json_layer = if log_json {
+        tracing_subscriber::fmt::layer().json().boxed()
     } else {
-        let fmt_layer = tracing_subscriber::fmt::layer();
+        tracing_subscriber::fmt::layer().boxed()
+    };
+
+    let provider = opentelemetry_sdk::trace::TracerProvider::builder()
+        .with_batch_exporter(
+            opentelemetry_otlp::SpanExporter::builder()
+                .with_http()
+                .build()
+                .unwrap(),
+            opentelemetry_sdk::runtime::Tokio,
+        )
+        .with_config(opentelemetry_sdk::trace::Config::default().with_resource(resource()))
+        .build();
+
+    global::set_tracer_provider(provider.clone());
+    let telemetry = tracing_opentelemetry::layer().with_tracer(provider.tracer("ofborg"));
+
     tracing_subscriber::registry()
         .with(filter_layer)
-        .with(fmt_layer)
+        .with(json_layer)
+        .with(telemetry)
         .init();
-    }

     tracing::info!("Logging configured");
 }
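Note on the `setup_log` rewrite: the two `registry()` branches collapse into one because `.boxed()` erases the otherwise-distinct types of the JSON and plain fmt layers, and the OTLP pipeline is then stacked on as a third layer. A condensed standalone sketch of the same composition, assuming the opentelemetry/tracing crate versions pinned in this PR's Cargo.lock (these APIs vary between releases):

```rust
use opentelemetry::global;
use opentelemetry::trace::TracerProvider as _;
use tracing_subscriber::prelude::*;
use tracing_subscriber::EnvFilter;

fn setup_log(log_json: bool) {
    let filter_layer = EnvFilter::try_new("info").unwrap();

    // One boxed layer instead of two registry() calls: .boxed() gives both
    // arms the same type, so a single subscriber stack compiles either way.
    let json_layer = if log_json {
        tracing_subscriber::fmt::layer().json().boxed()
    } else {
        tracing_subscriber::fmt::layer().boxed()
    };

    // Batch OTLP export over HTTP, driven by the Tokio runtime.
    let provider = opentelemetry_sdk::trace::TracerProvider::builder()
        .with_batch_exporter(
            opentelemetry_otlp::SpanExporter::builder()
                .with_http()
                .build()
                .unwrap(),
            opentelemetry_sdk::runtime::Tokio,
        )
        .build();
    global::set_tracer_provider(provider.clone());
    let telemetry = tracing_opentelemetry::layer().with_tracer(provider.tracer("ofborg"));

    tracing_subscriber::registry()
        .with(filter_layer)
        .with(json_layer)
        .with(telemetry)
        .init();
}

#[tokio::main]
async fn main() {
    setup_log(false);
    tracing::info!("Logging configured");
}
```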
@@ -20,6 +20,6 @@ pub struct Lock {

 impl Lock {
     pub fn unlock(&mut self) {
-        self.lock = None
+        self.lock = None;
     }
 }
@@ -48,7 +48,7 @@ impl From<std::string::FromUtf8Error> for CalculationError {
 }

 impl ImpactedMaintainers {
-    pub fn calculate(
+    pub async fn calculate(
         nix: &Nix,
         checkout: &Path,
         paths: &[String],
@@ -56,11 +56,11 @@ impl ImpactedMaintainers {
     ) -> Result<ImpactedMaintainers, CalculationError> {
         let mut path_file = NamedTempFile::new()?;
         let pathstr = serde_json::to_string(&paths)?;
-        write!(path_file, "{}", pathstr)?;
+        write!(path_file, "{pathstr}")?;

         let mut attr_file = NamedTempFile::new()?;
         let attrstr = serde_json::to_string(&attributes)?;
-        write!(attr_file, "{}", attrstr)?;
+        write!(attr_file, "{attrstr}")?;

         let mut argstrs: HashMap<&str, &str> = HashMap::new();
         argstrs.insert("changedattrsjson", attr_file.path().to_str().unwrap());
@@ -73,27 +73,29 @@ impl ImpactedMaintainers {
             &[path_file.path(), attr_file.path()],
         );

-        let ret = cmd.output()?;
+        let ret = cmd.output().await?;

         Ok(serde_json::from_str(&String::from_utf8(ret.stdout)?)?)
     }

+    #[must_use]
     pub fn maintainers(&self) -> Vec<String> {
         self.0
-            .iter()
-            .map(|(maintainer, _)| maintainer.0.clone())
+            .keys()
+            .map(|maintainer| maintainer.0.clone())
             .collect()
     }

+    #[must_use]
     pub fn maintainers_by_package(&self) -> MaintainersByPackage {
         let mut bypkg = MaintainersByPackage(HashMap::new());

-        for (maintainer, packages) in self.0.iter() {
-            for package in packages.iter() {
+        for (maintainer, packages) in &self.0 {
+            for package in packages {
                 bypkg
                     .0
                     .entry(package.clone())
-                    .or_insert_with(HashSet::new)
+                    .or_default()
                     .insert(maintainer.clone());
             }
         }
@@ -120,7 +122,7 @@ impl std::fmt::Display for ImpactedMaintainers {
             })
             .collect::<Vec<String>>()
             .join("\n");
-        write!(f, "{}", d)
+        write!(f, "{d}")
     }
 }

@@ -156,15 +158,15 @@ mod tests {
             .expect("building the test PR failed");

         let stderr =
-            String::from_utf8(output.stderr).unwrap_or_else(|err| format!("warning: {}", err));
-        println!("{}", stderr);
+            String::from_utf8(output.stderr).unwrap_or_else(|err| format!("warning: {err}"));
+        println!("{stderr}");

         let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
         return hash.trim().to_owned();
     }

-    #[test]
-    fn example() {
+    #[tokio::test]
+    async fn example() {
         let workingdir = TestScratch::new_dir("test-maintainers-example");

         let bare = TestScratch::new_dir("test-maintainers-example-bare");
@@ -188,11 +190,11 @@ mod tests {

         working_co.checkout_ref(OsStr::new(&hash)).unwrap();

-        let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
+        let remote = env::var("NIX_REMOTE").unwrap_or_default();
         let nix = Nix::new(SYSTEM.to_owned(), remote, 1800, None);

         let parsed =
-            ImpactedMaintainers::calculate(&nix, &working_co.clone_to(), &paths, &attributes);
+            ImpactedMaintainers::calculate(&nix, &working_co.clone_to(), &paths, &attributes).await;

         let mut expect = ImpactedMaintainers(HashMap::new());
         expect.0.insert(
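The `maintainers`/`maintainers_by_package` edits above are mechanical clippy::pedantic fixes with unchanged behavior. A standalone illustration of the two idioms (`keys()` over `iter().map(|(k, _)| ..)`, and `or_default()` over `or_insert_with(HashSet::new)`):

```rust
use std::collections::{HashMap, HashSet};

fn main() {
    let mut by_pkg: HashMap<String, HashSet<String>> = HashMap::new();

    // `.entry(..).or_default()` replaces `.or_insert_with(HashSet::new)`;
    // both insert an empty set only when the key is absent.
    by_pkg.entry("hello".into()).or_default().insert("alice".into());
    by_pkg.entry("hello".into()).or_default().insert("bob".into());

    // `.keys()` replaces `.iter().map(|(k, _)| k)` when values are unused,
    // and iterating `&map` replaces an explicit `.iter()` call.
    let pkgs: Vec<&String> = by_pkg.keys().collect();
    assert_eq!(pkgs, ["hello"]);

    for (pkg, maintainers) in &by_pkg {
        println!("{pkg}: {} maintainer(s)", maintainers.len());
    }
}
```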
@@ -1,10 +1,10 @@
 use crate::commentparser::Subset;
-use crate::message::{Pr, Repo};
+use crate::message::{Change, Repo};

 #[derive(Serialize, Deserialize, Debug)]
 pub struct BuildJob {
     pub repo: Repo,
-    pub pr: Pr,
+    pub change: Change,
     pub subset: Option<Subset>,
     pub attrs: Vec<String>,
     pub request_id: String,
@@ -23,20 +23,21 @@ type Exchange = String;
 type RoutingKey = String;

 impl BuildJob {
+    #[must_use]
     pub fn new(
         repo: Repo,
-        pr: Pr,
+        change: Change,
         subset: Subset,
         attrs: Vec<String>,
         logs: Option<ExchangeQueue>,
         statusreport: Option<ExchangeQueue>,
         request_id: String,
     ) -> BuildJob {
-        let logbackrk = format!("{}.{}", repo.full_name, pr.number).to_lowercase();
+        let logbackrk = format!("{}.{}", repo.full_name, change.number).to_lowercase();

         BuildJob {
             repo,
-            pr,
+            change,
             subset: Some(subset),
             attrs,
             logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))),
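One consequence of the `pr` to `change` field rename worth flagging (not addressed in the diff itself): serde derives JSON keys from field names, so `BuildJob` payloads change shape on the wire. A minimal sketch; the `alias` attribute shown is one hypothetical way to keep accepting old messages, not something this PR does:

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct Change {
    number: u64,
}

#[derive(Serialize, Deserialize, Debug)]
struct BuildJob {
    // `alias` lets old `{"pr": ...}` payloads still deserialize while new
    // ones are written out as `{"change": ...}`.
    #[serde(alias = "pr")]
    change: Change,
}

fn main() {
    let new_style: BuildJob = serde_json::from_str(r#"{"change":{"number":42}}"#).unwrap();
    let old_style: BuildJob = serde_json::from_str(r#"{"pr":{"number":42}}"#).unwrap();
    println!("{new_style:?} / {old_style:?}");
    println!("{}", serde_json::to_string(&new_style).unwrap());
}
```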
@@ -3,7 +3,7 @@ pub struct BuildLogMsg {
     pub system: String,
     pub identity: String,
     pub attempt_id: String,
-    pub line_number: u64,
+    pub line_number: usize,
     pub output: String,
 }
@@ -1,6 +1,9 @@
-use crate::message::{Pr, Repo};
+use crate::message::{Change, Repo};

-use hubcaps::checks::Conclusion;
+// FIXME: drop
+// v1
+// legacy
+// support.

 #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
 pub enum BuildStatus {
@@ -20,27 +23,15 @@ impl From<BuildStatus> for String {
             BuildStatus::Failure => "Failure".into(),
             BuildStatus::HashMismatch => "A fixed output derivation's hash was incorrect".into(),
             BuildStatus::TimedOut => "Timed out, unknown build status".into(),
-            BuildStatus::UnexpectedError { ref err } => format!("Unexpected error: {}", err),
-        }
-    }
-}
-
-impl From<BuildStatus> for Conclusion {
-    fn from(status: BuildStatus) -> Conclusion {
-        match status {
-            BuildStatus::Skipped => Conclusion::Skipped,
-            BuildStatus::Success => Conclusion::Success,
-            BuildStatus::Failure => Conclusion::Neutral,
-            BuildStatus::HashMismatch => Conclusion::Failure,
-            BuildStatus::TimedOut => Conclusion::Neutral,
-            BuildStatus::UnexpectedError { .. } => Conclusion::Neutral,
+            BuildStatus::UnexpectedError { ref err } => format!("Unexpected error: {err}"),
         }
     }
 }

 pub struct LegacyBuildResult {
     pub repo: Repo,
-    pub pr: Pr,
+    // TODO: change me to V1 tag.
+    pub pr: Change,
     pub system: String,
     pub output: Vec<String>,
     pub attempt_id: String,
@@ -50,6 +41,11 @@ pub struct LegacyBuildResult {
     pub attempted_attrs: Option<Vec<String>>,
 }

+#[derive(Serialize, Deserialize, Debug)]
+pub enum V2Tag {
+    V2,
+}
+
 #[derive(Serialize, Deserialize, Debug)]
 pub enum V1Tag {
     V1,
@@ -58,10 +54,24 @@ pub enum V1Tag {
 #[derive(Serialize, Deserialize, Debug)]
 #[serde(untagged)]
 pub enum BuildResult {
+    V2 {
+        tag: V2Tag,
+        repo: Repo,
+        change: Change,
+        system: String,
+        output: Vec<String>,
+        attempt_id: String,
+        request_id: String,
+        // removed success
+        status: BuildStatus,
+        skipped_attrs: Option<Vec<String>>,
+        attempted_attrs: Option<Vec<String>>,
+    },
     V1 {
         tag: V1Tag, // use serde once all enum variants have a tag
         repo: Repo,
-        pr: Pr,
+        // TODO: move me to V1PR later on.
+        pr: Change,
         system: String,
         output: Vec<String>,
         attempt_id: String,
@@ -73,7 +83,7 @@ pub enum BuildResult {
     },
     Legacy {
         repo: Repo,
-        pr: Pr,
+        pr: Change,
         system: String,
         output: Vec<String>,
         attempt_id: String,
@@ -86,6 +96,7 @@ pub enum BuildResult {
 }

 impl BuildResult {
+    #[must_use]
     pub fn legacy(&self) -> LegacyBuildResult {
         // TODO: replace this with simpler structs for specific usecases, since
         // it's decouples the structs from serialization. These can be changed
@@ -101,18 +112,8 @@ impl BuildResult {
                 ref attempted_attrs,
                 ref skipped_attrs,
                 ..
-            } => LegacyBuildResult {
-                repo: repo.to_owned(),
-                pr: pr.to_owned(),
-                system: system.to_owned(),
-                output: output.to_owned(),
-                attempt_id: attempt_id.to_owned(),
-                request_id: request_id.to_owned(),
-                status: self.status(),
-                attempted_attrs: attempted_attrs.to_owned(),
-                skipped_attrs: skipped_attrs.to_owned(),
-            },
-            BuildResult::V1 {
+            }
+            | BuildResult::V1 {
                 ref repo,
                 ref pr,
                 ref system,
@@ -133,16 +134,39 @@ impl BuildResult {
                 attempted_attrs: attempted_attrs.to_owned(),
                 skipped_attrs: skipped_attrs.to_owned(),
             },
+            BuildResult::V2 {
+                ref repo,
+                ref change,
+                ref system,
+                ref output,
+                ref attempt_id,
+                ref request_id,
+                ref attempted_attrs,
+                ref skipped_attrs,
+                ..
+            } => LegacyBuildResult {
+                repo: repo.to_owned(),
+                pr: change.to_owned(),
+                system: system.to_owned(),
+                output: output.to_owned(),
+                attempt_id: attempt_id.to_owned(),
+                request_id: request_id.to_owned(),
+                status: self.status(),
+                attempted_attrs: attempted_attrs.to_owned(),
+                skipped_attrs: skipped_attrs.to_owned(),
+            },
         }
     }

-    pub fn pr(&self) -> Pr {
+    #[must_use]
+    pub fn change(&self) -> Change {
         match self {
-            BuildResult::Legacy { pr, .. } => pr.to_owned(),
-            BuildResult::V1 { pr, .. } => pr.to_owned(),
+            BuildResult::Legacy { pr, .. } | BuildResult::V1 { pr, .. } => pr.to_owned(),
+            BuildResult::V2 { change, .. } => change.to_owned(),
         }
     }

+    #[must_use]
     pub fn status(&self) -> BuildStatus {
         match *self {
             BuildResult::Legacy {
@@ -157,7 +181,9 @@ impl BuildResult {
                 Some(false) => BuildStatus::Failure,
             }
             }),
-            BuildResult::V1 { ref status, .. } => status.to_owned(),
+            BuildResult::V1 { ref status, .. } | BuildResult::V2 { ref status, .. } => {
+                status.to_owned()
+            }
         }
     }
 }
@@ -176,8 +202,7 @@ mod tests {
         assert_eq!(
             output,
             r#"{"tag":"V1","repo":{"owner":"NixOS","name":"nixpkgs","full_name":"NixOS/nixpkgs","clone_url":"https://github.com/nixos/nixpkgs.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","status":"Success","skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#,
-            "json of: {:?}",
-            result
+            "json of: {result:?}"
         );
     }

@@ -190,8 +215,7 @@ mod tests {
         assert_eq!(
             output,
             r#"{"repo":{"owner":"NixOS","name":"nixpkgs","full_name":"NixOS/nixpkgs","clone_url":"https://github.com/nixos/nixpkgs.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","success":true,"status":"Success","skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#,
-            "json of: {:?}",
-            result
+            "json of: {result:?}"
         );
     }

@@ -204,8 +228,7 @@ mod tests {
         assert_eq!(
             output,
             r#"{"repo":{"owner":"NixOS","name":"nixpkgs","full_name":"NixOS/nixpkgs","clone_url":"https://github.com/nixos/nixpkgs.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":[],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","success":null,"status":null,"skipped_attrs":null,"attempted_attrs":null}"#,
-            "json of: {:?}",
-            result
+            "json of: {result:?}"
        );
    }

@@ -218,8 +241,7 @@ mod tests {
         assert_eq!(
             output,
             r#"{"repo":{"owner":"NixOS","name":"nixpkgs","full_name":"NixOS/nixpkgs","clone_url":"https://github.com/nixos/nixpkgs.git"},"pr":{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"},"system":"x86_64-linux","output":["unpacking sources"],"attempt_id":"attempt-id-foo","request_id":"bogus-request-id","success":true,"status":null,"skipped_attrs":["AAAAAASomeThingsFailToEvaluate"],"attempted_attrs":["hello"]}"#,
-            "json of: {:?}",
-            result
+            "json of: {result:?}"
         );
     }
 }
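The new `V2` variant above leans on how `#[serde(untagged)]` resolves: variants are tried in declaration order, so the single-value `tag` field is what routes a payload to `V2` or `V1`, and tag-less payloads fall through to `Legacy`. A reduced standalone sketch; the `n` field is a stand-in for the real payload fields:

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
enum V2Tag { V2 }

#[derive(Serialize, Deserialize, Debug)]
enum V1Tag { V1 }

#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
enum BuildResult {
    V2 { tag: V2Tag, n: u64 },
    V1 { tag: V1Tag, n: u64 },
    Legacy { n: u64 },
}

fn main() {
    // Declaration order matters for untagged enums: serde takes the first
    // variant that deserializes successfully.
    let v2: BuildResult = serde_json::from_str(r#"{"tag":"V2","n":1}"#).unwrap();
    let v1: BuildResult = serde_json::from_str(r#"{"tag":"V1","n":1}"#).unwrap();
    let legacy: BuildResult = serde_json::from_str(r#"{"n":1}"#).unwrap();
    println!("{v2:?}\n{v1:?}\n{legacy:?}");
}
```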
@@ -7,7 +7,7 @@ pub struct Repo {
 }

 #[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct Pr {
+pub struct Change {
     pub target_branch: Option<String>,
     pub number: u64,
     pub head_sha: String,
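Conversely to the `BuildJob` field rename, renaming the struct itself (`Pr` to `Change`) is invisible on the wire: serde keys come from field names, never from the type's name. A quick standalone check, using the same fields as the renamed struct above:

```rust
use serde::{Deserialize, Serialize};

// The type name does not appear anywhere in the serialized form.
#[derive(Serialize, Deserialize, Debug)]
pub struct Change {
    pub target_branch: Option<String>,
    pub number: u64,
    pub head_sha: String,
}

fn main() {
    let json = r#"{"target_branch":"master","number":42,"head_sha":"0000000000000000000000000000000000000000"}"#;
    let change: Change = serde_json::from_str(json).unwrap();
    assert_eq!(change.number, 42);
    println!("{}", serde_json::to_string(&change).unwrap());
}
```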
@@ -1,4 +1,4 @@
-use crate::message::{Pr, Repo};
+use crate::message::{Change, Repo};
 use crate::worker;

 pub fn from(data: &[u8]) -> Result<EvaluationJob, serde_json::error::Error> {
@@ -8,10 +8,11 @@ pub fn from(data: &[u8]) -> Result<EvaluationJob, serde_json::error::Error> {
 #[derive(Serialize, Deserialize, Debug)]
 pub struct EvaluationJob {
     pub repo: Repo,
-    pub pr: Pr,
+    pub change: Change,
 }

 impl EvaluationJob {
+    #[must_use]
     pub fn is_nixpkgs(&self) -> bool {
         self.repo.name == "nixpkgs"
     }
@@ -20,15 +21,18 @@ impl EvaluationJob {
 pub struct Actions {}

 impl Actions {
-    pub fn retry_later(&mut self, _job: &EvaluationJob) -> worker::Actions {
+    #[must_use]
+    pub fn retry_later(_job: &EvaluationJob) -> worker::Actions {
         vec![worker::Action::NackRequeue]
     }

-    pub fn skip(&mut self, _job: &EvaluationJob) -> worker::Actions {
+    #[must_use]
+    pub fn skip(_job: &EvaluationJob) -> worker::Actions {
         vec![worker::Action::Ack]
     }

-    pub fn done(&mut self, _job: &EvaluationJob, mut response: worker::Actions) -> worker::Actions {
+    #[must_use]
+    pub fn done(_job: &EvaluationJob, mut response: worker::Actions) -> worker::Actions {
         response.push(worker::Action::Ack);
         response
     }
@@ -4,4 +4,4 @@ pub mod buildresult;
 mod common;
 pub mod evaluationjob;

-pub use self::common::{Pr, Repo};
+pub use self::common::{Change, Repo};
@@ -1,4 +1,3 @@
-use crate::asynccmd::{AsyncCmd, SpawnedAsyncCmd};
 use crate::message::buildresult::BuildStatus;
 use crate::ofborg::partition_result;

@@ -9,9 +8,10 @@ use std::fmt;
 use std::fs;
 use std::io::{BufRead, BufReader, Seek, SeekFrom};
 use std::path::Path;
-use std::process::{Command, Stdio};
+use std::process::Stdio;

 use tempfile::tempfile;
+use tokio::process::{Child, Command};

 #[allow(clippy::upper_case_acronyms)]
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -43,11 +43,11 @@ pub enum Operation {
 impl Operation {
     fn command(&self) -> Command {
         match *self {
-            Operation::Evaluate => Command::new("nix-instantiate"),
-            Operation::Instantiate => Command::new("nix-instantiate"),
+            Operation::Evaluate | Operation::Instantiate => Command::new("nix-instantiate"),
             Operation::Build => Command::new("nix-build"),
-            Operation::QueryPackagesJson => Command::new("nix-env"),
-            Operation::QueryPackagesOutputs => Command::new("nix-env"),
+            Operation::QueryPackagesJson | Operation::QueryPackagesOutputs => {
+                Command::new("nix-env")
+            }
             Operation::NoOp { .. } => Command::new("echo"),
             Operation::Unknown { ref program } => Command::new(program),
         }
@@ -56,7 +56,7 @@ impl Operation {
     fn args(&self, command: &mut Command) {
         match *self {
             Operation::Build => {
-                command.args(&[
+                command.args([
                     "--no-out-link",
                     "--keep-going",
                     "--option",
@@ -65,7 +65,7 @@ impl Operation {
                 ]);
             }
             Operation::QueryPackagesJson => {
-                command.args(&[
+                command.args([
                     "--query",
                     "--available",
                     "--json",
@@ -75,7 +75,7 @@ impl Operation {
                 ]);
             }
             Operation::QueryPackagesOutputs => {
-                command.args(&[
+                command.args([
                     "--query",
                     "--available",
                     "--no-name",
@@ -90,7 +90,7 @@ impl Operation {
                 operation.args(command);
             }
             Operation::Evaluate => {
-                command.args(&[
+                command.args([
                     "--eval",
                     "--strict",
                     "--json",
@@ -100,9 +100,9 @@ impl Operation {
                 ]);
             }
             Operation::Instantiate => {
-                command.args(&["--option", "extra-experimental-features", "no-url-literals"]);
+                command.args(["--option", "extra-experimental-features", "no-url-literals"]);
             }
-            _ => (),
+            Operation::Unknown { .. } => (),
         };
     }
 }
@@ -115,7 +115,7 @@ impl fmt::Display for Operation {
             Operation::QueryPackagesJson => write!(f, "nix-env -qa --json"),
             Operation::QueryPackagesOutputs => write!(f, "nix-env -qaP --no-name --out-path"),
             Operation::NoOp { ref operation } => operation.fmt(f),
-            Operation::Unknown { ref program } => write!(f, "{}", program),
+            Operation::Unknown { ref program } => write!(f, "{program}"),
             Operation::Evaluate => write!(f, "nix-instantiate --strict --json ..."),
         }
     }
 }
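Two notes on this hunk, illustrated below: merged match arms (`A | B =>`) replace duplicated bodies, and `command.args([..])` drops the `&` because arrays iterate by value (clippy::pedantic flags the old form as a needless borrow). A standalone sketch with a trimmed-down `Operation`, using `echo` so it runs anywhere:

```rust
use tokio::process::Command;

enum Operation {
    Build,
    NoOp,
}

impl Operation {
    // Merged match arms, as in the diff: one body per shared base command.
    fn command(&self) -> Command {
        match self {
            Operation::Build => Command::new("nix-build"),
            Operation::NoOp => Command::new("echo"),
        }
    }
}

#[tokio::main]
async fn main() {
    let mut cmd = Operation::NoOp.command();
    // `args([..])` instead of `args(&[..])`: arrays are IntoIterator by
    // value, so the extra borrow is unnecessary.
    cmd.args(["--no-out-link", "--keep-going"]);
    let status = cmd.status().await.expect("failed to spawn");
    println!("exit: {status}");
}
```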
@@ -131,6 +131,7 @@ pub struct Nix {
 }

 impl Nix {
+    #[must_use]
     pub fn new(
         system: String,
         remote: String,
@@ -146,54 +147,62 @@ impl Nix {
         }
     }

+    #[must_use]
     pub fn with_system(&self, system: String) -> Nix {
         let mut n = self.clone();
         n.system = system;
         n
     }

+    #[must_use]
     pub fn with_limited_supported_systems(&self) -> Nix {
         let mut n = self.clone();
         n.limit_supported_systems = true;
         n
     }

+    #[must_use]
     pub fn without_limited_supported_systems(&self) -> Nix {
         let mut n = self.clone();
         n.limit_supported_systems = false;
         n
     }

-    pub fn safely_partition_instantiable_attrs(
+    pub async fn safely_partition_instantiable_attrs(
         &self,
         nixpkgs: &Path,
         file: File,
         attrs: Vec<String>,
     ) -> (Vec<String>, Vec<(String, Vec<String>)>) {
-        let attr_instantiations: Vec<Result<String, (String, Vec<String>)>> = attrs
-            .into_iter()
-            .map(
-                |attr| match self.safely_instantiate_attrs(nixpkgs, file, vec![attr.clone()]) {
+        let mut attr_instantiations: Vec<Result<String, (String, Vec<String>)>> = vec![];
+
+        for attr in attrs {
+            attr_instantiations.push(
+                match self
+                    .safely_instantiate_attrs(nixpkgs, file, vec![attr.clone()])
+                    .await
+                {
                     Ok(_) => Ok(attr),
                     Err(f) => Err((attr, lines_from_file(f))),
                 },
-            )
-            .collect();
+            );
+        }

         partition_result(attr_instantiations)
     }

-    pub fn safely_instantiate_attrs(
+    pub async fn safely_instantiate_attrs(
         &self,
         nixpkgs: &Path,
         file: File,
         attrs: Vec<String>,
     ) -> Result<fs::File, fs::File> {
         let mut command = self.safe_command::<&OsStr>(&Operation::Instantiate, nixpkgs, &[], &[]);
-        self.set_attrs_command(&mut command, file, attrs);
-        self.run(command, true)
+        Self::set_attrs_command(&mut command, file, attrs);
+        self.run(command, true).await
     }

+    #[must_use]
     pub fn safely_evaluate_expr_cmd(
         &self,
         nixpkgs: &Path,
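The rewrite of `safely_partition_instantiable_attrs` above is forced rather than stylistic: `.await` cannot appear inside a plain closure handed to `.map()`, so the iterator pipeline becomes an explicit loop that awaits each instantiation in turn. A standalone sketch of the shape; `check` is a stand-in for the real per-attribute instantiation:

```rust
// Stand-in for instantiating one attribute; fails for a known-bad name.
async fn check(attr: &str) -> Result<(), String> {
    if attr == "fails-instantiation" {
        Err(format!("{attr}: assertion failed"))
    } else {
        Ok(())
    }
}

// Sequentially awaits each check, then splits successes from failures,
// mirroring what partition_result() does in this PR.
async fn partition(attrs: Vec<String>) -> (Vec<String>, Vec<(String, String)>) {
    let mut ok = Vec::new();
    let mut err = Vec::new();
    for attr in attrs {
        match check(&attr).await {
            Ok(()) => ok.push(attr),
            Err(e) => err.push((attr, e)),
        }
    }
    (ok, err)
}

#[tokio::main]
async fn main() {
    let attrs = vec!["passes-instantiation".into(), "fails-instantiation".into()];
    let (ok, err) = partition(attrs).await;
    println!("ok: {ok:?}");
    println!("err: {err:?}");
}
```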
@@ -213,31 +222,32 @@ impl Nix {
         self.safe_command(&Operation::Evaluate, nixpkgs, &attrargs, extra_paths)
     }

-    pub fn safely_build_attrs(
+    pub async fn safely_build_attrs(
         &self,
         nixpkgs: &Path,
         file: File,
         attrs: Vec<String>,
     ) -> Result<fs::File, fs::File> {
         let mut command = self.safe_command::<&OsStr>(&Operation::Build, nixpkgs, &[], &[]);
-        self.set_attrs_command(&mut command, file, attrs);
-        self.run(command, true)
+        Self::set_attrs_command(&mut command, file, attrs);
+        self.run(command, true).await
     }

+    #[must_use]
     pub fn safely_build_attrs_async(
         &self,
         nixpkgs: &Path,
         file: File,
         attrs: Vec<String>,
-    ) -> SpawnedAsyncCmd {
+    ) -> Child {
         let mut command = self.safe_command::<&OsStr>(&Operation::Build, nixpkgs, &[], &[]);
-        self.set_attrs_command(&mut command, file, attrs);
-        AsyncCmd::new(command).spawn()
+        Nix::set_attrs_command(&mut command, file, attrs);
+        command.spawn().expect("Failed to run Nix")
     }

-    fn set_attrs_command(&self, command: &mut Command, file: File, attrs: Vec<String>) {
+    fn set_attrs_command(command: &mut Command, file: File, attrs: Vec<String>) {
         let mut args: Vec<String> = Vec::with_capacity(3 + (attrs.len() * 2));
-        args.push(format!("{}", file));
+        args.push(format!("{file}"));
         for attr in attrs {
             args.push(String::from("-A"));
             args.push(attr);
@@ -252,7 +262,7 @@ impl Nix {
         command.args(args);
     }

-    pub fn safely(
+    pub async fn safely(
         &self,
         op: &Operation,
         nixpkgs: &Path,
@@ -260,9 +270,10 @@ impl Nix {
         keep_stdout: bool,
     ) -> Result<fs::File, fs::File> {
         self.run(self.safe_command(op, nixpkgs, &args, &[]), keep_stdout)
+            .await
     }

-    pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<fs::File, fs::File> {
+    pub async fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<fs::File, fs::File> {
         let stderr = tempfile().expect("Fetching a stderr tempfile");
         let mut reader = stderr.try_clone().expect("Cloning stderr to the reader");

@@ -276,6 +287,7 @@ impl Nix {
             .stdout(stdout)
             .stderr(Stdio::from(stderr))
             .status()
+            .await
             .expect("Running a program ...");

         reader
@@ -289,7 +301,7 @@ impl Nix {
         }
     }

-    pub fn run_stderr_stdout(&self, mut cmd: Command) -> (bool, fs::File, fs::File) {
+    pub async fn run_stderr_stdout(&self, mut cmd: Command) -> (bool, fs::File, fs::File) {
         let stdout_file = tempfile().expect("Fetching a stdout tempfile");
         let mut stdout_reader = stdout_file
             .try_clone()
@@ -304,6 +316,7 @@ impl Nix {
             .stdout(Stdio::from(stdout_file))
             .stderr(Stdio::from(stderr_file))
             .status()
+            .await
             .expect("Running a program ...");

         stdout_reader
@@ -343,23 +356,23 @@ impl Nix {
         command.env("NIX_REMOTE", &self.remote);

         if let Some(ref initial_heap_size) = self.initial_heap_size {
-            command.env("GC_INITIAL_HEAP_SIZE", &initial_heap_size);
+            command.env("GC_INITIAL_HEAP_SIZE", initial_heap_size);
         }

         let path = env::var("PATH").unwrap();
         command.env("PATH", path);

-        command.args(&["--show-trace"]);
-        command.args(&["--option", "restrict-eval", "true"]);
-        command.args(&[
+        command.args(["--show-trace"]);
+        command.args(["--option", "restrict-eval", "true"]);
+        command.args([
             "--option",
             "build-timeout",
             &format!("{}", self.build_timeout),
         ]);
-        command.args(&["--argstr", "system", &self.system]);
+        command.args(["--argstr", "system", &self.system]);

         if self.limit_supported_systems {
-            command.args(&[
+            command.args([
                 "--arg",
                 "supportedSystems",
                 &format!("[\"{}\"]", &self.system),
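The `run` conversion above keeps the existing tempfile plumbing and only swaps the blocking `status()` call for the awaited `tokio::process` version. A standalone sketch of that plumbing: stderr goes to an anonymous tempfile, the child is awaited, and the file is rewound for the caller to read (`sh -c` is used here purely for portability):

```rust
use std::io::{Read, Seek, SeekFrom};
use std::process::Stdio;
use tempfile::tempfile;
use tokio::process::Command;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Anonymous tempfile for stderr, plus a second handle to read it back.
    let stderr = tempfile()?;
    let mut reader = stderr.try_clone()?;

    let status = Command::new("sh")
        .args(["-c", "echo oops >&2"])
        .stdin(Stdio::null())
        .stderr(Stdio::from(stderr))
        .status()
        .await?; // the only new step: await on the Tokio runtime

    // Rewind and hand the captured output back.
    reader.seek(SeekFrom::Start(0))?;
    let mut captured = String::new();
    reader.read_to_string(&mut captured)?;
    println!("exit {status}; stderr: {captured}");
    Ok(())
}
```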
@@ -374,33 +387,34 @@ impl Nix {
 fn lines_from_file(file: fs::File) -> Vec<String> {
     BufReader::new(file)
         .lines()
-        .filter_map(|line| line.ok())
+        .map_while(Result::ok)
         .filter(|msg| !is_user_setting_warning(msg))
         .collect()
 }

+#[must_use]
 pub fn is_user_setting_warning(line: &str) -> bool {
     let line = line.trim();
     line.starts_with("warning: ignoring the user-specified setting '")
         && line.ends_with("because it is a restricted setting and you are not a trusted user")
 }

-pub fn wait_for_build_status(spawned: SpawnedAsyncCmd) -> BuildStatus {
-    match spawned.wait() {
+pub async fn wait_for_build_status(mut spawned: Child) -> BuildStatus {
+    match spawned.wait().await {
         Ok(s) => match s.code() {
             Some(0) => BuildStatus::Success,
             Some(100) => BuildStatus::Failure, // nix permanent failure
             Some(101) => BuildStatus::TimedOut, // nix build timedout
             Some(102) => BuildStatus::HashMismatch, // Fixed Output Derivation's hash was wrong
             Some(i) => BuildStatus::UnexpectedError {
-                err: format!("command failed with exit code {}", i),
+                err: format!("command failed with exit code {i}"),
             },
             None => BuildStatus::UnexpectedError {
                 err: "unexpected build failure".into(),
             },
         },
         e => BuildStatus::UnexpectedError {
-            err: format!("failed on interior command {:?}", e),
+            err: format!("failed on interior command {e:?}"),
         },
     }
 }
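The exit-code mapping in `wait_for_build_status` is the contract with Nix: 100 is a permanent build failure, 101 a timeout, 102 a fixed-output hash mismatch, and a missing code means the child died to a signal. A runnable standalone sketch of the same mapping; `sh -c "exit 100"` simulates a failed build:

```rust
use tokio::process::{Child, Command};

#[derive(Debug)]
enum BuildStatus {
    Success,
    Failure,
    TimedOut,
    HashMismatch,
    UnexpectedError(String),
}

async fn wait_for_build_status(mut spawned: Child) -> BuildStatus {
    match spawned.wait().await {
        Ok(s) => match s.code() {
            Some(0) => BuildStatus::Success,
            Some(100) => BuildStatus::Failure,      // nix permanent failure
            Some(101) => BuildStatus::TimedOut,     // nix build timed out
            Some(102) => BuildStatus::HashMismatch, // fixed-output hash was wrong
            Some(i) => BuildStatus::UnexpectedError(format!("exit code {i}")),
            // code() is None when the process was killed by a signal
            None => BuildStatus::UnexpectedError("unexpected build failure".into()),
        },
        Err(e) => BuildStatus::UnexpectedError(format!("failed on interior command {e:?}")),
    }
}

#[tokio::main]
async fn main() {
    let child = Command::new("sh")
        .args(["-c", "exit 100"])
        .spawn()
        .expect("failed to spawn");
    println!("{:?}", wait_for_build_status(child).await);
}
```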
@@ -420,7 +434,7 @@ mod tests {
         let path = env::var("PATH").unwrap();
         let test_path = format!("{}/test-nix/bin:{}", env!("CARGO_MANIFEST_DIR"), path);
         env::set_var("PATH", test_path);
-        let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
+        let remote = env::var("NIX_REMOTE").unwrap_or_default();
         Nix::new(SYSTEM.to_owned(), remote, 1800, None)
     }

@@ -456,8 +470,7 @@ mod tests {

     fn strip_ansi(string: &str) -> String {
         string
-            .replace('‘', "'")
-            .replace('’', "'")
+            .replace(['‘', '’'], "'")
             .replace("\u{1b}[31;1m", "") // red
             .replace("\u{1b}[0m", "") // reset
     }

@@ -468,15 +481,14 @@ mod tests {
         Fail,
     }

-    fn assert_run(res: Result<fs::File, fs::File>, expected: Expect, require: Vec<&str>) {
+    fn assert_run(res: Result<fs::File, fs::File>, expected: &Expect, require: Vec<&str>) {
         let expectation_held: bool = match expected {
             Expect::Pass => res.is_ok(),
             Expect::Fail => res.is_err(),
         };

         let file: fs::File = match res {
-            Ok(file) => file,
-            Err(file) => file,
+            Ok(file) | Err(file) => file,
         };

         let lines = lines_from_file(file);

@@ -484,7 +496,7 @@ mod tests {
         let buildlog = lines
             .into_iter()
             .map(|line| strip_ansi(&line))
-            .map(|line| format!(" | {}", line))
+            .map(|line| format!(" | {line}"))
             .collect::<Vec<String>>()
             .join("\n");

@@ -492,7 +504,7 @@ mod tests {
         let mut missed_requirements: usize = 0;
         let requirements_held: Vec<Result<String, String>> = require
             .into_iter()
-            .map(|line| line.to_owned())
+            .map(std::borrow::ToOwned::to_owned)
             .map(|line| {
                 if buildlog.contains(&line) {
                     Ok(line)
@@ -503,36 +515,34 @@ mod tests {
             })
             .collect();

-        let mut prefixes: Vec<String> = vec!["".to_owned(), "".to_owned()];
+        let mut prefixes: Vec<String> = vec![String::new(), String::new()];

-        if !expectation_held {
-            prefixes.push(format!(
-                "The run was expected to {:?}, but did not.",
-                expected
-            ));
-            prefixes.push("".to_owned());
+        if expectation_held {
+            prefixes.push(format!("The run was expected to {expected:?}, and did."));
         } else {
-            prefixes.push(format!("The run was expected to {:?}, and did.", expected));
-            prefixes.push("".to_owned());
+            prefixes.push(format!(
+                "The run was expected to {expected:?}, but did not."
+            ));
         }
+        prefixes.push(String::new());

         let mut suffixes = vec![
-            "".to_owned(),
+            String::new(),
             format!(
                 "{} out of {} required lines matched.",
                 (total_requirements - missed_requirements),
                 total_requirements
             ),
-            "".to_owned(),
+            String::new(),
         ];

         for expected_line in requirements_held {
-            suffixes.push(format!(" - {:?}", expected_line));
+            suffixes.push(format!(" - {expected_line:?}"));
         }
-        suffixes.push("".to_owned());
+        suffixes.push(String::new());

         let output_blocks: Vec<Vec<String>> =
-            vec![prefixes, vec![buildlog, "".to_owned()], suffixes];
+            vec![prefixes, vec![buildlog, String::new()], suffixes];

         let output_blocks_strings: Vec<String> = output_blocks
             .into_iter()
@@ -547,70 +557,78 @@ mod tests {
         }
     }

-    #[test]
-    fn test_build_operations() {
+    #[tokio::test]
+    async fn test_build_operations() {
         let nix = nix();
         let op = noop(Operation::Build);
         assert_eq!(op.to_string(), "nix-build");

-        let ret: Result<fs::File, fs::File> = nix.run(
+        let ret: Result<fs::File, fs::File> = nix
+            .run(
                 nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
                 true,
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Pass,
+            &Expect::Pass,
             vec!["--no-out-link --keep-going", "--version"],
         );
     }

-    #[test]
-    fn test_instantiate_operation() {
+    #[tokio::test]
+    async fn test_instantiate_operation() {
         let nix = nix();
         let op = noop(Operation::Instantiate);
         assert_eq!(op.to_string(), "nix-instantiate");

-        let ret: Result<fs::File, fs::File> = nix.run(
+        let ret: Result<fs::File, fs::File> = nix
+            .run(
                 nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
                 true,
-        );
+            )
+            .await;

-        assert_run(ret, Expect::Pass, vec!["--version"]);
+        assert_run(ret, &Expect::Pass, vec!["--version"]);
     }

-    #[test]
-    fn test_query_packages_json() {
+    #[tokio::test]
+    async fn test_query_packages_json() {
         let nix = nix();
         let op = noop(Operation::QueryPackagesJson);
         assert_eq!(op.to_string(), "nix-env -qa --json");

-        let ret: Result<fs::File, fs::File> = nix.run(
+        let ret: Result<fs::File, fs::File> = nix
+            .run(
                 nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
                 true,
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Pass,
+            &Expect::Pass,
             vec!["--query --available --json", "--version"],
         );
     }

-    #[test]
-    fn test_query_packages_outputs() {
+    #[tokio::test]
+    async fn test_query_packages_outputs() {
         let nix = nix();
         let op = noop(Operation::QueryPackagesOutputs);
         assert_eq!(op.to_string(), "nix-env -qaP --no-name --out-path");

-        let ret: Result<fs::File, fs::File> = nix.run(
+        let ret: Result<fs::File, fs::File> = nix
+            .run(
                 nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
                 true,
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Pass,
+            &Expect::Pass,
             vec![
                 "--query --available --no-name --attr-path --out-path",
                 "--version",
@@ -618,18 +636,20 @@ mod tests {
             ],
         );
     }

-    #[test]
-    fn safe_command_environment() {
+    #[tokio::test]
+    async fn safe_command_environment() {
         let nix = nix();

-        let ret: Result<fs::File, fs::File> = nix.run(
+        let ret: Result<fs::File, fs::File> = nix
+            .run(
                 nix.safe_command::<&OsStr>(&env_noop(), build_path().as_path(), &[], &[]),
                 true,
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Pass,
+            &Expect::Pass,
             vec![
                 "HOME=/homeless-shelter",
                 "NIX_PATH=ofborg-nixpkgs-pr=",
@@ -639,19 +659,21 @@ mod tests {
             ],
         );
     }

-    #[test]
-    fn safe_command_custom_gc() {
-        let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
+    #[tokio::test]
+    async fn safe_command_custom_gc() {
+        let remote = env::var("NIX_REMOTE").unwrap_or_default();
         let nix = Nix::new(SYSTEM.to_owned(), remote, 1800, Some("4g".to_owned()));

-        let ret: Result<fs::File, fs::File> = nix.run(
+        let ret: Result<fs::File, fs::File> = nix
+            .run(
                 nix.safe_command::<&OsStr>(&env_noop(), build_path().as_path(), &[], &[]),
                 true,
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Pass,
+            &Expect::Pass,
             vec![
                 "HOME=/homeless-shelter",
                 "NIX_PATH=ofborg-nixpkgs-pr=",
@@ -662,57 +684,59 @@ mod tests {
             ],
         );
     }

-    #[test]
-    fn safe_command_options() {
+    #[tokio::test]
+    async fn safe_command_options() {
         let nix = nix();
         let op = noop(Operation::Build);

-        let ret: Result<fs::File, fs::File> = nix.run(
+        let ret: Result<fs::File, fs::File> = nix
+            .run(
                 nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]),
                 true,
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Pass,
+            &Expect::Pass,
             vec!["--option restrict-eval true", "--option build-timeout 1800"],
         );
     }

-    #[test]
-    fn set_attrs_nixpkgs() {
+    #[tokio::test]
+    async fn set_attrs_nixpkgs() {
         let nix = nix();
         let op = noop(Operation::Build);

         let mut command = nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]);
-        nix.set_attrs_command(
+        Nix::set_attrs_command(
             &mut command,
             File::DefaultNixpkgs,
             vec!["foo".into(), "bar".into()],
         );

-        let ret: Result<fs::File, fs::File> = nix.run(command, true);
+        let ret: Result<fs::File, fs::File> = nix.run(command, true).await;

-        assert_run(ret, Expect::Pass, vec!["./default.nix", "-A foo -A bar"]);
+        assert_run(ret, &Expect::Pass, vec!["./default.nix", "-A foo -A bar"]);
     }

-    #[test]
-    fn set_attrs_nixos() {
+    #[tokio::test]
+    async fn set_attrs_nixos() {
         let nix = nix();
         let op = noop(Operation::Instantiate);

         let mut command = nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]);
-        nix.set_attrs_command(
+        Nix::set_attrs_command(
             &mut command,
             File::ReleaseNixOS,
             vec!["foo".into(), "bar".into()],
         );

-        let ret: Result<fs::File, fs::File> = nix.run(command, true);
+        let ret: Result<fs::File, fs::File> = nix.run(command, true).await;

         assert_run(
             ret,
-            Expect::Pass,
+            &Expect::Pass,
             vec![
                 "./nixos/release.nix",
                 "--arg nixpkgs { outPath=./.; revCount=999999; shortRev=\"ofborg\"; rev=\"0000000000000000000000000000000000000000\"; }",
@@ -720,36 +744,40 @@ mod tests {
             ],
         );
     }

-    #[test]
-    fn safely_build_attrs_success() {
+    #[tokio::test]
+    async fn safely_build_attrs_success() {
         let nix = nix();

-        let ret: Result<fs::File, fs::File> = nix.safely_build_attrs(
+        let ret: Result<fs::File, fs::File> = nix
+            .safely_build_attrs(
                 build_path().as_path(),
                 File::DefaultNixpkgs,
                 vec![String::from("success")],
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Pass,
+            &Expect::Pass,
             vec!["-success.drv", "building ", "hi", "-success"],
         );
     }

-    #[test]
-    fn safely_build_attrs_failure() {
+    #[tokio::test]
+    async fn safely_build_attrs_failure() {
         let nix = nix();

-        let ret: Result<fs::File, fs::File> = nix.safely_build_attrs(
+        let ret: Result<fs::File, fs::File> = nix
+            .safely_build_attrs(
                 build_path().as_path(),
                 File::DefaultNixpkgs,
                 vec![String::from("failed")],
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Fail,
+            &Expect::Fail,
             vec![
                 "-failed.drv",
                 "building ",
@@ -759,8 +787,8 @@ mod tests {
             ],
         );
     }

-    #[test]
-    fn partition_instantiable_attributes() {
+    #[tokio::test]
+    async fn partition_instantiable_attributes() {
         let nix = nix();

         let ret: (Vec<String>, Vec<(String, Vec<String>)>) = nix
@@ -772,7 +800,8 @@ mod tests {
                 String::from("passes-instantiation"),
                 String::from("missing-attr"),
             ],
-        );
+            )
+            .await;

         assert_eq!(ret.0, vec!["passes-instantiation"]);

@@ -791,80 +820,90 @@ mod tests {
         );
     }

-    #[test]
-    fn safely_instantiate_attrs_failure() {
+    #[tokio::test]
+    async fn safely_instantiate_attrs_failure() {
         let nix = nix();

-        let ret: Result<fs::File, fs::File> = nix.safely_instantiate_attrs(
+        let ret: Result<fs::File, fs::File> = nix
+            .safely_instantiate_attrs(
                 individual_eval_path().as_path(),
                 File::DefaultNixpkgs,
                 vec![String::from("fails-instantiation")],
-        );
+            )
+            .await;

         assert_run(
             ret,
-            Expect::Fail,
+            &Expect::Fail,
             vec!["You just can't", "assertion", "failed"],
         );
     }

-    #[test]
-    fn safely_instantiate_attrs_success() {
+    #[tokio::test]
+    async fn safely_instantiate_attrs_success() {
         let nix = nix();

-        let ret: Result<fs::File, fs::File> = nix.safely_instantiate_attrs(
+        let ret: Result<fs::File, fs::File> = nix
+            .safely_instantiate_attrs(
                 individual_eval_path().as_path(),
                 File::DefaultNixpkgs,
                 vec![String::from("passes-instantiation")],
-        );
+            )
+            .await;

-        assert_run(ret, Expect::Pass, vec!["-passes-instantiation.drv"]);
+        assert_run(ret, &Expect::Pass, vec!["-passes-instantiation.drv"]);
     }

-    #[test]
-    fn safely_evaluate_expr_success() {
+    #[tokio::test]
+    async fn safely_evaluate_expr_success() {
         let nix = nix();

-        let ret: Result<fs::File, fs::File> = nix.run(
+        let ret: Result<fs::File, fs::File> = nix
+            .run(
                 nix.safely_evaluate_expr_cmd(
                     individual_eval_path().as_path(),
                     r#"{ foo ? "bar" }: "The magic value is ${foo}""#,
-                    [("foo", "tux")].iter().cloned().collect(),
+                    [("foo", "tux")].iter().copied().collect(),
                     &[],
                 ),
                 true,
-        );
+            )
+            .await;

-        assert_run(ret, Expect::Pass, vec!["The magic value is tux"]);
+        assert_run(ret, &Expect::Pass, vec!["The magic value is tux"]);
     }

-    #[test]
-    fn strict_sandboxing() {
+    #[tokio::test]
+    async fn strict_sandboxing() {
         let ret: Result<fs::File, fs::File> = nix().safely_build_attrs(
|
let ret: Result<fs::File, fs::File> = nix()
|
||||||
|
.safely_build_attrs(
|
||||||
build_path().as_path(),
|
build_path().as_path(),
|
||||||
File::DefaultNixpkgs,
|
File::DefaultNixpkgs,
|
||||||
vec![String::from("sandbox-violation")],
|
vec![String::from("sandbox-violation")],
|
||||||
);
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
assert_run(
|
assert_run(
|
||||||
ret,
|
ret,
|
||||||
Expect::Fail,
|
&Expect::Fail,
|
||||||
vec!["access to absolute path", "is forbidden in restricted mode"],
|
vec!["access to absolute path", "is forbidden in restricted mode"],
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn instantiation_success() {
|
async fn instantiation_success() {
|
||||||
let ret: Result<fs::File, fs::File> = nix().safely(
|
let ret: Result<fs::File, fs::File> = nix()
|
||||||
|
.safely(
|
||||||
&Operation::Instantiate,
|
&Operation::Instantiate,
|
||||||
passing_eval_path().as_path(),
|
passing_eval_path().as_path(),
|
||||||
vec![],
|
vec![],
|
||||||
true,
|
true,
|
||||||
);
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
assert_run(
|
assert_run(
|
||||||
ret,
|
ret,
|
||||||
Expect::Pass,
|
&Expect::Pass,
|
||||||
vec![
|
vec![
|
||||||
"the result might be removed by the garbage collector",
|
"the result might be removed by the garbage collector",
|
||||||
"-failed.drv",
|
"-failed.drv",
|
||||||
|
@ -873,18 +912,20 @@ mod tests {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[tokio::test]
|
||||||
fn instantiation_nixpkgs_restricted_mode() {
|
async fn instantiation_nixpkgs_restricted_mode() {
|
||||||
let ret: Result<fs::File, fs::File> = nix().safely(
|
let ret: Result<fs::File, fs::File> = nix()
|
||||||
|
.safely(
|
||||||
&Operation::Instantiate,
|
&Operation::Instantiate,
|
||||||
individual_eval_path().as_path(),
|
individual_eval_path().as_path(),
|
||||||
vec![String::from("-A"), String::from("nixpkgs-restricted-mode")],
|
vec![String::from("-A"), String::from("nixpkgs-restricted-mode")],
|
||||||
true,
|
true,
|
||||||
);
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
assert_run(
|
assert_run(
|
||||||
ret,
|
ret,
|
||||||
Expect::Fail,
|
&Expect::Fail,
|
||||||
vec![
|
vec![
|
||||||
"access to absolute path '/fake'",
|
"access to absolute path '/fake'",
|
||||||
"is forbidden in restricted mode",
|
"is forbidden in restricted mode",
|
||||||
|
|
|
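
Note on the test hunks above: the migration is mechanical throughout this file. `#[test] fn` becomes `#[tokio::test] async fn`, every call that now returns a future gains `.await`, and `Expect` is passed by reference instead of by value. A minimal self-contained sketch of the same shape, assuming only the tokio crate (with the `macros` and `rt` features); `run_check` is a hypothetical stand-in for `nix.run`, not ofborg's real API:

    // A future-returning helper standing in for nix.run(command, true).
    async fn run_check(input: &str) -> Result<String, String> {
        Ok(input.to_uppercase())
    }

    // #[tokio::test] spins up a runtime per test so the body can await.
    #[tokio::test]
    async fn awaits_like_the_ported_tests() {
        let ret = run_check("-a foo -a bar").await;
        assert_eq!(ret.unwrap(), "-A FOO -A BAR");
    }
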
@@ -17,6 +17,7 @@ pub struct HydraNixEnv {
 }
 
 impl HydraNixEnv {
+    #[must_use]
     pub fn new(nix: nix::Nix, path: PathBuf, check_meta: bool) -> HydraNixEnv {
         HydraNixEnv {
             path,
@@ -25,11 +26,11 @@ impl HydraNixEnv {
         }
     }
 
-    pub fn execute_with_stats(
+    pub async fn execute_with_stats(
         &self,
     ) -> Result<(outpathdiff::PackageOutPaths, EvaluationStats), Error> {
         self.place_nix()?;
-        let (status, stdout, stderr, stats) = self.run_nix_env();
+        let (status, stdout, stderr, stats) = self.run_nix_env().await;
         self.remove_nix()?;
 
         if status {
@@ -79,7 +80,7 @@ impl HydraNixEnv {
         // when it fails to evaluate something. In this case, we can ignore (but
         // warn about) the error.
         if let Err(e) = fs::remove_file(&outpath_stats) {
-            warn!("Failed to remove file {:?}: {:?}", outpath_stats, e)
+            warn!("Failed to remove file {:?}: {:?}", outpath_stats, e);
         }
 
         Ok(())
@@ -93,7 +94,7 @@ impl HydraNixEnv {
         self.path.join(".gc-of-borg-stats.json")
     }
 
-    fn run_nix_env(&self) -> (bool, File, File, Result<File, io::Error>) {
+    async fn run_nix_env(&self) -> (bool, File, File, Result<File, io::Error>) {
         let check_meta = if self.check_meta { "true" } else { "false" };
 
         let mut cmd = self.nix.safe_command(
@@ -111,7 +112,7 @@ impl HydraNixEnv {
         cmd.env("NIX_SHOW_STATS", "1");
         cmd.env("NIX_SHOW_STATS_PATH", self.outpath_stats_path());
 
-        let (status, stdout, stderr) = self.nix.run_stderr_stdout(cmd);
+        let (status, stdout, stderr) = self.nix.run_stderr_stdout(cmd).await;
         let stats = File::open(self.outpath_stats_path());
 
         (status, stdout, stderr, stats)
@@ -135,13 +136,14 @@ impl From<io::Error> for Error {
 }
 
 impl Error {
+    #[must_use]
     pub fn display(self) -> String {
         match self {
-            Error::Io(e) => format!("Failed during the setup of executing nix-env: {:?}", e),
-            Error::CreateFile(path, err) => format!("Failed to create file {:?}: {:?}", path, err),
-            Error::RemoveFile(path, err) => format!("Failed to remove file {:?}: {:?}", path, err),
+            Error::Io(e) => format!("Failed during the setup of executing nix-env: {e:?}"),
+            Error::CreateFile(path, err) => format!("Failed to create file {path:?}: {err:?}"),
+            Error::RemoveFile(path, err) => format!("Failed to remove file {path:?}: {err:?}"),
             Error::WriteFile(file, err) => {
-                format!("Failed to write to file '{:?}': {:?}", file, err)
+                format!("Failed to write to file '{file:?}': {err:?}")
             }
             Error::CommandFailed(mut fd) => {
                 let mut buffer = Vec::new();
@@ -149,15 +151,14 @@ impl Error {
                 let bufstr = String::from_utf8_lossy(&buffer);
 
                 match read_result {
-                    Ok(_) => format!("nix-env failed:\n{}", bufstr),
+                    Ok(_) => format!("nix-env failed:\n{bufstr}"),
                     Err(e) => format!(
-                        "nix-env failed and loading the error result caused a new error {:?}\n\n{}",
-                        e, bufstr
+                        "nix-env failed and loading the error result caused a new error {e:?}\n\n{bufstr}"
                     ),
                 }
             }
             Error::UncleanEvaluation(warnings) => {
-                format!("nix-env did not evaluate cleanly:\n {:?}", warnings)
+                format!("nix-env did not evaluate cleanly:\n {warnings:?}")
             }
             Error::StatsParse(mut fd, seek, parse_err) => {
                 let mut buffer = Vec::new();
@@ -169,21 +170,19 @@ impl Error {
 
                 if let Err(seek_err) = seek {
                     lines.push_str(&format!(
-                        "Additionally, resetting to the beginning of the output failed with:\n{:?}\n\n",
-                        seek_err
+                        "Additionally, resetting to the beginning of the output failed with:\n{seek_err:?}\n\n"
                     ));
                 }
 
                 if let Err(read_err) = read_result {
                     lines.push_str(&format!(
-                        "Additionally, loading the output failed with:\n{:?}\n\n",
-                        read_err
+                        "Additionally, loading the output failed with:\n{read_err:?}\n\n"
                     ));
                 }
 
-                lines.push_str(&format!("Parse error:\n{:?}\n\n", parse_err));
+                lines.push_str(&format!("Parse error:\n{parse_err:?}\n\n"));
 
-                lines.push_str(&format!("Evaluation output:\n{}", bufstr));
+                lines.push_str(&format!("Evaluation output:\n{bufstr}"));
 
                 lines
             }
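
Note: the `format!` rewrites in this file (and in the files below) are the Rust 2021 inlined format arguments that clippy's `uninlined_format_args` lint suggests; the rendered output is unchanged. A small self-contained demonstration (the `describe` helper is illustrative, not from this codebase):

    use std::path::Path;

    fn describe(path: &Path, err: &std::io::Error) -> String {
        // Positional form, as the old code had it:
        let old = format!("Failed to remove file {:?}: {:?}", path, err);
        // Captured identifiers, as the new code writes it:
        let new = format!("Failed to remove file {path:?}: {err:?}");
        assert_eq!(old, new); // byte-for-byte identical output
        new
    }
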
@@ -1,4 +1,4 @@
-//! Statistics emitted by Nix when NIX_SHOW_STATS=1
+//! Statistics emitted by Nix when `NIX_SHOW_STATS=1`
 use separator::Separatable;
 
 use std::collections::HashMap;
@@ -102,23 +102,14 @@ pub struct EvaluationStatsDiff<'a> {
     right: &'a EvaluationStats,
 }
 
-impl<'a> EvaluationStatsDiff<'a> {
-    pub fn compare(
-        left: &'a EvaluationStats,
-        right: &'a EvaluationStats,
-    ) -> EvaluationStatsDiff<'a> {
-        EvaluationStatsDiff { left, right }
-    }
-
-    pub fn markdown(&self) -> String {
-        struct Row {
+struct Row {
     before: String,
     after: String,
     diff: String,
     diff_pct: String,
 }
 
 impl Row {
     fn from_u64(left: u64, right: u64) -> Row {
         let (diff, direction): (u64, _) = match left.cmp(&right) {
             std::cmp::Ordering::Greater => (left - right, "↘ "),
@@ -126,13 +117,14 @@ impl<'a> EvaluationStatsDiff<'a> {
             std::cmp::Ordering::Equal => (0, ""),
         };
 
+        #[allow(clippy::cast_precision_loss)]
         let diff_pct: String = if diff > 0 {
             format!(
                 "{:.2}%",
                 ((right as f64) - (left as f64)) / (left as f64) * 100.0
             )
         } else {
-            String::from("")
+            String::new()
         };
 
         Row {
@@ -150,27 +142,39 @@ impl<'a> EvaluationStatsDiff<'a> {
         {
             std::cmp::Ordering::Greater => (left - right, "↘ "),
             std::cmp::Ordering::Less => (right - left, "↗ "),
-            std::cmp::Ordering::Equal => (0 as f32, ""),
+            std::cmp::Ordering::Equal => (0.0, ""),
         };
 
-        let diff_pct: String = if diff > 0 as _ {
+        let diff_pct: String = if diff > 0.0 {
             format!(
                 "{:.2}%",
-                ((right as f64) - (left as f64)) / (left as f64) * 100.0
+                (f64::from(right) - f64::from(left)) / f64::from(left) * 100.0
             )
         } else {
-            String::from("")
+            String::new()
         };
 
         Row {
-            before: format!("{:.2}", left),
-            after: format!("{:.2}", right),
-            diff: format!("{}{:.2}", direction, diff),
+            before: format!("{left:.2}"),
+            after: format!("{right:.2}"),
+            diff: format!("{direction}{diff:.2}"),
             diff_pct,
         }
     }
+}
 
+impl<'a> EvaluationStatsDiff<'a> {
+    #[must_use]
+    pub fn compare(
+        left: &'a EvaluationStats,
+        right: &'a EvaluationStats,
+    ) -> EvaluationStatsDiff<'a> {
+        EvaluationStatsDiff { left, right }
     }
 
+    #[must_use]
+    #[allow(clippy::too_many_lines)]
+    pub fn markdown(&self) -> String {
         let mut data: HashMap<&str, Row> = HashMap::new();
         data.insert(
             "cpuTime",
@@ -310,7 +314,7 @@ impl<'a> EvaluationStatsDiff<'a> {
             },
         );
 
-        let mut keys = data.keys().cloned().collect::<Vec<&str>>();
+        let mut keys = data.keys().copied().collect::<Vec<&str>>();
         keys.sort_unstable();
 
         let rows = keys
@@ -442,53 +446,54 @@ mod tests {
     "#;
 
     #[test]
+    #[allow(clippy::float_cmp)] // FIXME
    fn verify_load() {
         let load: EvaluationStats = serde_json::from_str(EXAMPLE).unwrap();
 
         assert_eq!(load.cpu_time, 135.2);
-        assert_eq!(load.envs.number, 130714125);
-        assert_eq!(load.envs.elements, 183953876);
-        assert_eq!(load.envs.bytes, 3563057008);
+        assert_eq!(load.envs.number, 130_714_125);
+        assert_eq!(load.envs.elements, 183_953_876);
+        assert_eq!(load.envs.bytes, 3_563_057_008);
 
-        assert_eq!(load.list.elements, 207421516);
-        assert_eq!(load.list.bytes, 1659372128);
-        assert_eq!(load.list.concats, 7194150);
+        assert_eq!(load.list.elements, 207_421_516);
+        assert_eq!(load.list.bytes, 1_659_372_128);
+        assert_eq!(load.list.concats, 7_194_150);
 
-        assert_eq!(load.values.number, 260454370);
-        assert_eq!(load.values.bytes, 6250904880);
+        assert_eq!(load.values.number, 260_454_370);
+        assert_eq!(load.values.bytes, 6_250_904_880);
 
-        assert_eq!(load.symbols.number, 372918);
-        assert_eq!(load.symbols.bytes, 16324262);
+        assert_eq!(load.symbols.number, 372_918);
+        assert_eq!(load.symbols.bytes, 16_324_262);
 
-        assert_eq!(load.sets.number, 27310541);
-        assert_eq!(load.sets.bytes, 7134676648);
-        assert_eq!(load.sets.elements, 288174680);
+        assert_eq!(load.sets.number, 27_310_541);
+        assert_eq!(load.sets.bytes, 7_134_676_648);
+        assert_eq!(load.sets.elements, 288_174_680);
 
         assert_eq!(load.sizes.env, 16);
         assert_eq!(load.sizes.value, 24);
         assert_eq!(load.sizes.bindings, 8);
         assert_eq!(load.sizes.attr, 24);
 
-        assert_eq!(load.nr_op_updates, 11883339);
-        assert_eq!(load.nr_op_update_values_copied, 208834564);
-        assert_eq!(load.nr_thunks, 173325665);
-        assert_eq!(load.nr_avoided, 177840681);
-        assert_eq!(load.nr_lookups, 75292052);
-        assert_eq!(load.nr_prim_op_calls, 85571252);
-        assert_eq!(load.nr_function_calls, 115193164);
+        assert_eq!(load.nr_op_updates, 11_883_339);
+        assert_eq!(load.nr_op_update_values_copied, 208_834_564);
+        assert_eq!(load.nr_thunks, 173_325_665);
+        assert_eq!(load.nr_avoided, 177_840_681);
+        assert_eq!(load.nr_lookups, 75_292_052);
+        assert_eq!(load.nr_prim_op_calls, 85_571_252);
+        assert_eq!(load.nr_function_calls, 115_193_164);
 
-        assert_eq!(load.gc.heap_size, 12104687616);
-        assert_eq!(load.gc.total_bytes, 24191819392);
+        assert_eq!(load.gc.heap_size, 12_104_687_616);
+        assert_eq!(load.gc.total_bytes, 24_191_819_392);
     }
 
     fn diff_text(left: &str, right: &str) {
-        println!("left:\n{}", left);
-        println!("right:\n{}", right);
+        println!("left:\n{left}");
+        println!("right:\n{right}");
 
         let lines = left.split('\n').zip(right.split('\n'));
 
         for (idx, (linea, lineb)) in lines.enumerate() {
-            assert_eq!(linea, lineb, "Line {}", idx);
+            assert_eq!(linea, lineb, "Line {idx}");
         }
     }
 
@@ -499,7 +504,7 @@ mod tests {
 
         diff_text(
             &EvaluationStatsDiff::compare(&left, &right).markdown(),
-            r#"
+            r"
 | stat | before | after | Δ | Δ% |
 |:---------------------------|---------------:|---------------:|:--------------|-------:|
 | **cpuTime** | 135.20 | 132.90 | ↘ 2.30 | -1.70% |
@@ -529,7 +534,7 @@ mod tests {
 | **symbols-number** | 372,918 | 372,917 | ↘ 1 | -0.00% |
 | **values-bytes** | 6,250,904,880 | 5,869,027,296 | ↘ 381,877,584 | -6.11% |
 | **values-number** | 260,454,370 | 244,542,804 | ↘ 15,911,566 | -6.11% |
-"#
+"
             .trim_start(),
         );
     }

@@ -1,11 +1,13 @@
 use crate::worker::Action;
+use async_trait::async_trait;
 
-pub trait SimpleNotifyWorker {
-    type J;
+#[async_trait]
+pub trait SimpleNotifyWorker: Send + Sync {
+    type J: Send + Sync;
 
-    fn consumer(&self, job: &Self::J, notifier: &mut dyn NotificationReceiver);
+    async fn consumer(&self, job: &Self::J, notifier: &mut dyn NotificationReceiver);
 
-    fn msg_to_job(
+    async fn msg_to_job(
         &self,
         routing_key: &str,
         content_type: &Option<String>,
@@ -13,8 +15,9 @@ pub trait SimpleNotifyWorker {
     ) -> Result<Self::J, String>;
 }
 
-pub trait NotificationReceiver {
-    fn tell(&mut self, action: Action);
+#[async_trait]
+pub trait NotificationReceiver: Send + Sync {
+    async fn tell(&mut self, action: Action);
 }
 
 #[derive(Default)]
@@ -23,13 +26,15 @@ pub struct DummyNotificationReceiver {
 }
 
 impl DummyNotificationReceiver {
+    #[must_use]
     pub fn new() -> DummyNotificationReceiver {
-        Default::default()
+        DummyNotificationReceiver::default()
     }
 }
 
+#[async_trait]
 impl NotificationReceiver for DummyNotificationReceiver {
-    fn tell(&mut self, action: Action) {
+    async fn tell(&mut self, action: Action) {
         self.actions.push(action);
     }
 }
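
Note: `async fn` is not directly expressible in traits that must stay object-safe (the returned future has no statically known size behind `dyn`), which is why both traits and all of their impls gain `#[async_trait]`: the macro boxes each returned future, and that boxed `Send` future is also why the traits pick up `Send + Sync` bounds. A minimal sketch, assuming the async-trait crate; the `Notifier`/`Collector` names are illustrative, not this codebase's:

    use async_trait::async_trait;

    #[async_trait]
    pub trait Notifier: Send + Sync {
        // Desugars to: fn tell(&mut self, ...) -> Pin<Box<dyn Future<Output = ()> + Send + '_>>
        async fn tell(&mut self, message: String);
    }

    struct Collector(Vec<String>);

    #[async_trait]
    impl Notifier for Collector {
        async fn tell(&mut self, message: String) {
            self.0.push(message);
        }
    }

    // The boxing keeps the trait object-safe, so &mut dyn Notifier still works:
    async fn drive(n: &mut dyn Notifier) {
        n.tell("hello".to_owned()).await;
    }
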
@@ -15,6 +15,7 @@ pub struct OutPathDiff {
 }
 
 impl OutPathDiff {
+    #[must_use]
     pub fn new(nix: nix::Nix, path: PathBuf) -> OutPathDiff {
         OutPathDiff {
             calculator: HydraNixEnv::new(nix, path, false),
@@ -23,21 +24,22 @@ impl OutPathDiff {
         }
     }
 
-    pub fn find_before(&mut self) -> Result<(), NixEnvError> {
-        self.original = Some(self.run()?);
+    pub async fn find_before(&mut self) -> Result<(), NixEnvError> {
+        self.original = Some(self.run().await?);
         Ok(())
     }
 
-    pub fn find_after(&mut self) -> Result<(), NixEnvError> {
+    pub async fn find_after(&mut self) -> Result<(), NixEnvError> {
         if self.original.is_none() {
             debug!("Before is None, not bothering with After");
             return Ok(());
         }
 
-        self.current = Some(self.run()?);
+        self.current = Some(self.run().await?);
         Ok(())
     }
 
+    #[must_use]
     pub fn performance_diff(&self) -> Option<EvaluationStatsDiff> {
         if let Some((_, ref cur)) = self.current {
             if let Some((_, ref orig)) = self.original {
@@ -50,6 +52,7 @@ impl OutPathDiff {
         }
     }
 
+    #[must_use]
     pub fn package_diff(&self) -> Option<(Vec<PackageArch>, Vec<PackageArch>)> {
         if let Some((ref cur, _)) = self.current {
             if let Some((ref orig, _)) = self.original {
@@ -80,11 +83,11 @@ impl OutPathDiff {
         if let Some((ref orig, _)) = self.original {
             for key in cur.keys() {
                 trace!("Checking out {:?}", key);
-                if cur.get(key) != orig.get(key) {
-                    trace!(" {:?} != {:?}", cur.get(key), orig.get(key));
-                    rebuild.push(key.clone())
-                } else {
+                if cur.get(key) == orig.get(key) {
                     trace!(" {:?} == {:?}", cur.get(key), orig.get(key));
+                } else {
+                    trace!(" {:?} != {:?}", cur.get(key), orig.get(key));
+                    rebuild.push(key.clone());
                 }
             }
 
@@ -95,8 +98,8 @@ impl OutPathDiff {
         None
     }
 
-    fn run(&mut self) -> Result<(PackageOutPaths, EvaluationStats), NixEnvError> {
-        self.calculator.execute_with_stats()
+    async fn run(&mut self) -> Result<(PackageOutPaths, EvaluationStats), NixEnvError> {
+        self.calculator.execute_with_stats().await
     }
 }

@@ -1,16 +1,15 @@
-use async_std::task;
+use async_trait::async_trait;
 use lapin::options::BasicPublishOptions;
 
-include!(concat!(env!("OUT_DIR"), "/events.rs"));
-#[macro_use]
-mod macros {
-    #[macro_export]
-    macro_rules! my_macro(() => (FooBar));
+#[allow(clippy::all, clippy::pedantic)]
+mod events {
+    include!(concat!(env!("OUT_DIR"), "/events.rs"));
 }
+pub use events::*;
 
+#[async_trait]
 pub trait SysEvents: Send {
-    fn notify(&mut self, event: Event);
+    async fn notify(&mut self, event: Event);
 }
 
 #[derive(Serialize, Deserialize, Debug)]
@@ -25,6 +24,7 @@ pub struct RabbitMq<C> {
 }
 
 impl RabbitMq<lapin::Channel> {
+    #[must_use]
     pub fn from_lapin(identity: &str, channel: lapin::Channel) -> Self {
         RabbitMq {
             identity: identity.to_owned(),
@@ -33,10 +33,10 @@ impl RabbitMq<lapin::Channel> {
     }
 }
 
+#[async_trait]
 impl SysEvents for RabbitMq<lapin::Channel> {
-    fn notify(&mut self, event: Event) {
+    async fn notify(&mut self, event: Event) {
         let props = lapin::BasicProperties::default().with_content_type("application/json".into());
-        task::block_on(async {
         let _confirmaton = self
             .channel
             .basic_publish(
@@ -55,6 +55,5 @@ impl SysEvents for RabbitMq<lapin::Channel> {
             .unwrap()
             .await
             .unwrap();
-        });
     }
 }
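
Note: dropping `task::block_on` is the substance of the hunk above. Once `notify` is itself `async`, the publish future is awaited inline and composes with the caller's runtime, instead of parking a thread inside a nested executor. A tiny sketch of the before/after shape; `publish` is a hypothetical stand-in for the lapin `basic_publish` chain:

    async fn publish() -> Result<(), ()> {
        Ok(())
    }

    // Before: a sync method smuggling in async work.
    //     fn notify(&mut self) { task::block_on(async { publish().await.unwrap() }) }
    // After: the async propagates outward to the caller.
    async fn notify() {
        publish().await.unwrap();
    }
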
@@ -18,16 +18,16 @@ impl std::fmt::Display for System {
 }
 
 impl System {
+    #[must_use]
     pub fn as_build_destination(&self) -> (Option<String>, Option<String>) {
-        (None, Some(format!("build-inputs-{}", self)))
+        (None, Some(format!("build-inputs-{self}")))
     }
 
+    #[must_use]
     pub fn can_run_nixos_tests(&self) -> bool {
         match self {
-            System::X8664Linux => true,
-            System::Aarch64Linux => true,
-            System::X8664Darwin => false,
-            System::Aarch64Darwin => false,
+            System::X8664Linux | System::Aarch64Linux => true,
+            System::X8664Darwin | System::Aarch64Darwin => false,
         }
     }
 }

@@ -25,8 +25,9 @@ impl Default for StdenvTagger {
 }
 
 impl StdenvTagger {
+    #[must_use]
     pub fn new() -> StdenvTagger {
-        Default::default()
+        StdenvTagger::default()
     }
 
     pub fn changed(&mut self, systems: Vec<tasks::eval::stdenvs::System>) {
@@ -42,19 +43,20 @@ impl StdenvTagger {
         }
 
         for tag in &self.selected {
-            if !self.possible.contains(tag) {
-                panic!(
+            assert!(
+                self.possible.contains(tag),
                 "Tried to add label {} but it isn't in the possible list!",
                 tag
             );
-            }
         }
     }
 
+    #[must_use]
     pub fn tags_to_add(&self) -> Vec<String> {
         self.selected.clone()
     }
 
+    #[must_use]
     pub fn tags_to_remove(&self) -> Vec<String> {
         let mut remove = self.possible.clone();
         for tag in &self.selected {
@@ -87,8 +89,9 @@ impl Default for PkgsAddedRemovedTagger {
 }
 
 impl PkgsAddedRemovedTagger {
+    #[must_use]
     pub fn new() -> PkgsAddedRemovedTagger {
-        Default::default()
+        PkgsAddedRemovedTagger::default()
     }
 
     pub fn changed(&mut self, removed: &[PackageArch], added: &[PackageArch]) {
@@ -101,10 +104,12 @@ impl PkgsAddedRemovedTagger {
         }
     }
 
+    #[must_use]
     pub fn tags_to_add(&self) -> Vec<String> {
         self.selected.clone()
     }
 
+    #[must_use]
     pub fn tags_to_remove(&self) -> Vec<String> {
         // The cleanup tag is too vague to automatically remove.
         vec![]
@@ -147,8 +152,9 @@ impl Default for RebuildTagger {
 }
 
 impl RebuildTagger {
+    #[must_use]
     pub fn new() -> RebuildTagger {
-        Default::default()
+        RebuildTagger::default()
     }
 
     pub fn parse_attrs(&mut self, attrs: Vec<PackageArch>) {
@@ -160,12 +166,10 @@ impl RebuildTagger {
                 "x86_64-darwin" => {
                     counter_darwin += 1;
                 }
-                "aarch64-darwin" => {}
                 "x86_64-linux" => {
                     counter_linux += 1;
                 }
-                "aarch64-linux" => {}
-                "i686-linux" => {}
+                "aarch64-darwin" | "aarch64-linux" | "i686-linux" => {}
                 arch => {
                     info!("Unknown arch: {:?}", arch);
                 }
@@ -176,35 +180,36 @@ impl RebuildTagger {
         self.selected.extend(
             RebuildTagger::bucket(counter_darwin)
                 .iter()
-                .map(|bucket| format!("10.rebuild-darwin: {}", bucket))
+                .map(|bucket| format!("10.rebuild-darwin: {bucket}"))
                 .collect::<Vec<String>>(),
         );
 
         self.selected.extend(
             RebuildTagger::bucket(counter_linux)
                 .iter()
-                .map(|bucket| format!("10.rebuild-linux: {}", bucket))
+                .map(|bucket| format!("10.rebuild-linux: {bucket}"))
                 .collect::<Vec<String>>(),
         );
 
         for tag in &self.selected {
-            if !self.possible.contains(tag) {
-                panic!(
+            assert!(
+                self.possible.contains(tag),
                 "Tried to add label {} but it isn't in the possible list!",
                 tag
             );
-            }
         }
     }
 
+    #[must_use]
     pub fn tags_to_add(&self) -> Vec<String> {
         self.selected.clone()
     }
 
+    #[must_use]
     pub fn tags_to_remove(&self) -> Vec<String> {
         let mut remove = vec![];
 
-        for tag in self.possible.clone().into_iter() {
+        for tag in self.possible.clone() {
             if !self.selected.contains(&tag) {
                 remove.push(tag);
             }
@@ -254,8 +259,9 @@ impl Default for MaintainerPrTagger {
 }
 
 impl MaintainerPrTagger {
+    #[must_use]
     pub fn new() -> MaintainerPrTagger {
-        Default::default()
+        MaintainerPrTagger::default()
     }
 
     pub fn record_maintainer(
@@ -270,7 +276,7 @@ impl MaintainerPrTagger {
             return;
         }
 
-        for (_package, maintainers) in identified_maintainers.0.iter() {
+        for maintainers in identified_maintainers.0.values() {
             if !maintainers.contains(&submitter) {
                 // One of the packages is not maintained by this submitter
                 return;
@@ -281,10 +287,12 @@ impl MaintainerPrTagger {
             .push(String::from("11.by: package-maintainer"));
     }
 
+    #[must_use]
     pub fn tags_to_add(&self) -> Vec<String> {
         self.selected.clone()
     }
 
+    #[must_use]
    pub fn tags_to_remove(&self) -> Vec<String> {
         // The cleanup tag is too vague to automatically remove.
         vec![]
@@ -314,14 +322,12 @@ mod tests {
     impl From<PackageArchSrc> for Vec<PackageArch> {
         fn from(src: PackageArchSrc) -> Vec<PackageArch> {
             let darwin: Vec<PackageArch> = (0..src.darwin)
-                .into_iter()
                 .map(|_| PackageArch {
                     package: String::from("bogus :)"),
                     architecture: String::from("x86_64-darwin"),
                 })
                 .collect();
             let linux: Vec<PackageArch> = (0..src.linux)
-                .into_iter()
                 .map(|_| PackageArch {
                     package: String::from("bogus :)"),
                     architecture: String::from("x86_64-linux"),
@@ -335,6 +341,7 @@ mod tests {
     }
 
     #[test]
+    #[allow(clippy::too_many_lines)]
     pub fn test_packages_changed() {
         let mut tagger = RebuildTagger::new();
         tagger.parse_attrs(PackageArchSrc::linux(0).and_darwin(0).into());
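
Note: the `panic!`-to-`assert!` rewrites in the tagger hunks above are clippy's `manual_assert` suggestion; the condition is inverted and the failure message and panic behavior are unchanged. A minimal sketch (the `check_tag` helper is illustrative):

    fn check_tag(possible: &[String], tag: &str) {
        // Old: if !cond { panic!(msg, tag) }  --  New: assert!(cond, msg, tag);
        assert!(
            possible.iter().any(|t| t == tag),
            "Tried to add label {} but it isn't in the possible list!",
            tag
        );
    }
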
@@ -8,6 +8,9 @@ use crate::worker;
 
 use std::collections::VecDeque;
 
+use async_trait::async_trait;
+use tokio::io::AsyncBufReadExt;
+use tokio::io::BufReader;
 use tracing::{debug, debug_span, error, info};
 use uuid::Uuid;
 
@@ -19,6 +22,7 @@ pub struct BuildWorker {
 }
 
 impl BuildWorker {
+    #[must_use]
     pub fn new(
         cloner: checkout::CachedCloner,
         nix: nix::Nix,
@@ -47,7 +51,7 @@ pub struct JobActions<'a, 'b> {
     identity: String,
     receiver: &'a mut dyn notifyworker::NotificationReceiver,
     job: &'b buildjob::BuildJob,
-    line_counter: u64,
+    line_counter: usize,
     snippet_log: VecDeque<String>,
     attempt_id: String,
     log_exchange: Option<String>,
@@ -88,27 +92,28 @@ impl<'a, 'b> JobActions<'a, 'b> {
         }
     }
 
+    #[must_use]
     pub fn log_snippet(&self) -> Vec<String> {
         self.snippet_log.clone().into()
     }
 
-    pub fn pr_head_missing(&mut self) {
-        self.tell(worker::Action::Ack);
+    pub async fn pr_head_missing(&mut self) {
+        self.tell(worker::Action::Ack).await;
     }
 
-    pub fn commit_missing(&mut self) {
-        self.tell(worker::Action::Ack);
+    pub async fn commit_missing(&mut self) {
+        self.tell(worker::Action::Ack).await;
     }
 
-    pub fn nothing_to_do(&mut self) {
-        self.tell(worker::Action::Ack);
+    pub async fn nothing_to_do(&mut self) {
+        self.tell(worker::Action::Ack).await;
    }
 
-    pub fn merge_failed(&mut self) {
+    pub async fn merge_failed(&mut self) {
         let msg = BuildResult::V1 {
             tag: V1Tag::V1,
             repo: self.job.repo.clone(),
-            pr: self.job.pr.clone(),
+            pr: self.job.change.clone(),
             system: self.system.clone(),
             output: vec![String::from("Merge failed")],
             attempt_id: self.attempt_id.clone(),
@@ -122,14 +127,15 @@ impl<'a, 'b> JobActions<'a, 'b> {
         let result_routing_key = self.result_routing_key.clone();
 
         self.tell(worker::publish_serde_action(
-            result_exchange,
-            result_routing_key,
+            &result_exchange,
+            &result_routing_key,
             &msg,
-        ));
-        self.tell(worker::Action::Ack);
+        ))
+        .await;
+        self.tell(worker::Action::Ack).await;
     }
 
-    pub fn log_started(&mut self, can_build: Vec<String>, cannot_build: Vec<String>) {
+    pub async fn log_started(&mut self, can_build: Vec<String>, cannot_build: Vec<String>) {
         let msg = buildlogmsg::BuildLogStart {
             identity: self.identity.clone(),
             system: self.system.clone(),
@@ -142,24 +148,26 @@ impl<'a, 'b> JobActions<'a, 'b> {
         let log_routing_key = self.log_routing_key.clone();
 
         self.tell(worker::publish_serde_action(
-            log_exchange,
-            log_routing_key,
+            &log_exchange,
+            &log_routing_key,
             &msg,
-        ));
+        ))
+        .await;
     }
 
-    pub fn log_instantiation_errors(&mut self, cannot_build: Vec<(String, Vec<String>)>) {
+    pub async fn log_instantiation_errors(&mut self, cannot_build: Vec<(String, Vec<String>)>) {
         for (attr, log) in cannot_build {
-            self.log_line(&format!("Cannot nix-instantiate `{}' because:", &attr));
+            self.log_line(&format!("Cannot nix-instantiate `{}' because:", &attr))
+                .await;
 
             for line in log {
-                self.log_line(&line);
+                self.log_line(&line).await;
             }
-            self.log_line("");
+            self.log_line("").await;
         }
     }
 
-    pub fn log_line(&mut self, line: &str) {
+    pub async fn log_line(&mut self, line: &str) {
         self.line_counter += 1;
 
         if self.snippet_log.len() >= 10 {
@@ -179,17 +187,18 @@ impl<'a, 'b> JobActions<'a, 'b> {
         let log_routing_key = self.log_routing_key.clone();
 
         self.tell(worker::publish_serde_action(
-            log_exchange,
-            log_routing_key,
+            &log_exchange,
+            &log_routing_key,
             &msg,
-        ));
+        ))
+        .await;
     }
 
-    pub fn build_not_attempted(&mut self, not_attempted_attrs: Vec<String>) {
+    pub async fn build_not_attempted(&mut self, not_attempted_attrs: Vec<String>) {
         let msg = BuildResult::V1 {
             tag: V1Tag::V1,
             repo: self.job.repo.clone(),
-            pr: self.job.pr.clone(),
+            pr: self.job.change.clone(),
             system: self.system.clone(),
             output: self.log_snippet(),
             attempt_id: self.attempt_id.clone(),
@@ -202,23 +211,25 @@ impl<'a, 'b> JobActions<'a, 'b> {
         let result_exchange = self.result_exchange.clone();
         let result_routing_key = self.result_routing_key.clone();
         self.tell(worker::publish_serde_action(
-            result_exchange,
-            result_routing_key,
+            &result_exchange,
+            &result_routing_key,
             &msg,
-        ));
+        ))
+        .await;
 
         let log_exchange = self.log_exchange.clone();
         let log_routing_key = self.log_routing_key.clone();
         self.tell(worker::publish_serde_action(
-            log_exchange,
-            log_routing_key,
+            &log_exchange,
+            &log_routing_key,
             &msg,
-        ));
+        ))
+        .await;
 
-        self.tell(worker::Action::Ack);
+        self.tell(worker::Action::Ack).await;
     }
 
-    pub fn build_finished(
+    pub async fn build_finished(
         &mut self,
         status: BuildStatus,
         attempted_attrs: Vec<String>,
@@ -227,7 +238,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
         let msg = BuildResult::V1 {
             tag: V1Tag::V1,
             repo: self.job.repo.clone(),
-            pr: self.job.pr.clone(),
+            pr: self.job.change.clone(),
             system: self.system.clone(),
             output: self.log_snippet(),
             attempt_id: self.attempt_id.clone(),
@@ -240,31 +251,39 @@ impl<'a, 'b> JobActions<'a, 'b> {
         let result_exchange = self.result_exchange.clone();
         let result_routing_key = self.result_routing_key.clone();
         self.tell(worker::publish_serde_action(
-            result_exchange,
-            result_routing_key,
+            &result_exchange,
+            &result_routing_key,
             &msg,
-        ));
+        ))
+        .await;
 
         let log_exchange = self.log_exchange.clone();
         let log_routing_key = self.log_routing_key.clone();
         self.tell(worker::publish_serde_action(
-            log_exchange,
-            log_routing_key,
+            &log_exchange,
+            &log_routing_key,
             &msg,
-        ));
+        ))
+        .await;
 
-        self.tell(worker::Action::Ack);
+        self.tell(worker::Action::Ack).await;
     }
 
-    fn tell(&mut self, action: worker::Action) {
-        self.receiver.tell(action);
+    async fn tell(&mut self, action: worker::Action) {
+        self.receiver.tell(action).await;
     }
 }
 
+#[async_trait]
 impl notifyworker::SimpleNotifyWorker for BuildWorker {
     type J = buildjob::BuildJob;
 
-    fn msg_to_job(&self, _: &str, _: &Option<String>, body: &[u8]) -> Result<Self::J, String> {
+    async fn msg_to_job(
+        &self,
+        _: &str,
+        _: &Option<String>,
+        body: &[u8],
+    ) -> Result<Self::J, String> {
         info!("lmao I got a job?");
         match buildjob::from(body) {
             Ok(e) => Ok(e),
@@ -277,25 +296,25 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
 
     // FIXME: remove with rust/cargo update
     #[allow(clippy::cognitive_complexity)]
-    fn consumer(
+    async fn consumer(
         &self,
         job: &buildjob::BuildJob,
         notifier: &mut dyn notifyworker::NotificationReceiver,
     ) {
-        let span = debug_span!("job", pr = ?job.pr.number);
+        let span = debug_span!("job", pr = ?job.change.number);
         let _enter = span.enter();
 
         let mut actions = self.actions(job, notifier);
 
         if job.attrs.is_empty() {
             debug!("No attrs to build");
-            actions.nothing_to_do();
+            actions.nothing_to_do().await;
             return;
         }
 
         info!(
             "Working on https://github.com/{}/pull/{}",
-            job.repo.full_name, job.pr.number
+            job.repo.full_name, job.change.number
         );
         let project = self
             .cloner
@@ -304,7 +323,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
             .clone_for("builder".to_string(), self.identity.clone())
             .unwrap();
 
-        let target_branch = match job.pr.target_branch.clone() {
+        let target_branch = match job.change.target_branch.clone() {
             Some(x) => x,
             None => String::from("origin/master"),
         };
@@ -316,21 +335,21 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
 
         let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
 
-        if co.fetch_pr(job.pr.number).is_err() {
-            info!("Failed to fetch {}", job.pr.number);
-            actions.pr_head_missing();
+        if co.fetch_pr(job.change.number).is_err() {
+            info!("Failed to fetch {}", job.change.number);
+            actions.pr_head_missing().await;
             return;
         }
 
-        if !co.commit_exists(job.pr.head_sha.as_ref()) {
-            info!("Commit {} doesn't exist", job.pr.head_sha);
-            actions.commit_missing();
+        if !co.commit_exists(job.change.head_sha.as_ref()) {
+            info!("Commit {} doesn't exist", job.change.head_sha);
+            actions.commit_missing().await;
             return;
         }
 
-        if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
-            info!("Failed to merge {}", job.pr.head_sha);
-            actions.merge_failed();
+        if co.merge_commit(job.change.head_sha.as_ref()).is_err() {
+            info!("Failed to merge {}", job.change.head_sha);
+            actions.merge_failed().await;
             return;
         }
 
@@ -338,11 +357,10 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
             "Got path: {:?}, determining which ones we can build ",
             refpath
         );
-        let (can_build, cannot_build) = self.nix.safely_partition_instantiable_attrs(
-            refpath.as_ref(),
-            buildfile,
-            job.attrs.clone(),
-        );
+        let (can_build, cannot_build) = self
+            .nix
+            .safely_partition_instantiable_attrs(refpath.as_ref(), buildfile, job.attrs.clone())
+            .await;
 
         let cannot_build_attrs: Vec<String> = cannot_build
             .clone()
@@ -356,11 +374,13 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
             cannot_build_attrs.join(", ")
        );
 
-        actions.log_started(can_build.clone(), cannot_build_attrs.clone());
-        actions.log_instantiation_errors(cannot_build);
+        actions
+            .log_started(can_build.clone(), cannot_build_attrs.clone())
+            .await;
+        actions.log_instantiation_errors(cannot_build).await;
 
         if can_build.is_empty() {
-            actions.build_not_attempted(cannot_build_attrs);
+            actions.build_not_attempted(cannot_build_attrs).await;
             return;
         }
 
@@ -368,11 +388,12 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
         self.nix
             .safely_build_attrs_async(refpath.as_ref(), buildfile, can_build.clone());
 
-        for line in spawned.lines() {
-            actions.log_line(&line);
+        let mut reader = BufReader::new(spawned.stdout.take().unwrap()).lines();
+        while let Ok(Some(line)) = reader.next_line().await {
+            actions.log_line(&line).await;
         }
 
-        let status = nix::wait_for_build_status(spawned);
+        let status = nix::wait_for_build_status(spawned).await;
 
         info!("ok built ({:?}), building", status);
         info!("Lines:");
@@ -384,7 +405,9 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
             .last();
         info!("----->8-----");
 
-        actions.build_finished(status, can_build, cannot_build_attrs);
+        actions
+            .build_finished(status, can_build, cannot_build_attrs)
+            .await;
         info!("Build done!");
     }
 }
@@ -392,7 +415,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::message::{Pr, Repo};
+    use crate::message::{Change, Repo};
     use crate::notifyworker::SimpleNotifyWorker;
     use crate::test_scratch::TestScratch;
     use std::env;
@@ -406,7 +429,7 @@ mod tests {
     const SYSTEM: &str = "x86_64-darwin";
 
     fn nix() -> nix::Nix {
-        let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
+        let remote = env::var("NIX_REMOTE").unwrap_or_default();
         nix::Nix::new("x86_64-linux".to_owned(), remote, 1800, None)
     }
 
@@ -427,15 +450,18 @@ mod tests {
     }
 
     fn make_pr_repo(bare: &Path, co: &Path) -> String {
-        let output = Command::new("bash")
-            .current_dir(tpath("./test-srcs"))
-            .arg("make-pr.sh")
+        let mut cmd = Command::new("bash");
+        cmd.current_dir(tpath("./test-srcs"))
+            .arg("make-change.sh")
            .arg(bare)
            .arg(co)
             .stderr(Stdio::null())
-            .stdout(Stdio::piped())
-            .output()
-            .expect("building the test PR failed");
+            .stdout(Stdio::piped());
+        assert!(cmd.status().unwrap().success());
+
+        let output = cmd.output().expect("building the test PR failed");
         let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
 
         hash.trim().to_owned()
@@ -443,30 +469,29 @@ mod tests {
 
     fn strip_escaped_ansi(string: &str) -> String {
         string
-            .replace('‘', "'")
-            .replace('’', "'")
+            .replace(['‘', '’'], "'")
             .replace("\\u001b[31;1m", "") // red
             .replace("\\u001b[0m", "") // reset
     }
 
     fn assert_contains_job(actions: &mut IntoIter<worker::Action>, text_to_match: &str) {
-        println!("\n\n Searching: {:?}", text_to_match);
+        println!("\n\n Searching: {text_to_match:?}");
         actions
            .position(|job| match job {
                 worker::Action::Publish(ref body) => {
                     let content = String::from_utf8(body.content.clone()).unwrap();
                     let text = strip_escaped_ansi(&content);
-                    eprintln!("{}", text);
+                    eprintln!("{text}");
                     if text.contains(text_to_match) {
                         println!(" ok");
                         true
                     } else {
-                        println!(" notContains: {}", text);
+                        println!(" notContains: {text}");
                         false
                     }
                 }
                 e => {
-                    println!(" notPublish: {:?}", e);
+                    println!(" notPublish: {e:?}");
                     false
                 }
             })
@@ -478,8 +503,8 @@ mod tests {
         });
     }
 
-    #[test]
-    pub fn test_simple_build() {
+    #[tokio::test]
+    pub async fn test_simple_build() {
         let p = TestScratch::new_dir("build-simple-build-working");
         let bare_repo = TestScratch::new_dir("build-simple-build-bare");
         let co_repo = TestScratch::new_dir("build-simple-build-co");
@@ -489,7 +514,7 @@ mod tests {
 
         let job = buildjob::BuildJob {
            attrs: vec!["success".to_owned()],
-            pr: Pr {
+            change: Change {
                 head_sha,
                 number: 1,
                 target_branch: Some("master".to_owned()),
@@ -508,7 +533,7 @@ mod tests {
 
         let mut dummyreceiver = notifyworker::DummyNotificationReceiver::new();
 
-        worker.consumer(&job, &mut dummyreceiver);
+        worker.consumer(&job, &mut dummyreceiver).await;
 
         println!("Total actions: {:?}", dummyreceiver.actions.len());
         let mut actions = dummyreceiver.actions.into_iter();
@@ -523,8 +548,8 @@ mod tests {
         assert_eq!(actions.next(), Some(worker::Action::Ack));
     }
 
-    #[test]
-    pub fn test_all_jobs_skipped() {
+    #[tokio::test]
+    pub async fn test_all_jobs_skipped() {
         let p = TestScratch::new_dir("no-attempt");
         let bare_repo = TestScratch::new_dir("no-attempt-bare");
         let co_repo = TestScratch::new_dir("no-attempt-co");
@@ -534,7 +559,7 @@ mod tests {
 
         let job = buildjob::BuildJob {
             attrs: vec!["not-real".to_owned()],
-            pr: Pr {
+            change: Change {
                 head_sha,
                 number: 1,
                 target_branch: Some("master".to_owned()),
@@ -553,7 +578,7 @@ mod tests {
 
         let mut dummyreceiver = notifyworker::DummyNotificationReceiver::new();
 
-        worker.consumer(&job, &mut dummyreceiver);
+        worker.consumer(&job, &mut dummyreceiver).await;
 
         println!("Total actions: {:?}", dummyreceiver.actions.len());
|
println!("Total actions: {:?}", dummyreceiver.actions.len());
|
||||||
let mut actions = dummyreceiver.actions.into_iter();
|
let mut actions = dummyreceiver.actions.into_iter();
|
||||||
|
|
|
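The test updates above are a mechanical sync-to-async conversion: each `#[test]` that drives the worker becomes `#[tokio::test]`, and the call into `consumer` gains an `.await`. A minimal sketch of the pattern, using hypothetical stand-in types rather than ofborg's real `SimpleNotifyWorker`:

```rust
struct DummyReceiver;
struct Worker;

impl Worker {
    // Hypothetical async stand-in for the now-async consumer method.
    async fn consumer(&self, _job: &str, _rx: &mut DummyReceiver) {
        // ... drive the build, publishing progress to the receiver ...
    }
}

// Before: `#[test] fn test_simple_build() { worker.consumer(&job, &mut rx); }`
// After: the attribute provides a Tokio runtime, so the body can `.await`.
#[tokio::test]
async fn test_simple_build_sketch() {
    let worker = Worker;
    let mut rx = DummyReceiver;
    worker.consumer("job", &mut rx).await;
}
```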
@@ -1,46 +0,0 @@
-use crate::checkout::CachedProjectCo;
-use crate::commitstatus::CommitStatus;
-use crate::evalchecker::EvalChecker;
-use crate::tasks::eval::{EvaluationComplete, EvaluationStrategy, StepResult};
-
-use std::path::Path;
-
-#[derive(Default)]
-pub struct GenericStrategy {}
-impl GenericStrategy {
-    pub fn new() -> GenericStrategy {
-        Self {}
-    }
-}
-
-impl EvaluationStrategy for GenericStrategy {
-    fn pre_clone(&mut self) -> StepResult<()> {
-        Ok(())
-    }
-
-    fn on_target_branch(&mut self, _co: &Path, _status: &mut CommitStatus) -> StepResult<()> {
-        Ok(())
-    }
-
-    fn after_fetch(&mut self, _co: &CachedProjectCo) -> StepResult<()> {
-        Ok(())
-    }
-
-    fn merge_conflict(&mut self) {}
-
-    fn after_merge(&mut self, _status: &mut CommitStatus) -> StepResult<()> {
-        Ok(())
-    }
-
-    fn evaluation_checks(&self) -> Vec<EvalChecker> {
-        vec![]
-    }
-
-    fn all_evaluations_passed(
-        &mut self,
-        _co: &Path,
-        _status: &mut CommitStatus,
-    ) -> StepResult<EvaluationComplete> {
-        Ok(Default::default())
-    }
-}
@@ -1,33 +1,11 @@
-mod generic;
 mod nixpkgs;
 pub mod stdenvs;

-pub use self::generic::GenericStrategy;
 pub use self::nixpkgs::NixpkgsStrategy;
 pub use self::stdenvs::Stdenvs;
-use crate::checkout::CachedProjectCo;
-use crate::commitstatus::{CommitStatus, CommitStatusError};
-use crate::evalchecker::EvalChecker;
 use crate::message::buildjob::BuildJob;
-use hubcaps::checks::CheckRunOptions;
+use crate::vcs::commit_status::CommitStatusError;
+use crate::vcs::generic::CheckRunOptions;

-use std::path::Path;
-
-pub trait EvaluationStrategy {
-    fn pre_clone(&mut self) -> StepResult<()>;
-
-    fn on_target_branch(&mut self, co: &Path, status: &mut CommitStatus) -> StepResult<()>;
-    fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()>;
-    fn merge_conflict(&mut self);
-    fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()>;
-    fn evaluation_checks(&self) -> Vec<EvalChecker>;
-    fn all_evaluations_passed(
-        &mut self,
-        co: &Path,
-        status: &mut CommitStatus,
-    ) -> StepResult<EvaluationComplete>;
-}
-
 pub type StepResult<T> = Result<T, Error>;

@@ -41,7 +19,7 @@ pub struct EvaluationComplete {
 pub enum Error {
     CommitStatusWrite(CommitStatusError),
     Fail(String),
-    FailWithGist(String, String, String),
+    FailWithPastebin(String, String, String),
 }

 impl From<CommitStatusError> for Error {
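With the `EvaluationStrategy` trait gone, this module is reduced to the shared `Error` and `StepResult` plumbing, and the old `FailWithGist` variant is renamed to `FailWithPastebin` with the same shape: message, paste title, paste contents. A sketch of how the pieces fit, using simplified stand-in types; the `From` impl is what lets `?` promote a status-write failure into `Error::CommitStatusWrite`:

```rust
#[derive(Debug)]
struct CommitStatusError;

#[derive(Debug)]
enum Error {
    CommitStatusWrite(CommitStatusError),
    Fail(String),
    // (message, pastebin title, pastebin contents)
    FailWithPastebin(String, String, String),
}

impl From<CommitStatusError> for Error {
    fn from(e: CommitStatusError) -> Error {
        Error::CommitStatusWrite(e)
    }
}

type StepResult<T> = Result<T, Error>;

fn set_commit_status() -> Result<(), CommitStatusError> {
    Err(CommitStatusError)
}

fn some_step() -> StepResult<()> {
    // `?` converts the CommitStatusError via the From impl above.
    set_commit_status()?;
    Err(Error::FailWithPastebin(
        "the target branch does not cleanly evaluate".to_owned(),
        "Output path comparison".to_owned(),
        "full evaluator output would go here".to_owned(),
    ))
}

fn main() {
    println!("{:?}", some_step());
}
```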
@@ -1,26 +1,24 @@
 use crate::checkout::CachedProjectCo;
 use crate::commentparser::Subset;
-use crate::commitstatus::CommitStatus;
 use crate::evalchecker::EvalChecker;
 use crate::maintainers::{self, ImpactedMaintainers};
 use crate::message::buildjob::BuildJob;
 use crate::message::evaluationjob::EvaluationJob;
+use crate::message::{Change, Repo};
 use crate::nix::{self, Nix};
 use crate::nixenv::HydraNixEnv;
 use crate::outpathdiff::{OutPathDiff, PackageArch};
 use crate::tagger::{MaintainerPrTagger, PkgsAddedRemovedTagger, RebuildTagger, StdenvTagger};
-use crate::tasks::eval::{
-    stdenvs::Stdenvs, Error, EvaluationComplete, EvaluationStrategy, StepResult,
-};
-use crate::tasks::evaluate::{get_prefix, make_gist, update_labels};
+use crate::tasks::eval::{stdenvs::Stdenvs, Error, EvaluationComplete, StepResult};
+use crate::vcs::commit_status::CommitStatus;
+use crate::vcs::generic::{
+    CheckRunOptions, CheckRunState, Conclusion, State, VersionControlSystemAPI,
+};

 use std::path::Path;
+use std::sync::Arc;

 use chrono::Utc;
-use hubcaps::checks::{CheckRunOptions, CheckRunState, Conclusion, Output};
-use hubcaps::gists::Gists;
-use hubcaps::issues::{Issue, IssueRef};
-use hubcaps::repositories::Repository;
 use regex::Regex;
 use tracing::{info, warn};
 use uuid::Uuid;

@@ -38,7 +36,7 @@ fn label_from_title(title: &str) -> Vec<String> {
     let labels: Vec<_> = TITLE_LABELS
         .iter()
         .filter(|(word, _label)| {
-            let re = Regex::new(&format!("\\b{}\\b", word)).unwrap();
+            let re = Regex::new(&format!("\\b{word}\\b")).unwrap();
             re.is_match(title)
         })
         .map(|(_word, label)| (*label).into())

@@ -48,12 +46,11 @@ fn label_from_title(title: &str) -> Vec<String> {
 }

 pub struct NixpkgsStrategy<'a> {
+    chan: lapin::Channel,
     job: &'a EvaluationJob,
-    pull: &'a hubcaps::pulls::PullRequest,
-    issue: &'a Issue,
-    issue_ref: &'a IssueRef,
-    repo: &'a Repository,
-    gists: &'a Gists,
+    vcs_api: Arc<dyn VersionControlSystemAPI>,
+    change: &'a Change,
+    repo: &'a Repo,
     nix: Nix,
     stdenv_diff: Option<Stdenvs>,
     outpath_diff: Option<OutPathDiff>,

@@ -64,21 +61,19 @@ pub struct NixpkgsStrategy<'a> {
 impl<'a> NixpkgsStrategy<'a> {
     #[allow(clippy::too_many_arguments)]
     pub fn new(
+        chan: lapin::Channel,
         job: &'a EvaluationJob,
-        pull: &'a hubcaps::pulls::PullRequest,
-        issue: &'a Issue,
-        issue_ref: &'a IssueRef,
-        repo: &'a Repository,
-        gists: &'a Gists,
+        vcs_api: Arc<dyn VersionControlSystemAPI>,
+        repo: &'a Repo,
+        change: &'a Change,
         nix: Nix,
     ) -> NixpkgsStrategy<'a> {
         Self {
+            chan,
             job,
-            pull,
-            issue,
-            issue_ref,
+            vcs_api,
+            change,
             repo,
-            gists,
             nix,
             stdenv_diff: None,
             outpath_diff: None,

@@ -87,57 +82,92 @@ impl<'a> NixpkgsStrategy<'a> {
         }
     }

-    fn tag_from_title(&self) {
-        let title = match async_std::task::block_on(self.issue_ref.get()) {
-            Ok(issue) => issue.title.to_lowercase(),
-            Err(_) => return,
+    async fn tag_from_title(&self) {
+        let Ok(issue) = self.vcs_api.get_issue(self.repo, self.change.number).await else {
+            return;
         };
-
-        let labels = label_from_title(&title);
+        let labels = label_from_title(&issue.title);

         if labels.is_empty() {
             return;
         }

-        update_labels(self.issue_ref, &labels, &[]);
+        self.vcs_api
+            .update_labels(self.repo, self.change.number, &labels, &[])
+            .await;
     }

-    fn check_stdenvs_before(&mut self, dir: &Path) {
+    async fn update_labels(&self, to_add: &[String], to_remove: &[String]) {
+        self.vcs_api
+            .update_labels(self.repo, self.change.number, to_add, to_remove)
+            .await;
+    }
+
+    async fn request_reviews(&self, impacted_maintainers: &maintainers::ImpactedMaintainers) {
+        info!(
+            "Impact maintainers: {:?}",
+            impacted_maintainers.maintainers()
+        );
+
+        if impacted_maintainers.maintainers().len() < 10 {
+            let existing_reviewers = self
+                .vcs_api
+                .get_existing_reviewers(self.repo, self.change.number)
+                .await;
+
+            // Normalize both sides, compute the IM - ER set
+            let new_reviewers: Vec<String> = impacted_maintainers
+                .maintainers()
+                .into_iter()
+                .map(|maintainer| maintainer.to_ascii_lowercase())
+                .filter(|maint| !existing_reviewers.entity_reviewers.contains(maint))
+                .collect();
+
+            // Add them as reviewers.
+            self.vcs_api
+                .request_reviewers(self.repo, self.change.number, new_reviewers, vec![])
+                .await;
+        } else {
+            warn!(
+                "Too many reviewers ({}), skipping review requests",
+                impacted_maintainers.maintainers().len()
+            );
+        }
+    }
+
+    async fn check_stdenvs_before(&mut self, dir: &Path) {
         let mut stdenvs = Stdenvs::new(self.nix.clone(), dir.to_path_buf());
-        stdenvs.identify_before();
+        stdenvs.identify_before().await;
         self.stdenv_diff = Some(stdenvs);
     }

-    fn check_stdenvs_after(&mut self) {
+    async fn check_stdenvs_after(&mut self) {
         if let Some(ref mut stdenvs) = self.stdenv_diff {
-            stdenvs.identify_after();
+            stdenvs.identify_after().await;
         }
     }

-    fn update_stdenv_labels(&self) {
+    async fn update_stdenv_labels(&self) {
         if let Some(ref stdenvs) = self.stdenv_diff {
             let mut stdenvtagger = StdenvTagger::new();
             if !stdenvs.are_same() {
                 stdenvtagger.changed(stdenvs.changed());
             }
-            update_labels(
-                self.issue_ref,
-                &stdenvtagger.tags_to_add(),
-                &stdenvtagger.tags_to_remove(),
-            );
+            self.update_labels(&stdenvtagger.tags_to_add(), &stdenvtagger.tags_to_remove())
+                .await;
         }
     }

-    fn check_outpaths_before(&mut self, dir: &Path) -> StepResult<()> {
+    async fn check_outpaths_before(&mut self, dir: &Path) -> StepResult<()> {
         let mut rebuildsniff = OutPathDiff::new(self.nix.clone(), dir.to_path_buf());

-        if let Err(err) = rebuildsniff.find_before() {
+        if let Err(err) = rebuildsniff.find_before().await {
             /*
             self.events
                 .notify(Event::TargetBranchFailsEvaluation(target_branch.clone()));
             */

-            Err(Error::FailWithGist(
+            Err(Error::FailWithPastebin(
                 String::from("The branch this PR will merge in to does not cleanly evaluate, and so this PR cannot be checked."),
                 String::from("Output path comparison"),
                 err.display(),
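The new `request_reviews` above replaces the old free function: instead of only skipping the PR author, it fetches the reviewers already on the change and computes the set difference, lowercasing both sides first. A standalone sketch of that filter (assuming, as the code implies, that `entity_reviewers` is already lowercase):

```rust
/// Which maintainers still need a review request: normalize to ASCII
/// lowercase, then drop anyone who is already a reviewer.
fn new_reviewers(maintainers: Vec<String>, existing: &[String]) -> Vec<String> {
    maintainers
        .into_iter()
        .map(|m| m.to_ascii_lowercase())
        .filter(|m| !existing.contains(m))
        .collect()
}

fn main() {
    let maintainers = vec!["Alice".to_owned(), "bob".to_owned()];
    let existing = vec!["alice".to_owned()];
    // Only "bob" is left to ask for a review.
    assert_eq!(new_reviewers(maintainers, &existing), vec!["bob"]);
}
```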
@@ -148,10 +178,10 @@ impl<'a> NixpkgsStrategy<'a> {
         }
     }

-    fn check_outpaths_after(&mut self) -> StepResult<()> {
+    async fn check_outpaths_after(&mut self) -> StepResult<()> {
         if let Some(ref mut rebuildsniff) = self.outpath_diff {
-            if let Err(err) = rebuildsniff.find_after() {
-                Err(Error::FailWithGist(
+            if let Err(err) = rebuildsniff.find_after().await {
+                Err(Error::FailWithPastebin(
                     String::from("This PR does not cleanly list package outputs after merging."),
                     String::from("Output path comparison"),
                     err.display(),

@@ -168,10 +198,9 @@ impl<'a> NixpkgsStrategy<'a> {

     fn performance_stats(&self) -> Vec<CheckRunOptions> {
         if let Some(ref rebuildsniff) = self.outpath_diff {
-            if let Some(report) = rebuildsniff.performance_diff() {
+            if let Some(_report) = rebuildsniff.performance_diff() {
                 return vec![CheckRunOptions {
                     name: "Evaluation Performance Report".to_owned(),
-                    actions: None,
                     completed_at: Some(
                         Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
                     ),

@@ -180,36 +209,37 @@ impl<'a> NixpkgsStrategy<'a> {
                     status: Some(CheckRunState::Completed),
                     details_url: None,
                     external_id: None,
-                    head_sha: self.job.pr.head_sha.clone(),
-                    output: Some(Output {
-                        title: "Evaluator Performance Report".to_string(),
-                        summary: "".to_string(),
-                        text: Some(report.markdown()),
-                        annotations: None,
-                        images: None,
-                    }),
+                    head_sha: self.job.change.head_sha.clone(),
+                    // FIXME: before going into production, let's reintroduce this as a pastebin?
+                    // output: Some(Output {
+                    //     title: "Evaluator Performance Report".to_string(),
+                    //     summary: "".to_string(),
+                    //     text: Some(report.markdown()),
+                    //     annotations: None,
+                    //     images: None,
+                    // }),
                 }];
             }
         }
         vec![]
     }

-    fn update_new_package_labels(&self) {
+    async fn update_new_package_labels(&self) {
         if let Some(ref rebuildsniff) = self.outpath_diff {
             if let Some((removed, added)) = rebuildsniff.package_diff() {
                 let mut addremovetagger = PkgsAddedRemovedTagger::new();
                 addremovetagger.changed(&removed, &added);
-                update_labels(
-                    self.issue_ref,
+                self.update_labels(
                     &addremovetagger.tags_to_add(),
                     &addremovetagger.tags_to_remove(),
-                );
+                )
+                .await;
             }
         }
     }

-    fn update_rebuild_labels(
-        &self,
+    async fn update_rebuild_labels(
+        &mut self,
         dir: &Path,
         overall_status: &mut CommitStatus,
     ) -> Result<(), Error> {

@@ -218,36 +248,39 @@ impl<'a> NixpkgsStrategy<'a> {

         if let Some(attrs) = rebuildsniff.calculate_rebuild() {
             if !attrs.is_empty() {
-                overall_status.set_url(self.gist_changed_paths(&attrs));
-                self.record_impacted_maintainers(dir, &attrs)?;
+                overall_status.set_url(self.gist_changed_paths(&attrs).await);
+                self.record_impacted_maintainers(dir, &attrs).await?;
             }

             rebuild_tags.parse_attrs(attrs);
         }

-            update_labels(
-                self.issue_ref,
-                &rebuild_tags.tags_to_add(),
-                &rebuild_tags.tags_to_remove(),
-            );
+            self.update_labels(&rebuild_tags.tags_to_add(), &rebuild_tags.tags_to_remove())
+                .await;
         }
         Ok(())
     }

-    fn gist_changed_paths(&self, attrs: &[PackageArch]) -> Option<String> {
-        make_gist(
-            self.gists,
+    async fn gist_changed_paths(&mut self, attrs: &[PackageArch]) -> Option<String> {
+        crate::utils::pastebin::make_pastebin(
+            &mut self.chan,
             "Changed Paths",
-            Some("".to_owned()),
             attrs
                 .iter()
                 .map(|attr| format!("{}\t{}", &attr.architecture, &attr.package))
                 .collect::<Vec<String>>()
                 .join("\n"),
         )
+        .await
+        .ok()
+        .map(|pp| pp.uri)
     }

-    fn record_impacted_maintainers(&self, dir: &Path, attrs: &[PackageArch]) -> Result<(), Error> {
+    async fn record_impacted_maintainers(
+        &mut self,
+        dir: &Path,
+        attrs: &[PackageArch],
+    ) -> Result<(), Error> {
         let changed_attributes = attrs
             .iter()
             .map(|attr| attr.package.split('.').collect::<Vec<&str>>())

@@ -255,19 +288,20 @@ impl<'a> NixpkgsStrategy<'a> {

         if let Some(ref changed_paths) = self.changed_paths {
             let m =
-                ImpactedMaintainers::calculate(&self.nix, dir, changed_paths, &changed_attributes);
+                ImpactedMaintainers::calculate(&self.nix, dir, changed_paths, &changed_attributes)
+                    .await;

-            let gist_url = make_gist(
-                self.gists,
+            let gist_url = crate::utils::pastebin::make_pastebin(
+                &mut self.chan,
                 "Potential Maintainers",
-                Some("".to_owned()),
                 match m {
-                    Ok(ref maintainers) => format!("Maintainers:\n{}", maintainers),
-                    Err(ref e) => format!("Ignorable calculation error:\n{:?}", e),
+                    Ok(ref maintainers) => format!("Maintainers:\n{maintainers}"),
+                    Err(ref e) => format!("Ignorable calculation error:\n{e:?}"),
                 },
-            );
-            let prefix = get_prefix(self.repo.statuses(), &self.job.pr.head_sha)?;
+            )
+            .await
+            .ok()
+            .map(|pp| pp.uri);

             if changed_paths.len() > MAINTAINER_REVIEW_MAX_CHANGED_PATHS {
                 info!(

@@ -275,56 +309,60 @@ impl<'a> NixpkgsStrategy<'a> {
                     changed_paths.len()
                 );
                 let status = CommitStatus::new(
-                    self.repo.statuses(),
-                    self.job.pr.head_sha.clone(),
-                    format!("{}-eval-check-maintainers", prefix),
+                    self.vcs_api.clone(),
+                    self.repo.clone(),
+                    self.job.change.head_sha.clone(),
+                    "ofborg-eval-check-maintainers".to_owned(),
                     String::from("large change, skipping automatic review requests"),
                     gist_url,
                 );
-                status.set(hubcaps::statuses::State::Success)?;
+                status.set(State::Success).await?;
                 return Ok(());
             }

             let status = CommitStatus::new(
-                self.repo.statuses(),
-                self.job.pr.head_sha.clone(),
-                format!("{}-eval-check-maintainers", prefix),
+                self.vcs_api.clone(),
+                self.repo.clone(),
+                self.job.change.head_sha.clone(),
+                "ofborg-eval-check-maintainers".to_owned(),
                 String::from("matching changed paths to changed attrs..."),
                 gist_url,
             );
-            status.set(hubcaps::statuses::State::Success)?;
+            status.set(State::Success).await?;

             if let Ok(ref maint) = m {
-                request_reviews(maint, self.pull);
+                self.request_reviews(maint).await;
                 let mut maint_tagger = MaintainerPrTagger::new();
+                // TODO: this is really weird.
+                let issue = self
+                    .vcs_api
+                    .get_issue(self.repo, self.change.number)
+                    .await
+                    .expect("Failed to obtain the issue");
                 maint_tagger
-                    .record_maintainer(&self.issue.user.login, &maint.maintainers_by_package());
-                update_labels(
-                    self.issue_ref,
-                    &maint_tagger.tags_to_add(),
-                    &maint_tagger.tags_to_remove(),
-                );
+                    .record_maintainer(&issue.created_by.username, &maint.maintainers_by_package());
+                self.update_labels(&maint_tagger.tags_to_add(), &maint_tagger.tags_to_remove())
+                    .await;
             }
         }

         Ok(())
     }

-    fn check_meta_queue_builds(&self, dir: &Path) -> StepResult<Vec<BuildJob>> {
+    async fn check_meta_queue_builds(&mut self, dir: &Path) -> StepResult<Vec<BuildJob>> {
         if let Some(ref possibly_touched_packages) = self.touched_packages {
-            let prefix = get_prefix(self.repo.statuses(), &self.job.pr.head_sha)?;
-
             let mut status = CommitStatus::new(
-                self.repo.statuses(),
-                self.job.pr.head_sha.clone(),
-                format!("{}-eval-check-meta", prefix),
+                self.vcs_api.clone(),
+                self.repo.clone(),
+                self.job.change.head_sha.clone(),
+                "ci-eval-check-meta".to_owned(),
                 String::from("config.nix: checkMeta = true"),
                 None,
             );
-            status.set(hubcaps::statuses::State::Pending)?;
+            status.set(State::Pending).await?;

             let nixenv = HydraNixEnv::new(self.nix.clone(), dir.to_path_buf(), true);
-            match nixenv.execute_with_stats() {
+            match nixenv.execute_with_stats().await {
                 Ok((pkgs, _stats)) => {
                     let mut try_build: Vec<String> = pkgs
                         .keys()

@@ -336,7 +374,7 @@ impl<'a> NixpkgsStrategy<'a> {
                     try_build.dedup();

                     status.set_url(None);
-                    status.set(hubcaps::statuses::State::Success)?;
+                    status.set(State::Success).await?;

                     if !try_build.is_empty() && try_build.len() <= 20 {
                         // In the case of trying to merge master in to

@@ -345,7 +383,7 @@ impl<'a> NixpkgsStrategy<'a> {
                         // less than or exactly 20
                         Ok(vec![BuildJob::new(
                             self.job.repo.clone(),
-                            self.job.pr.clone(),
+                            self.job.change.clone(),
                             Subset::Nixpkgs,
                             try_build,
                             None,

@@ -357,8 +395,17 @@ impl<'a> NixpkgsStrategy<'a> {
                     }
                 }
                 Err(out) => {
-                    status.set_url(make_gist(self.gists, "Meta Check", None, out.display()));
-                    status.set(hubcaps::statuses::State::Failure)?;
+                    status.set_url(
+                        crate::utils::pastebin::make_pastebin(
+                            &mut self.chan,
+                            "Meta Check",
+                            out.display(),
+                        )
+                        .await
+                        .ok()
+                        .map(|pp| pp.uri),
+                    );
+                    status.set(State::Failure).await?;
                     Err(Error::Fail(String::from(
                         "Failed to validate package metadata.",
                     )))

@@ -368,74 +415,71 @@ impl<'a> NixpkgsStrategy<'a> {
             Ok(vec![])
         }
     }
-}

-impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
-    fn pre_clone(&mut self) -> StepResult<()> {
-        self.tag_from_title();
+    pub(crate) async fn pre_clone(&self) -> StepResult<()> {
+        self.tag_from_title().await;
         Ok(())
     }

-    fn on_target_branch(&mut self, dir: &Path, status: &mut CommitStatus) -> StepResult<()> {
-        status.set_with_description(
-            "Checking original stdenvs",
-            hubcaps::statuses::State::Pending,
-        )?;
-        self.check_stdenvs_before(dir);
+    pub(crate) async fn on_target_branch(
+        &mut self,
+        dir: &Path,
+        status: &mut CommitStatus,
+    ) -> StepResult<()> {
+        status
+            .set_with_description("Checking original stdenvs", State::Pending)
+            .await?;
+        self.check_stdenvs_before(dir).await;

-        status.set_with_description(
-            "Checking original out paths",
-            hubcaps::statuses::State::Pending,
-        )?;
-        self.check_outpaths_before(dir)?;
+        status
+            .set_with_description("Checking original out paths", State::Pending)
+            .await?;
+        self.check_outpaths_before(dir).await?;

         Ok(())
     }

-    fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()> {
+    pub(crate) fn after_fetch(&mut self, co: &CachedProjectCo) {
         let changed_paths = co
-            .files_changed_from_head(&self.job.pr.head_sha)
+            .files_changed_from_head(&self.job.change.head_sha)
             .unwrap_or_else(|_| vec![]);
         self.changed_paths = Some(changed_paths);

         self.touched_packages = Some(parse_commit_messages(
-            &co.commit_messages_from_head(&self.job.pr.head_sha)
-                .unwrap_or_else(|_| vec!["".to_owned()]),
+            &co.commit_messages_from_head(&self.job.change.head_sha)
+                .unwrap_or_else(|_| vec![String::new()]),
         ));
+    }
+
+    pub(crate) async fn merge_conflict(&self) {
+        self.update_labels(&["2.status: merge conflict".to_owned()], &[])
+            .await;
+    }
+
+    pub(crate) async fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> {
+        self.update_labels(&[], &["2.status: merge conflict".to_owned()])
+            .await;
+
+        status
+            .set_with_description("Checking new stdenvs", State::Pending)
+            .await?;
+        self.check_stdenvs_after().await;
+
+        status
+            .set_with_description("Checking new out paths", State::Pending)
+            .await?;
+        self.check_outpaths_after().await?;

         Ok(())
     }

-    fn merge_conflict(&mut self) {
-        update_labels(
-            self.issue_ref,
-            &["2.status: merge conflict".to_owned()],
-            &[],
-        );
-    }
-
-    fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> {
-        update_labels(
-            self.issue_ref,
-            &[],
-            &["2.status: merge conflict".to_owned()],
-        );
-
-        status.set_with_description("Checking new stdenvs", hubcaps::statuses::State::Pending)?;
-        self.check_stdenvs_after();
-
-        status.set_with_description("Checking new out paths", hubcaps::statuses::State::Pending)?;
-        self.check_outpaths_after()?;
-
-        Ok(())
-    }
-
-    fn evaluation_checks(&self) -> Vec<EvalChecker> {
+    #[allow(clippy::too_many_lines)]
+    pub(crate) fn evaluation_checks(&self) -> Vec<EvalChecker> {
         // the value that's passed as the nixpkgs arg
         let nixpkgs_arg_value = format!(
             "{{ outPath=./.; revCount=999999; shortRev=\"{}\"; rev=\"{}\"; }}",
-            &self.job.pr.head_sha[0..7],
-            &self.job.pr.head_sha,
+            &self.job.change.head_sha[0..7],
+            &self.job.change.head_sha,
         );
         vec![
             EvalChecker::new(

@@ -561,63 +605,26 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
         ]
     }

-    fn all_evaluations_passed(
+    pub(crate) async fn all_evaluations_passed(
         &mut self,
         dir: &Path,
         status: &mut CommitStatus,
     ) -> StepResult<EvaluationComplete> {
-        self.update_stdenv_labels();
+        self.update_stdenv_labels().await;

-        status.set_with_description(
-            "Calculating Changed Outputs",
-            hubcaps::statuses::State::Pending,
-        )?;
+        status
+            .set_with_description("Calculating Changed Outputs", State::Pending)
+            .await?;

-        self.update_new_package_labels();
-        self.update_rebuild_labels(dir, status)?;
+        self.update_new_package_labels().await;
+        self.update_rebuild_labels(dir, status).await?;
         let checks = self.performance_stats();

-        let builds = self.check_meta_queue_builds(dir)?;
+        let builds = self.check_meta_queue_builds(dir).await?;
         Ok(EvaluationComplete { builds, checks })
     }
 }

-fn request_reviews(maint: &maintainers::ImpactedMaintainers, pull: &hubcaps::pulls::PullRequest) {
-    let pull_meta = async_std::task::block_on(pull.get());
-
-    info!("Impacted maintainers: {:?}", maint.maintainers());
-    if maint.maintainers().len() < 10 {
-        for maintainer in maint.maintainers() {
-            match &pull_meta {
-                Ok(meta) => {
-                    // GitHub doesn't let us request a review from the PR author, so
-                    // we silently skip them.
-                    if meta.user.login.to_ascii_lowercase() == maintainer.to_ascii_lowercase() {
-                        continue;
-                    }
-                }
-                Err(e) => {
-                    warn!("PR meta was invalid? {:?}", e);
-                }
-            }
-
-            if let Err(e) = async_std::task::block_on(pull.review_requests().create(
-                &hubcaps::review_requests::ReviewRequestOptions {
-                    reviewers: vec![maintainer.clone()],
-                    team_reviewers: vec![],
-                },
-            )) {
-                warn!("Failure requesting a review from {}: {:?}", maintainer, e,);
-            }
-        }
-    } else {
-        warn!(
-            "Too many reviewers ({}), skipping review requests",
-            maint.maintainers().len()
-        );
-    }
-}
-
 fn parse_commit_messages(messages: &[String]) -> Vec<String> {
     messages
         .iter()

@@ -628,7 +635,7 @@ fn parse_commit_messages(messages: &[String]) -> Vec<String> {
         // NOTE: This transforms `{foo,bar}` into `{{foo,bar}}` and `foo,bar` into `{foo,bar}`,
         // which allows both the old style (`foo,bar`) and the new style (`{foo,bar}`) to expand to
        // `foo` and `bar`.
-        .flat_map(|line| brace_expand::brace_expand(&format!("{{{}}}", line)))
+        .flat_map(|line| brace_expand::brace_expand(&format!("{{{line}}}")))
         .map(|line| line.trim().to_owned())
         .collect()
 }
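The NOTE in the hunk above is easiest to see with concrete inputs: wrapping every commit-message line in one extra brace pair makes the old and new package-list styles expand identically. A sketch of the wrapping trick only, assuming shell-style semantics from the `brace_expand` crate (`"{a,b}"` expands to `["a", "b"]`, nesting allowed):

```rust
fn wrap(line: &str) -> String {
    // `{{` and `}}` are literal braces in a format string.
    format!("{{{line}}}")
}

fn main() {
    // Old style: "foo,bar" becomes "{foo,bar}", which expands to foo, bar.
    assert_eq!(wrap("foo,bar"), "{foo,bar}");
    // New style: "{foo,bar}" becomes "{{foo,bar}}"; the outer group holds a
    // single element that is itself a brace group, so it still expands to
    // foo and bar.
    assert_eq!(wrap("{foo,bar}"), "{{foo,bar}}");
}
```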
@@ -674,7 +681,7 @@ mod tests {
                 firefox{,-beta}{,-bin}, librewolf: blah blah blah
                 "
                 .lines()
-                .map(|l| l.to_owned())
+                .map(std::borrow::ToOwned::to_owned)
                 .collect::<Vec<String>>(),
             ),
             expect

@@ -708,12 +715,11 @@ mod tests {
             vec![String::from("6.topic: darwin")]
         );
         assert_eq!(
-            label_from_title("fix build on bsd and darwin").sort(),
-            vec![
-                String::from("6.topic: darwin"),
-                String::from("6.topic: bsd")
+            label_from_title("fix build on bsd and darwin"),
+            [
+                String::from("6.topic: bsd"),
+                String::from("6.topic: darwin")
             ]
-            .sort()
         );
         assert_eq!(
             label_from_title("pkg: fix cross"),
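The last hunk is a bug fix, not a cosmetic change: `Vec::sort` sorts in place and returns `()`, so the old assertion compared `()` with `()` and could never fail. The new code compares the actual vector against a pre-sorted expected value. A minimal reproduction of the trap:

```rust
fn main() {
    let labels = vec!["6.topic: darwin", "6.topic: bsd"];

    // Bug: both `.sort()` calls return `()`, so this always passes.
    assert_eq!(labels.clone().sort(), vec!["totally", "unrelated"].sort());

    // Fix: sort a copy, then compare the vectors themselves.
    let mut sorted = labels.clone();
    sorted.sort();
    assert_eq!(sorted, vec!["6.topic: bsd", "6.topic: darwin"]);
}
```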
@@ -29,6 +29,7 @@ pub struct Stdenvs {
 }

 impl Stdenvs {
+    #[must_use]
     pub fn new(nix: nix::Nix, co: PathBuf) -> Stdenvs {
         Stdenvs {
             nix,

@@ -42,20 +43,22 @@ impl Stdenvs {
         }
     }

-    pub fn identify_before(&mut self) {
-        self.identify(System::X8664Linux, StdenvFrom::Before);
-        self.identify(System::X8664Darwin, StdenvFrom::Before);
+    pub async fn identify_before(&mut self) {
+        self.identify(System::X8664Linux, StdenvFrom::Before).await;
+        self.identify(System::X8664Darwin, StdenvFrom::Before).await;
     }

-    pub fn identify_after(&mut self) {
-        self.identify(System::X8664Linux, StdenvFrom::After);
-        self.identify(System::X8664Darwin, StdenvFrom::After);
+    pub async fn identify_after(&mut self) {
+        self.identify(System::X8664Linux, StdenvFrom::After).await;
+        self.identify(System::X8664Darwin, StdenvFrom::After).await;
     }

+    #[must_use]
     pub fn are_same(&self) -> bool {
         self.changed().is_empty()
     }

+    #[must_use]
     pub fn changed(&self) -> Vec<System> {
         let mut changed: Vec<System> = vec![];

@@ -70,29 +73,31 @@ impl Stdenvs {
         changed
     }

-    fn identify(&mut self, system: System, from: StdenvFrom) {
+    async fn identify(&mut self, system: System, from: StdenvFrom) {
         match (system, from) {
             (System::X8664Linux, StdenvFrom::Before) => {
-                self.linux_stdenv_before = self.evalstdenv("x86_64-linux");
+                self.linux_stdenv_before = self.evalstdenv("x86_64-linux").await;
             }
             (System::X8664Linux, StdenvFrom::After) => {
-                self.linux_stdenv_after = self.evalstdenv("x86_64-linux");
+                self.linux_stdenv_after = self.evalstdenv("x86_64-linux").await;
             }

             (System::X8664Darwin, StdenvFrom::Before) => {
-                self.darwin_stdenv_before = self.evalstdenv("x86_64-darwin");
+                self.darwin_stdenv_before = self.evalstdenv("x86_64-darwin").await;
             }
             (System::X8664Darwin, StdenvFrom::After) => {
-                self.darwin_stdenv_after = self.evalstdenv("x86_64-darwin");
+                self.darwin_stdenv_after = self.evalstdenv("x86_64-darwin").await;
             }
         }
     }

     /// This is used to find out what the output path of the stdenv for the
     /// given system.
-    fn evalstdenv(&self, system: &str) -> Option<String> {
+    async fn evalstdenv(&self, system: &str) -> Option<String> {
         info!(?system, "query stdenv output");
-        let result = self.nix.with_system(system.to_owned()).safely(
-            &nix::Operation::QueryPackagesOutputs,
-            &self.co,
-            vec![
+        let nix = self.nix.with_system(system.to_owned());
+        let result = nix
+            .safely(
+                &nix::Operation::QueryPackagesOutputs,
+                &self.co,
+                vec![

@@ -102,7 +107,8 @@ impl Stdenvs {
                 String::from("stdenv"),
             ],
             true,
-        );
+        )
+        .await;

         match result {
             Ok(mut out) => Some(file_to_str(&mut out)),

@@ -121,23 +127,29 @@ mod tests {
     use std::env;
     use std::process::Command;

-    #[test]
-    fn stdenv_checking() {
+    #[tokio::test]
+    async fn stdenv_checking() {
         let output = Command::new("nix-instantiate")
-            .args(&["--eval", "-E", "<nixpkgs>"])
+            .args(["--eval", "-E", "<nixpkgs>"])
             .output()
             .expect("nix-instantiate required");

         let nixpkgs = String::from_utf8(output.stdout).expect("nixpkgs required");

-        let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
+        let remote = env::var("NIX_REMOTE").unwrap_or_default();
         let nix = nix::Nix::new(String::from("x86_64-linux"), remote, 1200, None);
         let mut stdenv = Stdenvs::new(nix, PathBuf::from(nixpkgs.trim_end()));
-        stdenv.identify(System::X8664Linux, StdenvFrom::Before);
-        stdenv.identify(System::X8664Darwin, StdenvFrom::Before);
+        stdenv
+            .identify(System::X8664Linux, StdenvFrom::Before)
+            .await;
+        stdenv
+            .identify(System::X8664Darwin, StdenvFrom::Before)
+            .await;

-        stdenv.identify(System::X8664Linux, StdenvFrom::After);
-        stdenv.identify(System::X8664Darwin, StdenvFrom::After);
+        stdenv.identify(System::X8664Linux, StdenvFrom::After).await;
+        stdenv
+            .identify(System::X8664Darwin, StdenvFrom::After)
+            .await;

         assert!(stdenv.are_same());
     }
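The `Stdenvs` changes are the same async propagation seen elsewhere: `identify*` becomes `async` because `evalstdenv` now awaits the Nix invocation, while the pure accessors stay synchronous and gain `#[must_use]`. The intended call order is identify-before, merge, identify-after, compare; a miniature sketch with hypothetical store paths standing in for real `nix-instantiate` output:

```rust
#[derive(Default)]
struct Stdenvs {
    before: Option<String>,
    after: Option<String>,
}

impl Stdenvs {
    async fn identify_before(&mut self) {
        // Real code: await a nix query per system.
        self.before = Some("/nix/store/aaa-stdenv".to_owned());
    }
    async fn identify_after(&mut self) {
        self.after = Some("/nix/store/aaa-stdenv".to_owned());
    }
    #[must_use]
    fn are_same(&self) -> bool {
        self.before == self.after
    }
}

#[tokio::main]
async fn main() {
    let mut s = Stdenvs::default();
    s.identify_before().await; // on the target branch
    // ... merge the change under test ...
    s.identify_after().await; // after the merge
    assert!(s.are_same());
}
```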
@ -1,32 +1,34 @@
|
||||||
/// This is what evaluates every pull-request
|
/// This is what evaluates every pull-request
|
||||||
use crate::acl::Acl;
|
use crate::acl::Acl;
|
||||||
use crate::checkout;
|
use crate::checkout;
|
||||||
use crate::commitstatus::{CommitStatus, CommitStatusError};
|
|
||||||
use crate::config::GithubAppVendingMachine;
|
|
||||||
use crate::files::file_to_str;
|
use crate::files::file_to_str;
|
||||||
|
use crate::message::evaluationjob::Actions;
|
||||||
use crate::message::{buildjob, evaluationjob};
|
use crate::message::{buildjob, evaluationjob};
|
||||||
use crate::nix;
|
use crate::nix;
|
||||||
use crate::stats::{self, Event};
|
use crate::stats::{self, Event};
|
||||||
use crate::systems;
|
use crate::systems;
|
||||||
use crate::tasks::eval;
|
use crate::tasks::eval;
|
||||||
|
use crate::utils::pastebin::PersistedPastebin;
|
||||||
|
use crate::vcs::commit_status::{CommitStatus, CommitStatusError};
|
||||||
|
use crate::vcs::generic::{Issue, IssueState, State, VersionControlSystemAPI};
|
||||||
|
use crate::vcs::gerrit::http::GerritHTTPApi;
|
||||||
use crate::worker;
|
use crate::worker;
|
||||||
use futures_util::TryFutureExt;
|
|
||||||
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::sync::RwLock;
|
use std::sync::Arc;
|
||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
|
|
||||||
use hubcaps::checks::CheckRunOptions;
|
use async_trait::async_trait;
|
||||||
use hubcaps::gists::Gists;
|
use tracing::{debug_span, error, info, warn};
|
||||||
use hubcaps::issues::Issue;
|
|
||||||
use tracing::{debug, debug_span, error, info, warn};
|
pub enum SupportedVCS {
|
||||||
|
Gerrit,
|
||||||
|
}
|
||||||
|
|
||||||
pub struct EvaluationWorker<E> {
|
pub struct EvaluationWorker<E> {
|
||||||
cloner: checkout::CachedCloner,
|
cloner: checkout::CachedCloner,
|
||||||
nix: nix::Nix,
|
nix: nix::Nix,
|
||||||
github: hubcaps::Github,
|
vcs: SupportedVCS,
|
||||||
github_vend: RwLock<GithubAppVendingMachine>,
|
|
||||||
acl: Acl,
|
acl: Acl,
|
||||||
identity: String,
|
identity: String,
|
||||||
events: E,
|
events: E,
|
||||||
|
@ -37,8 +39,7 @@ impl<E: stats::SysEvents> EvaluationWorker<E> {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
cloner: checkout::CachedCloner,
|
cloner: checkout::CachedCloner,
|
||||||
nix: &nix::Nix,
|
nix: &nix::Nix,
|
||||||
github: hubcaps::Github,
|
vcs: SupportedVCS,
|
||||||
github_vend: GithubAppVendingMachine,
|
|
||||||
acl: Acl,
|
acl: Acl,
|
||||||
identity: String,
|
identity: String,
|
||||||
events: E,
|
events: E,
|
||||||
|
@ -46,8 +47,7 @@ impl<E: stats::SysEvents> EvaluationWorker<E> {
|
||||||
EvaluationWorker {
|
EvaluationWorker {
|
||||||
cloner,
|
cloner,
|
||||||
nix: nix.without_limited_supported_systems(),
|
nix: nix.without_limited_supported_systems(),
|
||||||
github,
|
vcs,
|
||||||
github_vend: RwLock::new(github_vend),
|
|
||||||
acl,
|
acl,
|
||||||
identity,
|
identity,
|
||||||
events,
|
events,
|
||||||
|
@ -55,18 +55,24 @@ impl<E: stats::SysEvents> EvaluationWorker<E> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E> {
|
#[async_trait]
|
||||||
|
impl<E: stats::SysEvents + 'static + Sync + Send> worker::SimpleWorker for EvaluationWorker<E> {
|
||||||
type J = evaluationjob::EvaluationJob;
|
type J = evaluationjob::EvaluationJob;
|
||||||
|
|
||||||
fn msg_to_job(&mut self, _: &str, _: &Option<String>, body: &[u8]) -> Result<Self::J, String> {
|
async fn msg_to_job(
|
||||||
self.events.notify(Event::JobReceived);
|
&mut self,
|
||||||
|
_: &str,
|
||||||
|
_: &Option<String>,
|
||||||
|
body: &[u8],
|
||||||
|
) -> Result<Self::J, String> {
|
||||||
|
self.events.notify(Event::JobReceived).await;
|
||||||
match evaluationjob::from(body) {
|
match evaluationjob::from(body) {
|
||||||
Ok(e) => {
|
Ok(e) => {
|
||||||
self.events.notify(Event::JobDecodeSuccess);
|
self.events.notify(Event::JobDecodeSuccess).await;
|
||||||
Ok(e)
|
Ok(e)
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
self.events.notify(Event::JobDecodeFailure);
|
self.events.notify(Event::JobDecodeFailure).await;
|
||||||
error!(
|
error!(
|
||||||
"Failed to decode message: {:?}, Err: {:?}",
|
"Failed to decode message: {:?}, Err: {:?}",
|
||||||
String::from_utf8(body.to_vec()),
|
String::from_utf8(body.to_vec()),
|
||||||
|
@ -77,22 +83,20 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn consumer(&mut self, job: &evaluationjob::EvaluationJob) -> worker::Actions {
|
async fn consumer(
|
||||||
let span = debug_span!("job", pr = ?job.pr.number);
|
&mut self,
|
||||||
|
chan: &mut lapin::Channel,
|
||||||
|
job: &evaluationjob::EvaluationJob,
|
||||||
|
) -> worker::Actions {
|
||||||
|
let span: tracing::Span = debug_span!("job", change_id = ?job.change.number);
|
||||||
let _enter = span.enter();
|
let _enter = span.enter();
|
||||||
|
|
||||||
let mut vending_machine = self
|
let vcs_api: Arc<dyn VersionControlSystemAPI> = match self.vcs {
|
||||||
.github_vend
|
SupportedVCS::Gerrit => Arc::new(GerritHTTPApi),
|
||||||
.write()
|
};
|
||||||
.expect("Failed to get write lock on github vending machine");
|
|
||||||
|
|
||||||
let github_client = vending_machine
|
|
||||||
.for_repo(&job.repo.owner, &job.repo.name)
|
|
||||||
.expect("Failed to get a github client token");
|
|
||||||
|
|
||||||
OneEval::new(
|
OneEval::new(
|
||||||
github_client,
|
vcs_api,
|
||||||
&self.github,
|
|
||||||
&self.nix,
|
&self.nix,
|
||||||
&self.acl,
|
&self.acl,
|
||||||
&mut self.events,
|
&mut self.events,
|
||||||
|
@ -100,14 +104,13 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
|
||||||
&self.cloner,
|
&self.cloner,
|
||||||
job,
|
job,
|
||||||
)
|
)
|
||||||
.worker_actions()
|
.worker_actions(chan)
|
||||||
|
.await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct OneEval<'a, E> {
|
struct OneEval<'a, E> {
|
||||||
client_app: &'a hubcaps::Github,
|
vcs_api: Arc<dyn VersionControlSystemAPI>,
|
||||||
repo: hubcaps::repositories::Repository,
|
|
||||||
gists: Gists,
|
|
||||||
nix: &'a nix::Nix,
|
nix: &'a nix::Nix,
|
||||||
acl: &'a Acl,
|
acl: &'a Acl,
|
||||||
events: &'a mut E,
|
events: &'a mut E,
|
||||||
|
@ -119,8 +122,7 @@ struct OneEval<'a, E> {
|
||||||
impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
fn new(
|
fn new(
|
||||||
client_app: &'a hubcaps::Github,
|
vcs_api: Arc<dyn VersionControlSystemAPI>,
|
||||||
client_legacy: &'a hubcaps::Github,
|
|
||||||
nix: &'a nix::Nix,
|
nix: &'a nix::Nix,
|
||||||
acl: &'a Acl,
|
acl: &'a Acl,
|
||||||
events: &'a mut E,
|
events: &'a mut E,
|
||||||
|
@ -128,13 +130,8 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
cloner: &'a checkout::CachedCloner,
|
cloner: &'a checkout::CachedCloner,
|
||||||
job: &'a evaluationjob::EvaluationJob,
|
job: &'a evaluationjob::EvaluationJob,
|
||||||
) -> OneEval<'a, E> {
|
) -> OneEval<'a, E> {
|
||||||
let gists = client_legacy.gists();
|
|
||||||
|
|
||||||
let repo = client_app.repo(job.repo.owner.clone(), job.repo.name.clone());
|
|
||||||
OneEval {
|
OneEval {
|
||||||
client_app,
|
vcs_api,
|
||||||
repo,
|
|
||||||
gists,
|
|
||||||
nix,
|
nix,
|
||||||
acl,
|
acl,
|
||||||
events,
|
events,
|
||||||
|
@ -144,15 +141,11 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn actions(&self) -> evaluationjob::Actions {
|
async fn update_status(
|
||||||
evaluationjob::Actions {}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update_status(
|
|
||||||
&self,
|
&self,
|
||||||
description: String,
|
description: String,
|
||||||
url: Option<String>,
|
url: Option<String>,
|
||||||
state: hubcaps::statuses::State,
|
state: State,
|
||||||
) -> Result<(), CommitStatusError> {
|
) -> Result<(), CommitStatusError> {
|
||||||
let description = if description.len() >= 140 {
|
let description = if description.len() >= 140 {
|
||||||
warn!(
|
warn!(
|
||||||
|
@ -163,58 +156,56 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
} else {
|
} else {
|
||||||
description
|
description
|
||||||
};
|
};
|
||||||
let repo = self
|
|
||||||
.client_app
|
|
||||||
.repo(self.job.repo.owner.clone(), self.job.repo.name.clone());
|
|
||||||
let prefix = get_prefix(repo.statuses(), &self.job.pr.head_sha)?;
|
|
||||||
|
|
||||||
let mut builder = hubcaps::statuses::StatusOptions::builder(state);
|
|
||||||
builder.context(format!("{}-eval", prefix));
|
|
||||||
builder.description(description.clone());
|
|
||||||
|
|
||||||
if let Some(url) = url {
|
|
||||||
builder.target_url(url);
|
|
||||||
}
|
|
||||||
|
|
||||||
info!(
|
info!(
|
||||||
"Updating status on {}:{} -> {}",
|
"Updating status on {}:{} -> {}",
|
||||||
&self.job.pr.number, &self.job.pr.head_sha, &description
|
&self.job.change.number, &self.job.change.head_sha, &description
|
||||||
);
|
);
|
||||||
|
|
||||||
async_std::task::block_on(
|
self.vcs_api
|
||||||
self.repo
|
.create_commit_statuses(
|
||||||
.statuses()
|
&self.job.repo,
|
||||||
.create(&self.job.pr.head_sha, &builder.build())
|
self.job.change.head_sha.clone(),
|
||||||
.map_ok(|_| ())
|
state,
|
||||||
.map_err(|e| CommitStatusError::from(e)),
|
"ofborg-eval".to_owned(),
|
||||||
|
description,
|
||||||
|
// TODO: make this an option
|
||||||
|
url.unwrap_or_default(),
|
||||||
)
|
)
|
||||||
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_gist(
|
async fn make_pastebin(
|
||||||
&self,
|
&mut self,
|
||||||
filename: &str,
|
chan: &mut lapin::Channel,
|
||||||
description: Option<String>,
|
title: &str,
|
||||||
content: String,
|
contents: String,
|
||||||
) -> Option<String> {
|
) -> Option<PersistedPastebin> {
|
||||||
make_gist(&self.gists, filename, description, content)
|
crate::utils::pastebin::make_pastebin(chan, title, contents)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
}
|
}
|
||||||
|
|
||||||
-    fn worker_actions(&mut self) -> worker::Actions {
-        let eval_result = self.evaluate_job().map_err(|eval_error| match eval_error {
+    async fn worker_actions(&mut self, chan: &mut lapin::Channel) -> worker::Actions {
+        let eval_result = match self.evaluate_job(chan).await {
+            Ok(r) => Ok(r),
             // Handle error cases which expect us to post statuses
             // to github. Convert Eval Errors in to Result<_, CommitStatusWrite>
-            EvalWorkerError::EvalError(eval::Error::Fail(msg)) => {
-                self.update_status(msg, None, hubcaps::statuses::State::Failure)
+            Err(EvalWorkerError::EvalError(eval::Error::Fail(msg))) => {
+                Err(self.update_status(msg, None, State::Failure).await)
             }
-            EvalWorkerError::EvalError(eval::Error::FailWithGist(msg, filename, content)) => self
-                .update_status(
-                    msg,
-                    self.make_gist(&filename, Some("".to_owned()), content),
-                    hubcaps::statuses::State::Failure,
-                ),
-            EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e)) => Err(e),
-            EvalWorkerError::CommitStatusWrite(e) => Err(e),
-        });
+            Err(EvalWorkerError::EvalError(eval::Error::FailWithPastebin(msg, title, content))) => {
+                let pastebin = self
+                    .make_pastebin(chan, &title, content)
+                    .await
+                    .map(|pp| pp.uri);
+                Err(self.update_status(msg, pastebin, State::Failure).await)
+            }
+            Err(
+                EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e))
+                | EvalWorkerError::CommitStatusWrite(e),
+            ) => Err(Err(e)),
+        };
 
         match eval_result {
             Ok(eval_actions) => eval_actions,
@@ -222,18 +213,18 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
                 // There was an error during eval, but we successfully
                 // updated the PR.
 
-                self.actions().skip(self.job)
+                Actions::skip(self.job)
             }
             Err(Err(CommitStatusError::ExpiredCreds(e))) => {
                 error!("Failed writing commit status: creds expired: {:?}", e);
-                self.actions().retry_later(self.job)
+                Actions::retry_later(self.job)
             }
             Err(Err(CommitStatusError::MissingSha(e))) => {
                 error!(
                     "Failed writing commit status: commit sha was force-pushed away: {:?}",
                     e
                 );
-                self.actions().skip(self.job)
+                Actions::skip(self.job)
             }
 
             Err(Err(CommitStatusError::Error(cswerr))) => {
@@ -241,243 +232,263 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
                     "Internal error writing commit status: {:?}, marking internal error",
                     cswerr
                 );
-                let issue_ref = self.repo.issue(self.job.pr.number);
-                update_labels(&issue_ref, &[String::from("ofborg-internal-error")], &[]);
 
-                self.actions().skip(self.job)
+                self.vcs_api
+                    .update_labels(
+                        &self.job.repo,
+                        self.job.change.number,
+                        &[String::from("ofborg-internal-error")],
+                        &[],
+                    )
+                    .await;
 
+                Actions::skip(self.job)
             }
         }
     }
 
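Review note: after this rewrite, `eval_result` has a nested shape, `Result<worker::Actions, Result<(), CommitStatusError>>`: the outer `Err` means evaluation failed, and its payload records whether the follow-up status write itself succeeded. A standalone sketch of that control flow, with stand-in types:

```rust
// Minimal illustration of the nested-Result plumbing used above.
// Types here are stand-ins, not the real ofborg definitions.
#[derive(Debug)]
enum CommitStatusError {
    ExpiredCreds,
    MissingSha,
    Error(String),
}

type Actions = Vec<&'static str>;
type EvalOutcome = Result<Actions, Result<(), CommitStatusError>>;

fn plan_next_step(outcome: EvalOutcome) -> &'static str {
    match outcome {
        Ok(_) => "queue builds",
        // Eval failed, but we reported it on the change: nothing left to do.
        Err(Ok(())) => "skip",
        // Reporting failed too; decide per error whether to retry.
        Err(Err(CommitStatusError::ExpiredCreds)) => "retry later",
        Err(Err(CommitStatusError::MissingSha)) => "skip (force-pushed)",
        Err(Err(CommitStatusError::Error(_))) => "label internal error, skip",
    }
}
```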
     // FIXME: remove with rust/cargo update
     #[allow(clippy::cognitive_complexity)]
-    fn evaluate_job(&mut self) -> Result<worker::Actions, EvalWorkerError> {
+    #[allow(clippy::too_many_lines)]
+    async fn evaluate_job(
+        &mut self,
+        chan: &mut lapin::Channel,
+    ) -> Result<worker::Actions, EvalWorkerError> {
         let job = self.job;
-        let repo = self
-            .client_app
-            .repo(self.job.repo.owner.clone(), self.job.repo.name.clone());
-        let pulls = repo.pulls();
-        let pull = pulls.get(job.pr.number);
-        let issue_ref = repo.issue(job.pr.number);
-        let issue: Issue;
+        let issue_ref = self.vcs_api.get_issue(&job.repo, job.change.number).await;
         let auto_schedule_build_archs: Vec<systems::System>;
 
-        match async_std::task::block_on(issue_ref.get()) {
+        let _issue: Issue = match issue_ref {
             Ok(iss) => {
-                if iss.state == "closed" {
-                    self.events.notify(Event::IssueAlreadyClosed);
-                    info!("Skipping {} because it is closed", job.pr.number);
-                    return Ok(self.actions().skip(job));
+                if matches!(iss.state, IssueState::Closed) {
+                    self.events.notify(Event::IssueAlreadyClosed).await;
+                    info!("Skipping {} because it is closed", job.change.number);
+                    return Ok(Actions::skip(job));
                 }
 
-                if issue_is_wip(&iss) {
+                if iss.is_wip() {
+                    self.events.notify(Event::CurrentlyWorkInProgress).await;
                     auto_schedule_build_archs = vec![];
                 } else {
                     auto_schedule_build_archs = self.acl.build_job_architectures_for_user_repo(
-                        &iss.user.login,
+                        &iss.created_by.username,
                         &job.repo.full_name,
                     );
                 }
 
-                issue = iss;
+                iss
             }
 
             Err(e) => {
-                self.events.notify(Event::IssueFetchFailed);
-                error!("Error fetching {}!", job.pr.number);
+                self.events.notify(Event::IssueFetchFailed).await;
+                error!("Error fetching {}!", job.change.number);
                 error!("E: {:?}", e);
-                return Ok(self.actions().skip(job));
+                return Ok(Actions::skip(job));
             }
         };
 
-        let mut evaluation_strategy: Box<dyn eval::EvaluationStrategy> = if job.is_nixpkgs() {
-            Box::new(eval::NixpkgsStrategy::new(
-                job,
-                &pull,
-                &issue,
-                &issue_ref,
-                &repo,
-                &self.gists,
-                self.nix.clone(),
-            ))
-        } else {
-            Box::new(eval::GenericStrategy::new())
-        };
-
-        let prefix = get_prefix(repo.statuses(), &job.pr.head_sha)?;
+        let mut evaluation_strategy = eval::NixpkgsStrategy::new(
+            chan.clone(),
+            job,
+            self.vcs_api.clone(),
+            &job.repo,
+            &job.change,
+            self.nix.clone(),
+        );
 
         let mut overall_status = CommitStatus::new(
-            repo.statuses(),
-            job.pr.head_sha.clone(),
-            format!("{}-eval", &prefix),
+            self.vcs_api.clone(),
+            job.repo.clone(),
+            job.change.head_sha.clone(),
+            "ofborg-eval".to_owned(),
             "Starting".to_owned(),
             None,
         );
 
-        overall_status.set_with_description("Starting", hubcaps::statuses::State::Pending)?;
+        overall_status
+            .set_with_description("Starting", State::Pending)
+            .await?;
 
-        evaluation_strategy.pre_clone()?;
+        evaluation_strategy.pre_clone().await?;
 
         let project = self
             .cloner
             .project(&job.repo.full_name, job.repo.clone_url.clone());
 
         overall_status
-            .set_with_description("Cloning project", hubcaps::statuses::State::Pending)?;
+            .set_with_description("Cloning project", State::Pending)
+            .await?;
 
-        info!("Working on {}", job.pr.number);
+        info!("Working on {}", job.change.number);
         let co = project
+            // TODO: what is 'mr-est' ?
             .clone_for("mr-est".to_string(), self.identity.to_string())
             .unwrap();
 
-        let target_branch = match job.pr.target_branch.clone() {
+        let target_branch = match job.change.target_branch.clone() {
             Some(x) => x,
             None => String::from("master"),
         };
 
+        // TODO: this is a preflight check, encode it as such.
         if target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-") {
-            overall_status.set_with_description(
-                "The branch you have targeted is a read-only mirror for channels. \
-                 Please target release-* or master.",
-                hubcaps::statuses::State::Error,
-            )?;
+            overall_status
+                .set_with_description(
+                    "The branch you have targeted is a read-only mirror for channels. \
+                     Please target release-* or master.",
+                    State::Error,
+                )
+                .await?;
 
             info!("PR targets a nixos-* or nixpkgs-* branch");
-            return Ok(self.actions().skip(job));
+            return Ok(Actions::skip(job));
         };
 
-        overall_status.set_with_description(
-            format!("Checking out {}", &target_branch).as_ref(),
-            hubcaps::statuses::State::Pending,
-        )?;
+        overall_status
+            .set_with_description(
+                format!("Checking out {}", &target_branch).as_ref(),
+                State::Pending,
+            )
+            .await?;
         info!("Checking out target branch {}", &target_branch);
         let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
 
-        evaluation_strategy.on_target_branch(Path::new(&refpath), &mut overall_status)?;
+        evaluation_strategy
+            .on_target_branch(Path::new(&refpath), &mut overall_status)
+            .await?;
 
         let target_branch_rebuild_sniff_start = Instant::now();
 
-        self.events.notify(Event::EvaluationDuration(
-            target_branch.clone(),
-            target_branch_rebuild_sniff_start.elapsed().as_secs(),
-        ));
-        self.events
-            .notify(Event::EvaluationDurationCount(target_branch));
+        self.events
+            .notify(Event::EvaluationDuration(
+                target_branch.clone(),
+                target_branch_rebuild_sniff_start.elapsed().as_secs(),
+            ))
+            .await;
+        self.events
+            .notify(Event::EvaluationDurationCount(target_branch))
+            .await;
 
-        overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending)?;
-
-        co.fetch_pr(job.pr.number).unwrap();
-
-        if !co.commit_exists(job.pr.head_sha.as_ref()) {
-            overall_status
-                .set_with_description("Commit not found", hubcaps::statuses::State::Error)?;
-
-            info!("Commit {} doesn't exist", job.pr.head_sha);
-            return Ok(self.actions().skip(job));
+        overall_status
+            .set_with_description("Fetching PR", State::Pending)
+            .await?;
+
+        // TODO: generalize fetch change
+        co.fetch_pr(job.change.number).unwrap();
+
+        if !co.commit_exists(job.change.head_sha.as_ref()) {
+            overall_status
+                .set_with_description("Commit not found", State::Error)
+                .await?;
+
+            info!("Commit {} doesn't exist", job.change.head_sha);
+            return Ok(Actions::skip(job));
         }
 
-        evaluation_strategy.after_fetch(&co)?;
+        evaluation_strategy.after_fetch(&co);
 
-        overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending)?;
-
-        if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
-            overall_status
-                .set_with_description("Failed to merge", hubcaps::statuses::State::Failure)?;
-
-            info!("Failed to merge {}", job.pr.head_sha);
-
-            evaluation_strategy.merge_conflict();
-
-            return Ok(self.actions().skip(job));
+        overall_status
+            .set_with_description("Merging PR", State::Pending)
+            .await?;
+
+        if co.merge_commit(job.change.head_sha.as_ref()).is_err() {
+            overall_status
+                .set_with_description("Failed to merge", State::Failure)
+                .await?;
+
+            info!("Failed to merge {}", job.change.head_sha);
+
+            evaluation_strategy.merge_conflict().await;
+
+            return Ok(Actions::skip(job));
         }
 
-        evaluation_strategy.after_merge(&mut overall_status)?;
+        evaluation_strategy.after_merge(&mut overall_status).await?;
 
         info!("Got path: {:?}, building", refpath);
         overall_status
-            .set_with_description("Beginning Evaluations", hubcaps::statuses::State::Pending)?;
+            .set_with_description("Beginning Evaluations", State::Pending)
+            .await?;
 
-        let eval_results: bool = evaluation_strategy
-            .evaluation_checks()
-            .into_iter()
-            .map(|check| {
+        let mut all_good = true;
+        for check in evaluation_strategy.evaluation_checks() {
             let mut status = CommitStatus::new(
-                repo.statuses(),
-                job.pr.head_sha.clone(),
-                format!("{}-eval-{}", prefix, check.name()),
+                self.vcs_api.clone(),
+                job.repo.clone(),
+                job.change.head_sha.clone(),
+                format!("ofborg-eval-{}", check.name()),
                 check.cli_cmd(),
                 None,
             );
 
             status
-                .set(hubcaps::statuses::State::Pending)
+                .set(State::Pending)
+                .await
                 .expect("Failed to set status on eval strategy");
 
-            let state: hubcaps::statuses::State;
+            let state: State;
             let gist_url: Option<String>;
-            match check.execute(Path::new(&refpath)) {
+            match check.execute(Path::new(&refpath)).await {
                 Ok(_) => {
-                    state = hubcaps::statuses::State::Success;
+                    state = State::Success;
                    gist_url = None;
                }
                Err(mut out) => {
-                    state = hubcaps::statuses::State::Failure;
-                    gist_url = self.make_gist(
-                        &format!("{}-eval-{}", prefix, check.name()),
-                        Some(format!("{:?}", state)),
-                        file_to_str(&mut out),
-                    );
+                    state = State::Failure;
+                    gist_url = self
+                        .make_pastebin(
+                            chan,
+                            &format!("[ofborg] Evaluation of {}", check.name()),
+                            file_to_str(&mut out),
+                        )
+                        .await
+                        .map(|pp| pp.uri);
                }
            }
 
             status.set_url(gist_url);
             status
-                .set(state.clone())
+                .set(state)
+                .await
                 .expect("Failed to set status on eval strategy");
 
-            if state == hubcaps::statuses::State::Success {
-                Ok(())
-            } else {
-                Err(())
-            }
-            })
-            .all(|status| status == Ok(()));
+            if state != State::Success {
+                all_good = false;
+            }
+        }
 
         info!("Finished evaluations");
         let mut response: worker::Actions = vec![];
 
-        if eval_results {
+        if all_good {
             let complete = evaluation_strategy
-                .all_evaluations_passed(Path::new(&refpath), &mut overall_status)?;
+                .all_evaluations_passed(Path::new(&refpath), &mut overall_status)
+                .await?;
 
-            send_check_statuses(complete.checks, &repo);
-            response.extend(schedule_builds(complete.builds, auto_schedule_build_archs));
+            self.vcs_api
+                .create_check_statuses(&job.repo, complete.checks)
+                .await;
+            response.extend(schedule_builds(complete.builds, &auto_schedule_build_archs));
 
-            overall_status.set_with_description("^.^!", hubcaps::statuses::State::Success)?;
+            overall_status
+                .set_with_description("^.^!", State::Success)
+                .await?;
         } else {
             overall_status
-                .set_with_description("Complete, with errors", hubcaps::statuses::State::Failure)?;
+                .set_with_description("Complete, with errors", State::Failure)
+                .await?;
         }
 
-        self.events.notify(Event::TaskEvaluationCheckComplete);
+        self.events.notify(Event::TaskEvaluationCheckComplete).await;
 
         info!("Evaluations done!");
-        Ok(self.actions().done(job, response))
-    }
-}
+        Ok(Actions::done(job, response))
 
-fn send_check_statuses(checks: Vec<CheckRunOptions>, repo: &hubcaps::repositories::Repository) {
-    for check in checks {
-        match async_std::task::block_on(repo.checkruns().create(&check)) {
-            Ok(_) => debug!("Sent check update"),
-            Err(e) => warn!("Failed to send check update: {:?}", e),
-        }
     }
 }
 
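Review note: the new `// TODO: this is a preflight check` comment suggests pulling the read-only-mirror test out of `evaluate_job`. A sketch of how that predicate could be encoded and unit-tested; the function name is hypothetical, only the two prefixes are taken from the diff:

```rust
// Pure preflight predicate: channel mirrors are read-only and must not be
// targeted by changes.
fn targets_readonly_mirror(target_branch: &str) -> bool {
    target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-")
}

#[cfg(test)]
mod preflight_tests {
    use super::targets_readonly_mirror;

    #[test]
    fn channel_mirrors_are_rejected() {
        assert!(targets_readonly_mirror("nixos-24.05"));
        assert!(targets_readonly_mirror("nixpkgs-unstable"));
        assert!(!targets_readonly_mirror("master"));
        assert!(!targets_readonly_mirror("release-24.05"));
    }
}
```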
 fn schedule_builds(
     builds: Vec<buildjob::BuildJob>,
-    auto_schedule_build_archs: Vec<systems::System>,
+    auto_schedule_build_archs: &[systems::System],
 ) -> Vec<worker::Action> {
     let mut response = vec![];
     info!(
@@ -485,20 +496,22 @@ fn schedule_builds(
         builds, auto_schedule_build_archs
     );
     for buildjob in builds {
-        for arch in auto_schedule_build_archs.iter() {
+        for arch in auto_schedule_build_archs {
             let (exchange, routingkey) = arch.as_build_destination();
             response.push(worker::publish_serde_action(
-                exchange, routingkey, &buildjob,
+                &exchange,
+                &routingkey,
+                &buildjob,
             ));
         }
         response.push(worker::publish_serde_action(
-            Some("build-results".to_string()),
-            None,
+            &Some("build-results".to_string()),
+            &None,
             &buildjob::QueuedBuildJobs {
                 job: buildjob,
                 architectures: auto_schedule_build_archs
                     .iter()
-                    .map(|arch| arch.to_string())
+                    .map(std::string::ToString::to_string)
                     .collect(),
             },
         ));
@@ -507,119 +520,6 @@ fn schedule_builds(
     response
 }
 
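Review note: `schedule_builds` fans out one publish per (job, architecture) pair plus one `QueuedBuildJobs` bookkeeping record per job, so it returns `builds.len() * (archs.len() + 1)` actions. A tiny check of that arithmetic, with the counts factored into a standalone helper:

```rust
// Stand-in for the fan-out performed by schedule_builds above.
fn expected_action_count(builds: usize, archs: usize) -> usize {
    builds * (archs + 1)
}

#[test]
fn fan_out_counts() {
    assert_eq!(expected_action_count(2, 3), 8);
    assert_eq!(expected_action_count(1, 0), 1); // no archs: only build-results
}
```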
-pub fn make_gist(
-    gists: &hubcaps::gists::Gists,
-    name: &str,
-    description: Option<String>,
-    contents: String,
-) -> Option<String> {
-    let mut files: HashMap<String, hubcaps::gists::Content> = HashMap::new();
-    files.insert(
-        name.to_string(),
-        hubcaps::gists::Content {
-            filename: Some(name.to_string()),
-            content: contents,
-        },
-    );
-
-    Some(
-        async_std::task::block_on(gists.create(&hubcaps::gists::GistOptions {
-            description,
-            public: Some(true),
-            files,
-        }))
-        .expect("Failed to create gist!")
-        .html_url,
-    )
-}
-
-pub fn update_labels(issueref: &hubcaps::issues::IssueRef, add: &[String], remove: &[String]) {
-    let l = issueref.labels();
-    let issue = async_std::task::block_on(issueref.get()).expect("Failed to get issue");
-
-    let existing: Vec<String> = issue.labels.iter().map(|l| l.name.clone()).collect();
-
-    let to_add: Vec<&str> = add
-        .iter()
-        .filter(|l| !existing.contains(l)) // Remove labels already on the issue
-        .map(|l| l.as_ref())
-        .collect();
-
-    let to_remove: Vec<String> = remove
-        .iter()
-        .filter(|l| existing.contains(l)) // Remove labels already on the issue
-        .cloned()
-        .collect();
-
-    info!(
-        "Labeling issue #{}: + {:?} , - {:?}, = {:?}",
-        issue.number, to_add, to_remove, existing
-    );
-
-    async_std::task::block_on(l.add(to_add.clone())).unwrap_or_else(|e| {
-        panic!(
-            "Failed to add labels {:?} to issue #{}: {:?}",
-            to_add, issue.number, e
-        )
-    });
-
-    for label in to_remove {
-        async_std::task::block_on(l.remove(&label)).unwrap_or_else(|e| {
-            panic!(
-                "Failed to remove label {:?} from issue #{}: {:?}",
-                label, issue.number, e
-            )
-        });
-    }
-}
-
-fn issue_is_wip(issue: &hubcaps::issues::Issue) -> bool {
-    if issue.title.contains("[WIP]") {
-        return true;
-    }
-
-    if issue.title.starts_with("WIP:") {
-        return true;
-    }
-
-    issue.labels.iter().any(|label| indicates_wip(&label.name))
-}
-
-fn indicates_wip(text: &str) -> bool {
-    let text = text.to_lowercase();
-
-    if text.contains("work in progress") {
-        return true;
-    }
-
-    if text.contains("work-in-progress") {
-        return true;
-    }
-
-    false
-}
-
-/// Determine whether or not to use the "old" status prefix, `grahamcofborg`, or
-/// the new one, `ofborg`.
-///
-/// If the PR already has any `grahamcofborg`-prefixed statuses, continue to use
-/// that (e.g. if someone used `@ofborg eval`, `@ofborg build`, `@ofborg test`).
-/// Otherwise, if it's a new PR or was recently force-pushed (and therefore
-/// doesn't have any old `grahamcofborg`-prefixed statuses), use the new prefix.
-pub fn get_prefix(
-    statuses: hubcaps::statuses::Statuses,
-    sha: &str,
-) -> Result<&str, CommitStatusError> {
-    if async_std::task::block_on(statuses.list(sha))?
-        .iter()
-        .any(|s| s.context.starts_with("grahamcofborg-"))
-    {
-        Ok("grahamcofborg")
-    } else {
-        Ok("ofborg")
-    }
-}
 
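Review note: the deleted `update_labels` reconciled the requested labels against those already on the issue before talking to GitHub (add only missing labels, remove only present ones). Whatever backend sits behind `vcs_api.update_labels` now needs the same diffing; a pure sketch of that step, under the assumption the reconciliation moves server-side rather than disappearing:

```rust
// Pure label reconciliation, extracted from the deleted helper's logic.
fn reconcile_labels<'a>(
    existing: &[String],
    add: &'a [String],
    remove: &'a [String],
) -> (Vec<&'a str>, Vec<&'a str>) {
    let to_add = add
        .iter()
        .filter(|l| !existing.contains(l)) // skip labels already present
        .map(String::as_str)
        .collect();
    let to_remove = remove
        .iter()
        .filter(|l| existing.contains(l)) // skip labels already absent
        .map(String::as_str)
        .collect();
    (to_add, to_remove)
}
```

Keeping this as a pure function also makes it testable without a live forge, which the old `block_on` + `panic!` version was not.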
 enum EvalWorkerError {
     EvalError(eval::Error),
     CommitStatusWrite(CommitStatusError),

@@ -1,8 +1,9 @@
 use crate::acl;
 use crate::ghevent;
-use crate::message::{evaluationjob, Pr, Repo};
+use crate::message::{evaluationjob, Change, Repo};
 use crate::worker;
 
+use async_trait::async_trait;
 use tracing::{debug_span, info};
 
 pub struct EvaluationFilterWorker {
@@ -10,15 +11,22 @@ pub struct EvaluationFilterWorker {
 }
 
 impl EvaluationFilterWorker {
+    #[must_use]
     pub fn new(acl: acl::Acl) -> EvaluationFilterWorker {
         EvaluationFilterWorker { acl }
     }
 }
 
+#[async_trait]
 impl worker::SimpleWorker for EvaluationFilterWorker {
     type J = ghevent::PullRequestEvent;
 
-    fn msg_to_job(&mut self, _: &str, _: &Option<String>, body: &[u8]) -> Result<Self::J, String> {
+    async fn msg_to_job(
+        &mut self,
+        _: &str,
+        _: &Option<String>,
+        body: &[u8],
+    ) -> Result<Self::J, String> {
         match serde_json::from_slice(body) {
             Ok(e) => Ok(e),
             Err(e) => Err(format!(
@@ -29,7 +37,11 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
         }
     }
 
-    fn consumer(&mut self, job: &ghevent::PullRequestEvent) -> worker::Actions {
+    async fn consumer(
+        &mut self,
+        _chan: &mut lapin::Channel,
+        job: &ghevent::PullRequestEvent,
+    ) -> worker::Actions {
         let span = debug_span!("job", pr = ?job.number);
         let _enter = span.enter();
 
@@ -47,9 +59,9 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
         }
 
         let interesting: bool = match job.action {
-            ghevent::PullRequestAction::Opened => true,
-            ghevent::PullRequestAction::Synchronize => true,
-            ghevent::PullRequestAction::Reopened => true,
+            ghevent::PullRequestAction::Opened
+            | ghevent::PullRequestAction::Synchronize
+            | ghevent::PullRequestAction::Reopened => true,
             ghevent::PullRequestAction::Edited => {
                 if let Some(ref changes) = job.changes {
                     changes.base.is_some()
@@ -57,7 +69,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
                     false
                 }
             }
-            _ => false,
+            ghevent::PullRequestAction::Unknown => false,
         };
 
         if !interesting {
@@ -80,7 +92,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
             name: job.repository.name.clone(),
         };
 
-        let pr_msg = Pr {
+        let change_msg = Change {
             number: job.number,
             head_sha: job.pull_request.head.sha.clone(),
             target_branch: Some(job.pull_request.base.git_ref.clone()),
@@ -88,17 +100,18 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
 
         let msg = evaluationjob::EvaluationJob {
             repo: repo_msg,
-            pr: pr_msg,
+            change: change_msg,
         };
 
         vec![
-            worker::publish_serde_action(None, Some("mass-rebuild-check-jobs".to_owned()), &msg),
+            worker::publish_serde_action(&None, &Some("mass-rebuild-check-jobs".to_owned()), &msg),
             worker::Action::Ack,
         ]
     }
 }
 
 #[cfg(test)]
+#[cfg(any())]
 mod tests {
     use super::*;
     use crate::worker::SimpleWorker;
@@ -115,6 +128,8 @@ mod tests {
             Some(vec![]),
         ));
 
+        // FIXME(raito): fake channel?
 
         assert_eq!(
             worker.consumer(&job),
             vec![
@@ -128,7 +143,7 @@ mod tests {
                 owner: String::from("NixOS"),
                 name: String::from("nixpkgs"),
             },
-            pr: Pr {
+            change: Change {
                 number: 33299,
                 head_sha: String::from("887e8b460a7d45ddb3bbdebe01447b251b3229e8"),
                 target_branch: Some(String::from("staging")),
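Review note: the new signatures imply that `worker::SimpleWorker` is now an `#[async_trait]` trait whose `consumer` receives the AMQP channel. A sketch of the implied shape; the real definition lives in `crate::worker` and may differ in bounds and variants:

```rust
use async_trait::async_trait;

// Stand-in action type; the real enum also carries publish payloads.
pub enum Action {
    Ack,
    NackRequeue,
}
pub type Actions = Vec<Action>;

#[async_trait]
pub trait SimpleWorker {
    type J: Send + Sync;

    async fn msg_to_job(
        &mut self,
        method: &str,
        headers: &Option<String>,
        body: &[u8],
    ) -> Result<Self::J, String>;

    async fn consumer(&mut self, chan: &mut lapin::Channel, job: &Self::J) -> Actions;
}
```

Threading the channel through `consumer` is what lets workers publish pastebins and follow-up jobs without holding a second connection.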
@@ -1,163 +0,0 @@
-use crate::acl;
-use crate::commentparser;
-use crate::ghevent;
-use crate::message::{buildjob, evaluationjob, Pr, Repo};
-use crate::worker;
-
-use tracing::{debug_span, error, info};
-use uuid::Uuid;
-
-pub struct GitHubCommentWorker {
-    acl: acl::Acl,
-    github: hubcaps::Github,
-}
-
-impl GitHubCommentWorker {
-    pub fn new(acl: acl::Acl, github: hubcaps::Github) -> GitHubCommentWorker {
-        GitHubCommentWorker { acl, github }
-    }
-}
-
-impl worker::SimpleWorker for GitHubCommentWorker {
-    type J = ghevent::IssueComment;
-
-    fn msg_to_job(&mut self, _: &str, _: &Option<String>, body: &[u8]) -> Result<Self::J, String> {
-        match serde_json::from_slice(body) {
-            Ok(e) => Ok(e),
-            Err(e) => {
-                error!(
-                    "Failed to deserialize IsssueComment: {:?}",
-                    String::from_utf8(body.to_vec())
-                );
-                panic!("{:?}", e);
-            }
-        }
-    }
-
-    // FIXME: remove with rust/cargo update
-    #[allow(clippy::cognitive_complexity)]
-    fn consumer(&mut self, job: &ghevent::IssueComment) -> worker::Actions {
-        let span = debug_span!("job", pr = ?job.issue.number);
-        let _enter = span.enter();
-
-        if job.action == ghevent::IssueCommentAction::Deleted {
-            return vec![worker::Action::Ack];
-        }
-
-        let instructions = commentparser::parse(&job.comment.body);
-        if instructions == None {
-            return vec![worker::Action::Ack];
-        }
-
-        let build_destinations = self.acl.build_job_architectures_for_user_repo(
-            &job.comment.user.login,
-            &job.repository.full_name,
-        );
-
-        if build_destinations.is_empty() {
-            info!("No build destinations for: {:?}", job);
-            // Don't process comments if they can't build anything
-            return vec![worker::Action::Ack];
-        }
-
-        info!("Got job: {:?}", job);
-
-        let instructions = commentparser::parse(&job.comment.body);
-        info!("Instructions: {:?}", instructions);
-
-        let pr = async_std::task::block_on(
-            self.github
-                .repo(
-                    job.repository.owner.login.clone(),
-                    job.repository.name.clone(),
-                )
-                .pulls()
-                .get(job.issue.number)
-                .get(),
-        );
-
-        if let Err(x) = pr {
-            info!(
-                "fetching PR {}#{} from GitHub yielded error {}",
-                job.repository.full_name, job.issue.number, x
-            );
-            return vec![worker::Action::Ack];
-        }
-
-        let pr = pr.unwrap();
-
-        let repo_msg = Repo {
-            clone_url: job.repository.clone_url.clone(),
-            full_name: job.repository.full_name.clone(),
-            owner: job.repository.owner.login.clone(),
-            name: job.repository.name.clone(),
-        };
-
-        let pr_msg = Pr {
-            number: job.issue.number,
-            head_sha: pr.head.sha.clone(),
-            target_branch: Some(pr.base.commit_ref),
-        };
-
-        let mut response: Vec<worker::Action> = vec![];
-        if let Some(instructions) = instructions {
-            for instruction in instructions {
-                match instruction {
-                    commentparser::Instruction::Build(subset, attrs) => {
-                        let build_destinations = match subset {
-                            commentparser::Subset::NixOS => build_destinations
-                                .clone()
-                                .into_iter()
-                                .filter(|x| x.can_run_nixos_tests())
-                                .collect(),
-                            _ => build_destinations.clone(),
-                        };
-
-                        let msg = buildjob::BuildJob::new(
-                            repo_msg.clone(),
-                            pr_msg.clone(),
-                            subset,
-                            attrs,
-                            None,
-                            None,
-                            format!("{}", Uuid::new_v4()),
-                        );
-
-                        for arch in build_destinations.iter() {
-                            let (exchange, routingkey) = arch.as_build_destination();
-                            response.push(worker::publish_serde_action(exchange, routingkey, &msg));
-                        }
-
-                        response.push(worker::publish_serde_action(
-                            Some("build-results".to_string()),
-                            None,
-                            &buildjob::QueuedBuildJobs {
-                                job: msg,
-                                architectures: build_destinations
-                                    .iter()
-                                    .cloned()
-                                    .map(|arch| arch.to_string())
-                                    .collect(),
-                            },
-                        ));
-                    }
-                    commentparser::Instruction::Eval => {
-                        let msg = evaluationjob::EvaluationJob {
-                            repo: repo_msg.clone(),
-                            pr: pr_msg.clone(),
-                        };
-
-                        response.push(worker::publish_serde_action(
-                            None,
-                            Some("mass-rebuild-check-jobs".to_owned()),
-                            &msg,
-                        ));
-                    }
-                }
-            }
-        }
-
-        response.push(worker::Action::Ack);
-        response
-    }
-}
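Review note: the deleted worker above still spoke in terms of `Pr`, which the rest of this PR renames to `Change`. Assuming the serialized field names are untouched (the hunks only rename the Rust-side type and field), old queue payloads should keep parsing. A sketch of that wire-compatibility claim; the field types are guesses:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Change {
    pub number: u64,
    pub head_sha: String,
    pub target_branch: Option<String>,
}

#[test]
fn old_pr_payloads_still_parse() {
    // A message shaped like the old `Pr` struct...
    let wire = r#"{"number":2345,"head_sha":"abc123","target_branch":"master"}"#;
    // ...parses into `Change`, since only the Rust type was renamed.
    let change: Change = serde_json::from_str(wire).unwrap();
    assert_eq!(change.number, 2345);
}
```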
@@ -1,759 +0,0 @@
-use crate::config::GithubAppVendingMachine;
-use crate::message::buildjob::{BuildJob, QueuedBuildJobs};
-use crate::message::buildresult::{BuildResult, BuildStatus, LegacyBuildResult};
-use crate::message::Repo;
-use crate::worker;
-
-use chrono::{DateTime, Utc};
-use hubcaps::checks::{CheckRunOptions, CheckRunState, Conclusion, Output};
-use tracing::{debug, debug_span, info};
-
-pub struct GitHubCommentPoster {
-    github_vend: GithubAppVendingMachine,
-}
-
-impl GitHubCommentPoster {
-    pub fn new(github_vend: GithubAppVendingMachine) -> GitHubCommentPoster {
-        GitHubCommentPoster { github_vend }
-    }
-}
-
-pub enum PostableEvent {
-    BuildQueued(QueuedBuildJobs),
-    BuildFinished(BuildResult),
-}
-
-impl PostableEvent {
-    fn from(bytes: &[u8]) -> Result<PostableEvent, String> {
-        match serde_json::from_slice::<QueuedBuildJobs>(bytes) {
-            Ok(e) => Ok(PostableEvent::BuildQueued(e)),
-            Err(_) => match serde_json::from_slice::<BuildResult>(bytes) {
-                Ok(e) => Ok(PostableEvent::BuildFinished(e)),
-                Err(e) => Err(format!(
-                    "Failed to deserialize PostableEvent: {:?}, err: {:}",
-                    String::from_utf8_lossy(bytes),
-                    e
-                )),
-            },
-        }
-    }
-}
-
-impl worker::SimpleWorker for GitHubCommentPoster {
-    type J = PostableEvent;
-
-    fn msg_to_job(&mut self, _: &str, _: &Option<String>, body: &[u8]) -> Result<Self::J, String> {
-        PostableEvent::from(body)
-    }
-
-    fn consumer(&mut self, job: &PostableEvent) -> worker::Actions {
-        let mut checks: Vec<CheckRunOptions> = vec![];
-        let repo: Repo;
-
-        let pr = match job {
-            PostableEvent::BuildQueued(queued_job) => {
-                repo = queued_job.job.repo.clone();
-                for architecture in queued_job.architectures.iter() {
-                    checks.push(job_to_check(&queued_job.job, architecture, Utc::now()));
-                }
-                queued_job.job.pr.to_owned()
-            }
-            PostableEvent::BuildFinished(finished_job) => {
-                let result = finished_job.legacy();
-                repo = result.repo.clone();
-                checks.push(result_to_check(&result, Utc::now()));
-                finished_job.pr()
-            }
-        };
-
-        let span = debug_span!("job", pr = ?pr.number);
-        let _enter = span.enter();
-
-        for check in checks {
-            info!(
-                "check {:?} {} {}",
-                check.status,
-                check.name,
-                check.details_url.as_ref().unwrap_or(&String::from("-"))
-            );
-            debug!("{:?}", check);
-
-            let check_create_attempt = async_std::task::block_on(
-                self.github_vend
-                    .for_repo(&repo.owner, &repo.name)
-                    .unwrap()
-                    .repo(repo.owner.clone(), repo.name.clone())
-                    .checkruns()
-                    .create(&check),
-            );
-
-            match check_create_attempt {
-                Ok(_) => info!("Successfully sent."),
-                Err(err) => info!("Failed to send check {:?}", err),
-            }
-        }
-
-        vec![worker::Action::Ack]
-    }
-}
-
-fn job_to_check(job: &BuildJob, architecture: &str, timestamp: DateTime<Utc>) -> CheckRunOptions {
-    let mut all_attrs: Vec<String> = job.attrs.clone();
-    all_attrs.sort();
-
-    if all_attrs.is_empty() {
-        all_attrs = vec![String::from("(unknown attributes)")];
-    }
-
-    CheckRunOptions {
-        name: format!("{} on {}", all_attrs.join(", "), architecture),
-        actions: None,
-        completed_at: None,
-        started_at: Some(timestamp.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)),
-        conclusion: None,
-        details_url: Some(format!(
-            "https://logs.ofborg.org/?key={}/{}.{}",
-            &job.repo.owner.to_lowercase(),
-            &job.repo.name.to_lowercase(),
-            job.pr.number,
-        )),
-        external_id: None,
-        head_sha: job.pr.head_sha.clone(),
-        output: None,
-        status: Some(CheckRunState::Queued),
-    }
-}
-
-fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> CheckRunOptions {
-    let mut all_attrs: Vec<String> =
-        vec![result.attempted_attrs.clone(), result.skipped_attrs.clone()]
-            .into_iter()
-            .map(|opt| opt.unwrap_or_else(|| vec![]))
-            .flat_map(|list| list.into_iter())
-            .collect();
-    all_attrs.sort();
-
-    if all_attrs.is_empty() {
-        all_attrs = vec![String::from("(unknown attributes)")];
-    }
-
-    let conclusion: Conclusion = result.status.clone().into();
-
-    let mut summary: Vec<String> = vec![];
-    if let Some(ref attempted) = result.attempted_attrs {
-        summary.extend(list_segment("Attempted", attempted));
-    }
-
-    if result.status == BuildStatus::TimedOut {
-        summary.push(String::from("Build timed out."));
-    }
-
-    if let Some(ref skipped) = result.skipped_attrs {
-        summary.extend(list_segment(
-            &format!(
-                "The following builds were skipped because they don't evaluate on {}",
-                result.system
-            ),
-            skipped,
-        ));
-    }
-
-    // Allow the clippy violation for improved readability
-    #[allow(clippy::vec_init_then_push)]
-    let text: String = if !result.output.is_empty() {
-        let mut reply: Vec<String> = vec![];
-
-        reply.push("## Partial log".to_owned());
-        reply.push("".to_owned());
-        reply.push("```".to_owned());
-        reply.extend(result.output.clone());
-        reply.push("```".to_owned());
-
-        reply.join("\n")
-    } else {
-        String::from("No partial log is available.")
-    };
-
-    CheckRunOptions {
-        name: format!("{} on {}", all_attrs.join(", "), result.system),
-        actions: None,
-        completed_at: Some(timestamp.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)),
-        started_at: None,
-        conclusion: Some(conclusion),
-        details_url: Some(format!(
-            "https://logs.ofborg.org/?key={}/{}.{}&attempt_id={}",
-            &result.repo.owner.to_lowercase(),
-            &result.repo.name.to_lowercase(),
-            result.pr.number,
-            result.attempt_id,
-        )),
-        external_id: Some(result.attempt_id.clone()),
-        head_sha: result.pr.head_sha.clone(),
-
-        output: Some(Output {
-            annotations: None,
-            images: None,
-            summary: summary.join("\n"),
-            text: Some(text),
-            title: result.status.clone().into(),
-        }),
-        status: Some(CheckRunState::Completed),
-    }
-}
-
-fn list_segment(name: &str, things: &[String]) -> Vec<String> {
-    let mut reply: Vec<String> = vec![];
-
-    if !things.is_empty() {
-        reply.push(format!("{}: {}", name, things.join(", ")));
-        reply.push("".to_owned());
-    }
-
-    reply
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::message::{Pr, Repo};
-    use chrono::TimeZone;
-
-    #[test]
-    pub fn test_queued_build() {
-        let job = BuildJob {
-            repo: Repo {
-                clone_url: "https://github.com/nixos/nixpkgs.git".to_owned(),
-                full_name: "NixOS/nixpkgs".to_owned(),
-                owner: "NixOS".to_owned(),
-                name: "nixpkgs".to_owned(),
-            },
-            pr: Pr {
-                head_sha: "abc123".to_owned(),
-                number: 2345,
-                target_branch: Some("master".to_owned()),
-            },
-            logs: None,
-            statusreport: None,
-            subset: None,
-
-            request_id: "bogus-request-id".to_owned(),
-            attrs: vec!["foo".to_owned(), "bar".to_owned()],
-        };
-
-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
-        assert_eq!(
-            job_to_check(&job, "x86_64-linux", timestamp),
-            CheckRunOptions {
-                name: "bar, foo on x86_64-linux".to_string(),
-                actions: None,
-                started_at: Some("2023-04-20T13:37:42Z".to_string()),
-                completed_at: None,
-                status: Some(CheckRunState::Queued),
-                conclusion: None,
-                details_url: Some("https://logs.ofborg.org/?key=nixos/nixpkgs.2345".to_string()),
-                external_id: None,
-                head_sha: "abc123".to_string(),
-                output: None,
-            }
-        );
-    }
-
-    #[test]
-    pub fn test_check_passing_build() {
-        let result = LegacyBuildResult {
-            repo: Repo {
-                clone_url: "https://github.com/nixos/nixpkgs.git".to_owned(),
-                full_name: "NixOS/nixpkgs".to_owned(),
-                owner: "NixOS".to_owned(),
-                name: "nixpkgs".to_owned(),
-            },
-            pr: Pr {
-                head_sha: "abc123".to_owned(),
-                number: 2345,
-                target_branch: Some("master".to_owned()),
-            },
-            output: vec![
-                "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[2]: Nothing to be done for 'install'.".to_owned(),
-                "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[1]: Nothing to be done for 'install-target'.".to_owned(),
-                "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
-                "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
-                "post-installation fixup".to_owned(),
-                "strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip".to_owned(),
-                "patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
-                "/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
-            ],
-            attempt_id: "neatattemptid".to_owned(),
-            request_id: "bogus-request-id".to_owned(),
-            system: "x86_64-linux".to_owned(),
-            attempted_attrs: Some(vec!["foo".to_owned()]),
-            skipped_attrs: Some(vec!["bar".to_owned()]),
-            status: BuildStatus::Success,
-        };
-
-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
-
-        assert_eq!(
-            result_to_check(&result, timestamp),
-            CheckRunOptions {
-                name: "bar, foo on x86_64-linux".to_string(),
-                actions: None,
-                started_at: None,
-                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
-                status: Some(CheckRunState::Completed),
-                conclusion: Some(Conclusion::Success),
-                details_url: Some(
-                    "https://logs.ofborg.org/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
-                        .to_string()
-                ),
-                external_id: Some("neatattemptid".to_string()),
-                head_sha: "abc123".to_string(),
-                output: Some(Output {
-                    title: "Success".to_string(),
-                    summary: "Attempted: foo
-
-The following builds were skipped because they don't evaluate on x86_64-linux: bar
-"
-                    .to_string(),
-                    text: Some(
-                        "## Partial log
-
-```
-make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[2]: Nothing to be done for 'install'.
-make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[1]: Nothing to be done for 'install-target'.
-make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
-removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
-post-installation fixup
-strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
-patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
-/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
-```"
-                        .to_string()
-                    ),
-                    annotations: None,
-                    images: None,
-                })
-            }
-        );
-    }
-
-    #[test]
-    pub fn test_check_failing_build() {
-        let result = LegacyBuildResult {
-            repo: Repo {
-                clone_url: "https://github.com/nixos/nixpkgs.git".to_owned(),
-                full_name: "NixOS/nixpkgs".to_owned(),
-                owner: "NixOS".to_owned(),
-                name: "nixpkgs".to_owned(),
-            },
-            pr: Pr {
-                head_sha: "abc123".to_owned(),
-                number: 2345,
-                target_branch: Some("master".to_owned()),
-            },
-            output: vec![
-                "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[2]: Nothing to be done for 'install'.".to_owned(),
-                "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[1]: Nothing to be done for 'install-target'.".to_owned(),
-                "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
-                "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
-                "post-installation fixup".to_owned(),
-                "strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip".to_owned(),
-                "patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
-                "/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
-            ],
-            attempt_id: "neatattemptid".to_owned(),
-            request_id: "bogus-request-id".to_owned(),
-            system: "x86_64-linux".to_owned(),
-            attempted_attrs: Some(vec!["foo".to_owned()]),
-            skipped_attrs: None,
-            status: BuildStatus::Failure,
-        };
-
-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
-
-        assert_eq!(
-            result_to_check(&result, timestamp),
-            CheckRunOptions {
-                name: "foo on x86_64-linux".to_string(),
-                actions: None,
-                started_at: None,
-                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
-                status: Some(CheckRunState::Completed),
-                conclusion: Some(Conclusion::Neutral),
-                details_url: Some(
-                    "https://logs.ofborg.org/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
-                        .to_string()
-                ),
-                external_id: Some("neatattemptid".to_string()),
-                head_sha: "abc123".to_string(),
-                output: Some(Output {
-                    title: "Failure".to_string(),
-                    summary: "Attempted: foo
-"
-                    .to_string(),
-                    text: Some(
-                        "## Partial log
-
-```
-make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[2]: Nothing to be done for 'install'.
-make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[1]: Nothing to be done for 'install-target'.
-make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
-removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
-post-installation fixup
-strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
-patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
-/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
-```"
-                        .to_string()
-                    ),
-                    annotations: None,
-                    images: None,
-                })
-            }
-        );
-    }
-
-    #[test]
-    pub fn test_check_timedout_build() {
-        let result = LegacyBuildResult {
-            repo: Repo {
-                clone_url: "https://github.com/nixos/nixpkgs.git".to_owned(),
-                full_name: "NixOS/nixpkgs".to_owned(),
-                owner: "NixOS".to_owned(),
-                name: "nixpkgs".to_owned(),
-            },
-            pr: Pr {
-                head_sha: "abc123".to_owned(),
-                number: 2345,
-                target_branch: Some("master".to_owned()),
-            },
-            output: vec![
-                "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[2]: Nothing to be done for 'install'.".to_owned(),
-                "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[1]: Nothing to be done for 'install-target'.".to_owned(),
-                "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
-                "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
-                "post-installation fixup".to_owned(),
-                "building of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' timed out after 1 seconds".to_owned(),
-                "error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed".to_owned(),
-            ],
-            attempt_id: "neatattemptid".to_owned(),
-            request_id: "bogus-request-id".to_owned(),
-            system: "x86_64-linux".to_owned(),
-            attempted_attrs: Some(vec!["foo".to_owned()]),
-            skipped_attrs: None,
-            status: BuildStatus::TimedOut,
-        };
-
-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
-
-        assert_eq!(
-            result_to_check(&result, timestamp),
-            CheckRunOptions {
-                name: "foo on x86_64-linux".to_string(),
-                actions: None,
-                started_at: None,
-                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
-                status: Some(CheckRunState::Completed),
-                conclusion: Some(Conclusion::Neutral),
-                details_url: Some(
-                    "https://logs.ofborg.org/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
-                        .to_string()
-                ),
-                external_id: Some("neatattemptid".to_string()),
-                head_sha: "abc123".to_string(),
-                output: Some(Output {
-                    title: "Timed out, unknown build status".to_string(),
-                    summary: "Attempted: foo
-
-Build timed out."
-                    .to_string(),
-                    text: Some(
-                        "## Partial log
-
-```
-make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[2]: Nothing to be done for 'install'.
-make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[1]: Nothing to be done for 'install-target'.
-make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
-removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
-post-installation fixup
-building of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' timed out after 1 seconds
-error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed
-```"
-                        .to_string()
-                    ),
-                    annotations: None,
-                    images: None,
-                })
-            }
-        );
-    }
-
-    #[test]
-    pub fn test_check_passing_build_unspecified_attributes() {
-        let result = LegacyBuildResult {
-            repo: Repo {
-                clone_url: "https://github.com/nixos/nixpkgs.git".to_owned(),
-                full_name: "NixOS/nixpkgs".to_owned(),
-                owner: "NixOS".to_owned(),
-                name: "nixpkgs".to_owned(),
-            },
-            pr: Pr {
-                head_sha: "abc123".to_owned(),
-                number: 2345,
-                target_branch: Some("master".to_owned()),
-            },
-            output: vec![
-                "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[2]: Nothing to be done for 'install'.".to_owned(),
-                "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[1]: Nothing to be done for 'install-target'.".to_owned(),
-                "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
-                "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
-                "post-installation fixup".to_owned(),
-                "strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip".to_owned(),
-                "patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
-                "/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
-            ],
-            attempt_id: "neatattemptid".to_owned(),
-            request_id: "bogus-request-id".to_owned(),
-            system: "x86_64-linux".to_owned(),
-            attempted_attrs: None,
-            skipped_attrs: None,
-            status: BuildStatus::Success,
-        };
-
-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
-
-        assert_eq!(
-            result_to_check(&result, timestamp),
-            CheckRunOptions {
-                name: "(unknown attributes) on x86_64-linux".to_string(),
-                actions: None,
-                started_at: None,
-                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
-                status: Some(CheckRunState::Completed),
-                conclusion: Some(Conclusion::Success),
-                details_url: Some(
-                    "https://logs.ofborg.org/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
-                        .to_string()
-                ),
-                external_id: Some("neatattemptid".to_string()),
-                head_sha: "abc123".to_string(),
-                output: Some(Output {
-                    title: "Success".to_string(),
-                    summary: "".to_string(),
-                    text: Some(
-                        "## Partial log
-
-```
-make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[2]: Nothing to be done for 'install'.
-make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[1]: Nothing to be done for 'install-target'.
-make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
-removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
-post-installation fixup
-strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
-patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
-/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
-```"
-                        .to_string()
-                    ),
-                    annotations: None,
-                    images: None,
-                })
-            }
-        );
-    }
-
-    #[test]
-    pub fn test_check_failing_build_unspecified_attributes() {
-        let result = LegacyBuildResult {
-            repo: Repo {
-                clone_url: "https://github.com/nixos/nixpkgs.git".to_owned(),
-                full_name: "NixOS/nixpkgs".to_owned(),
-                owner: "NixOS".to_owned(),
-                name: "nixpkgs".to_owned(),
-            },
-            pr: Pr {
-                head_sha: "abc123".to_owned(),
-                number: 2345,
-                target_branch: Some("master".to_owned()),
-            },
-            output: vec![
-                "make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[2]: Nothing to be done for 'install'.".to_owned(),
-                "make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'".to_owned(),
-                "make[1]: Nothing to be done for 'install-target'.".to_owned(),
-                "make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'".to_owned(),
-                "removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'".to_owned(),
-                "post-installation fixup".to_owned(),
-                "strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip".to_owned(),
-                "patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
-                "/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1".to_owned(),
-            ],
-            attempt_id: "neatattemptid".to_owned(),
-            request_id: "bogus-request-id".to_owned(),
-            system: "x86_64-linux".to_owned(),
-            attempted_attrs: None,
-            skipped_attrs: None,
-            status: BuildStatus::Failure,
-        };
-
-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
-
-        assert_eq!(
-            result_to_check(&result, timestamp),
-            CheckRunOptions {
-                name: "(unknown attributes) on x86_64-linux".to_string(),
-                actions: None,
-                started_at: None,
-                completed_at: Some("2023-04-20T13:37:42Z".to_string()),
-                status: Some(CheckRunState::Completed),
-                conclusion: Some(Conclusion::Neutral),
-                details_url: Some(
-                    "https://logs.ofborg.org/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
-                        .to_string()
-                ),
-                external_id: Some("neatattemptid".to_string()),
-                head_sha: "abc123".to_string(),
-                output: Some(Output {
-                    title: "Failure".to_string(),
-                    summary: "".to_string(),
-                    text: Some(
-                        "## Partial log
-
-```
-make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[2]: Nothing to be done for 'install'.
-make[2]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
-make[1]: Nothing to be done for 'install-target'.
-make[1]: Leaving directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1'
-removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info'
-post-installation fixup
-strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
-patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
-/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
-```"
-                        .to_string()
-                    ),
-                    annotations: None,
-                    images: None,
-                })
-            }
-        );
-    }
-
-    #[test]
-    pub fn test_check_no_attempt() {
-        let result = LegacyBuildResult {
-            repo: Repo {
-                clone_url: "https://github.com/nixos/nixpkgs.git".to_owned(),
-                full_name: "NixOS/nixpkgs".to_owned(),
-                owner: "NixOS".to_owned(),
-                name: "nixpkgs".to_owned(),
-            },
-            pr: Pr {
-                head_sha: "abc123".to_owned(),
-                number: 2345,
-                target_branch: Some("master".to_owned()),
-            },
-            output: vec!["foo".to_owned()],
-            attempt_id: "neatattemptid".to_owned(),
-            request_id: "bogus-request-id".to_owned(),
-            system: "x86_64-linux".to_owned(),
-            attempted_attrs: None,
-            skipped_attrs: Some(vec!["not-attempted".to_owned()]),
-            status: BuildStatus::Skipped,
-        };
-
-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
|
|
||||||
assert_eq!(
|
|
||||||
result_to_check(&result, timestamp),
|
|
||||||
CheckRunOptions {
|
|
||||||
name: "not-attempted on x86_64-linux".to_string(),
|
|
||||||
actions: None,
|
|
||||||
started_at: None,
|
|
||||||
completed_at: Some("2023-04-20T13:37:42Z".to_string()),
|
|
||||||
status: Some(CheckRunState::Completed),
|
|
||||||
conclusion: Some(Conclusion::Skipped),
|
|
||||||
details_url: Some("https://logs.ofborg.org/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid".to_string()),
|
|
||||||
external_id: Some("neatattemptid".to_string()),
|
|
||||||
head_sha: "abc123".to_string(),
|
|
||||||
output: Some(Output {
|
|
||||||
title: "No attempt".to_string(),
|
|
||||||
summary: "The following builds were skipped because they don\'t evaluate on x86_64-linux: not-attempted
|
|
||||||
".to_string(),
|
|
||||||
text: Some("## Partial log
|
|
||||||
|
|
||||||
```
|
|
||||||
foo
|
|
||||||
```".to_string()),
|
|
||||||
annotations: None,
|
|
||||||
images: None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
pub fn test_check_no_attempt_no_log() {
|
|
||||||
let result = LegacyBuildResult {
|
|
||||||
repo: Repo {
|
|
||||||
clone_url: "https://github.com/nixos/nixpkgs.git".to_owned(),
|
|
||||||
full_name: "NixOS/nixpkgs".to_owned(),
|
|
||||||
owner: "NixOS".to_owned(),
|
|
||||||
name: "nixpkgs".to_owned(),
|
|
||||||
},
|
|
||||||
pr: Pr {
|
|
||||||
head_sha: "abc123".to_owned(),
|
|
||||||
number: 2345,
|
|
||||||
target_branch: Some("master".to_owned()),
|
|
||||||
},
|
|
||||||
output: vec![],
|
|
||||||
attempt_id: "neatattemptid".to_owned(),
|
|
||||||
request_id: "bogus-request-id".to_owned(),
|
|
||||||
system: "x86_64-linux".to_owned(),
|
|
||||||
attempted_attrs: None,
|
|
||||||
skipped_attrs: Some(vec!["not-attempted".to_owned()]),
|
|
||||||
status: BuildStatus::Skipped,
|
|
||||||
};
|
|
||||||
|
|
||||||
let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
result_to_check(&result, timestamp),
|
|
||||||
CheckRunOptions {
|
|
||||||
name: "not-attempted on x86_64-linux".to_string(),
|
|
||||||
actions: None,
|
|
||||||
started_at: None,
|
|
||||||
completed_at: Some("2023-04-20T13:37:42Z".to_string()),
|
|
||||||
status: Some(CheckRunState::Completed),
|
|
||||||
conclusion: Some(Conclusion::Skipped),
|
|
||||||
details_url: Some("https://logs.ofborg.org/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid".to_string()),
|
|
||||||
external_id: Some("neatattemptid".to_string()),
|
|
||||||
head_sha: "abc123".to_string(),
|
|
||||||
output: Some(Output {
|
|
||||||
title: "No attempt".to_string(),
|
|
||||||
summary: "The following builds were skipped because they don\'t evaluate on x86_64-linux: not-attempted
|
|
||||||
".to_string(),
|
|
||||||
text: Some("No partial log is available.".to_string()),
|
|
||||||
annotations: None,
|
|
||||||
images: None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
|
ofborg/src/tasks/log_message_collector.rs
@@ -3,6 +3,7 @@ use crate::message::buildresult::BuildResult;
 use crate::worker;
 use crate::writetoline::LineWriter;
 
+use async_trait::async_trait;
 use std::fs::{self, File, OpenOptions};
 use std::io::Write;
 use std::path::{Component, Path, PathBuf};
@@ -55,6 +56,7 @@ fn validate_path_segment(segment: &Path) -> Result<(), String> {
 }
 
 impl LogMessageCollector {
+    #[must_use]
     pub fn new(log_root: PathBuf, max_open: usize) -> LogMessageCollector {
         LogMessageCollector {
             handles: LruCache::new(max_open),
@@ -64,33 +66,33 @@ impl LogMessageCollector {
 
     pub fn write_metadata(&mut self, from: &LogFrom, data: &BuildLogStart) -> Result<(), String> {
         let metapath = self.path_for_metadata(from)?;
-        let mut fp = self.open_file(&metapath)?;
+        let mut fp = Self::open_file(&metapath)?;
 
         match serde_json::to_string(data) {
             Ok(data) => {
                 if let Err(e) = fp.write(data.as_bytes()) {
-                    Err(format!("Failed to write metadata: {:?}", e))
+                    Err(format!("Failed to write metadata: {e:?}"))
                 } else {
                     Ok(())
                 }
             }
-            Err(e) => Err(format!("Failed to stringify metadata: {:?}", e)),
+            Err(e) => Err(format!("Failed to stringify metadata: {e:?}")),
         }
     }
 
     pub fn write_result(&mut self, from: &LogFrom, data: &BuildResult) -> Result<(), String> {
         let path = self.path_for_result(from)?;
-        let mut fp = self.open_file(&path)?;
+        let mut fp = Self::open_file(&path)?;
 
         match serde_json::to_string(data) {
             Ok(data) => {
                 if let Err(e) = fp.write(data.as_bytes()) {
-                    Err(format!("Failed to write result: {:?}", e))
+                    Err(format!("Failed to write result: {e:?}"))
                 } else {
                     Ok(())
                 }
             }
-            Err(e) => Err(format!("Failed to stringify result: {:?}", e)),
        }
     }
 
@@ -102,7 +104,7 @@ impl LogMessageCollector {
             .expect("handles just contained the key"))
         } else {
             let logpath = self.path_for_log(from)?;
-            let fp = self.open_file(&logpath)?;
+            let fp = Self::open_file(&logpath)?;
             let writer = LineWriter::new(fp);
             self.handles.insert(from.clone(), writer);
             if let Some(handle) = self.handles.get_mut(from) {
@@ -142,22 +144,20 @@ impl LogMessageCollector {
             Ok(location)
         } else {
             Err(format!(
-                "Calculating the log location for {:?} resulted in an invalid path {:?}",
-                from, location
+                "Calculating the log location for {from:?} resulted in an invalid path {location:?}"
             ))
         }
     }
 
-    fn open_file(&self, path: &Path) -> Result<File, String> {
+    fn open_file(path: &Path) -> Result<File, String> {
         let dir = path.parent().unwrap();
         fs::create_dir_all(dir).unwrap();
 
         let attempt = OpenOptions::new()
             .append(true)
             .read(true)
-            .write(true)
             .create(true)
-            .open(&path);
+            .open(path);
 
         match attempt {
             Ok(handle) => Ok(handle),
@@ -169,10 +169,11 @@ impl LogMessageCollector {
         }
     }
 
+#[async_trait]
 impl worker::SimpleWorker for LogMessageCollector {
     type J = LogMessage;
 
-    fn msg_to_job(
+    async fn msg_to_job(
         &mut self,
         routing_key: &str,
         _: &Option<String>,
@@ -196,7 +197,7 @@ impl worker::SimpleWorker for LogMessageCollector {
                     attempt_id = msg.legacy().attempt_id;
                     message = MsgType::Finish(Box::new(msg));
                 } else {
-                    return Err(format!("failed to decode job: {:?}", decode_msg));
+                    return Err(format!("failed to decode job: {decode_msg:?}"));
                 }
             }
         }
@@ -210,7 +211,7 @@ impl worker::SimpleWorker for LogMessageCollector {
         })
     }
 
-    fn consumer(&mut self, job: &LogMessage) -> worker::Actions {
+    async fn consumer(&mut self, _chan: &mut lapin::Channel, job: &LogMessage) -> worker::Actions {
         match job.message {
             MsgType::Start(ref start) => {
                 self.write_metadata(&job.from, start)
@@ -224,7 +225,7 @@ impl worker::SimpleWorker for LogMessageCollector {
             MsgType::Msg(ref message) => {
                 let handle = self.handle_for(&job.from).unwrap();
 
-                handle.write_to_line((message.line_number - 1) as usize, &message.output);
+                handle.write_to_line(message.line_number - 1, &message.output);
             }
             MsgType::Finish(ref finish) => {
                 self.write_result(&job.from, finish)
@@ -237,6 +238,7 @@ impl worker::SimpleWorker for LogMessageCollector {
 }
 
 #[cfg(test)]
+#[cfg(any())]
 mod tests {
     use super::*;
     use crate::message::buildresult::{BuildStatus, V1Tag};
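Aside on the last hunk above: `#[cfg(any())]` is a condition that is never satisfied (an empty `any()` has no true predicate), so stacking it under `#[cfg(test)]` compiles the whole tests module out without deleting its source. A minimal, self-contained illustration of the idiom:

// `any()` with no arguments is always false, so this item is dropped
// entirely at compile time.
#[cfg(any())]
mod disabled {
    // Never compiled, so this error never fires.
    compile_error!("unreachable");
}

fn main() {}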
ofborg/src/tasks/mod.rs
@@ -2,7 +2,7 @@ pub mod build;
 pub mod eval;
 pub mod evaluate;
 pub mod evaluationfilter;
-pub mod githubcommentfilter;
-pub mod githubcommentposter;
 pub mod log_message_collector;
+pub mod pastebin_collector;
 pub mod statscollector;
+pub mod status_check_collector;
ofborg/src/tasks/pastebin_collector.rs (new file)
@@ -0,0 +1,110 @@
use std::{
    fs::File,
    io::{BufWriter, Write},
    path::PathBuf,
};

use async_trait::async_trait;
use tracing::{debug_span, error, warn};
use zstd::stream::write::Encoder;

use crate::{
    utils::pastebin::{Pastebin, PersistedPastebin},
    worker,
};

pub struct PastebinCollector {
    pastebin_root: PathBuf,
    db_path: PathBuf,
}

impl PastebinCollector {
    #[must_use]
    pub fn new(pastebin_root: PathBuf, db_path: PathBuf) -> Self {
        Self {
            pastebin_root,
            db_path,
        }
    }
}

#[async_trait]
impl worker::SimpleWorker for PastebinCollector {
    type J = Pastebin;

    async fn msg_to_job(
        &mut self,
        _routing_key: &str,
        _: &Option<String>,
        body: &[u8],
    ) -> Result<Self::J, String> {
        match serde_json::from_slice(body) {
            Ok(e) => Ok(e),
            Err(e) => {
                error!(
                    "Failed to deserialize Pastebin: {:?}",
                    String::from_utf8(body.to_vec())
                );
                panic!("{:?}", e);
            }
        }
    }

    async fn consumer(&mut self, _chan: &mut lapin::Channel, job: &Self::J) -> worker::Actions {
        let span = debug_span!("pastebin", title = ?job.title);
        let _enter = span.enter();

        let cfg = jfs::Config {
            single: true,
            ..jfs::Config::default()
        };
        let db = jfs::Store::new_with_cfg(&self.db_path, cfg);
        if db.is_err() {
            warn!("could not open database: {:?}", db);
            return vec![worker::Action::NackRequeue];
        }
        let db = db.unwrap();

        let final_pastebin: PersistedPastebin = job.into();

        // Write the compressed contents to disk with zstd
        let tgt_path = self.pastebin_root.join(&final_pastebin.path);
        let file = File::create(&tgt_path);
        if file.is_err() {
            warn!("could not open file {}: {:?}", tgt_path.display(), file);
            return vec![worker::Action::NackRequeue];
        }

        let file = file.unwrap();
        let writer = BufWriter::new(file);
        let encoder = Encoder::new(writer, 1); // Compression level 1 (adjust as needed)

        if encoder.is_err() {
            warn!("encoder failed");
            return vec![worker::Action::NackRequeue];
        }

        let mut encoder = encoder.unwrap();

        if encoder.write_all(job.contents.as_bytes()).is_err() {
            warn!("failed to write all bytes");
            return vec![worker::Action::NackRequeue];
        }

        // Finish the compression process and ensure all data is written to the file
        if encoder.finish().is_err() {
            warn!("failed to finish the compression");
            return vec![worker::Action::NackRequeue];
        }

        if db.save(&final_pastebin).is_err() {
            warn!("failed to save the pastebin metadata");
            return vec![worker::Action::NackRequeue];
        }

        vec![worker::Action::Ack]
    }
}

#[cfg(test)]
mod tests {}
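For context, here is a sketch of the read side that this worker implies. It is an assumption on my part that consumers read pastes exactly as written above (one zstd frame per file under `pastebin_root`); the `read_paste` helper is hypothetical and not part of this change:

use std::{fs::File, io::Read, path::Path};

fn read_paste(pastebin_root: &Path, rel_path: &Path) -> std::io::Result<String> {
    let file = File::open(pastebin_root.join(rel_path))?;
    // zstd::stream::read::Decoder mirrors the write::Encoder used by the worker.
    let mut decoder = zstd::stream::read::Decoder::new(file)?;
    let mut contents = String::new();
    decoder.read_to_string(&mut contents)?;
    Ok(contents)
}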
ofborg/src/tasks/statscollector.rs
@@ -1,6 +1,7 @@
 use crate::stats;
 use crate::worker;
 
+use async_trait::async_trait;
 use tracing::error;
 
 pub struct StatCollectorWorker<E> {
@@ -14,29 +15,39 @@ impl<E: stats::SysEvents + 'static> StatCollectorWorker<E> {
     }
 }
 
+#[async_trait]
 impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorker<E> {
     type J = stats::EventMessage;
 
-    fn msg_to_job(&mut self, _: &str, _: &Option<String>, body: &[u8]) -> Result<Self::J, String> {
-        match serde_json::from_slice(body) {
-            Ok(e) => Ok(e),
-            Err(_) => {
+    async fn msg_to_job(
+        &mut self,
+        _: &str,
+        _: &Option<String>,
+        body: &[u8],
+    ) -> Result<Self::J, String> {
+        if let Ok(e) = serde_json::from_slice(body) {
+            Ok(e)
+        } else {
             let mut modified_body: Vec<u8> = vec![b"\""[0]];
             modified_body.append(&mut body.to_vec());
             modified_body.push(b"\""[0]);
 
             match serde_json::from_slice(&modified_body) {
                 Ok(e) => {
-                    self.events.notify(stats::Event::StatCollectorLegacyEvent(
-                        stats::event_metric_name(&e),
-                    ));
+                    self.events
+                        .notify(stats::Event::StatCollectorLegacyEvent(
+                            stats::event_metric_name(&e),
+                        ))
+                        .await;
                     Ok(stats::EventMessage {
-                        sender: "".to_owned(),
+                        sender: String::new(),
                         events: vec![e],
                     })
                 }
                 Err(e) => {
-                    self.events.notify(stats::Event::StatCollectorBogusEvent);
+                    self.events
+                        .notify(stats::Event::StatCollectorBogusEvent)
+                        .await;
                     error!(
                         "Failed to decode message: {:?}, Err: {:?}",
                         String::from_utf8(body.to_vec()),
@@ -47,11 +58,14 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorker
             }
         }
     }
-    }
 
-    fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions {
+    async fn consumer(
+        &mut self,
+        _chan: &mut lapin::Channel,
+        job: &stats::EventMessage,
+    ) -> worker::Actions {
         let sender = job.sender.clone();
-        for event in job.events.iter() {
+        for event in &job.events {
             self.collector.record(sender.clone(), event.clone());
         }
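Illustration of the legacy fallback in `msg_to_job` above: a bare, unquoted body is not valid JSON, but wrapping it in double quotes turns it into a JSON string that `serde_json` can parse. A self-contained sketch (the `requote` helper and the event name are invented for the example):

// Mirrors the quote-wrapping the worker does before re-parsing.
fn requote(body: &[u8]) -> Vec<u8> {
    let mut modified_body: Vec<u8> = vec![b'"'];
    modified_body.extend_from_slice(body);
    modified_body.push(b'"');
    modified_body
}

#[test]
fn legacy_bodies_parse_after_quoting() {
    let body = b"some-legacy-event";
    // The raw bytes are not valid JSON...
    assert!(serde_json::from_slice::<String>(body).is_err());
    // ...but the quoted form parses as a JSON string.
    let parsed: String = serde_json::from_slice(&requote(body)).unwrap();
    assert_eq!(parsed, "some-legacy-event");
}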
ofborg/src/tasks/status_check_collector.rs (new file)
@@ -0,0 +1,55 @@
use std::path::PathBuf;

use async_trait::async_trait;
use tracing::{debug_span, error};

use crate::worker;

#[derive(Serialize, Deserialize, Debug)]
pub enum StatusCheckRPCMessage {
    ListStatuses,
    ListChecks,
}

#[allow(dead_code)]
pub struct StatusCheckCollector {
    db_path: PathBuf,
}

impl StatusCheckCollector {
    #[must_use]
    pub fn new(db_path: PathBuf) -> Self {
        Self { db_path }
    }
}

// RPC API worker
#[async_trait]
impl worker::SimpleWorker for StatusCheckCollector {
    type J = StatusCheckRPCMessage;

    async fn msg_to_job(
        &mut self,
        _method: &str,
        _headers: &Option<String>,
        body: &[u8],
    ) -> Result<Self::J, String> {
        match serde_json::from_slice(body) {
            Ok(e) => Ok(e),
            Err(e) => {
                error!(
                    "Failed to deserialize StatusCheckRPCMessage: {:?}",
                    String::from_utf8(body.to_vec())
                );
                panic!("{:?}", e);
            }
        }
    }

    async fn consumer(&mut self, _chan: &mut lapin::Channel, _job: &Self::J) -> worker::Actions {
        let span = debug_span!("command");
        let _enter = span.enter();

        vec![worker::Action::Ack]
    }
}
ofborg/src/test_scratch.rs
@@ -9,12 +9,13 @@ pub struct TestScratch {
 }
 
 impl TestScratch {
+    #[must_use]
     pub fn new_dir(ident: &str) -> TestScratch {
         let scratch = TestScratch {
             root: Path::new(env!("CARGO_MANIFEST_DIR"))
                 .join("test-scratch")
                 .join("dirs")
-                .join(&format!("dir-{}", ident)),
+                .join(format!("dir-{ident}")),
         };
 
         TestScratch::create_dir(&scratch);
@@ -22,12 +23,13 @@ impl TestScratch {
         scratch
     }
 
+    #[must_use]
     pub fn new_file(ident: &str) -> TestScratch {
         let scratch = TestScratch {
             root: Path::new(env!("CARGO_MANIFEST_DIR"))
                 .join("test-scratch")
                 .join("files")
-                .join(&format!("file-{}", ident)),
+                .join(format!("file-{ident}")),
         };
 
         TestScratch::create_dir(&scratch);
@@ -40,10 +42,12 @@ impl TestScratch {
         fs::create_dir_all(target).unwrap();
     }
 
+    #[must_use]
     pub fn path(&self) -> PathBuf {
         self.root.clone()
     }
 
+    #[must_use]
     pub fn string(&self) -> String {
         self.path().to_str().unwrap().to_owned()
     }
ofborg/src/utils/mod.rs (new file)
@@ -0,0 +1 @@
pub mod pastebin;
ofborg/src/utils/pastebin.rs (new file)
@@ -0,0 +1,49 @@
// A micro-pastebin service.

use std::path::PathBuf;

use base64::{prelude::BASE64_URL_SAFE_NO_PAD, Engine};
use lapin::Channel;

use crate::easyamqp::ChannelExt;

#[derive(Serialize, Deserialize, Debug)]
pub struct Pastebin {
    pub title: String,
    pub contents: String,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct PersistedPastebin {
    pub title: String,
    pub uri: String,
    pub path: PathBuf,
}

impl From<&Pastebin> for PersistedPastebin {
    fn from(value: &Pastebin) -> Self {
        let uuid = uuid::Uuid::new_v4();
        let uuid_str = uuid.as_bytes();
        let encoded_uri = BASE64_URL_SAFE_NO_PAD.encode(uuid_str);
        let path = format!("pastes/{}", uuid.simple());
        PersistedPastebin {
            title: value.title.clone(),
            uri: encoded_uri,
            path: PathBuf::from(path),
        }
    }
}

pub async fn make_pastebin(
    chan: &mut Channel,
    title: &str,
    contents: String,
) -> Result<PersistedPastebin, lapin::Error> {
    let pastebin = Pastebin {
        title: title.to_owned(),
        contents,
    };

    chan.send_request(Some("pastebin-log"), None, &pastebin)
        .await
}
ofborg/src/vcs/commit_status.rs (new file)
@@ -0,0 +1,87 @@
use std::sync::Arc;

use tracing::warn;

use crate::vcs::generic::State;

use super::generic::VersionControlSystemAPI;

pub struct CommitStatus {
    api: Arc<dyn VersionControlSystemAPI>,
    repo: crate::message::Repo,
    sha: String,
    context: String,
    description: String,
    url: String,
}

impl CommitStatus {
    pub fn new(
        api: Arc<dyn VersionControlSystemAPI>,
        repo: crate::message::Repo,
        sha: String,
        context: String,
        description: String,
        url: Option<String>,
    ) -> CommitStatus {
        let mut stat = CommitStatus {
            api,
            repo,
            sha,
            context,
            description,
            url: String::new(),
        };

        stat.set_url(url);

        stat
    }

    pub fn set_url(&mut self, url: Option<String>) {
        self.url = url.unwrap_or_default();
    }

    pub async fn set_with_description(
        &mut self,
        description: &str,
        state: State,
    ) -> Result<(), CommitStatusError> {
        self.set_description(description.to_owned());
        self.set(state).await
    }

    pub fn set_description(&mut self, description: String) {
        self.description = description;
    }

    pub async fn set(&self, state: State) -> Result<(), CommitStatusError> {
        let desc = if self.description.len() >= 140 {
            warn!(
                "description is over 140 char; truncating: {:?}",
                &self.description
            );
            self.description.chars().take(140).collect()
        } else {
            self.description.clone()
        };

        self.api
            .create_commit_statuses(
                &self.repo,
                self.sha.clone(),
                state,
                self.context.clone(),
                desc,
                self.url.clone(),
            )
            .await
    }
}

#[derive(Debug)]
pub enum CommitStatusError {
    ExpiredCreds(()),
    MissingSha(()),
    Error(()),
}
ofborg/src/vcs/generic.rs (new file)
@@ -0,0 +1,133 @@
//! Set of generic structures to abstract over a VCS in a rich way.
//! Not all VCS can represent the full set of states, so implementations
//! will have to downgrade richer values to the closest representation.
//!
//! Gerrit is the first-class supported model.
use futures_util::future::BoxFuture;
use serde::{Deserialize, Serialize};

use crate::message::{Change, Repo};

use super::commit_status::CommitStatusError;

pub enum IssueState {
    Open,
    Closed,
}

pub struct Account {
    pub username: String,
}

pub struct Issue {
    pub title: String,
    pub number: u64,
    pub repo: Repo,
    pub state: IssueState,
    pub created_by: Account,
}

pub struct Repository {}

pub struct ChangeReviewers {
    pub entity_reviewers: Vec<String>,
    pub team_reviewers: Vec<String>,
}

impl Issue {
    #[must_use]
    pub fn is_wip(&self) -> bool {
        false
    }
}

pub trait VersionControlSystemAPI: Sync + Send {
    fn get_repository(&self, repo: &crate::message::Repo) -> Repository;
    fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture<Vec<Change>>;
    fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture<Option<Change>>;
    fn get_issue(
        &self,
        repo: &crate::message::Repo,
        number: u64,
    ) -> BoxFuture<Result<Issue, String>>;
    fn update_labels(
        &self,
        repo: &crate::message::Repo,
        number: u64,
        add: &[String],
        remove: &[String],
    ) -> BoxFuture<()>;
    fn get_existing_reviewers(
        &self,
        repo: &crate::message::Repo,
        number: u64,
    ) -> BoxFuture<ChangeReviewers>;
    fn request_reviewers(
        &self,
        repo: &crate::message::Repo,
        number: u64,
        entity_reviewers: Vec<String>,
        team_reviewers: Vec<String>,
    ) -> BoxFuture<()>;
    fn create_commit_statuses(
        &self,
        repo: &crate::message::Repo,
        sha: String,
        state: State,
        context: String,
        description: String,
        target_url: String,
    ) -> BoxFuture<Result<(), CommitStatusError>>;
    fn create_check_statuses(
        &self,
        repo: &crate::message::Repo,
        checks: Vec<CheckRunOptions>,
    ) -> BoxFuture<()>;
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum CheckRunState {
    Runnable,
    Running,
    Scheduled,
    Completed,
}

#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)]
#[serde(rename_all = "snake_case")]
pub enum State {
    Pending,
    Error,
    Failure,
    Success,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum Conclusion {
    Skipped,
    Success,
    Failure,
    Neutral,
    Cancelled,
    TimedOut,
    ActionRequired,
}

#[derive(Debug, Serialize, PartialEq)]
pub struct CheckRunOptions {
    pub name: String,
    pub head_sha: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub details_url: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub external_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status: Option<CheckRunState>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub started_at: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub conclusion: Option<Conclusion>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub completed_at: Option<String>,
}
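For readers new to the `BoxFuture` pattern this trait uses: each method returns a boxed future, so implementors write async blocks and call `.boxed()` from `futures_util::FutureExt`. A minimal no-op implementation sketch, written as if it lived in this module (`NullVcs` is invented purely for illustration):

use futures_util::{future::BoxFuture, FutureExt};

struct NullVcs;

impl VersionControlSystemAPI for NullVcs {
    fn get_repository(&self, _repo: &Repo) -> Repository {
        Repository {}
    }
    fn get_changes(&self, _repo: &Repo) -> BoxFuture<Vec<Change>> {
        // An async block produces the future; `.boxed()` pins and boxes it.
        async { vec![] }.boxed()
    }
    fn get_change(&self, _repo: &Repo, _number: u64) -> BoxFuture<Option<Change>> {
        async { None }.boxed()
    }
    fn get_issue(&self, _repo: &Repo, _number: u64) -> BoxFuture<Result<Issue, String>> {
        async { Err("NullVcs tracks no issues".to_owned()) }.boxed()
    }
    fn update_labels(
        &self,
        _repo: &Repo,
        _number: u64,
        _add: &[String],
        _remove: &[String],
    ) -> BoxFuture<()> {
        async {}.boxed()
    }
    fn get_existing_reviewers(&self, _repo: &Repo, _number: u64) -> BoxFuture<ChangeReviewers> {
        async {
            ChangeReviewers {
                entity_reviewers: vec![],
                team_reviewers: vec![],
            }
        }
        .boxed()
    }
    fn request_reviewers(
        &self,
        _repo: &Repo,
        _number: u64,
        _entity_reviewers: Vec<String>,
        _team_reviewers: Vec<String>,
    ) -> BoxFuture<()> {
        async {}.boxed()
    }
    fn create_commit_statuses(
        &self,
        _repo: &Repo,
        _sha: String,
        _state: State,
        _context: String,
        _description: String,
        _target_url: String,
    ) -> BoxFuture<Result<(), CommitStatusError>> {
        async { Ok(()) }.boxed()
    }
    fn create_check_statuses(&self, _repo: &Repo, _checks: Vec<CheckRunOptions>) -> BoxFuture<()> {
        async {}.boxed()
    }
}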
ofborg/src/vcs/gerrit/checks.rs (new file)
@@ -0,0 +1,159 @@
use crate::vcs::generic::CheckRunState;
use serde::{Deserialize, Serialize};

/// Port from <https://gerrit.googlesource.com/gerrit/+/master/polygerrit-ui/app/api/checks.ts>

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum RunStatus {
    Runnable,
    Running,
    Scheduled,
    Completed,
}

impl From<RunStatus> for CheckRunState {
    fn from(value: RunStatus) -> Self {
        match value {
            RunStatus::Runnable => CheckRunState::Runnable,
            RunStatus::Running => CheckRunState::Running,
            RunStatus::Scheduled => CheckRunState::Scheduled,
            RunStatus::Completed => CheckRunState::Completed,
        }
    }
}

impl From<CheckRunState> for RunStatus {
    fn from(value: CheckRunState) -> Self {
        match value {
            CheckRunState::Runnable => Self::Runnable,
            CheckRunState::Running => Self::Running,
            CheckRunState::Scheduled => Self::Scheduled,
            CheckRunState::Completed => Self::Completed,
        }
    }
}

#[allow(dead_code)]
#[derive(Debug, Serialize, PartialEq)]
struct CheckRun {
    #[serde(skip_serializing_if = "Option::is_none")]
    change: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    patchset: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    attempt: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    external_id: Option<String>,
    check_name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    check_description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    check_link: Option<String>,
    // defaults to false
    #[serde(skip_serializing_if = "Option::is_none")]
    is_ai_powered: Option<bool>,
    status: RunStatus,
    #[serde(skip_serializing_if = "Option::is_none")]
    status_description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    status_link: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    label_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    scheduled_timestamp: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    started_timestamp: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    finished_timestamp: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    results: Vec<CheckResult>,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CheckResult {
    #[serde(skip_serializing_if = "Option::is_none")]
    external_id: Option<String>,
    category: Category,
    summary: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    message: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tags: Vec<Tag>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    links: Vec<Link>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    code_pointers: Vec<CodePointer>,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum Category {
    Success,
    Info,
    Warning,
    Error,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum TagColor {
    Gray,
    Yellow,
    Pink,
    Purple,
    Cyan,
    Brown,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Tag {
    name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    tooltip: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    color: Option<TagColor>,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Link {
    url: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    tooltip: Option<String>,
    primary: bool,
    icon: LinkIcon,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CodePointer {
    path: String,
    range: CommentRange,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum LinkIcon {
    External,
    Image,
    History,
    // actually this is X_Y uppercase
    Download,
    DownloadMobile,
    HelpPage,
    ReportBug,
    Code,
    FilePresent,
    ViewTimeline,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CommentRange {
    // 1-based
    start_line: u64,
    // 0-based
    start_character: u64,
    // 1-based
    end_line: u64,
    // 0-based
    end_character: u64,
}
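A minimal in-module test sketch of the conversions above (illustrative only; it checks that the two `From` impls are inverses and that serialization follows Gerrit's UPPERCASE convention):

#[cfg(test)]
mod tests {
    use super::RunStatus;
    use crate::vcs::generic::CheckRunState;

    #[test]
    fn run_status_round_trips() {
        // Generic state -> Gerrit RunStatus -> generic state is lossless
        // because both enums enumerate the same four states.
        let back = RunStatus::from(CheckRunState::from(RunStatus::Running));
        assert_eq!(back, RunStatus::Running);

        // The UPPERCASE rename matches Gerrit's checks wire format.
        assert_eq!(
            serde_json::to_string(&RunStatus::Running).unwrap(),
            "\"RUNNING\""
        );
    }
}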
ofborg/src/vcs/gerrit/data_structures.rs (new file)
@@ -0,0 +1,388 @@
use std::collections::HashMap;

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
pub struct Account {
    pub name: Option<String>,     // User's full name, if configured
    pub email: Option<String>,    // User's preferred email address
    pub username: Option<String>, // User's username, if configured
}

impl From<Vec<Account>> for crate::vcs::generic::ChangeReviewers {
    fn from(value: Vec<Account>) -> Self {
        // FIXME: I don't think Gerrit remembers when we added a group instead of entities.
        crate::vcs::generic::ChangeReviewers {
            entity_reviewers: value
                .into_iter()
                // FIXME: this is quite the assumption; let's relax it by requiring at _least_
                // one identity identifier.
                .map(|a| a.username.expect("Expected username"))
                .collect(),
            team_reviewers: vec![],
        }
    }
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Approval {
    pub r#type: String,      // Internal name of the approval
    pub description: String, // Human-readable category of the approval
    pub value: String,       // Value assigned by the approval (usually a numerical score)
    #[serde(rename = "oldValue")]
    pub old_value: Option<String>, // Previous approval score, if present
    #[serde(rename = "grantedOn")]
    pub granted_on: Option<u64>, // Time in seconds since the UNIX epoch
    pub by: Option<Account>, // Reviewer of the patch set
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "UPPERCASE")]
pub enum ChangeType {
    Added,    // The file is being created/introduced by this patch
    Modified, // The file already exists and has updated content
    Deleted,  // The file existed, but is being removed by this patch
    Renamed,  // The file is renamed
    Copied,   // The file is copied from another file
    Rewrite,  // The file was rewritten
}

#[derive(Serialize, Deserialize, Debug)]
pub struct PatchFile {
    pub file: String, // Name of the file, or new name if renamed
    #[serde(rename = "fileOld")]
    pub file_old: Option<String>, // Old name of the file, if renamed
    pub r#type: ChangeType, // Type of change (ADDED, MODIFIED, DELETED, etc.)
    pub insertions: i64, // Number of insertions in the patch
    pub deletions: i64, // Number of deletions in the patch
}

#[derive(Serialize, Deserialize, Debug)]
pub struct PatchSetComment {
    pub file: String,      // Name of the file on which the comment was added
    pub line: u64,         // Line number at which the comment was added
    pub reviewer: Account, // Account that added the comment
    pub message: String,   // Comment text
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum Kind {
    Rework,                         // Nontrivial content changes
    TrivialRebase,                  // Conflict-free merge with same commit message
    TrivialRebaseWithMessageUpdate, // Conflict-free merge with message update
    MergeFirstParentUpdate,         // Conflict-free change of the first (left) parent
    NoCodeChange,                   // No code change; same tree and parent tree
    NoChange,                       // No changes; same commit message and tree
}

#[derive(Serialize, Deserialize, Debug)]
pub struct PatchSet {
    pub number: u64,          // Patchset number
    pub revision: String,     // Git commit for this patchset
    pub parents: Vec<String>, // List of parent revisions
    pub r#ref: String,        // Git reference pointing at the revision
    pub uploader: Account,    // Uploader of the patchset
    pub author: Account,      // Author of the patchset
    #[serde(rename = "createdOn")]
    pub created_on: u64, // Time in seconds since the UNIX epoch
    pub kind: Kind, // Kind of change ("REWORK", "TRIVIAL_REBASE", etc.)
    #[serde(default = "Default::default")]
    pub approvals: Vec<Approval>, // Approvals granted
    #[serde(default = "Default::default")]
    pub comments: Vec<PatchSetComment>, // All comments for this patchset
    #[serde(default = "Default::default")]
    pub files: Vec<String>, // All changed files in this patchset
    #[serde(rename = "sizeInsertions")]
    pub size_insertions: i64, // Size of insertions
    #[serde(rename = "sizeDeletions")]
    pub size_deletions: i64, // Size of deletions
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ReviewerMessage {
    #[serde(rename = "message")]
    pub comment_text: String, // Comment text added by the reviewer
    pub timestamp: u64,    // Time in seconds since the UNIX epoch when this comment was added
    pub reviewer: Account, // Account of the reviewer who added the comment
}

#[derive(Serialize, Deserialize, Debug)]
pub struct TrackingId {
    pub system: String, // Name of the system from the gerrit.config file
    pub id: String,     // ID number scraped out of the commit message
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ChangeDependency {
    pub id: String,       // Change identifier
    pub number: u64,      // Change number
    pub revision: String, // Patchset revision
    pub r#ref: String,    // Ref name
    #[serde(rename = "isCurrentPatchSet")]
    pub is_current_patch_set: bool, // If the revision is the current patchset of the change
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "UPPERCASE")]
pub enum ChangeStatus {
    New,       // Change is still being reviewed
    Merged,    // Change has been merged to its branch
    Abandoned, // Change was abandoned by its owner or administrator
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "UPPERCASE")]
pub enum SubmitStatus {
    Ok,        // The change is ready for submission or already submitted
    NotReady,  // The change is missing a required label
    RuleError, // An internal server error occurred preventing computation
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Label {
    pub label: String,       // Name of the label
    pub status: LabelStatus, // Status of the label
    pub by: Account,         // The account that applied the label
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "UPPERCASE")]
pub enum LabelStatus {
    Ok,         // This label provides what is necessary for submission
    Reject,     // This label prevents the change from being submitted
    Need,       // The label is required for submission but has not been satisfied
    May,        // The label may be set but isn't necessary for submission
    Impossible, // The label is required but impossible to complete
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Requirement {
    #[serde(rename = "fallbackText")]
    pub fallback_text: String, // Human-readable description of the requirement
    pub r#type: String, // Alphanumerical string identifying the requirement
    pub data: Option<HashMap<String, String>>, // Additional key-value data linked to this requirement
}

#[derive(Serialize, Deserialize, Debug)]
pub struct SubmitRecord {
    pub status: SubmitStatus,       // Current submit status
    pub labels: Option<Vec<Label>>, // State of each code review label attribute
    pub requirements: Vec<Requirement>, // Requirements for submission
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Change {
    pub project: String,
    pub branch: String,
    pub topic: Option<String>,
    pub id: String,
    #[serde(rename = "number")]
    pub change_number: Option<u64>, // Deprecated, but keeping it optional
    pub subject: String,
    pub owner: Account,
    pub url: String,
    pub commit_message: Option<String>,
    #[serde(default = "Default::default")]
    pub hashtags: Vec<String>,
    #[serde(rename = "createdOn")]
    pub created_on: u64, // Time in seconds since UNIX epoch
    #[serde(rename = "lastUpdated")]
    pub last_updated: Option<u64>, // Time in seconds since UNIX epoch
    pub open: Option<bool>,
    pub status: ChangeStatus, // "NEW", "MERGED", or "ABANDONED"
    pub private: Option<bool>,
    pub wip: Option<bool>, // Work in progress
    #[serde(default = "Default::default")]
    pub comments: Vec<ReviewerMessage>, // Inline/file comments
    #[serde(rename = "trackingIds", default = "Default::default")]
    pub tracking_ids: Vec<TrackingId>, // Links to issue tracking systems
    #[serde(rename = "currentPatchSet")]
    pub current_patch_set: Option<PatchSet>,
    #[serde(rename = "patchSets", default = "Default::default")]
    pub patch_sets: Vec<PatchSet>, // All patch sets
    #[serde(rename = "dependsOn", default = "Default::default")]
    pub depends_on: Vec<ChangeDependency>, // Dependencies
    #[serde(rename = "neededBy", default = "Default::default")]
    pub needed_by: Vec<ChangeDependency>, // Reverse dependencies
    #[serde(rename = "submitRecords", default = "Default::default")]
    pub submit_records: Vec<SubmitRecord>, // Submission information
    #[serde(rename = "allReviewers", default = "Default::default")]
    pub all_reviewers: Vec<Account>, // List of all reviewers
}

impl From<Change> for crate::message::Change {
    fn from(value: Change) -> Self {
        Self {
            target_branch: Some(value.branch),
            // While the change number is deprecated, we actually need it.
            // FIXME: enforce type-level checking of this.
            number: value.change_number.unwrap(),
            // FIXME: that's not good…
            // Some events like `comment-added` do not include the current patch set
            // because it's already included in another field; we could probably do another
            // conversion pass to improve the type safety.
            head_sha: value.current_patch_set.unwrap().revision,
        }
    }
}

#[derive(Serialize, Deserialize, Debug)]
pub struct RefUpdate {
    #[serde(rename = "oldRev")]
    pub old_rev: String, // The old value of the ref, prior to the update
    #[serde(rename = "newRev")]
    pub new_rev: String, // The new value the ref was updated to
    #[serde(rename = "refName")]
    pub ref_name: String, // Full ref name within the project
    pub project: String, // Project path in Gerrit
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub enum GerritStreamEvent {
    ChangeAbandoned {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        abandoner: Account,
        reason: String,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    ChangeDeleted {
        change: Change,
        deleter: Account,
    },
    ChangeMerged {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        submitter: Account,
        #[serde(rename = "newRev")]
        new_rev: String,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    ChangeRestored {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        restorer: Account,
        reason: String,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    CommentAdded {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        author: Account,
        #[serde(default = "Default::default")]
        approvals: Vec<Approval>,
        comment: Option<String>,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    DroppedOutput,
    HashtagsChanged {
        change: Change,
        editor: Account,
        added: Vec<String>,
        removed: Vec<String>,
        hashtags: Vec<String>,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    ProjectCreated {
        #[serde(rename = "projectName")]
        project_name: String,
        #[serde(rename = "projectHead")]
        project_head: String,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    PatchSetCreated {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        uploader: Account,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    RefUpdated {
        submitter: Account,
        #[serde(rename = "refUpdate")]
        ref_update: RefUpdate,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    BatchRefUpdated {
        submitter: Account,
        #[serde(rename = "refUpdates")]
        ref_updates: Vec<RefUpdate>,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    ReviewerAdded {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        reviewer: Account,
        adder: Account,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    ReviewerDeleted {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        reviewer: Account,
        remover: Account,
        approvals: Vec<Approval>,
        comment: String,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    TopicChanged {
        change: Change,
        old_topic: Option<String>,
        new_topic: Option<String>,
        changer: Account,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    WorkInProgressStateChanged {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        changer: Account,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    PrivateStateChanged {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        changer: Account,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
    VoteDeleted {
        change: Change,
        #[serde(rename = "patchSet")]
        patch_set: PatchSet,
        reviewer: Account,
        remover: Account,
        approvals: Vec<Approval>,
        comment: String,
    },
    ProjectHeadUpdate {
        #[serde(rename = "oldHead")]
        old_head: String,
        #[serde(rename = "newHead")]
        new_head: String,
        #[serde(rename = "eventCreatedOn")]
        event_created_on: u64,
    },
}
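A sketch of how these types deserialize. The JSON payload below is a hand-written approximation of a `ref-updated` stream event, not captured from a real Gerrit instance; it only exercises the serde attributes declared above (`tag = "type"`, kebab-case variant names, camelCase field renames):

#[cfg(test)]
mod tests {
    use super::GerritStreamEvent;

    #[test]
    fn deserializes_ref_updated() {
        let line = r#"{
            "type": "ref-updated",
            "submitter": {"name": null, "email": null, "username": "ofborg"},
            "refUpdate": {
                "oldRev": "aaaa",
                "newRev": "bbbb",
                "refName": "refs/heads/master",
                "project": "nixpkgs"
            },
            "eventCreatedOn": 1700000000
        }"#;

        let event: GerritStreamEvent = serde_json::from_str(line).unwrap();
        assert!(matches!(event, GerritStreamEvent::RefUpdated { .. }));
    }
}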
45
ofborg/src/vcs/gerrit/events.rs
Normal file
45
ofborg/src/vcs/gerrit/events.rs
Normal file
|
@ -0,0 +1,45 @@
use std::error::Error;

use futures_util::stream::try_unfold;
use futures_util::{Stream, StreamExt};
use reqwest::{Client, Response};

use super::data_structures::GerritStreamEvent;

/// Streams Gerrit events back to the caller.
async fn stream_events(
    gerrit_baseurl: &str,
) -> Result<impl Stream<Item = Result<GerritStreamEvent, Box<dyn Error>>>, Box<dyn Error>> {
    let client = Client::new();

    // Send the request and get a response
    let response: Response = client
        .get(format!("{}/stream-events", gerrit_baseurl))
        .send()
        .await?;

    // Ensure we got a successful response
    if !response.status().is_success() {
        return Err(format!("Failed to connect: {}", response.status()).into());
    }

    // Create a stream from the response body
    let body_stream = response.bytes_stream();

    // Create a stream of GerritStreamEvent from the body stream
    let stream = try_unfold(body_stream, |mut body_stream| async move {
        while let Some(item) = body_stream.next().await {
            let bytes = item?;
            let line = String::from_utf8_lossy(&bytes).to_string();

            let event: Result<GerritStreamEvent, _> = serde_json::from_str(&line);

            match event {
                Ok(event) => {
                    return Ok(Some((event, body_stream)));
                }
                Err(err) => {
                    eprintln!("Failed to deserialize event: {:?}", line);
                    return Err(Box::new(err) as Box<dyn Error>);
                }
            }
        }

        Ok(None) // End of stream
    });

    Ok(stream)
}
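This module is still disabled in mod.rs below, and there is a known wrinkle: bytes_stream() yields chunks at arbitrary boundaries, not one event per chunk, so from_str can see half an event; some line buffering will be needed before this is switched on. For completeness, a rough consumer sketch (inside some async fn, assuming GerritStreamEvent derives Debug):

// Hedged sketch of driving the stream; pin_mut! pins the opaque stream type
// so it can be polled from a loop.
let stream = stream_events("https://gerrit.example.org")
    .await
    .expect("connect to Gerrit");
futures_util::pin_mut!(stream);
while let Some(event) = stream.next().await {
    match event {
        Ok(event) => println!("event: {event:?}"),
        Err(err) => eprintln!("stream error: {err}"),
    }
}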
50
ofborg/src/vcs/gerrit/http.rs
Normal file
@ -0,0 +1,50 @@
//! REST API bindings for Gerrit
//! TODO:
//! - trace IDs support
//! - label support

use super::data_structures::{Account, Change};

pub struct GerritHTTPApi;

#[allow(clippy::unused_async)] // FIXME
impl GerritHTTPApi {
    // async fn get_project(&self, project_name: &str) -> Project {}
    /// Fetches all changes matching the query, up to the given limit.
    /// Defaults to 60 changes when no limit is given.
    pub(crate) async fn list_changes(&self, _query: &str, _limit: Option<u64>) -> Vec<Change> {
        vec![]
    }

    /// Fetch the latest change ID for a given project and CL number.
    pub(crate) async fn get_change_id(&self, _project_name: &str, _cl_number: u64) -> String {
        String::new()
    }

    /// Fetch a given change according to the change ID (not the CL number).
    pub(crate) async fn get_change(&self, _change_id: &str) -> Option<Change> {
        None
    }

    /// Add and remove hashtags on a given change ID (not the CL number).
    pub(crate) async fn set_hashtags(
        &self,
        _change_id: &str,
        _add: &[String],
        _remove: &[String],
    ) -> Vec<String> {
        vec![]
    }

    /// List all reviewers on a given change ID (not the CL number).
    pub(crate) async fn list_reviewers(&self, _change_id: &str) -> Vec<Account> {
        vec![]
    }

    /// Set reviewers and a message on a given change ID (not the CL number).
    pub(crate) async fn set_reviewers(
        &self,
        _change_id: &str,
        _message: &str,
        _reviewers: Vec<Account>,
    ) {
    }
}
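The bodies are still stubs. One Gerrit-specific detail to keep in mind when filling them in: every REST response from Gerrit starts with the XSSI-protection prefix )]}' which must be stripped before the JSON is parsed. A minimal sketch of a hypothetical helper for that (not part of this diff):

// Hypothetical helper: strip Gerrit's ")]}'" XSSI prefix before parsing.
fn parse_gerrit_json<T: serde::de::DeserializeOwned>(body: &str) -> Result<T, serde_json::Error> {
    // Gerrit prepends ")]}'" (followed by a newline) to all REST responses.
    let trimmed = body.trim_start_matches(")]}'").trim_start();
    serde_json::from_str(trimmed)
}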
137
ofborg/src/vcs/gerrit/impl.rs
Normal file
@ -0,0 +1,137 @@
//! Implementation of the VCS API for Gerrit
//! This uses the HTTP API.

use futures_util::FutureExt;

use crate::vcs::generic::VersionControlSystemAPI;

use super::{data_structures::Account, http::GerritHTTPApi};

impl VersionControlSystemAPI for GerritHTTPApi {
    // The next three APIs are todo!() because they cannot be implemented in Gerrit.
    // Gerrit does not offer any way to get this information out.
    // GerritHTTPApi needs to return something like Unsupported,
    // and we need to compose a GerritHTTPApi with a GerritForge which contains an implementation
    // of check statuses and commit statuses and an issue tracker.
    fn create_check_statuses(
        &self,
        _repo: &crate::message::Repo,
        _checks: Vec<crate::vcs::generic::CheckRunOptions>,
    ) -> futures_util::future::BoxFuture<()> {
        todo!();
    }

    fn create_commit_statuses(
        &self,
        _repo: &crate::message::Repo,
        _sha: String,
        _state: crate::vcs::generic::State,
        _context: String,
        _description: String,
        _target_url: String,
    ) -> futures_util::future::BoxFuture<Result<(), crate::vcs::commit_status::CommitStatusError>>
    {
        todo!();
    }

    fn get_issue(
        &self,
        _repo: &crate::message::Repo,
        _number: u64,
    ) -> futures_util::future::BoxFuture<Result<crate::vcs::generic::Issue, String>> {
        todo!();
    }

    fn get_repository(&self, _repo: &crate::message::Repo) -> crate::vcs::generic::Repository {
        todo!();
    }

    fn get_changes(
        &self,
        repo: &crate::message::Repo,
    ) -> futures_util::future::BoxFuture<Vec<crate::message::Change>> {
        let repo_name = repo.name.clone();
        async move {
            self.list_changes(&format!("project:{}", &repo_name), None)
                .await
                .into_iter()
                .map(std::convert::Into::into)
                .collect()
        }
        .boxed()
    }

    fn get_change(
        &self,
        repo: &crate::message::Repo,
        number: u64,
    ) -> futures_util::future::BoxFuture<Option<crate::message::Change>> {
        let repo_name = repo.name.clone();
        async move {
            let change_id = self.get_change_id(&repo_name, number).await;
            GerritHTTPApi::get_change(self, &change_id)
                .await
                .map(std::convert::Into::into)
        }
        .boxed()
    }

    fn update_labels(
        &self,
        repo: &crate::message::Repo,
        number: u64,
        add: &[String],
        remove: &[String],
    ) -> futures_util::future::BoxFuture<()> {
        let add = add.to_owned();
        let remove = remove.to_owned();
        let repo_name = repo.name.clone();

        async move {
            let change_id = self.get_change_id(&repo_name, number).await;
            self.set_hashtags(&change_id, &add, &remove).await;
        }
        .boxed()
    }

    fn get_existing_reviewers(
        &self,
        repo: &crate::message::Repo,
        number: u64,
    ) -> futures_util::future::BoxFuture<crate::vcs::generic::ChangeReviewers> {
        let repo_name = repo.name.clone();
        async move {
            let change_id = self.get_change_id(&repo_name, number).await;
            self.list_reviewers(&change_id).await.into()
        }
        .boxed()
    }

    fn request_reviewers(
        &self,
        repo: &crate::message::Repo,
        number: u64,
        entity_reviewers: Vec<String>,
        // FIXME: support group reviews
        _team_reviewers: Vec<String>,
    ) -> futures_util::future::BoxFuture<()> {
        let repo_name = repo.name.clone();
        async move {
            let change_id = self.get_change_id(&repo_name, number).await;
            self.set_reviewers(
                &change_id,
                "Automatic reviewer request",
                entity_reviewers
                    .into_iter()
                    .map(|reviewer| Account {
                        username: Some(reviewer),
                        email: None,
                        name: None,
                    })
                    .collect(),
            )
            .await;
        }
        .boxed()
    }
}
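The value of routing everything through the generic trait is that callers never name Gerrit directly. A rough sketch of a call site; the function name and change number here are hypothetical:

// Hypothetical call site: callers only see the generic trait,
// so the same code path can serve the GitHub and Gerrit backends.
async fn ping_change(api: &dyn VersionControlSystemAPI, repo: &crate::message::Repo) {
    if let Some(_change) = api.get_change(repo, 4242).await {
        println!("change 4242 exists on {}", repo.name);
    }
}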
6
ofborg/src/vcs/gerrit/mod.rs
Normal file
@ -0,0 +1,6 @@
pub mod checks;
pub mod data_structures;
pub mod http;
pub mod r#impl;
pub mod ssh;
// pub mod events;
79
ofborg/src/vcs/gerrit/ssh.rs
Normal file
@ -0,0 +1,79 @@
use std::{error::Error, path::PathBuf};

use async_stream::stream;
use futures::StreamExt;
use futures_util::Stream;
use openssh::{Session, SessionBuilder, Stdio};
use tokio::io::AsyncBufReadExt;

use super::data_structures::GerritStreamEvent;

pub struct GerritSSHApi {
    session: Session,
}

impl GerritSSHApi {
    pub async fn new(private_key_file: PathBuf, uri: &str) -> Self {
        let mut builder = SessionBuilder::default();
        let (builder, destination) = builder.keyfile(&private_key_file).resolve(uri);
        let tempdir = builder
            .launch_master(destination)
            .await
            .unwrap_or_else(|_| panic!("Failed to launch SSH master to destination '{}'", uri));
        Self {
            session: Session::new_process_mux(tempdir),
        }
    }

    pub async fn raw_command(
        &mut self,
        args: Vec<&str>,
    ) -> Result<impl Stream<Item = Result<String, std::io::Error>> + '_, Box<dyn Error>> {
        self.session
            .check()
            .await
            .expect("Session is not in a good state.");

        let mut child = self
            .session
            .raw_command("gerrit")
            .raw_args(args)
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()
            .await
            .expect("Failed to spawn a command");

        Ok(stream! {
            let child_stdout = child.stdout().take().expect("Failed to obtain stdout");
            let stdout = tokio::io::BufReader::new(child_stdout);
            let mut line_stream = stdout.lines();

            loop {
                match line_stream.next_line().await {
                    Ok(Some(line)) => yield Ok(line),
                    Ok(None) => {
                        break;
                    }
                    Err(err) => yield Err(err),
                }
            }
        })
    }

    pub async fn stream_events(
        &mut self,
    ) -> Result<impl Stream<Item = Result<GerritStreamEvent, Box<dyn Error>>> + '_, Box<dyn Error>>
    {
        let lines = self.raw_command(vec!["stream-events"]).await?;
        let events = lines.filter_map(|line| async { line.ok() }).map(|line| {
            let event: Result<GerritStreamEvent, _> = serde_json::from_str(&line);
            match event {
                Ok(event) => Ok(event),
                Err(err) => Err(Box::new(err) as Box<dyn Error>),
            }
        });

        Ok(events)
    }
}
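A rough sketch of how the SSH event stream might be consumed, inside some async fn returning a boxed error. The key path, user, and host are placeholders; Gerrit's SSH daemon conventionally listens on port 29418:

// Hedged sketch; destination and key path are placeholders, not real config.
let mut api = GerritSSHApi::new(
    PathBuf::from("/var/lib/ofborg/gerrit-key"),
    "ssh://ofborg@gerrit.example.org:29418",
)
.await;
let events = api.stream_events().await?;
futures::pin_mut!(events);
while let Some(event) = events.next().await {
    match event {
        Ok(_event) => { /* hand off to a worker */ }
        Err(err) => eprintln!("undecodable event: {err}"),
    }
}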
3
ofborg/src/vcs/mod.rs
Normal file
@ -0,0 +1,3 @@
|
||||||
|
pub mod commit_status;
|
||||||
|
pub mod generic;
|
||||||
|
pub mod gerrit;
|
@ -1,5 +1,7 @@
+use async_trait::async_trait;
 use std::marker::Send;
 
+use futures::Stream;
 use serde::Serialize;
 
 pub struct Response {}
@ -24,33 +26,53 @@ pub struct QueueMsg {
     pub content: Vec<u8>,
 }
 
-pub fn publish_serde_action<T: ?Sized>(
-    exchange: Option<String>,
-    routing_key: Option<String>,
+pub fn prepare_queue_message<T>(
+    exchange: Option<&str>,
+    routing_key: Option<&str>,
     msg: &T,
-) -> Action
+) -> QueueMsg
 where
-    T: Serialize,
+    T: Serialize + ?Sized,
 {
-    Action::Publish(Box::new(QueueMsg {
-        exchange,
-        routing_key,
+    QueueMsg {
+        exchange: exchange.map(std::borrow::ToOwned::to_owned),
+        routing_key: routing_key.map(std::borrow::ToOwned::to_owned),
         mandatory: false,
         immediate: false,
         content_type: Some("application/json".to_owned()),
         content: serde_json::to_string(&msg).unwrap().into_bytes(),
-    }))
+    }
+}
+
+pub fn publish_serde_action<T>(
+    exchange: &Option<String>,
+    routing_key: &Option<String>,
+    msg: &T,
+) -> Action
+where
+    T: Serialize + ?Sized,
+{
+    Action::Publish(Box::new(prepare_queue_message(
+        exchange.as_deref(),
+        routing_key.as_deref(),
+        msg,
+    )))
 }
 
+#[async_trait]
 pub trait SimpleWorker: Send {
     type J: Send;
 
-    fn consumer(&mut self, job: &Self::J) -> Actions;
+    async fn consumer(&mut self, chan: &mut lapin::Channel, job: &Self::J) -> Actions;
 
-    fn msg_to_job(
+    async fn msg_to_job(
         &mut self,
         method: &str,
         headers: &Option<String>,
         body: &[u8],
     ) -> Result<Self::J, String>;
 }
+
+pub trait StreamWorker: Send {
+    fn publisher(&mut self, chan: &mut lapin::Channel) -> impl Stream<Item = Box<QueueMsg>>;
+}
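The point of splitting prepare_queue_message out of publish_serde_action is that a stream-based publisher can now build QueueMsg values directly, without routing through the Action enum. A rough sketch of such a helper; the exchange name is illustrative, not a queue that exists in this PR:

// Hedged sketch of building messages for a StreamWorker publisher.
fn queue_event<T: serde::Serialize>(event: &T) -> Box<QueueMsg> {
    Box::new(prepare_queue_message(
        Some("gerrit-events"), // assumed exchange name
        None,                  // no routing key
        event,
    ))
}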
@ -8,6 +8,7 @@ pub struct LineWriter {
 }
 
 impl LineWriter {
+    #[must_use]
     pub fn new(mut rw: File) -> LineWriter {
         let buf = LineWriter::load_buffer(&mut rw);
         let len = buf.len();
@ -27,7 +28,7 @@ impl LineWriter {
             .lines()
             .map(|line| match line {
                 Ok(s) => s,
-                Err(e) => format!("UTF-8 Decode err: {:?}", e),
+                Err(e) => format!("UTF-8 Decode err: {e:?}"),
             })
             .collect()
     }
@ -35,7 +36,7 @@ impl LineWriter {
     pub fn write_to_line(&mut self, line: usize, data: &str) {
         let original_len = self.buffer.len();
         while self.buffer.len() <= line {
-            self.buffer.push("".to_owned());
+            self.buffer.push(String::new());
         }
 
         self.buffer.remove(line);
@ -72,6 +73,7 @@ impl LineWriter {
         self.last_line = line;
     }
 
+    #[must_use]
     pub fn inner(self) -> File {
         self.file
     }
3
scripts/init-account.sh
Normal file
@ -0,0 +1,3 @@
rabbitmqctl add_user test test
rabbitmqctl clear_password test
rabbitmqctl set_permissions_globally test ".*" ".*" ".*"