forked from the-distro/ofborg

Revert "Revert "Evaluation Stats""

This commit is contained in:
parent dd33b27654
commit 678a1ca3c5

ofborg/Cargo.lock (generated) | 7
@@ -452,6 +452,7 @@ dependencies = [
"lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"md5 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
"nom 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"separator 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.34 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -636,6 +637,11 @@ dependencies = [
"libc 0.2.46 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "separator"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "serde"
version = "1.0.84"
@@ -922,6 +928,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum schannel 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "0e1a231dc10abf6749cfa5d7767f25888d484201accbd919b66ab5413c502d56"
"checksum security-framework 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "dfa44ee9c54ce5eecc9de7d5acbad112ee58755239381f687e564004ba4a2332"
"checksum security-framework-sys 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "5421621e836278a0b139268f36eee0dc7e389b784dc3f79d8f11aabadf41bead"
"checksum separator 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f97841a747eef040fcd2e7b3b9a220a7205926e60488e673d9e4926d27772ce5"
"checksum serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)" = "0e732ed5a5592c17d961555e3b552985baf98d50ce418b7b655f31f6ba7eb1b7"
"checksum serde_derive 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d6115a3ca25c224e409185325afc16a0d5aaaabc15c42b09587d6f1ba39a5b"
"checksum serde_json 1.0.34 (registry+https://github.com/rust-lang/crates.io-index)" = "bdf540260cfee6da923831f4776ddc495ada940c30117977c70f1313a6130545"
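The new separator dependency added above is what formats the evaluation-stats counters with thousands separators in the performance report further down. A minimal sketch of the API this commit relies on (separated_string from the separator crate's Separatable trait; the sample value comes from the test fixture in nixstats.rs):

    extern crate separator;

    use separator::Separatable;

    fn main() {
        // separated_string() is what nixstats.rs calls on each raw counter.
        assert_eq!(3_563_057_008u64.separated_string(), "3,563,057,008");
    }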
@@ -123,6 +123,7 @@ let crates = cratesIO // rec {
(cratesIO.crates."lru_cache"."${deps."ofborg"."0.1.8"."lru_cache"}" deps)
(cratesIO.crates."md5"."${deps."ofborg"."0.1.8"."md5"}" deps)
(cratesIO.crates."nom"."${deps."ofborg"."0.1.8"."nom"}" deps)
(cratesIO.crates."separator"."${deps."ofborg"."0.1.8"."separator"}" deps)
(cratesIO.crates."serde"."${deps."ofborg"."0.1.8"."serde"}" deps)
(cratesIO.crates."serde_derive"."${deps."ofborg"."0.1.8"."serde_derive"}" deps)
(cratesIO.crates."serde_json"."${deps."ofborg"."0.1.8"."serde_json"}" deps)
@@ -145,6 +146,7 @@ let crates = cratesIO // rec {
md5."${deps.ofborg."0.1.8".md5}".default = true;
nom."${deps.ofborg."0.1.8".nom}".default = true;
ofborg."0.1.8".default = (f.ofborg."0.1.8".default or true);
separator."${deps.ofborg."0.1.8".separator}".default = true;
serde."${deps.ofborg."0.1.8".serde}".default = true;
serde_derive."${deps.ofborg."0.1.8".serde_derive}".default = true;
serde_json."${deps.ofborg."0.1.8".serde_json}".default = true;
@@ -167,6 +169,7 @@ let crates = cratesIO // rec {
(cratesIO.features_.lru_cache."${deps."ofborg"."0.1.8"."lru_cache"}" deps)
(cratesIO.features_.md5."${deps."ofborg"."0.1.8"."md5"}" deps)
(cratesIO.features_.nom."${deps."ofborg"."0.1.8"."nom"}" deps)
(cratesIO.features_.separator."${deps."ofborg"."0.1.8"."separator"}" deps)
(cratesIO.features_.serde."${deps."ofborg"."0.1.8"."serde"}" deps)
(cratesIO.features_.serde_derive."${deps."ofborg"."0.1.8"."serde_derive"}" deps)
(cratesIO.features_.serde_json."${deps."ofborg"."0.1.8"."serde_json"}" deps)
@@ -375,6 +378,7 @@ rec {
lru_cache = "0.1.1";
md5 = "0.3.8";
nom = "4.1.1";
separator = "0.4.1";
serde = "1.0.84";
serde_derive = "1.0.84";
serde_json = "1.0.34";
@@ -459,6 +463,7 @@ rec {
core_foundation_sys = "0.2.3";
libc = "0.2.46";
};
deps.separator."0.4.1" = {};
deps.serde."1.0.84" = {};
deps.serde_derive."1.0.84" = {
proc_macro2 = "0.4.24";
@@ -27,6 +27,7 @@ lru-cache = "0.1.1"
nom = "4.0.0-beta3"
sys-info = "0.5.6"
chrono = "0.4.6"
separator = "0.4.1"

[patch.crates-io]
#hubcaps = { path = "../hubcaps" }
@@ -73,6 +73,7 @@ safemem-0.3.0
schannel-0.1.14
security-framework-0.1.16
security-framework-sys-0.1.16
separator-0.4.1
serde-1.0.84
serde_derive-1.0.84
serde_json-1.0.34
@@ -2118,6 +2118,20 @@ rec {
];

# end
# separator-0.4.1

crates.separator."0.4.1" = deps: { features?(features_.separator."0.4.1" deps {}) }: buildRustCrate {
crateName = "separator";
version = "0.4.1";
authors = [ "Saghm Rossi <saghmrossi@gmail.com>" ];
sha256 = "1l7yhf6dy09k9cy0kkwb9wy98rn8mnz72q27wbd6bhiflllwghr7";
};
features_.separator."0.4.1" = deps: f: updateFeatures f (rec {
separator."0.4.1".default = (f.separator."0.4.1".default or true);
}) [];

# end
# serde-1.0.84
@@ -51,6 +51,7 @@ fn main() {
cloner,
&nix,
cfg.github(),
cfg.github_app_vendingmachine(),
cfg.acl(),
cfg.runner.identity.clone(),
events,
@@ -27,6 +27,7 @@ extern crate hyper;
extern crate hyper_native_tls;
extern crate lru_cache;
extern crate md5;
extern crate separator;
extern crate tempfile;
extern crate uuid;

@@ -47,6 +48,8 @@ pub mod locks;
pub mod maintainers;
pub mod message;
pub mod nix;
pub mod nixenv;
pub mod nixstats;
pub mod notifyworker;
pub mod outpathdiff;
pub mod stats;
@@ -2,6 +2,7 @@ use ofborg::asynccmd::{AsyncCmd, SpawnedAsyncCmd};
use ofborg::partition_result;
use std::collections::HashMap;
use std::env;
use std::ffi::OsStr;
use std::fmt;
use std::fs;
use std::io::BufRead;
@@ -161,7 +162,7 @@ impl Nix {
file: File,
attrs: Vec<String>,
) -> Result<fs::File, fs::File> {
let mut command = self.safe_command(&Operation::Instantiate, nixpkgs, vec![], &[]);
let mut command = self.safe_command::<&OsStr>(&Operation::Instantiate, nixpkgs, &[], &[]);
self.set_attrs_command(&mut command, file, attrs);
self.run(command, true)
}
@@ -182,7 +183,7 @@ impl Nix {
attrargs.push(argstr.to_owned());
}

self.safe_command(&Operation::Evaluate, nixpkgs, attrargs, &extra_paths)
self.safe_command(&Operation::Evaluate, nixpkgs, &attrargs, &extra_paths)
}

pub fn safely_build_attrs(
@@ -191,7 +192,7 @@ impl Nix {
file: File,
attrs: Vec<String>,
) -> Result<fs::File, fs::File> {
let mut command = self.safe_command(&Operation::Build, nixpkgs, vec![], &[]);
let mut command = self.safe_command::<&OsStr>(&Operation::Build, nixpkgs, &[], &[]);
self.set_attrs_command(&mut command, file, attrs);
self.run(command, true)
}
@@ -202,7 +203,7 @@ impl Nix {
file: File,
attrs: Vec<String>,
) -> SpawnedAsyncCmd {
let mut command = self.safe_command(&Operation::Build, nixpkgs, vec![], &[]);
let mut command = self.safe_command::<&OsStr>(&Operation::Build, nixpkgs, &[], &[]);
self.set_attrs_command(&mut command, file, attrs);
AsyncCmd::new(command).spawn()
}
@@ -231,7 +232,7 @@ impl Nix {
args: Vec<String>,
keep_stdout: bool,
) -> Result<fs::File, fs::File> {
self.run(self.safe_command(&op, nixpkgs, args, &[]), keep_stdout)
self.run(self.safe_command(&op, nixpkgs, &args, &[]), keep_stdout)
}

pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<fs::File, fs::File> {
@@ -261,13 +262,43 @@ impl Nix {
}
}

pub fn safe_command(
pub fn run_stderr_stdout(&self, mut cmd: Command) -> (bool, fs::File, fs::File) {
let stdout_file = tempfile().expect("Fetching a stdout tempfile");
let mut stdout_reader = stdout_file
.try_clone()
.expect("Cloning stdout to the reader");

let stderr_file = tempfile().expect("Fetching a stderr tempfile");
let mut stderr_reader = stderr_file
.try_clone()
.expect("Cloning stderr to the reader");

let status = cmd
.stdout(Stdio::from(stdout_file))
.stderr(Stdio::from(stderr_file))
.status()
.expect("Running a program ...");

stdout_reader
.seek(SeekFrom::Start(0))
.expect("Seeking dout to Start(0)");
stderr_reader
.seek(SeekFrom::Start(0))
.expect("Seeking stderr to Start(0)");

(status.success(), stdout_reader, stderr_reader)
}

pub fn safe_command<S>(
&self,
op: &Operation,
nixpkgs: &Path,
args: Vec<String>,
args: &[S],
safe_paths: &[&Path],
) -> Command {
) -> Command
where
S: AsRef<OsStr>,
{
let nixpkgspath = format!("nixpkgs={}", nixpkgs.display());
let mut nixpath: Vec<String> = safe_paths
.iter()
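safe_command is now generic over its argument type (args: &[S] where S: AsRef<OsStr>), which is why call sites that pass an empty argument list write safe_command::<&OsStr>(..): an empty slice gives the compiler nothing to infer S from. A standalone sketch of the same pattern, using a hypothetical run_with_args function rather than ofborg's API:

    use std::ffi::OsStr;

    // Hypothetical stand-in for Nix::safe_command, generic the same way.
    fn run_with_args<S: AsRef<OsStr>>(args: &[S]) -> usize {
        args.iter().map(|a| a.as_ref().len()).sum()
    }

    fn main() {
        let owned: Vec<String> = vec![String::from("--version")];
        println!("{}", run_with_args(&owned));            // S = String, inferred
        println!("{}", run_with_args(&["--arg", "x"]));   // S = &str, inferred
        println!("{}", run_with_args::<&OsStr>(&[]));     // empty slice: name S explicitly
    }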
@ -462,12 +493,7 @@ mod tests {
|
|||
assert_eq!(op.to_string(), "nix-build");
|
||||
|
||||
let ret: Result<fs::File, fs::File> = nix.run(
|
||||
nix.safe_command(
|
||||
&op,
|
||||
build_path().as_path(),
|
||||
vec![String::from("--version")],
|
||||
&[],
|
||||
),
|
||||
nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
|
||||
true,
|
||||
);
|
||||
|
||||
|
@ -485,12 +511,7 @@ mod tests {
|
|||
assert_eq!(op.to_string(), "nix-instantiate");
|
||||
|
||||
let ret: Result<fs::File, fs::File> = nix.run(
|
||||
nix.safe_command(
|
||||
&op,
|
||||
build_path().as_path(),
|
||||
vec![String::from("--version")],
|
||||
&[],
|
||||
),
|
||||
nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
|
||||
true,
|
||||
);
|
||||
|
||||
|
@ -504,12 +525,7 @@ mod tests {
|
|||
assert_eq!(op.to_string(), "nix-env -qa --json");
|
||||
|
||||
let ret: Result<fs::File, fs::File> = nix.run(
|
||||
nix.safe_command(
|
||||
&op,
|
||||
build_path().as_path(),
|
||||
vec![String::from("--version")],
|
||||
&[],
|
||||
),
|
||||
nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
|
||||
true,
|
||||
);
|
||||
|
||||
|
@ -527,12 +543,7 @@ mod tests {
|
|||
assert_eq!(op.to_string(), "nix-env -qaP --no-name --out-path");
|
||||
|
||||
let ret: Result<fs::File, fs::File> = nix.run(
|
||||
nix.safe_command(
|
||||
&op,
|
||||
build_path().as_path(),
|
||||
vec![String::from("--version")],
|
||||
&[],
|
||||
),
|
||||
nix.safe_command(&op, build_path().as_path(), &["--version"], &[]),
|
||||
true,
|
||||
);
|
||||
|
||||
|
@ -551,7 +562,7 @@ mod tests {
|
|||
let nix = nix();
|
||||
|
||||
let ret: Result<fs::File, fs::File> = nix.run(
|
||||
nix.safe_command(&env_noop(), build_path().as_path(), vec![], &[]),
|
||||
nix.safe_command::<&OsStr>(&env_noop(), build_path().as_path(), &[], &[]),
|
||||
true,
|
||||
);
|
||||
|
||||
|
@ -578,7 +589,7 @@ mod tests {
|
|||
);
|
||||
|
||||
let ret: Result<fs::File, fs::File> = nix.run(
|
||||
nix.safe_command(&env_noop(), build_path().as_path(), vec![], &[]),
|
||||
nix.safe_command::<&OsStr>(&env_noop(), build_path().as_path(), &[], &[]),
|
||||
true,
|
||||
);
|
||||
|
||||
|
@ -601,7 +612,7 @@ mod tests {
|
|||
let op = noop(Operation::Build);
|
||||
|
||||
let ret: Result<fs::File, fs::File> = nix.run(
|
||||
nix.safe_command(&op, build_path().as_path(), vec![], &[]),
|
||||
nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]),
|
||||
true,
|
||||
);
|
||||
|
||||
|
@ -617,7 +628,7 @@ mod tests {
|
|||
let nix = nix();
|
||||
let op = noop(Operation::Build);
|
||||
|
||||
let mut command = nix.safe_command(&op, build_path().as_path(), vec![], &[]);
|
||||
let mut command = nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]);
|
||||
nix.set_attrs_command(
|
||||
&mut command,
|
||||
File::DefaultNixpkgs,
|
||||
|
@ -634,7 +645,7 @@ mod tests {
|
|||
let nix = nix();
|
||||
let op = noop(Operation::Instantiate);
|
||||
|
||||
let mut command = nix.safe_command(&op, build_path().as_path(), vec![], &[]);
|
||||
let mut command = nix.safe_command::<&OsStr>(&op, build_path().as_path(), &[], &[]);
|
||||
nix.set_attrs_command(
|
||||
&mut command,
|
||||
File::ReleaseNixOS,
|
||||
|
ofborg/src/nixenv.rs (new file, 117 lines)

@@ -0,0 +1,117 @@
|
|||
/// Evaluates the expression like Hydra would, with regards to
|
||||
/// architecture support and recursed packages.
|
||||
use crate::nixstats::EvaluationStats;
|
||||
use crate::outpathdiff;
|
||||
use ofborg::nix;
|
||||
use serde_json;
|
||||
use std::fs;
|
||||
use std::fs::File;
|
||||
use std::io::BufReader;
|
||||
use std::io::Read;
|
||||
use std::io::Seek;
|
||||
use std::io::SeekFrom;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct HydraNixEnv {
|
||||
path: PathBuf,
|
||||
nix: nix::Nix,
|
||||
check_meta: bool,
|
||||
}
|
||||
|
||||
impl HydraNixEnv {
|
||||
pub fn new(nix: nix::Nix, path: PathBuf, check_meta: bool) -> HydraNixEnv {
|
||||
HydraNixEnv {
|
||||
nix,
|
||||
path,
|
||||
check_meta,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn execute(&self) -> Result<(outpathdiff::PackageOutPaths, EvaluationStats), Error> {
|
||||
self.place_nix()?;
|
||||
let (status, stdout, mut stderr) = self.run_nix_env();
|
||||
self.remove_nix()?;
|
||||
|
||||
if !status {
|
||||
Err(Error::Fd(stderr))
|
||||
} else if let Ok(stats) = serde_json::from_reader(&mut stderr) {
|
||||
let outpaths = outpathdiff::parse_lines(&mut BufReader::new(stdout));
|
||||
Ok((outpaths, stats))
|
||||
} else {
|
||||
stderr
|
||||
.seek(SeekFrom::Start(0))
|
||||
.expect("Seeking to Start(0)");
|
||||
Err(Error::Fd(stderr))
|
||||
}
|
||||
}
|
||||
|
||||
/// Put outpaths.nix in to the project root, which is what
|
||||
/// emulates Hydra's behavior.
|
||||
fn place_nix(&self) -> Result<(), std::io::Error> {
|
||||
let mut file = File::create(self.outpath_nix_path())?;
|
||||
file.write_all(include_bytes!("outpaths.nix"))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_nix(&self) -> Result<(), std::io::Error> {
|
||||
fs::remove_file(self.outpath_nix_path())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn outpath_nix_path(&self) -> PathBuf {
|
||||
self.path.join(".gc-of-borg-outpaths.nix")
|
||||
}
|
||||
|
||||
fn run_nix_env(&self) -> (bool, File, File) {
|
||||
let check_meta = if self.check_meta { "true" } else { "false" };
|
||||
|
||||
let mut cmd = self.nix.safe_command(
|
||||
&nix::Operation::QueryPackagesOutputs,
|
||||
&self.path,
|
||||
&[
|
||||
"-f",
|
||||
".gc-of-borg-outpaths.nix",
|
||||
"--arg",
|
||||
"checkMeta",
|
||||
check_meta,
|
||||
],
|
||||
&[],
|
||||
);
|
||||
cmd.env("NIX_SHOW_STATS", "1");
|
||||
self.nix.run_stderr_stdout(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
pub enum Error {
|
||||
Io(std::io::Error),
|
||||
Fd(File),
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for Error {
|
||||
fn from(e: std::io::Error) -> Error {
|
||||
Error::Io(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error {
|
||||
pub fn display(self) -> String {
|
||||
match self {
|
||||
Error::Io(e) => format!("Failed during the setup of executing nix-env: {:?}", e),
|
||||
Error::Fd(mut fd) => {
|
||||
let mut buffer = Vec::new();
|
||||
let read_result = fd.read_to_end(&mut buffer);
|
||||
let bufstr = String::from_utf8_lossy(&buffer);
|
||||
|
||||
match read_result {
|
||||
Ok(_) => format!("nix-env failed:\n{}", bufstr),
|
||||
Err(e) => format!(
|
||||
"nix-env failed and loading the error result caused a new error {:?}\n\n{}",
|
||||
e, bufstr
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
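nixenv.rs drives nix-env through the new Nix::run_stderr_stdout above: stdout and stderr are redirected into tempfiles, cloned handles are rewound with seek, and stderr is then parsed as the NIX_SHOW_STATS JSON. A standalone sketch of just that tempfile/rewind pattern, assuming the tempfile crate the project already depends on:

    extern crate tempfile;

    use std::io::{Read, Seek, SeekFrom, Write};
    use tempfile::tempfile;

    fn main() -> std::io::Result<()> {
        // Same pattern as Nix::run_stderr_stdout: one handle would be handed to
        // Stdio::from, a cloned handle is kept so the output can be read back.
        let mut writer = tempfile()?;
        let mut reader = writer.try_clone()?;

        writer.write_all(b"{\"cpuTime\": 1.5}")?;

        // The shared cursor sits at the end after writing; rewind before reading.
        reader.seek(SeekFrom::Start(0))?;
        let mut out = String::new();
        reader.read_to_string(&mut out)?;
        println!("{}", out);
        Ok(())
    }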
ofborg/src/nixstats.rs (new file, 549 lines)

@@ -0,0 +1,549 @@
|
|||
use separator::Separatable;
|
||||
/// Statistics emitted by Nix when NIX_SHOW_STATS=1
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct EvaluationStats {
|
||||
/// Number of CPU seconds spent during evaluation.
|
||||
#[serde(rename = "cpuTime")]
|
||||
pub cpu_time: f32,
|
||||
|
||||
pub envs: Environments,
|
||||
pub list: Lists,
|
||||
pub values: Values,
|
||||
pub symbols: Symbols,
|
||||
pub sets: Sets,
|
||||
pub sizes: Sizes,
|
||||
pub gc: GarbageCollector,
|
||||
|
||||
#[serde(rename = "nrOpUpdates")]
|
||||
pub nr_op_updates: u64,
|
||||
#[serde(rename = "nrOpUpdateValuesCopied")]
|
||||
pub nr_op_update_values_copied: u64,
|
||||
#[serde(rename = "nrThunks")]
|
||||
pub nr_thunks: u64,
|
||||
#[serde(rename = "nrAvoided")]
|
||||
pub nr_avoided: u64,
|
||||
#[serde(rename = "nrLookups")]
|
||||
pub nr_lookups: u64,
|
||||
#[serde(rename = "nrPrimOpCalls")]
|
||||
pub nr_prim_op_calls: u64,
|
||||
#[serde(rename = "nrFunctionCalls")]
|
||||
pub nr_function_calls: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Environments {
|
||||
pub number: u64,
|
||||
pub elements: u64,
|
||||
pub bytes: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Lists {
|
||||
pub elements: u64,
|
||||
|
||||
/// Number of bytes consumed
|
||||
pub bytes: u64,
|
||||
pub concats: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Values {
|
||||
pub number: u64,
|
||||
|
||||
/// Number of bytes consumed
|
||||
pub bytes: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Symbols {
|
||||
pub number: u64,
|
||||
|
||||
/// Number of bytes consumed
|
||||
pub bytes: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Sets {
|
||||
pub number: u64,
|
||||
pub elements: u64,
|
||||
|
||||
/// Number of bytes consumed
|
||||
pub bytes: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Sizes {
|
||||
#[serde(rename = "Env")]
|
||||
pub env: u64,
|
||||
|
||||
#[serde(rename = "Value")]
|
||||
pub value: u64,
|
||||
|
||||
#[serde(rename = "Bindings")]
|
||||
pub bindings: u64,
|
||||
|
||||
#[serde(rename = "Attr")]
|
||||
pub attr: u64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct GarbageCollector {
|
||||
#[serde(rename = "heapSize")]
|
||||
pub heap_size: u64,
|
||||
#[serde(rename = "totalBytes")]
|
||||
pub total_bytes: u64,
|
||||
}
|
||||
|
||||
pub struct EvaluationStatsDiff<'a> {
|
||||
left: &'a EvaluationStats,
|
||||
right: &'a EvaluationStats,
|
||||
}
|
||||
|
||||
impl<'a> EvaluationStatsDiff<'a> {
|
||||
pub fn compare(
|
||||
left: &'a EvaluationStats,
|
||||
right: &'a EvaluationStats,
|
||||
) -> EvaluationStatsDiff<'a> {
|
||||
EvaluationStatsDiff { left, right }
|
||||
}
|
||||
|
||||
pub fn markdown(&self) -> String {
|
||||
struct Row {
|
||||
before: String,
|
||||
after: String,
|
||||
diff: String,
|
||||
diff_pct: String,
|
||||
}
|
||||
impl Row {
|
||||
fn from_u64(left: u64, right: u64) -> Row {
|
||||
let diff: u64;
|
||||
let direction: &str;
|
||||
let diff_pct: String;
|
||||
|
||||
if left > right {
|
||||
diff = left - right;
|
||||
direction = "🡖 ";
|
||||
} else if left < right {
|
||||
diff = right - left;
|
||||
direction = "🡕 ";
|
||||
} else {
|
||||
diff = 0;
|
||||
direction = "";
|
||||
}
|
||||
|
||||
if diff > 0 {
|
||||
diff_pct = format!(
|
||||
"{:.2}%",
|
||||
((right as f64) - (left as f64)) / (left as f64) * 100.0
|
||||
);
|
||||
} else {
|
||||
diff_pct = String::from("");
|
||||
}
|
||||
|
||||
Row {
|
||||
before: left.separated_string(),
|
||||
after: right.separated_string(),
|
||||
diff: format!("{}{}", direction, diff.separated_string()),
|
||||
diff_pct,
|
||||
}
|
||||
}
|
||||
|
||||
fn from_f32(left: f32, right: f32) -> Row {
|
||||
let diff: f32;
|
||||
let direction: &str;
|
||||
let diff_pct: String;
|
||||
|
||||
if left > right {
|
||||
diff = left - right;
|
||||
direction = "🡖 ";
|
||||
} else if left < right {
|
||||
diff = right - left;
|
||||
direction = "🡕 ";
|
||||
} else {
|
||||
diff = 0.0;
|
||||
direction = "";
|
||||
}
|
||||
|
||||
if diff > 0.0 {
|
||||
diff_pct = format!(
|
||||
"{:.2}%",
|
||||
(f64::from(right) - f64::from(left)) / f64::from(left) * 100.0
|
||||
);
|
||||
} else {
|
||||
diff_pct = String::from("");
|
||||
}
|
||||
|
||||
Row {
|
||||
before: format!("{:.2}", left),
|
||||
after: format!("{:.2}", right),
|
||||
diff: format!("{}{:.2}", direction, diff),
|
||||
diff_pct,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut data: HashMap<&str, Row> = HashMap::new();
|
||||
data.insert(
|
||||
"cpuTime",
|
||||
Row::from_f32(self.left.cpu_time, self.right.cpu_time),
|
||||
);
|
||||
|
||||
data.insert(
|
||||
"envs-number",
|
||||
Row::from_u64(self.left.envs.number, self.right.envs.number),
|
||||
);
|
||||
data.insert(
|
||||
"envs-elements",
|
||||
Row::from_u64(self.left.envs.elements, self.right.envs.elements),
|
||||
);
|
||||
data.insert(
|
||||
"envs-bytes",
|
||||
Row::from_u64(self.left.envs.bytes, self.right.envs.bytes),
|
||||
);
|
||||
|
||||
data.insert(
|
||||
"list-elements",
|
||||
Row::from_u64(self.left.list.elements, self.right.list.elements),
|
||||
);
|
||||
data.insert(
|
||||
"list-bytes",
|
||||
Row::from_u64(self.left.list.bytes, self.right.list.bytes),
|
||||
);
|
||||
data.insert(
|
||||
"list-concats",
|
||||
Row::from_u64(self.left.list.concats, self.right.list.concats),
|
||||
);
|
||||
|
||||
data.insert(
|
||||
"values-number",
|
||||
Row::from_u64(self.left.values.number, self.right.values.number),
|
||||
);
|
||||
data.insert(
|
||||
"values-bytes",
|
||||
Row::from_u64(self.left.values.bytes, self.right.values.bytes),
|
||||
);
|
||||
|
||||
data.insert(
|
||||
"symbols-number",
|
||||
Row::from_u64(self.left.symbols.number, self.right.symbols.number),
|
||||
);
|
||||
data.insert(
|
||||
"symbols-bytes",
|
||||
Row::from_u64(self.left.symbols.bytes, self.right.symbols.bytes),
|
||||
);
|
||||
|
||||
data.insert(
|
||||
"sets-number",
|
||||
Row::from_u64(self.left.sets.number, self.right.sets.number),
|
||||
);
|
||||
data.insert(
|
||||
"sets-bytes",
|
||||
Row::from_u64(self.left.sets.bytes, self.right.sets.bytes),
|
||||
);
|
||||
data.insert(
|
||||
"sets-elements",
|
||||
Row::from_u64(self.left.sets.elements, self.right.sets.elements),
|
||||
);
|
||||
|
||||
data.insert(
|
||||
"sizes-Env",
|
||||
Row::from_u64(self.left.sizes.env, self.right.sizes.env),
|
||||
);
|
||||
data.insert(
|
||||
"sizes-Value",
|
||||
Row::from_u64(self.left.sizes.value, self.right.sizes.value),
|
||||
);
|
||||
data.insert(
|
||||
"sizes-Bindings",
|
||||
Row::from_u64(self.left.sizes.bindings, self.right.sizes.bindings),
|
||||
);
|
||||
data.insert(
|
||||
"sizes-Attr",
|
||||
Row::from_u64(self.left.sizes.attr, self.right.sizes.attr),
|
||||
);
|
||||
|
||||
data.insert(
|
||||
"nrOpUpdates",
|
||||
Row::from_u64(self.left.nr_op_updates, self.right.nr_op_updates),
|
||||
);
|
||||
data.insert(
|
||||
"nrOpUpdateValuesCopied",
|
||||
Row::from_u64(
|
||||
self.left.nr_op_update_values_copied,
|
||||
self.right.nr_op_update_values_copied,
|
||||
),
|
||||
);
|
||||
data.insert(
|
||||
"nrThunks",
|
||||
Row::from_u64(self.left.nr_thunks, self.right.nr_thunks),
|
||||
);
|
||||
data.insert(
|
||||
"nrAvoided",
|
||||
Row::from_u64(self.left.nr_avoided, self.right.nr_avoided),
|
||||
);
|
||||
data.insert(
|
||||
"nrLookups",
|
||||
Row::from_u64(self.left.nr_lookups, self.right.nr_lookups),
|
||||
);
|
||||
data.insert(
|
||||
"nrPrimOpCalls",
|
||||
Row::from_u64(self.left.nr_prim_op_calls, self.right.nr_prim_op_calls),
|
||||
);
|
||||
data.insert(
|
||||
"nrFunctionCalls",
|
||||
Row::from_u64(self.left.nr_function_calls, self.right.nr_function_calls),
|
||||
);
|
||||
data.insert(
|
||||
"gc-heapSize",
|
||||
Row::from_u64(self.left.gc.heap_size, self.right.gc.heap_size),
|
||||
);
|
||||
data.insert(
|
||||
"gc-totalBytes",
|
||||
Row::from_u64(self.left.gc.total_bytes, self.right.gc.total_bytes),
|
||||
);
|
||||
|
||||
let (keylen, beforelen, afterlen, difflen, diff_pctlen): (
|
||||
usize,
|
||||
usize,
|
||||
usize,
|
||||
usize,
|
||||
usize,
|
||||
) = data.iter().fold(
|
||||
(0, 0, 0, 0, 0),
|
||||
|(keylen, before, after, diff, diff_pct), (key, row)| {
|
||||
(
|
||||
std::cmp::max(keylen, key.chars().count()),
|
||||
std::cmp::max(before, row.before.chars().count()),
|
||||
std::cmp::max(after, row.after.chars().count()),
|
||||
std::cmp::max(diff, row.diff.chars().count()),
|
||||
std::cmp::max(diff_pct, row.diff_pct.chars().count()),
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
let mut keys = data.keys().cloned().collect::<Vec<&str>>();
|
||||
keys.sort();
|
||||
|
||||
let rows = keys
|
||||
.into_iter()
|
||||
.map(|key| {
|
||||
let row = &data[&key];
|
||||
format!("| {key:<keywidth$} | {before:>beforewidth$} | {after:>afterwidth$} | {diff:<diffwidth$} | {diff_pct:>diff_pctwidth$} |",
|
||||
key=format!("**{}**", key), keywidth=(keylen + 4),
|
||||
before=row.before, beforewidth=beforelen,
|
||||
after=row.after, afterwidth=afterlen,
|
||||
diff=row.diff, diffwidth=difflen,
|
||||
diff_pct=row.diff_pct, diff_pctwidth=diff_pctlen)
|
||||
})
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
let header = format!(
|
||||
"
|
||||
|{key:^keywidth$}|{before:^beforewidth$}|{after:^afterwidth$}|{diff:^diffwidth$}|{diff_pct:^diff_pctwidth$}|
|
||||
|{keydash:-<keywidth$}|{beforedash:->beforewidth$}|{afterdash:->afterwidth$}|{diffdash:-<diffwidth$}|{diff_pctdash:->diff_pctwidth$}|
|
||||
",
|
||||
key="stat", keywidth=(keylen + 6),
|
||||
before="before", beforewidth=(beforelen + 2),
|
||||
after="after", afterwidth=(afterlen + 2),
|
||||
diff="Δ", diffwidth=(difflen + 2),
|
||||
diff_pct="Δ%", diff_pctwidth=(diff_pctlen + 2),
|
||||
keydash=":", beforedash=":", afterdash=":", diffdash=":", diff_pctdash=":"
|
||||
);
|
||||
|
||||
format!("{}\n{}", header.trim(), rows.join("\n"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::EvaluationStats;
|
||||
use super::EvaluationStatsDiff;
|
||||
use serde_json;
|
||||
|
||||
const EXAMPLE: &'static str = r#"
|
||||
{
|
||||
"cpuTime": 135.2,
|
||||
"envs": {
|
||||
"number": 130714125,
|
||||
"elements": 183953876,
|
||||
"bytes": 3563057008
|
||||
},
|
||||
"list": {
|
||||
"elements": 207421516,
|
||||
"bytes": 1659372128,
|
||||
"concats": 7194150
|
||||
},
|
||||
"values": {
|
||||
"number": 260454370,
|
||||
"bytes": 6250904880
|
||||
},
|
||||
"symbols": {
|
||||
"number": 372918,
|
||||
"bytes": 16324262
|
||||
},
|
||||
"sets": {
|
||||
"number": 27310541,
|
||||
"bytes": 7134676648,
|
||||
"elements": 288174680
|
||||
},
|
||||
"sizes": {
|
||||
"Env": 16,
|
||||
"Value": 24,
|
||||
"Bindings": 8,
|
||||
"Attr": 24
|
||||
},
|
||||
"nrOpUpdates": 11883339,
|
||||
"nrOpUpdateValuesCopied": 208834564,
|
||||
"nrThunks": 173325665,
|
||||
"nrAvoided": 177840681,
|
||||
"nrLookups": 75292052,
|
||||
"nrPrimOpCalls": 85571252,
|
||||
"nrFunctionCalls": 115193164,
|
||||
"gc": {
|
||||
"heapSize": 12104687616,
|
||||
"totalBytes": 24191819392
|
||||
}
|
||||
}
|
||||
"#;
|
||||
|
||||
const EXAMPLE2: &'static str = r#"
|
||||
{
|
||||
"cpuTime": 132.897,
|
||||
"envs": {
|
||||
"number": 124766593,
|
||||
"elements": 177627124,
|
||||
"bytes": 3417282480
|
||||
},
|
||||
"list": {
|
||||
"elements": 204449868,
|
||||
"bytes": 1635598944,
|
||||
"concats": 6988658
|
||||
},
|
||||
"values": {
|
||||
"number": 244542804,
|
||||
"bytes": 5869027296
|
||||
},
|
||||
"symbols": {
|
||||
"number": 372917,
|
||||
"bytes": 16324250
|
||||
},
|
||||
"sets": {
|
||||
"number": 27307373,
|
||||
"bytes": 7133945368,
|
||||
"elements": 288145266
|
||||
},
|
||||
"sizes": {
|
||||
"Env": 16,
|
||||
"Value": 24,
|
||||
"Bindings": 8,
|
||||
"Attr": 24
|
||||
},
|
||||
"nrOpUpdates": 11881928,
|
||||
"nrOpUpdateValuesCopied": 208814478,
|
||||
"nrThunks": 167655588,
|
||||
"nrAvoided": 170493166,
|
||||
"nrLookups": 75275349,
|
||||
"nrPrimOpCalls": 80373629,
|
||||
"nrFunctionCalls": 109822957,
|
||||
"gc": {
|
||||
"heapSize": 11433721856,
|
||||
"totalBytes": 23468008832
|
||||
}
|
||||
}
|
||||
"#;
|
||||
|
||||
#[test]
|
||||
fn verify_load() {
|
||||
let load: EvaluationStats = serde_json::from_str(EXAMPLE).unwrap();
|
||||
|
||||
assert_eq!(load.cpu_time, 135.2);
|
||||
assert_eq!(load.envs.number, 130714125);
|
||||
assert_eq!(load.envs.elements, 183953876);
|
||||
assert_eq!(load.envs.bytes, 3563057008);
|
||||
|
||||
assert_eq!(load.list.elements, 207421516);
|
||||
assert_eq!(load.list.bytes, 1659372128);
|
||||
assert_eq!(load.list.concats, 7194150);
|
||||
|
||||
assert_eq!(load.values.number, 260454370);
|
||||
assert_eq!(load.values.bytes, 6250904880);
|
||||
|
||||
assert_eq!(load.symbols.number, 372918);
|
||||
assert_eq!(load.symbols.bytes, 16324262);
|
||||
|
||||
assert_eq!(load.sets.number, 27310541);
|
||||
assert_eq!(load.sets.bytes, 7134676648);
|
||||
assert_eq!(load.sets.elements, 288174680);
|
||||
|
||||
assert_eq!(load.sizes.env, 16);
|
||||
assert_eq!(load.sizes.value, 24);
|
||||
assert_eq!(load.sizes.bindings, 8);
|
||||
assert_eq!(load.sizes.attr, 24);
|
||||
|
||||
assert_eq!(load.nr_op_updates, 11883339);
|
||||
assert_eq!(load.nr_op_update_values_copied, 208834564);
|
||||
assert_eq!(load.nr_thunks, 173325665);
|
||||
assert_eq!(load.nr_avoided, 177840681);
|
||||
assert_eq!(load.nr_lookups, 75292052);
|
||||
assert_eq!(load.nr_prim_op_calls, 85571252);
|
||||
assert_eq!(load.nr_function_calls, 115193164);
|
||||
|
||||
assert_eq!(load.gc.heap_size, 12104687616);
|
||||
assert_eq!(load.gc.total_bytes, 24191819392);
|
||||
}
|
||||
|
||||
fn diff_text(left: &str, right: &str) {
|
||||
println!("left:\n{}", left);
|
||||
println!("right:\n{}", right);
|
||||
|
||||
let lines = left.split("\n").zip(right.split("\n"));
|
||||
|
||||
for (idx, (linea, lineb)) in lines.enumerate() {
|
||||
assert_eq!(linea, lineb, "Line {}", idx);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn markdown() {
|
||||
let left: EvaluationStats = serde_json::from_str(EXAMPLE).unwrap();
|
||||
let right: EvaluationStats = serde_json::from_str(EXAMPLE2).unwrap();
|
||||
|
||||
diff_text(
|
||||
&EvaluationStatsDiff::compare(&left, &right).markdown(),
|
||||
r#"
|
||||
| stat | before | after | Δ | Δ% |
|
||||
|:---------------------------|---------------:|---------------:|:--------------|-------:|
|
||||
| **cpuTime** | 135.20 | 132.90 | 🡖 2.30 | -1.70% |
|
||||
| **envs-bytes** | 3,563,057,008 | 3,417,282,480 | 🡖 145,774,528 | -4.09% |
|
||||
| **envs-elements** | 183,953,876 | 177,627,124 | 🡖 6,326,752 | -3.44% |
|
||||
| **envs-number** | 130,714,125 | 124,766,593 | 🡖 5,947,532 | -4.55% |
|
||||
| **gc-heapSize** | 12,104,687,616 | 11,433,721,856 | 🡖 670,965,760 | -5.54% |
|
||||
| **gc-totalBytes** | 24,191,819,392 | 23,468,008,832 | 🡖 723,810,560 | -2.99% |
|
||||
| **list-bytes** | 1,659,372,128 | 1,635,598,944 | 🡖 23,773,184 | -1.43% |
|
||||
| **list-concats** | 7,194,150 | 6,988,658 | 🡖 205,492 | -2.86% |
|
||||
| **list-elements** | 207,421,516 | 204,449,868 | 🡖 2,971,648 | -1.43% |
|
||||
| **nrAvoided** | 177,840,681 | 170,493,166 | 🡖 7,347,515 | -4.13% |
|
||||
| **nrFunctionCalls** | 115,193,164 | 109,822,957 | 🡖 5,370,207 | -4.66% |
|
||||
| **nrLookups** | 75,292,052 | 75,275,349 | 🡖 16,703 | -0.02% |
|
||||
| **nrOpUpdateValuesCopied** | 208,834,564 | 208,814,478 | 🡖 20,086 | -0.01% |
|
||||
| **nrOpUpdates** | 11,883,339 | 11,881,928 | 🡖 1,411 | -0.01% |
|
||||
| **nrPrimOpCalls** | 85,571,252 | 80,373,629 | 🡖 5,197,623 | -6.07% |
|
||||
| **nrThunks** | 173,325,665 | 167,655,588 | 🡖 5,670,077 | -3.27% |
|
||||
| **sets-bytes** | 7,134,676,648 | 7,133,945,368 | 🡖 731,280 | -0.01% |
|
||||
| **sets-elements** | 288,174,680 | 288,145,266 | 🡖 29,414 | -0.01% |
|
||||
| **sets-number** | 27,310,541 | 27,307,373 | 🡖 3,168 | -0.01% |
|
||||
| **sizes-Attr** | 24 | 24 | 0 | |
|
||||
| **sizes-Bindings** | 8 | 8 | 0 | |
|
||||
| **sizes-Env** | 16 | 16 | 0 | |
|
||||
| **sizes-Value** | 24 | 24 | 0 | |
|
||||
| **symbols-bytes** | 16,324,262 | 16,324,250 | 🡖 12 | -0.00% |
|
||||
| **symbols-number** | 372,918 | 372,917 | 🡖 1 | -0.00% |
|
||||
| **values-bytes** | 6,250,904,880 | 5,869,027,296 | 🡖 381,877,584 | -6.11% |
|
||||
| **values-number** | 260,454,370 | 244,542,804 | 🡖 15,911,566 | -6.11% |
|
||||
"#
|
||||
.trim_start(),
|
||||
);
|
||||
}
|
||||
}
|
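nixstats.rs deserializes the JSON that Nix emits on stderr when NIX_SHOW_STATS=1 is set, using serde rename attributes for the camelCase keys. A reduced, standalone sketch of the same deserialization covering only two of the fields (serde_derive and serde_json as declared in Cargo.toml; the sample values come from the EXAMPLE fixture above):

    #[macro_use]
    extern crate serde_derive;
    extern crate serde;
    extern crate serde_json;

    // Reduced stand-in for nixstats::EvaluationStats, two fields only.
    #[derive(Deserialize, Debug)]
    struct MiniStats {
        #[serde(rename = "cpuTime")]
        cpu_time: f32,
        gc: Gc,
    }

    #[derive(Deserialize, Debug)]
    struct Gc {
        #[serde(rename = "heapSize")]
        heap_size: u64,
        #[serde(rename = "totalBytes")]
        total_bytes: u64,
    }

    fn main() {
        let raw = r#"{"cpuTime": 135.2, "gc": {"heapSize": 12104687616, "totalBytes": 24191819392}}"#;
        let stats: MiniStats = serde_json::from_str(raw).expect("stats should parse");
        println!("cpu: {}s, gc total: {} bytes", stats.cpu_time, stats.gc.total_bytes);
    }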
|
@ -1,66 +1,57 @@
|
|||
extern crate amqp;
|
||||
extern crate env_logger;
|
||||
|
||||
use crate::nixenv::Error as NixEnvError;
|
||||
use crate::nixenv::HydraNixEnv;
|
||||
use crate::nixstats::{EvaluationStats, EvaluationStatsDiff};
|
||||
use ofborg::nix;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fs;
|
||||
use std::fs::File;
|
||||
use std::io::BufRead;
|
||||
use std::io::BufReader;
|
||||
use std::io::Write;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct OutPathDiff {
|
||||
calculator: OutPaths,
|
||||
pub original: Option<PackageOutPaths>,
|
||||
pub current: Option<PackageOutPaths>,
|
||||
calculator: HydraNixEnv,
|
||||
pub original: Option<(PackageOutPaths, EvaluationStats)>,
|
||||
pub current: Option<(PackageOutPaths, EvaluationStats)>,
|
||||
}
|
||||
|
||||
impl OutPathDiff {
|
||||
pub fn new(nix: nix::Nix, path: PathBuf) -> OutPathDiff {
|
||||
OutPathDiff {
|
||||
calculator: OutPaths::new(nix, path, false),
|
||||
calculator: HydraNixEnv::new(nix, path, false),
|
||||
original: None,
|
||||
current: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_before(&mut self) -> Result<bool, File> {
|
||||
let x = self.run();
|
||||
match x {
|
||||
Ok(f) => {
|
||||
self.original = Some(f);
|
||||
Ok(true)
|
||||
}
|
||||
Err(e) => {
|
||||
info!("Failed to find Before list");
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
pub fn find_before(&mut self) -> Result<(), NixEnvError> {
|
||||
self.original = Some(self.run()?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn find_after(&mut self) -> Result<bool, File> {
|
||||
if self.original == None {
|
||||
pub fn find_after(&mut self) -> Result<(), NixEnvError> {
|
||||
if self.original.is_none() {
|
||||
debug!("Before is None, not bothering with After");
|
||||
return Ok(false);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let x = self.run();
|
||||
match x {
|
||||
Ok(f) => {
|
||||
self.current = Some(f);
|
||||
Ok(true)
|
||||
}
|
||||
Err(e) => {
|
||||
info!("Failed to find After list");
|
||||
Err(e)
|
||||
self.current = Some(self.run()?);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn performance_diff(&self) -> Option<EvaluationStatsDiff> {
|
||||
if let Some((_, ref cur)) = self.current {
|
||||
if let Some((_, ref orig)) = self.original {
|
||||
Some(EvaluationStatsDiff::compare(orig, cur))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn package_diff(&self) -> Option<(Vec<PackageArch>, Vec<PackageArch>)> {
|
||||
if let Some(ref cur) = self.current {
|
||||
if let Some(ref orig) = self.original {
|
||||
if let Some((ref cur, _)) = self.current {
|
||||
if let Some((ref orig, _)) = self.original {
|
||||
let orig_set: HashSet<&PackageArch> = orig.keys().collect();
|
||||
let cur_set: HashSet<&PackageArch> = cur.keys().collect();
|
||||
|
||||
|
@ -84,8 +75,8 @@ impl OutPathDiff {
|
|||
pub fn calculate_rebuild(&self) -> Option<Vec<PackageArch>> {
|
||||
let mut rebuild: Vec<PackageArch> = vec![];
|
||||
|
||||
if let Some(ref cur) = self.current {
|
||||
if let Some(ref orig) = self.original {
|
||||
if let Some((ref cur, _)) = self.current {
|
||||
if let Some((ref orig, _)) = self.original {
|
||||
for key in cur.keys() {
|
||||
trace!("Checking out {:?}", key);
|
||||
if cur.get(key) != orig.get(key) {
|
||||
|
@ -103,12 +94,12 @@ impl OutPathDiff {
|
|||
None
|
||||
}
|
||||
|
||||
fn run(&mut self) -> Result<PackageOutPaths, File> {
|
||||
self.calculator.find()
|
||||
fn run(&mut self) -> Result<(PackageOutPaths, EvaluationStats), NixEnvError> {
|
||||
self.calculator.execute()
|
||||
}
|
||||
}
|
||||
|
||||
type PackageOutPaths = HashMap<PackageArch, OutPath>;
|
||||
pub type PackageOutPaths = HashMap<PackageArch, OutPath>;
|
||||
|
||||
#[derive(Debug, PartialEq, Hash, Eq, Clone)]
|
||||
pub struct PackageArch {
|
||||
|
@ -119,76 +110,7 @@ type Package = String;
|
|||
type Architecture = String;
|
||||
type OutPath = String;
|
||||
|
||||
pub struct OutPaths {
|
||||
path: PathBuf,
|
||||
nix: nix::Nix,
|
||||
check_meta: bool,
|
||||
}
|
||||
|
||||
impl OutPaths {
|
||||
pub fn new(nix: nix::Nix, path: PathBuf, check_meta: bool) -> OutPaths {
|
||||
OutPaths {
|
||||
nix,
|
||||
path,
|
||||
check_meta,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find(&self) -> Result<PackageOutPaths, File> {
|
||||
self.run()
|
||||
}
|
||||
|
||||
fn run(&self) -> Result<PackageOutPaths, File> {
|
||||
self.place_nix();
|
||||
let ret = self.execute();
|
||||
self.remove_nix();
|
||||
|
||||
match ret {
|
||||
Ok(file) => Ok(parse_lines(&mut BufReader::new(file))),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
fn place_nix(&self) {
|
||||
let mut file = File::create(self.nix_path()).expect("Failed to create nix out path check");
|
||||
file.write_all(include_bytes!("outpaths.nix"))
|
||||
.expect("Failed to place outpaths.nix");
|
||||
}
|
||||
|
||||
fn remove_nix(&self) {
|
||||
fs::remove_file(self.nix_path()).expect("Failed to delete outpaths.nix");
|
||||
}
|
||||
|
||||
fn nix_path(&self) -> PathBuf {
|
||||
let mut dest = self.path.clone();
|
||||
dest.push(".gc-of-borg-outpaths.nix");
|
||||
|
||||
dest
|
||||
}
|
||||
|
||||
fn execute(&self) -> Result<File, File> {
|
||||
let check_meta: String = if self.check_meta {
|
||||
String::from("true")
|
||||
} else {
|
||||
String::from("false")
|
||||
};
|
||||
|
||||
self.nix.safely(
|
||||
&nix::Operation::QueryPackagesOutputs,
|
||||
&self.path,
|
||||
vec![
|
||||
String::from("-f"),
|
||||
String::from(".gc-of-borg-outpaths.nix"),
|
||||
String::from("--arg"),
|
||||
String::from("checkMeta"),
|
||||
check_meta,
|
||||
],
|
||||
true,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_lines(data: &mut BufRead) -> PackageOutPaths {
|
||||
pub fn parse_lines(data: &mut BufRead) -> PackageOutPaths {
|
||||
data.lines()
|
||||
.filter_map(|line| match line {
|
||||
Ok(line) => Some(line),
|
||||
|
@@ -1,9 +1,8 @@
use ofborg::checkout::CachedProjectCo;
use ofborg::commitstatus::CommitStatus;
use ofborg::evalchecker::EvalChecker;
use ofborg::message::buildjob::BuildJob;
use std::path::Path;
use tasks::eval::{EvaluationStrategy, StepResult};
use tasks::eval::{EvaluationComplete, EvaluationStrategy, StepResult};

#[derive(Default)]
pub struct GenericStrategy {}
@@ -40,7 +39,7 @@ impl EvaluationStrategy for GenericStrategy {
&mut self,
_co: &Path,
_status: &mut CommitStatus,
) -> StepResult<Vec<BuildJob>> {
Ok(vec![])
) -> StepResult<EvaluationComplete> {
Ok(Default::default())
}
}
@@ -4,6 +4,7 @@ mod nixpkgs;
pub use self::nixpkgs::NixpkgsStrategy;
mod generic;
pub use self::generic::GenericStrategy;
use hubcaps::checks::CheckRunOptions;
use ofborg::checkout::CachedProjectCo;
use ofborg::commitstatus::CommitStatus;
use ofborg::evalchecker::EvalChecker;
@@ -12,6 +13,7 @@ use std::path::Path;

pub trait EvaluationStrategy {
fn pre_clone(&mut self) -> StepResult<()>;

fn on_target_branch(&mut self, co: &Path, status: &mut CommitStatus) -> StepResult<()>;
fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()>;
fn merge_conflict(&mut self);
@@ -21,11 +23,17 @@ pub trait EvaluationStrategy {
&mut self,
co: &Path,
status: &mut CommitStatus,
) -> StepResult<Vec<BuildJob>>;
) -> StepResult<EvaluationComplete>;
}

pub type StepResult<T> = Result<T, Error>;

#[derive(Default)]
pub struct EvaluationComplete {
pub builds: Vec<BuildJob>,
pub checks: Vec<CheckRunOptions>,
}

#[derive(Debug)]
pub enum Error {
Fail(String),
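With this change all_evaluations_passed returns an EvaluationComplete carrying both build jobs and GitHub check runs, and deriving Default is what lets GenericStrategy answer with Ok(Default::default()). A small self-contained sketch of that shape, using local stand-in types rather than the real BuildJob and CheckRunOptions:

    // Local stand-ins for BuildJob / CheckRunOptions, only to show the shape.
    #[derive(Debug, Default)]
    struct BuildJob(String);
    #[derive(Debug, Default)]
    struct CheckRunOptions(String);

    #[derive(Debug, Default)]
    struct EvaluationComplete {
        builds: Vec<BuildJob>,
        checks: Vec<CheckRunOptions>,
    }

    fn empty_result() -> Result<EvaluationComplete, String> {
        // What GenericStrategy does: nothing to build, nothing to report.
        Ok(Default::default())
    }

    fn main() {
        let complete = empty_result().unwrap();
        println!("{} builds, {} checks", complete.builds.len(), complete.checks.len());
    }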
|
|
@ -1,5 +1,8 @@
|
|||
use crate::maintainers;
|
||||
use crate::maintainers::ImpactedMaintainers;
|
||||
use crate::nixenv::HydraNixEnv;
|
||||
use chrono::Utc;
|
||||
use hubcaps::checks::{CheckRunOptions, CheckRunState, Conclusion, Output};
|
||||
use hubcaps::gists::Gists;
|
||||
use hubcaps::issues::{Issue, IssueRef};
|
||||
use hubcaps::repositories::Repository;
|
||||
|
@ -7,15 +10,16 @@ use ofborg::checkout::CachedProjectCo;
|
|||
use ofborg::commentparser::Subset;
|
||||
use ofborg::commitstatus::CommitStatus;
|
||||
use ofborg::evalchecker::EvalChecker;
|
||||
use ofborg::files::file_to_str;
|
||||
use ofborg::message::buildjob::BuildJob;
|
||||
use ofborg::message::evaluationjob::EvaluationJob;
|
||||
use ofborg::nix;
|
||||
use ofborg::nix::Nix;
|
||||
use ofborg::outpathdiff::{OutPathDiff, OutPaths, PackageArch};
|
||||
use ofborg::outpathdiff::{OutPathDiff, PackageArch};
|
||||
use ofborg::tagger::{MaintainerPRTagger, PathsTagger, RebuildTagger};
|
||||
use ofborg::tagger::{PkgsAddedRemovedTagger, StdenvTagger};
|
||||
use ofborg::tasks::eval::{stdenvs::Stdenvs, Error, EvaluationStrategy, StepResult};
|
||||
use ofborg::tasks::eval::{
|
||||
stdenvs::Stdenvs, Error, EvaluationComplete, EvaluationStrategy, StepResult,
|
||||
};
|
||||
use ofborg::tasks::evaluate::update_labels;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
@ -125,7 +129,7 @@ impl<'a> NixpkgsStrategy<'a> {
|
|||
fn check_outpaths_before(&mut self, dir: &Path) -> StepResult<()> {
|
||||
let mut rebuildsniff = OutPathDiff::new(self.nix.clone(), dir.to_path_buf());
|
||||
|
||||
if let Err(mut output) = rebuildsniff.find_before() {
|
||||
if let Err(err) = rebuildsniff.find_before() {
|
||||
/*
|
||||
self.events
|
||||
.notify(Event::TargetBranchFailsEvaluation(target_branch.clone()));
|
||||
|
@ -134,7 +138,7 @@ impl<'a> NixpkgsStrategy<'a> {
|
|||
Err(Error::FailWithGist(
|
||||
String::from("The branch this PR will merge in to does not evaluate, and so this PR cannot be checked."),
|
||||
String::from("Output path comparison"),
|
||||
file_to_str(&mut output),
|
||||
err.display(),
|
||||
))
|
||||
} else {
|
||||
self.outpath_diff = Some(rebuildsniff);
|
||||
|
@ -144,11 +148,11 @@ impl<'a> NixpkgsStrategy<'a> {
|
|||
|
||||
fn check_outpaths_after(&mut self) -> StepResult<()> {
|
||||
if let Some(ref mut rebuildsniff) = self.outpath_diff {
|
||||
if let Err(mut output) = rebuildsniff.find_after() {
|
||||
if let Err(mut err) = rebuildsniff.find_after() {
|
||||
Err(Error::FailWithGist(
|
||||
String::from("This PR breaks listing of package outputs after merging."),
|
||||
String::from("Output path comparison"),
|
||||
file_to_str(&mut output),
|
||||
err.display(),
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
|
@ -160,6 +164,34 @@ impl<'a> NixpkgsStrategy<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
fn performance_stats(&self) -> Vec<CheckRunOptions> {
|
||||
if let Some(ref rebuildsniff) = self.outpath_diff {
|
||||
if let Some(report) = rebuildsniff.performance_diff() {
|
||||
return vec![CheckRunOptions {
|
||||
name: "Evaluation Performance Report".to_owned(),
|
||||
actions: None,
|
||||
completed_at: Some(
|
||||
Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
|
||||
),
|
||||
started_at: None,
|
||||
conclusion: Some(Conclusion::Success),
|
||||
status: Some(CheckRunState::Completed),
|
||||
details_url: None,
|
||||
external_id: None,
|
||||
head_sha: self.job.pr.head_sha.clone(),
|
||||
output: Some(Output {
|
||||
title: "Evaluator Performance Report".to_string(),
|
||||
summary: "".to_string(),
|
||||
text: Some(report.markdown()),
|
||||
annotations: None,
|
||||
images: None,
|
||||
}),
|
||||
}];
|
||||
}
|
||||
}
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn update_new_package_labels(&self) {
|
||||
if let Some(ref rebuildsniff) = self.outpath_diff {
|
||||
if let Some((removed, added)) = rebuildsniff.package_diff() {
|
||||
|
@ -266,10 +298,11 @@ impl<'a> NixpkgsStrategy<'a> {
|
|||
);
|
||||
status.set(hubcaps::statuses::State::Pending);
|
||||
|
||||
let checker = OutPaths::new(self.nix.clone(), dir.to_path_buf(), true);
|
||||
match checker.find() {
|
||||
let nixenv = HydraNixEnv::new(self.nix.clone(), dir.to_path_buf(), true);
|
||||
match nixenv.execute() {
|
||||
Ok(pkgs) => {
|
||||
let mut try_build: Vec<String> = pkgs
|
||||
.0
|
||||
.keys()
|
||||
.map(|pkgarch| pkgarch.package.clone())
|
||||
.filter(|pkg| possibly_touched_packages.contains(&pkg))
|
||||
|
@ -299,12 +332,7 @@ impl<'a> NixpkgsStrategy<'a> {
|
|||
}
|
||||
}
|
||||
Err(mut out) => {
|
||||
status.set_url(make_gist(
|
||||
&self.gists,
|
||||
"Meta Check",
|
||||
None,
|
||||
file_to_str(&mut out),
|
||||
));
|
||||
status.set_url(make_gist(&self.gists, "Meta Check", None, out.display()));
|
||||
status.set(hubcaps::statuses::State::Failure);
|
||||
Err(Error::Fail(String::from(
|
||||
"Failed to validate package metadata.",
|
||||
|
@ -483,7 +511,7 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
|
|||
&mut self,
|
||||
dir: &Path,
|
||||
status: &mut CommitStatus,
|
||||
) -> StepResult<Vec<BuildJob>> {
|
||||
) -> StepResult<EvaluationComplete> {
|
||||
self.update_stdenv_labels();
|
||||
|
||||
status.set_with_description(
|
||||
|
@ -493,8 +521,10 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
|
|||
|
||||
self.update_new_package_labels();
|
||||
self.update_rebuild_labels(&dir, status);
|
||||
let checks = self.performance_stats();
|
||||
|
||||
self.check_meta_queue_builds(&dir)
|
||||
let builds = self.check_meta_queue_builds(&dir)?;
|
||||
Ok(EvaluationComplete { builds, checks })
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -4,11 +4,13 @@ extern crate env_logger;
|
|||
extern crate uuid;
|
||||
use amqp::protocol::basic::{BasicProperties, Deliver};
|
||||
use hubcaps;
|
||||
use hubcaps::checks::CheckRunOptions;
|
||||
use hubcaps::gists::Gists;
|
||||
use hubcaps::issues::Issue;
|
||||
use ofborg::acl::ACL;
|
||||
use ofborg::checkout;
|
||||
use ofborg::commitstatus::CommitStatus;
|
||||
use ofborg::config::GithubAppVendingMachine;
|
||||
use ofborg::files::file_to_str;
|
||||
use ofborg::message::{buildjob, evaluationjob};
|
||||
use ofborg::nix;
|
||||
|
@ -18,6 +20,7 @@ use ofborg::systems;
|
|||
use ofborg::worker;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::sync::RwLock;
|
||||
use std::time::Instant;
|
||||
use tasks::eval;
|
||||
use tasks::eval::StepResult;
|
||||
|
@ -26,6 +29,7 @@ pub struct EvaluationWorker<E> {
|
|||
cloner: checkout::CachedCloner,
|
||||
nix: nix::Nix,
|
||||
github: hubcaps::Github,
|
||||
github_vend: RwLock<GithubAppVendingMachine>,
|
||||
acl: ACL,
|
||||
identity: String,
|
||||
events: E,
|
||||
|
@ -33,10 +37,12 @@ pub struct EvaluationWorker<E> {
|
|||
}
|
||||
|
||||
impl<E: stats::SysEvents> EvaluationWorker<E> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
cloner: checkout::CachedCloner,
|
||||
nix: &nix::Nix,
|
||||
github: hubcaps::Github,
|
||||
github_vend: GithubAppVendingMachine,
|
||||
acl: ACL,
|
||||
identity: String,
|
||||
events: E,
|
||||
|
@ -46,6 +52,7 @@ impl<E: stats::SysEvents> EvaluationWorker<E> {
|
|||
cloner,
|
||||
nix: nix.without_limited_supported_systems(),
|
||||
github,
|
||||
github_vend: RwLock::new(github_vend),
|
||||
acl,
|
||||
identity,
|
||||
events,
|
||||
|
@ -106,9 +113,14 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
|
|||
}
|
||||
|
||||
fn consumer(&mut self, job: &evaluationjob::EvaluationJob) -> worker::Actions {
|
||||
let repo = self
|
||||
.github
|
||||
.repo(job.repo.owner.clone(), job.repo.name.clone());
|
||||
let mut vending_machine = self
|
||||
.github_vend
|
||||
.write()
|
||||
.expect("Failed to get write lock on github vending machine");
|
||||
let github_client = vending_machine
|
||||
.for_repo(&job.repo.owner, &job.repo.name)
|
||||
.expect("Failed to get a github client token");
|
||||
let repo = github_client.repo(job.repo.owner.clone(), job.repo.name.clone());
|
||||
let gists = self.github.gists();
|
||||
let pulls = repo.pulls();
|
||||
let pull = pulls.get(job.pr.number);
|
||||
|
@ -320,30 +332,9 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
|
|||
let ret = evaluation_strategy
|
||||
.all_evaluations_passed(&Path::new(&refpath), &mut overall_status);
|
||||
match ret {
|
||||
Ok(builds) => {
|
||||
info!(
|
||||
"Scheduling build jobs {:#?} on arches {:#?}",
|
||||
builds, auto_schedule_build_archs
|
||||
);
|
||||
for buildjob in builds {
|
||||
for arch in auto_schedule_build_archs.iter() {
|
||||
let (exchange, routingkey) = arch.as_build_destination();
|
||||
response.push(worker::publish_serde_action(
|
||||
exchange, routingkey, &buildjob,
|
||||
));
|
||||
}
|
||||
response.push(worker::publish_serde_action(
|
||||
Some("build-results".to_string()),
|
||||
None,
|
||||
&buildjob::QueuedBuildJobs {
|
||||
job: buildjob,
|
||||
architectures: auto_schedule_build_archs
|
||||
.iter()
|
||||
.map(|arch| arch.to_string())
|
||||
.collect(),
|
||||
},
|
||||
));
|
||||
}
|
||||
Ok(complete) => {
|
||||
send_check_statuses(complete.checks, &repo);
|
||||
response.extend(schedule_builds(complete.builds, auto_schedule_build_archs));
|
||||
}
|
||||
Err(e) => {
|
||||
info!("Failed after all the evaluations passed");
|
||||
|
@ -371,6 +362,47 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
|
|||
}
|
||||
}
|
||||
|
||||
fn send_check_statuses(checks: Vec<CheckRunOptions>, repo: &hubcaps::repositories::Repository) {
|
||||
for check in checks {
|
||||
match repo.checkruns().create(&check) {
|
||||
Ok(_) => info!("Sent check update"),
|
||||
Err(e) => info!("Failed to send check update: {:?}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn schedule_builds(
|
||||
builds: Vec<buildjob::BuildJob>,
|
||||
auto_schedule_build_archs: Vec<systems::System>,
|
||||
) -> Vec<worker::Action> {
|
||||
let mut response = vec![];
|
||||
info!(
|
||||
"Scheduling build jobs {:#?} on arches {:#?}",
|
||||
builds, auto_schedule_build_archs
|
||||
);
|
||||
for buildjob in builds {
|
||||
for arch in auto_schedule_build_archs.iter() {
|
||||
let (exchange, routingkey) = arch.as_build_destination();
|
||||
response.push(worker::publish_serde_action(
|
||||
exchange, routingkey, &buildjob,
|
||||
));
|
||||
}
|
||||
response.push(worker::publish_serde_action(
|
||||
Some("build-results".to_string()),
|
||||
None,
|
||||
&buildjob::QueuedBuildJobs {
|
||||
job: buildjob,
|
||||
architectures: auto_schedule_build_archs
|
||||
.iter()
|
||||
.map(|arch| arch.to_string())
|
||||
.collect(),
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
response
|
||||
}
|
||||
|
||||
pub fn make_gist<'a>(
|
||||
gists: &hubcaps::gists::Gists<'a>,
|
||||
name: &str,
|
||||
|
|