forked from the-distro/ofborg

commit 6912713eb5
Merge pull request #293 from NixOS/experiment/maintainer-ping

Experiment/maintainer ping
@@ -39,6 +39,7 @@ pub mod evalchecker;
pub mod files;
pub mod ghevent;
pub mod locks;
pub mod maintainers;
pub mod message;
pub mod nix;
pub mod notifyworker;
107 ofborg/src/maintainers.nix Normal file
@@ -0,0 +1,107 @@
{ changedattrsjson, changedpathsjson }:
let
  pkgs = import ./. {};

  changedattrs = builtins.fromJSON (builtins.readFile changedattrsjson);
  changedpaths = builtins.fromJSON (builtins.readFile changedpathsjson);

  anyMatchingFile = filename:
    let
      matching = builtins.filter
        (changed: pkgs.lib.strings.hasSuffix changed filename)
        changedpaths;
    in (builtins.length matching) > 0;

  anyMatchingFiles = files:
    (builtins.length (builtins.filter anyMatchingFile files)) > 0;

  enrichedAttrs = builtins.map
    (path: {
      path = path;
      name = builtins.concatStringsSep "." path;
    })
    changedattrs;

  validPackageAttributes = builtins.filter
    (pkg:
      if (pkgs.lib.attrsets.hasAttrByPath pkg.path pkgs)
      then (if (builtins.tryEval (pkgs.lib.attrsets.attrByPath pkg.path null pkgs)).success
        then true
        else builtins.trace "Failed to access ${pkg.name} even though it exists" false)
      else builtins.trace "Failed to locate ${pkg.name}." false
    )
    enrichedAttrs;

  attrsWithPackages = builtins.map
    (pkg: pkg // { package = pkgs.lib.attrsets.attrByPath pkg.path null pkgs; })
    validPackageAttributes;

  attrsWithMaintainers = builtins.map
    (pkg: pkg // { maintainers = (pkg.package.meta or {}).maintainers or []; })
    attrsWithPackages;

  attrsWeCanPing = builtins.filter
    (pkg: if (builtins.length pkg.maintainers) > 0
      then true
      else builtins.trace "Package has no maintainers: ${pkg.name}" false
    )
    attrsWithMaintainers;

  relevantFilenames = drv:
    (pkgs.lib.lists.unique
      (builtins.map
        (pos: pos.file)
        (builtins.filter (x: x != null)
          [
            (builtins.unsafeGetAttrPos "maintainers" (drv.meta or {}))
            (builtins.unsafeGetAttrPos "src" drv)
            # broken because name is always set by stdenv:
            # # A hack to make `nix-env -qa` and `nix search` ignore broken packages.
            # # TODO(@oxij): remove this assert when something like NixOS/nix#1771 gets merged into nix.
            # name = assert validity.handled; name + lib.optionalString
            #(builtins.unsafeGetAttrPos "name" drv)
            (builtins.unsafeGetAttrPos "pname" drv)
            (builtins.unsafeGetAttrPos "version" drv)
          ]
        )));

  attrsWithFilenames = builtins.map
    (pkg: pkg // { filenames = relevantFilenames pkg.package; })
    attrsWithMaintainers;

  attrsWithModifiedFiles = builtins.filter
    (pkg: anyMatchingFiles pkg.filenames)
    attrsWithFilenames;

  listToPing = pkgs.lib.lists.flatten
    (builtins.map
      (pkg:
        builtins.map (maintainer: {
          handle = maintainer.github;
          packageName = pkg.name;
          dueToFiles = pkg.filenames;
        })
        pkg.maintainers
      )
      attrsWithModifiedFiles);

  byMaintainer = pkgs.lib.lists.foldr
    (ping: collector: collector // { "${ping.handle}" = [ { inherit (ping) packageName dueToFiles; } ] ++ (collector."${ping.handle}" or []); })
    {}
    listToPing;

  textForPackages = packages:
    pkgs.lib.strings.concatStringsSep ", " (
      builtins.map (pkg: pkg.packageName)
      packages);

  textPerMaintainer = pkgs.lib.attrsets.mapAttrs
    (maintainer: packages: "- @${maintainer} for ${textForPackages packages}")
    byMaintainer;

  packagesPerMaintainer = pkgs.lib.attrsets.mapAttrs
    (maintainer: packages:
      builtins.map (pkg: pkg.packageName)
      packages)
    byMaintainer;
in packagesPerMaintainer
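For orientation before the Rust half below: maintainers.nix is the whole matching pipeline, and it returns an attribute set keyed by GitHub handle. A rough sketch of evaluating it by hand follows; the JSON file names are hypothetical (in production the Rust side writes temporary files and passes them via --argstr), and the expression has to be evaluated from the root of the nixpkgs checkout so that `import ./. {}` resolves.

# Hand-evaluation sketch, assuming the two hypothetical JSON files exist:
import ./maintainers.nix {
  changedattrsjson = ./changed-attrs.json;   # e.g. [["foo","bar","packageA"]]
  changedpathsjson = ./changed-paths.json;   # e.g. ["pkgs/foo/bar/default.nix"]
}
# With inputs matching the test fixture further down, this evaluates to
# roughly: { test = [ "foo.bar.packageA" ]; }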
172 ofborg/src/maintainers.rs Normal file
@@ -0,0 +1,172 @@
use ofborg::nix::Nix;
use std::collections::HashMap;
use std::io::Write;
use std::path::Path;
use tempfile::NamedTempFile;

#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct ImpactedMaintainers(HashMap<Maintainer, Vec<Package>>);
#[derive(Deserialize, Debug, Eq, PartialEq, Hash)]
struct Maintainer(String);
impl<'a> From<&'a str> for Maintainer {
    fn from(name: &'a str) -> Maintainer {
        Maintainer(name.to_owned())
    }
}
#[derive(Deserialize, Debug, Eq, PartialEq, Hash)]
struct Package(String);
impl<'a> From<&'a str> for Package {
    fn from(name: &'a str) -> Package {
        Package(name.to_owned())
    }
}

#[derive(Debug)]
pub enum CalculationError {
    DeserializeError(serde_json::Error),
    Io(std::io::Error),
    Utf8(std::string::FromUtf8Error),
}
impl From<serde_json::Error> for CalculationError {
    fn from(e: serde_json::Error) -> CalculationError {
        CalculationError::DeserializeError(e)
    }
}
impl From<std::io::Error> for CalculationError {
    fn from(e: std::io::Error) -> CalculationError {
        CalculationError::Io(e)
    }
}
impl From<std::string::FromUtf8Error> for CalculationError {
    fn from(e: std::string::FromUtf8Error) -> CalculationError {
        CalculationError::Utf8(e)
    }
}

impl ImpactedMaintainers {
    pub fn calculate(
        nix: &Nix,
        checkout: &Path,
        paths: &[String],
        attributes: &[Vec<&str>],
    ) -> Result<ImpactedMaintainers, CalculationError> {
        let mut path_file = NamedTempFile::new()?;
        let pathstr = serde_json::to_string(&paths)?;
        write!(path_file, "{}", pathstr)?;

        let mut attr_file = NamedTempFile::new()?;
        let attrstr = serde_json::to_string(&attributes)?;
        write!(attr_file, "{}", attrstr)?;

        let mut argstrs: HashMap<&str, &str> = HashMap::new();
        argstrs.insert("changedattrsjson", attr_file.path().to_str().unwrap());
        argstrs.insert("changedpathsjson", path_file.path().to_str().unwrap());

        let mut cmd = nix.safely_evaluate_expr_cmd(
            &checkout,
            include_str!("./maintainers.nix"),
            argstrs,
            &[path_file.path(), attr_file.path()],
        );

        let ret = cmd.output()?;

        Ok(serde_json::from_str(&String::from_utf8(ret.stdout)?)?)
    }
}

impl std::fmt::Display for ImpactedMaintainers {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let d = self
            .0
            .iter()
            .map(|(maintainer, packages)| {
                format!(
                    "{}: {}",
                    maintainer.0,
                    packages
                        .iter()
                        .map(|pkg| pkg.0.clone())
                        .collect::<Vec<String>>()
                        .join(", ")
                )
            })
            .collect::<Vec<String>>()
            .join("\n");
        write!(f, "{}", d)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use checkout::cached_cloner;
    use clone::GitClonable;
    use ofborg::test_scratch::TestScratch;
    use std::env;
    use std::ffi::OsStr;
    use std::path::{Path, PathBuf};
    use std::process::Command;
    use std::process::Stdio;

    fn tpath(component: &str) -> PathBuf {
        return Path::new(env!("CARGO_MANIFEST_DIR")).join(component);
    }

    fn make_pr_repo(bare: &Path, co: &Path) -> String {
        let output = Command::new("./make-maintainer-pr.sh")
            .current_dir(tpath("./test-srcs"))
            .arg(bare)
            .arg(co)
            .stdout(Stdio::piped())
            .output()
            .expect("building the test PR failed");

        let stderr =
            String::from_utf8(output.stderr).unwrap_or_else(|err| format!("warning: {}", err));
        println!("{}", stderr);

        let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
        return hash.trim().to_owned();
    }

    #[test]
    fn example() {
        let workingdir = TestScratch::new_dir("test-maintainers-example");

        let bare = TestScratch::new_dir("test-maintainers-example-bare");
        let mk_co = TestScratch::new_dir("test-maintainers-example-co");
        let hash = make_pr_repo(&bare.path(), &mk_co.path());

        let attributes = vec![vec!["foo", "bar", "packageA"]];

        let cloner = cached_cloner(&workingdir.path());
        let project = cloner.project("maintainer-test", bare.string());

        let working_co = project
            .clone_for("testing-maintainer-list".to_owned(), "123".to_owned())
            .expect("clone should work");

        working_co
            .checkout_origin_ref(&OsStr::new("master"))
            .unwrap();

        let paths = working_co.files_changed_from_head(&hash).unwrap();

        working_co.checkout_ref(&OsStr::new(&hash)).unwrap();

        let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
        let nix = Nix::new("x86_64-linux".to_owned(), remote, 1800, None);

        let parsed =
            ImpactedMaintainers::calculate(&nix, &working_co.clone_to(), &paths, &attributes);

        let mut expect = ImpactedMaintainers(HashMap::new());
        expect.0.insert(
            Maintainer::from("test"),
            vec![Package::from("foo.bar.packageA")],
        );

        assert_eq!(parsed.unwrap(), expect);
    }
}
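The two halves communicate through plain JSON: calculate serializes its paths and attributes arguments with serde_json into two temporary files, and maintainers.nix reads them back with builtins.readFile plus builtins.fromJSON. A small sketch of what the Nix side ends up seeing (the changed path is illustrative):

# Values as decoded by maintainers.nix for attributes = vec![vec!["foo", "bar", "packageA"]]
# and a single hypothetical changed path:
let
  changedattrs = builtins.fromJSON ''[["foo","bar","packageA"]]'';
  changedpaths = builtins.fromJSON ''["pkgs/foo/bar/default.nix"]'';
in { inherit changedattrs changedpaths; }
# => { changedattrs = [ [ "foo" "bar" "packageA" ] ]; changedpaths = [ "pkgs/foo/bar/default.nix" ]; }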
@@ -1,5 +1,6 @@
use ofborg::asynccmd::{AsyncCmd, SpawnedAsyncCmd};
use ofborg::partition_result;
use std::collections::HashMap;
use std::env;
use std::fmt;
use std::fs::File;
@@ -13,6 +14,7 @@ use tempfile::tempfile;

#[derive(Clone, Debug)]
pub enum Operation {
    Evaluate,
    Instantiate,
    Build,
    QueryPackagesJSON,
@@ -24,6 +26,7 @@ pub enum Operation {
impl Operation {
    fn command(&self) -> Command {
        match *self {
            Operation::Evaluate => Command::new("nix-instantiate"),
            Operation::Instantiate => Command::new("nix-instantiate"),
            Operation::Build => Command::new("nix-build"),
            Operation::QueryPackagesJSON => Command::new("nix-env"),
@@ -53,6 +56,9 @@ impl Operation {
            Operation::NoOp { ref operation } => {
                operation.args(command);
            }
            Operation::Evaluate => {
                command.args(&["--eval", "--strict", "--json"]);
            }
            _ => (),
        };
    }
@@ -67,6 +73,7 @@ impl fmt::Display for Operation {
            Operation::QueryPackagesOutputs => write!(f, "nix-env -qaP --no-name --out-path"),
            Operation::NoOp { ref operation } => operation.fmt(f),
            Operation::Unknown { ref program } => write!(f, "{}", program),
            Operation::Evaluate => write!(f, "nix-instantiate --strict --json ..."),
        }
    }
}
@@ -157,7 +164,26 @@ impl Nix {
            attrargs.push(attr);
        }

        self.safe_command(&Operation::Instantiate, nixpkgs, attrargs)
        self.safe_command(&Operation::Instantiate, nixpkgs, attrargs, &[])
    }

    pub fn safely_evaluate_expr_cmd(
        &self,
        nixpkgs: &Path,
        expr: &str,
        argstrs: HashMap<&str, &str>,
        extra_paths: &[&Path],
    ) -> Command {
        let mut attrargs: Vec<String> = Vec::with_capacity(2 + (argstrs.len() * 3));
        attrargs.push("--expr".to_owned());
        attrargs.push(expr.to_owned());
        for (argname, argstr) in argstrs {
            attrargs.push(String::from("--argstr"));
            attrargs.push(argname.to_owned());
            attrargs.push(argstr.to_owned());
        }

        self.safe_command(&Operation::Evaluate, nixpkgs, attrargs, &extra_paths)
    }

    pub fn safely_build_attrs(
@@ -188,7 +214,7 @@ impl Nix {
            attrargs.push(attr);
        }

        self.safe_command(&Operation::Build, nixpkgs, attrargs)
        self.safe_command(&Operation::Build, nixpkgs, attrargs, &[])
    }

    pub fn safely(
@@ -198,7 +224,7 @@
        args: Vec<String>,
        keep_stdout: bool,
    ) -> Result<File, File> {
        self.run(self.safe_command(&op, nixpkgs, args), keep_stdout)
        self.run(self.safe_command(&op, nixpkgs, args, &[]), keep_stdout)
    }

    pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<File, File> {
@@ -228,8 +254,19 @@ impl Nix {
        }
    }

    pub fn safe_command(&self, op: &Operation, nixpkgs: &Path, args: Vec<String>) -> Command {
        let nixpath = format!("nixpkgs={}", nixpkgs.display());
    pub fn safe_command(
        &self,
        op: &Operation,
        nixpkgs: &Path,
        args: Vec<String>,
        safe_paths: &[&Path],
    ) -> Command {
        let nixpkgspath = format!("nixpkgs={}", nixpkgs.display());
        let mut nixpath: Vec<String> = safe_paths
            .iter()
            .map(|path| format!("{}", path.display()))
            .collect();
        nixpath.push(nixpkgspath);

        let mut command = op.command();
        op.args(&mut command);
@@ -237,7 +274,7 @@ impl Nix {
        command.env_clear();
        command.current_dir(nixpkgs);
        command.env("HOME", "/homeless-shelter");
        command.env("NIX_PATH", nixpath);
        command.env("NIX_PATH", nixpath.join(":"));
        command.env("NIX_REMOTE", &self.remote);

        if let Some(ref initial_heap_size) = self.initial_heap_size {
@@ -419,7 +456,12 @@ mod tests {
        assert_eq!(op.to_string(), "nix-build");

        let ret: Result<File, File> = nix.run(
            nix.safe_command(&op, build_path().as_path(), vec![String::from("--version")]),
            nix.safe_command(
                &op,
                build_path().as_path(),
                vec![String::from("--version")],
                &[],
            ),
            true,
        );

@@ -437,7 +479,12 @@ mod tests {
        assert_eq!(op.to_string(), "nix-instantiate");

        let ret: Result<File, File> = nix.run(
            nix.safe_command(&op, build_path().as_path(), vec![String::from("--version")]),
            nix.safe_command(
                &op,
                build_path().as_path(),
                vec![String::from("--version")],
                &[],
            ),
            true,
        );

@@ -451,7 +498,12 @@ mod tests {
        assert_eq!(op.to_string(), "nix-env -qa --json");

        let ret: Result<File, File> = nix.run(
            nix.safe_command(&op, build_path().as_path(), vec![String::from("--version")]),
            nix.safe_command(
                &op,
                build_path().as_path(),
                vec![String::from("--version")],
                &[],
            ),
            true,
        );

@@ -469,7 +521,12 @@ mod tests {
        assert_eq!(op.to_string(), "nix-env -qaP --no-name --out-path");

        let ret: Result<File, File> = nix.run(
            nix.safe_command(&op, build_path().as_path(), vec![String::from("--version")]),
            nix.safe_command(
                &op,
                build_path().as_path(),
                vec![String::from("--version")],
                &[],
            ),
            true,
        );

@@ -488,7 +545,7 @@ mod tests {
        let nix = nix();

        let ret: Result<File, File> = nix.run(
            nix.safe_command(&env_noop(), build_path().as_path(), vec![]),
            nix.safe_command(&env_noop(), build_path().as_path(), vec![], &[]),
            true,
        );

@@ -515,7 +572,7 @@ mod tests {
        );

        let ret: Result<File, File> = nix.run(
            nix.safe_command(&env_noop(), build_path().as_path(), vec![]),
            nix.safe_command(&env_noop(), build_path().as_path(), vec![], &[]),
            true,
        );

@@ -537,8 +594,10 @@ mod tests {
        let nix = nix();
        let op = noop(Operation::Build);

        let ret: Result<File, File> =
            nix.run(nix.safe_command(&op, build_path().as_path(), vec![]), true);
        let ret: Result<File, File> = nix.run(
            nix.safe_command(&op, build_path().as_path(), vec![], &[]),
            true,
        );

        assert_run(
            ret,
@@ -646,6 +705,23 @@ mod tests {
        assert_run(ret, Expect::Pass, vec!["-passes-instantiation.drv"]);
    }

    #[test]
    fn safely_evaluate_expr_success() {
        let nix = nix();

        let ret: Result<File, File> = nix.run(
            nix.safely_evaluate_expr_cmd(
                individual_eval_path().as_path(),
                r#"{ foo ? "bar" }: "The magic value is ${foo}""#,
                [("foo", "tux")].iter().cloned().collect(),
                &[],
            ),
            true,
        );

        assert_run(ret, Expect::Pass, vec!["The magic value is tux"]);
    }

    #[test]
    fn strict_sandboxing() {
        let ret: Result<File, File> = nix().safely_build_attrs(
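The only behavioural change in safe_command is how NIX_PATH is assembled: every entry in safe_paths is prepended as-is and the usual nixpkgs= entry goes last, all joined with ':'. Presumably this is what makes the temporary JSON files written by calculate readable during the evaluation. A sketch of the resulting value, with made-up paths:

# NIX_PATH for two extra safe_paths plus the checkout (illustrative values only):
"/tmp/.tmpAAA111:/tmp/.tmpBBB222:nixpkgs=/var/lib/ofborg/checkout"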
@@ -3,6 +3,7 @@ extern crate amqp;
extern crate env_logger;
extern crate uuid;

use crate::maintainers::ImpactedMaintainers;
use amqp::protocol::basic::{BasicProperties, Deliver};
use hubcaps;
use ofborg::acl::ACL;
@@ -74,7 +75,7 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
        }
    }

    fn tag_from_paths(&self, issue: &hubcaps::issues::IssueRef, paths: Vec<String>) {
    fn tag_from_paths(&self, issue: &hubcaps::issues::IssueRef, paths: &[String]) {
        let mut tagger = PathsTagger::new(self.tag_paths.clone());

        for path in paths {
@@ -238,11 +239,10 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
                .unwrap_or_else(|_| vec!["".to_owned()]),
        );

        self.tag_from_paths(
            &issue,
            co.files_changed_from_head(&job.pr.head_sha)
                .unwrap_or_else(|_| vec![]),
        );
        let changed_paths = co
            .files_changed_from_head(&job.pr.head_sha)
            .unwrap_or_else(|_| vec![]);
        self.tag_from_paths(&issue, &changed_paths);

        overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending);

@@ -523,6 +523,38 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
            );

            overall_status.set_url(gist_url);

            let changed_attributes = attrs
                .iter()
                .map(|attr| attr.package.split('.').collect::<Vec<&str>>())
                .collect::<Vec<Vec<&str>>>();

            let m = ImpactedMaintainers::calculate(
                &self.nix,
                &PathBuf::from(&refpath),
                &changed_paths,
                &changed_attributes,
            );

            let gist_url = make_gist(
                &gists,
                "Potential Maintainers",
                Some("".to_owned()),
                match m {
                    Ok(maintainers) => format!("Maintainers:\n{}", maintainers),
                    Err(e) => format!("Ignorable calculation error:\n{:?}", e),
                },
            );

            let mut status = CommitStatus::new(
                repo.statuses(),
                job.pr.head_sha.clone(),
                String::from("grahamcofborg-eval-check-maintainers"),
                String::from("matching changed paths to changed attrs..."),
                gist_url,
            );

            status.set(hubcaps::statuses::State::Success);
        }

        rebuild_tags.parse_attrs(attrs);

8 ofborg/test-srcs/maintainers-pr/default.nix Normal file
@@ -0,0 +1,8 @@
{ ... }:
{
  lib = import ./lib;
  foo.bar.packageA = {
    name = "Hi";
    meta.maintainers = [{ github = "test"; }];
  };
}
6 ofborg/test-srcs/maintainers/default.nix Normal file
@@ -0,0 +1,6 @@
{ ... }:
{
  lib = import ./lib;
  foo.bar.packageA = {
  };
}
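The two fixture trees above differ only in foo.bar.packageA: the maintainers-pr tree (presumably turned into the PR head by make-maintainer-pr.sh) adds a name and a meta.maintainers entry, while the base maintainers tree leaves the attribute empty. That is exactly what the example() test in maintainers.rs asserts: the handle "test" gets pinged for "foo.bar.packageA". For this pipeline a maintainer entry only needs a github attribute:

# Maintainer shape consumed by maintainers.nix (the handle is taken from `github`):
{ github = "test"; }
# listToPing then yields entries of the form
# { handle = "test"; packageName = "foo.bar.packageA"; dueToFiles = [ ... ]; }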
482 ofborg/test-srcs/maintainers/lib/attrsets.nix Normal file
@@ -0,0 +1,482 @@
|
|||
{ lib }:
|
||||
# Operations on attribute sets.
|
||||
|
||||
let
|
||||
inherit (builtins) head tail length;
|
||||
inherit (lib.trivial) and;
|
||||
inherit (lib.strings) concatStringsSep;
|
||||
inherit (lib.lists) fold concatMap concatLists;
|
||||
in
|
||||
|
||||
rec {
|
||||
inherit (builtins) attrNames listToAttrs hasAttr isAttrs getAttr;
|
||||
|
||||
|
||||
/* Return an attribute from nested attribute sets.
|
||||
|
||||
Example:
|
||||
x = { a = { b = 3; }; }
|
||||
attrByPath ["a" "b"] 6 x
|
||||
=> 3
|
||||
attrByPath ["z" "z"] 6 x
|
||||
=> 6
|
||||
*/
|
||||
attrByPath = attrPath: default: e:
|
||||
let attr = head attrPath;
|
||||
in
|
||||
if attrPath == [] then e
|
||||
else if e ? ${attr}
|
||||
then attrByPath (tail attrPath) default e.${attr}
|
||||
else default;
|
||||
|
||||
/* Return if an attribute from nested attribute set exists.
|
||||
|
||||
Example:
|
||||
x = { a = { b = 3; }; }
|
||||
hasAttrByPath ["a" "b"] x
|
||||
=> true
|
||||
hasAttrByPath ["z" "z"] x
|
||||
=> false
|
||||
|
||||
*/
|
||||
hasAttrByPath = attrPath: e:
|
||||
let attr = head attrPath;
|
||||
in
|
||||
if attrPath == [] then true
|
||||
else if e ? ${attr}
|
||||
then hasAttrByPath (tail attrPath) e.${attr}
|
||||
else false;
|
||||
|
||||
|
||||
/* Return nested attribute set in which an attribute is set.
|
||||
|
||||
Example:
|
||||
setAttrByPath ["a" "b"] 3
|
||||
=> { a = { b = 3; }; }
|
||||
*/
|
||||
setAttrByPath = attrPath: value:
|
||||
if attrPath == [] then value
|
||||
else listToAttrs
|
||||
[ { name = head attrPath; value = setAttrByPath (tail attrPath) value; } ];
|
||||
|
||||
|
||||
/* Like `getAttrPath' without a default value. If it doesn't find the
|
||||
path it will throw.
|
||||
|
||||
Example:
|
||||
x = { a = { b = 3; }; }
|
||||
getAttrFromPath ["a" "b"] x
|
||||
=> 3
|
||||
getAttrFromPath ["z" "z"] x
|
||||
=> error: cannot find attribute `z.z'
|
||||
*/
|
||||
getAttrFromPath = attrPath: set:
|
||||
let errorMsg = "cannot find attribute `" + concatStringsSep "." attrPath + "'";
|
||||
in attrByPath attrPath (abort errorMsg) set;
|
||||
|
||||
|
||||
/* Return the specified attributes from a set.
|
||||
|
||||
Example:
|
||||
attrVals ["a" "b" "c"] as
|
||||
=> [as.a as.b as.c]
|
||||
*/
|
||||
attrVals = nameList: set: map (x: set.${x}) nameList;
|
||||
|
||||
|
||||
/* Return the values of all attributes in the given set, sorted by
|
||||
attribute name.
|
||||
|
||||
Example:
|
||||
attrValues {c = 3; a = 1; b = 2;}
|
||||
=> [1 2 3]
|
||||
*/
|
||||
attrValues = builtins.attrValues or (attrs: attrVals (attrNames attrs) attrs);
|
||||
|
||||
|
||||
/* Given a set of attribute names, return the set of the corresponding
|
||||
attributes from the given set.
|
||||
|
||||
Example:
|
||||
getAttrs [ "a" "b" ] { a = 1; b = 2; c = 3; }
|
||||
=> { a = 1; b = 2; }
|
||||
*/
|
||||
getAttrs = names: attrs: genAttrs names (name: attrs.${name});
|
||||
|
||||
/* Collect each attribute named `attr' from a list of attribute
|
||||
sets. Sets that don't contain the named attribute are ignored.
|
||||
|
||||
Example:
|
||||
catAttrs "a" [{a = 1;} {b = 0;} {a = 2;}]
|
||||
=> [1 2]
|
||||
*/
|
||||
catAttrs = builtins.catAttrs or
|
||||
(attr: l: concatLists (map (s: if s ? ${attr} then [s.${attr}] else []) l));
|
||||
|
||||
|
||||
/* Filter an attribute set by removing all attributes for which the
|
||||
given predicate return false.
|
||||
|
||||
Example:
|
||||
filterAttrs (n: v: n == "foo") { foo = 1; bar = 2; }
|
||||
=> { foo = 1; }
|
||||
*/
|
||||
filterAttrs = pred: set:
|
||||
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
|
||||
|
||||
|
||||
/* Filter an attribute set recursively by removing all attributes for
|
||||
which the given predicate return false.
|
||||
|
||||
Example:
|
||||
filterAttrsRecursive (n: v: v != null) { foo = { bar = null; }; }
|
||||
=> { foo = {}; }
|
||||
*/
|
||||
filterAttrsRecursive = pred: set:
|
||||
listToAttrs (
|
||||
concatMap (name:
|
||||
let v = set.${name}; in
|
||||
if pred name v then [
|
||||
(nameValuePair name (
|
||||
if isAttrs v then filterAttrsRecursive pred v
|
||||
else v
|
||||
))
|
||||
] else []
|
||||
) (attrNames set)
|
||||
);
|
||||
|
||||
/* Apply fold functions to values grouped by key.
|
||||
|
||||
Example:
|
||||
foldAttrs (n: a: [n] ++ a) [] [{ a = 2; } { a = 3; }]
|
||||
=> { a = [ 2 3 ]; }
|
||||
*/
|
||||
foldAttrs = op: nul: list_of_attrs:
|
||||
fold (n: a:
|
||||
fold (name: o:
|
||||
o // { ${name} = op n.${name} (a.${name} or nul); }
|
||||
) a (attrNames n)
|
||||
) {} list_of_attrs;
|
||||
|
||||
|
||||
/* Recursively collect sets that verify a given predicate named `pred'
|
||||
from the set `attrs'. The recursion is stopped when the predicate is
|
||||
verified.
|
||||
|
||||
Type:
|
||||
collect ::
|
||||
(AttrSet -> Bool) -> AttrSet -> [x]
|
||||
|
||||
Example:
|
||||
collect isList { a = { b = ["b"]; }; c = [1]; }
|
||||
=> [["b"] [1]]
|
||||
|
||||
collect (x: x ? outPath)
|
||||
{ a = { outPath = "a/"; }; b = { outPath = "b/"; }; }
|
||||
=> [{ outPath = "a/"; } { outPath = "b/"; }]
|
||||
*/
|
||||
collect = pred: attrs:
|
||||
if pred attrs then
|
||||
[ attrs ]
|
||||
else if isAttrs attrs then
|
||||
concatMap (collect pred) (attrValues attrs)
|
||||
else
|
||||
[];
|
||||
|
||||
|
||||
/* Utility function that creates a {name, value} pair as expected by
|
||||
builtins.listToAttrs.
|
||||
|
||||
Example:
|
||||
nameValuePair "some" 6
|
||||
=> { name = "some"; value = 6; }
|
||||
*/
|
||||
nameValuePair = name: value: { inherit name value; };
|
||||
|
||||
|
||||
/* Apply a function to each element in an attribute set. The
|
||||
function takes two arguments --- the attribute name and its value
|
||||
--- and returns the new value for the attribute. The result is a
|
||||
new attribute set.
|
||||
|
||||
Example:
|
||||
mapAttrs (name: value: name + "-" + value)
|
||||
{ x = "foo"; y = "bar"; }
|
||||
=> { x = "x-foo"; y = "y-bar"; }
|
||||
*/
|
||||
mapAttrs = builtins.mapAttrs or
|
||||
(f: set:
|
||||
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)));
|
||||
|
||||
|
||||
/* Like `mapAttrs', but allows the name of each attribute to be
|
||||
changed in addition to the value. The applied function should
|
||||
return both the new name and value as a `nameValuePair'.
|
||||
|
||||
Example:
|
||||
mapAttrs' (name: value: nameValuePair ("foo_" + name) ("bar-" + value))
|
||||
{ x = "a"; y = "b"; }
|
||||
=> { foo_x = "bar-a"; foo_y = "bar-b"; }
|
||||
*/
|
||||
mapAttrs' = f: set:
|
||||
listToAttrs (map (attr: f attr set.${attr}) (attrNames set));
|
||||
|
||||
|
||||
/* Call a function for each attribute in the given set and return
|
||||
the result in a list.
|
||||
|
||||
Example:
|
||||
mapAttrsToList (name: value: name + value)
|
||||
{ x = "a"; y = "b"; }
|
||||
=> [ "xa" "yb" ]
|
||||
*/
|
||||
mapAttrsToList = f: attrs:
|
||||
map (name: f name attrs.${name}) (attrNames attrs);
|
||||
|
||||
|
||||
/* Like `mapAttrs', except that it recursively applies itself to
|
||||
attribute sets. Also, the first argument of the argument
|
||||
function is a *list* of the names of the containing attributes.
|
||||
|
||||
Type:
|
||||
mapAttrsRecursive ::
|
||||
([String] -> a -> b) -> AttrSet -> AttrSet
|
||||
|
||||
Example:
|
||||
mapAttrsRecursive (path: value: concatStringsSep "-" (path ++ [value]))
|
||||
{ n = { a = "A"; m = { b = "B"; c = "C"; }; }; d = "D"; }
|
||||
=> { n = { a = "n-a-A"; m = { b = "n-m-b-B"; c = "n-m-c-C"; }; }; d = "d-D"; }
|
||||
*/
|
||||
mapAttrsRecursive = mapAttrsRecursiveCond (as: true);
|
||||
|
||||
|
||||
/* Like `mapAttrsRecursive', but it takes an additional predicate
|
||||
function that tells it whether to recursive into an attribute
|
||||
set. If it returns false, `mapAttrsRecursiveCond' does not
|
||||
recurse, but does apply the map function. It is returns true, it
|
||||
does recurse, and does not apply the map function.
|
||||
|
||||
Type:
|
||||
mapAttrsRecursiveCond ::
|
||||
(AttrSet -> Bool) -> ([String] -> a -> b) -> AttrSet -> AttrSet
|
||||
|
||||
Example:
|
||||
# To prevent recursing into derivations (which are attribute
|
||||
# sets with the attribute "type" equal to "derivation"):
|
||||
mapAttrsRecursiveCond
|
||||
(as: !(as ? "type" && as.type == "derivation"))
|
||||
(x: ... do something ...)
|
||||
attrs
|
||||
*/
|
||||
mapAttrsRecursiveCond = cond: f: set:
|
||||
let
|
||||
recurse = path: set:
|
||||
let
|
||||
g =
|
||||
name: value:
|
||||
if isAttrs value && cond value
|
||||
then recurse (path ++ [name]) value
|
||||
else f (path ++ [name]) value;
|
||||
in mapAttrs g set;
|
||||
in recurse [] set;
|
||||
|
||||
|
||||
/* Generate an attribute set by mapping a function over a list of
|
||||
attribute names.
|
||||
|
||||
Example:
|
||||
genAttrs [ "foo" "bar" ] (name: "x_" + name)
|
||||
=> { foo = "x_foo"; bar = "x_bar"; }
|
||||
*/
|
||||
genAttrs = names: f:
|
||||
listToAttrs (map (n: nameValuePair n (f n)) names);
|
||||
|
||||
|
||||
/* Check whether the argument is a derivation. Any set with
|
||||
{ type = "derivation"; } counts as a derivation.
|
||||
|
||||
Example:
|
||||
nixpkgs = import <nixpkgs> {}
|
||||
isDerivation nixpkgs.ruby
|
||||
=> true
|
||||
isDerivation "foobar"
|
||||
=> false
|
||||
*/
|
||||
isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
|
||||
|
||||
/* Converts a store path to a fake derivation. */
|
||||
toDerivation = path:
|
||||
let
|
||||
path' = builtins.storePath path;
|
||||
res =
|
||||
{ type = "derivation";
|
||||
name = builtins.unsafeDiscardStringContext (builtins.substring 33 (-1) (baseNameOf path'));
|
||||
outPath = path';
|
||||
outputs = [ "out" ];
|
||||
out = res;
|
||||
outputName = "out";
|
||||
};
|
||||
in res;
|
||||
|
||||
|
||||
/* If `cond' is true, return the attribute set `as',
|
||||
otherwise an empty attribute set.
|
||||
|
||||
Example:
|
||||
optionalAttrs (true) { my = "set"; }
|
||||
=> { my = "set"; }
|
||||
optionalAttrs (false) { my = "set"; }
|
||||
=> { }
|
||||
*/
|
||||
optionalAttrs = cond: as: if cond then as else {};
|
||||
|
||||
|
||||
/* Merge sets of attributes and use the function f to merge attributes
|
||||
values.
|
||||
|
||||
Example:
|
||||
zipAttrsWithNames ["a"] (name: vs: vs) [{a = "x";} {a = "y"; b = "z";}]
|
||||
=> { a = ["x" "y"]; }
|
||||
*/
|
||||
zipAttrsWithNames = names: f: sets:
|
||||
listToAttrs (map (name: {
|
||||
inherit name;
|
||||
value = f name (catAttrs name sets);
|
||||
}) names);
|
||||
|
||||
/* Implementation note: Common names appear multiple times in the list of
|
||||
names, hopefully this does not affect the system because the maximal
|
||||
laziness avoid computing twice the same expression and listToAttrs does
|
||||
not care about duplicated attribute names.
|
||||
|
||||
Example:
|
||||
zipAttrsWith (name: values: values) [{a = "x";} {a = "y"; b = "z";}]
|
||||
=> { a = ["x" "y"]; b = ["z"] }
|
||||
*/
|
||||
zipAttrsWith = f: sets: zipAttrsWithNames (concatMap attrNames sets) f sets;
|
||||
/* Like `zipAttrsWith' with `(name: values: value)' as the function.
|
||||
|
||||
Example:
|
||||
zipAttrs [{a = "x";} {a = "y"; b = "z";}]
|
||||
=> { a = ["x" "y"]; b = ["z"] }
|
||||
*/
|
||||
zipAttrs = zipAttrsWith (name: values: values);
|
||||
|
||||
/* Does the same as the update operator '//' except that attributes are
|
||||
merged until the given predicate is verified. The predicate should
|
||||
accept 3 arguments which are the path to reach the attribute, a part of
|
||||
the first attribute set and a part of the second attribute set. When
|
||||
the predicate is verified, the value of the first attribute set is
|
||||
replaced by the value of the second attribute set.
|
||||
|
||||
Example:
|
||||
recursiveUpdateUntil (path: l: r: path == ["foo"]) {
|
||||
# first attribute set
|
||||
foo.bar = 1;
|
||||
foo.baz = 2;
|
||||
bar = 3;
|
||||
} {
|
||||
#second attribute set
|
||||
foo.bar = 1;
|
||||
foo.quz = 2;
|
||||
baz = 4;
|
||||
}
|
||||
|
||||
returns: {
|
||||
foo.bar = 1; # 'foo.*' from the second set
|
||||
foo.quz = 2; #
|
||||
bar = 3; # 'bar' from the first set
|
||||
baz = 4; # 'baz' from the second set
|
||||
}
|
||||
|
||||
*/
|
||||
recursiveUpdateUntil = pred: lhs: rhs:
|
||||
let f = attrPath:
|
||||
zipAttrsWith (n: values:
|
||||
let here = attrPath ++ [n]; in
|
||||
if tail values == []
|
||||
|| pred here (head (tail values)) (head values) then
|
||||
head values
|
||||
else
|
||||
f here values
|
||||
);
|
||||
in f [] [rhs lhs];
|
||||
|
||||
/* A recursive variant of the update operator ‘//’. The recursion
|
||||
stops when one of the attribute values is not an attribute set,
|
||||
in which case the right hand side value takes precedence over the
|
||||
left hand side value.
|
||||
|
||||
Example:
|
||||
recursiveUpdate {
|
||||
boot.loader.grub.enable = true;
|
||||
boot.loader.grub.device = "/dev/hda";
|
||||
} {
|
||||
boot.loader.grub.device = "";
|
||||
}
|
||||
|
||||
returns: {
|
||||
boot.loader.grub.enable = true;
|
||||
boot.loader.grub.device = "";
|
||||
}
|
||||
|
||||
*/
|
||||
recursiveUpdate = lhs: rhs:
|
||||
recursiveUpdateUntil (path: lhs: rhs:
|
||||
!(isAttrs lhs && isAttrs rhs)
|
||||
) lhs rhs;
|
||||
|
||||
/* Returns true if the pattern is contained in the set. False otherwise.
|
||||
|
||||
Example:
|
||||
matchAttrs { cpu = {}; } { cpu = { bits = 64; }; }
|
||||
=> true
|
||||
*/
|
||||
matchAttrs = pattern: attrs: assert isAttrs pattern;
|
||||
fold and true (attrValues (zipAttrsWithNames (attrNames pattern) (n: values:
|
||||
let pat = head values; val = head (tail values); in
|
||||
if length values == 1 then false
|
||||
else if isAttrs pat then isAttrs val && matchAttrs pat val
|
||||
else pat == val
|
||||
) [pattern attrs]));
|
||||
|
||||
/* Override only the attributes that are already present in the old set
|
||||
useful for deep-overriding.
|
||||
|
||||
Example:
|
||||
overrideExisting {} { a = 1; }
|
||||
=> {}
|
||||
overrideExisting { b = 2; } { a = 1; }
|
||||
=> { b = 2; }
|
||||
overrideExisting { a = 3; b = 2; } { a = 1; }
|
||||
=> { a = 1; b = 2; }
|
||||
*/
|
||||
overrideExisting = old: new:
|
||||
mapAttrs (name: value: new.${name} or value) old;
|
||||
|
||||
/* Get a package output.
|
||||
If no output is found, fallback to `.out` and then to the default.
|
||||
|
||||
Example:
|
||||
getOutput "dev" pkgs.openssl
|
||||
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev"
|
||||
*/
|
||||
getOutput = output: pkg:
|
||||
if pkg.outputUnspecified or false
|
||||
then pkg.${output} or pkg.out or pkg
|
||||
else pkg;
|
||||
|
||||
getBin = getOutput "bin";
|
||||
getLib = getOutput "lib";
|
||||
getDev = getOutput "dev";
|
||||
|
||||
/* Pick the outputs of packages to place in buildInputs */
|
||||
chooseDevOutputs = drvs: builtins.map getDev drvs;
|
||||
|
||||
/*** deprecated stuff ***/
|
||||
|
||||
zipWithNames = zipAttrsWithNames;
|
||||
zip = builtins.trace
|
||||
"lib.zip is deprecated, use lib.zipAttrsWith instead" zipAttrsWith;
|
||||
|
||||
}
|
137 ofborg/test-srcs/maintainers/lib/default.nix Normal file
@@ -0,0 +1,137 @@
|
|||
/* Library of low-level helper functions for nix expressions.
|
||||
*
|
||||
* Please implement (mostly) exhaustive unit tests
|
||||
* for new functions in `./tests.nix'.
|
||||
*/
|
||||
let
|
||||
|
||||
inherit (import ./fixed-points.nix {}) makeExtensible;
|
||||
|
||||
lib = makeExtensible (self: let
|
||||
callLibs = file: import file { lib = self; };
|
||||
in with self; {
|
||||
|
||||
# often used, or depending on very little
|
||||
trivial = callLibs ./trivial.nix;
|
||||
fixedPoints = callLibs ./fixed-points.nix;
|
||||
|
||||
# datatypes
|
||||
attrsets = callLibs ./attrsets.nix;
|
||||
lists = callLibs ./lists.nix;
|
||||
strings = callLibs ./strings.nix;
|
||||
stringsWithDeps = callLibs ./strings-with-deps.nix;
|
||||
|
||||
# packaging
|
||||
customisation = callLibs ./customisation.nix;
|
||||
maintainers = import ../maintainers/maintainer-list.nix;
|
||||
meta = callLibs ./meta.nix;
|
||||
sources = callLibs ./sources.nix;
|
||||
versions = callLibs ./versions.nix;
|
||||
|
||||
# module system
|
||||
modules = callLibs ./modules.nix;
|
||||
options = callLibs ./options.nix;
|
||||
types = callLibs ./types.nix;
|
||||
|
||||
# constants
|
||||
licenses = callLibs ./licenses.nix;
|
||||
systems = callLibs ./systems;
|
||||
|
||||
# misc
|
||||
asserts = callLibs ./asserts.nix;
|
||||
debug = callLibs ./debug.nix;
|
||||
generators = callLibs ./generators.nix;
|
||||
misc = callLibs ./deprecated.nix;
|
||||
|
||||
# domain-specific
|
||||
fetchers = callLibs ./fetchers.nix;
|
||||
|
||||
# Eval-time filesystem handling
|
||||
filesystem = callLibs ./filesystem.nix;
|
||||
|
||||
# back-compat aliases
|
||||
platforms = systems.forMeta;
|
||||
|
||||
inherit (builtins) add addErrorContext attrNames concatLists
|
||||
deepSeq elem elemAt filter genericClosure genList getAttr
|
||||
hasAttr head isAttrs isBool isInt isList isString length
|
||||
lessThan listToAttrs pathExists readFile replaceStrings seq
|
||||
stringLength sub substring tail;
|
||||
inherit (trivial) id const concat or and bitAnd bitOr bitXor bitNot
|
||||
boolToString mergeAttrs flip mapNullable inNixShell min max
|
||||
importJSON warn info nixpkgsVersion version mod compare
|
||||
splitByAndCompare functionArgs setFunctionArgs isFunction;
|
||||
inherit (fixedPoints) fix fix' converge extends composeExtensions
|
||||
makeExtensible makeExtensibleWithCustomName;
|
||||
inherit (attrsets) attrByPath hasAttrByPath setAttrByPath
|
||||
getAttrFromPath attrVals attrValues getAttrs catAttrs filterAttrs
|
||||
filterAttrsRecursive foldAttrs collect nameValuePair mapAttrs
|
||||
mapAttrs' mapAttrsToList mapAttrsRecursive mapAttrsRecursiveCond
|
||||
genAttrs isDerivation toDerivation optionalAttrs
|
||||
zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
|
||||
recursiveUpdate matchAttrs overrideExisting getOutput getBin
|
||||
getLib getDev chooseDevOutputs zipWithNames zip;
|
||||
inherit (lists) singleton foldr fold foldl foldl' imap0 imap1
|
||||
concatMap flatten remove findSingle findFirst any all count
|
||||
optional optionals toList range partition zipListsWith zipLists
|
||||
reverseList listDfs toposort sort naturalSort compareLists take
|
||||
drop sublist last init crossLists unique intersectLists
|
||||
subtractLists mutuallyExclusive groupBy groupBy';
|
||||
inherit (strings) concatStrings concatMapStrings concatImapStrings
|
||||
intersperse concatStringsSep concatMapStringsSep
|
||||
concatImapStringsSep makeSearchPath makeSearchPathOutput
|
||||
makeLibraryPath makeBinPath makePerlPath makeFullPerlPath optionalString
|
||||
hasPrefix hasSuffix stringToCharacters stringAsChars escape
|
||||
escapeShellArg escapeShellArgs replaceChars lowerChars
|
||||
upperChars toLower toUpper addContextFrom splitString
|
||||
removePrefix removeSuffix versionOlder versionAtLeast getVersion
|
||||
nameFromURL enableFeature enableFeatureAs withFeature
|
||||
withFeatureAs fixedWidthString fixedWidthNumber isStorePath
|
||||
toInt readPathsFromFile fileContents;
|
||||
inherit (stringsWithDeps) textClosureList textClosureMap
|
||||
noDepEntry fullDepEntry packEntry stringAfter;
|
||||
inherit (customisation) overrideDerivation makeOverridable
|
||||
callPackageWith callPackagesWith extendDerivation hydraJob
|
||||
makeScope;
|
||||
inherit (meta) addMetaAttrs dontDistribute setName updateName
|
||||
appendToName mapDerivationAttrset lowPrio lowPrioSet hiPrio
|
||||
hiPrioSet;
|
||||
inherit (sources) pathType pathIsDirectory cleanSourceFilter
|
||||
cleanSource sourceByRegex sourceFilesBySuffices
|
||||
commitIdFromGitRepo cleanSourceWith pathHasContext
|
||||
canCleanSource;
|
||||
inherit (modules) evalModules closeModules unifyModuleSyntax
|
||||
applyIfFunction unpackSubmodule packSubmodule mergeModules
|
||||
mergeModules' mergeOptionDecls evalOptionValue mergeDefinitions
|
||||
pushDownProperties dischargeProperties filterOverrides
|
||||
sortProperties fixupOptionType mkIf mkAssert mkMerge mkOverride
|
||||
mkOptionDefault mkDefault mkForce mkVMOverride mkStrict
|
||||
mkFixStrictness mkOrder mkBefore mkAfter mkAliasDefinitions
|
||||
mkAliasAndWrapDefinitions fixMergeModules mkRemovedOptionModule
|
||||
mkRenamedOptionModule mkMergedOptionModule mkChangedOptionModule
|
||||
mkAliasOptionModule doRename filterModules;
|
||||
inherit (options) isOption mkEnableOption mkSinkUndeclaredOptions
|
||||
mergeDefaultOption mergeOneOption mergeEqualOption getValues
|
||||
getFiles optionAttrSetToDocList optionAttrSetToDocList'
|
||||
scrubOptionValue literalExample showOption showFiles
|
||||
unknownModule mkOption;
|
||||
inherit (types) isType setType defaultTypeMerge defaultFunctor
|
||||
isOptionType mkOptionType;
|
||||
inherit (asserts)
|
||||
assertMsg assertOneOf;
|
||||
inherit (debug) addErrorContextToAttrs traceIf traceVal traceValFn
|
||||
traceXMLVal traceXMLValMarked traceSeq traceSeqN traceValSeq
|
||||
traceValSeqFn traceValSeqN traceValSeqNFn traceShowVal
|
||||
traceShowValMarked showVal traceCall traceCall2 traceCall3
|
||||
traceValIfNot runTests testAllTrue traceCallXml attrNamesToStr;
|
||||
inherit (misc) maybeEnv defaultMergeArg defaultMerge foldArgs
|
||||
maybeAttrNullable maybeAttr ifEnable checkFlag getValue
|
||||
checkReqs uniqList uniqListExt condConcat lazyGenericClosure
|
||||
innerModifySumArgs modifySumArgs innerClosePropagation
|
||||
closePropagation mapAttrsFlatten nvs setAttr setAttrMerge
|
||||
mergeAttrsWithFunc mergeAttrsConcatenateValues
|
||||
mergeAttrsNoOverride mergeAttrByFunc mergeAttrsByFuncDefaults
|
||||
mergeAttrsByFuncDefaultsClean mergeAttrBy
|
||||
nixType imap;
|
||||
});
|
||||
in lib
|
101 ofborg/test-srcs/maintainers/lib/fixed-points.nix Normal file
@@ -0,0 +1,101 @@
|
|||
{ ... }:
|
||||
rec {
|
||||
# Compute the fixed point of the given function `f`, which is usually an
|
||||
# attribute set that expects its final, non-recursive representation as an
|
||||
# argument:
|
||||
#
|
||||
# f = self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; }
|
||||
#
|
||||
# Nix evaluates this recursion until all references to `self` have been
|
||||
# resolved. At that point, the final result is returned and `f x = x` holds:
|
||||
#
|
||||
# nix-repl> fix f
|
||||
# { bar = "bar"; foo = "foo"; foobar = "foobar"; }
|
||||
#
|
||||
# Type: fix :: (a -> a) -> a
|
||||
#
|
||||
# See https://en.wikipedia.org/wiki/Fixed-point_combinator for further
|
||||
# details.
|
||||
fix = f: let x = f x; in x;
|
||||
|
||||
# A variant of `fix` that records the original recursive attribute set in the
|
||||
# result. This is useful in combination with the `extends` function to
|
||||
# implement deep overriding. See pkgs/development/haskell-modules/default.nix
|
||||
# for a concrete example.
|
||||
fix' = f: let x = f x // { __unfix__ = f; }; in x;
|
||||
|
||||
# Return the fixpoint that `f` converges to when called recursively, starting
|
||||
# with the input `x`.
|
||||
#
|
||||
# nix-repl> converge (x: x / 2) 16
|
||||
# 0
|
||||
converge = f: x:
|
||||
if (f x) == x
|
||||
then x
|
||||
else converge f (f x);
|
||||
|
||||
# Modify the contents of an explicitly recursive attribute set in a way that
|
||||
# honors `self`-references. This is accomplished with a function
|
||||
#
|
||||
# g = self: super: { foo = super.foo + " + "; }
|
||||
#
|
||||
# that has access to the unmodified input (`super`) as well as the final
|
||||
# non-recursive representation of the attribute set (`self`). `extends`
|
||||
# differs from the native `//` operator insofar as that it's applied *before*
|
||||
# references to `self` are resolved:
|
||||
#
|
||||
# nix-repl> fix (extends g f)
|
||||
# { bar = "bar"; foo = "foo + "; foobar = "foo + bar"; }
|
||||
#
|
||||
# The name of the function is inspired by object-oriented inheritance, i.e.
|
||||
# think of it as an infix operator `g extends f` that mimics the syntax from
|
||||
# Java. It may seem counter-intuitive to have the "base class" as the second
|
||||
# argument, but it's nice this way if several uses of `extends` are cascaded.
|
||||
#
|
||||
# To get a better understanding how `extends` turns a function with a fix
|
||||
# point (the package set we start with) into a new function with a different fix
|
||||
# point (the desired packages set) lets just see, how `extends g f`
|
||||
# unfolds with `g` and `f` defined above:
|
||||
#
|
||||
# extends g f = self: let super = f self; in super // g self super;
|
||||
# = self: let super = { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; }; in super // g self super
|
||||
# = self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; } // g self { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; }
|
||||
# = self: { foo = "foo"; bar = "bar"; foobar = self.foo + self.bar; } // { foo = "foo" + " + "; }
|
||||
# = self: { foo = "foo + "; bar = "bar"; foobar = self.foo + self.bar; }
|
||||
#
|
||||
extends = f: rattrs: self: let super = rattrs self; in super // f self super;
|
||||
|
||||
# Compose two extending functions of the type expected by 'extends'
|
||||
# into one where changes made in the first are available in the
|
||||
# 'super' of the second
|
||||
composeExtensions =
|
||||
f: g: self: super:
|
||||
let fApplied = f self super;
|
||||
super' = super // fApplied;
|
||||
in fApplied // g self super';
|
||||
|
||||
# Create an overridable, recursive attribute set. For example:
|
||||
#
|
||||
# nix-repl> obj = makeExtensible (self: { })
|
||||
#
|
||||
# nix-repl> obj
|
||||
# { __unfix__ = «lambda»; extend = «lambda»; }
|
||||
#
|
||||
# nix-repl> obj = obj.extend (self: super: { foo = "foo"; })
|
||||
#
|
||||
# nix-repl> obj
|
||||
# { __unfix__ = «lambda»; extend = «lambda»; foo = "foo"; }
|
||||
#
|
||||
# nix-repl> obj = obj.extend (self: super: { foo = super.foo + " + "; bar = "bar"; foobar = self.foo + self.bar; })
|
||||
#
|
||||
# nix-repl> obj
|
||||
# { __unfix__ = «lambda»; bar = "bar"; extend = «lambda»; foo = "foo + "; foobar = "foo + bar"; }
|
||||
makeExtensible = makeExtensibleWithCustomName "extend";
|
||||
|
||||
# Same as `makeExtensible` but the name of the extending attribute is
|
||||
# customized.
|
||||
makeExtensibleWithCustomName = extenderName: rattrs:
|
||||
fix' rattrs // {
|
||||
${extenderName} = f: makeExtensibleWithCustomName extenderName (extends f rattrs);
|
||||
};
|
||||
}
|
663 ofborg/test-srcs/maintainers/lib/lists.nix Normal file
@@ -0,0 +1,663 @@
|
|||
# General list operations.
|
||||
|
||||
{ lib }:
|
||||
with lib.trivial;
|
||||
let
|
||||
inherit (lib.strings) toInt;
|
||||
in
|
||||
rec {
|
||||
|
||||
inherit (builtins) head tail length isList elemAt concatLists filter elem genList;
|
||||
|
||||
/* Create a list consisting of a single element. `singleton x` is
|
||||
sometimes more convenient with respect to indentation than `[x]`
|
||||
when x spans multiple lines.
|
||||
|
||||
Type: singleton :: a -> [a]
|
||||
|
||||
Example:
|
||||
singleton "foo"
|
||||
=> [ "foo" ]
|
||||
*/
|
||||
singleton = x: [x];
|
||||
|
||||
/* “right fold” a binary function `op` between successive elements of
|
||||
`list` with `nul' as the starting value, i.e.,
|
||||
`foldr op nul [x_1 x_2 ... x_n] == op x_1 (op x_2 ... (op x_n nul))`.
|
||||
|
||||
Type: foldr :: (a -> b -> b) -> b -> [a] -> b
|
||||
|
||||
Example:
|
||||
concat = foldr (a: b: a + b) "z"
|
||||
concat [ "a" "b" "c" ]
|
||||
=> "abcz"
|
||||
# different types
|
||||
strange = foldr (int: str: toString (int + 1) + str) "a"
|
||||
strange [ 1 2 3 4 ]
|
||||
=> "2345a"
|
||||
*/
|
||||
foldr = op: nul: list:
|
||||
let
|
||||
len = length list;
|
||||
fold' = n:
|
||||
if n == len
|
||||
then nul
|
||||
else op (elemAt list n) (fold' (n + 1));
|
||||
in fold' 0;
|
||||
|
||||
/* `fold` is an alias of `foldr` for historic reasons */
|
||||
# FIXME(Profpatsch): deprecate?
|
||||
fold = foldr;
|
||||
|
||||
|
||||
/* “left fold”, like `foldr`, but from the left:
|
||||
`foldl op nul [x_1 x_2 ... x_n] == op (... (op (op nul x_1) x_2) ... x_n)`.
|
||||
|
||||
Type: foldl :: (b -> a -> b) -> b -> [a] -> b
|
||||
|
||||
Example:
|
||||
lconcat = foldl (a: b: a + b) "z"
|
||||
lconcat [ "a" "b" "c" ]
|
||||
=> "zabc"
|
||||
# different types
|
||||
lstrange = foldl (str: int: str + toString (int + 1)) ""
|
||||
strange [ 1 2 3 4 ]
|
||||
=> "a2345"
|
||||
*/
|
||||
foldl = op: nul: list:
|
||||
let
|
||||
foldl' = n:
|
||||
if n == -1
|
||||
then nul
|
||||
else op (foldl' (n - 1)) (elemAt list n);
|
||||
in foldl' (length list - 1);
|
||||
|
||||
/* Strict version of `foldl`.
|
||||
|
||||
The difference is that evaluation is forced upon access. Usually used
|
||||
with small whole results (in contract with lazily-generated list or large
|
||||
lists where only a part is consumed.)
|
||||
|
||||
Type: foldl' :: (b -> a -> b) -> b -> [a] -> b
|
||||
*/
|
||||
foldl' = builtins.foldl' or foldl;
|
||||
|
||||
/* Map with index starting from 0
|
||||
|
||||
Type: imap0 :: (int -> a -> b) -> [a] -> [b]
|
||||
|
||||
Example:
|
||||
imap0 (i: v: "${v}-${toString i}") ["a" "b"]
|
||||
=> [ "a-0" "b-1" ]
|
||||
*/
|
||||
imap0 = f: list: genList (n: f n (elemAt list n)) (length list);
|
||||
|
||||
/* Map with index starting from 1
|
||||
|
||||
Type: imap1 :: (int -> a -> b) -> [a] -> [b]
|
||||
|
||||
Example:
|
||||
imap1 (i: v: "${v}-${toString i}") ["a" "b"]
|
||||
=> [ "a-1" "b-2" ]
|
||||
*/
|
||||
imap1 = f: list: genList (n: f (n + 1) (elemAt list n)) (length list);
|
||||
|
||||
/* Map and concatenate the result.
|
||||
|
||||
Type: concatMap :: (a -> [b]) -> [a] -> [b]
|
||||
|
||||
Example:
|
||||
concatMap (x: [x] ++ ["z"]) ["a" "b"]
|
||||
=> [ "a" "z" "b" "z" ]
|
||||
*/
|
||||
concatMap = builtins.concatMap or (f: list: concatLists (map f list));
|
||||
|
||||
/* Flatten the argument into a single list; that is, nested lists are
|
||||
spliced into the top-level lists.
|
||||
|
||||
Example:
|
||||
flatten [1 [2 [3] 4] 5]
|
||||
=> [1 2 3 4 5]
|
||||
flatten 1
|
||||
=> [1]
|
||||
*/
|
||||
flatten = x:
|
||||
if isList x
|
||||
then concatMap (y: flatten y) x
|
||||
else [x];
|
||||
|
||||
/* Remove elements equal to 'e' from a list. Useful for buildInputs.
|
||||
|
||||
Type: remove :: a -> [a] -> [a]
|
||||
|
||||
Example:
|
||||
remove 3 [ 1 3 4 3 ]
|
||||
=> [ 1 4 ]
|
||||
*/
|
||||
remove =
|
||||
# Element to remove from the list
|
||||
e: filter (x: x != e);
|
||||
|
||||
/* Find the sole element in the list matching the specified
|
||||
predicate, returns `default` if no such element exists, or
|
||||
`multiple` if there are multiple matching elements.
|
||||
|
||||
Type: findSingle :: (a -> bool) -> a -> a -> [a] -> a
|
||||
|
||||
Example:
|
||||
findSingle (x: x == 3) "none" "multiple" [ 1 3 3 ]
|
||||
=> "multiple"
|
||||
findSingle (x: x == 3) "none" "multiple" [ 1 3 ]
|
||||
=> 3
|
||||
findSingle (x: x == 3) "none" "multiple" [ 1 9 ]
|
||||
=> "none"
|
||||
*/
|
||||
findSingle =
|
||||
# Predicate
|
||||
pred:
|
||||
# Default value to return if element was not found.
|
||||
default:
|
||||
# Default value to return if more than one element was found
|
||||
multiple:
|
||||
# Input list
|
||||
list:
|
||||
let found = filter pred list; len = length found;
|
||||
in if len == 0 then default
|
||||
else if len != 1 then multiple
|
||||
else head found;
|
||||
|
||||
/* Find the first element in the list matching the specified
|
||||
predicate or return `default` if no such element exists.
|
||||
|
||||
Type: findFirst :: (a -> bool) -> a -> [a] -> a
|
||||
|
||||
Example:
|
||||
findFirst (x: x > 3) 7 [ 1 6 4 ]
|
||||
=> 6
|
||||
findFirst (x: x > 9) 7 [ 1 6 4 ]
|
||||
=> 7
|
||||
*/
|
||||
findFirst =
|
||||
# Predicate
|
||||
pred:
|
||||
# Default value to return
|
||||
default:
|
||||
# Input list
|
||||
list:
|
||||
let found = filter pred list;
|
||||
in if found == [] then default else head found;
|
||||
|
||||
/* Return true if function `pred` returns true for at least one
|
||||
element of `list`.
|
||||
|
||||
Type: any :: (a -> bool) -> [a] -> bool
|
||||
|
||||
Example:
|
||||
any isString [ 1 "a" { } ]
|
||||
=> true
|
||||
any isString [ 1 { } ]
|
||||
=> false
|
||||
*/
|
||||
any = builtins.any or (pred: foldr (x: y: if pred x then true else y) false);
|
||||
|
||||
/* Return true if function `pred` returns true for all elements of
|
||||
`list`.
|
||||
|
||||
Type: all :: (a -> bool) -> [a] -> bool
|
||||
|
||||
Example:
|
||||
all (x: x < 3) [ 1 2 ]
|
||||
=> true
|
||||
all (x: x < 3) [ 1 2 3 ]
|
||||
=> false
|
||||
*/
|
||||
all = builtins.all or (pred: foldr (x: y: if pred x then y else false) true);
|
||||
|
||||
/* Count how many elements of `list` match the supplied predicate
|
||||
function.
|
||||
|
||||
Type: count :: (a -> bool) -> [a] -> int
|
||||
|
||||
Example:
|
||||
count (x: x == 3) [ 3 2 3 4 6 ]
|
||||
=> 2
|
||||
*/
|
||||
count =
|
||||
# Predicate
|
||||
pred: foldl' (c: x: if pred x then c + 1 else c) 0;
|
||||
|
||||
/* Return a singleton list or an empty list, depending on a boolean
|
||||
value. Useful when building lists with optional elements
|
||||
(e.g. `++ optional (system == "i686-linux") flashplayer').
|
||||
|
||||
Type: optional :: bool -> a -> [a]
|
||||
|
||||
Example:
|
||||
optional true "foo"
|
||||
=> [ "foo" ]
|
||||
optional false "foo"
|
||||
=> [ ]
|
||||
*/
|
||||
optional = cond: elem: if cond then [elem] else [];
|
||||
|
||||
/* Return a list or an empty list, depending on a boolean value.
|
||||
|
||||
Type: optionals :: bool -> [a] -> [a]
|
||||
|
||||
Example:
|
||||
optionals true [ 2 3 ]
|
||||
=> [ 2 3 ]
|
||||
optionals false [ 2 3 ]
|
||||
=> [ ]
|
||||
*/
|
||||
optionals =
|
||||
# Condition
|
||||
cond:
|
||||
# List to return if condition is true
|
||||
elems: if cond then elems else [];
|
||||
|
||||
|
||||
/* If argument is a list, return it; else, wrap it in a singleton
|
||||
list. If you're using this, you should almost certainly
|
||||
reconsider if there isn't a more "well-typed" approach.
|
||||
|
||||
Example:
|
||||
toList [ 1 2 ]
|
||||
=> [ 1 2 ]
|
||||
toList "hi"
|
||||
=> [ "hi "]
|
||||
*/
|
||||
toList = x: if isList x then x else [x];
|
||||
|
||||
/* Return a list of integers from `first' up to and including `last'.
|
||||
|
||||
Type: range :: int -> int -> [int]
|
||||
|
||||
Example:
|
||||
range 2 4
|
||||
=> [ 2 3 4 ]
|
||||
range 3 2
|
||||
=> [ ]
|
||||
*/
|
||||
range =
|
||||
# First integer in the range
|
||||
first:
|
||||
# Last integer in the range
|
||||
last:
|
||||
if first > last then
|
||||
[]
|
||||
else
|
||||
genList (n: first + n) (last - first + 1);
|
||||
|
||||
/* Splits the elements of a list in two lists, `right` and
|
||||
`wrong`, depending on the evaluation of a predicate.
|
||||
|
||||
Type: (a -> bool) -> [a] -> { right :: [a], wrong :: [a] }
|
||||
|
||||
Example:
|
||||
partition (x: x > 2) [ 5 1 2 3 4 ]
|
||||
=> { right = [ 5 3 4 ]; wrong = [ 1 2 ]; }
|
||||
*/
|
||||
partition = builtins.partition or (pred:
|
||||
foldr (h: t:
|
||||
if pred h
|
||||
then { right = [h] ++ t.right; wrong = t.wrong; }
|
||||
else { right = t.right; wrong = [h] ++ t.wrong; }
|
||||
) { right = []; wrong = []; });
|
||||
|
||||
/* Splits the elements of a list into many lists, using the return value of a predicate.
|
||||
Predicate should return a string which becomes keys of attrset `groupBy' returns.
|
||||
|
||||
`groupBy'` allows to customise the combining function and initial value
|
||||
|
||||
Example:
|
||||
groupBy (x: boolToString (x > 2)) [ 5 1 2 3 4 ]
|
||||
=> { true = [ 5 3 4 ]; false = [ 1 2 ]; }
|
||||
groupBy (x: x.name) [ {name = "icewm"; script = "icewm &";}
|
||||
{name = "xfce"; script = "xfce4-session &";}
|
||||
{name = "icewm"; script = "icewmbg &";}
|
||||
{name = "mate"; script = "gnome-session &";}
|
||||
]
|
||||
=> { icewm = [ { name = "icewm"; script = "icewm &"; }
|
||||
{ name = "icewm"; script = "icewmbg &"; } ];
|
||||
mate = [ { name = "mate"; script = "gnome-session &"; } ];
|
||||
xfce = [ { name = "xfce"; script = "xfce4-session &"; } ];
|
||||
}
|
||||
|
||||
groupBy' builtins.add 0 (x: boolToString (x > 2)) [ 5 1 2 3 4 ]
|
||||
=> { true = 12; false = 3; }
|
||||
*/
|
||||
groupBy' = op: nul: pred: lst:
|
||||
foldl' (r: e:
|
||||
let
|
||||
key = pred e;
|
||||
in
|
||||
r // { ${key} = op (r.${key} or nul) e; }
|
||||
) {} lst;
|
||||
|
||||
groupBy = groupBy' (sum: e: sum ++ [e]) [];
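# A hedged usage sketch (illustrative only): `groupBy'` with a counting
# combiner instead of list concatenation:
#   groupBy' (sum: _: sum + 1) 0 (x: x.kind) [ { kind = "a"; } { kind = "b"; } { kind = "a"; } ]
#   => { a = 2; b = 1; }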
|
||||
|
||||
/* Merges two lists of the same size together. If the sizes aren't the same
|
||||
the merging stops at the shortest. How both lists are merged is defined
|
||||
by the first argument.
|
||||
|
||||
Type: zipListsWith :: (a -> b -> c) -> [a] -> [b] -> [c]
|
||||
|
||||
Example:
|
||||
zipListsWith (a: b: a + b) ["h" "l"] ["e" "o"]
|
||||
=> ["he" "lo"]
|
||||
*/
|
||||
zipListsWith =
|
||||
# Function to zip elements of both lists
|
||||
f:
|
||||
# First list
|
||||
fst:
|
||||
# Second list
|
||||
snd:
|
||||
genList
|
||||
(n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd));
|
||||
|
||||
/* Merges two lists of the same size together. If the sizes aren't the same
|
||||
the merging stops at the shortest.
|
||||
|
||||
Type: zipLists :: [a] -> [b] -> [{ fst :: a, snd :: b}]
|
||||
|
||||
Example:
|
||||
zipLists [ 1 2 ] [ "a" "b" ]
|
||||
=> [ { fst = 1; snd = "a"; } { fst = 2; snd = "b"; } ]
|
||||
*/
|
||||
zipLists = zipListsWith (fst: snd: { inherit fst snd; });
|
||||
|
||||
/* Reverse the order of the elements of a list.
|
||||
|
||||
Type: reverseList :: [a] -> [a]
|
||||
|
||||
Example:
|
||||
|
||||
reverseList [ "b" "o" "j" ]
|
||||
=> [ "j" "o" "b" ]
|
||||
*/
|
||||
reverseList = xs:
|
||||
let l = length xs; in genList (n: elemAt xs (l - n - 1)) l;
|
||||
|
||||
/* Depth-First Search (DFS) for lists `list != []`.
|
||||
|
||||
`before a b == true` means that `b` depends on `a` (there's an
|
||||
edge from `b` to `a`).
|
||||
|
||||
Example:
|
||||
listDfs true hasPrefix [ "/home/user" "other" "/" "/home" ]
|
||||
== { minimal = "/"; # minimal element
|
||||
visited = [ "/home/user" ]; # seen elements (in reverse order)
|
||||
rest = [ "/home" "other" ]; # everything else
|
||||
}
|
||||
|
||||
listDfs true hasPrefix [ "/home/user" "other" "/" "/home" "/" ]
|
||||
== { cycle = "/"; # cycle encountered at this element
|
||||
loops = [ "/" ]; # and continues to these elements
|
||||
visited = [ "/" "/home/user" ]; # elements leading to the cycle (in reverse order)
|
||||
rest = [ "/home" "other" ]; # everything else
|
||||
|
||||
}
*/
|
||||
listDfs = stopOnCycles: before: list:
|
||||
let
|
||||
dfs' = us: visited: rest:
|
||||
let
|
||||
c = filter (x: before x us) visited;
|
||||
b = partition (x: before x us) rest;
|
||||
in if stopOnCycles && (length c > 0)
|
||||
then { cycle = us; loops = c; inherit visited rest; }
|
||||
else if length b.right == 0
|
||||
then # nothing is before us
|
||||
{ minimal = us; inherit visited rest; }
|
||||
else # grab the first one before us and continue
|
||||
dfs' (head b.right)
|
||||
([ us ] ++ visited)
|
||||
(tail b.right ++ b.wrong);
|
||||
in dfs' (head list) [] (tail list);
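# Hedged reading of the helper above: `c` collects already-visited elements that
# are "before" the current one (reported as a cycle when stopOnCycles is true),
# while `b.right` holds the not-yet-visited elements that must still come earlier.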
|
||||
|
||||
/* Sort a list based on a partial ordering using DFS. This
|
||||
implementation is O(N^2), if your ordering is linear, use `sort`
|
||||
instead.
|
||||
|
||||
`before a b == true` means that `b` should be after `a`
|
||||
in the result.
|
||||
|
||||
Example:
|
||||
|
||||
toposort hasPrefix [ "/home/user" "other" "/" "/home" ]
|
||||
== { result = [ "/" "/home" "/home/user" "other" ]; }
|
||||
|
||||
toposort hasPrefix [ "/home/user" "other" "/" "/home" "/" ]
|
||||
== { cycle = [ "/home/user" "/" "/" ]; # path leading to a cycle
|
||||
loops = [ "/" ]; } # loops back to these elements
|
||||
|
||||
toposort hasPrefix [ "other" "/home/user" "/home" "/" ]
|
||||
== { result = [ "other" "/" "/home" "/home/user" ]; }
|
||||
|
||||
toposort (a: b: a < b) [ 3 2 1 ] == { result = [ 1 2 3 ]; }
|
||||
|
||||
*/
|
||||
toposort = before: list:
|
||||
let
|
||||
dfsthis = listDfs true before list;
|
||||
toporest = toposort before (dfsthis.visited ++ dfsthis.rest);
|
||||
in
|
||||
if length list < 2
|
||||
then # finish
|
||||
{ result = list; }
|
||||
else if dfsthis ? "cycle"
|
||||
then # there's a cycle, starting from the current vertex, return it
|
||||
{ cycle = reverseList ([ dfsthis.cycle ] ++ dfsthis.visited);
|
||||
inherit (dfsthis) loops; }
|
||||
else if toporest ? "cycle"
|
||||
then # there's a cycle somewhere else in the graph, return it
|
||||
toporest
|
||||
# Slow, but short. Can be made a bit faster with an explicit stack.
|
||||
else # there are no cycles
|
||||
{ result = [ dfsthis.minimal ] ++ toporest.result; };
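# A hedged usage sketch with made-up names (`deps`, `dependsOn`): ordering
# items so that their dependencies come first.
#   deps = { app = [ "db" ]; db = [ ]; };
#   dependsOn = a: b: builtins.elem a (deps.${b} or [ ]);
#   (toposort dependsOn [ "app" "db" ]).result
#   => [ "db" "app" ]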
|
||||
|
||||
/* Sort a list based on a comparator function which compares two
|
||||
elements and returns true if the first argument is strictly below
|
||||
the second argument. The returned list is sorted in an increasing
|
||||
order. The implementation does a quick-sort.
|
||||
|
||||
Example:
|
||||
sort (a: b: a < b) [ 5 3 7 ]
|
||||
=> [ 3 5 7 ]
|
||||
*/
|
||||
sort = builtins.sort or (
|
||||
strictLess: list:
|
||||
let
|
||||
len = length list;
|
||||
first = head list;
|
||||
pivot' = n: acc@{ left, right }: let el = elemAt list n; next = pivot' (n + 1); in
|
||||
if n == len
|
||||
then acc
|
||||
else if strictLess first el
|
||||
then next { inherit left; right = [ el ] ++ right; }
|
||||
else
|
||||
next { left = [ el ] ++ left; inherit right; };
|
||||
pivot = pivot' 1 { left = []; right = []; };
|
||||
in
|
||||
if len < 2 then list
|
||||
else (sort strictLess pivot.left) ++ [ first ] ++ (sort strictLess pivot.right));
|
||||
|
||||
/* Compare two lists element-by-element.
|
||||
|
||||
Example:
|
||||
compareLists compare [] []
|
||||
=> 0
|
||||
compareLists compare [] [ "a" ]
|
||||
=> -1
|
||||
compareLists compare [ "a" ] []
|
||||
=> 1
|
||||
compareLists compare [ "a" "b" ] [ "a" "c" ]
|
||||
=> 1
|
||||
*/
|
||||
compareLists = cmp: a: b:
|
||||
if a == []
|
||||
then if b == []
|
||||
then 0
|
||||
else -1
|
||||
else if b == []
|
||||
then 1
|
||||
else let rel = cmp (head a) (head b); in
|
||||
if rel == 0
|
||||
then compareLists cmp (tail a) (tail b)
|
||||
else rel;
|
||||
|
||||
/* Sort list using "Natural sorting".
|
||||
Numeric portions of strings are sorted in numeric order.
|
||||
|
||||
Example:
|
||||
naturalSort ["disk11" "disk8" "disk100" "disk9"]
|
||||
=> ["disk8" "disk9" "disk11" "disk100"]
|
||||
naturalSort ["10.46.133.149" "10.5.16.62" "10.54.16.25"]
|
||||
=> ["10.5.16.62" "10.46.133.149" "10.54.16.25"]
|
||||
naturalSort ["v0.2" "v0.15" "v0.0.9"]
|
||||
=> [ "v0.0.9" "v0.2" "v0.15" ]
|
||||
*/
|
||||
naturalSort = lst:
|
||||
let
|
||||
vectorise = s: map (x: if isList x then toInt (head x) else x) (builtins.split "(0|[1-9][0-9]*)" s);
|
||||
prepared = map (x: [ (vectorise x) x ]) lst; # remember vectorised version for O(n) regex splits
|
||||
less = a: b: (compareLists compare (head a) (head b)) < 0;
|
||||
in
|
||||
map (x: elemAt x 1) (sort less prepared);
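# Hedged illustration of the internal `vectorise` step: "disk11" splits into
# [ "disk" 11 "" ], so numeric runs are compared as integers rather than
# character by character.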
|
||||
|
||||
/* Return the first (at most) N elements of a list.
|
||||
|
||||
Type: take :: int -> [a] -> [a]
|
||||
|
||||
Example:
|
||||
take 2 [ "a" "b" "c" "d" ]
|
||||
=> [ "a" "b" ]
|
||||
take 2 [ ]
|
||||
=> [ ]
|
||||
*/
|
||||
take =
|
||||
# Number of elements to take
|
||||
count: sublist 0 count;
|
||||
|
||||
/* Remove the first (at most) N elements of a list.
|
||||
|
||||
Type: drop :: int -> [a] -> [a]
|
||||
|
||||
Example:
|
||||
drop 2 [ "a" "b" "c" "d" ]
|
||||
=> [ "c" "d" ]
|
||||
drop 2 [ ]
|
||||
=> [ ]
|
||||
*/
|
||||
drop =
|
||||
# Number of elements to drop
|
||||
count:
|
||||
# Input list
|
||||
list: sublist count (length list) list;
|
||||
|
||||
/* Return a list consisting of at most `count` elements of `list`,
|
||||
starting at index `start`.
|
||||
|
||||
Type: sublist :: int -> int -> [a] -> [a]
|
||||
|
||||
Example:
|
||||
sublist 1 3 [ "a" "b" "c" "d" "e" ]
|
||||
=> [ "b" "c" "d" ]
|
||||
sublist 1 3 [ ]
|
||||
=> [ ]
|
||||
*/
|
||||
sublist =
|
||||
# Index at which to start the sublist
|
||||
start:
|
||||
# Number of elements to take
|
||||
count:
|
||||
# Input list
|
||||
list:
|
||||
let len = length list; in
|
||||
genList
|
||||
(n: elemAt list (n + start))
|
||||
(if start >= len then 0
|
||||
else if start + count > len then len - start
|
||||
else count);
|
||||
|
||||
/* Return the last element of a list.
|
||||
|
||||
This function throws an error if the list is empty.
|
||||
|
||||
Type: last :: [a] -> a
|
||||
|
||||
Example:
|
||||
last [ 1 2 3 ]
|
||||
=> 3
|
||||
*/
|
||||
last = list:
|
||||
assert lib.assertMsg (list != []) "lists.last: list must not be empty!";
|
||||
elemAt list (length list - 1);
|
||||
|
||||
/* Return all elements but the last.
|
||||
|
||||
This function throws an error if the list is empty.
|
||||
|
||||
Type: init :: [a] -> [a]
|
||||
|
||||
Example:
|
||||
init [ 1 2 3 ]
|
||||
=> [ 1 2 ]
|
||||
*/
|
||||
init = list:
|
||||
assert lib.assertMsg (list != []) "lists.init: list must not be empty!";
|
||||
take (length list - 1) list;
|
||||
|
||||
|
||||
/* Return the image of the cross product of some lists by a function.
|
||||
|
||||
Example:
|
||||
crossLists (x:y: "${toString x}${toString y}") [[1 2] [3 4]]
|
||||
=> [ "13" "14" "23" "24" ]
|
||||
*/
|
||||
crossLists = f: foldl (fs: args: concatMap (f: map f args) fs) [f];
|
||||
|
||||
|
||||
/* Remove duplicate elements from the list. O(n^2) complexity.
|
||||
|
||||
Type: unique :: [a] -> [a]
|
||||
|
||||
Example:
|
||||
unique [ 3 2 3 4 ]
|
||||
=> [ 3 2 4 ]
|
||||
*/
|
||||
unique = list:
|
||||
if list == [] then
|
||||
[]
|
||||
else
|
||||
let
|
||||
x = head list;
|
||||
xs = unique (drop 1 list);
|
||||
in [x] ++ remove x xs;
|
||||
|
||||
/* Intersects list `e` and another list. O(nm) complexity.
|
||||
|
||||
Example:
|
||||
intersectLists [ 1 2 3 ] [ 6 3 2 ]
|
||||
=> [ 3 2 ]
|
||||
*/
|
||||
intersectLists = e: filter (x: elem x e);
|
||||
|
||||
/* Subtracts list `e` from another list. O(nm) complexity.
|
||||
|
||||
Example:
|
||||
subtractLists [ 3 2 ] [ 1 2 3 4 5 3 ]
|
||||
=> [ 1 4 5 ]
|
||||
*/
|
||||
subtractLists = e: filter (x: !(elem x e));
|
||||
|
||||
/* Test if two lists have no common element.
|
||||
It should be slightly more efficient than (intersectLists a b == [])
|
||||
*/
|
||||
mutuallyExclusive = a: b:
|
||||
(builtins.length a) == 0 ||
|
||||
(!(builtins.elem (builtins.head a) b) &&
|
||||
mutuallyExclusive (builtins.tail a) b);
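# A hedged usage sketch, since the comment above gives no example:
#   mutuallyExclusive [ 1 2 ] [ 3 4 ]  => true
#   mutuallyExclusive [ 1 2 ] [ 2 3 ]  => false
#   mutuallyExclusive [ ] [ 1 2 ]      => true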
|
||||
|
||||
}
|
684
ofborg/test-srcs/maintainers/lib/strings.nix
Normal file
|
@@ -0,0 +1,684 @@
|
|||
/* String manipulation functions. */
|
||||
{ lib }:
|
||||
let
|
||||
|
||||
inherit (builtins) length;
|
||||
|
||||
in
|
||||
|
||||
rec {
|
||||
|
||||
inherit (builtins) stringLength substring head tail isString replaceStrings;
|
||||
|
||||
/* Concatenate a list of strings.
|
||||
|
||||
Type: concatStrings :: [string] -> string
|
||||
|
||||
Example:
|
||||
concatStrings ["foo" "bar"]
|
||||
=> "foobar"
|
||||
*/
|
||||
concatStrings = builtins.concatStringsSep "";
|
||||
|
||||
/* Map a function over a list and concatenate the resulting strings.
|
||||
|
||||
Type: concatMapStrings :: (a -> string) -> [a] -> string
|
||||
|
||||
Example:
|
||||
concatMapStrings (x: "a" + x) ["foo" "bar"]
|
||||
=> "afooabar"
|
||||
*/
|
||||
concatMapStrings = f: list: concatStrings (map f list);
|
||||
|
||||
/* Like `concatMapStrings` except that the f functions also gets the
|
||||
position as a parameter.
|
||||
|
||||
Type: concatImapStrings :: (int -> a -> string) -> [a] -> string
|
||||
|
||||
Example:
|
||||
concatImapStrings (pos: x: "${toString pos}-${x}") ["foo" "bar"]
|
||||
=> "1-foo2-bar"
|
||||
*/
|
||||
concatImapStrings = f: list: concatStrings (lib.imap1 f list);
|
||||
|
||||
/* Place an element between each element of a list
|
||||
|
||||
Type: intersperse :: a -> [a] -> [a]
|
||||
|
||||
Example:
|
||||
intersperse "/" ["usr" "local" "bin"]
|
||||
=> ["usr" "/" "local" "/" "bin"]
|
||||
*/
|
||||
intersperse =
|
||||
# Separator to add between elements
|
||||
separator:
|
||||
# Input list
|
||||
list:
|
||||
if list == [] || length list == 1
|
||||
then list
|
||||
else tail (lib.concatMap (x: [separator x]) list);
|
||||
|
||||
/* Concatenate a list of strings with a separator between each element
|
||||
|
||||
Type: concatStringsSep :: string -> [string] -> string
|
||||
|
||||
Example:
|
||||
concatStringsSep "/" ["usr" "local" "bin"]
|
||||
=> "usr/local/bin"
|
||||
*/
|
||||
concatStringsSep = builtins.concatStringsSep or (separator: list:
|
||||
concatStrings (intersperse separator list));
|
||||
|
||||
/* Maps a function over a list of strings and then concatenates the
|
||||
result with the specified separator interspersed between
|
||||
elements.
|
||||
|
||||
Type: concatMapStringsSep :: string -> (string -> string) -> [string] -> string
|
||||
|
||||
Example:
|
||||
concatMapStringsSep "-" (x: toUpper x) ["foo" "bar" "baz"]
|
||||
=> "FOO-BAR-BAZ"
|
||||
*/
|
||||
concatMapStringsSep =
|
||||
# Separator to add between elements
|
||||
sep:
|
||||
# Function to map over the list
|
||||
f:
|
||||
# List of input strings
|
||||
list: concatStringsSep sep (map f list);
|
||||
|
||||
/* Same as `concatMapStringsSep`, but the mapping function
|
||||
additionally receives the position of its argument.
|
||||
|
||||
Type: concatImapStringsSep :: string -> (int -> string -> string) -> [string] -> string
|
||||
|
||||
Example:
|
||||
concatImapStringsSep "-" (pos: x: toString (x / pos)) [ 6 6 6 ]
|
||||
=> "6-3-2"
|
||||
*/
|
||||
concatImapStringsSep =
|
||||
# Separator to add between elements
|
||||
sep:
|
||||
# Function that receives elements and their positions
|
||||
f:
|
||||
# List of input strings
|
||||
list: concatStringsSep sep (lib.imap1 f list);
|
||||
|
||||
/* Construct a Unix-style, colon-separated search path consisting of
|
||||
the given `subDir` appended to each of the given paths.
|
||||
|
||||
Type: makeSearchPath :: string -> [string] -> string
|
||||
|
||||
Example:
|
||||
makeSearchPath "bin" ["/root" "/usr" "/usr/local"]
|
||||
=> "/root/bin:/usr/bin:/usr/local/bin"
|
||||
makeSearchPath "bin" [""]
|
||||
=> "/bin"
|
||||
*/
|
||||
makeSearchPath =
|
||||
# Directory name to append
|
||||
subDir:
|
||||
# List of base paths
|
||||
paths:
|
||||
concatStringsSep ":" (map (path: path + "/" + subDir) (builtins.filter (x: x != null) paths));
|
||||
|
||||
/* Construct a Unix-style search path by appending the given
|
||||
`subDir` to the specified `output` of each of the packages. If no
|
||||
output by the given name is found, fall back to `.out` and then to
|
||||
the default.
|
||||
|
||||
Type: string -> string -> [package] -> string
|
||||
|
||||
Example:
|
||||
makeSearchPathOutput "dev" "bin" [ pkgs.openssl pkgs.zlib ]
|
||||
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev/bin:/nix/store/wwh7mhwh269sfjkm6k5665b5kgp7jrk2-zlib-1.2.8/bin"
|
||||
*/
|
||||
makeSearchPathOutput =
|
||||
# Package output to use
|
||||
output:
|
||||
# Directory name to append
|
||||
subDir:
|
||||
# List of packages
|
||||
pkgs: makeSearchPath subDir (map (lib.getOutput output) pkgs);
|
||||
|
||||
/* Construct a library search path (such as RPATH) containing the
|
||||
libraries for a set of packages
|
||||
|
||||
Example:
|
||||
makeLibraryPath [ "/usr" "/usr/local" ]
|
||||
=> "/usr/lib:/usr/local/lib"
|
||||
pkgs = import <nixpkgs> { }
|
||||
makeLibraryPath [ pkgs.openssl pkgs.zlib ]
|
||||
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r/lib:/nix/store/wwh7mhwh269sfjkm6k5665b5kgp7jrk2-zlib-1.2.8/lib"
|
||||
*/
|
||||
makeLibraryPath = makeSearchPathOutput "lib" "lib";
|
||||
|
||||
/* Construct a binary search path (such as $PATH) containing the
|
||||
binaries for a set of packages.
|
||||
|
||||
Example:
|
||||
makeBinPath ["/root" "/usr" "/usr/local"]
|
||||
=> "/root/bin:/usr/bin:/usr/local/bin"
|
||||
*/
|
||||
makeBinPath = makeSearchPathOutput "bin" "bin";
|
||||
|
||||
|
||||
/* Construct a perl search path (such as $PERL5LIB)
|
||||
|
||||
Example:
|
||||
pkgs = import <nixpkgs> { }
|
||||
makePerlPath [ pkgs.perlPackages.libnet ]
|
||||
=> "/nix/store/n0m1fk9c960d8wlrs62sncnadygqqc6y-perl-Net-SMTP-1.25/lib/perl5/site_perl"
|
||||
*/
|
||||
# FIXME(zimbatm): this should be moved in perl-specific code
|
||||
makePerlPath = makeSearchPathOutput "lib" "lib/perl5/site_perl";
|
||||
|
||||
/* Construct a perl search path recursively including all dependencies (such as $PERL5LIB)
|
||||
|
||||
Example:
|
||||
pkgs = import <nixpkgs> { }
|
||||
makeFullPerlPath [ pkgs.perlPackages.CGI ]
|
||||
=> "/nix/store/fddivfrdc1xql02h9q500fpnqy12c74n-perl-CGI-4.38/lib/perl5/site_perl:/nix/store/8hsvdalmsxqkjg0c5ifigpf31vc4vsy2-perl-HTML-Parser-3.72/lib/perl5/site_perl:/nix/store/zhc7wh0xl8hz3y3f71nhlw1559iyvzld-perl-HTML-Tagset-3.20/lib/perl5/site_perl"
|
||||
*/
|
||||
makeFullPerlPath = deps: makePerlPath (lib.misc.closePropagation deps);
|
||||
|
||||
/* Depending on the boolean `cond', return either the given string
|
||||
or the empty string. Useful when concatenating onto a larger string.
|
||||
|
||||
Type: optionalString :: bool -> string -> string
|
||||
|
||||
Example:
|
||||
optionalString true "some-string"
|
||||
=> "some-string"
|
||||
optionalString false "some-string"
|
||||
=> ""
|
||||
*/
|
||||
optionalString =
|
||||
# Condition
|
||||
cond:
|
||||
# String to return if condition is true
|
||||
string: if cond then string else "";
|
||||
|
||||
/* Determine whether a string has given prefix.
|
||||
|
||||
Type: hasPrefix :: string -> string -> bool
|
||||
|
||||
Example:
|
||||
hasPrefix "foo" "foobar"
|
||||
=> true
|
||||
hasPrefix "foo" "barfoo"
|
||||
=> false
|
||||
*/
|
||||
hasPrefix =
|
||||
# Prefix to check for
|
||||
pref:
|
||||
# Input string
|
||||
str: substring 0 (stringLength pref) str == pref;
|
||||
|
||||
/* Determine whether a string has given suffix.
|
||||
|
||||
Type: hasSuffix :: string -> string -> bool
|
||||
|
||||
Example:
|
||||
hasSuffix "foo" "foobar"
|
||||
=> false
|
||||
hasSuffix "foo" "barfoo"
|
||||
=> true
|
||||
*/
|
||||
hasSuffix =
|
||||
# Suffix to check for
|
||||
suffix:
|
||||
# Input string
|
||||
content:
|
||||
let
|
||||
lenContent = stringLength content;
|
||||
lenSuffix = stringLength suffix;
|
||||
in lenContent >= lenSuffix &&
|
||||
substring (lenContent - lenSuffix) lenContent content == suffix;
|
||||
|
||||
/* Determine whether a string contains the given infix
|
||||
|
||||
Type: hasInfix :: string -> string -> bool
|
||||
|
||||
Example:
|
||||
hasInfix "bc" "abcd"
|
||||
=> true
|
||||
hasInfix "ab" "abcd"
|
||||
=> true
|
||||
hasInfix "cd" "abcd"
|
||||
=> true
|
||||
hasInfix "foo" "abcd"
|
||||
=> false
|
||||
*/
|
||||
hasInfix = infix: content:
|
||||
let
|
||||
drop = x: substring 1 (stringLength x) x;
|
||||
in hasPrefix infix content
|
||||
|| content != "" && hasInfix infix (drop content);
|
||||
|
||||
/* Convert a string to a list of characters (i.e. singleton strings).
|
||||
This allows you to, e.g., map a function over each character. However,
|
||||
note that this will likely be horribly inefficient; Nix is not a
|
||||
general purpose programming language. Complex string manipulations
|
||||
should, if appropriate, be done in a derivation.
|
||||
Also note that Nix treats strings as a list of bytes and thus doesn't
|
||||
handle unicode.
|
||||
|
||||
Type: stringToCharacters :: string -> [string]
|
||||
|
||||
Example:
|
||||
stringToCharacters ""
|
||||
=> [ ]
|
||||
stringToCharacters "abc"
|
||||
=> [ "a" "b" "c" ]
|
||||
stringToCharacters "💩"
|
||||
=> [ "�" "�" "�" "�" ]
|
||||
*/
|
||||
stringToCharacters = s:
|
||||
map (p: substring p 1 s) (lib.range 0 (stringLength s - 1));
|
||||
|
||||
/* Manipulate a string character by character and replace them by
|
||||
strings before concatenating the results.
|
||||
|
||||
Type: stringAsChars :: (string -> string) -> string -> string
|
||||
|
||||
Example:
|
||||
stringAsChars (x: if x == "a" then "i" else x) "nax"
|
||||
=> "nix"
|
||||
*/
|
||||
stringAsChars =
|
||||
# Function to map over each individual character
|
||||
f:
|
||||
# Input string
|
||||
s: concatStrings (
|
||||
map f (stringToCharacters s)
|
||||
);
|
||||
|
||||
/* Escape each occurrence of the elements of `list` in `string` by
|
||||
prefixing it with a backslash.
|
||||
|
||||
Type: escape :: [string] -> string -> string
|
||||
|
||||
Example:
|
||||
escape ["(" ")"] "(foo)"
|
||||
=> "\\(foo\\)"
|
||||
*/
|
||||
escape = list: replaceChars list (map (c: "\\${c}") list);
|
||||
|
||||
/* Quote string to be used safely within the Bourne shell.
|
||||
|
||||
Type: escapeShellArg :: string -> string
|
||||
|
||||
Example:
|
||||
escapeShellArg "esc'ape\nme"
|
||||
=> "'esc'\\''ape\nme'"
|
||||
*/
|
||||
escapeShellArg = arg: "'${replaceStrings ["'"] ["'\\''"] (toString arg)}'";
|
||||
|
||||
/* Quote all arguments to be safely passed to the Bourne shell.
|
||||
|
||||
Type: escapeShellArgs :: [string] -> string
|
||||
|
||||
Example:
|
||||
escapeShellArgs ["one" "two three" "four'five"]
|
||||
=> "'one' 'two three' 'four'\\''five'"
|
||||
*/
|
||||
escapeShellArgs = concatMapStringsSep " " escapeShellArg;
|
||||
|
||||
/* Turn a string into a Nix expression representing that string
|
||||
|
||||
Type: string -> string
|
||||
|
||||
Example:
|
||||
escapeNixString "hello\${}\n"
|
||||
=> "\"hello\\\${}\\n\""
|
||||
*/
|
||||
escapeNixString = s: escape ["$"] (builtins.toJSON s);
|
||||
|
||||
# Obsolete - use replaceStrings instead.
|
||||
replaceChars = builtins.replaceStrings or (
|
||||
del: new: s:
|
||||
let
|
||||
substList = lib.zipLists del new;
|
||||
subst = c:
|
||||
let found = lib.findFirst (sub: sub.fst == c) null substList; in
|
||||
if found == null then
|
||||
c
|
||||
else
|
||||
found.snd;
|
||||
in
|
||||
stringAsChars subst s);
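# A hedged usage sketch of the fallback path (prefer replaceStrings in new code):
#   replaceChars [ "o" "a" ] [ "0" "4" ] "foobar"
#   => "f00b4r"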
|
||||
|
||||
# Case conversion utilities.
|
||||
lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz";
|
||||
upperChars = stringToCharacters "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
|
||||
/* Converts an ASCII string to lower-case.
|
||||
|
||||
Type: toLower :: string -> string
|
||||
|
||||
Example:
|
||||
toLower "HOME"
|
||||
=> "home"
|
||||
*/
|
||||
toLower = replaceChars upperChars lowerChars;
|
||||
|
||||
/* Converts an ASCII string to upper-case.
|
||||
|
||||
Type: toUpper :: string -> string
|
||||
|
||||
Example:
|
||||
toUpper "home"
|
||||
=> "HOME"
|
||||
*/
|
||||
toUpper = replaceChars lowerChars upperChars;
|
||||
|
||||
/* Appends string context from another string. This is an implementation
|
||||
detail of Nix.
|
||||
|
||||
Strings in Nix carry an invisible `context` which is a list of strings
|
||||
representing store paths. If the string is later used in a derivation
|
||||
attribute, the derivation will properly populate the inputDrvs and
|
||||
inputSrcs.
|
||||
|
||||
Example:
|
||||
pkgs = import <nixpkgs> { };
|
||||
addContextFrom pkgs.coreutils "bar"
|
||||
=> "bar"
|
||||
*/
|
||||
addContextFrom = a: b: substring 0 0 a + b;
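# Hedged note: `substring 0 0 a` evaluates to the empty string but keeps the
# store-path context of `a`, so the result is textually equal to `b` while
# inheriting that context.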
|
||||
|
||||
/* Cut a string with a separator and produces a list of strings which
|
||||
were separated by this separator.
|
||||
|
||||
NOTE: this function is not performant and should never be used.
|
||||
|
||||
Example:
|
||||
splitString "." "foo.bar.baz"
|
||||
=> [ "foo" "bar" "baz" ]
|
||||
splitString "/" "/usr/local/bin"
|
||||
=> [ "" "usr" "local" "bin" ]
|
||||
*/
|
||||
splitString = _sep: _s:
|
||||
let
|
||||
sep = addContextFrom _s _sep;
|
||||
s = addContextFrom _sep _s;
|
||||
sepLen = stringLength sep;
|
||||
sLen = stringLength s;
|
||||
lastSearch = sLen - sepLen;
|
||||
startWithSep = startAt:
|
||||
substring startAt sepLen s == sep;
|
||||
|
||||
recurse = index: startAt:
|
||||
let cutUntil = i: [(substring startAt (i - startAt) s)]; in
|
||||
if index <= lastSearch then
|
||||
if startWithSep index then
|
||||
let restartAt = index + sepLen; in
|
||||
cutUntil index ++ recurse restartAt restartAt
|
||||
else
|
||||
recurse (index + 1) startAt
|
||||
else
|
||||
cutUntil sLen;
|
||||
in
|
||||
recurse 0 0;
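# Hedged reading: `recurse` scans the string position by position looking for
# the separator and cuts at each match, which is why the comment above warns
# about performance.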
|
||||
|
||||
/* Return a string without the specified prefix, if the prefix matches.
|
||||
|
||||
Type: string -> string -> string
|
||||
|
||||
Example:
|
||||
removePrefix "foo." "foo.bar.baz"
|
||||
=> "bar.baz"
|
||||
removePrefix "xxx" "foo.bar.baz"
|
||||
=> "foo.bar.baz"
|
||||
*/
|
||||
removePrefix =
|
||||
# Prefix to remove if it matches
|
||||
prefix:
|
||||
# Input string
|
||||
str:
|
||||
let
|
||||
preLen = stringLength prefix;
|
||||
sLen = stringLength str;
|
||||
in
|
||||
if hasPrefix prefix str then
|
||||
substring preLen (sLen - preLen) str
|
||||
else
|
||||
str;
|
||||
|
||||
/* Return a string without the specified suffix, if the suffix matches.
|
||||
|
||||
Type: string -> string -> string
|
||||
|
||||
Example:
|
||||
removeSuffix "front" "homefront"
|
||||
=> "home"
|
||||
removeSuffix "xxx" "homefront"
|
||||
=> "homefront"
|
||||
*/
|
||||
removeSuffix =
|
||||
# Suffix to remove if it matches
|
||||
suffix:
|
||||
# Input string
|
||||
str:
|
||||
let
|
||||
sufLen = stringLength suffix;
|
||||
sLen = stringLength str;
|
||||
in
|
||||
if sufLen <= sLen && suffix == substring (sLen - sufLen) sufLen str then
|
||||
substring 0 (sLen - sufLen) str
|
||||
else
|
||||
str;
|
||||
|
||||
/* Return true if string v1 denotes a version older than v2.
|
||||
|
||||
Example:
|
||||
versionOlder "1.1" "1.2"
|
||||
=> true
|
||||
versionOlder "1.1" "1.1"
|
||||
=> false
|
||||
*/
|
||||
versionOlder = v1: v2: builtins.compareVersions v2 v1 == 1;
|
||||
|
||||
/* Return true if string v1 denotes a version equal to or newer than v2.
|
||||
|
||||
Example:
|
||||
versionAtLeast "1.1" "1.0"
|
||||
=> true
|
||||
versionAtLeast "1.1" "1.1"
|
||||
=> true
|
||||
versionAtLeast "1.1" "1.2"
|
||||
=> false
|
||||
*/
|
||||
versionAtLeast = v1: v2: !versionOlder v1 v2;
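# A hedged usage sketch with a made-up flag name: gating a configure flag on a
# version comparison.
#   optionalString (versionAtLeast "2.3" "2.0") "--new-flag"
#   => "--new-flag"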
|
||||
|
||||
/* This function takes an argument that's either a derivation or a
|
||||
derivation's "name" attribute and extracts the version part from that
|
||||
argument.
|
||||
|
||||
Example:
|
||||
getVersion "youtube-dl-2016.01.01"
|
||||
=> "2016.01.01"
|
||||
getVersion pkgs.youtube-dl
|
||||
=> "2016.01.01"
|
||||
*/
|
||||
getVersion = x:
|
||||
let
|
||||
parse = drv: (builtins.parseDrvName drv).version;
|
||||
in if isString x
|
||||
then parse x
|
||||
else x.version or (parse x.name);
|
||||
|
||||
/* Extract name with version from URL. Ask for separator which is
|
||||
supposed to start extension.
|
||||
|
||||
Example:
|
||||
nameFromURL "https://nixos.org/releases/nix/nix-1.7/nix-1.7-x86_64-linux.tar.bz2" "-"
|
||||
=> "nix"
|
||||
nameFromURL "https://nixos.org/releases/nix/nix-1.7/nix-1.7-x86_64-linux.tar.bz2" "_"
|
||||
=> "nix-1.7-x86"
|
||||
*/
|
||||
nameFromURL = url: sep:
|
||||
let
|
||||
components = splitString "/" url;
|
||||
filename = lib.last components;
|
||||
name = builtins.head (splitString sep filename);
|
||||
in assert name != filename; name;
|
||||
|
||||
/* Create an --{enable,disable}-<feat> string that can be passed to
|
||||
standard GNU Autoconf scripts.
|
||||
|
||||
Example:
|
||||
enableFeature true "shared"
|
||||
=> "--enable-shared"
|
||||
enableFeature false "shared"
|
||||
=> "--disable-shared"
|
||||
*/
|
||||
enableFeature = enable: feat: "--${if enable then "enable" else "disable"}-${feat}";
|
||||
|
||||
/* Create an --{enable-<feat>=<value>,disable-<feat>} string that can be passed to
|
||||
standard GNU Autoconf scripts.
|
||||
|
||||
Example:
|
||||
enableFeatureAs true "shared" "foo"
|
||||
=> "--enable-shared=foo"
|
||||
enableFeatureAs false "shared" (throw "ignored")
|
||||
=> "--disable-shared"
|
||||
*/
|
||||
enableFeatureAs = enable: feat: value: enableFeature enable feat + optionalString enable "=${value}";
|
||||
|
||||
/* Create an --{with,without}-<feat> string that can be passed to
|
||||
standard GNU Autoconf scripts.
|
||||
|
||||
Example:
|
||||
withFeature true "shared"
|
||||
=> "--with-shared"
|
||||
withFeature false "shared"
|
||||
=> "--without-shared"
|
||||
*/
|
||||
withFeature = with_: feat: "--${if with_ then "with" else "without"}-${feat}";
|
||||
|
||||
/* Create an --{with-<feat>=<value>,without-<feat>} string that can be passed to
|
||||
standard GNU Autoconf scripts.
|
||||
|
||||
Example:
|
||||
withFeatureAs true "shared" "foo"
|
||||
=> "--with-shared=foo"
|
||||
withFeatureAs false "shared" (throw "ignored")
|
||||
=> "--without-shared"
|
||||
*/
|
||||
withFeatureAs = with_: feat: value: withFeature with_ feat + optionalString with_ "=${value}";
|
||||
|
||||
/* Create a fixed width string with additional prefix to match
|
||||
required width.
|
||||
|
||||
This function will fail if the input string is longer than the
|
||||
requested length.
|
||||
|
||||
Type: fixedWidthString :: int -> string -> string
|
||||
|
||||
Example:
|
||||
fixedWidthString 5 "0" (toString 15)
|
||||
=> "00015"
|
||||
*/
|
||||
fixedWidthString = width: filler: str:
|
||||
let
|
||||
strw = lib.stringLength str;
|
||||
reqWidth = width - (lib.stringLength filler);
|
||||
in
|
||||
assert lib.assertMsg (strw <= width)
|
||||
"fixedWidthString: requested string length (${
|
||||
toString width}) must not be shorter than actual length (${
|
||||
toString strw})";
|
||||
if strw == width then str else filler + fixedWidthString reqWidth filler str;
|
||||
|
||||
/* Format a number adding leading zeroes up to fixed width.
|
||||
|
||||
Example:
|
||||
fixedWidthNumber 5 15
|
||||
=> "00015"
|
||||
*/
|
||||
fixedWidthNumber = width: n: fixedWidthString width "0" (toString n);
|
||||
|
||||
/* Check whether a value can be coerced to a string */
|
||||
isCoercibleToString = x:
|
||||
builtins.elem (builtins.typeOf x) [ "path" "string" "null" "int" "float" "bool" ] ||
|
||||
(builtins.isList x && lib.all isCoercibleToString x) ||
|
||||
x ? outPath ||
|
||||
x ? __toString;
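# A hedged usage sketch, since the comment above gives no example:
#   isCoercibleToString 42            => true
#   isCoercibleToString ./foo.nix     => true
#   isCoercibleToString { a = 1; }    => false  (no outPath or __toString)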
|
||||
|
||||
/* Check whether a value is a store path.
|
||||
|
||||
Example:
|
||||
isStorePath "/nix/store/d945ibfx9x185xf04b890y4f9g3cbb63-python-2.7.11/bin/python"
|
||||
=> false
|
||||
isStorePath "/nix/store/d945ibfx9x185xf04b890y4f9g3cbb63-python-2.7.11/"
|
||||
=> true
|
||||
isStorePath pkgs.python
|
||||
=> true
|
||||
isStorePath [] || isStorePath 42 || isStorePath {} || …
|
||||
=> false
|
||||
*/
|
||||
isStorePath = x:
|
||||
if isCoercibleToString x then
|
||||
let str = toString x; in
|
||||
builtins.substring 0 1 str == "/"
|
||||
&& dirOf str == builtins.storeDir
|
||||
else
|
||||
false;
|
||||
|
||||
/* Parse a string as an int.
|
||||
|
||||
Type: string -> int
|
||||
|
||||
Example:
|
||||
toInt "1337"
|
||||
=> 1337
|
||||
toInt "-4"
|
||||
=> -4
|
||||
toInt "3.14"
|
||||
=> error: floating point JSON numbers are not supported
|
||||
*/
|
||||
# Obviously, it is a bit hacky to use fromJSON this way.
|
||||
toInt = str:
|
||||
let may_be_int = builtins.fromJSON str; in
|
||||
if builtins.isInt may_be_int
|
||||
then may_be_int
|
||||
else throw "Could not convert ${str} to int.";
|
||||
|
||||
/* Read a list of paths from `file`, relative to the `rootPath`.
|
||||
Lines beginning with `#` are treated as comments and ignored.
|
||||
Whitespace is significant.
|
||||
|
||||
NOTE: This function is not performant and should be avoided.
|
||||
|
||||
Example:
|
||||
readPathsFromFile /prefix
|
||||
./pkgs/development/libraries/qt-5/5.4/qtbase/series
|
||||
=> [ "/prefix/dlopen-resolv.patch" "/prefix/tzdir.patch"
|
||||
"/prefix/dlopen-libXcursor.patch" "/prefix/dlopen-openssl.patch"
|
||||
"/prefix/dlopen-dbus.patch" "/prefix/xdg-config-dirs.patch"
|
||||
"/prefix/nix-profiles-library-paths.patch"
|
||||
"/prefix/compose-search-path.patch" ]
|
||||
*/
|
||||
readPathsFromFile = rootPath: file:
|
||||
let
|
||||
lines = lib.splitString "\n" (builtins.readFile file);
|
||||
removeComments = lib.filter (line: line != "" && !(lib.hasPrefix "#" line));
|
||||
relativePaths = removeComments lines;
|
||||
absolutePaths = builtins.map (path: rootPath + "/${path}") relativePaths;
|
||||
in
|
||||
absolutePaths;
|
||||
|
||||
/* Read the contents of a file removing the trailing \n
|
||||
|
||||
Type: fileContents :: path -> string
|
||||
|
||||
Example:
|
||||
$ echo "1.0" > ./version
|
||||
|
||||
fileContents ./version
|
||||
=> "1.0"
|
||||
*/
|
||||
fileContents = file: removeSuffix "\n" (builtins.readFile file);
|
||||
}
|
24
ofborg/test-srcs/make-maintainer-pr.sh
Executable file
|
@@ -0,0 +1,24 @@
|
|||
#!/usr/bin/env bash
|
||||
set -eu
|
||||
|
||||
bare=$1
|
||||
co=$2
|
||||
|
||||
makepr() {
|
||||
git init --bare "$bare"
|
||||
git clone "$bare" "$co"
|
||||
|
||||
cp -r maintainers/* "$co/"
|
||||
git -C "$co" add .
|
||||
git -C "$co" commit --no-gpg-sign --author "GrahamCOfBorg <graham+cofborg@example.com>" -m "initial repo commit"
|
||||
git -C "$co" push origin master
|
||||
|
||||
cp maintainers-pr/* "$co/"
|
||||
git -C "$co" checkout -b my-cool-pr
|
||||
git -C "$co" add .
|
||||
git -C "$co" commit --no-gpg-sign --author "GrahamCOfBorg <graham+cofborg@example.com>" -m "check out this cool PR"
|
||||
git -C "$co" push origin my-cool-pr:refs/pull/1/head
|
||||
}
|
||||
|
||||
makepr >&2
|
||||
git -C "$co" rev-parse HEAD
|