feat(*): init Rust port
This is a Rust port of the original Perl script; legacy cruft is removed and it focuses on a modern Hydra deployment. Nonetheless, it knows how to perform migrations based on the channel versions.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
parent 809d960f49
commit 2088f62eac

.gitignore (vendored) | 5
@@ -1 +1,6 @@
 result
+
+
+# Added by cargo
+
+/target

Cargo.lock (generated, new file) | 2176
(File diff suppressed because it is too large.)

Cargo.toml (new file) | 16
@@ -0,0 +1,16 @@
+[package]
+name = "nixos-channel-scripts"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+clap = { version = "4.5.13", features = [ "derive" ] }
+log = "0.4.22"
+object_store = { version = "0.10.2", features = [ "aws" ] }
+pretty_env_logger = "0.5.0"
+regex = "1.10.6"
+reqwest = { version = "0.12.5", features = [ "json" ] }
+serde = { version = "1.0.204", features = [ "derive" ] }
+textwrap = "0.16.1"
+tokio = { version = "1.39.2", features = ["full"] }
+toml = "0.8.19"

default.nix | 14
@@ -1,3 +1,11 @@
-(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
-  src = builtins.fetchGit ./.;
-}).defaultNix
+{ pkgs ? import <nixpkgs> {} }:
+{
+  shell = pkgs.mkShell {
+    buildInputs = [
+      pkgs.cargo
+      pkgs.rustc
+      pkgs.openssl
+      pkgs.pkg-config
+    ];
+  };
+}

forkos.toml (new file) | 5
@@ -0,0 +1,5 @@
+hydra_uri = "https://hydra.forkos.org"
+binary_cache_uri = "https://cache.forkos.org"
+nixpkgs_dir = "/var/lib/nixpkgs"
+s3_release_bucket_uri = "s3://cache.forkos.org/release"
+s3_channel_bucket_uri = "s3://cache.forkos.org/channel"

shell.nix (deleted) | 3
@@ -1,3 +0,0 @@
-(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
-  src = builtins.fetchGit ./.;
-}).shellNix

src/actions.rs (new file) | 27
@@ -0,0 +1,27 @@
+use std::path::PathBuf;
+
+enum HydraProductType {
+    BrotliJson,
+    SourceDistribution,
+}
+
+impl ToString for HydraProductType {
+    fn to_string(&self) -> String {
+        match self {
+            Self::BrotliJson => "json-br".to_string(),
+            Self::SourceDistribution => "source-dist".to_string(),
+        }
+    }
+}
+
+enum Action {
+    WriteFile {
+        dst_path: PathBuf,
+        contents: String
+    },
+    WriteHydraProduct {
+        dst_path: PathBuf,
+        product_name: String,
+        product_type: Option<HydraProductType>,
+    }
+}
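
Nothing in this commit consumes `Action` yet. As a purely hypothetical sketch of the apply step (the `execute` function below is not part of the commit), a plan could be interpreted like this:

    // Hypothetical executor for the plan/apply split; not part of this commit.
    async fn execute(action: Action) -> std::io::Result<()> {
        match action {
            Action::WriteFile { dst_path, contents } => {
                // Materialize a plain file, e.g. channel metadata.
                tokio::fs::write(&dst_path, contents).await
            }
            Action::WriteHydraProduct { dst_path, product_name, product_type } => {
                let kind = product_type.map(|t| t.to_string()).unwrap_or_default();
                // A real implementation would download the named Hydra build
                // product (optionally a "json-br"/"source-dist" flavor) here.
                todo!("fetch {product_name} {kind} into {dst_path:?}")
            }
        }
    }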

src/config.rs (new file) | 33
@@ -0,0 +1,33 @@
+use object_store::aws::{AmazonS3, AmazonS3Builder};
+use serde::Deserialize;
+use std::path::PathBuf;
+
+#[derive(Debug, Deserialize)]
+pub struct MirrorConfig {
+    /// URI to Hydra instance
+    pub hydra_uri: String,
+    /// URI to the binary cache
+    binary_cache_uri: String,
+    /// A path to a checkout of nixpkgs
+    nixpkgs_dir: PathBuf,
+    /// S3 releases bucket URL
+    s3_release_bucket_uri: String,
+    /// S3 channels bucket URL
+    s3_channel_bucket_uri: String,
+}
+
+impl MirrorConfig {
+    pub fn release_bucket(&self) -> AmazonS3 {
+        AmazonS3Builder::from_env()
+            .with_bucket_name(&self.s3_release_bucket_uri)
+            .build()
+            .expect("Failed to connect to the S3 release bucket")
+    }
+
+    pub fn channel_bucket(&self) -> AmazonS3 {
+        AmazonS3Builder::from_env()
+            .with_bucket_name(&self.s3_channel_bucket_uri)
+            .build()
+            .expect("Failed to connect to the S3 channel bucket")
+    }
+}
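
Both bucket helpers build their client with `AmazonS3Builder::from_env()`, so credentials and region come from the `AWS_*` environment variables; only the bucket name comes from the TOML file. A minimal sketch of the wiring, mirroring what `main.rs` does with `forkos.toml`:

    // Sketch: deserialize forkos.toml into MirrorConfig (as main.rs does).
    // AWS credentials/region are picked up from the environment by from_env().
    fn load_config() -> MirrorConfig {
        let text = std::fs::read_to_string("forkos.toml")
            .expect("Failed to read the configuration file");
        toml::from_str(&text).expect("Failed to deserialize the configuration file")
    }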

src/git.rs (new file) | 1
@@ -0,0 +1 @@
+

src/hydra.rs (new file) | 168
@@ -0,0 +1,168 @@
+use std::{collections::HashMap, path::PathBuf};
+use regex::Regex;
+
+use serde::{de::Error, Deserialize, Deserializer};
+
+use crate::config::MirrorConfig;
+
+pub type ReleaseId = u64;
+pub type EvaluationId = u64;
+pub type BuildId = u64;
+
+fn deser_bool_as_int<'de, D>(deserializer: D) -> Result<bool, D::Error>
+    where D: Deserializer<'de>
+{
+    u8::deserialize(deserializer).map(|b| if b == 1 { true } else { false })
+}
+
+fn deser_bool_as_string<'de, D>(deserializer: D) -> Result<bool, D::Error>
+    where D: Deserializer<'de>
+{
+    let s: &str = Deserialize::deserialize(deserializer)?;
+
+    match s {
+        "false" => Ok(false),
+        "true" => Ok(true),
+        _ => Err(Error::unknown_variant(s, &["false", "true"]))
+    }
+}
+
+#[derive(Debug)]
+pub struct Channel {
+    pub name: String
+}
+
+impl Channel {
+    pub fn version(&self) -> String {
+        let re = Regex::new("([a-z]+)-(?<ver>.*)").unwrap();
+        let caps = re.captures(&self.name).expect("Failed to parse the channel name");
+
+        caps["ver"].to_string()
+    }
+
+    pub fn prefix(&self) -> String {
+        let re = Regex::new("(?<name>[a-z]+)-(.*)").unwrap();
+        let caps = re.captures(&self.name).expect("Failed to parse the channel name");
+
+        caps["name"].to_string()
+    }
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Release {
+    pub id: ReleaseId,
+    job: String,
+    #[serde(rename = "releasename")]
+    release_name: Option<String>,
+    #[serde(rename = "starttime")]
+    start_time: u64,
+    #[serde(rename = "stoptime")]
+    stop_time: u64,
+    #[serde(rename = "nixname")]
+    pub nix_name: String,
+    #[serde(rename = "jobsetevals")]
+    jobset_evals: Vec<EvaluationId>,
+    jobset: String,
+    #[serde(deserialize_with = "deser_bool_as_int")]
+    finished: bool,
+    priority: u64,
+    system: String,
+    timestamp: u64,
+    project: String,
+    #[serde(rename = "drvpath")]
+    derivation_path: String,
+    // ignored: buildproducts, buildoutputs, buildmetrics, buildstatus
+}
+
+#[derive(Debug)]
+pub struct GitInput {
+    uri: String,
+    revision: String
+}
+
+// FIXME(Raito): for some reason, #[serde(tag = "type", rename_all = "lowercase")] doesn't behave
+// correctly and causes deserialization failures in practice. `untagged` is suboptimal but works
+// because of the way the responses are shaped...
+#[derive(Debug, Deserialize)]
+#[serde(untagged, expecting = "a valid jobset input")]
+pub enum Input {
+    Boolean {
+        #[serde(deserialize_with = "deser_bool_as_string")]
+        value: bool
+    },
+    Git { uri: String, revision: String },
+    Nix { value: String },
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Evaluation {
+    pub id: EvaluationId,
+    #[serde(rename = "checkouttime")]
+    checkout_time: u64,
+    #[serde(rename = "evaltime")]
+    eval_time: u64,
+    flake: Option<String>,
+    #[serde(rename = "jobsetevalinputs", default)]
+    pub jobset_eval_inputs: HashMap<String, Input>,
+    timestamp: u64,
+    builds: Vec<BuildId>,
+}
+
+impl Release {
+    pub fn version(&self) -> String {
+        let re = Regex::new(".+-(?<ver>[0-9].+)").unwrap();
+        let caps = re.captures(&self.nix_name).expect("Failed to parse the release name");
+
+        caps["ver"].to_string()
+    }
+
+    pub fn evaluation_url(&self, hydra_base_uri: &str) -> String {
+        let eval_id = self.jobset_evals.first().expect("Failed to obtain the corresponding evaluation, malformed release?");
+        format!("{}/eval/{}", hydra_base_uri, eval_id)
+    }
+
+    /// Directory related to this release.
+    fn directory(&self, channel: &Channel) -> String {
+        match channel.name.as_str() {
+            "nixpkgs-unstable" => "nixpkgs".to_string(),
+            _ => format!("{}/{}", channel.prefix(), channel.version())
+        }
+    }
+
+    pub fn prefix(&self, channel: &Channel) -> object_store::path::Path {
+        format!("{}/{}", self.directory(channel), self.nix_name).into()
+    }
+}
+
+pub fn release_uri(hydra_uri: &str, job_name: &str) -> String {
+    format!("{}/job/{}/latest", hydra_uri, job_name)
+}
+
+pub struct HydraClient<'a> {
+    pub config: &'a MirrorConfig,
+}
+
+impl HydraClient<'_> {
+    pub async fn fetch_release(&self, job_name: &str) -> reqwest::Result<Release> {
+        let client = reqwest::Client::new();
+        let resp = client.get(release_uri(&self.config.hydra_uri, job_name))
+            .header("Accept", "application/json")
+            // TODO: put a proper version
+            .header("User-Agent", "nixos-channel-scripts (rust)")
+            .send()
+            .await?;
+
+        resp.json().await
+    }
+
+    pub async fn fetch_evaluation(&self, release: &Release) -> reqwest::Result<Evaluation> {
+        let client = reqwest::Client::new();
+        let resp = client.get(release.evaluation_url(&self.config.hydra_uri))
+            .header("Accept", "application/json")
+            .header("User-Agent", "nixos-channel-scripts (rust)")
+            .send()
+            .await?;
+
+        resp.json().await
+    }
+}
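
The FIXME above settles on `untagged`, which simply tries each variant in order and ignores unknown fields such as Hydra's `type` tag. A sketch of what that accepts, written as a test (`serde_json` is not in Cargo.toml; it would have to be added as a dev-dependency for this to run):

    // Sketch: untagged deserialization tries Boolean, then Git, then Nix.
    // The "type" field from Hydra's payload is simply ignored.
    #[test]
    fn untagged_input_accepts_hydra_payloads() {
        let git: Input = serde_json::from_str(
            r#"{"type": "git", "uri": "https://github.com/NixOS/nixpkgs", "revision": "deadbeef"}"#,
        ).unwrap();
        assert!(matches!(git, Input::Git { .. }));

        // Boolean inputs arrive as the strings "true"/"false", hence
        // deser_bool_as_string.
        let flag: Input = serde_json::from_str(r#"{"value": "true"}"#).unwrap();
        assert!(matches!(flag, Input::Boolean { value: true }));
    }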

src/main.rs (new file) | 143
@@ -0,0 +1,143 @@
+mod config;
+mod actions;
+mod hydra;
+mod git;
+
+use std::path::PathBuf;
+
+use clap::{Subcommand, Parser, Args};
+use hydra::{Channel, Evaluation, HydraClient, Release};
+use log::{info, trace, warn};
+use object_store::{aws::{AmazonS3, AmazonS3Builder}, ObjectStore};
+
+#[derive(Debug, Args)]
+struct ChannelArgs {
+    /// Channel name to update
+    channel_name: String,
+    /// Job name to fetch from the configured Hydra instance
+    job_name: String,
+    /// If a channel rollback is detected, do not bail out; proceed to roll back the channel
+    #[arg(short, long, default_value_t = false)]
+    ignore_rollback_protection: bool
+}
+
+#[derive(Debug, Args)]
+struct GlobalOpts {
+    /// TOML configuration file for channel updates
+    #[arg(short, long)]
+    config_file: PathBuf,
+    /// Do not execute remote side effects (S3 uploads, redirections, etc.)
+    #[arg(short, long, default_value_t = false)]
+    dry_run: bool,
+}
+
+#[derive(Debug, Parser)]
+#[command(version, about, long_about = None)]
+struct App {
+    #[command(flatten)]
+    global_opts: GlobalOpts,
+    #[command(subcommand)]
+    command: Commands
+}
+
+#[derive(Debug, Subcommand)]
+enum Commands {
+    /// Print the plan for the given channel name and job name
+    Plan(ChannelArgs),
+    /// Apply the plan that would be generated for the given channel name and job name
+    Apply(ChannelArgs),
+}
+
+#[derive(Debug)]
+struct PreflightCheckContext<'a> {
+    target_channel: &'a Channel,
+    new_release: &'a Release,
+    new_evaluation: &'a Evaluation,
+    current_release_path: Option<object_store::path::Path>,
+}
+
+async fn run_preflight_checks(channel: &Channel, release: &Release, evaluation: &Evaluation, channel_bucket: AmazonS3) -> bool {
+    info!("Running pre-flight checks...");
+    let channel_name = object_store::path::Path::parse(&channel.name).expect("Channel name should be a valid S3 path");
+    let mut context = PreflightCheckContext {
+        target_channel: channel,
+        new_release: release,
+        new_evaluation: evaluation,
+        current_release_path: None
+    };
+    match channel_bucket.get_opts(&channel_name, object_store::GetOptions { head: true, ..Default::default() }).await {
+        Ok(object_store::GetResult { attributes, meta, .. }) => {
+            info!("Release found: {:?}", meta);
+            trace!("Attributes: {:#?}", attributes);
+            if let Some(redirection_target) = attributes.get(&object_store::Attribute::Metadata("x-amz-website-redirect-location".into())) {
+                context.current_release_path = Some(redirection_target.to_string().into());
+            }
+        },
+        Err(err) => {
+            warn!("Error while asking the channel bucket: {}", err);
+            todo!();
+        }
+    }
+
+    trace!("Preflight check context assembled: {:?}", context);
+    // TODO: run anti-rollback protection
+    true
+}
+
+#[tokio::main]
+async fn main() -> std::io::Result<()> {
+    pretty_env_logger::init();
+
+    let args = App::parse();
+
+    let config: config::MirrorConfig = toml::from_str(&std::fs::read_to_string(args.global_opts.config_file)
+        .expect("Failed to read the configuration file"))
+        .expect("Failed to deserialize the configuration file");
+
+    println!("config: {:?}", config);
+
+    let hydra_client: HydraClient = HydraClient {
+        config: &config
+    };
+
+    match args.command {
+        Commands::Plan(channel) => {
+            let chan = Channel {
+                name: channel.channel_name,
+            };
+            info!("Planning for channel {} using job {}", chan.name, channel.job_name);
+            let release = hydra_client.fetch_release(&channel.job_name)
+                .await.expect("Failed to fetch release");
+            trace!("{:#?}", release);
+            let evaluation = hydra_client.fetch_evaluation(&release)
+                .await.expect("Failed to fetch evaluation");
+            trace!("{:?}", evaluation.jobset_eval_inputs);
+
+            if let hydra::Input::Git { revision, .. } = evaluation.jobset_eval_inputs.get("nixpkgs").expect("Expected a nixpkgs repository") {
+                info!("Release information:\n- Release is: {} (build {})\n- Eval is: {}\n- Prefix is: {}\n- Git commit is {}\n",
+                    release.nix_name, release.id, evaluation.id, release.prefix(&chan), revision);
+
+                let release_bucket = config.release_bucket();
+
+                // If the release already exists, skip it.
+                if release_bucket.head(&release.prefix(&chan)).await.is_ok() {
+                    log::warn!("Release already exists, skipping");
+                    return Ok(());
+                }
+
+                // Run the preflight checks.
+                let channel_bucket = config.channel_bucket();
+                if run_preflight_checks(&chan, &release, &evaluation, channel_bucket).await {
+                    info!("Preflight checks passed");
+                } else {
+                    log::error!("Preflight check failed, cannot continue; pass `--force` if you want to bypass preflight checks");
+                }
+            } else {
+                panic!("Nixpkgs input is not of type Git");
+            }
+        },
+        Commands::Apply(_) => todo!(),
+    }
+
+    Ok(())
+}
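
The anti-rollback protection is still a TODO. One hypothetical shape for it (the helper below is not part of the commit) would compare the currently published release, recovered from `current_release_path`, against the candidate:

    // Hypothetical anti-rollback check; not part of this commit. A real
    // implementation would do a proper version comparison, not a
    // lexicographic one.
    fn is_rollback(current_release_path: &object_store::path::Path, new: &Release) -> bool {
        // e.g. "nixos/24.05/nixos-24.05.1234.deadbeef" -> "nixos-24.05.1234.deadbeef"
        let current_name = current_release_path
            .parts()
            .last()
            .map(|part| part.as_ref().to_string())
            .unwrap_or_default();
        current_name > new.nix_name
    }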