Compare commits

...

1 commit
main ... rust

Author SHA1 Message Date
raito 734a2d8def feat(*): init Rust port
This is a Rust port of the original Perl script; legacy cruft has been
removed, and it focuses on a modern Hydra deployment.

Nonetheless, it knows how to perform migrations based on the channel
versions.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
2024-08-02 21:05:35 +02:00
8 changed files with 2322 additions and 0 deletions

5
.gitignore vendored
View file

@@ -1 +1,6 @@
result
# Added by cargo
/target

2090
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

14
Cargo.toml Normal file
View file

@@ -0,0 +1,14 @@
[package]
name = "nixos-channel-scripts"
version = "0.1.0"
edition = "2021"
[dependencies]
clap = { version = "4.5.13", features = [ "derive" ] }
log = "0.4.22"
object_store = { version = "0.10.2", features = [ "aws" ] }
regex = "1.10.6"
reqwest = "0.12.5"
serde = "1.0.204"
tokio = { version = "1.39.2", features = ["full"] }
toml = "0.8.19"

4
forkos.toml Normal file
View file

@@ -0,0 +1,4 @@
hydra_uri = "https://hydra.forkos.org"
binary_cache_uri = "https://cache.forkos.org"
nixpkgs_dir = "/var/lib/nixpkgs"
s3_release_bucket_uri = "s3://cache.forkos.org/release"

27
src/actions.rs Normal file
View file

@@ -0,0 +1,27 @@
use std::path::PathBuf;
enum HydraProductType {
BrotliJson,
SourceDistribution,
}
impl ToString for HydraProductType {
fn to_string(&self) -> String {
match self {
Self::BrotliJson => "json-br".to_string(),
Self::SourceDistribution => "source-dist".to_string(),
}
}
}
enum Action {
WriteFile {
dst_path: PathBuf,
contents: String
},
WriteHydraProduct {
dst_path: PathBuf,
product_name: String,
product_type: Option<HydraProductType>,
}
}

14
src/config.rs Normal file
View file

@@ -0,0 +1,14 @@
use serde::Deserialize;
use std::path::{Path, PathBuf};
#[derive(Debug, Deserialize)]
pub struct MirrorConfig {
/// URI to Hydra instance
pub hydra_uri: String,
/// URI to the binary cache
binary_cache_uri: String,
/// A path to a checkout of nixpkgs
nixpkgs_dir: PathBuf,
/// S3 releases bucket URL
s3_release_bucket_uri: String,
}

94
src/hydra.rs Normal file
View file

@@ -0,0 +1,94 @@
use std::collections::HashMap;
use regex::Regex;
use serde::Deserialize;
use crate::config::MirrorConfig;
pub type ReleaseId = u64;
pub type EvaluationId = u64;
pub type BuildId = u64;
#[derive(Deserialize, Debug)]
pub struct Release {
id: ReleaseId,
job: String,
#[serde(rename = "releasename")]
release_name: String,
#[serde(rename = "starttime")]
start_time: u64,
#[serde(rename = "stoptime")]
stop_time: u64,
#[serde(rename = "nixname")]
nix_name: String,
#[serde(rename = "jobsetevals")]
jobset_evals: Vec<EvaluationId>,
jobset: String,
finished: bool,
priority: u64,
system: String,
timestamp: u64,
project: String,
#[serde(rename = "drvpath")]
derivation_path: String,
// ignored: buildproducts, buildoutputs, buildmetrics, buildstatus
}
pub struct GitInput {
uri: String,
revision: String
}
pub enum Input {
Boolean(bool),
Git(GitInput),
/// A Nix value
Nix(String),
}
pub struct Evaluation {
id: EvaluationId,
checkout_time: u64,
eval_time: u64,
flake: Option<String>,
jobset_eval_inputs: HashMap<String, Input>,
timestamp: u64,
builds: Vec<BuildId>,
}
impl Release {
pub fn version(&self) -> String {
let re = Regex::new(".+-(?<ver>[0-9].+)").unwrap();
let caps = re.captures(&self.nix_name).expect("Failed to parse the release name");
caps["ver"].to_string()
}
pub fn evaluation_url(&self, hydra_base_uri: &str) -> String {
let eval_id = self.jobset_evals.first().expect("Failed to obtain the corresponding evaluation, malformed release?");
format!("{}/eval/{}", hydra_base_uri, eval_id)
}
}
pub fn release_uri(hydra_uri: &str, job_name: &str) -> String {
format!("{}/job/{}/latest", hydra_uri, job_name)
}
pub struct HydraClient<'a> {
pub config: &'a MirrorConfig,
}
impl HydraClient<'_> {
pub async fn fetch_release(&self, job_name: &str) -> reqwest::Result<Release> {
println!("{:?}", release_uri(&self.config.hydra_uri, job_name));
let client = reqwest::Client::new();
client.get(release_uri(&self.config.hydra_uri, job_name))
.header("Accept", "application/json")
// TODO: put a proper version
.header("User-Agent", "nixos-channel-scripts (rust)")
.send()
.await?
.json()
.await
}
}

74
src/main.rs Normal file
View file

@@ -0,0 +1,74 @@
mod config;
mod actions;
mod hydra;
use std::path::PathBuf;
use clap::{Subcommand, Parser, Args};
use hydra::HydraClient;
#[derive(Debug, Args)]
struct ChannelArgs {
/// Channel name to update
channel_name: String,
/// Job name to fetch from the Hydra instance configured
job_name: String,
/// If a channel rollback is detected, do not bail out and proceed to rollback the channel
#[arg(short, long, default_value_t = false)]
ignore_rollback_protection: bool
}
#[derive(Debug, Args)]
struct GlobalOpts {
/// TOML configuration file for channel updates
#[arg(short, long)]
config_file: PathBuf,
/// Whether to execute no remote side effects (S3 uploads, redirections), etc.
#[arg(short, long, default_value_t = false)]
dry_run: bool,
}
#[derive(Debug, Parser)]
#[command(version, about, long_about = None)]
struct App {
#[command(flatten)]
global_opts: GlobalOpts,
#[command(subcommand)]
command: Commands
}
#[derive(Debug, Subcommand)]
enum Commands {
/// Print the plan for the given channel name and job name
Plan(ChannelArgs),
/// Apply the plan that would be generated for the given channel name and job name
Apply(ChannelArgs),
}
#[tokio::main]
async fn main() -> std::io::Result<()> {
let args = App::parse();
let config: config::MirrorConfig = toml::from_str(&std::fs::read_to_string(args.global_opts.config_file)
.expect("Failed to read the configuration file"))
.expect("Failed to deserialize the configuration file");
println!("config: {:?}", config);
let hydra_client: HydraClient = HydraClient {
config: &config
};
match args.command {
Commands::Plan(channel) => {
println!("Planning for channel {} using job {}", channel.channel_name, channel.job_name);
let release = hydra_client.fetch_release(&channel.job_name)
.await.expect("Failed to fetch release");
println!("Release {:?}", release);
},
Commands::Apply(_) => todo!(),
}
Ok(())
}