It's janky, but it works — somewhat.

This commit is contained in:
Artemis Tosini 2024-07-13 22:27:27 +00:00
parent 6411d628c4
commit 0dda2cde4d
Signed by: artemist
GPG key ID: EE5227935FE3FF18

View file

@ -1,12 +1,16 @@
use std::{
collections::{HashMap, HashSet},
io::BufRead,
fmt::Write,
io::{BufRead, Read},
path::{Path, PathBuf},
};
use base64::Engine;
use bytes::Buf;
use ed25519_dalek::VerifyingKey;
use reqwest::Url;
use sha2::Digest;
use tokio::io::AsyncWriteExt;
use tracing::{span, Span};
use crate::{
@ -35,7 +39,7 @@ pub struct FetchAndUnpackNixSubstituter {
target: StorePath,
/// Destination directory, normally temporary.
/// For compatibility with tarballs, files will be placed in
/// the nix/store subdirectory of the destination
/// the nix-/store subdirectory of the destination
dest: PathBuf,
/// Proxy used for all requests from substituters
proxy: Option<Url>,
@ -191,8 +195,15 @@ impl Action for FetchAndUnpackNixSubstituter {
.map_err(ActionErrorKind::Reqwest)
.map_err(Self::error)?;
let nix_store_dir = self.dest.join("nix-/store");
tokio::fs::create_dir_all(&nix_store_dir)
.await
.map_err(|e| ActionErrorKind::CreateDirectory(nix_store_dir.clone(), e))
.map_err(Self::error)?;
let mut outputs_remaining = vec![self.target.clone()];
let mut outputs_done = HashSet::new();
let mut reginfo = String::new();
loop {
let Some(output) = outputs_remaining.pop() else {
@ -208,9 +219,99 @@ impl Action for FetchAndUnpackNixSubstituter {
for reference in &narinfo.references {
outputs_remaining.push(reference.clone());
}
let compressed_nar = client
.get(narinfo.url.clone())
.send()
.await
.map_err(ActionErrorKind::Reqwest)
.map_err(Self::error)?
.error_for_status()
.map_err(ActionErrorKind::Reqwest)
.map_err(Self::error)?
.bytes()
.await
.map_err(ActionErrorKind::Reqwest)
.map_err(Self::error)?;
let nar_size: usize = narinfo
.nar_size
.try_into()
.map_err(|_| Self::error(SubstitutionError::BadNarInfo))?;
// Decompress to a vec since we need to go through the data twice
// (once for hashing, one for unpacking).
// Otherwise we'd need to decompress twice
let decompressed_nar = match narinfo.compression {
NarCompression::Zstd => zstd::bulk::decompress(&compressed_nar, nar_size)
.map_err(|e| SubstitutionError::Decompress(narinfo.url.clone(), e))
.map_err(Self::error)?,
NarCompression::Xz => {
let mut decompressor = xz2::read::XzDecoder::new(compressed_nar.reader());
let mut result = Vec::with_capacity(nar_size);
decompressor
.read_to_end(&mut result)
.map_err(|e| SubstitutionError::Decompress(narinfo.url.clone(), e))
.map_err(Self::error)?;
result
},
};
if decompressed_nar.len() != nar_size {
return Err(Self::error(SubstitutionError::BadNar(narinfo.url.clone())));
}
let found_hash = {
let mut hasher = sha2::Sha256::new();
hasher.update(&decompressed_nar);
hasher.finalize()
};
if encode_nix32(&found_hash) != narinfo.nar_hash {
return Err(Self::error(SubstitutionError::BadNar(narinfo.url.clone())));
}
// TODO: Figure out a better way to make relative
// Maybe simplify StorePath?
let out_dir = self.dest.join("nix-/store").join(output.full_name);
let decoder = nix_nar::Decoder::new(decompressed_nar.reader())
.map_err(|e| SubstitutionError::Unpack(narinfo.url.clone(), e))
.map_err(Self::error)?;
decoder
.unpack(out_dir)
.map_err(|e| SubstitutionError::Unpack(narinfo.url.clone(), e))
.map_err(Self::error)?;
// File format isn't documented anywhere but implementation is simple:
// https://git.lix.systems/lix-project/lix/src/commit/d461cc1d7b2f489c3886f147166ba5b5e0e37541/src/libstore/store-api.cc#L846
// Unwrapping because string can't fail methods in std::fmt::Write
write!(reginfo, "{}\n", output.full_path).unwrap();
write!(reginfo, "sha256:{}\n", narinfo.nar_hash).unwrap();
write!(reginfo, "{}\n", narinfo.nar_size).unwrap();
// Leave deriver empty, same as lix binary tarballs
reginfo.push('\n');
write!(reginfo, "{}\n", narinfo.references.len()).unwrap();
for reference in &narinfo.references {
write!(reginfo, "{}\n", reference.full_path).unwrap();
}
}
todo!()
let reginfo_path = self.dest.join("nix-/.reginfo");
let mut reginfo_file = tokio::fs::File::create(&reginfo_path)
.await
.map_err(|e| ActionErrorKind::Write(reginfo_path.clone(), e))
.map_err(Self::error)?;
reginfo_file
.write_all(reginfo.as_bytes())
.await
.map_err(|e| ActionErrorKind::Write(reginfo_path.clone(), e))
.map_err(Self::error)?;
Ok(())
}
fn revert_description(&self) -> Vec<ActionDescription> {
@ -368,7 +469,7 @@ impl NarCompression {
/// Parse a narinfo `Compression:` field value into a supported
/// compression algorithm.
///
/// Returns `None` for unrecognized or unsupported compression names so
/// the caller can reject the narinfo instead of misdecoding the nar.
pub fn from_name(name: &str) -> Option<Self> {
    match name {
        "zstd" => Some(Self::Zstd),
        // Must map to Xz: a previous revision returned Self::Zstd here,
        // which made xz-compressed nars fail to decompress.
        "xz" => Some(Self::Xz),
        _ => None,
    }
}
@ -519,11 +620,54 @@ impl NarInfo {
}
}
/// Alphabet for Nix's base-32 encoding. Unlike RFC 4648 base32 it is
/// digits-first and omits the letters e, o, u, t.
static NIX32_CHARS: &[u8; 32] = b"0123456789abcdfghijklmnpqrsvwxyz";

/// Encode a byte string with Nix's base-32 ("nix32") scheme, as used
/// for nar hashes in store paths and narinfo files.
///
/// Nix's variant feels like a bug that stuck: the output is written
/// "backwards" (the character covering the lowest-numbered bits comes
/// last), and bits are grouped starting from the least significant bit
/// of each byte rather than the most significant.
///
/// Worked example — encoding "Meow" (0x4d 0x65 0x6f 0x77): the 32 input
/// bits split into seven 5-bit groups (zero-padded at the top) with
/// values 0x01, 0x1b, 0x16, 0x1e, 0x19, 0xa, 0xd; indexing the alphabet
/// above gives "1vnyrad".
fn encode_nix32(input: &[u8]) -> String {
    // ceil(bit count / 5) output characters; guard the empty case so
    // the subtraction below cannot underflow.
    let n_chars = match input.len() {
        0 => 0,
        bytes => (bytes * 8 - 1) / 5 + 1,
    };
    let mut encoded = String::with_capacity(n_chars);
    // Walk the 5-bit groups from the most significant (highest bit
    // offset) down to bit 0, emitting one alphabet character per group.
    for group in (0..n_chars).rev() {
        let low_bit = group * 5;
        let byte = low_bit / 8;
        let shift = low_bit % 8;
        // Splice the current byte with the following one (zero past the
        // end) so groups straddling a byte boundary are read correctly.
        // When shift == 0 the u16 left-shift by 8 is well-defined and
        // the high byte is discarded by the 0x1f mask anyway.
        let next = u16::from(*input.get(byte + 1).unwrap_or(&0));
        let window = (u16::from(input[byte]) >> shift) | (next << (8 - shift));
        encoded.push(NIX32_CHARS[usize::from(window & 0x1f)] as char);
    }
    encoded
}
#[non_exhaustive]
#[derive(Debug, thiserror::Error)]
pub enum SubstitutionError {
#[error("Unarchiving error")]
Unarchive(#[source] std::io::Error),
#[error("Decompression error for nar from {0}")]
Decompress(Url, #[source] std::io::Error),
#[error("Unpacking error for nar from {0}")]
Unpack(Url, #[source] nix_nar::NarError),
#[error("Unknown proxy scheme, `https://`, `socks5://`, and `http://` supported")]
UnknownProxyScheme,
#[error("Invalid public key")]
@ -536,6 +680,8 @@ pub enum SubstitutionError {
BadNarInfo,
#[error("Bad narinfo signature")]
BadSignature,
#[error("Incorrect nar size or hash for {0}")]
BadNar(Url),
#[error("No substituter has path {0}")]
NonexistantNarInfo(String),
#[error("Invalid nix store path")]