forked from lix-project/lix-installer
Merge pull request 'tools: add scripts to build all and upload all' (#5) from add_tools into main
Reviewed-on: lix-project/lix-installer#5
Commit 9ab13259f0
@@ -5,7 +5,7 @@ Based on the [Determinate Installer](https://install.determinate.systems).


 ```bash
-curl --proto '=https' --tlsv1.2 -sSf -L https://install.lix.systems/nix | sh -s -- install
+curl --proto '=https' --tlsv1.2 -sSf -L https://install.lix.systems/lix | sh -s -- install
 ```

 ## Usage
build-all.xsh (new executable file, 63 lines)
@@ -0,0 +1,63 @@
#! /usr/bin/env nix-shell
#! nix-shell -i xonsh -p xonsh rustup cargo-zigbuild zig
#
# vim: ts=4 sw=4 et
#
# If the shebang line above was necessary, you probably should have used
# the flake, instead. But that's okay! You're valid. <3
#
""" Lix installer generation script.

This uses cargo-zigbuild to generate a cross-compiled variant for each platform,
and places the results in the `results` subdirectory of the current working dir.
"""

import sys
import xonsh
import functools

# Ensure we fail if any of our subcommands do.
$RAISE_SUBPROC_ERROR=True

# Specify the platforms we want to build for.
TARGET_PLATFORMS = [
    "aarch64-apple-darwin",
    "x86_64-apple-darwin",
    "x86_64-unknown-linux-musl",
    "aarch64-unknown-linux-musl",
]

# Create an alias for printing to stderr.
printerr = functools.partial(print, file=sys.stderr)

# Platform helpers.
IS_MACOS = not (xonsh.tools.ON_LINUX or xonsh.tools.ON_WINDOWS)

# Until our flake ships this with osxcross, we'll have to run this on macOS.
if not IS_MACOS:
    printerr("This currently must be run from macOS due to cross-compile wonk. Sorry :(.")
    sys.exit(-1)

# Pre-flight check: ensure we have all the rustup platforms we need.
all_targets_present = True
for platform in TARGET_PLATFORMS:
    if platform not in $(rustup target list --installed):
        printerr(f"ERROR: You don't have a rustup toolchain for {platform}! Install it with `rustup target add {platform}`")
        all_targets_present = False

if not all_targets_present:
    printerr("Failing out; install the platforms above and retry.")
    sys.exit(-2)

# Build for each of our platforms.
printerr("> Building any platforms that need updating.")
for platform in TARGET_PLATFORMS:

    # Build...
    printerr(f"> Building for target {platform}")
    cargo zigbuild --quiet --release --target=@(platform)

    # ... and copy the output to the "results" directory.
    mkdir -p ./results
    cp target/@(platform)/release/lix-installer ./results/lix-installer-@(platform)
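For orientation, here is a sketch of how this script is typically driven and what it leaves behind. The invocation and listing are illustrative, but the file names follow directly from the copy step at the end of the script:

```bash
# Run from the repository checkout on macOS (see the platform check above).
./build-all.xsh

# Each target ends up as a separately named binary under ./results:
ls results/
# lix-installer-aarch64-apple-darwin
# lix-installer-aarch64-unknown-linux-musl
# lix-installer-x86_64-apple-darwin
# lix-installer-x86_64-unknown-linux-musl
```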
enter-env.sh (deleted file, 99 lines)
@@ -1,99 +0,0 @@
#!/usr/bin/env nix-shell
#!nix-shell -p vault awscli2 jq -i bash
# shellcheck shell=bash

set +x # don't leak secrets!
set -eu
umask 077

scriptroot=$(dirname "$(realpath "$0")")
scratch=$(mktemp -d -t tmp.XXXXXXXXXX)

vault token lookup &>/dev/null || {
    echo "You're not logged in to vault! Exiting."
    exit 1
}

function finish {
    set +e
    rm -rf "$scratch"
    if [ "${VAULT_EXIT_ACCESSOR:-}" != "" ]; then
        if vault token lookup &>/dev/null; then
            echo "--> Revoking my token..." >&2
            vault token revoke -self
        fi
    fi
    set -e
}
trap finish EXIT

assume_role() {
    role=$1
    echo "--> Assuming role: $role" >&2
    vault_creds=$(vault token create \
        -display-name="$role" \
        -format=json \
        -role "$role")

    VAULT_EXIT_ACCESSOR=$(jq -r .auth.accessor <<<"$vault_creds")
    export VAULT_TOKEN
    VAULT_TOKEN=$(jq -r .auth.client_token <<<"$vault_creds")
}

function provision_aws_creds() {
    url="$1"
    local ok=
    echo "--> Setting AWS variables: " >&2
    echo "    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN" >&2

    aws_creds=$(vault kv get -format=json "$url")
    export AWS_ACCESS_KEY_ID
    AWS_ACCESS_KEY_ID=$(jq -r .data.access_key <<<"$aws_creds")
    export AWS_SECRET_ACCESS_KEY
    AWS_SECRET_ACCESS_KEY=$(jq -r .data.secret_key <<<"$aws_creds")
    export AWS_SESSION_TOKEN
    AWS_SESSION_TOKEN=$(jq -r .data.security_token <<<"$aws_creds")
    if [ -z "$AWS_SESSION_TOKEN" ] || [ "$AWS_SESSION_TOKEN" == "null" ]; then
        unset AWS_SESSION_TOKEN
    fi

    echo "--> Preflight testing the AWS credentials..." >&2
    for _ in {0..20}; do
        if check_output=$(aws sts get-caller-identity 2>&1 >/dev/null); then
            ok=1
            break
        else
            echo -n "." >&2
            sleep 1
        fi
    done
    if [[ -z "$ok" ]]; then
        echo $'\nPreflight test failed:\n'"$check_output" >&2
        return 1
    fi
    echo
    unset aws_creds
}

assume_role "internalservices_nix_installer_developer"
provision_aws_creds "internalservices/aws/creds/nix_installer"

if [ "${1:-}" == "" ]; then
    cat <<\BASH > "$scratch/bashrc"
expiration_ts=$(date +%s -d "$(vault token lookup -format=json | jq -r '.data.expire_time')")
vault_prompt() {
    local remaining=$(( $expiration_ts - $(date '+%s')))
    if [[ "$remaining" -lt 1 ]]; then
        remaining=expired
        printf '\n\e[01;33mtoken expired\e[m';
        return
    fi
    printf '\n\e[01;32mTTL:%ss\e[m' "$remaining"
}
PROMPT_COMMAND=vault_prompt
BASH

    bash --init-file "$scratch/bashrc"
else
    "$@"
fi
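For context on what this removal takes away: the script provisioned short-lived AWS credentials through Vault and then either dropped you into a subshell (with a token-TTL prompt) or ran a single command under those credentials. A usage sketch of the old behaviour; the `aws s3 ls` command is only an illustration:

```bash
# Interactive: open a bash subshell whose prompt shows the Vault token TTL.
./enter-env.sh

# One-shot: run a single command with the freshly provisioned AWS credentials.
./enter-env.sh aws s3 ls
```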
@@ -133,9 +133,13 @@
       nativeBuildInputs = with pkgs; [ ];
       buildInputs = with pkgs; [
+        zig
+        xonsh
+        awscli2
         toolchain
         rust-analyzer
         cargo-outdated
+        cargo-zigbuild
         cacert
         cargo-audit
         cargo-watch
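With zig, xonsh, awscli2, and cargo-zigbuild in the dev shell, the two xonsh scripts can be run without falling back to their nix-shell shebang lines. A sketch, assuming the flake exposes this shell as its default devShell:

```bash
# Hypothetical workflow: enter the flake's dev shell instead of relying on
# the scripts' nix-shell shebangs, then run them directly.
nix develop
./build-all.xsh
```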
upload-to-lix.xsh (new executable file, 109 lines)
@@ -0,0 +1,109 @@
#! /usr/bin/env nix-shell
#! nix-shell -i xonsh -p xonsh awscli2
#
# vim: ts=4 sw=4 et
#
# If the shebang line above was necessary, you probably should have used
# the flake, instead. But that's okay! You're valid. <3
#
""" Lix installer uploader.

Uploads our installers and install script to an S3 instance.
"""

import sys
import argparse
import functools

# Specify the platforms we want to build for.
TARGET_PLATFORMS = {
    "aarch64-apple-darwin": "aarch64-darwin",
    "x86_64-apple-darwin": "x86_64-darwin",
    "aarch64-unknown-linux-musl": "aarch64-linux",
    "x86_64-unknown-linux-musl": "x86_64-linux",
}

# Helper functions.
printerr = functools.partial(print, file=sys.stderr)

#
# Arguments -- parsed while you wait!
#
parser = argparse.ArgumentParser(description="upload a lix-installer binary")
parser.add_argument("tag", help="the tag name to use while uploading")
parser.add_argument("folder", help="the results folder to use for uploading")
parser.add_argument("--make-default", help="makes this version the default for new installations",
    action="store_true")
parser.add_argument("-E", "--endpoint", help="the endpoint URL to use for S3", default="https://s3.lix.systems")
parser.add_argument("-R", "--region", help="the region to use for the S3 upload", default="garage")
parser.add_argument("-B", "--bucket", help="the s3 bucket to target", default="install")
parser.add_argument("--force", help="allows overwriting an existing tag", action="store_true")
args = parser.parse_args()

# Extract our AWS command arguments from our argparse ones.
path_for = lambda platform : pf"{args.folder}/lix-installer-{platform}"
aws_args = [
    "--endpoint-url",
    args.endpoint,
    "--region",
    args.region
]

# Validate that we have the environment variables necessary to upload.
if ('AWS_ACCESS_KEY_ID' not in ${...}) or ('AWS_SECRET_ACCESS_KEY' not in ${...}):
    printerr("ERROR: the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables must be set")
    sys.exit(-1)

#
# First, make sure we have all of the artifacts that we need before we start.
#
found_all_files = True
for platform in TARGET_PLATFORMS:
    if not path_for(platform).exists():
        printerr(f"ERROR: {platform} installer not found in {path_for(platform)}\n")
        found_all_files = False

if not found_all_files:
    printerr("Aborting due to missing results. Perhaps you want to run `build-all.xsh`?\n")
    sys.exit(-2)

#
# Next, handle our uploads.
#
tag = args.tag
bucket = args.bucket
folder = args.folder
target_path = f"s3://{bucket}/lix/{tag}"
default_path = f"s3://{bucket}/lix"

# First, check to ensure that the relevant tag does not exist.
tag_exists = !(aws s3 @(aws_args) ls @(target_path))
if tag_exists:
    if args.force:
        printerr(f"WARNING: Overwriting existing tag '{tag}' due to --force!")
    else:
        printerr(f"ERROR: Tag '{tag}' already exists! Refusing to overwrite without --force.\n")
        sys.exit(-3)

# From this point forward, fail if any of our subcommands do.
$RAISE_SUBPROC_ERROR=True

# Copy the core inner pieces...
printerr(f"\n>> Uploading tag '{tag}' from folder '{folder}'.")
for in_filename, out_filename in TARGET_PLATFORMS.items():
    aws s3 @(aws_args) cp @(folder)/lix-installer-@(in_filename) @(target_path)/lix-installer-@(out_filename) --acl public-read

# ... and, if requested, copy the pieces that make this the default.
if args.make_default:
    printerr(f"\n>> Installing {tag} as the default install provider.")
    for in_filename, out_filename in TARGET_PLATFORMS.items():
        aws s3 @(aws_args) cp @(folder)/lix-installer-@(in_filename) @(default_path)/lix-installer-@(out_filename) --acl public-read

    printerr(f"\n>> Updating base install script...")
    aws s3 @(aws_args) cp nix-installer.sh @(default_path) --acl public-read

# Make sure all of our lines are out.
sys.stderr.flush()
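Tying the two scripts together, a sketch of the intended flow. The tag and credential values are placeholders, and the resulting S3 layout is inferred from the `target_path` and `default_path` values in the script above:

```bash
# Hypothetical end-to-end flow; credentials and tag name are placeholders.
export AWS_ACCESS_KEY_ID=xxx
export AWS_SECRET_ACCESS_KEY=xxx

./build-all.xsh

# Publish the binaries under s3://install/lix/some-tag/; add --make-default to
# also promote them (and nix-installer.sh) to the default s3://install/lix/ prefix.
./upload-to-lix.xsh some-tag ./results
```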