upload_s3: provide git ish as argument (#169)

* upload_s3: provide git ish as argument

$GITHUB_SHA isn't the actual PR HEAD's SHA: for pull_request events it is
the SHA of the temporary merge commit GitHub creates for the PR.

* upload_s3: use an empty object if the file doesn't exist on s3

It's not pretty, but it works, and the md5sum of a file should never be
"null".

* release-branches: upload to `branch_${name}` instead
This commit is contained in:
Cole Helbling 2023-01-09 12:38:53 -08:00 committed by GitHub
parent f09bbae5a0
commit 12bf502bcb
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 16 additions and 13 deletions

View file

@@ -29,5 +29,6 @@ jobs:
env:
AWS_BUCKET: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
run: |
LATEST_BRANCH="latest_${{ github.ref_name }}"
./upload_s3.sh "$LATEST_BRANCH"
BRANCH="branch_${{ github.ref_name }}"
GIT_ISH="$GITHUB_SHA"
./upload_s3.sh "$BRANCH" "$GIT_ISH"

View file

@@ -33,4 +33,5 @@ jobs:
AWS_BUCKET: ${{ secrets.AWS_S3_UPLOAD_BUCKET }}
run: |
PR="pr_${{ github.event.pull_request.number }}"
./upload_s3.sh "$PR"
GIT_ISH="${{ github.event.pull_request.head.sha }}"
./upload_s3.sh "$PR" "$GIT_ISH"

View file

@@ -1,30 +1,31 @@
set -eu
# If the revision directory has already been created in S3 somehow, we don't want to reupload
if aws s3 ls "$AWS_BUCKET"/"$GITHUB_SHA"/; then
echo "Revision $GITHUB_SHA was already uploaded; exiting"
if aws s3 ls "$AWS_BUCKET"/"$GIT_ISH"/; then
echo "Revision $GIT_ISH was already uploaded; exiting"
exit 1
fi
sudo chown $USER: -R artifacts/
DEST="$1"
GIT_ISH="$2"
mkdir "$GITHUB_SHA"
mkdir "$GIT_ISH"
sed -i "s@https://install.determinate.systems/nix@https://install.determinate.systems/nix/rev/$GITHUB_SHA@" nix-installer.sh
cp nix-installer.sh "$GITHUB_SHA"/
sed -i "s@https://install.determinate.systems/nix@https://install.determinate.systems/nix/rev/$GIT_ISH@" nix-installer.sh
cp nix-installer.sh "$GIT_ISH"/
for artifact in $(find artifacts/ -type f); do
chmod +x "$artifact"
cp "$artifact" "$GITHUB_SHA"/
cp "$artifact" "$GIT_ISH"/
done
# If any artifact already exists in S3 and the hash is the same, we don't want to reupload
for file in $(find "$GITHUB_SHA" -type f); do
for file in $(find "$GIT_ISH" -type f); do
artifact_path="$DEST"/"$(basename "$artifact")"
md5="$(md5sum "$artifact" | cut -d' ' -f1)"
obj="$(aws s3api head-object --bucket "$AWS_BUCKET" --key "$artifact_path")"
obj="$(aws s3api head-object --bucket "$AWS_BUCKET" --key "$artifact_path" || echo '{}')"
obj_md5="$(jq -r .ETag <<<"$obj" | jq -r)" # head-object call returns ETag quoted, so `jq -r` again to unquote it
if [[ "$md5" == "$obj_md5" ]]; then
@@ -33,5 +34,5 @@ for file in $(find "$GITHUB_SHA" -type f); do
fi
done
aws s3 sync "$GITHUB_SHA"/ s3://"$AWS_BUCKET"/"$GITHUB_SHA"/ --acl public-read
aws s3 sync s3://"$AWS_BUCKET"/"$GITHUB_SHA"/ s3://"$AWS_BUCKET"/"$DEST"/ --acl public-read
aws s3 sync "$GIT_ISH"/ s3://"$AWS_BUCKET"/"$GIT_ISH"/ --acl public-read
aws s3 sync s3://"$AWS_BUCKET"/"$GIT_ISH"/ s3://"$AWS_BUCKET"/"$DEST"/ --acl public-read