#! @shell@ -e
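# Download the file at the given URL, copy it into the Nix store, and
# print its cryptographic hash.  If PRINT_PATH is set, also print the
# resulting store path.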

url=$1
expHash=$2

# needed to make it work on NixOS
export PATH=$PATH:@coreutils@

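# The hash algorithm can be overridden through $NIX_HASH_ALGO; it
# defaults to sha256.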
hashType=$NIX_HASH_ALGO
if test -z "$hashType"; then
    hashType=sha256
fi

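# Request base-32 output from nix-hash, except for MD5, which is left in
# its default hexadecimal form.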
hashFormat=
if test "$hashType" != "md5"; then
    hashFormat=--base32
fi

if test -z "$url"; then
    echo "syntax: nix-prefetch-url URL [EXPECTED-HASH]" >&2
    exit 1
fi

name=$(basename "$url")
if test -z "$name"; then echo "invalid URL \`$url'" >&2; exit 1; fi


# If the hash was given, a file with that hash may already be in the
# store.
if test -n "$expHash"; then
    finalPath=$(@bindir@/nix-store --print-fixed-path "$hashType" "$expHash" "$name")
    if ! @bindir@/nix-store --check-validity "$finalPath" 2> /dev/null; then
        finalPath=
    fi
    hash=$expHash
fi


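# Create a private temporary directory under $TMPDIR for the download and
# install a trap that removes it again on exit.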
mkTempDir() {
    local i=0
    while true; do
        if test -z "$TMPDIR"; then TMPDIR=/tmp; fi
        tmpPath=$TMPDIR/nix-prefetch-url-$$-$i
        if mkdir "$tmpPath"; then break; fi
        # !!! too bad we can't check for ENOENT in mkdir, so this check
        # is slightly racy (it bombs out if somebody just removed
        # $tmpPath...).
        if ! test -e "$tmpPath"; then exit 1; fi
        i=$((i + 1))
    done
    trap removeTempDir EXIT SIGINT SIGQUIT
}

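# Invoked from the exit trap to clean up the temporary directory.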
removeTempDir() {
    if test -n "$tmpPath"; then
        rm -rf "$tmpPath" || true
    fi
}


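# Fetch $url into $tmpFile, following redirects.  $cacheFlags may hold
# additional curl options for timestamp-based caching (see below).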
doDownload() {
    @curl@ $cacheFlags --fail -# --location --max-redirs 20 --disable-epsv \
        --cookie-jar "$tmpPath/cookies" "$url" -o "$tmpFile"
}


# If we don't know the hash or a file with that hash doesn't exist,
# download the file and add it to the store.
if test -z "$finalPath"; then

    mkTempDir
    tmpFile=$tmpPath/$name

    # Optionally do timestamp-based caching of the download.
    # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
    # the hash and the timestamp of the file at $url.  The caching of
    # the file *contents* is done in Nix store, where it can be
    # garbage-collected independently.
    if test -n "$NIX_DOWNLOAD_CACHE"; then
        echo -n "$url" > "$tmpPath/url"
        urlHash=$(@bindir@/nix-hash --type sha256 --base32 --flat "$tmpPath/url")
        echo "$url" > "$NIX_DOWNLOAD_CACHE/$urlHash.url"
        cachedHashFN="$NIX_DOWNLOAD_CACHE/$urlHash.$hashType"
        cachedTimestampFN="$NIX_DOWNLOAD_CACHE/$urlHash.stamp"
        cacheFlags="--remote-time"
        if test -e "$cachedTimestampFN" -a -e "$cachedHashFN"; then
            # Only download the file if it is newer than the cached version.
            cacheFlags="$cacheFlags --time-cond $cachedTimestampFN"
        fi
    fi

    # Perform the download.
    doDownload

    if test -n "$NIX_DOWNLOAD_CACHE" -a ! -e "$tmpFile"; then
        # Curl didn't create $tmpFile, so apparently there's no newer
        # file on the server.
        hash=$(cat "$cachedHashFN")
        finalPath=$(@bindir@/nix-store --print-fixed-path "$hashType" "$hash" "$name")
        if ! @bindir@/nix-store --check-validity "$finalPath" 2> /dev/null; then
            echo "cached contents of \`$url' disappeared, redownloading..." >&2
            finalPath=
            cacheFlags="--remote-time"
            doDownload
        fi
    fi

    if test -z "$finalPath"; then

        # Compute the hash.
        hash=$(@bindir@/nix-hash --type "$hashType" $hashFormat --flat "$tmpFile")
        if test -z "$QUIET"; then echo "hash is $hash" >&2; fi

        if test -n "$NIX_DOWNLOAD_CACHE"; then
            echo "$hash" > "$cachedHashFN"
            touch -r "$tmpFile" "$cachedTimestampFN"
        fi

        # Add the downloaded file to the Nix store.
        finalPath=$(@bindir@/nix-store --add-fixed "$hashType" "$tmpFile")

        if test -n "$expHash" -a "$expHash" != "$hash"; then
            echo "hash mismatch for URL \`$url' (expected $expHash, got $hash)" >&2
            exit 1
        fi

    fi
fi


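# Report the result: the hash goes to stdout; the store path is printed
# only if $PRINT_PATH is set.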
if test -z "$QUIET"; then echo "path is $finalPath" >&2; fi

echo "$hash"

if test -n "$PRINT_PATH"; then
    echo "$finalPath"
fi