diff options
author | Eelco Dolstra <eelco.dolstra@logicblox.com> | 2007-08-23 15:22:30 +0000 |
---|---|---|
committer | Eelco Dolstra <eelco.dolstra@logicblox.com> | 2007-08-23 15:22:30 +0000 |
commit | 50321b735be9e326dd5b2ab4cc69ae1971ac9594 (patch) | |
tree | afe0dbb47355cbc3ddb5510adfe30dbc39f3460b /pkgs/build-support/fetchurl/builder.sh | |
parent | ec9d5c94715feb00c522e81c2f9ceef92ed505fb (diff) | |
download | nixlib-50321b735be9e326dd5b2ab4cc69ae1971ac9594.tar nixlib-50321b735be9e326dd5b2ab4cc69ae1971ac9594.tar.gz nixlib-50321b735be9e326dd5b2ab4cc69ae1971ac9594.tar.bz2 nixlib-50321b735be9e326dd5b2ab4cc69ae1971ac9594.tar.lz nixlib-50321b735be9e326dd5b2ab4cc69ae1971ac9594.tar.xz nixlib-50321b735be9e326dd5b2ab4cc69ae1971ac9594.tar.zst nixlib-50321b735be9e326dd5b2ab4cc69ae1971ac9594.zip |
* Basic transparent mirror support in fetchurl (NIXPKGS-70). When
fetching a file with hash HASH of type TYPE, we first try to download <base-url>/<type>/<hash>, where <base-url> is one of a list of mirrors. For instance, given src = fetchurl { url = http://releases.mozilla.org/pub/mozilla.org/firefox/releases/2.0.0.6/source/firefox-2.0.0.6-source.tar.bz2; sha1 = "eb72f55e4a8bf08e8c6ef227c0ade3d068ba1082"; }; and the mirror list [http://nix.cs.uu.nl/dist/tarballs], we first try to download http://nix.cs.uu.nl/dist/tarballs/sha1/eb72f55e4a8bf08e8c6ef227c0ade3d068ba1082 and if that fails, we use the original URL. The list of mirrors is not yet user-configurable. * `fetchurl' now also accepts an argument `urls' instead of `url' for a list of alternative download locations, which fetchurl will try in sequence. svn path=/nixpkgs/trunk/; revision=9190
Diffstat (limited to 'pkgs/build-support/fetchurl/builder.sh')
-rw-r--r-- | pkgs/build-support/fetchurl/builder.sh | 79 |
1 file changed, 66 insertions, 13 deletions
diff --git a/pkgs/build-support/fetchurl/builder.sh b/pkgs/build-support/fetchurl/builder.sh index 19514ac86675..c6ccf708855f 100644 --- a/pkgs/build-support/fetchurl/builder.sh +++ b/pkgs/build-support/fetchurl/builder.sh @@ -1,20 +1,73 @@ source $stdenv/setup -header "downloading $out from $url" +header "downloading file $name with $outputHashAlgo hash $outputHash..." -curl --fail --location --max-redirs 20 --disable-epsv \ - --cookie-jar cookies "$url" > "$out" +# Curl flags to handle redirects, not use EPSV, handle cookies for +# servers to need them during redirects, and work on SSL without a +# certificate (this isn't a security problem because we check the +# cryptographic hash of the output anyway). +curl="curl \ + --location --max-redirs 20 \ + --disable-epsv \ + --cookie-jar cookies \ + --insecure" -if test "$NIX_OUTPUT_CHECKED" != "1"; then - if test "$outputHashAlgo" != "md5"; then - echo "hashes other than md5 are unsupported in Nix <= 0.7, upgrade to Nix 0.8" - exit 1 + +tryDownload() { + local url="$1" + echo + header "trying $url" + success= + if $curl --fail "$url" --output "$out"; then + success=1 fi - actual=$(md5sum -b "$out" | cut -c1-32) - if test "$actual" != "$id"; then - echo "hash is $actual, expected $id" - exit 1 + stopNest +} + + +finish() { + # On old versions of Nix, verify the hash of the output. On newer + # versions, Nix verifies the hash itself. 
+ if test "$NIX_OUTPUT_CHECKED" != "1"; then + if test "$outputHashAlgo" != "md5"; then + echo "hashes other than md5 are unsupported in Nix <= 0.7, upgrade to Nix 0.8" + exit 1 + fi + actual=$(md5sum -b "$out" | cut -c1-32) + if test "$actual" != "$id"; then + echo "hash is $actual, expected $id" + exit 1 + fi + fi + + stopNest + exit 0 +} + + +for mirror in $hashedMirrors; do + url="$mirror/$outputHashAlgo/$outputHash" + if $curl --fail --silent --show-error --head "$url" \ + --write-out "%{http_code}" --output /dev/null > code 2> log; then + tryDownload "$url" + if test -n "$success"; then finish; fi + else + # Be quiet about 404 errors, which we interpret as the file + # not being present on this particular mirror. + if test "$(cat code)" != 404; then + echo "error checking the existence of $url:" + cat log + fi fi -fi +done + + +success= +for url in $urls; do + tryDownload "$url" + if test -n "$success"; then finish; fi +done + -stopNest +echo "error: cannot download $name from any mirror" +exit 1 |