From 0cb623c3d996638c548c8622c7df96d2cb4d1b05 Mon Sep 17 00:00:00 2001
From: Dan Peebles
Date: Mon, 15 Aug 2016 10:27:39 -0400
Subject: fetchurl: add user agent

It would be nice to be able to track Nix requests. It's not
trustworthy, but can be helpful for stats and routing in HTTP logs.

Since `fetchurl` is used so widely, we should "magically" get a UA on
`fetchzip`, `fetchFromGitHub`, and other related fetchers. Since
`fetchurl` is only used for fixed-output derivations, this should cause
no mass rebuild.

User-Agent example: curl/7.57.0 Nixpkgs/18.03
---
 pkgs/build-support/fetchurl/builder.sh  | 25 ++++++++++++++-----------
 pkgs/build-support/fetchurl/default.nix |  4 +++-
 2 files changed, 17 insertions(+), 12 deletions(-)

(limited to 'pkgs/build-support/fetchurl')

diff --git a/pkgs/build-support/fetchurl/builder.sh b/pkgs/build-support/fetchurl/builder.sh
index 7c2bdf260b4e..530864742f60 100644
--- a/pkgs/build-support/fetchurl/builder.sh
+++ b/pkgs/build-support/fetchurl/builder.sh
@@ -2,20 +2,23 @@
 source $stdenv/setup
 source $mirrorsFile
 
+curlVersion=$(curl -V | head -1 | cut -d' ' -f2)
 # Curl flags to handle redirects, not use EPSV, handle cookies for
 # servers that need them during redirects, and work on SSL without a
 # certificate (this isn't a security problem because we check the
 # cryptographic hash of the output anyway).
-curl="curl \
- --location --max-redirs 20 \
- --retry 3 \
- --disable-epsv \
- --cookie-jar cookies \
- --insecure \
- $curlOpts \
- $NIX_CURL_FLAGS"
-
+curl=(
+    curl
+    --location
+    --max-redirs 20
+    --disable-epsv
+    --cookie-jar cookies
+    --insecure
+    --user-agent "curl/$curlVersion Nixpkgs/$nixpkgsVersion"
+    $curlOpts
+    $NIX_CURL_FLAGS
+)
 
 downloadedFile="$out"
 if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
 
@@ -32,7 +35,7 @@ tryDownload() {
     # if we get error code 18, resume partial download
     while [ $curlexit -eq 18 ]; do
         # keep this inside an if statement, since on failure it doesn't abort the script
-        if $curl -C - --fail "$url" --output "$downloadedFile"; then
+        if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then
            success=1
            break
         else
@@ -61,7 +64,7 @@ tryHashedMirrors() {
     for mirror in $hashedMirrors; do
         url="$mirror/$outputHashAlgo/$outputHash"
 
-        if $curl --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
+        if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
             --fail --silent --show-error --head "$url" \
             --write-out "%{http_code}" --output /dev/null > code 2> log; then
             tryDownload "$url"
diff --git a/pkgs/build-support/fetchurl/default.nix b/pkgs/build-support/fetchurl/default.nix
index 8dac273eb1ca..9ab3494b2b0a 100644
--- a/pkgs/build-support/fetchurl/default.nix
+++ b/pkgs/build-support/fetchurl/default.nix
@@ -95,7 +95,7 @@ assert sha512 != "" -> builtins.compareVersions "1.11" builtins.nixVersion <= 0;
 
 let
-
+  inherit (stdenv.lib) fileContents;
   hasHash = showURLs || (outputHash != "" && outputHashAlgo != "")
     || sha1 != "" || sha256 != "" || sha512 != "";
   urls_ = if urls != [] then urls else [url];
@@ -132,6 +132,8 @@ else stdenv.mkDerivation {
 
   impureEnvVars = impureEnvVars ++ netrcImpureEnvVars;
 
+  nixpkgsVersion = fileContents ../../../.version;
+
   # Doing the download on a remote machine just duplicates network
   # traffic, so don't do that.
   preferLocalBuild = true;
-- 
cgit 1.4.1
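
Note on why the curl invocation changes from a flat string to a bash array: the new --user-agent value contains a space, and an argument with a space cannot survive unquoted expansion of a string variable, while "${curl[@]}" preserves each element verbatim. A quick demonstration (the version values are examples only, not taken from the patch):

    flags='--user-agent "curl/7.57.0 Nixpkgs/18.03"'
    printf '[%s]\n' $flags
    # [--user-agent] ["curl/7.57.0] [Nixpkgs/18.03"]   <- value split, quotes kept literally

    flags=(--user-agent "curl/7.57.0 Nixpkgs/18.03")
    printf '[%s]\n' "${flags[@]}"
    # [--user-agent] [curl/7.57.0 Nixpkgs/18.03]       <- value arrives as one argument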
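
The mechanics in one place: builder.sh derives the curl version from `curl -V`, and nixpkgsVersion reaches the builder as an environment variable because attributes of stdenv.mkDerivation are exported into the build environment. The stand-alone sketch below reproduces the header outside Nix; the "18.03" fallback and the example URL are illustrative assumptions, not part of the patch:

    #!/usr/bin/env bash
    # Compose the User-Agent the same way builder.sh does.
    curlVersion=$(curl -V | head -1 | cut -d' ' -f2)    # e.g. "7.57.0"
    nixpkgsVersion=${nixpkgsVersion:-18.03}             # normally injected from the .version file

    curl=(
        curl
        --location
        --user-agent "curl/$curlVersion Nixpkgs/$nixpkgsVersion"
    )

    # Sends e.g. "User-Agent: curl/7.57.0 Nixpkgs/18.03"
    "${curl[@]}" --silent --head https://example.org > /dev/null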