Diffstat (limited to 'nixpkgs/pkgs/build-support')
-rw-r--r-- nixpkgs/pkgs/build-support/add-opengl-runpath/default.nix | 12
-rw-r--r-- nixpkgs/pkgs/build-support/add-opengl-runpath/setup-hook.sh | 29
-rw-r--r-- nixpkgs/pkgs/build-support/agda/default.nix | 85
-rw-r--r-- nixpkgs/pkgs/build-support/agda/lib.nix | 10
-rw-r--r-- nixpkgs/pkgs/build-support/alternatives/blas/default.nix | 141
-rw-r--r-- nixpkgs/pkgs/build-support/alternatives/lapack/default.nix | 107
-rwxr-xr-x nixpkgs/pkgs/build-support/appimage/appimage-exec.sh | 142
-rw-r--r-- nixpkgs/pkgs/build-support/appimage/default.nix | 191
-rw-r--r-- nixpkgs/pkgs/build-support/bintools-wrapper/add-darwin-ldflags-before.sh | 81
-rw-r--r-- nixpkgs/pkgs/build-support/bintools-wrapper/add-flags.sh | 37
-rw-r--r-- nixpkgs/pkgs/build-support/bintools-wrapper/add-hardening.sh | 58
-rwxr-xr-x nixpkgs/pkgs/build-support/bintools-wrapper/darwin-install_name_tool-wrapper.sh | 49
-rwxr-xr-x nixpkgs/pkgs/build-support/bintools-wrapper/darwin-strip-wrapper.sh | 78
-rw-r--r-- nixpkgs/pkgs/build-support/bintools-wrapper/default.nix | 397
-rw-r--r-- nixpkgs/pkgs/build-support/bintools-wrapper/ld-solaris-wrapper.sh | 29
-rw-r--r-- nixpkgs/pkgs/build-support/bintools-wrapper/ld-wrapper.sh | 253
-rw-r--r-- nixpkgs/pkgs/build-support/bintools-wrapper/macos-sierra-reexport-hack.bash | 246
-rw-r--r-- nixpkgs/pkgs/build-support/bintools-wrapper/setup-hook.sh | 72
-rw-r--r-- nixpkgs/pkgs/build-support/build-bazel-package/default.nix | 228
-rw-r--r-- nixpkgs/pkgs/build-support/build-dotnet-package/default.nix | 116
-rw-r--r-- nixpkgs/pkgs/build-support/build-fhs-userenv-bubblewrap/default.nix | 194
-rw-r--r-- nixpkgs/pkgs/build-support/build-fhs-userenv-bubblewrap/env.nix | 184
-rw-r--r-- nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/chrootenv.c | 169
-rw-r--r-- nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/default.nix | 16
-rw-r--r-- nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/meson.build | 5
-rw-r--r-- nixpkgs/pkgs/build-support/build-fhs-userenv/default.nix | 49
-rw-r--r-- nixpkgs/pkgs/build-support/build-fhs-userenv/env.nix | 208
-rw-r--r-- nixpkgs/pkgs/build-support/build-maven.nix | 81
-rw-r--r-- nixpkgs/pkgs/build-support/build-pecl.nix | 37
-rw-r--r-- nixpkgs/pkgs/build-support/build-setupcfg/default.nix | 25
-rwxr-xr-x nixpkgs/pkgs/build-support/buildenv/builder.pl | 251
-rw-r--r-- nixpkgs/pkgs/build-support/buildenv/default.nix | 82
-rw-r--r-- nixpkgs/pkgs/build-support/cc-wrapper/add-flags.sh | 77
-rw-r--r-- nixpkgs/pkgs/build-support/cc-wrapper/add-hardening.sh | 72
-rw-r--r-- nixpkgs/pkgs/build-support/cc-wrapper/cc-wrapper.sh | 215
-rw-r--r-- nixpkgs/pkgs/build-support/cc-wrapper/default.nix | 531
-rw-r--r-- nixpkgs/pkgs/build-support/cc-wrapper/fortran-hook.sh | 11
-rw-r--r-- nixpkgs/pkgs/build-support/cc-wrapper/gnat-wrapper.sh | 167
-rw-r--r-- nixpkgs/pkgs/build-support/cc-wrapper/setup-hook.sh | 120
-rw-r--r-- nixpkgs/pkgs/build-support/closure-info.nix | 36
-rw-r--r-- nixpkgs/pkgs/build-support/coq/default.nix | 109
-rw-r--r-- nixpkgs/pkgs/build-support/coq/extra-lib.nix | 145
-rw-r--r-- nixpkgs/pkgs/build-support/coq/meta-fetch/default.nix | 70
-rw-r--r-- nixpkgs/pkgs/build-support/dhall-to-nix.nix | 38
-rw-r--r-- nixpkgs/pkgs/build-support/docker/default.nix | 965
-rw-r--r-- nixpkgs/pkgs/build-support/docker/detjson.py | 40
-rw-r--r-- nixpkgs/pkgs/build-support/docker/examples.nix | 544
-rwxr-xr-x nixpkgs/pkgs/build-support/docker/nix-prefetch-docker | 173
-rw-r--r-- nixpkgs/pkgs/build-support/docker/nix-prefetch-docker.nix | 24
-rw-r--r-- nixpkgs/pkgs/build-support/docker/stream_layered_image.py | 391
-rw-r--r-- nixpkgs/pkgs/build-support/docker/tarsum.go | 24
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetbuildhelpers/create-pkg-config-for-dll.sh | 23
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetbuildhelpers/default.nix | 18
-rwxr-xr-x nixpkgs/pkgs/build-support/dotnetbuildhelpers/patch-fsharp-targets.sh | 20
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetbuildhelpers/placate-nuget.sh | 7
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetbuildhelpers/placate-paket.sh | 7
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetbuildhelpers/remove-duplicated-dlls.sh | 22
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper.sln | 20
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Properties/AssemblyInfo.cs | 36
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Wrapper.cs.in | 66
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Wrapper.csproj.in | 57
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetenv/build-solution.nix | 85
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetenv/default.nix | 17
-rw-r--r-- nixpkgs/pkgs/build-support/dotnetenv/wrapper.nix | 64
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/buffer.nix | 79
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/elpa.nix | 41
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/elpa2nix.el | 33
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/emacs-funcs.sh | 34
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/generic.nix | 93
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/melpa.nix | 97
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/melpa2nix.el | 16
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/trivial.nix | 29
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/wrapper.nix | 219
-rw-r--r-- nixpkgs/pkgs/build-support/emacs/wrapper.sh | 47
-rw-r--r-- nixpkgs/pkgs/build-support/expand-response-params/default.nix | 19
-rw-r--r-- nixpkgs/pkgs/build-support/expand-response-params/expand-response-params.c | 84
-rw-r--r-- nixpkgs/pkgs/build-support/fetchbitbucket/default.nix | 9
-rw-r--r-- nixpkgs/pkgs/build-support/fetchbower/default.nix | 28
-rw-r--r-- nixpkgs/pkgs/build-support/fetchbzr/builder.sh | 9
-rw-r--r-- nixpkgs/pkgs/build-support/fetchbzr/default.nix | 15
-rwxr-xr-x nixpkgs/pkgs/build-support/fetchbzr/nix-prefetch-bzr | 74
-rw-r--r-- nixpkgs/pkgs/build-support/fetchcvs/builder.sh | 27
-rw-r--r-- nixpkgs/pkgs/build-support/fetchcvs/default.nix | 20
-rwxr-xr-x nixpkgs/pkgs/build-support/fetchcvs/nix-prefetch-cvs | 82
-rw-r--r-- nixpkgs/pkgs/build-support/fetchdarcs/builder.sh | 19
-rw-r--r-- nixpkgs/pkgs/build-support/fetchdarcs/default.nix | 18
-rw-r--r-- nixpkgs/pkgs/build-support/fetchdocker/credentials.nix | 38
-rw-r--r-- nixpkgs/pkgs/build-support/fetchdocker/default.nix | 61
-rw-r--r-- nixpkgs/pkgs/build-support/fetchdocker/fetchDockerConfig.nix | 13
-rw-r--r-- nixpkgs/pkgs/build-support/fetchdocker/fetchDockerLayer.nix | 13
-rw-r--r-- nixpkgs/pkgs/build-support/fetchdocker/fetchdocker-builder.sh | 28
-rw-r--r-- nixpkgs/pkgs/build-support/fetchdocker/generic-fetcher.nix | 95
-rw-r--r-- nixpkgs/pkgs/build-support/fetchfirefoxaddon/default.nix | 41
-rw-r--r-- nixpkgs/pkgs/build-support/fetchfossil/builder.sh | 22
-rw-r--r-- nixpkgs/pkgs/build-support/fetchfossil/default.nix | 20
-rw-r--r-- nixpkgs/pkgs/build-support/fetchgit/builder.sh | 17
-rw-r--r-- nixpkgs/pkgs/build-support/fetchgit/default.nix | 74
-rwxr-xr-x nixpkgs/pkgs/build-support/fetchgit/nix-prefetch-git | 467
-rw-r--r-- nixpkgs/pkgs/build-support/fetchgitea/default.nix | 7
-rw-r--r-- nixpkgs/pkgs/build-support/fetchgithub/default.nix | 41
-rw-r--r-- nixpkgs/pkgs/build-support/fetchgitiles/default.nix | 10
-rw-r--r-- nixpkgs/pkgs/build-support/fetchgitlab/default.nix | 22
-rw-r--r-- nixpkgs/pkgs/build-support/fetchgitlocal/default.nix | 40
-rw-r--r-- nixpkgs/pkgs/build-support/fetchgx/default.nix | 28
-rw-r--r-- nixpkgs/pkgs/build-support/fetchhg/builder.sh | 9
-rw-r--r-- nixpkgs/pkgs/build-support/fetchhg/default.nix | 29
-rwxr-xr-x nixpkgs/pkgs/build-support/fetchhg/nix-prefetch-hg | 83
-rw-r--r-- nixpkgs/pkgs/build-support/fetchipfs/builder.sh | 87
-rw-r--r-- nixpkgs/pkgs/build-support/fetchipfs/default.nix | 52
-rw-r--r-- nixpkgs/pkgs/build-support/fetchmavenartifact/default.nix | 76
-rw-r--r-- nixpkgs/pkgs/build-support/fetchmtn/builder.sh | 51
-rw-r--r-- nixpkgs/pkgs/build-support/fetchmtn/default.nix | 25
-rw-r--r-- nixpkgs/pkgs/build-support/fetchnuget/default.nix | 43
-rw-r--r-- nixpkgs/pkgs/build-support/fetchpatch/default.nix | 61
-rw-r--r-- nixpkgs/pkgs/build-support/fetchrepoorcz/default.nix | 10
-rw-r--r-- nixpkgs/pkgs/build-support/fetchrepoproject/default.nix | 78
-rw-r--r-- nixpkgs/pkgs/build-support/fetchs3/default.nix | 36
-rw-r--r-- nixpkgs/pkgs/build-support/fetchsavannah/default.nix | 10
-rw-r--r-- nixpkgs/pkgs/build-support/fetchsourcehut/default.nix | 25
-rw-r--r-- nixpkgs/pkgs/build-support/fetchsvn/builder.sh | 25
-rw-r--r-- nixpkgs/pkgs/build-support/fetchsvn/default.nix | 54
-rwxr-xr-x nixpkgs/pkgs/build-support/fetchsvn/nix-prefetch-svn | 79
-rw-r--r-- nixpkgs/pkgs/build-support/fetchsvnrevision/default.nix | 10
-rw-r--r-- nixpkgs/pkgs/build-support/fetchsvnssh/builder.sh | 15
-rw-r--r-- nixpkgs/pkgs/build-support/fetchsvnssh/default.nix | 20
-rwxr-xr-x nixpkgs/pkgs/build-support/fetchsvnssh/sshsubversion.exp | 22
-rw-r--r-- nixpkgs/pkgs/build-support/fetchurl/boot.nix | 20
-rw-r--r-- nixpkgs/pkgs/build-support/fetchurl/builder.sh | 170
-rw-r--r-- nixpkgs/pkgs/build-support/fetchurl/default.nix | 163
-rw-r--r-- nixpkgs/pkgs/build-support/fetchurl/mirrors.nix | 444
-rw-r--r-- nixpkgs/pkgs/build-support/fetchurl/write-mirror-list.sh | 4
-rw-r--r-- nixpkgs/pkgs/build-support/fetchzip/default.nix | 62
-rw-r--r-- nixpkgs/pkgs/build-support/go/garble.nix | 34
-rwxr-xr-x nixpkgs/pkgs/build-support/icon-conv-tools/bin/extractWinRscIconsToStdFreeDesktopDir.sh | 74
-rwxr-xr-x nixpkgs/pkgs/build-support/icon-conv-tools/bin/icoFileToHiColorTheme | 28
-rw-r--r-- nixpkgs/pkgs/build-support/icon-conv-tools/default.nix | 31
-rw-r--r-- nixpkgs/pkgs/build-support/install-shell-files/default.nix | 12
-rw-r--r-- nixpkgs/pkgs/build-support/kernel/initrd-compressor-meta.nix | 53
-rw-r--r-- nixpkgs/pkgs/build-support/kernel/make-initrd.nix | 113
-rw-r--r-- nixpkgs/pkgs/build-support/kernel/make-initrd.sh | 51
-rw-r--r-- nixpkgs/pkgs/build-support/kernel/modules-closure.nix | 15
-rw-r--r-- nixpkgs/pkgs/build-support/kernel/modules-closure.sh | 97
-rw-r--r-- nixpkgs/pkgs/build-support/kernel/paths-from-graph.pl | 68
-rw-r--r-- nixpkgs/pkgs/build-support/libredirect/default.nix | 58
-rw-r--r-- nixpkgs/pkgs/build-support/libredirect/libredirect.c | 274
-rw-r--r-- nixpkgs/pkgs/build-support/libredirect/test.c | 65
-rw-r--r-- nixpkgs/pkgs/build-support/make-desktopitem/default.nix | 67
-rw-r--r-- nixpkgs/pkgs/build-support/make-startupitem/default.nix | 35
-rw-r--r-- nixpkgs/pkgs/build-support/make-symlinks/builder.sh | 9
-rw-r--r-- nixpkgs/pkgs/build-support/make-symlinks/default.nix | 7
-rw-r--r-- nixpkgs/pkgs/build-support/mkshell/default.nix | 49
-rw-r--r-- nixpkgs/pkgs/build-support/mono-dll-fixer/builder.sh | 4
-rw-r--r-- nixpkgs/pkgs/build-support/mono-dll-fixer/default.nix | 8
-rw-r--r-- nixpkgs/pkgs/build-support/mono-dll-fixer/dll-fixer.pl | 32
-rw-r--r-- nixpkgs/pkgs/build-support/nix-gitignore/default.nix | 178
-rw-r--r-- nixpkgs/pkgs/build-support/nuke-references/darwin-sign-fixup.sh | 5
-rw-r--r-- nixpkgs/pkgs/build-support/nuke-references/default.nix | 37
-rw-r--r-- nixpkgs/pkgs/build-support/nuke-references/nuke-refs.sh | 33
-rw-r--r-- nixpkgs/pkgs/build-support/ocaml/default.nix | 34
-rw-r--r-- nixpkgs/pkgs/build-support/ocaml/dune.nix | 40
-rw-r--r-- nixpkgs/pkgs/build-support/ocaml/oasis.nix | 44
-rw-r--r-- nixpkgs/pkgs/build-support/oci-tools/default.nix | 78
-rw-r--r-- nixpkgs/pkgs/build-support/pkg-config-wrapper/add-flags.sh | 12
-rw-r--r-- nixpkgs/pkgs/build-support/pkg-config-wrapper/default.nix | 127
-rw-r--r-- nixpkgs/pkgs/build-support/pkg-config-wrapper/pkg-config-wrapper.sh | 21
-rw-r--r-- nixpkgs/pkgs/build-support/pkg-config-wrapper/setup-hook.sh | 29
-rw-r--r-- nixpkgs/pkgs/build-support/plugins.nix | 29
-rw-r--r-- nixpkgs/pkgs/build-support/prefer-remote-fetch/default.nix | 19
-rw-r--r-- nixpkgs/pkgs/build-support/references-by-popularity/closure-graph.py | 567
-rw-r--r-- nixpkgs/pkgs/build-support/references-by-popularity/default.nix | 15
-rw-r--r-- nixpkgs/pkgs/build-support/release/ant-build.nix | 123
-rw-r--r-- nixpkgs/pkgs/build-support/release/binary-tarball.nix | 78
-rw-r--r-- nixpkgs/pkgs/build-support/release/debian-build.nix | 96
-rw-r--r-- nixpkgs/pkgs/build-support/release/default.nix | 124
-rw-r--r-- nixpkgs/pkgs/build-support/release/functions.sh | 40
-rw-r--r-- nixpkgs/pkgs/build-support/release/maven-build.nix | 98
-rw-r--r-- nixpkgs/pkgs/build-support/release/nix-build.nix | 175
-rw-r--r-- nixpkgs/pkgs/build-support/release/rpm-build.nix | 54
-rw-r--r-- nixpkgs/pkgs/build-support/release/source-tarball.nix | 130
-rw-r--r-- nixpkgs/pkgs/build-support/remove-references-to/darwin-sign-fixup.sh | 5
-rw-r--r-- nixpkgs/pkgs/build-support/remove-references-to/default.nix | 35
-rw-r--r-- nixpkgs/pkgs/build-support/remove-references-to/remove-references-to.sh | 37
-rw-r--r-- nixpkgs/pkgs/build-support/replace-dependency.nix | 83
-rw-r--r-- nixpkgs/pkgs/build-support/replace-secret/replace-secret.nix | 35
-rwxr-xr-x nixpkgs/pkgs/build-support/replace-secret/replace-secret.py | 28
-rw-r--r-- nixpkgs/pkgs/build-support/replace-secret/test/expected_long_output | 30
-rw-r--r-- nixpkgs/pkgs/build-support/replace-secret/test/expected_short_output | 4
-rw-r--r-- nixpkgs/pkgs/build-support/replace-secret/test/input_file | 4
-rw-r--r-- nixpkgs/pkgs/build-support/replace-secret/test/passwd | 1
-rw-r--r-- nixpkgs/pkgs/build-support/replace-secret/test/rsa | 27
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/build-crate.nix | 112
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/configure-crate.nix | 201
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/default.nix | 325
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/helpers.nix | 26
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/install-crate.nix | 51
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/lib.sh | 174
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/log.nix | 59
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/test/brotli-crates.nix | 95
-rw-r--r-- nixpkgs/pkgs/build-support/rust/build-rust-crate/test/default.nix | 657
-rwxr-xr-x nixpkgs/pkgs/build-support/rust/cargo-vendor-normalise.py | 41
-rw-r--r-- nixpkgs/pkgs/build-support/rust/carnix.nix | 259
-rw-r--r-- nixpkgs/pkgs/build-support/rust/crates-io.nix | 7756
-rw-r--r-- nixpkgs/pkgs/build-support/rust/default-crate-overrides.nix | 163
-rw-r--r-- nixpkgs/pkgs/build-support/rust/default.nix | 161
-rw-r--r-- nixpkgs/pkgs/build-support/rust/fetchCargoTarball.nix | 86
-rwxr-xr-x nixpkgs/pkgs/build-support/rust/fetchcargo-default-config.toml | 7
-rw-r--r-- nixpkgs/pkgs/build-support/rust/fetchcrate.nix | 38
-rw-r--r-- nixpkgs/pkgs/build-support/rust/hooks/cargo-build-hook.sh | 41
-rw-r--r-- nixpkgs/pkgs/build-support/rust/hooks/cargo-check-hook.sh | 46
-rw-r--r-- nixpkgs/pkgs/build-support/rust/hooks/cargo-install-hook.sh | 49
-rw-r--r-- nixpkgs/pkgs/build-support/rust/hooks/cargo-setup-hook.sh | 86
-rw-r--r-- nixpkgs/pkgs/build-support/rust/hooks/default.nix | 95
-rw-r--r-- nixpkgs/pkgs/build-support/rust/hooks/maturin-build-hook.sh | 39
-rw-r--r-- nixpkgs/pkgs/build-support/rust/import-cargo-lock.nix | 167
-rw-r--r-- nixpkgs/pkgs/build-support/rust/patch-registry-deps/pkg-config | 8
-rw-r--r-- nixpkgs/pkgs/build-support/rust/sysroot/Cargo.lock | 29
-rw-r--r-- nixpkgs/pkgs/build-support/rust/sysroot/cargo.py | 45
-rw-r--r-- nixpkgs/pkgs/build-support/rust/sysroot/default.nix | 41
-rwxr-xr-x nixpkgs/pkgs/build-support/rust/sysroot/update-lockfile.sh | 21
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.lock | 83
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.toml | 8
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/default.nix | 18
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/src/main.rs | 9
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/default.nix | 8
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/Cargo.lock | 79
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/Cargo.toml | 8
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/default.nix | 21
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/src/main.rs | 9
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.lock | 79
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.toml | 8
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/default.nix | 21
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/src/main.rs | 9
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/maturin/Cargo.lock | 682
-rw-r--r-- nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/maturin/default.nix | 43
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh | 37
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh | 41
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh | 265
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh | 7
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh | 9
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh | 33
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh | 42
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/die.sh | 21
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh | 20
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh | 22
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh | 40
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh | 11
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh | 230
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh | 6
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh | 5
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh | 25
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh | 28
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh | 146
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh | 23
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh | 22
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh | 19
-rwxr-xr-x nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh | 25
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh | 199
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh | 119
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh | 22
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh | 9
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/role.bash | 71
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh | 37
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh | 13
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh | 34
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh | 5
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh | 88
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/strip.sh | 57
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh | 12
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh | 1
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh | 18
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh | 45
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix | 177
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix | 30
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile | 30
-rw-r--r-- nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh | 93
-rw-r--r-- nixpkgs/pkgs/build-support/setup-systemd-units.nix | 83
-rw-r--r-- nixpkgs/pkgs/build-support/singularity-tools/default.nix | 108
-rw-r--r-- nixpkgs/pkgs/build-support/skaware/build-skaware-package.nix | 105
-rw-r--r-- nixpkgs/pkgs/build-support/skaware/clean-packaging.nix | 53
-rw-r--r-- nixpkgs/pkgs/build-support/snap/default.nix | 4
-rw-r--r-- nixpkgs/pkgs/build-support/snap/make-snap.nix | 84
-rw-r--r-- nixpkgs/pkgs/build-support/source-from-head-fun.nix | 16
-rw-r--r-- nixpkgs/pkgs/build-support/src-only/default.nix | 31
-rw-r--r-- nixpkgs/pkgs/build-support/substitute-files/substitute-all-files.nix | 26
-rw-r--r-- nixpkgs/pkgs/build-support/substitute/substitute-all.nix | 12
-rw-r--r-- nixpkgs/pkgs/build-support/substitute/substitute-all.sh | 19
-rw-r--r-- nixpkgs/pkgs/build-support/templaterpm/default.nix | 25
-rwxr-xr-x nixpkgs/pkgs/build-support/templaterpm/nix-template-rpm.py | 518
-rw-r--r-- nixpkgs/pkgs/build-support/trivial-builders.nix | 622
-rw-r--r-- nixpkgs/pkgs/build-support/trivial-builders/test.nix | 53
-rwxr-xr-x nixpkgs/pkgs/build-support/trivial-builders/test.sh | 56
-rw-r--r-- nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeDirectReferencesToFile.nix | 4
-rw-r--r-- nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeReferencesToFile.nix | 4
-rw-r--r-- nixpkgs/pkgs/build-support/trivial-builders/test/sample.nix | 19
-rw-r--r-- nixpkgs/pkgs/build-support/upstream-updater/attrset-to-dir.nix | 20
-rwxr-xr-x nixpkgs/pkgs/build-support/upstream-updater/create-src-info-git.sh | 24
-rwxr-xr-x nixpkgs/pkgs/build-support/upstream-updater/create-src-info.sh | 20
-rw-r--r-- nixpkgs/pkgs/build-support/upstream-updater/design.txt | 29
-rw-r--r-- nixpkgs/pkgs/build-support/upstream-updater/snippets.sh | 14
-rw-r--r-- nixpkgs/pkgs/build-support/upstream-updater/test-case.nix | 13
-rwxr-xr-x nixpkgs/pkgs/build-support/upstream-updater/update-upstream-data.sh | 182
-rw-r--r-- nixpkgs/pkgs/build-support/upstream-updater/update-walker-service-specific.sh | 20
-rwxr-xr-x nixpkgs/pkgs/build-support/upstream-updater/update-walker.sh | 320
-rw-r--r-- nixpkgs/pkgs/build-support/upstream-updater/update-walker.txt | 118
-rwxr-xr-x nixpkgs/pkgs/build-support/upstream-updater/urls-from-page.sh | 14
-rw-r--r-- nixpkgs/pkgs/build-support/vm/deb/deb-closure.pl | 180
-rw-r--r-- nixpkgs/pkgs/build-support/vm/default.nix | 1159
-rw-r--r-- nixpkgs/pkgs/build-support/vm/rpm/rpm-closure.pl | 184
-rw-r--r-- nixpkgs/pkgs/build-support/vm/test.nix | 39
-rw-r--r-- nixpkgs/pkgs/build-support/wrapper-common/utils.bash | 131
-rw-r--r-- nixpkgs/pkgs/build-support/writers/default.nix | 321
-rw-r--r-- nixpkgs/pkgs/build-support/writers/test.nix | 206
312 files changed, 34343 insertions, 0 deletions
diff --git a/nixpkgs/pkgs/build-support/add-opengl-runpath/default.nix b/nixpkgs/pkgs/build-support/add-opengl-runpath/default.nix
new file mode 100644
index 000000000000..5cab0937e074
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/add-opengl-runpath/default.nix
@@ -0,0 +1,12 @@
+{ lib, stdenv }:
+
+stdenv.mkDerivation {
+  name = "add-opengl-runpath";
+
+  driverLink = "/run/opengl-driver" + lib.optionalString stdenv.isi686 "-32";
+
+  buildCommand = ''
+    mkdir -p $out/nix-support
+    substituteAll ${./setup-hook.sh} $out/nix-support/setup-hook
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/add-opengl-runpath/setup-hook.sh b/nixpkgs/pkgs/build-support/add-opengl-runpath/setup-hook.sh
new file mode 100644
index 000000000000..e556e7ead2a7
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/add-opengl-runpath/setup-hook.sh
@@ -0,0 +1,29 @@
+# Set RUNPATH so that driver libraries in /run/opengl-driver(-32)/lib can be found.
+# This is needed to not rely on LD_LIBRARY_PATH which does not work with setuid
+# executables. Fixes https://github.com/NixOS/nixpkgs/issues/22760. It must be run
+# in postFixup because RUNPATH stripping in fixup would undo it. Note that patchelf
+# actually sets RUNPATH, not RPATH, which applies only to dependencies of the binary
+# it is set on (including libraries loaded via dlopen), so the RUNPATH must indeed be set on these
+# libraries and would not work if set only on executables.
+addOpenGLRunpath() {
+    local forceRpath=
+
+    while [ $# -gt 0 ]; do
+        case "$1" in
+            --) shift; break;;
+            --force-rpath) shift; forceRpath=1;;
+            --*)
+                echo "addOpenGLRunpath: ERROR: Invalid command line" \
+                     "argument: $1" >&2
+                return 1;;
+            *) break;;
+        esac
+    done
+
+    for file in "$@"; do
+        if ! isELF "$file"; then continue; fi
+        local origRpath="$(patchelf --print-rpath "$file")"
+        patchelf --set-rpath "@driverLink@/lib:$origRpath" ${forceRpath:+--force-rpath} "$file"
+    done
+}
+
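
For context, the hook above is consumed from a package's postFixup phase; a
minimal sketch (the package name and library glob are illustrative, not part
of this diff):

    { stdenv, addOpenGLRunpath }:

    stdenv.mkDerivation {
      pname = "example-gl-app";   # hypothetical package
      version = "1.0";
      src = ./.;
      nativeBuildInputs = [ addOpenGLRunpath ];
      # Run in postFixup, because fixup's RUNPATH stripping would undo it.
      postFixup = ''
        addOpenGLRunpath $out/lib/*.so
      '';
    }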
diff --git a/nixpkgs/pkgs/build-support/agda/default.nix b/nixpkgs/pkgs/build-support/agda/default.nix
new file mode 100644
index 000000000000..ed7d11a13147
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/agda/default.nix
@@ -0,0 +1,85 @@
+# Builder for Agda packages.
+
+{ stdenv, lib, self, Agda, runCommandNoCC, makeWrapper, writeText, ghcWithPackages, nixosTests }:
+
+with lib.strings;
+
+let
+  withPackages' = {
+    pkgs,
+    ghc ? ghcWithPackages (p: with p; [ ieee754 ])
+  }: let
+    pkgs' = if builtins.isList pkgs then pkgs else pkgs self;
+    library-file = writeText "libraries" ''
+      ${(concatMapStringsSep "\n" (p: "${p}/${p.libraryFile}") pkgs')}
+    '';
+    pname = "agdaWithPackages";
+    version = Agda.version;
+  in runCommandNoCC "${pname}-${version}" {
+    inherit pname version;
+    nativeBuildInputs = [ makeWrapper ];
+    passthru = {
+      unwrapped = Agda;
+      tests = { inherit (nixosTests) agda; };
+    };
+  } ''
+    mkdir -p $out/bin
+    makeWrapper ${Agda}/bin/agda $out/bin/agda \
+      --add-flags "--with-compiler=${ghc}/bin/ghc" \
+      --add-flags "--library-file=${library-file}" \
+      --add-flags "--local-interfaces"
+    makeWrapper ${Agda}/bin/agda-mode $out/bin/agda-mode
+    ''; # --local-interfaces is passed for now; see https://github.com/agda/agda/issues/4526
+
+  withPackages = arg: if builtins.isAttrs arg then withPackages' arg else withPackages' { pkgs = arg; };
+
+  extensions = [
+    "agda"
+    "agda-lib"
+    "agdai"
+    "lagda"
+    "lagda.md"
+    "lagda.org"
+    "lagda.rst"
+    "lagda.tex"
+  ];
+
+  defaults =
+    { pname
+    , buildInputs ? []
+    , everythingFile ? "./Everything.agda"
+    , libraryName ? pname
+    , libraryFile ? "${libraryName}.agda-lib"
+    , buildPhase ? null
+    , installPhase ? null
+    , extraExtensions ? []
+    , ...
+    }: let
+      agdaWithArgs = withPackages (builtins.filter (p: p ? isAgdaDerivation) buildInputs);
+    in
+      {
+        inherit libraryName libraryFile;
+
+        isAgdaDerivation = true;
+
+        buildInputs = buildInputs ++ [ agdaWithArgs ];
+
+        buildPhase = if buildPhase != null then buildPhase else ''
+          runHook preBuild
+          agda -i ${dirOf everythingFile} ${everythingFile}
+          runHook postBuild
+        '';
+
+        installPhase = if installPhase != null then installPhase else ''
+          runHook preInstall
+          mkdir -p $out
+          find -not \( -path ${everythingFile} -or -path ${lib.interfaceFile everythingFile} \) -and \( ${concatMapStringsSep " -or " (p: "-name '*.${p}'") (extensions ++ extraExtensions)} \) -exec cp -p --parents -t "$out" {} +
+          runHook postInstall
+        '';
+      };
+in
+{
+  mkDerivation = args: stdenv.mkDerivation (args // defaults args);
+
+  inherit withPackages withPackages';
+}
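
A usage sketch of the wrapper above (the standard-library attribute is an
assumption about what the Agda package set provides):

    # Builds an Agda wrapper whose generated --library-file lists the
    # standard library, so `agda` can resolve it while type checking.
    agda.withPackages (p: [ p.standard-library ])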
diff --git a/nixpkgs/pkgs/build-support/agda/lib.nix b/nixpkgs/pkgs/build-support/agda/lib.nix
new file mode 100644
index 000000000000..976151a8283c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/agda/lib.nix
@@ -0,0 +1,10 @@
+{ lib }:
+{
+  /* Returns the Agda interface file for a given Agda file.
+  *
+  * Examples:
+  * interfaceFile "Everything.agda" == "Everything.agdai"
+  * interfaceFile "src/Everything.lagda.tex" == "src/Everything.agdai"
+  */
+  interfaceFile = agdaFile: lib.head (builtins.match ''(.*\.)l?agda(\.(md|org|rst|tex))?'' agdaFile) + "agdai";
+}
diff --git a/nixpkgs/pkgs/build-support/alternatives/blas/default.nix b/nixpkgs/pkgs/build-support/alternatives/blas/default.nix
new file mode 100644
index 000000000000..cf880677fddc
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/alternatives/blas/default.nix
@@ -0,0 +1,141 @@
+{ lib, stdenv
+, lapack-reference, openblasCompat, openblas
+, isILP64 ? false
+, blasProvider ? if isILP64 then openblas else openblasCompat }:
+
+let
+  blasFortranSymbols = [
+    "caxpy" "ccopy" "cdotc" "cdotu" "cgbmv" "cgemm" "cgemv" "cgerc" "cgeru"
+    "chbmv" "chemm" "chemv" "cher" "cher2" "cher2k" "cherk" "chpmv" "chpr"
+    "chpr2" "crotg" "cscal" "csrot" "csscal" "cswap" "csymm" "csyr2k" "csyrk"
+    "ctbmv" "ctbsv" "ctpmv" "ctpsv" "ctrmm" "ctrmv" "ctrsm" "ctrsv" "dasum"
+    "daxpy" "dcabs1" "dcopy" "ddot" "dgbmv" "dgemm" "dgemv" "dger" "dnrm2"
+    "drot" "drotg" "drotm" "drotmg" "dsbmv" "dscal" "dsdot" "dspmv" "dspr"
+    "dspr2" "dswap" "dsymm" "dsymv" "dsyr" "dsyr2" "dsyr2k" "dsyrk" "dtbmv"
+    "dtbsv" "dtpmv" "dtpsv" "dtrmm" "dtrmv" "dtrsm" "dtrsv" "dzasum" "dznrm2"
+    "icamax" "idamax" "isamax" "izamax" "lsame" "sasum" "saxpy" "scabs1"
+    "scasum" "scnrm2" "scopy" "sdot" "sdsdot" "sgbmv" "sgemm" "sgemv"
+    "sger" "snrm2" "srot" "srotg" "srotm" "srotmg" "ssbmv" "sscal" "sspmv"
+    "sspr" "sspr2" "sswap" "ssymm" "ssymv" "ssyr" "ssyr2" "ssyr2k" "ssyrk"
+    "stbmv" "stbsv" "stpmv" "stpsv" "strmm" "strmv" "strsm" "strsv" "xerbla"
+    "xerbla_array" "zaxpy" "zcopy" "zdotc" "zdotu" "zdrot" "zdscal" "zgbmv"
+    "zgemm" "zgemv" "zgerc" "zgeru" "zhbmv" "zhemm" "zhemv" "zher" "zher2"
+    "zher2k" "zherk" "zhpmv" "zhpr" "zhpr2" "zrotg" "zscal" "zswap" "zsymm"
+    "zsyr2k" "zsyrk" "ztbmv" "ztbsv" "ztpmv" "ztpsv" "ztrmm" "ztrmv" "ztrsm"
+    "ztrsv"
+  ];
+
+  version = "3";
+  canonicalExtension = if stdenv.hostPlatform.isLinux
+                       then "${stdenv.hostPlatform.extensions.sharedLibrary}.${version}"
+                       else stdenv.hostPlatform.extensions.sharedLibrary;
+
+
+  isILP64 = blasProvider.blas64 or false;
+  blasImplementation = lib.getName blasProvider;
+
+in
+
+assert isILP64 -> (blasImplementation == "openblas" && blasProvider.blas64) || blasImplementation == "mkl";
+
+stdenv.mkDerivation {
+  pname = "blas";
+  inherit version;
+
+  outputs = [ "out" "dev" ];
+
+  meta = (blasProvider.meta or {}) // {
+    description = "${lib.getName blasProvider} with just the BLAS C and FORTRAN ABI";
+  };
+
+  passthru = {
+    inherit isILP64;
+    provider = blasProvider;
+    implementation = blasImplementation;
+  };
+
+  dontBuild = true;
+  dontConfigure = true;
+  unpackPhase = "src=$PWD";
+
+  dontPatchELF = true;
+
+  installPhase = (''
+  mkdir -p $out/lib $dev/include $dev/lib/pkgconfig
+
+  libblas="${lib.getLib blasProvider}/lib/libblas${canonicalExtension}"
+
+  if ! [ -e "$libblas" ]; then
+    echo "$libblas does not exist, ${blasProvider.name} does not provide libblas."
+    exit 1
+  fi
+
+  $NM -an "$libblas" | cut -f3 -d' ' > symbols
+  for symbol in ${toString blasFortranSymbols}; do
+    grep -q "^''${symbol}_$" symbols || { echo "$symbol" was not found in "$libblas"; exit 1; }
+  done
+
+  cp -L "$libblas" $out/lib/libblas${canonicalExtension}
+  chmod +w $out/lib/libblas${canonicalExtension}
+
+'' + (if stdenv.hostPlatform.parsed.kernel.execFormat.name == "elf" then ''
+  patchelf --set-soname libblas${canonicalExtension} $out/lib/libblas${canonicalExtension}
+  patchelf --set-rpath "$(patchelf --print-rpath $out/lib/libblas${canonicalExtension}):${lib.getLib blasProvider}/lib" $out/lib/libblas${canonicalExtension}
+'' else if stdenv.hostPlatform.isDarwin then ''
+  install_name_tool \
+    -id libblas${canonicalExtension} \
+    -add_rpath ${lib.getLib blasProvider}/lib \
+    $out/lib/libblas${canonicalExtension}
+'' else "") + ''
+
+  if [ "$out/lib/libblas${canonicalExtension}" != "$out/lib/libblas${stdenv.hostPlatform.extensions.sharedLibrary}" ]; then
+    ln -s $out/lib/libblas${canonicalExtension} "$out/lib/libblas${stdenv.hostPlatform.extensions.sharedLibrary}"
+  fi
+
+  cat <<EOF > $dev/lib/pkgconfig/blas.pc
+Name: blas
+Version: ${version}
+Description: BLAS FORTRAN implementation
+Libs: -L$out/lib -lblas
+Cflags: -I$dev/include
+EOF
+
+  libcblas="${lib.getLib blasProvider}/lib/libcblas${canonicalExtension}"
+
+  if ! [ -e "$libcblas" ]; then
+    echo "$libcblas does not exist, ${blasProvider.name} does not provide libcblas."
+    exit 1
+  fi
+
+  cp -L "$libcblas" $out/lib/libcblas${canonicalExtension}
+  chmod +w $out/lib/libcblas${canonicalExtension}
+
+'' + (if stdenv.hostPlatform.parsed.kernel.execFormat.name == "elf" then ''
+  patchelf --set-soname libcblas${canonicalExtension} $out/lib/libcblas${canonicalExtension}
+  patchelf --set-rpath "$(patchelf --print-rpath $out/lib/libcblas${canonicalExtension}):${lib.getLib blasProvider}/lib" $out/lib/libcblas${canonicalExtension}
+'' else if stdenv.hostPlatform.isDarwin then ''
+  install_name_tool \
+    -id libcblas${canonicalExtension} \
+    -add_rpath ${lib.getLib blasProvider}/lib \
+    $out/lib/libcblas${canonicalExtension}
+'' else "") + ''
+  if [ "$out/lib/libcblas${canonicalExtension}" != "$out/lib/libcblas${stdenv.hostPlatform.extensions.sharedLibrary}" ]; then
+    ln -s $out/lib/libcblas${canonicalExtension} "$out/lib/libcblas${stdenv.hostPlatform.extensions.sharedLibrary}"
+  fi
+
+  cp ${lib.getDev lapack-reference}/include/cblas{,_mangling}.h $dev/include
+
+  cat <<EOF > $dev/lib/pkgconfig/cblas.pc
+Name: cblas
+Version: ${version}
+Description: BLAS C implementation
+Cflags: -I$dev/include
+Libs: -L$out/lib -lcblas
+EOF
+'' + lib.optionalString (blasImplementation == "mkl") ''
+  mkdir -p $out/nix-support
+  echo 'export MKL_INTERFACE_LAYER=${lib.optionalString isILP64 "I"}LP64,GNU' > $out/nix-support/setup-hook
+  ln -s $out/lib/libblas${canonicalExtension} $out/lib/libmkl_rt${stdenv.hostPlatform.extensions.sharedLibrary}
+  ln -sf ${blasProvider}/include/* $dev/include
+'');
+}
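
Downstream packages link plain -lblas/-lcblas against this meta-package, so the
implementation can be swapped without touching consumers. A sketch of the
override points exposed above (mkl as an alternative provider is an assumption
about the surrounding package set); the lapack wrapper below follows the same
pattern:

    # Select a different BLAS implementation behind the generic libblas:
    blas.override { blasProvider = mkl; }

    # Request an ILP64 (64-bit integer indexing) variant:
    blas.override { isILP64 = true; }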
diff --git a/nixpkgs/pkgs/build-support/alternatives/lapack/default.nix b/nixpkgs/pkgs/build-support/alternatives/lapack/default.nix
new file mode 100644
index 000000000000..7e74eb96b747
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/alternatives/lapack/default.nix
@@ -0,0 +1,107 @@
+{ lib, stdenv
+, lapack-reference, openblasCompat, openblas
+, isILP64 ? false
+, lapackProvider ? if isILP64 then openblas else openblasCompat }:
+
+let
+
+  version = "3";
+  canonicalExtension = if stdenv.hostPlatform.isLinux
+                       then "${stdenv.hostPlatform.extensions.sharedLibrary}.${version}"
+                       else stdenv.hostPlatform.extensions.sharedLibrary;
+
+  lapackImplementation = lib.getName lapackProvider;
+
+in
+
+assert isILP64 -> (lapackImplementation == "openblas" && lapackProvider.blas64) || lapackImplementation == "mkl";
+
+stdenv.mkDerivation {
+  pname = "lapack";
+  inherit version;
+
+  outputs = [ "out" "dev" ];
+
+  meta = (lapackProvider.meta or {}) // {
+    description = "${lib.getName lapackProvider} with just the LAPACK C and FORTRAN ABI";
+  };
+
+  passthru = {
+    inherit isILP64;
+    provider = lapackProvider;
+    implementation = lapackImplementation;
+  };
+
+  dontBuild = true;
+  dontConfigure = true;
+  unpackPhase = "src=$PWD";
+
+  dontPatchELF = true;
+
+  installPhase = (''
+  mkdir -p $out/lib $dev/include $dev/lib/pkgconfig
+
+  liblapack="${lib.getLib lapackProvider}/lib/liblapack${canonicalExtension}"
+
+  if ! [ -e "$liblapack" ]; then
+    echo "$liblapack does not exist, ${lapackProvider.name} does not provide liblapack."
+    exit 1
+  fi
+
+  cp -L "$liblapack" $out/lib/liblapack${canonicalExtension}
+  chmod +w $out/lib/liblapack${canonicalExtension}
+
+'' + (if stdenv.hostPlatform.parsed.kernel.execFormat.name == "elf" then ''
+  patchelf --set-soname liblapack${canonicalExtension} $out/lib/liblapack${canonicalExtension}
+  patchelf --set-rpath "$(patchelf --print-rpath $out/lib/liblapack${canonicalExtension}):${lapackProvider}/lib" $out/lib/liblapack${canonicalExtension}
+'' else "") + ''
+
+  if [ "$out/lib/liblapack${canonicalExtension}" != "$out/lib/liblapack${stdenv.hostPlatform.extensions.sharedLibrary}" ]; then
+    ln -s $out/lib/liblapack${canonicalExtension} "$out/lib/liblapack${stdenv.hostPlatform.extensions.sharedLibrary}"
+  fi
+
+  install -D ${lib.getDev lapack-reference}/include/lapack.h $dev/include/lapack.h
+
+  cat <<EOF > $dev/lib/pkgconfig/lapack.pc
+Name: lapack
+Version: ${version}
+Description: LAPACK FORTRAN implementation
+Cflags: -I$dev/include
+Libs: -L$out/lib -llapack
+EOF
+
+  liblapacke="${lib.getLib lapackProvider}/lib/liblapacke${canonicalExtension}"
+
+  if ! [ -e "$liblapacke" ]; then
+    echo "$liblapacke does not exist, ${lapackProvider.name} does not provide liblapacke."
+    exit 1
+  fi
+
+  cp -L "$liblapacke" $out/lib/liblapacke${canonicalExtension}
+  chmod +w $out/lib/liblapacke${canonicalExtension}
+
+'' + (if stdenv.hostPlatform.parsed.kernel.execFormat.name == "elf" then ''
+  patchelf --set-soname liblapacke${canonicalExtension} $out/lib/liblapacke${canonicalExtension}
+  patchelf --set-rpath "$(patchelf --print-rpath $out/lib/liblapacke${canonicalExtension}):${lib.getLib lapackProvider}/lib" $out/lib/liblapacke${canonicalExtension}
+'' else "") + ''
+
+  if [ -f "$out/lib/liblapacke.so.3" ]; then
+    ln -s $out/lib/liblapacke.so.3 $out/lib/liblapacke.so
+  fi
+
+  cp ${lib.getDev lapack-reference}/include/lapacke{,_mangling,_config}.h $dev/include
+
+  cat <<EOF > $dev/lib/pkgconfig/lapacke.pc
+Name: lapacke
+Version: ${version}
+Description: LAPACK C implementation
+Cflags: -I$dev/include
+Libs: -L$out/lib -llapacke
+EOF
+'' + lib.optionalString (lapackImplementation == "mkl") ''
+  mkdir -p $out/nix-support
+  echo 'export MKL_INTERFACE_LAYER=${lib.optionalString isILP64 "I"}LP64,GNU' > $out/nix-support/setup-hook
+  ln -s $out/lib/liblapack${canonicalExtension} $out/lib/libmkl_rt${stdenv.hostPlatform.extensions.sharedLibrary}
+  ln -sf ${lapackProvider}/include/* $dev/include
+'');
+}
diff --git a/nixpkgs/pkgs/build-support/appimage/appimage-exec.sh b/nixpkgs/pkgs/build-support/appimage/appimage-exec.sh
new file mode 100755
index 000000000000..4ff6802e6453
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/appimage/appimage-exec.sh
@@ -0,0 +1,142 @@
+#!@shell@
+# shellcheck shell=bash
+
+if [ -n "$DEBUG" ] ; then
+  set -x
+fi
+
+PATH="@path@:$PATH"
+apprun_opt=true
+
+# src : AppImage
+# dest : let's unpack() create the directory
+unpack() {
+  local src="$1"
+  local out="$2"
+
+  # https://github.com/AppImage/libappimage/blob/ca8d4b53bed5cbc0f3d0398e30806e0d3adeaaab/src/libappimage/utils/MagicBytesChecker.cpp#L45-L63
+  local appimageSignature;
+  appimageSignature="$(LC_ALL=C readelf -h "$src" | awk 'NR==2{print $10$11;}')"
+  local appimageType;
+  appimageType="$(LC_ALL=C readelf -h "$src" | awk 'NR==2{print $12;}')"
+
+  # check AppImage signature
+  if [ "$appimageSignature" != "4149" ]; then
+    echo "Not an AppImage file"
+    exit
+  fi
+
+  case "$appimageType" in
+    "01")
+      echo "Uncompress $(basename "$src") of type $appimageType"
+      mkdir "$out"
+      pv "$src" | bsdtar -x -C "$out" -f -
+      ;;
+
+    "02")
+      # This method avoids issues with non-executable AppImages,
+      # non-native packers, packer patching, and the squashfs-root destination prefix.
+
+      # multiarch offset one-liner using same method as AppImage
+      # see https://gist.github.com/probonopd/a490ba3401b5ef7b881d5e603fa20c93
+      offset=$(LC_ALL=C readelf -h "$src" | awk 'NR==13{e_shoff=$5} NR==18{e_shentsize=$5} NR==19{e_shnum=$5} END{print e_shoff+e_shentsize*e_shnum}')
+      echo "Uncompress $(basename "$src") of type $appimageType @ offset $offset"
+      unsquashfs -q -d "$out" -o "$offset" "$src"
+      chmod go-w "$out"
+      ;;
+
+    # "03")
+    #   get ready, https://github.com/TheAssassin/type3-runtime
+
+    *)
+      echo Unsupported AppImage Type: "$appimageType"
+      exit
+      ;;
+  esac
+  echo "$(basename "$src") is now installed in $out"
+}
+
+apprun() {
+
+  SHA256=$(sha256sum "$APPIMAGE" | awk '{print $1}')
+  export APPDIR="${XDG_CACHE_HOME:-$HOME/.cache}/appimage-run/$SHA256"
+
+  #compatibility
+  if [ -x "$APPDIR/squashfs-root" ]; then APPDIR="$APPDIR/squashfs-root"; fi
+
+  if [ ! -x "$APPDIR" ]; then
+    mkdir -p "$(dirname "$APPDIR")"
+    unpack "$APPIMAGE" "$APPDIR"
+  else echo "$(basename "$APPIMAGE") is already unpacked in $APPDIR"
+  fi
+
+  export PATH="$PATH:$PWD/usr/bin"
+}
+
+wrap() {
+
+  # much the same as in appimageTools
+  export APPIMAGE_SILENT_INSTALL=1
+
+  if [ -n "$APPIMAGE_DEBUG_EXEC" ]; then
+    cd "$APPDIR" || true
+    exec "$APPIMAGE_DEBUG_EXEC"
+  fi
+
+  exec "$APPDIR/AppRun" "$@"
+}
+
+usage() {
+  cat <<EOF
+Usage: appimage-run [appimage-run options] <AppImage> [AppImage options]
+
+-h      show this message
+-d      debug mode
+-x      <directory> : extract the AppImage into the directory, then exit.
+-w      <directory> : run an already-unpacked AppImage directory (used by appimageTools)
+
+[AppImage options]: Options are passed on to the appimage.
+If you want to execute a custom command in the appimage's environment, set the APPIMAGE_DEBUG_EXEC environment variable.
+
+EOF
+  exit 1
+}
+
+while getopts "x:w:dh" option; do
+  case "${option}" in
+    d)  set -x
+        ;;
+    x)  # eXtract
+        unpack_opt=true
+        APPDIR=${OPTARG}
+        ;;
+    w)  # WrapAppImage
+        export APPDIR=${OPTARG}
+        wrap_opt=true
+        ;;
+    h)  usage
+        ;;
+    *)  usage
+        ;;
+  esac
+done
+shift "$((OPTIND-1))"
+
+if [ -n "$wrap_opt" ] && [ -d "$APPDIR" ]; then
+  wrap "$@"
+  exit
+else
+  APPIMAGE="$(realpath "$1")" || usage
+  shift
+fi
+
+if [ -n "$unpack_opt" ] && [ -f "$APPIMAGE" ]; then
+  unpack "$APPIMAGE" "$APPDIR"
+  exit
+fi
+
+if [ -n "$apprun_opt" ] && [ -f "$APPIMAGE" ]; then
+  apprun
+  wrap "$@"
+  exit
+fi
diff --git a/nixpkgs/pkgs/build-support/appimage/default.nix b/nixpkgs/pkgs/build-support/appimage/default.nix
new file mode 100644
index 000000000000..6ba5890885d6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/appimage/default.nix
@@ -0,0 +1,191 @@
+{ lib
+
+, bash
+, binutils-unwrapped
+, coreutils
+, gawk
+, libarchive
+, pv
+, squashfsTools
+, buildFHSUserEnv
+, pkgs
+}:
+
+rec {
+  appimage-exec = pkgs.substituteAll {
+    src = ./appimage-exec.sh;
+    isExecutable = true;
+    dir = "bin";
+    path = lib.makeBinPath [
+      bash
+      binutils-unwrapped
+      coreutils
+      gawk
+      libarchive
+      pv
+      squashfsTools
+    ];
+  };
+
+  extract = { name, src }: pkgs.runCommand "${name}-extracted" {
+      buildInputs = [ appimage-exec ];
+    } ''
+      appimage-exec.sh -x $out ${src}
+    '';
+
+  # for compatibility, deprecated
+  extractType1 = extract;
+  extractType2 = extract;
+  wrapType1 = wrapType2;
+
+  wrapAppImage = args@{ name, src, extraPkgs, ... }: buildFHSUserEnv
+    (defaultFhsEnvArgs // {
+      inherit name;
+
+      targetPkgs = pkgs: [ appimage-exec ]
+        ++ defaultFhsEnvArgs.targetPkgs pkgs ++ extraPkgs pkgs;
+
+      runScript = "appimage-exec.sh -w ${src}";
+    } // (removeAttrs args (builtins.attrNames (builtins.functionArgs wrapAppImage))));
+
+  wrapType2 = args@{ name, src, extraPkgs ? pkgs: [ ], ... }: wrapAppImage
+    (args // {
+      inherit name extraPkgs;
+      src = extract { inherit name src; };
+    });
+
+  defaultFhsEnvArgs = {
+    name = "appimage-env";
+
+    # Most of the packages were taken from the Steam chroot
+    targetPkgs = pkgs: with pkgs; [
+      gtk3
+      bashInteractive
+      gnome.zenity
+      python2
+      xorg.xrandr
+      which
+      perl
+      xdg-utils
+      iana-etc
+      krb5
+    ];
+
+    # list of libraries expected in an appimage environment:
+    # https://github.com/AppImage/pkg2appimage/blob/master/excludelist
+    multiPkgs = pkgs: with pkgs; [
+      desktop-file-utils
+      xorg.libXcomposite
+      xorg.libXtst
+      xorg.libXrandr
+      xorg.libXext
+      xorg.libX11
+      xorg.libXfixes
+      libGL
+
+      gst_all_1.gstreamer
+      gst_all_1.gst-plugins-ugly
+      gst_all_1.gst-plugins-base
+      libdrm
+      xorg.xkeyboardconfig
+      xorg.libpciaccess
+
+      glib
+      gtk2
+      bzip2
+      zlib
+      gdk-pixbuf
+
+      xorg.libXinerama
+      xorg.libXdamage
+      xorg.libXcursor
+      xorg.libXrender
+      xorg.libXScrnSaver
+      xorg.libXxf86vm
+      xorg.libXi
+      xorg.libSM
+      xorg.libICE
+      gnome2.GConf
+      freetype
+      (curl.override { gnutlsSupport = true; sslSupport = false; })
+      nspr
+      nss
+      fontconfig
+      cairo
+      pango
+      expat
+      dbus
+      cups
+      libcap
+      SDL2
+      libusb1
+      udev
+      dbus-glib
+      atk
+      at-spi2-atk
+      libudev0-shim
+      networkmanager098
+
+      xorg.libXt
+      xorg.libXmu
+      xorg.libxcb
+      xorg.xcbutil
+      xorg.xcbutilwm
+      xorg.xcbutilimage
+      xorg.xcbutilkeysyms
+      xorg.xcbutilrenderutil
+      libGLU
+      libuuid
+      libogg
+      libvorbis
+      SDL
+      SDL2_image
+      glew110
+      openssl
+      libidn
+      tbb
+      wayland
+      mesa
+      libxkbcommon
+
+      flac
+      freeglut
+      libjpeg
+      libpng12
+      libsamplerate
+      libmikmod
+      libtheora
+      libtiff
+      pixman
+      speex
+      SDL_image
+      SDL_ttf
+      SDL_mixer
+      SDL2_ttf
+      SDL2_mixer
+      libappindicator-gtk2
+      libcaca
+      libcanberra
+      libgcrypt
+      libvpx
+      librsvg
+      xorg.libXft
+      libvdpau
+      alsa-lib
+
+      harfbuzz
+      e2fsprogs
+      libgpgerror
+      keyutils.lib
+      libjack2
+      fribidi
+      p11-kit
+
+      # libraries not on the upstream include list, but nevertheless expected
+      # by at least one appimage
+      libtool.lib # for Synfigstudio
+      xorg.libxshmfence # for apple-music-electron
+      at-spi2-core
+    ];
+  };
+}
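
A minimal wrapType2 usage sketch (URL, hash, and the extra library are
placeholders, not taken from this diff):

    appimageTools.wrapType2 {
      name = "example-app";         # hypothetical package
      src = fetchurl {
        url = "https://example.org/Example.AppImage";
        sha256 = lib.fakeSha256;    # replace with the real hash
      };
      # Libraries needed beyond the FHS defaults listed above:
      extraPkgs = pkgs: [ pkgs.libsecret ];
    }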
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/add-darwin-ldflags-before.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/add-darwin-ldflags-before.sh
new file mode 100644
index 000000000000..75d9484846a8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/add-darwin-ldflags-before.sh
@@ -0,0 +1,81 @@
+# Unconditionally adding in platform version flags will result in warnings that
+# will be treated as errors by some packages. Add any missing flags here.
+
+# There are two things to be configured: the "platform version" (oldest
+# supported version of macos, ios, etc), and the "sdk version".
+#
+# The modern way of configuring these is to use:
+#    -platform_version $platform $platform_version $sdk_version"
+#
+# The old way is still supported, and uses flags like:
+#    -${platform}_version_min $platform_version
+#    -sdk_version $sdk_version
+#
+# If both styles are specified ld will combine them. If multiple versions are
+# specified for the same platform, ld will emit an error.
+#
+# The following adds flags for whichever properties have not already been
+# provided.
+
+havePlatformVersionFlag=
+haveDarwinSDKVersion=
+haveDarwinPlatformVersion=
+
+# Roles will be set by add-flags.sh, but add-flags.sh can be skipped when the
+# cc-wrapper has added the linker flags. Both the cc-wrapper and the binutils
+# wrapper mangle the same variable (MACOSX_DEPLOYMENT_TARGET), so if roles are
+# empty due to being run through the cc-wrapper then the mangle here is a no-op
+# and we still do the right thing.
+#
+# To be robust, make sure we always have the correct set of roles.
+accumulateRoles
+
+mangleVarSingle @darwinMinVersionVariable@ ${role_suffixes[@]+"${role_suffixes[@]}"}
+
+n=0
+nParams=${#params[@]}
+while (( n < nParams )); do
+    p=${params[n]}
+    case "$p" in
+        # the current platform
+        -@darwinPlatform@_version_min)
+            haveDarwinPlatformVersion=1
+            ;;
+
+        # legacy aliases
+        -macosx_version_min|-iphoneos_version_min|-iosmac_version_min|-uikitformac_version_min)
+            haveDarwinPlatformVersion=1
+            ;;
+
+        -sdk_version)
+            haveDarwinSDKVersion=1
+            ;;
+
+        -platform_version)
+            havePlatformVersionFlag=1
+
+            # If clang can't determine the sdk version it will pass 0.0.0. This
+            # has runtime effects so we override this to use the known sdk
+            # version.
+            if [ "${params[n+3]-}" = 0.0.0 ]; then
+                params[n+3]=@darwinSdkVersion@
+            fi
+            ;;
+    esac
+    n=$((n + 1))
+done
+
+# If the caller has set -platform_version, trust they're doing the right thing.
+# This will be the typical case for clang in nixpkgs.
+if [ ! "$havePlatformVersionFlag" ]; then
+    if [ ! "$haveDarwinSDKVersion" ] && [ ! "$haveDarwinPlatformVersion" ]; then
+        # Nothing provided. Use the modern "-platform_version" to set both.
+        extraBefore+=(-platform_version @darwinPlatform@ "${@darwinMinVersionVariable@_@suffixSalt@:-@darwinMinVersion@}" @darwinSdkVersion@)
+    elif [ ! "$haveDarwinSDKVersion" ]; then
+        # Add missing sdk version
+        extraBefore+=(-sdk_version @darwinSdkVersion@)
+    elif [ ! "$haveDarwinPlatformVersion" ]; then
+        # Add missing platform version
+        extraBefore+=(-@darwinPlatform@_version_min "${@darwinMinVersionVariable@_@suffixSalt@:-@darwinMinVersion@}")
+    fi
+fi
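
To make the two styles concrete, equivalent invocations look roughly like this
(version numbers are illustrative):

    # Modern style: platform, minimum platform version, and SDK version at once.
    ld -o app app.o -platform_version macos 10.12 11.0

    # Legacy style: the same information via two separate flags.
    ld -o app app.o -macosx_version_min 10.12 -sdk_version 11.0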
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/add-flags.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/add-flags.sh
new file mode 100644
index 000000000000..3b94daba65d7
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/add-flags.sh
@@ -0,0 +1,37 @@
+# See cc-wrapper for comments.
+var_templates_list=(
+    NIX_IGNORE_LD_THROUGH_GCC
+    NIX_LDFLAGS
+    NIX_LDFLAGS_BEFORE
+    NIX_DYNAMIC_LINKER
+    NIX_LDFLAGS_AFTER
+    NIX_LDFLAGS_HARDEN
+    NIX_HARDENING_ENABLE
+)
+var_templates_bool=(
+    NIX_SET_BUILD_ID
+    NIX_DONT_SET_RPATH
+)
+
+accumulateRoles
+
+for var in "${var_templates_list[@]}"; do
+    mangleVarList "$var" ${role_suffixes[@]+"${role_suffixes[@]}"}
+done
+for var in "${var_templates_bool[@]}"; do
+    mangleVarBool "$var" ${role_suffixes[@]+"${role_suffixes[@]}"}
+done
+
+if [ -e @out@/nix-support/libc-ldflags ]; then
+    NIX_LDFLAGS_@suffixSalt@+=" $(< @out@/nix-support/libc-ldflags)"
+fi
+
+if [ -z "$NIX_DYNAMIC_LINKER_@suffixSalt@" ] && [ -e @out@/nix-support/ld-set-dynamic-linker ]; then
+    NIX_DYNAMIC_LINKER_@suffixSalt@="$(< @out@/nix-support/dynamic-linker)"
+fi
+
+if [ -e @out@/nix-support/libc-ldflags-before ]; then
+    NIX_LDFLAGS_BEFORE_@suffixSalt@="$(< @out@/nix-support/libc-ldflags-before) $NIX_LDFLAGS_BEFORE_@suffixSalt@"
+fi
+
+export NIX_BINTOOLS_WRAPPER_FLAGS_SET_@suffixSalt@=1
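
The @suffixSalt@ placeholder is the target triple with '-' and '.' replaced by
'_' (see the suffixSalt definition in default.nix below), so for a target of
x86_64-unknown-linux-gnu the variables consulted here look like this (values
are illustrative):

    NIX_LDFLAGS_x86_64_unknown_linux_gnu="-L/example/lib"
    NIX_DYNAMIC_LINKER_x86_64_unknown_linux_gnu=/example/lib/ld-linux-x86-64.so.2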
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/add-hardening.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/add-hardening.sh
new file mode 100644
index 000000000000..5e49b7bd9053
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/add-hardening.sh
@@ -0,0 +1,58 @@
+declare -a hardeningLDFlags=()
+
+declare -A hardeningEnableMap=()
+
+# Intentionally word-split in case 'NIX_HARDENING_ENABLE' is defined in Nix. The
+# array expansion also prevents undefined variables from causing trouble with
+# `set -u`.
+for flag in ${NIX_HARDENING_ENABLE_@suffixSalt@-}; do
+  hardeningEnableMap["$flag"]=1
+done
+
+# Remove unsupported flags.
+for flag in @hardening_unsupported_flags@; do
+  unset -v "hardeningEnableMap[$flag]"
+done
+
+if (( "${NIX_DEBUG:-0}" >= 1 )); then
+  declare -a allHardeningFlags=(pie relro bindnow)
+  declare -A hardeningDisableMap=()
+
+  # Determine which flags were effectively disabled so we can report below.
+  for flag in "${allHardeningFlags[@]}"; do
+    if [[ -z "${hardeningEnableMap[$flag]-}" ]]; then
+      hardeningDisableMap[$flag]=1
+    fi
+  done
+
+  printf 'HARDENING: disabled flags:' >&2
+  (( "${#hardeningDisableMap[@]}" )) && printf ' %q' "${!hardeningDisableMap[@]}" >&2
+  echo >&2
+
+  if (( "${#hardeningEnableMap[@]}" )); then
+    echo 'HARDENING: Is active (not completely disabled with "all" flag)' >&2;
+  fi
+fi
+
+for flag in "${!hardeningEnableMap[@]}"; do
+  case $flag in
+    pie)
+      if [[ ! ("$*" =~ " -shared " || "$*" =~ " -static ") ]]; then
+        if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling LDFlags -pie >&2; fi
+        hardeningLDFlags+=('-pie')
+      fi
+      ;;
+    relro)
+      if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling relro >&2; fi
+      hardeningLDFlags+=('-z' 'relro')
+      ;;
+    bindnow)
+      if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling bindnow >&2; fi
+      hardeningLDFlags+=('-z' 'now')
+      ;;
+    *)
+      # Ignore unsupported. Checked in Nix that at least *some*
+      # tool supports each flag.
+      ;;
+  esac
+done
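
As an illustration of the mapping above (same illustrative target salt as in
the add-flags example; the flag names are the ones this script handles):

    # With these hardening flags enabled for the target...
    NIX_HARDENING_ENABLE_x86_64_unknown_linux_gnu="relro bindnow"
    # ...the wrapper appends the linker flags: -z relro -z now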
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-install_name_tool-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-install_name_tool-wrapper.sh
new file mode 100755
index 000000000000..376a7abfe41c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-install_name_tool-wrapper.sh
@@ -0,0 +1,49 @@
+#! @shell@
+# shellcheck shell=bash
+
+set -eu -o pipefail +o posix
+shopt -s nullglob
+
+if (( "${NIX_DEBUG:-0}" >= 7 )); then
+    set -x
+fi
+
+source @signingUtils@
+
+extraAfter=()
+extraBefore=()
+params=("$@")
+
+input=
+
+pprev=
+prev=
+for p in \
+    ${extraBefore+"${extraBefore[@]}"} \
+    ${params+"${params[@]}"} \
+    ${extraAfter+"${extraAfter[@]}"}
+do
+    if [ "$pprev" != "-change" ] && [[ "$prev" != -* ]] && [[ "$p" != -* ]]; then
+        input="$p"
+    fi
+    pprev="$prev"
+    prev="$p"
+done
+
+# Optionally print debug info.
+if (( "${NIX_DEBUG:-0}" >= 1 )); then
+    # Old bash workaround, see above.
+    echo "extra flags before to @prog@:" >&2
+    printf "  %q\n" ${extraBefore+"${extraBefore[@]}"}  >&2
+    echo "original flags to @prog@:" >&2
+    printf "  %q\n" ${params+"${params[@]}"} >&2
+    echo "extra flags after to @prog@:" >&2
+    printf "  %q\n" ${extraAfter+"${extraAfter[@]}"} >&2
+fi
+
+@prog@ \
+    ${extraBefore+"${extraBefore[@]}"} \
+    ${params+"${params[@]}"} \
+    ${extraAfter+"${extraAfter[@]}"}
+
+sign "$input"
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-strip-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-strip-wrapper.sh
new file mode 100755
index 000000000000..a67699547a6f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-strip-wrapper.sh
@@ -0,0 +1,78 @@
+#! @shell@
+# shellcheck shell=bash
+
+set -eu -o pipefail +o posix
+shopt -s nullglob
+
+if (( "${NIX_DEBUG:-0}" >= 7 )); then
+    set -x
+fi
+
+source @signingUtils@
+
+extraAfter=()
+extraBefore=()
+params=("$@")
+
+output=
+inputs=()
+
+restAreFiles=
+prev=
+for p in \
+    ${extraBefore+"${extraBefore[@]}"} \
+    ${params+"${params[@]}"} \
+    ${extraAfter+"${extraAfter[@]}"}
+do
+    if [ "$restAreFiles" ]; then
+        inputs+=("$p")
+    else
+        case "$prev" in
+            -s|-R|-d|-arch)
+                # Unrelated arguments with values
+                ;;
+            -o)
+                # Explicit output
+                output="$p"
+                ;;
+            *)
+                # Any other argument either takes no value, or is a file.
+                if [[ "$p" != -* ]]; then
+                    inputs+=("$p")
+                fi
+                ;;
+        esac
+
+        if [ "$p" == - ]; then
+            restAreFiles=1
+        fi
+    fi
+
+    prev="$p"
+done
+
+# Optionally print debug info.
+if (( "${NIX_DEBUG:-0}" >= 1 )); then
+    # Old bash workaround, see above.
+    echo "extra flags before to @prog@:" >&2
+    printf "  %q\n" ${extraBefore+"${extraBefore[@]}"}  >&2
+    echo "original flags to @prog@:" >&2
+    printf "  %q\n" ${params+"${params[@]}"} >&2
+    echo "extra flags after to @prog@:" >&2
+    printf "  %q\n" ${extraAfter+"${extraAfter[@]}"} >&2
+fi
+
+@prog@ \
+    ${extraBefore+"${extraBefore[@]}"} \
+    ${params+"${params[@]}"} \
+    ${extraAfter+"${extraAfter[@]}"}
+
+if [ "$output" ]; then
+    # Single explicit output
+    signIfRequired "$output"
+else
+    # Multiple inputs, rewritten in place
+    for input in "${inputs[@]}"; do
+      signIfRequired "$input"
+    done
+fi
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/default.nix b/nixpkgs/pkgs/build-support/bintools-wrapper/default.nix
new file mode 100644
index 000000000000..5d2f2f977a70
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/default.nix
@@ -0,0 +1,397 @@
+# The Nixpkgs CC is not directly usable, since it doesn't know where
+# the C library and standard header files are. Therefore the compiler
+# produced by that package cannot be installed directly in a user
+# environment and used from the command line. So we use a wrapper
+# script that sets up the right environment variables so that the
+# compiler and the linker just "work".
+
+{ name ? ""
+, lib
+, stdenvNoCC
+, bintools ? null, libc ? null, coreutils ? null, shell ? stdenvNoCC.shell, gnugrep ? null
+, netbsd ? null, netbsdCross ? null
+, sharedLibraryLoader ?
+  if libc == null then
+    null
+  else if stdenvNoCC.targetPlatform.isNetBSD then
+    if !(targetPackages ? netbsdCross) then
+      netbsd.ld_elf_so
+    else if libc != targetPackages.netbsdCross.headers then
+      targetPackages.netbsdCross.ld_elf_so
+    else
+      null
+  else
+    lib.getLib libc
+, nativeTools, noLibc ? false, nativeLibc, nativePrefix ? ""
+, propagateDoc ? bintools != null && bintools ? man
+, extraPackages ? [], extraBuildCommands ? ""
+, buildPackages ? {}
+, targetPackages ? {}
+, useMacosReexportHack ? false
+
+# Darwin code signing support utilities
+, postLinkSignHook ? null, signingUtils ? null
+}:
+
+with lib;
+
+assert nativeTools -> !propagateDoc && nativePrefix != "";
+assert !nativeTools ->
+  bintools != null && coreutils != null && gnugrep != null;
+assert !(nativeLibc && noLibc);
+assert (noLibc || nativeLibc) == (libc == null);
+
+let
+  stdenv = stdenvNoCC;
+  inherit (stdenv) hostPlatform targetPlatform;
+
+  # Prefix for binaries. Customarily ends with a dash separator.
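+  # (For example, "aarch64-unknown-linux-gnu-" when targeting that platform.)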
+  #
+  # TODO(@Ericson2314) Make unconditional, or optional but always true by
+  # default.
+  targetPrefix = lib.optionalString (targetPlatform != hostPlatform)
+                                        (targetPlatform.config + "-");
+
+  bintoolsVersion = lib.getVersion bintools;
+  bintoolsName = lib.removePrefix targetPrefix (lib.getName bintools);
+
+  libc_bin = if libc == null then null else getBin libc;
+  libc_dev = if libc == null then null else getDev libc;
+  libc_lib = if libc == null then null else getLib libc;
+  bintools_bin = if nativeTools then "" else getBin bintools;
+  # The wrapper scripts use 'cat' and 'grep', so we may need coreutils.
+  coreutils_bin = if nativeTools then "" else getBin coreutils;
+
+  # See description in cc-wrapper.
+  suffixSalt = replaceStrings ["-" "."] ["_" "_"] targetPlatform.config;
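+  # (For example, "x86_64-unknown-linux-gnu" becomes "x86_64_unknown_linux_gnu".)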
+
+  # The dynamic linker has different names on different platforms. This is a
+  # shell glob that ought to match it.
+  dynamicLinker =
+    /**/ if sharedLibraryLoader == null then null
+    else if targetPlatform.libc == "musl"             then "${sharedLibraryLoader}/lib/ld-musl-*"
+    else if (targetPlatform.libc == "bionic" && targetPlatform.is32bit) then "/system/bin/linker"
+    else if (targetPlatform.libc == "bionic" && targetPlatform.is64bit) then "/system/bin/linker64"
+    else if targetPlatform.libc == "nblibc"           then "${sharedLibraryLoader}/libexec/ld.elf_so"
+    else if targetPlatform.system == "i686-linux"     then "${sharedLibraryLoader}/lib/ld-linux.so.2"
+    else if targetPlatform.system == "x86_64-linux"   then "${sharedLibraryLoader}/lib/ld-linux-x86-64.so.2"
+    else if targetPlatform.system == "powerpc64le-linux" then "${sharedLibraryLoader}/lib/ld64.so.2"
+    # ARM with a wildcard, which can be "" or "-armhf".
+    else if (with targetPlatform; isAarch32 && isLinux)   then "${sharedLibraryLoader}/lib/ld-linux*.so.3"
+    else if targetPlatform.system == "aarch64-linux"  then "${sharedLibraryLoader}/lib/ld-linux-aarch64.so.1"
+    else if targetPlatform.system == "powerpc-linux"  then "${sharedLibraryLoader}/lib/ld.so.1"
+    else if targetPlatform.isMips                     then "${sharedLibraryLoader}/lib/ld.so.1"
+    else if targetPlatform.isDarwin                   then "/usr/lib/dyld"
+    else if targetPlatform.isFreeBSD                  then "/libexec/ld-elf.so.1"
+    else if lib.hasSuffix "pc-gnu" targetPlatform.config then "ld.so.1"
+    else null;
+
+  expand-response-params =
+    if buildPackages ? stdenv && buildPackages.stdenv.hasCC && buildPackages.stdenv.cc != "/dev/null"
+    then import ../expand-response-params { inherit (buildPackages) stdenv; }
+    else "";
+
+in
+
+stdenv.mkDerivation {
+  pname = targetPrefix
+    + (if name != "" then name else "${bintoolsName}-wrapper");
+  version = if bintools == null then null else bintoolsVersion;
+
+  preferLocalBuild = true;
+
+  inherit bintools_bin libc_bin libc_dev libc_lib coreutils_bin;
+  shell = getBin shell + shell.shellPath or "";
+  gnugrep_bin = if nativeTools then "" else gnugrep;
+
+  inherit targetPrefix suffixSalt;
+
+  outputs = [ "out" ] ++ optionals propagateDoc ([ "man" ] ++ optional (bintools ? info) "info");
+
+  passthru = {
+    inherit bintools libc nativeTools nativeLibc nativePrefix;
+
+    emacsBufferSetup = pkgs: ''
+      ; We should handle propagation here too
+      (mapc
+        (lambda (arg)
+          (when (file-directory-p (concat arg "/lib"))
+            (setenv "NIX_LDFLAGS_${suffixSalt}" (concat (getenv "NIX_LDFLAGS_${suffixSalt}") " -L" arg "/lib")))
+          (when (file-directory-p (concat arg "/lib64"))
+            (setenv "NIX_LDFLAGS_${suffixSalt}" (concat (getenv "NIX_LDFLAGS_${suffixSalt}") " -L" arg "/lib64"))))
+        '(${concatStringsSep " " (map (pkg: "\"${pkg}\"") pkgs)}))
+    '';
+  };
+
+  dontBuild = true;
+  dontConfigure = true;
+
+  unpackPhase = ''
+    src=$PWD
+  '';
+
+  installPhase =
+    ''
+      mkdir -p $out/bin $out/nix-support
+
+      wrap() {
+        local dst="$1"
+        local wrapper="$2"
+        export prog="$3"
+        substituteAll "$wrapper" "$out/bin/$dst"
+        chmod +x "$out/bin/$dst"
+      }
+    ''
+
+    + (if nativeTools then ''
+      echo ${nativePrefix} > $out/nix-support/orig-bintools
+
+      ldPath="${nativePrefix}/bin"
+    '' else ''
+      echo $bintools_bin > $out/nix-support/orig-bintools
+
+      ldPath="${bintools_bin}/bin"
+    ''
+
+    # Solaris needs an additional ld wrapper.
+    + optionalString (targetPlatform.isSunOS && nativePrefix != "") ''
+      ldPath="${nativePrefix}/bin"
+      exec="$ldPath/${targetPrefix}ld"
+      wrap ld-solaris ${./ld-solaris-wrapper.sh}
+    '')
+
+    # Create a symlink to as (the assembler).
+    + ''
+      if [ -e $ldPath/${targetPrefix}as ]; then
+        ln -s $ldPath/${targetPrefix}as $out/bin/${targetPrefix}as
+      fi
+
+    '' + (if !useMacosReexportHack then ''
+      wrap ${targetPrefix}ld ${./ld-wrapper.sh} ''${ld:-$ldPath/${targetPrefix}ld}
+    '' else ''
+      ldInner="${targetPrefix}ld-reexport-delegate"
+      wrap "$ldInner" ${./macos-sierra-reexport-hack.bash} ''${ld:-$ldPath/${targetPrefix}ld}
+      wrap "${targetPrefix}ld" ${./ld-wrapper.sh} "$out/bin/$ldInner"
+      unset ldInner
+    '') + ''
+
+      for variant in ld.gold ld.bfd ld.lld; do
+        local underlying=$ldPath/${targetPrefix}$variant
+        [[ -e "$underlying" ]] || continue
+        wrap ${targetPrefix}$variant ${./ld-wrapper.sh} $underlying
+      done
+    '';
+
+  emulation = let
+    fmt =
+      /**/ if targetPlatform.isDarwin  then "mach-o"
+      else if targetPlatform.isWindows then "pe"
+      else "elf" + toString targetPlatform.parsed.cpu.bits;
+    endianPrefix = if targetPlatform.isBigEndian then "big" else "little";
+    sep = optionalString (!targetPlatform.isMips && !targetPlatform.isPower && !targetPlatform.isRiscV) "-";
+    arch =
+      /**/ if targetPlatform.isAarch64 then endianPrefix + "aarch64"
+      else if targetPlatform.isAarch32     then endianPrefix + "arm"
+      else if targetPlatform.isx86_64  then "x86-64"
+      else if targetPlatform.isx86_32  then "i386"
+      else if targetPlatform.isMips    then {
+          mips     = "btsmipn32"; # n32 variant
+          mipsel   = "ltsmipn32"; # n32 variant
+          mips64   = "btsmip";
+          mips64el = "ltsmip";
+        }.${targetPlatform.parsed.cpu.name}
+      else if targetPlatform.isMmix then "mmix"
+      else if targetPlatform.isPower then if targetPlatform.isBigEndian then "ppc" else "lppc"
+      else if targetPlatform.isSparc then "sparc"
+      else if targetPlatform.isMsp430 then "msp430"
+      else if targetPlatform.isAvr then "avr"
+      else if targetPlatform.isAlpha then "alpha"
+      else if targetPlatform.isVc4 then "vc4"
+      else if targetPlatform.isOr1k then "or1k"
+      else if targetPlatform.isRiscV then "lriscv"
+      else throw "unknown emulation for platform: ${targetPlatform.config}";
+    in if targetPlatform.useLLVM or false then ""
+       else targetPlatform.bfdEmulation or (fmt + sep + arch);
+
+  strictDeps = true;
+  depsTargetTargetPropagated = extraPackages;
+
+  wrapperName = "BINTOOLS_WRAPPER";
+
+  setupHooks = [
+    ../setup-hooks/role.bash
+    ./setup-hook.sh
+  ];
+
+  postFixup =
+    ##
+    ## General libc support
+    ##
+    optionalString (libc != null) (''
+      touch "$out/nix-support/libc-ldflags"
+      echo "-L${libc_lib}${libc.libdir or "/lib"}" >> $out/nix-support/libc-ldflags
+
+      echo "${libc_lib}" > $out/nix-support/orig-libc
+      echo "${libc_dev}" > $out/nix-support/orig-libc-dev
+    ''
+
+    ##
+    ## Dynamic linker support
+    ##
+    + optionalString (sharedLibraryLoader != null) ''
+      if [[ -z ''${dynamicLinker+x} ]]; then
+        echo "Don't know the name of the dynamic linker for platform '${targetPlatform.config}', so guessing instead." >&2
+        local dynamicLinker="${sharedLibraryLoader}/lib/ld*.so.?"
+      fi
+    ''
+
+    # Expand globs to fill array of options
+    + ''
+      dynamicLinker=($dynamicLinker)
+
+      case ''${#dynamicLinker[@]} in
+        0) echo "No dynamic linker found for platform '${targetPlatform.config}'." >&2;;
+        1) echo "Using dynamic linker: '$dynamicLinker'" >&2;;
+        *) echo "Multiple dynamic linkers found for platform '${targetPlatform.config}'." >&2;;
+      esac
+
+      if [ -n "''${dynamicLinker-}" ]; then
+        echo $dynamicLinker > $out/nix-support/dynamic-linker
+
+        ${if targetPlatform.isDarwin then ''
+          printf "export LD_DYLD_PATH=%q\n" "$dynamicLinker" >> $out/nix-support/setup-hook
+        '' else lib.optionalString (sharedLibraryLoader != null) ''
+          if [ -e ${sharedLibraryLoader}/lib/32/ld-linux.so.2 ]; then
+            echo ${sharedLibraryLoader}/lib/32/ld-linux.so.2 > $out/nix-support/dynamic-linker-m32
+          fi
+          touch $out/nix-support/ld-set-dynamic-linker
+        ''}
+      fi
+    '')
+
+    ##
+    ## User env support
+    ##
+
+    # Propagate the underlying unwrapped bintools so that if you
+    # install the wrapper, you get tools like objdump (likewise for
+    # any binaries of libc).
+    + optionalString (!nativeTools) ''
+      printWords ${bintools_bin} ${if libc == null then "" else libc_bin} > $out/nix-support/propagated-user-env-packages
+    ''
+
+    ##
+    ## Man page and info support
+    ##
+    + optionalString propagateDoc (''
+      ln -s ${bintools.man} $man
+    '' + optionalString (bintools ? info) ''
+      ln -s ${bintools.info} $info
+    '')
+
+    ##
+    ## Hardening support
+    ##
+
+    # some linkers on some platforms don't support specific -z flags
+    + ''
+      export hardening_unsupported_flags=""
+      if [[ "$($ldPath/${targetPrefix}ld -z now 2>&1 || true)" =~ un(recognized|known)\ option ]]; then
+        hardening_unsupported_flags+=" bindnow"
+      fi
+      if [[ "$($ldPath/${targetPrefix}ld -z relro 2>&1 || true)" =~ un(recognized|known)\ option ]]; then
+        hardening_unsupported_flags+=" relro"
+      fi
+    ''
+
+    + optionalString hostPlatform.isCygwin ''
+      hardening_unsupported_flags+=" pic"
+    ''
+
+    + optionalString targetPlatform.isAvr ''
+      hardening_unsupported_flags+=" relro bindnow"
+    ''
+
+    + optionalString (libc != null && targetPlatform.isAvr) ''
+      for isa in avr5 avr3 avr4 avr6 avr25 avr31 avr35 avr51 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 tiny-stack; do
+        echo "-L${getLib libc}/avr/lib/$isa" >> $out/nix-support/libc-cflags
+      done
+    ''
+
+    + optionalString stdenv.targetPlatform.isDarwin ''
+      echo "-arch ${targetPlatform.darwinArch}" >> $out/nix-support/libc-ldflags
+    ''
+
+    ###
+    ### Remove LC_UUID
+    ###
+    + optionalString (stdenv.targetPlatform.isDarwin && !(stdenv.cc.bintools.bintools.isGNU or false)) ''
+      echo "-no_uuid" >> $out/nix-support/libc-ldflags-before
+    ''
+
+    + ''
+      for flags in "$out/nix-support"/*flags*; do
+        substituteInPlace "$flags" --replace $'\n' ' '
+      done
+
+      substituteAll ${./add-flags.sh} $out/nix-support/add-flags.sh
+      substituteAll ${./add-hardening.sh} $out/nix-support/add-hardening.sh
+      substituteAll ${../wrapper-common/utils.bash} $out/nix-support/utils.bash
+    ''
+
+    ###
+    ### Ensure consistent LC_VERSION_MIN_MACOSX
+    ###
+    + optionalString stdenv.targetPlatform.isDarwin (
+      let
+        inherit (stdenv.targetPlatform)
+          darwinPlatform darwinSdkVersion
+          darwinMinVersion darwinMinVersionVariable;
+      in ''
+        export darwinPlatform=${darwinPlatform}
+        export darwinMinVersion=${darwinMinVersion}
+        export darwinSdkVersion=${darwinSdkVersion}
+        export darwinMinVersionVariable=${darwinMinVersionVariable}
+        substituteAll ${./add-darwin-ldflags-before.sh} $out/nix-support/add-local-ldflags-before.sh
+      ''
+    )
+
+    ##
+    ## Code signing on Apple Silicon
+    ##
+    + optionalString (targetPlatform.isDarwin && targetPlatform.isAarch64) ''
+      echo 'source ${postLinkSignHook}' >> $out/nix-support/post-link-hook
+
+      export signingUtils=${signingUtils}
+
+      wrap \
+        ${targetPrefix}install_name_tool \
+        ${./darwin-install_name_tool-wrapper.sh} \
+        "${bintools_bin}/bin/${targetPrefix}install_name_tool"
+
+      wrap \
+        ${targetPrefix}strip ${./darwin-strip-wrapper.sh} \
+        "${bintools_bin}/bin/${targetPrefix}strip"
+    ''
+
+    ##
+    ## Extra custom steps
+    ##
+    + extraBuildCommands;
+
+  inherit dynamicLinker expand-response-params;
+
+  # for substitution in utils.bash
+  expandResponseParams = "${expand-response-params}/bin/expand-response-params";
+
+  meta =
+    let bintools_ = if bintools != null then bintools else {}; in
+    (if bintools_ ? meta then removeAttrs bintools.meta ["priority"] else {}) //
+    { description =
+        lib.attrByPath ["meta" "description"] "System binary utilities" bintools_
+        + " (wrapper script)";
+      priority = 10;
+  } // optionalAttrs useMacosReexportHack {
+    platforms = lib.platforms.darwin;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/ld-solaris-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/ld-solaris-wrapper.sh
new file mode 100644
index 000000000000..5d81e34a047f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/ld-solaris-wrapper.sh
@@ -0,0 +1,29 @@
+#!@shell@
+set -eu -o pipefail
+shopt -s nullglob
+
+if (( "${NIX_DEBUG:-0}" >= 7 )); then
+    set -x
+fi
+
+declare -a args=("$@")
+# I've also tried adding -z direct and -z lazyload, but it gave too many problems with C++ exceptions :'(
+# Also made sure libgcc would not be lazy-loaded, as suggested here: https://www.illumos.org/issues/2534#note-3
+#   but still no success.
+declare -a argsBefore=(-z ignore) argsAfter=()
+
+# This loop makes sure all -L arguments are before -l arguments, or ld may complain it cannot find a library.
+# GNU binutils does not have this problem:
+#   http://stackoverflow.com/questions/5817269/does-the-order-of-l-and-l-options-in-the-gnu-linker-matter
+while (( $# )); do
+    case "${args[$i]}" in
+        -L)   argsBefore+=("$1" "$2"); shift ;;
+        -L?*) argsBefore+=("$1") ;;
+        *)    argsAfter+=("$1") ;;
+    esac
+    shift
+done
+
+# Trace:
+set -x
+exec "@ld@" "${argsBefore[@]}" "${argsAfter[@]}"
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/ld-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/ld-wrapper.sh
new file mode 100644
index 000000000000..e54dd6f47146
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/ld-wrapper.sh
@@ -0,0 +1,253 @@
+#! @shell@
+set -eu -o pipefail +o posix
+shopt -s nullglob
+
+if (( "${NIX_DEBUG:-0}" >= 7 )); then
+    set -x
+fi
+
+path_backup="$PATH"
+
+# phase separation makes this look useless
+# shellcheck disable=SC2157
+if [ -n "@coreutils_bin@" ]; then
+    PATH="@coreutils_bin@/bin"
+fi
+
+source @out@/nix-support/utils.bash
+
+if [ -z "${NIX_BINTOOLS_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then
+    source @out@/nix-support/add-flags.sh
+fi
+
+setDynamicLinker=1
+
+# Optionally filter out paths not referring to the store.
+expandResponseParams "$@"
+if [[ "${NIX_ENFORCE_PURITY:-}" = 1 && -n "${NIX_STORE:-}"
+        && ( -z "$NIX_IGNORE_LD_THROUGH_GCC_@suffixSalt@" || -z "${NIX_LDFLAGS_SET_@suffixSalt@:-}" ) ]]; then
+    rest=()
+    nParams=${#params[@]}
+    declare -i n=0
+    while (( "$n" < "$nParams" )); do
+        p=${params[n]}
+        p2=${params[n+1]:-} # handle `p` being last one
+        if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then
+            skip "${p:2}"
+        elif [ "$p" = -L ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "$p" = -rpath ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "$p" = -dynamic-linker ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "${p:0:1}" = / ] && badPath "$p"; then
+            # We cannot skip this; barf.
+            echo "impure path \`$p' used in link" >&2
+            exit 1
+        elif [ "${p:0:9}" = --sysroot ]; then
+            # Our ld is not built with sysroot support (Can we fix that?)
+            :
+        else
+            if [[ "$p" = -static || "$p" = -static-pie ]]; then
+                # Using a dynamic linker for static binaries can lead to crashes.
+                # This was observed for rust binaries.
+                setDynamicLinker=0
+            fi
+            rest+=("$p")
+        fi
+        n+=1
+    done
+    # Old bash empty array hack
+    params=(${rest+"${rest[@]}"})
+fi
+
+source @out@/nix-support/add-hardening.sh
+
+extraAfter=()
+extraBefore=(${hardeningLDFlags[@]+"${hardeningLDFlags[@]}"})
+
+if [ -z "${NIX_LDFLAGS_SET_@suffixSalt@:-}" ]; then
+    extraAfter+=($NIX_LDFLAGS_@suffixSalt@)
+    extraBefore+=($NIX_LDFLAGS_BEFORE_@suffixSalt@)
+    # By adding the dynamic linker to extraBefore we allow users to set their
+    # own dynamic linker, as NIX_LDFLAGS will override flags set earlier.
+    if [[ "$setDynamicLinker" = 1 && -n "$NIX_DYNAMIC_LINKER_@suffixSalt@" ]]; then
+        extraBefore+=("-dynamic-linker" "$NIX_DYNAMIC_LINKER_@suffixSalt@")
+    fi
+fi
+
+extraAfter+=($NIX_LDFLAGS_AFTER_@suffixSalt@)
+
+# These flags *must not* be pulled up to -Wl, flags, so they can't go in
+# add-flags.sh. They must always be set, so must not be disabled by
+# NIX_LDFLAGS_SET.
+if [ -e @out@/nix-support/add-local-ldflags-before.sh ]; then
+    source @out@/nix-support/add-local-ldflags-before.sh
+fi
+
+
+# Specify the target emulation if nothing is passed in ("-m" overrides this
+# environment variable). This ensures we never blindly fall back to targeting
+# the host platform.
+: ${LDEMULATION:=@emulation@}
+
+# Three tasks:
+#
+#   1. Find all -L... switches for rpath
+#
+#   2. Find relocatable flag for build id.
+#
+#   3. Choose 32-bit dynamic linker if needed
+declare -a libDirs
+declare -A libs
+declare -i relocatable=0 link32=0
+
+linkerOutput="a.out"
+
+if
+    [ "$NIX_DONT_SET_RPATH_@suffixSalt@" != 1 ] \
+        || [ "$NIX_SET_BUILD_ID_@suffixSalt@" = 1 ] \
+        || [ -e @out@/nix-support/dynamic-linker-m32 ]
+then
+    prev=
+    # Old bash thinks empty arrays are undefined, ugh.
+    for p in \
+        ${extraBefore+"${extraBefore[@]}"} \
+        ${params+"${params[@]}"} \
+        ${extraAfter+"${extraAfter[@]}"}
+    do
+        case "$prev" in
+            -L)
+                libDirs+=("$p")
+                ;;
+            -l)
+                libs["lib${p}.so"]=1
+                ;;
+            -m)
+                # Presumably only the last `-m` flag has any effect.
+                case "$p" in
+                    elf_i386) link32=1;;
+                    *)        link32=0;;
+                esac
+                ;;
+            -dynamic-linker | -plugin)
+                # Ignore this argument, or it will match *.so and be added to rpath.
+                ;;
+            *)
+                case "$p" in
+                    -L/*)
+                        libDirs+=("${p:2}")
+                        ;;
+                    -l?*)
+                        libs["lib${p:2}.so"]=1
+                        ;;
+                    "${NIX_STORE:-}"/*.so | "${NIX_STORE:-}"/*.so.*)
+                        # This is a direct reference to a shared library.
+                        libDirs+=("${p%/*}")
+                        libs["${p##*/}"]=1
+                        ;;
+                    -r | --relocatable | -i)
+                        relocatable=1
+                esac
+                ;;
+        esac
+        prev="$p"
+    done
+fi
+
+# Determine linkerOutput
+prev=
+for p in \
+    ${extraBefore+"${extraBefore[@]}"} \
+    ${params+"${params[@]}"} \
+    ${extraAfter+"${extraAfter[@]}"}
+do
+    case "$prev" in
+        -o)
+            # Informational for post-link-hook
+            linkerOutput="$p"
+            ;;
+        *)
+            ;;
+    esac
+    prev="$p"
+done
+
+if [[ "$link32" = "1" && "$setDynamicLinker" = 1 && -e "@out@/nix-support/dynamic-linker-m32" ]]; then
+    # We have an alternate 32-bit linker and we're producing a 32-bit ELF, let's
+    # use it.
+    extraAfter+=(
+        '-dynamic-linker'
+        "$(< @out@/nix-support/dynamic-linker-m32)"
+    )
+fi
+
+# Add all used dynamic libraries to the rpath.
+if [ "$NIX_DONT_SET_RPATH_@suffixSalt@" != 1 ]; then
+    # For each directory in the library search path (-L...),
+    # see if it contains a dynamic library used by a -l... flag.  If
+    # so, add the directory to the rpath.
+    # It's important to add the rpath in the order of -L..., so
+    # the link time chosen objects will be those of runtime linking.
+    declare -A rpaths
+    for dir in ${libDirs+"${libDirs[@]}"}; do
+        if [[ "$dir" =~ [/.][/.] ]] && dir2=$(readlink -f "$dir"); then
+            dir="$dir2"
+        fi
+        if [ -n "${rpaths[$dir]:-}" ] || [[ "$dir" != "${NIX_STORE:-}"/* ]]; then
+            # If the path is not in the store, don't add it to the rpath.
+            # This typically happens for libraries in /tmp that are later
+            # copied to $out/lib.  If not, we're screwed.
+            continue
+        fi
+        for path in "$dir"/*; do
+            file="${path##*/}"
+            if [ "${libs[$file]:-}" ]; then
+                # This library may have been provided by a previous directory,
+                # but if that library file is inside an output of the current
+                # derivation, it can be deleted after this compilation and
+                # should be found in a later directory, so we add all
+                # directories that contain any of the libraries to rpath.
+                rpaths["$dir"]=1
+                extraAfter+=(-rpath "$dir")
+                break
+            fi
+        done
+    done
+
+fi
+
+# This is outside the DONT_SET_RPATH branch because it's more targeted and we
+# usually want it (on Darwin) even if DONT_SET_RPATH is set.
+if [ -n "${NIX_COREFOUNDATION_RPATH:-}" ]; then
+  extraAfter+=(-rpath $NIX_COREFOUNDATION_RPATH)
+fi
+
+# Only add --build-id if this is a final link. FIXME: should build gcc
+# with --enable-linker-build-id instead?
+if [ "$NIX_SET_BUILD_ID_@suffixSalt@" = 1 ] && ! (( "$relocatable" )); then
+    extraAfter+=(--build-id)
+fi
+
+
+# Optionally print debug info.
+if (( "${NIX_DEBUG:-0}" >= 1 )); then
+    # Old bash workaround, see above.
+    echo "extra flags before to @prog@:" >&2
+    printf "  %q\n" ${extraBefore+"${extraBefore[@]}"}  >&2
+    echo "original flags to @prog@:" >&2
+    printf "  %q\n" ${params+"${params[@]}"} >&2
+    echo "extra flags after to @prog@:" >&2
+    printf "  %q\n" ${extraAfter+"${extraAfter[@]}"} >&2
+fi
+
+PATH="$path_backup"
+# Old bash workaround, see above.
+@prog@ \
+    ${extraBefore+"${extraBefore[@]}"} \
+    ${params+"${params[@]}"} \
+    ${extraAfter+"${extraAfter[@]}"}
+
+if [ -e "@out@/nix-support/post-link-hook" ]; then
+    source @out@/nix-support/post-link-hook
+fi
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/macos-sierra-reexport-hack.bash b/nixpkgs/pkgs/build-support/bintools-wrapper/macos-sierra-reexport-hack.bash
new file mode 100644
index 000000000000..71b9471cbc83
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/macos-sierra-reexport-hack.bash
@@ -0,0 +1,246 @@
+#! @shell@
+
+set -eu -o pipefail
+
+# For cmd | while read; do ...; done
+shopt -s lastpipe
+
+path_backup="$PATH"
+if [ -n "@coreutils_bin@" ]; then
+  PATH="@coreutils_bin@/bin"
+fi
+
+declare -ri recurThreshold=200
+declare -i overflowCount=0
+
+declare -ar origArgs=("$@")
+
+# Throw away what we won't need
+declare -a parentArgs=()
+
+while (( $# )); do
+    case "$1" in
+        -l)
+            echo "cctools LD does not support '-l foo'" >&2
+            exit 1
+            ;;
+        -lazy_library | -reexport_library | -upward_library | -weak_library)
+            overflowCount+=1
+            shift 2
+            ;;
+        -l* | *.so.* | *.dylib | -lazy-l* | -reexport-l* | -upward-l* | -weak-l*)
+            overflowCount+=1
+            shift 1
+            ;;
+        *.a | *.o)
+            shift 1
+            ;;
+        -L | -F)
+            # Evidently ld doesn't like using the child's RPATH, so it still
+            # needs these.
+            parentArgs+=("$1" "$2")
+            shift 2
+            ;;
+        -L?* | -F?*)
+            parentArgs+=("$1")
+            shift 1
+            ;;
+        -o)
+            outputName="$2"
+            parentArgs+=("$1" "$2")
+            shift 2
+            ;;
+        -install_name | -dylib_install_name | -dynamic-linker | -plugin)
+            parentArgs+=("$1" "$2")
+            shift 2
+            ;;
+        -rpath)
+            # Only an rpath to the child is needed, which we will add
+            shift 2
+            ;;
+        *)
+            if [[ -f "$1" ]]; then
+                # Probably a non-standard object file like Haskell's
+                # `.dyn_o`. Skip it like other inputs.
+                :
+            else
+                parentArgs+=("$1")
+            fi
+            shift 1
+            ;;
+    esac
+done
+
+
+
+if (( "$overflowCount" <= "$recurThreshold" )); then
+    if [ -n "${NIX_DEBUG:-}" ]; then
+        echo "ld-wrapper: Only ${overflowCount} inputs counted while ${recurThreshold} is the ceiling, linking normally. " >&2
+    fi
+    PATH="$path_backup"
+    exec @prog@ "${origArgs[@]}"
+fi
+
+
+
+if [ -n "${NIX_DEBUG:-}" ]; then
+    echo "ld-wrapper: ${overflowCount} inputs counted when ${recurThreshold} is the ceiling, inspecting further. " >&2
+fi
+
+# Collect the normalized linker input
+declare -a norm=()
+
+# Arguments are null-separated
+@prog@ --dump-normalized-lib-args "${origArgs[@]}" |
+    while IFS= read -r -d '' input; do
+        norm+=("$input")
+    done
+
+declare -i leafCount=0
+declare lastLeaf=''
+declare -a childrenInputs=() trailingInputs=()
+while (( "${#norm[@]}" )); do
+    case "${norm[0]}" in
+        -lazy_library | -upward_library)
+            # TODO(@Ericson2314): Don't do that, but intersperse children
+            # between such args.
+            echo "ld-wrapper: Warning: Potentially changing link order" >&2
+            trailingInputs+=("${norm[0]}" "${norm[1]}")
+            norm=("${norm[@]:2}")
+            ;;
+        -reexport_library | -weak_library)
+            childrenInputs+=("${norm[0]}" "${norm[1]}")
+            if [[ "${norm[1]}" != "$lastLeaf" ]]; then
+                leafCount+=1
+                lastLeaf="${norm[1]}"
+            fi
+            norm=("${norm[@]:2}")
+            ;;
+        *.so | *.dylib)
+            childrenInputs+=(-reexport_library "${norm[0]}")
+            if [[ "${norm[0]}" != "$lastLeaf" ]]; then
+                leafCount+=1
+                lastLeaf="${norm[0]}"
+            fi
+            norm=("${norm[@]:1}")
+            ;;
+        *.o | *.a)
+            # Don't delegate object files or static libs
+            parentArgs+=("${norm[0]}")
+            norm=("${norm[@]:1}")
+            ;;
+        *)
+            if [[ -f "${norm[0]}" ]]; then
+                # Probably a non-standard object file. We'll let it through.
+                parentArgs+=("${norm[0]}")
+                norm=("${norm[@]:1}")
+            else
+                echo "ld-wrapper: Internal Error: Invalid normalized argument" >&2
+                exit 1
+            fi
+            ;;
+    esac
+done
+
+
+
+if (( "$leafCount" <= "$recurThreshold" )); then
+    if [ -n "${NIX_DEBUG:-}" ]; then
+        echo "ld-wrapper: Only ${leafCount} *dynamic* inputs counted while ${recurThreshold} is the ceiling, linking normally. " >&2
+    fi
+    PATH="$path_backup"
+    exec @prog@ "${origArgs[@]}"
+fi
+
+
+
+if [ -n "${NIX_DEBUG:-}" ]; then
+    echo "ld-wrapper: ${leafCount} *dynamic* inputs counted when ${recurThreshold} is the ceiling, delegating to children. " >&2
+fi
+
+declare -r outputNameLibless=$( \
+    if [[ -z "${outputName:+isUndefined}" ]]; then
+        echo unnamed
+        exit 0
+    fi
+    baseName=$(basename "${outputName}")
+    if [[ "$baseName" = lib* ]]; then
+        baseName="${baseName:3}"
+    fi
+    echo "$baseName")
+
+declare -ra children=(
+    "$outputNameLibless-reexport-delegate-0"
+    "$outputNameLibless-reexport-delegate-1"
+)
+
+mkdir -p "$out/lib"
+
+symbolBloatObject=$outputNameLibless-symbol-hack.o
+if [[ ! -f $symbolBloatObject ]]; then
+    # `-Q` means use GNU Assembler rather than Clang, avoiding an awkward
+    # dependency cycle.
+    printf '.private_extern _______child_hack_foo\n_______child_hack_foo:\n' |
+        PATH="$PATH:@out@/bin" @targetPrefix@as -Q -- -o $symbolBloatObject
+fi
+
+# Split inputs between children
+declare -a child0Inputs=() child1Inputs=("${childrenInputs[@]}")
+let "countFirstChild = $leafCount / 2" || true
+lastLeaf=''
+while (( "$countFirstChild" )); do
+    case "${child1Inputs[0]}" in
+        -reexport_library | -weak_library)
+            child0Inputs+=("${child1Inputs[0]}" "${child1Inputs[1]}")
+            if [[ "${child1Inputs[1]}" != "$lastLeaf" ]]; then
+                let countFirstChild-=1 || true
+                lastLeaf="${child1Inputs[1]}"
+            fi
+            child1Inputs=("${child1Inputs[@]:2}")
+            ;;
+        *.so | *.dylib)
+            child0Inputs+=(-reexport_library "${child1Inputs[0]}")
+            if [[ "${child1Inputs[0]}" != "$lastLeaf" ]]; then
+                let countFirstChild-=1 || true
+                lastLeaf="${child1Inputs[1]}"
+            fi
+            child1Inputs=("${child1Inputs[@]:2}")
+            ;;
+        *)
+            echo "ld-wrapper: Internal Error: Invalid delegated input" >&2
+            exit 1
+            ;;
+    esac
+done
+
+
+# First half of libs
+@out@/bin/@targetPrefix@ld \
+  -macosx_version_min $MACOSX_DEPLOYMENT_TARGET -arch x86_64 -dylib \
+  -o "$out/lib/lib${children[0]}.dylib" \
+  -install_name "$out/lib/lib${children[0]}.dylib" \
+  "$symbolBloatObject" "${child0Inputs[@]}" "${trailingInputs[@]}"
+
+# Second half of libs
+@out@/bin/@targetPrefix@ld \
+  -macosx_version_min $MACOSX_DEPLOYMENT_TARGET -arch x86_64 -dylib \
+  -o "$out/lib/lib${children[1]}.dylib" \
+  -install_name "$out/lib/lib${children[1]}.dylib" \
+  "$symbolBloatObject" "${child1Inputs[@]}" "${trailingInputs[@]}"
+
+parentArgs+=("-L$out/lib" -rpath "$out/lib")
+if [[ $outputName != *reexport-delegate* ]]; then
+    parentArgs+=("-l${children[0]}" "-l${children[1]}")
+else
+    parentArgs+=("-reexport-l${children[0]}" "-reexport-l${children[1]}")
+fi
+
+parentArgs+=("${trailingInputs[@]}")
+
+if [ -n "${NIX_DEBUG:-}" ]; then
+    echo "flags using delegated children to @prog@:" >&2
+    printf "  %q\n" "${parentArgs[@]}" >&2
+fi
+
+PATH="$path_backup"
+exec @prog@ "${parentArgs[@]}"
diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/setup-hook.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/setup-hook.sh
new file mode 100644
index 000000000000..7e9547b96c25
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/bintools-wrapper/setup-hook.sh
@@ -0,0 +1,72 @@
+# Binutils Wrapper hygiene
+#
+# See comments in cc-wrapper's setup hook. This works exactly the same way.
+
+# Skip setup hook if we're neither a build-time dep, nor, temporarily, doing a
+# native compile.
+#
+# TODO(@Ericson2314): No native exception
+[[ -z ${strictDeps-} ]] || (( "$hostOffset" < 0 )) || return 0
+
+bintoolsWrapper_addLDVars () {
+    # See ../setup-hooks/role.bash
+    local role_post
+    getHostRoleEnvHook
+
+    if [[ -d "$1/lib64" && ! -L "$1/lib64" ]]; then
+        export NIX_LDFLAGS${role_post}+=" -L$1/lib64"
+    fi
+
+    if [[ -d "$1/lib" ]]; then
+        # Don't add the /lib directory if it actually doesn't contain any libraries. For instance,
+        # Python and Haskell packages often only have directories like $out/lib/ghc-8.4.3/ or
+        # $out/lib/python3.6/, so having them in LDFLAGS just makes the linker search unnecessary
+        # directories and bloats the size of the environment variable space.
+        local -a glob=( $1/lib/lib* )
+        if [ "${#glob[*]}" -gt 0 ]; then
+            export NIX_LDFLAGS${role_post}+=" -L$1/lib"
+        fi
+    fi
+}
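+# For example, given a dependency $dep with a populated $dep/lib, the hook
+# above appends " -L$dep/lib" to NIX_LDFLAGS for the matching role suffix.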
+
+# See ../setup-hooks/role.bash
+getTargetRole
+getTargetRoleWrapper
+
+addEnvHooks "$targetOffset" bintoolsWrapper_addLDVars
+
+# shellcheck disable=SC2157
+if [ -n "@bintools_bin@" ]; then
+    addToSearchPath _PATH @bintools_bin@/bin
+fi
+
+# shellcheck disable=SC2157
+if [ -n "@libc_bin@" ]; then
+    addToSearchPath _PATH @libc_bin@/bin
+fi
+
+# shellcheck disable=SC2157
+if [ -n "@coreutils_bin@" ]; then
+    addToSearchPath _PATH @coreutils_bin@/bin
+fi
+
+# Export tool environment variables so various build systems use the right ones.
+
+export NIX_BINTOOLS${role_post}=@out@
+
+for cmd in \
+    ar as ld nm objcopy objdump readelf ranlib strip strings size windres
+do
+    if
+        PATH=$_PATH type -p "@targetPrefix@${cmd}" > /dev/null
+    then
+        export "${cmd^^}${role_post}=@targetPrefix@${cmd}";
+    fi
+done
+
+# If unset, assume the default hardening flags.
+: ${NIX_HARDENING_ENABLE="fortify stackprotector pic strictoverflow format relro bindnow"}
+export NIX_HARDENING_ENABLE
+
+# No local scope in sourced file
+unset -v role_post cmd
diff --git a/nixpkgs/pkgs/build-support/build-bazel-package/default.nix b/nixpkgs/pkgs/build-support/build-bazel-package/default.nix
new file mode 100644
index 000000000000..198b9c3f617f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-bazel-package/default.nix
@@ -0,0 +1,228 @@
+{ stdenv
+, bazel
+, cacert
+, lib
+}:
+
+let
+  bazelPkg = bazel;
+in
+
+args@{
+  name ? "${args.pname}-${args.version}"
+, bazel ? bazelPkg
+, bazelFlags ? []
+, bazelBuildFlags ? []
+, bazelFetchFlags ? []
+, bazelTarget
+, buildAttrs
+, fetchAttrs
+
+# Newer versions of Bazel are moving away from the built-in rules_cc and
+# instead allow fetching it as an external dependency in a WORKSPACE file[1].
+# If it is removed from the fixed-output fetch result, the build phase will
+# fail trying to download it. This can be seen e.g. in #73097
+#
+# This option allows configuring the removal of rules_cc in cases where a
+# project depends on it via an external dependency.
+#
+# [1]: https://github.com/bazelbuild/rules_cc
+, removeRulesCC ? true
+, removeLocalConfigCc ? true
+, removeLocal ? true
+
+# Use `build --nobuild` instead of `fetch`. This fetches only the dependencies
+# required for the configured build, rather than all dependencies, which may
+# not work in some situations (e.g. Java code which ends up relying on
+# Debian-specific /usr/share/java paths, but doesn't in the configured build).
+, fetchConfigured ? true
+
+# Don’t add Bazel --copt and --linkopt from NIX_CFLAGS_COMPILE /
+# NIX_LDFLAGS. This is necessary when using a custom toolchain which
+# Bazel wants all headers / libraries to come from, like when using
+# CROSSTOOL. Weirdly, we can still get the flags through the wrapped
+# compiler.
+, dontAddBazelOpts ? false
+, ...
+}:
+
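+# A minimal usage sketch (hypothetical package, target, and hash, shown only
+# to illustrate how the attributes above fit together):
+#
+#   buildBazelPackage {
+#     pname = "hello-bazel";
+#     version = "0.1.0";
+#     src = ./.;
+#     bazelTarget = "//:hello";
+#     removeRulesCC = false;       # keep rules_cc when the project fetches it
+#     fetchAttrs.sha256 = "...";   # fixed-output hash of the fetched deps
+#     buildAttrs.installPhase = "install -Dm755 bazel-bin/hello $out/bin/hello";
+#   }
+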
+let
+  fArgs = removeAttrs args [ "buildAttrs" "fetchAttrs" "removeRulesCC" ];
+  fBuildAttrs = fArgs // buildAttrs;
+  fFetchAttrs = fArgs // removeAttrs fetchAttrs [ "sha256" ];
+
+in stdenv.mkDerivation (fBuildAttrs // {
+  inherit name bazelFlags bazelBuildFlags bazelFetchFlags bazelTarget;
+
+  deps = stdenv.mkDerivation (fFetchAttrs // {
+    name = "${name}-deps.tar.gz";
+    inherit bazelFlags bazelBuildFlags bazelFetchFlags bazelTarget;
+
+    impureEnvVars = lib.fetchers.proxyImpureEnvVars;
+
+    nativeBuildInputs = fFetchAttrs.nativeBuildInputs or [] ++ [ bazel ];
+
+    preHook = fFetchAttrs.preHook or "" + ''
+      export bazelOut="$(echo ''${NIX_BUILD_TOP}/output | sed -e 's,//,/,g')"
+      export bazelUserRoot="$(echo ''${NIX_BUILD_TOP}/tmp | sed -e 's,//,/,g')"
+      export HOME="$NIX_BUILD_TOP"
+      export USER="nix"
+      # This is needed for git_repository with https remotes
+      export GIT_SSL_CAINFO="${cacert}/etc/ssl/certs/ca-bundle.crt"
+      # This is needed for Bazel fetchers that are themselves programs (e.g.
+      # rules_go using the go toolchain)
+      export SSL_CERT_FILE="${cacert}/etc/ssl/certs/ca-bundle.crt"
+    '';
+
+    buildPhase = fFetchAttrs.buildPhase or ''
+      runHook preBuild
+
+      # Bazel computes the default value of output_user_root before parsing the
+      # flag. The computation of the default value involves getting the $USER
+      # from the environment. That variable is not available when building
+      # with the sandbox enabled. See
+      # https://github.com/bazelbuild/bazel/blob/9323c57607d37f9c949b60e293b573584906da46/src/main/cpp/startup_options.cc#L123-L124
+      #
+      # On macOS Bazel will use the system installed Xcode or CLT toolchain instead of the one in the PATH unless we pass BAZEL_USE_CPP_ONLY_TOOLCHAIN
+
+      # We disable multithreading for the fetching phase since it can lead to timeouts with many dependencies/threads:
+      # https://github.com/bazelbuild/bazel/issues/6502
+      BAZEL_USE_CPP_ONLY_TOOLCHAIN=1 \
+      USER=homeless-shelter \
+      bazel \
+        --output_base="$bazelOut" \
+        --output_user_root="$bazelUserRoot" \
+        ${if fetchConfigured then "build --nobuild" else "fetch"} \
+        --loading_phase_threads=1 \
+        $bazelFlags \
+        $bazelFetchFlags \
+        $bazelTarget
+
+      runHook postBuild
+    '';
+
+    installPhase = fFetchAttrs.installPhase or ''
+      runHook preInstall
+
+      # Remove all built-in external workspaces; Bazel will recreate them when building
+      rm -rf $bazelOut/external/{bazel_tools,\@bazel_tools.marker}
+      ${if removeRulesCC then "rm -rf $bazelOut/external/{rules_cc,\\@rules_cc.marker}" else ""}
+      rm -rf $bazelOut/external/{embedded_jdk,\@embedded_jdk.marker}
+      ${if removeLocalConfigCc then "rm -rf $bazelOut/external/{local_config_cc,\\@local_config_cc.marker}" else ""}
+      ${if removeLocal then "rm -rf $bazelOut/external/{local_*,\\@local_*.marker}" else ""}
+
+      # Clear markers
+      find $bazelOut/external -name '@*\.marker' -exec sh -c 'echo > {}' \;
+
+      # Remove all vcs files
+      rm -rf $(find $bazelOut/external -type d -name .git)
+      rm -rf $(find $bazelOut/external -type d -name .svn)
+      rm -rf $(find $bazelOut/external -type d -name .hg)
+
+      # Removing top-level symlinks along with their markers.
+      # This is needed because they sometimes point to temporary paths (?).
+      # For example, in Tensorflow-gpu build:
+      # platforms -> NIX_BUILD_TOP/tmp/install/35282f5123611afa742331368e9ae529/_embedded_binaries/platforms
+      find $bazelOut/external -maxdepth 1 -type l | while read symlink; do
+        name="$(basename "$symlink")"
+        rm "$symlink"
+        test -f "$bazelOut/external/@$name.marker" && rm "$bazelOut/external/@$name.marker" || true
+      done
+
+      # Patching symlinks to remove build directory reference
+      find $bazelOut/external -type l | while read symlink; do
+        new_target="$(readlink "$symlink" | sed "s,$NIX_BUILD_TOP,NIX_BUILD_TOP,")"
+        rm "$symlink"
+        ln -sf "$new_target" "$symlink"
+      done
+
+      echo '${bazel.name}' > $bazelOut/external/.nix-bazel-version
+
+      (cd $bazelOut/ && tar czf $out --sort=name --mtime='@1' --owner=0 --group=0 --numeric-owner external/)
+
+      runHook postInstall
+    '';
+
+    dontFixup = true;
+    allowedRequisites = [];
+
+    outputHashAlgo = "sha256";
+    outputHash = fetchAttrs.sha256;
+  });
+
+  nativeBuildInputs = fBuildAttrs.nativeBuildInputs or [] ++ [ (bazel.override { enableNixHacks = true; }) ];
+
+  preHook = fBuildAttrs.preHook or "" + ''
+    export bazelOut="$NIX_BUILD_TOP/output"
+    export bazelUserRoot="$NIX_BUILD_TOP/tmp"
+    export HOME="$NIX_BUILD_TOP"
+  '';
+
+  preConfigure = ''
+    mkdir -p "$bazelOut"
+
+    (cd $bazelOut && tar xfz $deps)
+
+    test "${bazel.name}" = "$(<$bazelOut/external/.nix-bazel-version)" || {
+      echo "fixed output derivation was built for a different bazel version" >&2
+      echo "     got: $(<$bazelOut/external/.nix-bazel-version)" >&2
+      echo "expected: ${bazel.name}" >&2
+      exit 1
+    }
+
+    chmod -R +w $bazelOut
+    find $bazelOut -type l | while read symlink; do
+      if [[ $(readlink "$symlink") == *NIX_BUILD_TOP* ]]; then
+        ln -sf $(readlink "$symlink" | sed "s,NIX_BUILD_TOP,$NIX_BUILD_TOP,") "$symlink"
+      fi
+    done
+  '' + fBuildAttrs.preConfigure or "";
+
+  inherit dontAddBazelOpts;
+
+  buildPhase = fBuildAttrs.buildPhase or ''
+    runHook preBuild
+
+    # Bazel sandboxes the execution of the tools it invokes, so even though we are
+    # calling the correct nix wrappers, the values of the environment variables
+    # the wrappers are expecting will not be set. So instead of relying on the
+    # wrappers picking them up, pass them in explicitly via `--copt`, `--linkopt`
+    # and related flags.
+    #
+    copts=()
+    host_copts=()
+    linkopts=()
+    host_linkopts=()
+    if [ -z "''${dontAddBazelOpts:-}" ]; then
+      for flag in $NIX_CFLAGS_COMPILE; do
+        copts+=( "--copt=$flag" )
+        host_copts+=( "--host_copt=$flag" )
+      done
+      for flag in $NIX_CXXSTDLIB_COMPILE; do
+        copts+=( "--copt=$flag" )
+        host_copts+=( "--host_copt=$flag" )
+      done
+      for flag in $NIX_LDFLAGS; do
+        linkopts+=( "--linkopt=-Wl,$flag" )
+        host_linkopts+=( "--host_linkopt=-Wl,$flag" )
+      done
+    fi
+
+    BAZEL_USE_CPP_ONLY_TOOLCHAIN=1 \
+    USER=homeless-shelter \
+    bazel \
+      --output_base="$bazelOut" \
+      --output_user_root="$bazelUserRoot" \
+      build \
+      -j $NIX_BUILD_CORES \
+      "''${copts[@]}" \
+      "''${host_copts[@]}" \
+      "''${linkopts[@]}" \
+      "''${host_linkopts[@]}" \
+      $bazelFlags \
+      $bazelBuildFlags \
+      $bazelTarget
+
+    runHook postBuild
+  '';
+})
diff --git a/nixpkgs/pkgs/build-support/build-dotnet-package/default.nix b/nixpkgs/pkgs/build-support/build-dotnet-package/default.nix
new file mode 100644
index 000000000000..440b10044f0f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-dotnet-package/default.nix
@@ -0,0 +1,116 @@
+{ stdenv, lib, makeWrapper, pkg-config, mono, dotnetbuildhelpers }:
+
+attrsOrig @
+{ baseName
+, version
+, buildInputs ? []
+, xBuildFiles ? [ ]
+, xBuildFlags ? [ "/p:Configuration=Release" ]
+, outputFiles ? [ "bin/Release/*" ]
+, dllFiles ? [ "*.dll" ]
+, exeFiles ? [ "*.exe" ]
+# Additional arguments to pass to the makeWrapper function, which wraps
+# generated binaries.
+, makeWrapperArgs ? [ ]
+, ... }:
+  let
+    arrayToShell = (a: toString (map (lib.escape (lib.stringToCharacters "\\ ';$`()|<>\t") ) a));
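+    # For example, arrayToShell [ "/p:Config=Release" "a b" ] yields the string
+    # "/p:Config=Release a\ b", escaping shell-special characters.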
+
+    attrs = {
+      name = "${baseName}-${version}";
+
+      nativeBuildInputs = [ pkg-config ];
+      buildInputs = [
+        mono
+        dotnetbuildhelpers
+        makeWrapper
+      ] ++ buildInputs;
+
+      configurePhase = ''
+        runHook preConfigure
+
+        [ -z "''${dontPlacateNuget-}" ] && placate-nuget.sh
+        [ -z "''${dontPlacatePaket-}" ] && placate-paket.sh
+        [ -z "''${dontPatchFSharpTargets-}" ] && patch-fsharp-targets.sh
+
+        runHook postConfigure
+      '';
+
+      buildPhase = ''
+        runHook preBuild
+
+        echo Building dotNET packages...
+
+        # Probably needs to be moved to fsharp
+        if pkg-config FSharp.Core
+        then
+          export FSharpTargetsPath="$(dirname $(pkg-config FSharp.Core --variable=Libraries))/Microsoft.FSharp.Targets"
+        fi
+
+        ran=""
+        for xBuildFile in ${arrayToShell xBuildFiles} ''${xBuildFilesExtra}
+        do
+          ran="yes"
+          xbuild ${arrayToShell xBuildFlags} ''${xBuildFlagsArray} $xBuildFile
+        done
+
+        [ -z "$ran" ] && xbuild ${arrayToShell xBuildFlags} ''${xBuildFlagsArray}
+
+        runHook postBuild
+      '';
+
+      dontStrip = true;
+
+      installPhase = ''
+        runHook preInstall
+
+        target="$out/lib/dotnet/${baseName}"
+        mkdir -p "$target"
+
+        cp -rv ${arrayToShell outputFiles} "''${outputFilesArray[@]}" "$target"
+
+        if [ -z "''${dontRemoveDuplicatedDlls-}" ]
+        then
+          pushd "$out"
+          remove-duplicated-dlls.sh
+          popd
+        fi
+
+        set -f
+        for dllPattern in ${arrayToShell dllFiles} ''${dllFilesArray[@]}
+        do
+          set +f
+          for dll in "$target"/$dllPattern
+          do
+            [ -f "$dll" ] || continue
+            if pkg-config $(basename -s .dll "$dll")
+            then
+              echo "$dll already exported by a buildInputs, not re-exporting"
+            else
+              ${dotnetbuildhelpers}/bin/create-pkg-config-for-dll.sh "$out/lib/pkgconfig" "$dll"
+            fi
+          done
+        done
+
+        set -f
+        for exePattern in ${arrayToShell exeFiles} ''${exeFilesArray[@]}
+        do
+          set +f
+          for exe in "$target"/$exePattern
+          do
+            [ -f "$exe" ] || continue
+            mkdir -p "$out"/bin
+            commandName="$(basename -s .exe "$(echo "$exe" | tr "[A-Z]" "[a-z]")")"
+            makeWrapper \
+              "${mono}/bin/mono" \
+              "$out"/bin/"$commandName" \
+              --add-flags "\"$exe\"" \
+              ''${makeWrapperArgs}
+          done
+        done
+
+        runHook postInstall
+      '';
+    };
+  in
+    stdenv.mkDerivation (attrs // (builtins.removeAttrs attrsOrig [ "buildInputs" ] ))
diff --git a/nixpkgs/pkgs/build-support/build-fhs-userenv-bubblewrap/default.nix b/nixpkgs/pkgs/build-support/build-fhs-userenv-bubblewrap/default.nix
new file mode 100644
index 000000000000..868686bd5c01
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-fhs-userenv-bubblewrap/default.nix
@@ -0,0 +1,194 @@
+{ lib, callPackage, runCommandLocal, writeShellScriptBin, glibc, pkgsi686Linux, coreutils, bubblewrap }:
+
+let buildFHSEnv = callPackage ./env.nix { }; in
+
+args @ {
+  name
+, runScript ? "bash"
+, extraInstallCommands ? ""
+, meta ? {}
+, passthru ? {}
+, unshareUser ? true
+, unshareIpc ? true
+, unsharePid ? true
+, unshareNet ? false
+, unshareUts ? true
+, unshareCgroup ? true
+, dieWithParent ? true
+, ...
+}:
+
+with builtins;
+let
+  env = buildFHSEnv (removeAttrs args [
+    "runScript" "extraInstallCommands" "meta" "passthru" "dieWithParent"
+    "unshareUser" "unshareCgroup" "unshareUts" "unshareNet" "unsharePid" "unshareIpc"
+  ]);
+
+  etcBindFlags = let
+    files = [
+      # NixOS Compatibility
+      "static"
+      "nix" # mainly for nixUnstable users, but also for access to nix/netrc
+      # Shells
+      "bashrc"
+      "zshenv"
+      "zshrc"
+      "zinputrc"
+      "zprofile"
+      # Users, Groups, NSS
+      "passwd"
+      "group"
+      "shadow"
+      "hosts"
+      "resolv.conf"
+      "nsswitch.conf"
+      # User profiles
+      "profiles"
+      # Sudo & Su
+      "login.defs"
+      "sudoers"
+      "sudoers.d"
+      # Time
+      "localtime"
+      "zoneinfo"
+      # Other Core Stuff
+      "machine-id"
+      "os-release"
+      # PAM
+      "pam.d"
+      # Fonts
+      "fonts"
+      # ALSA
+      "asound.conf"
+      # SSL
+      "ssl/certs"
+      "pki"
+    ];
+  in concatStringsSep "\n  "
+  (map (file: "--ro-bind-try /etc/${file} /etc/${file}") files);
+
+  # Create this on the fly instead of linking from /nix
+  # The container might have to modify it and re-run ldconfig if there are
+  # issues running some binary with LD_LIBRARY_PATH
+  createLdConfCache = ''
+    cat > /etc/ld.so.conf <<EOF
+    /lib
+    /lib/x86_64-linux-gnu
+    /lib64
+    /usr/lib
+    /usr/lib/x86_64-linux-gnu
+    /usr/lib64
+    /lib/i386-linux-gnu
+    /lib32
+    /usr/lib/i386-linux-gnu
+    /usr/lib32
+    EOF
+    ldconfig &> /dev/null
+  '';
+  init = run: writeShellScriptBin "${name}-init" ''
+    source /etc/profile
+    ${createLdConfCache}
+    exec ${run} "$@"
+  '';
+
+  bwrapCmd = { initArgs ? "" }: ''
+    blacklist=(/nix /dev /proc /etc)
+    ro_mounts=()
+    symlinks=()
+    for i in ${env}/*; do
+      path="/''${i##*/}"
+      if [[ $path == '/etc' ]]; then
+        :
+      elif [[ -L $i ]]; then
+        symlinks+=(--symlink "$(${coreutils}/bin/readlink "$i")" "$path")
+        blacklist+=("$path")
+      else
+        ro_mounts+=(--ro-bind "$i" "$path")
+        blacklist+=("$path")
+      fi
+    done
+
+    if [[ -d ${env}/etc ]]; then
+      for i in ${env}/etc/*; do
+        path="/''${i##*/}"
+        # NOTE: we're binding /etc/fonts and /etc/ssl/certs from the host so we
+        # don't want to override it with a path from the FHS environment.
+        if [[ $path == '/fonts' || $path == '/ssl' ]]; then
+          continue
+        fi
+        ro_mounts+=(--ro-bind "$i" "/etc$path")
+      done
+    fi
+
+    declare -a auto_mounts
+    # loop through all directories in the root
+    for dir in /*; do
+      # if it is a directory and it is not in the blacklist
+      if [[ -d "$dir" ]] && [[ ! "''${blacklist[@]}" =~ "$dir" ]]; then
+        # add it to the mount list
+        auto_mounts+=(--bind "$dir" "$dir")
+      fi
+    done
+
+    cmd=(
+      ${bubblewrap}/bin/bwrap
+      --dev-bind /dev /dev
+      --proc /proc
+      --chdir "$(pwd)"
+      ${lib.optionalString unshareUser "--unshare-user"}
+      ${lib.optionalString unshareIpc "--unshare-ipc"}
+      ${lib.optionalString unsharePid "--unshare-pid"}
+      ${lib.optionalString unshareNet "--unshare-net"}
+      ${lib.optionalString unshareUts "--unshare-uts"}
+      ${lib.optionalString unshareCgroup "--unshare-cgroup"}
+      ${lib.optionalString dieWithParent "--die-with-parent"}
+      --ro-bind /nix /nix
+      # Our glibc will look for the cache in its own path in `/nix/store`.
+      # As such, we need a cache to exist there, because pressure-vessel
+      # depends on the existence of an ld cache. However, adding one
+      # globally proved to be a bad idea (see #100655); the solution we
+      # settled on is mounting one via bwrap.
+      # Also, the cache needs to go to both 32 and 64 bit glibcs, for games
+      # of both architectures to work.
+      --tmpfs ${glibc}/etc \
+      --symlink /etc/ld.so.conf ${glibc}/etc/ld.so.conf \
+      --symlink /etc/ld.so.cache ${glibc}/etc/ld.so.cache \
+      --ro-bind ${glibc}/etc/rpc ${glibc}/etc/rpc \
+      --remount-ro ${glibc}/etc \
+      --tmpfs ${pkgsi686Linux.glibc}/etc \
+      --symlink /etc/ld.so.conf ${pkgsi686Linux.glibc}/etc/ld.so.conf \
+      --symlink /etc/ld.so.cache ${pkgsi686Linux.glibc}/etc/ld.so.cache \
+      --ro-bind ${pkgsi686Linux.glibc}/etc/rpc ${pkgsi686Linux.glibc}/etc/rpc \
+      --remount-ro ${pkgsi686Linux.glibc}/etc \
+      ${etcBindFlags}
+      "''${ro_mounts[@]}"
+      "''${symlinks[@]}"
+      "''${auto_mounts[@]}"
+      ${init runScript}/bin/${name}-init ${initArgs}
+    )
+    exec "''${cmd[@]}"
+  '';
+
+  bin = writeShellScriptBin name (bwrapCmd { initArgs = ''"$@"''; });
+
+in runCommandLocal name {
+  inherit meta;
+
+  passthru = passthru // {
+    env = runCommandLocal "${name}-shell-env" {
+      shellHook = bwrapCmd {};
+    } ''
+      echo >&2 ""
+      echo >&2 "*** User chroot 'env' attributes are intended for interactive nix-shell sessions, not for building! ***"
+      echo >&2 ""
+      exit 1
+    '';
+  };
+} ''
+  mkdir -p $out/bin
+  ln -s ${bin}/bin/${name} $out/bin/${name}
+  ${extraInstallCommands}
+''
diff --git a/nixpkgs/pkgs/build-support/build-fhs-userenv-bubblewrap/env.nix b/nixpkgs/pkgs/build-support/build-fhs-userenv-bubblewrap/env.nix
new file mode 100644
index 000000000000..b9c719a4c78b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-fhs-userenv-bubblewrap/env.nix
@@ -0,0 +1,184 @@
+{ stdenv, lib, buildEnv, writeText, writeShellScriptBin, pkgs, pkgsi686Linux }:
+
+{ name, profile ? ""
+, targetPkgs ? pkgs: [], multiPkgs ? pkgs: []
+, extraBuildCommands ? "", extraBuildCommandsMulti ? ""
+, extraOutputsToInstall ? []
+}:
+
+# HOWTO:
+# All packages (most likely programs) returned from targetPkgs will only be
+# installed once--matching the host's architecture (64bit on x86_64 and 32bit on
+# x86).
+#
+# Packages (most likely libraries) returned from multiPkgs are installed
+# once on x86 systems and twice on x86_64 systems.
+# On x86 they are merged with packages from targetPkgs.
+# On x86_64 they are added to targetPkgs and in addition their 32bit
+# versions are also installed. The final directory structure looks as
+# follows:
+# /lib32 will include 32bit libraries from multiPkgs
+# /lib64 will include 64bit libraries from multiPkgs and targetPkgs
+# /lib will link to /lib64
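+#
+# A minimal usage sketch (hypothetical name and package picks, using the
+# arguments defined above; 32-bit variants come from pkgsi686Linux):
+#
+#   buildFHSEnv {
+#     name = "example-fhs";
+#     targetPkgs = pkgs: [ pkgs.coreutils ];  # installed once, host arch
+#     multiPkgs  = pkgs: [ pkgs.zlib ];       # installed for 64 bit and 32 bit
+#   }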
+
+let
+  is64Bit = stdenv.hostPlatform.parsed.cpu.bits == 64;
+  isMultiBuild  = multiPkgs != null && is64Bit;
+  isTargetBuild = !isMultiBuild;
+
+  # list of packages (usually programs) which are only installed for the
+  # host's architecture
+  targetPaths = targetPkgs pkgs ++ (if multiPkgs == null then [] else multiPkgs pkgs);
+
+  # list of packages which are installed for both x86 and x86_64 on x86_64
+  # systems
+  multiPaths = multiPkgs pkgsi686Linux;
+
+  # base packages of the chroot
+  # these match the host's architecture, glibc_multi is used for multilib
+  # builds. glibcLocales must come before glibc or glibc_multi, as otherwise
+  # the wrong LOCALE_ARCHIVE will be used where only C.UTF-8 is available.
+  basePkgs = with pkgs;
+    [ glibcLocales
+      (if isMultiBuild then glibc_multi else glibc)
+      (toString gcc.cc.lib) bashInteractive coreutils less shadow su
+      gawk diffutils findutils gnused gnugrep
+      gnutar gzip bzip2 xz
+    ];
+  baseMultiPkgs = with pkgsi686Linux;
+    [ (toString gcc.cc.lib)
+    ];
+
+  ldconfig = writeShellScriptBin "ldconfig" ''
+    exec ${pkgs.glibc.bin}/bin/ldconfig -f /etc/ld.so.conf -C /etc/ld.so.cache "$@"
+  '';
+  etcProfile = writeText "profile" ''
+    export PS1='${name}-chrootenv:\u@\h:\w\$ '
+    export LOCALE_ARCHIVE='/usr/lib/locale/locale-archive'
+    export LD_LIBRARY_PATH="/run/opengl-driver/lib:/run/opengl-driver-32/lib:/usr/lib:/usr/lib32''${LD_LIBRARY_PATH:+:}$LD_LIBRARY_PATH"
+    export PATH="/run/wrappers/bin:/usr/bin:/usr/sbin:$PATH"
+    export TZDIR='/etc/zoneinfo'
+
+    # Force compilers and other tools to look in default search paths
+    unset NIX_ENFORCE_PURITY
+    export NIX_CC_WRAPPER_TARGET_HOST_${stdenv.cc.suffixSalt}=1
+    export NIX_CFLAGS_COMPILE='-idirafter /usr/include'
+    export NIX_CFLAGS_LINK='-L/usr/lib -L/usr/lib32'
+    export NIX_LDFLAGS='-L/usr/lib -L/usr/lib32'
+    export PKG_CONFIG_PATH=/usr/lib/pkgconfig
+    export ACLOCAL_PATH=/usr/share/aclocal
+
+    ${profile}
+  '';
+
+  # Compose /etc for the chroot environment
+  etcPkg = stdenv.mkDerivation {
+    name         = "${name}-chrootenv-etc";
+    buildCommand = ''
+      mkdir -p $out/etc
+      cd $out/etc
+
+      # environment variables
+      ln -s ${etcProfile} profile
+
+      # symlink /etc/mtab -> /proc/mounts (compat for old userspace progs)
+      ln -s /proc/mounts mtab
+    '';
+  };
+
+  # Composes a /usr-like directory structure
+  staticUsrProfileTarget = buildEnv {
+    name = "${name}-usr-target";
+    # ldconfig wrapper must come first so it overrides the original ldconfig
+    paths = [ etcPkg ldconfig ] ++ basePkgs ++ targetPaths;
+    extraOutputsToInstall = [ "out" "lib" "bin" ] ++ extraOutputsToInstall;
+    ignoreCollisions = true;
+  };
+
+  staticUsrProfileMulti = buildEnv {
+    name = "${name}-usr-multi";
+    paths = baseMultiPkgs ++ multiPaths;
+    extraOutputsToInstall = [ "out" "lib" ] ++ extraOutputsToInstall;
+    ignoreCollisions = true;
+  };
+
+  # setup library paths only for the targeted architecture
+  setupLibDirsTarget = ''
+    # link content of targetPaths
+    cp -rsHf ${staticUsrProfileTarget}/lib lib
+    ln -s lib lib${if is64Bit then "64" else "32"}
+  '';
+
+  # setup /lib, /lib32 and /lib64
+  setupLibDirsMulti = ''
+    mkdir -m0755 lib32
+    mkdir -m0755 lib64
+    ln -s lib64 lib
+
+    # copy glibc stuff
+    cp -rsHf ${staticUsrProfileTarget}/lib/32/* lib32/ && chmod u+w -R lib32/
+
+    # copy content of multiPaths (32bit libs)
+    [ -d ${staticUsrProfileMulti}/lib ] && cp -rsHf ${staticUsrProfileMulti}/lib/* lib32/ && chmod u+w -R lib32/
+
+    # copy content of targetPaths (64bit libs)
+    cp -rsHf ${staticUsrProfileTarget}/lib/* lib64/ && chmod u+w -R lib64/
+
+    # symlink 32-bit ld-linux.so
+    ln -Ls ${staticUsrProfileTarget}/lib/32/ld-linux.so.2 lib/
+  '';
+
+  setupLibDirs = if isTargetBuild then setupLibDirsTarget
+                                  else setupLibDirsMulti;
+
+  # the target profile is the actual profile that will be used for the chroot
+  setupTargetProfile = ''
+    mkdir -m0755 usr
+    cd usr
+    ${setupLibDirs}
+    ${lib.optionalString isMultiBuild ''
+    if [ -d "${staticUsrProfileMulti}/share" ]; then
+      cp -rLf ${staticUsrProfileMulti}/share share
+    fi
+    ''}
+    if [ -d "${staticUsrProfileTarget}/share" ]; then
+      if [ -d share ]; then
+        chmod -R 755 share
+        cp -rLTf ${staticUsrProfileTarget}/share share
+      else
+        cp -rLf ${staticUsrProfileTarget}/share share
+      fi
+    fi
+    for i in bin sbin include; do
+      if [ -d "${staticUsrProfileTarget}/$i" ]; then
+        cp -rsHf "${staticUsrProfileTarget}/$i" "$i"
+      fi
+    done
+    cd ..
+
+    for i in var etc; do
+      if [ -d "${staticUsrProfileTarget}/$i" ]; then
+        cp -rsHf "${staticUsrProfileTarget}/$i" "$i"
+      fi
+    done
+    for i in usr/{bin,sbin,lib,lib32,lib64}; do
+      if [ -d "$i" ]; then
+        ln -s "$i"
+      fi
+    done
+  '';
+
+in stdenv.mkDerivation {
+  name         = "${name}-fhs";
+  buildCommand = ''
+    mkdir -p $out
+    cd $out
+    ${setupTargetProfile}
+    cd $out
+    ${extraBuildCommands}
+    cd $out
+    ${if isMultiBuild then extraBuildCommandsMulti else ""}
+  '';
+  preferLocalBuild = true;
+  allowSubstitutes = false;
+}
diff --git a/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/chrootenv.c b/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/chrootenv.c
new file mode 100644
index 000000000000..27e70e3fe5c4
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/chrootenv.c
@@ -0,0 +1,169 @@
+#define _GNU_SOURCE
+
+#include <glib.h>
+#include <glib/gstdio.h>
+
+#include <errno.h>
+#include <sched.h>
+#include <unistd.h>
+
+#include <sys/mount.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <sys/syscall.h>
+
+#define fail(s, err) g_error("%s: %s: %s", __func__, s, g_strerror(err))
+#define fail_if(expr)                                                          \
+  if (expr)                                                                    \
+    fail(#expr, errno);
+
+const gchar *bind_blacklist[] = {"bin", "etc", "host", "real-host", "usr", "lib", "lib64", "lib32", "sbin", NULL};
+
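+// glibc historically provides no wrapper for pivot_root(2), so invoke it
+// via syscall(2).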
+int pivot_root(const char *new_root, const char *put_old) {
+  return syscall(SYS_pivot_root, new_root, put_old);
+}
+
+void mount_tmpfs(const gchar *target) {
+  fail_if(mount("none", target, "tmpfs", 0, NULL));
+}
+
+void bind_mount(const gchar *source, const gchar *target) {
+  fail_if(g_mkdir(target, 0755));
+  fail_if(mount(source, target, NULL, MS_BIND | MS_REC, NULL));
+}
+
+const gchar *create_tmpdir() {
+  gchar *prefix =
+      g_build_filename(g_get_tmp_dir(), "chrootenvXXXXXX", NULL);
+  fail_if(!g_mkdtemp_full(prefix, 0755));
+  return prefix;
+}
+
+void pivot_host(const gchar *guest) {
+  g_autofree gchar *point = g_build_filename(guest, "host", NULL);
+  fail_if(g_mkdir(point, 0755));
+  fail_if(pivot_root(guest, point));
+}
+
+void bind_mount_item(const gchar *host, const gchar *guest, const gchar *name) {
+  g_autofree gchar *source = g_build_filename(host, name, NULL);
+  g_autofree gchar *target = g_build_filename(guest, name, NULL);
+
+  if (G_LIKELY(g_file_test(source, G_FILE_TEST_IS_DIR)))
+    bind_mount(source, target);
+}
+
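+// Build the guest root: mount a tmpfs at `guest`, pivot the old root to
+// /host, then bind-mount every top-level host directory not listed in
+// bind_blacklist into the new root.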
+void bind(const gchar *host, const gchar *guest) {
+  mount_tmpfs(guest);
+
+  pivot_host(guest);
+
+  g_autofree gchar *host_dir = g_build_filename("/host", host, NULL);
+
+  g_autoptr(GError) err = NULL;
+  g_autoptr(GDir) dir = g_dir_open(host_dir, 0, &err);
+
+  if (err != NULL)
+    fail("g_dir_open", errno);
+
+  const gchar *item;
+
+  while ((item = g_dir_read_name(dir)))
+    if (!g_strv_contains(bind_blacklist, item))
+      bind_mount_item(host_dir, "/", item);
+}
+
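+// Write a printf-style formatted string to the file at `path`; used below
+// to set up the uid/gid maps of the user namespace.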
+void spit(const char *path, char *fmt, ...) {
+  va_list args;
+  va_start(args, fmt);
+
+  FILE *f = g_fopen(path, "w");
+
+  if (f == NULL)
+    fail("g_fopen", errno);
+
+  g_vfprintf(f, fmt, args);
+  va_end(args);
+  fclose(f);
+}
+
+int main(gint argc, gchar **argv) {
+  const gchar *self = *argv++;
+
+  if (argc < 2) {
+    g_message("%s command [arguments...]", self);
+    return 1;
+  }
+
+  g_autofree const gchar *prefix = create_tmpdir();
+
+  pid_t cpid = fork();
+
+  if (cpid < 0)
+    fail("fork", errno);
+
+  else if (cpid == 0) {
+    uid_t uid = getuid();
+    gid_t gid = getgid();
+
+    int namespaces = CLONE_NEWNS;
+    if (uid != 0) {
+      namespaces |= CLONE_NEWUSER;
+    }
+    if (unshare(namespaces) < 0) {
+      int unshare_errno = errno;
+
+      g_message("Requires Linux version >= 3.19 built with CONFIG_USER_NS");
+      if (g_file_test("/proc/sys/kernel/unprivileged_userns_clone",
+                      G_FILE_TEST_EXISTS))
+        g_message("Run: sudo sysctl -w kernel.unprivileged_userns_clone=1");
+
+      fail("unshare", unshare_errno);
+    }
+
+    // hide all mounts we do from the parent
+    fail_if(mount(0, "/", 0, MS_PRIVATE | MS_REC, 0));
+
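+    // Map the outer uid/gid onto the same ids inside the new user namespace.
+    // "deny" must be written to setgroups first, otherwise writing gid_map
+    // is rejected on kernels >= 3.19.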
+    if (uid != 0) {
+      spit("/proc/self/setgroups", "deny");
+      spit("/proc/self/uid_map", "%d %d 1", uid, uid);
+      spit("/proc/self/gid_map", "%d %d 1", gid, gid);
+    }
+
+    // If there is a /host directory, assume this is a nested chrootenv and use it as the host instead.
+    gboolean nested_host = g_file_test("/host", G_FILE_TEST_EXISTS | G_FILE_TEST_IS_DIR);
+    g_autofree const gchar *host = nested_host ? "/host" : "/";
+
+    bind(host, prefix);
+
+    // Replace /host by an actual (inner) /host.
+    if (nested_host) {
+      fail_if(g_mkdir("/real-host", 0755));
+      fail_if(mount("/host/host", "/real-host", NULL, MS_BIND | MS_REC, NULL));
+      // For some reason umount("/host") returns EBUSY even immediately after
+      // pivot_root. We detach it at least to keep `/proc/mounts` from blowing
+      // up in nested cases.
+      fail_if(umount2("/host", MNT_DETACH));
+      fail_if(mount("/real-host", "/host", NULL, MS_MOVE, NULL));
+      fail_if(rmdir("/real-host"));
+    }
+
+    fail_if(chdir("/"));
+    fail_if(execvp(*argv, argv));
+  }
+
+  else {
+    int status;
+
+    fail_if(waitpid(cpid, &status, 0) != cpid);
+    fail_if(rmdir(prefix));
+
+    if (WIFEXITED(status))
+      return WEXITSTATUS(status);
+
+    else if (WIFSIGNALED(status))
+      kill(getpid(), WTERMSIG(status));
+
+    return 1;
+  }
+}
diff --git a/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/default.nix b/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/default.nix
new file mode 100644
index 000000000000..15d7b3153580
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/default.nix
@@ -0,0 +1,16 @@
+{ lib, stdenv, meson, ninja, pkg-config, glib }:
+
+stdenv.mkDerivation {
+  name = "chrootenv";
+  src = ./.;
+
+  nativeBuildInputs = [ meson ninja pkg-config ];
+  buildInputs = [ glib ];
+
+  meta = with lib; {
+    description = "Setup mount/user namespace for FHS emulation";
+    license = licenses.mit;
+    maintainers = with maintainers; [ yegortimoshenko ];
+    platforms = platforms.linux;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/meson.build b/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/meson.build
new file mode 100644
index 000000000000..6d0770a0dc4a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-fhs-userenv/chrootenv/meson.build
@@ -0,0 +1,5 @@
+project('chrootenv', 'c')
+
+glib = dependency('glib-2.0')
+
+executable('chrootenv', 'chrootenv.c', dependencies: [glib], install: true)
diff --git a/nixpkgs/pkgs/build-support/build-fhs-userenv/default.nix b/nixpkgs/pkgs/build-support/build-fhs-userenv/default.nix
new file mode 100644
index 000000000000..e7db6a75297d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-fhs-userenv/default.nix
@@ -0,0 +1,49 @@
+{ callPackage, runCommandLocal, writeScript, stdenv, coreutils }:
+
+let buildFHSEnv = callPackage ./env.nix { }; in
+
+args@{ name, runScript ? "bash", extraInstallCommands ? "", meta ? {}, passthru ? {}, ... }:
+
+let
+  env = buildFHSEnv (removeAttrs args [ "runScript" "extraInstallCommands" "meta" "passthru" ]);
+
+  chrootenv = callPackage ./chrootenv {};
+
+  init = run: writeScript "${name}-init" ''
+    #! ${stdenv.shell}
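+    # Symlink each top-level entry of the env and of the host into /,
+    # skipping names that already exist, then run the requested command.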
+    for i in ${env}/* /host/*; do
+      path="/''${i##*/}"
+      [ -e "$path" ] || ${coreutils}/bin/ln -s "$i" "$path"
+    done
+
+    [ -d "$1" ] && [ -r "$1" ] && cd "$1"
+    shift
+
+    source /etc/profile
+    exec ${run} "$@"
+  '';
+
+in runCommandLocal name {
+  inherit meta;
+
+  passthru = passthru // {
+    env = runCommandLocal "${name}-shell-env" {
+      shellHook = ''
+        exec ${chrootenv}/bin/chrootenv ${init runScript} "$(pwd)"
+      '';
+    } ''
+      echo >&2 ""
+      echo >&2 "*** User chroot 'env' attributes are intended for interactive nix-shell sessions, not for building! ***"
+      echo >&2 ""
+      exit 1
+    '';
+  };
+} ''
+  mkdir -p $out/bin
+  cat <<EOF >$out/bin/${name}
+  #! ${stdenv.shell}
+  exec ${chrootenv}/bin/chrootenv ${init runScript} "\$(pwd)" "\$@"
+  EOF
+  chmod +x $out/bin/${name}
+  ${extraInstallCommands}
+''
diff --git a/nixpkgs/pkgs/build-support/build-fhs-userenv/env.nix b/nixpkgs/pkgs/build-support/build-fhs-userenv/env.nix
new file mode 100644
index 000000000000..226904f311b6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-fhs-userenv/env.nix
@@ -0,0 +1,208 @@
+{ stdenv, buildEnv, writeText, pkgs, pkgsi686Linux }:
+
+{ name, profile ? ""
+, targetPkgs ? pkgs: [], multiPkgs ? pkgs: []
+, extraBuildCommands ? "", extraBuildCommandsMulti ? ""
+, extraOutputsToInstall ? []
+}:
+
+# HOWTO:
+# All packages (most likely programs) returned from targetPkgs will only be
+# installed once--matching the host's architecture (64bit on x86_64 and 32bit on
+# x86).
+#
+# Packages (most likely libraries) returned from multiPkgs are installed
+# once on x86 systems and twice on x86_64 systems.
+# On x86 they are merged with packages from targetPkgs.
+# On x86_64 they are added to targetPkgs and in addition their 32bit
+# versions are also installed. The final directory structure looks as
+# follows:
+# /lib32 will include 32bit libraries from multiPkgs
+# /lib64 will include 64bit libraries from multiPkgs and targetPkgs
+# /lib will link to /lib64
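+#
+# A minimal illustrative call (package names are assumptions, not part of
+# this file):
+#
+#   buildFHSEnv {
+#     name = "example-env";
+#     targetPkgs = pkgs: [ pkgs.git ];
+#     multiPkgs  = pkgs: [ pkgs.zlib ];  # installed as 32-bit and 64-bit
+#   }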
+
+let
+  is64Bit = stdenv.hostPlatform.parsed.cpu.bits == 64;
+  isMultiBuild  = multiPkgs != null && is64Bit;
+  isTargetBuild = !isMultiBuild;
+
+  # list of packages (usually programs) which are installed only for the
+  # host's architecture
+  targetPaths = targetPkgs pkgs ++ (if multiPkgs == null then [] else multiPkgs pkgs);
+
+  # list of packages which are installed for both x86 and x86_64 on x86_64
+  # systems
+  multiPaths = multiPkgs pkgsi686Linux;
+
+  # base packages of the chroot
+  # these match the host's architecture; glibc_multi is used for multilib
+  # builds. glibcLocales must come before glibc or glibc_multi, as otherwise
+  # the wrong LOCALE_ARCHIVE (in which only C.UTF-8 is available) is used.
+  basePkgs = with pkgs;
+    [ glibcLocales
+      (if isMultiBuild then glibc_multi else glibc)
+      (toString gcc.cc.lib) bashInteractive coreutils less shadow su
+      gawk diffutils findutils gnused gnugrep
+      gnutar gzip bzip2 xz
+    ];
+  baseMultiPkgs = with pkgsi686Linux;
+    [ (toString gcc.cc.lib)
+    ];
+
+  etcProfile = writeText "profile" ''
+    export PS1='${name}-chrootenv:\u@\h:\w\$ '
+    export LOCALE_ARCHIVE='/usr/lib/locale/locale-archive'
+    export LD_LIBRARY_PATH="/run/opengl-driver/lib:/run/opengl-driver-32/lib:/usr/lib:/usr/lib32''${LD_LIBRARY_PATH:+:}$LD_LIBRARY_PATH"
+    export PATH="/run/wrappers/bin:/usr/bin:/usr/sbin:$PATH"
+    export TZDIR='/etc/zoneinfo'
+
+    # Force compilers and other tools to look in default search paths
+    unset NIX_ENFORCE_PURITY
+    export NIX_CC_WRAPPER_TARGET_HOST_${stdenv.cc.suffixSalt}=1
+    export NIX_CFLAGS_COMPILE='-idirafter /usr/include'
+    export NIX_CFLAGS_LINK='-L/usr/lib -L/usr/lib32'
+    export NIX_LDFLAGS='-L/usr/lib -L/usr/lib32'
+    export PKG_CONFIG_PATH=/usr/lib/pkgconfig
+    export ACLOCAL_PATH=/usr/share/aclocal
+
+    ${profile}
+  '';
+
+  # Compose /etc for the chroot environment
+  etcPkg = stdenv.mkDerivation {
+    name         = "${name}-chrootenv-etc";
+    buildCommand = ''
+      mkdir -p $out/etc
+      cd $out/etc
+
+      # environment variables
+      ln -s ${etcProfile} profile
+
+      # compatibility with NixOS
+      ln -s /host/etc/static static
+
+      # symlink nix config
+      ln -s /host/etc/nix nix
+
+      # symlink some NSS stuff
+      ln -s /host/etc/passwd passwd
+      ln -s /host/etc/group group
+      ln -s /host/etc/shadow shadow
+      ln -s /host/etc/hosts hosts
+      ln -s /host/etc/resolv.conf resolv.conf
+      ln -s /host/etc/nsswitch.conf nsswitch.conf
+
+      # symlink user profiles
+      ln -s /host/etc/profiles profiles
+
+      # symlink sudo and su stuff
+      ln -s /host/etc/login.defs login.defs
+      ln -s /host/etc/sudoers sudoers
+      ln -s /host/etc/sudoers.d sudoers.d
+
+      # symlink other core stuff
+      ln -s /host/etc/localtime localtime
+      ln -s /host/etc/zoneinfo zoneinfo
+      ln -s /host/etc/machine-id machine-id
+      ln -s /host/etc/os-release os-release
+
+      # symlink PAM stuff
+      ln -s /host/etc/pam.d pam.d
+
+      # symlink fonts stuff
+      ln -s /host/etc/fonts fonts
+
+      # symlink ALSA stuff
+      ln -s /host/etc/asound.conf asound.conf
+
+      # symlink SSL certs
+      mkdir -p ssl
+      ln -s /host/etc/ssl/certs ssl/certs
+
+      # symlink /etc/mtab -> /proc/mounts (compat for old userspace progs)
+      ln -s /proc/mounts mtab
+    '';
+  };
+
+  # Composes a /usr-like directory structure
+  staticUsrProfileTarget = buildEnv {
+    name = "${name}-usr-target";
+    paths = [ etcPkg ] ++ basePkgs ++ targetPaths;
+    extraOutputsToInstall = [ "out" "lib" "bin" ] ++ extraOutputsToInstall;
+    ignoreCollisions = true;
+  };
+
+  staticUsrProfileMulti = buildEnv {
+    name = "${name}-usr-multi";
+    paths = baseMultiPkgs ++ multiPaths;
+    extraOutputsToInstall = [ "out" "lib" ] ++ extraOutputsToInstall;
+    ignoreCollisions = true;
+  };
+
+  # setup library paths only for the targeted architecture
+  setupLibDirs_target = ''
+    # link content of targetPaths
+    cp -rsHf ${staticUsrProfileTarget}/lib lib
+    ln -s lib lib${if is64Bit then "64" else "32"}
+  '';
+
+  # setup /lib, /lib32 and /lib64
+  setupLibDirs_multi = ''
+    mkdir -m0755 lib32
+    mkdir -m0755 lib64
+    ln -s lib64 lib
+
+    # copy glibc stuff
+    cp -rsHf ${staticUsrProfileTarget}/lib/32/* lib32/ && chmod u+w -R lib32/
+
+    # copy content of multiPaths (32bit libs)
+    [ -d ${staticUsrProfileMulti}/lib ] && cp -rsHf ${staticUsrProfileMulti}/lib/* lib32/ && chmod u+w -R lib32/
+
+    # copy content of targetPaths (64bit libs)
+    cp -rsHf ${staticUsrProfileTarget}/lib/* lib64/ && chmod u+w -R lib64/
+
+    # symlink 32-bit ld-linux.so
+    ln -Ls ${staticUsrProfileTarget}/lib/32/ld-linux.so.2 lib/
+  '';
+
+  setupLibDirs = if isTargetBuild then setupLibDirs_target
+                                  else setupLibDirs_multi;
+
+  # the target profile is the actual profile that will be used for the chroot
+  setupTargetProfile = ''
+    mkdir -m0755 usr
+    cd usr
+    ${setupLibDirs}
+    for i in bin sbin share include; do
+      if [ -d "${staticUsrProfileTarget}/$i" ]; then
+        cp -rsHf "${staticUsrProfileTarget}/$i" "$i"
+      fi
+    done
+    cd ..
+
+    for i in var etc; do
+      if [ -d "${staticUsrProfileTarget}/$i" ]; then
+        cp -rsHf "${staticUsrProfileTarget}/$i" "$i"
+      fi
+    done
+    for i in usr/{bin,sbin,lib,lib32,lib64}; do
+      if [ -d "$i" ]; then
+        ln -s "$i"
+      fi
+    done
+  '';
+
+in stdenv.mkDerivation {
+  name         = "${name}-fhs";
+  buildCommand = ''
+    mkdir -p $out
+    cd $out
+    ${setupTargetProfile}
+    cd $out
+    ${extraBuildCommands}
+    cd $out
+    ${if isMultiBuild then extraBuildCommandsMulti else ""}
+  '';
+  preferLocalBuild = true;
+  allowSubstitutes = false;
+}
diff --git a/nixpkgs/pkgs/build-support/build-maven.nix b/nixpkgs/pkgs/build-support/build-maven.nix
new file mode 100644
index 000000000000..f47e3ebc61c2
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-maven.nix
@@ -0,0 +1,81 @@
+{ stdenv, maven, runCommand, writeText, fetchurl, lib, requireFile }:
+/* Takes an info file generated by mvn2nix
+ * (https://github.com/NixOS/mvn2nix-maven-plugin) and builds the maven
+ * project with it.
+ *
+ * repo: A local maven repository with the project's dependencies.
+ *
+ * settings: A settings.xml to pass to maven to use the repo.
+ *
+ * build: A simple build derivation that uses mvn compile and package to build
+ *        the project.
+ */
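+/* A minimal illustrative use (names are assumptions, not part of this file):
+ *
+ *   buildMaven = callPackage ./build-maven.nix { };
+ *   result = buildMaven ./project-info.json;
+ *   # result.build is the packaged jar; result.repo and result.settings can
+ *   # be reused for further maven invocations.
+ */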
+infoFile: let
+  info = lib.importJSON infoFile;
+
+  script = writeText "build-maven-repository.sh" ''
+    ${lib.concatStrings (map (dep: let
+      inherit (dep) sha1 groupId artifactId version metadata repository-id;
+
+      versionDir = dep.unresolved-version or version;
+      authenticated = dep.authenticated or false;
+      url = dep.url or "";
+
+      fetch = if (url != "") then ((if authenticated then requireFile else fetchurl) {
+        inherit url sha1;
+      }) else "";
+
+      fetchMetadata = (if authenticated then requireFile else fetchurl) {
+        inherit (metadata) url sha1;
+      };
+    in ''
+      dir=$out/$(echo ${groupId} | sed 's|\.|/|g')/${artifactId}/${versionDir}
+      mkdir -p $dir
+
+      ${lib.optionalString (fetch != "") ''
+        ln -sv ${fetch} $dir/${fetch.name}
+      ''}
+      ${lib.optionalString (dep ? metadata) ''
+        ln -svf ${fetchMetadata} $dir/maven-metadata-${repository-id}.xml
+        ${lib.optionalString (fetch != "") ''
+          ln -sv ${fetch} $dir/$(echo ${fetch.name} | sed 's|${version}|${dep.unresolved-version}|')
+        ''}
+      ''}
+    '') info.dependencies)}
+  '';
+
+  repo = runCommand "maven-repository" {} ''
+    bash ${script}
+  '';
+
+  settings = writeText "settings.xml" ''
+    <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+      xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
+                          http://maven.apache.org/xsd/settings-1.0.0.xsd">
+      <localRepository>${repo}</localRepository>
+    </settings>
+  '';
+
+  src = dirOf infoFile;
+in {
+  inherit repo settings info;
+
+  build = stdenv.mkDerivation {
+    name = "${info.project.artifactId}-${info.project.version}.jar";
+
+    src = builtins.filterSource (path: type:
+      (toString path) != (toString (src + "/target")) &&
+        (toString path) != (toString (src + "/.git"))
+    ) src;
+
+    buildInputs = [ maven ];
+
+    buildPhase = "mvn --offline --settings ${settings} compile";
+
+    installPhase = ''
+      mvn --offline --settings ${settings} package
+      mv target/*.jar $out
+    '';
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/build-pecl.nix b/nixpkgs/pkgs/build-support/build-pecl.nix
new file mode 100644
index 000000000000..d3a8cc54a146
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-pecl.nix
@@ -0,0 +1,37 @@
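+# Builds a PHP extension from PECL. A minimal illustrative call (pname,
+# version and hash are placeholders, not real values):
+#
+#   buildPecl {
+#     pname = "example";
+#     version = "1.0.0";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }
+#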
+{ stdenv, lib, php, autoreconfHook, fetchurl, re2c }:
+
+{ pname
+, version
+, internalDeps ? []
+, peclDeps ? []
+, buildInputs ? []
+, nativeBuildInputs ? []
+, postPhpize ? ""
+, makeFlags ? []
+, src ? fetchurl {
+    url = "http://pecl.php.net/get/${pname}-${version}.tgz";
+    inherit (args) sha256;
+  }
+, ...
+}@args:
+
+stdenv.mkDerivation (args // {
+  name = "php-${pname}-${version}";
+  extensionName = pname;
+
+  inherit src;
+
+  nativeBuildInputs = [ autoreconfHook re2c ] ++ nativeBuildInputs;
+  buildInputs = [ php ] ++ peclDeps ++ buildInputs;
+
+  makeFlags = [ "EXTENSION_DIR=$(out)/lib/php/extensions" ] ++ makeFlags;
+
+  autoreconfPhase = ''
+    phpize
+    ${postPhpize}
+    ${lib.concatMapStringsSep "\n"
+      (dep: "mkdir -p ext; ln -s ${dep.dev}/include ext/${dep.extensionName}")
+      internalDeps}
+  '';
+  checkPhase = "NO_INTERACTION=yes make test";
+})
diff --git a/nixpkgs/pkgs/build-support/build-setupcfg/default.nix b/nixpkgs/pkgs/build-support/build-setupcfg/default.nix
new file mode 100644
index 000000000000..bc6482f0a88e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/build-setupcfg/default.nix
@@ -0,0 +1,25 @@
+# Build a python package from info made available by setupcfg2nix.
+#
+# * src: The source of the package.
+# * info: The package information generated by setupcfg2nix.
+# * meta: Standard nixpkgs metadata.
+# * application: Whether this package is a python library or an
+#   application which happens to be written in python.
+# * doCheck: Whether to run the test suites.
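+#
+# A minimal illustrative call (file names are assumptions):
+#
+#   buildSetupcfg python3Packages {
+#     src = ./.;
+#     info = lib.importJSON ./package-info.json;
+#   }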
+pythonPackages:
+{ src, info, meta ? {}, application ? false, doCheck ? true }: let
+  build = if application
+    then pythonPackages.buildPythonApplication
+  else pythonPackages.buildPythonPackage;
+in build {
+  inherit (info) pname version;
+
+  inherit src meta doCheck;
+
+  nativeBuildInputs = map (p: pythonPackages.${p}) (
+    (info.setup_requires or []) ++
+    (if doCheck then (info.tests_require or []) else []));
+
+  propagatedBuildInputs = map (p: pythonPackages.${p})
+    (info.install_requires or []);
+}
diff --git a/nixpkgs/pkgs/build-support/buildenv/builder.pl b/nixpkgs/pkgs/build-support/buildenv/builder.pl
new file mode 100755
index 000000000000..411b147cc58e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/buildenv/builder.pl
@@ -0,0 +1,251 @@
+#! @perl@ -w
+
+use strict;
+use Cwd 'abs_path';
+use IO::Handle;
+use File::Path;
+use File::Basename;
+use File::Compare;
+use JSON::PP;
+
+STDOUT->autoflush(1);
+
+my $out = $ENV{"out"};
+
+my @pathsToLink = split ' ', $ENV{"pathsToLink"};
+
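+# Returns whether $path equals, or lies inside, one of the elements of
+# @pathsToLink. E.g. with pathsToLink ("/bin"), both "/bin" and "/bin/sh"
+# match, but "/binary" does not.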
+sub isInPathsToLink {
+    my $path = shift;
+    $path = "/" if $path eq "";
+    foreach my $elem (@pathsToLink) {
+        return 1 if
+            $elem eq "/" ||
+            (substr($path, 0, length($elem)) eq $elem
+             && (($path eq $elem) || (substr($path, length($elem), 1) eq "/")));
+    }
+    return 0;
+}
+
+# Returns whether a path in one of the linked packages may contain
+# files in one of the elements of pathsToLink.
+sub hasPathsToLink {
+    my $path = shift;
+    foreach my $elem (@pathsToLink) {
+        return 1 if
+            $path eq "" ||
+            (substr($elem, 0, length($path)) eq $path
+             && (($path eq $elem) || (substr($elem, length($path), 1) eq "/")));
+    }
+    return 0;
+}
+
+# Similar to `lib.isStorePath`
+sub isStorePath {
+    my $path = shift;
+    my $storePath = "@storeDir@";
+
+    return substr($path, 0, 1) eq "/" && dirname($path) eq $storePath;
+}
+
+# For each activated package, determine what symlinks to create.
+
+my %symlinks;
+
+# Add all pathsToLink and all parent directories.
+#
+# For "/a/b/c" that will include
+# [ "", "/a", "/a/b", "/a/b/c" ]
+#
+# That ensures the whole directory tree needed by pathsToLink is
+# created as directories and not symlinks.
+$symlinks{""} = ["", 0];
+for my $p (@pathsToLink) {
+    my @parts = split '/', $p;
+
+    my $cur = "";
+    for my $x (@parts) {
+        $cur = $cur . "/$x";
+        $cur = "" if $cur eq "/";
+        $symlinks{$cur} = ["", 0];
+    }
+}
+
+sub findFiles;
+
+sub findFilesInDir {
+    my ($relName, $target, $ignoreCollisions, $checkCollisionContents, $priority) = @_;
+
+    opendir DIR, "$target" or die "cannot open `$target': $!";
+    my @names = readdir DIR or die;
+    closedir DIR;
+
+    foreach my $name (@names) {
+        next if $name eq "." || $name eq "..";
+        findFiles("$relName/$name", "$target/$name", $name, $ignoreCollisions, $checkCollisionContents, $priority);
+    }
+}
+
+sub checkCollision {
+    my ($path1, $path2) = @_;
+
+    my $stat1 = (stat($path1))[2];
+    my $stat2 = (stat($path2))[2];
+
+    if ($stat1 != $stat2) {
+        warn "different permissions in `$path1' and `$path2': "
+           . sprintf("%04o", $stat1 & 07777) . " <-> "
+           . sprintf("%04o", $stat2 & 07777);
+        return 0;
+    }
+
+    return compare($path1, $path2) == 0;
+}
+
+sub findFiles {
+    my ($relName, $target, $baseName, $ignoreCollisions, $checkCollisionContents, $priority) = @_;
+
+    # The store path must not be a file
+    if (-f $target && isStorePath $target) {
+        die "The store path $target is a file and can't be merged into an environment using pkgs.buildEnv!";
+    }
+
+    # Urgh, hacky...
+    return if
+        $relName eq "/propagated-build-inputs" ||
+        $relName eq "/nix-support" ||
+        $relName =~ /info\/dir/ ||
+        ( $relName =~ /^\/share\/mime\// && !( $relName =~ /^\/share\/mime\/packages/ ) ) ||
+        $baseName eq "perllocal.pod" ||
+        $baseName eq "log" ||
+        ! (hasPathsToLink($relName) || isInPathsToLink($relName));
+
+    my ($oldTarget, $oldPriority) = @{$symlinks{$relName} // [undef, undef]};
+
+    # If target doesn't exist, create it. If it already exists as a
+    # symlink to a file (not a directory) in a lower-priority package,
+    # overwrite it.
+    if (!defined $oldTarget || ($priority < $oldPriority && ($oldTarget ne "" && ! -d $oldTarget))) {
+        $symlinks{$relName} = [$target, $priority];
+        return;
+    }
+
+    # If target already exists and both targets resolve to the same path, skip
+    if (defined $oldTarget && $oldTarget ne "" && abs_path($target) eq abs_path($oldTarget)) {
+        # Prefer the target that is not a symlink, if any
+        if (-l $oldTarget && ! -l $target) {
+            $symlinks{$relName} = [$target, $priority];
+        }
+        return;
+    }
+
+    # If target already exists as a symlink to a file (not a
+    # directory) in a higher-priority package, skip.
+    if (defined $oldTarget && $priority > $oldPriority && $oldTarget ne "" && ! -d $oldTarget) {
+        return;
+    }
+
+    unless (-d $target && ($oldTarget eq "" || -d $oldTarget)) {
+        if ($ignoreCollisions) {
+            warn "collision between `$target' and `$oldTarget'\n" if $ignoreCollisions == 1;
+            return;
+        } elsif ($checkCollisionContents && checkCollision($oldTarget, $target)) {
+            return;
+        } else {
+            die "collision between `$target' and `$oldTarget'\n";
+        }
+    }
+
+    findFilesInDir($relName, $oldTarget, $ignoreCollisions, $checkCollisionContents, $oldPriority) unless $oldTarget eq "";
+    findFilesInDir($relName, $target, $ignoreCollisions, $checkCollisionContents, $priority);
+
+    $symlinks{$relName} = ["", $priority]; # denotes directory
+}
+
+
+my %done;
+my %postponed;
+
+sub addPkg {
+    my ($pkgDir, $ignoreCollisions, $checkCollisionContents, $priority)  = @_;
+
+    return if (defined $done{$pkgDir});
+    $done{$pkgDir} = 1;
+
+    findFiles("", $pkgDir, "", $ignoreCollisions, $checkCollisionContents, $priority);
+
+    my $propagatedFN = "$pkgDir/nix-support/propagated-user-env-packages";
+    if (-e $propagatedFN) {
+        open PROP, "<$propagatedFN" or die;
+        my $propagated = <PROP>;
+        close PROP;
+        my @propagated = split ' ', $propagated;
+        foreach my $p (@propagated) {
+            $postponed{$p} = 1 unless defined $done{$p};
+        }
+    }
+}
+
+# Read packages list.
+my $pkgs;
+
+if (exists $ENV{"pkgsPath"}) {
+    open FILE, $ENV{"pkgsPath"};
+    $pkgs = <FILE>;
+    close FILE;
+} else {
+    $pkgs = $ENV{"pkgs"};
+}
+
+# Symlink to the packages that have been installed explicitly by the
+# user.
+for my $pkg (@{decode_json $pkgs}) {
+    for my $path (@{$pkg->{paths}}) {
+        addPkg($path,
+               $ENV{"ignoreCollisions"} eq "1",
+               $ENV{"checkCollisionContents"} eq "1",
+               $pkg->{priority})
+           if -e $path;
+    }
+}
+
+
+# Symlink to the packages that have been "propagated" by packages
+# installed by the user (i.e., package X declares that it wants Y
+# installed as well).  We do these later because they have a lower
+# priority in case of collisions.
+my $priorityCounter = 1000; # don't care about collisions
+while (scalar(keys %postponed) > 0) {
+    my @pkgDirs = keys %postponed;
+    %postponed = ();
+    foreach my $pkgDir (sort @pkgDirs) {
+        addPkg($pkgDir, 2, $ENV{"checkCollisionContents"} eq "1", $priorityCounter++);
+    }
+}
+
+
+# Create the symlinks.
+my $extraPrefix = $ENV{"extraPrefix"};
+my $nrLinks = 0;
+foreach my $relName (sort keys %symlinks) {
+    my ($target, $priority) = @{$symlinks{$relName}};
+    my $abs = "$out" . "$extraPrefix" . "/$relName";
+    next unless isInPathsToLink $relName;
+    if ($target eq "") {
+        #print "creating directory $relName\n";
+        mkpath $abs or die "cannot create directory `$abs': $!";
+    } else {
+        #print "creating symlink $relName to $target\n";
+        symlink $target, $abs
+            or die "error creating link `$abs': $!";
+        $nrLinks++;
+    }
+}
+
+
+print STDERR "created $nrLinks symlinks in user environment\n";
+
+
+my $manifest = $ENV{"manifest"};
+if ($manifest) {
+    symlink($manifest, "$out/manifest") or die "cannot create manifest";
+}
diff --git a/nixpkgs/pkgs/build-support/buildenv/default.nix b/nixpkgs/pkgs/build-support/buildenv/default.nix
new file mode 100644
index 000000000000..c2186cf6bfa0
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/buildenv/default.nix
@@ -0,0 +1,82 @@
+# buildEnv creates a tree of symlinks to the specified paths.  This is
+# a fork of the buildEnv in the Nix distribution.  Most changes should
+# eventually be merged back into the Nix distribution.
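+#
+# A minimal illustrative call (package names are assumptions):
+#
+#   buildEnv {
+#     name = "my-profile";
+#     paths = [ pkgs.hello pkgs.jq ];
+#     pathsToLink = [ "/bin" "/share" ];
+#   }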
+
+{ buildPackages, runCommand, lib, substituteAll }:
+
+lib.makeOverridable
+({ name
+
+, # The manifest file (if any).  A symlink $out/manifest will be
+  # created to it.
+  manifest ? ""
+
+, # The paths to symlink.
+  paths
+
+, # Whether to ignore collisions or abort.
+  ignoreCollisions ? false
+
+, # If there is a collision, check whether the contents and permissions match
+  # and only if not, throw a collision error.
+  checkCollisionContents ? true
+
+, # The paths (relative to each element of `paths') that we want to
+  # symlink (e.g., ["/bin"]).  Any file not inside any of the
+  # directories in the list is not symlinked.
+  pathsToLink ? ["/"]
+
+, # The package outputs to include. By default, only the default
+  # output is included.
+  extraOutputsToInstall ? []
+
+, # Root the result in directory "$out${extraPrefix}", e.g. "/share".
+  extraPrefix ? ""
+
+, # Shell commands to run after building the symlink tree.
+  postBuild ? ""
+
+# Additional inputs
+, nativeBuildInputs ? [] # Handy e.g. if using makeWrapper in `postBuild`.
+, buildInputs ? []
+
+, passthru ? {}
+, meta ? {}
+}:
+
+let
+  builder = substituteAll {
+    src = ./builder.pl;
+    inherit (builtins) storeDir;
+  };
+in
+
+runCommand name
+  rec {
+    inherit manifest ignoreCollisions checkCollisionContents passthru
+            meta pathsToLink extraPrefix postBuild
+            nativeBuildInputs buildInputs;
+    pkgs = builtins.toJSON (map (drv: {
+      paths =
+        # First add the usual output(s): respect if user has chosen explicitly,
+        # and otherwise use `meta.outputsToInstall`. The attribute is guaranteed
+        # to exist in mkDerivation-created cases. The other cases (e.g. runCommand)
+        # aren't expected to have multiple outputs.
+        (if drv.outputUnspecified or false
+            && drv.meta.outputsToInstall or null != null
+          then map (outName: drv.${outName}) drv.meta.outputsToInstall
+          else [ drv ])
+        # Add any extra outputs specified by the caller of `buildEnv`.
+        ++ lib.filter (p: p!=null)
+          (builtins.map (outName: drv.${outName} or null) extraOutputsToInstall);
+      priority = drv.meta.priority or 5;
+    }) paths);
+    preferLocalBuild = true;
+    allowSubstitutes = false;
+    # XXX: The size is somewhat arbitrary
+    passAsFile = if builtins.stringLength pkgs >= 128*1024 then [ "pkgs" ] else [ ];
+  }
+  ''
+    ${buildPackages.perl}/bin/perl -w ${builder}
+    eval "$postBuild"
+  '')
diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/add-flags.sh b/nixpkgs/pkgs/build-support/cc-wrapper/add-flags.sh
new file mode 100644
index 000000000000..a6e991914e6b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/cc-wrapper/add-flags.sh
@@ -0,0 +1,77 @@
+# N.B. It may be a surprise that the derivation-specific variables are exported,
+# since this is just sourced by the wrapped binaries---the end consumers. This
+# is because one wrapper binary may invoke another (e.g. cc invoking ld). In
+# that case, it is cheaper/better to not repeat this step and let the forked
+# wrapped binary just inherit the work of the forker's wrapper script.
+
+var_templates_list=(
+    NIX_CFLAGS_COMPILE
+    NIX_CFLAGS_COMPILE_BEFORE
+    NIX_CFLAGS_LINK
+    NIX_CXXSTDLIB_COMPILE
+    NIX_CXXSTDLIB_LINK
+    NIX_GNATFLAGS_COMPILE
+)
+var_templates_bool=(
+    NIX_ENFORCE_NO_NATIVE
+)
+
+accumulateRoles
+
+# We need to mangle names for hygiene, but also take parameters/overrides
+# from the environment.
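+# For example (illustrative): with role suffix _FOR_TARGET, the values of
+# NIX_CFLAGS_COMPILE and NIX_CFLAGS_COMPILE_FOR_TARGET are folded into the
+# single salted variable NIX_CFLAGS_COMPILE_@suffixSalt@.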
+for var in "${var_templates_list[@]}"; do
+    mangleVarList "$var" ${role_suffixes[@]+"${role_suffixes[@]}"}
+done
+for var in "${var_templates_bool[@]}"; do
+    mangleVarBool "$var" ${role_suffixes[@]+"${role_suffixes[@]}"}
+done
+
+# `-B@out@/bin' forces cc to use ld-wrapper.sh when calling ld.
+NIX_CFLAGS_COMPILE_@suffixSalt@="-B@out@/bin/ $NIX_CFLAGS_COMPILE_@suffixSalt@"
+
+# Export and assign separately in order that a failing $(..) will fail
+# the script.
+
+if [[ "$cInclude" = 1 ]] && [ -e @out@/nix-support/libc-cflags ]; then
+    NIX_CFLAGS_COMPILE_@suffixSalt@="$(< @out@/nix-support/libc-cflags) $NIX_CFLAGS_COMPILE_@suffixSalt@"
+fi
+
+if [ -e @out@/nix-support/libc-crt1-cflags ]; then
+    NIX_CFLAGS_COMPILE_@suffixSalt@="$(< @out@/nix-support/libc-crt1-cflags) $NIX_CFLAGS_COMPILE_@suffixSalt@"
+fi
+
+if [ -e @out@/nix-support/libcxx-cxxflags ]; then
+    NIX_CXXSTDLIB_COMPILE_@suffixSalt@+=" $(< @out@/nix-support/libcxx-cxxflags)"
+fi
+
+if [ -e @out@/nix-support/libcxx-ldflags ]; then
+    NIX_CXXSTDLIB_LINK_@suffixSalt@+=" $(< @out@/nix-support/libcxx-ldflags)"
+fi
+
+if [ -e @out@/nix-support/cc-cflags ]; then
+    NIX_CFLAGS_COMPILE_@suffixSalt@="$(< @out@/nix-support/cc-cflags) $NIX_CFLAGS_COMPILE_@suffixSalt@"
+fi
+
+if [ -e @out@/nix-support/gnat-cflags ]; then
+    NIX_GNATFLAGS_COMPILE_@suffixSalt@="$(< @out@/nix-support/gnat-cflags) $NIX_GNATFLAGS_COMPILE_@suffixSalt@"
+fi
+
+if [ -e @out@/nix-support/cc-ldflags ]; then
+    NIX_LDFLAGS_@suffixSalt@+=" $(< @out@/nix-support/cc-ldflags)"
+fi
+
+if [ -e @out@/nix-support/cc-cflags-before ]; then
+    NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@="$(< @out@/nix-support/cc-cflags-before) $NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@"
+fi
+
+# Only add darwin min version flag if a default darwin min version is set,
+# which is a signal that we're targeting darwin.
+if [ "@darwinMinVersion@" ]; then
+    mangleVarSingle @darwinMinVersionVariable@ ${role_suffixes[@]+"${role_suffixes[@]}"}
+
+    NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@="-m@darwinPlatformForCC@-version-min=${@darwinMinVersionVariable@_@suffixSalt@:-@darwinMinVersion@} $NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@"
+fi
+
+# That way forked processes will not extend these environment variables again.
+export NIX_CC_WRAPPER_FLAGS_SET_@suffixSalt@=1
diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/add-hardening.sh b/nixpkgs/pkgs/build-support/cc-wrapper/add-hardening.sh
new file mode 100644
index 000000000000..8e2fe6c407ea
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/cc-wrapper/add-hardening.sh
@@ -0,0 +1,72 @@
+declare -a hardeningCFlags=()
+
+declare -A hardeningEnableMap=()
+
+# Intentionally word-split in case 'NIX_HARDENING_ENABLE' is defined in Nix. The
+# array expansion also prevents undefined variables from causing trouble with
+# `set -u`.
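+# For example (illustrative): NIX_HARDENING_ENABLE_@suffixSalt@="fortify pie"
+# enables exactly those two flags, assuming both are supported.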
+for flag in ${NIX_HARDENING_ENABLE_@suffixSalt@-}; do
+  hardeningEnableMap["$flag"]=1
+done
+
+# Remove unsupported flags.
+for flag in @hardening_unsupported_flags@; do
+  unset -v "hardeningEnableMap[$flag]"
+done
+
+if (( "${NIX_DEBUG:-0}" >= 1 )); then
+  declare -a allHardeningFlags=(fortify stackprotector pie pic strictoverflow format)
+  declare -A hardeningDisableMap=()
+
+  # Determine which flags were effectively disabled so we can report below.
+  for flag in "${allHardeningFlags[@]}"; do
+    if [[ -z "${hardeningEnableMap[$flag]-}" ]]; then
+      hardeningDisableMap["$flag"]=1
+    fi
+  done
+
+  printf 'HARDENING: disabled flags:' >&2
+  (( "${#hardeningDisableMap[@]}" )) && printf ' %q' "${!hardeningDisableMap[@]}" >&2
+  echo >&2
+
+  if (( "${#hardeningEnableMap[@]}" )); then
+    echo 'HARDENING: Is active (not completely disabled with "all" flag)' >&2;
+  fi
+fi
+
+for flag in "${!hardeningEnableMap[@]}"; do
+  case $flag in
+    fortify)
+      if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling fortify >&2; fi
+      hardeningCFlags+=('-O2' '-D_FORTIFY_SOURCE=2')
+      ;;
+    stackprotector)
+      if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling stackprotector >&2; fi
+      hardeningCFlags+=('-fstack-protector-strong' '--param' 'ssp-buffer-size=4')
+      ;;
+    pie)
+      if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling CFlags -fPIE >&2; fi
+      hardeningCFlags+=('-fPIE')
+      if [[ ! ("$*" =~ " -shared " || "$*" =~ " -static ") ]]; then
+        if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling LDFlags -pie >&2; fi
+        hardeningCFlags+=('-pie')
+      fi
+      ;;
+    pic)
+      if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling pic >&2; fi
+      hardeningCFlags+=('-fPIC')
+      ;;
+    strictoverflow)
+      if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling strictoverflow >&2; fi
+      hardeningCFlags+=('-fno-strict-overflow')
+      ;;
+    format)
+      if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling format >&2; fi
+      hardeningCFlags+=('-Wformat' '-Wformat-security' '-Werror=format-security')
+      ;;
+    *)
+      # Ignore unsupported. Checked in Nix that at least *some*
+      # tool supports each flag.
+      ;;
+  esac
+done
diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/cc-wrapper.sh b/nixpkgs/pkgs/build-support/cc-wrapper/cc-wrapper.sh
new file mode 100644
index 000000000000..2e62aef46048
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/cc-wrapper/cc-wrapper.sh
@@ -0,0 +1,215 @@
+#! @shell@
+set -eu -o pipefail +o posix
+shopt -s nullglob
+
+if (( "${NIX_DEBUG:-0}" >= 7 )); then
+    set -x
+fi
+
+path_backup="$PATH"
+
+# Because @-vars are substituted separately from bash evaluation, shellcheck
+# thinks this, and others like it, are useless conditionals.
+# shellcheck disable=SC2157
+if [[ -n "@coreutils_bin@" && -n "@gnugrep_bin@" ]]; then
+    PATH="@coreutils_bin@/bin:@gnugrep_bin@/bin"
+fi
+
+source @out@/nix-support/utils.bash
+
+
+# Parse command line options and set several variables.
+# For instance, figure out if linker flags should be passed.
+# GCC prints annoying warnings when they are not needed.
+dontLink=0
+nonFlagArgs=0
+cc1=0
+# shellcheck disable=SC2193
+[[ "@prog@" = *++ ]] && isCxx=1 || isCxx=0
+cxxInclude=1
+cxxLibrary=1
+cInclude=1
+setDynamicLinker=1
+
+expandResponseParams "$@"
+declare -i n=0
+nParams=${#params[@]}
+while (( "$n" < "$nParams" )); do
+    p=${params[n]}
+    p2=${params[n+1]:-} # handle `p` being last one
+    if [ "$p" = -c ]; then
+        dontLink=1
+    elif [ "$p" = -S ]; then
+        dontLink=1
+    elif [ "$p" = -E ]; then
+        dontLink=1
+    elif [ "$p" = -E ]; then
+        dontLink=1
+    elif [ "$p" = -M ]; then
+        dontLink=1
+    elif [ "$p" = -MM ]; then
+        dontLink=1
+    elif [[ "$p" = -x && "$p2" = *-header ]]; then
+        dontLink=1
+    elif [[ "$p" = -x && "$p2" = c++* && "$isCxx" = 0 ]]; then
+        isCxx=1
+    elif [ "$p" = -nostdlib ]; then
+        cxxLibrary=0
+    elif [ "$p" = -nostdinc ]; then
+        cInclude=0
+        cxxInclude=0
+    elif [ "$p" = -nostdinc++ ]; then
+        cxxInclude=0
+    elif [[ "$p" = -static || "$p" = -static-pie ]]; then
+        setDynamicLinker=0
+    elif [[ "$p" != -?* ]]; then
+        # A dash alone signifies standard input; it is not a flag
+        nonFlagArgs=1
+    elif [ "$p" = -cc1 ]; then
+        cc1=1
+    fi
+    n+=1
+done
+
+# If we pass a flag like -Wl, then gcc will call the linker unless it
+# can figure out that it has to do something else (e.g., because of a
+# "-c" flag).  So if no non-flag arguments are given, don't pass any
+# linker flags.  This catches cases like "gcc" (should just print
+# "gcc: no input files") and "gcc -v" (should print the version).
+if [ "$nonFlagArgs" = 0 ]; then
+    dontLink=1
+fi
+
+# Optionally filter out paths not referring to the store.
+if [[ "${NIX_ENFORCE_PURITY:-}" = 1 && -n "$NIX_STORE" ]]; then
+    rest=()
+    nParams=${#params[@]}
+    declare -i n=0
+    while (( "$n" < "$nParams" )); do
+        p=${params[n]}
+        p2=${params[n+1]:-} # handle `p` being last one
+        if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then
+            skip "${p:2}"
+        elif [ "$p" = -L ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "${p:0:3}" = -I/ ] && badPath "${p:2}"; then
+            skip "${p:2}"
+        elif [ "$p" = -I ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "$p" = -isystem ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        else
+            rest+=("$p")
+        fi
+        n+=1
+    done
+    # Old bash empty array hack
+    params=(${rest+"${rest[@]}"})
+fi
+
+# Flirting with a layer violation here.
+if [ -z "${NIX_BINTOOLS_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then
+    source @bintools@/nix-support/add-flags.sh
+fi
+
+# Put this one second so libc ldflags take priority.
+if [ -z "${NIX_CC_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then
+    source @out@/nix-support/add-flags.sh
+fi
+
+# Clear march/mtune=native -- they bring impurity.
+if [ "$NIX_ENFORCE_NO_NATIVE_@suffixSalt@" = 1 ]; then
+    rest=()
+    # Old bash empty array hack
+    for p in ${params+"${params[@]}"}; do
+        if [[ "$p" = -m*=native ]]; then
+            skip "$p"
+        else
+            rest+=("$p")
+        fi
+    done
+    # Old bash empty array hack
+    params=(${rest+"${rest[@]}"})
+fi
+
+if [[ "$isCxx" = 1 ]]; then
+    if [[ "$cxxInclude" = 1 ]]; then
+        NIX_CFLAGS_COMPILE_@suffixSalt@+=" $NIX_CXXSTDLIB_COMPILE_@suffixSalt@"
+    fi
+    if [[ "$cxxLibrary" = 1 ]]; then
+        NIX_CFLAGS_LINK_@suffixSalt@+=" $NIX_CXXSTDLIB_LINK_@suffixSalt@"
+    fi
+fi
+
+source @out@/nix-support/add-hardening.sh
+
+# Add the flags for the C compiler proper.
+extraAfter=($NIX_CFLAGS_COMPILE_@suffixSalt@)
+extraBefore=(${hardeningCFlags[@]+"${hardeningCFlags[@]}"} $NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@)
+
+if [ "$dontLink" != 1 ]; then
+
+    # Add the flags that should only be passed to the compiler when
+    # linking.
+    extraAfter+=($NIX_CFLAGS_LINK_@suffixSalt@)
+
+    # Add the flags that should be passed to the linker (and prevent
+    # `ld-wrapper' from adding NIX_LDFLAGS_@suffixSalt@ again).
+    for i in $NIX_LDFLAGS_BEFORE_@suffixSalt@; do
+        extraBefore+=("-Wl,$i")
+    done
+    if [[ "$setDynamicLinker" = 1 && -n "$NIX_DYNAMIC_LINKER_@suffixSalt@" ]]; then
+        extraBefore+=("-Wl,-dynamic-linker=$NIX_DYNAMIC_LINKER_@suffixSalt@")
+    fi
+    for i in $NIX_LDFLAGS_@suffixSalt@; do
+        if [ "${i:0:3}" = -L/ ]; then
+            extraAfter+=("$i")
+        else
+            extraAfter+=("-Wl,$i")
+        fi
+    done
+    export NIX_LDFLAGS_SET_@suffixSalt@=1
+fi
+
+# As a very special hack, if the arguments are just `-v', then don't
+# add anything.  This is to prevent `gcc -v' (which normally prints
+# out the version number and returns exit code 0) from printing out
+# `No input files specified' and returning exit code 1.
+if [ "$*" = -v ]; then
+    extraAfter=()
+    extraBefore=()
+fi
+
+# clang's -cc1 mode is not compatible with most options
+# that we would pass. Rather than trying to pass only
+# options that would work, let's just remove all of them.
+if [ "$cc1" = 1 ]; then
+  extraAfter=()
+  extraBefore=()
+fi
+
+# Optionally print debug info.
+if (( "${NIX_DEBUG:-0}" >= 1 )); then
+    # Old bash workaround, see ld-wrapper for explanation.
+    echo "extra flags before to @prog@:" >&2
+    printf "  %q\n" ${extraBefore+"${extraBefore[@]}"}  >&2
+    echo "original flags to @prog@:" >&2
+    printf "  %q\n" ${params+"${params[@]}"} >&2
+    echo "extra flags after to @prog@:" >&2
+    printf "  %q\n" ${extraAfter+"${extraAfter[@]}"} >&2
+fi
+
+PATH="$path_backup"
+# Old bash workaround, see above.
+
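+# Pass the arguments via a response file (the @file syntax), fed through
+# process substitution, so very long argument lists don't exceed the
+# kernel's ARG_MAX limit.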
+if (( "${NIX_CC_USE_RESPONSE_FILE:-@use_response_file_by_default@}" >= 1 )); then
+    exec @prog@ @<(printf "%q\n" \
+       ${extraBefore+"${extraBefore[@]}"} \
+       ${params+"${params[@]}"} \
+       ${extraAfter+"${extraAfter[@]}"})
+else
+    exec @prog@ \
+       ${extraBefore+"${extraBefore[@]}"} \
+       ${params+"${params[@]}"} \
+       ${extraAfter+"${extraAfter[@]}"}
+fi
diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/default.nix b/nixpkgs/pkgs/build-support/cc-wrapper/default.nix
new file mode 100644
index 000000000000..235d244a7c0d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/cc-wrapper/default.nix
@@ -0,0 +1,531 @@
+# The Nixpkgs CC is not directly usable, since it doesn't know where
+# the C library and standard header files are. Therefore the compiler
+# produced by that package cannot be installed directly in a user
+# environment and used from the command line. So we use a wrapper
+# script that sets up the right environment variables so that the
+# compiler and the linker just "work".
+
+{ name ? ""
+, lib
+, stdenvNoCC
+, cc ? null, libc ? null, bintools, coreutils ? null, shell ? stdenvNoCC.shell
+, gccForLibs ? null
+, zlib ? null
+, nativeTools, noLibc ? false, nativeLibc, nativePrefix ? ""
+, propagateDoc ? cc != null && cc ? man
+, extraTools ? [], extraPackages ? [], extraBuildCommands ? ""
+, isGNU ? false, isClang ? cc.isClang or false, gnugrep ? null
+, buildPackages ? {}
+, libcxx ? null
+}:
+
+with lib;
+
+assert nativeTools -> !propagateDoc && nativePrefix != "";
+assert !nativeTools ->
+  cc != null && coreutils != null && gnugrep != null;
+assert !(nativeLibc && noLibc);
+assert (noLibc || nativeLibc) == (libc == null);
+
+let
+  stdenv = stdenvNoCC;
+  inherit (stdenv) hostPlatform targetPlatform;
+
+  # Prefix for binaries. Customarily ends with a dash separator.
+  #
+  # TODO(@Ericson2314) Make unconditional, or optional but always true by
+  # default.
+  targetPrefix = lib.optionalString (targetPlatform != hostPlatform)
+                                           (targetPlatform.config + "-");
+
+  ccVersion = lib.getVersion cc;
+  ccName = lib.removePrefix targetPrefix (lib.getName cc);
+
+  libc_bin = if libc == null then null else getBin libc;
+  libc_dev = if libc == null then null else getDev libc;
+  libc_lib = if libc == null then null else getLib libc;
+  cc_solib = getLib cc
+    + optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}";
+
+  # The wrapper scripts use 'cat' and 'grep', so we may need coreutils.
+  coreutils_bin = if nativeTools then "" else getBin coreutils;
+
+  # The "suffix salt" is a arbitrary string added in the end of env vars
+  # defined by cc-wrapper's hooks so that multiple cc-wrappers can be used
+  # without interfering. For the moment, it is defined as the target triple,
+  # adjusted to be a valid bash identifier. This should be considered an
+  # unstable implementation detail, however.
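+  # For example, "x86_64-unknown-linux-gnu" becomes "x86_64_unknown_linux_gnu".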
+  suffixSalt = replaceStrings ["-" "."] ["_" "_"] targetPlatform.config;
+
+  expand-response-params =
+    if (buildPackages.stdenv.hasCC or false) && buildPackages.stdenv.cc != "/dev/null"
+    then import ../expand-response-params { inherit (buildPackages) stdenv; }
+    else "";
+
+  useGccForLibs = isClang
+    && libcxx == null
+    && !stdenv.targetPlatform.isDarwin
+    && !(stdenv.targetPlatform.useLLVM or false)
+    && !(stdenv.targetPlatform.useAndroidPrebuilt or false)
+    && !(stdenv.targetPlatform.isiOS or false)
+    && gccForLibs != null;
+
+  # older compilers (for example bootstrap's GCC 5) fail with -march=too-modern-cpu
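+  # E.g., per the tables below, "znver2" requires GCC >= 9 or Clang >= 9;
+  # arch names absent from the tables are assumed supported by GCC and Clang.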
+  isGccArchSupported = arch:
+    if isGNU then
+      { # Intel
+        skylake        = versionAtLeast ccVersion "6.0";
+        skylake-avx512 = versionAtLeast ccVersion "6.0";
+        cannonlake     = versionAtLeast ccVersion "8.0";
+        icelake-client = versionAtLeast ccVersion "8.0";
+        icelake-server = versionAtLeast ccVersion "8.0";
+        cascadelake    = versionAtLeast ccVersion "9.0";
+        cooperlake     = versionAtLeast ccVersion "10.0";
+        tigerlake      = versionAtLeast ccVersion "10.0";
+        knm            = versionAtLeast ccVersion "8.0";
+        # AMD
+        znver1         = versionAtLeast ccVersion "6.0";
+        znver2         = versionAtLeast ccVersion "9.0";
+        znver3         = versionAtLeast ccVersion "11.0";
+      }.${arch} or true
+    else if isClang then
+      { # Intel
+        cannonlake     = versionAtLeast ccVersion "5.0";
+        icelake-client = versionAtLeast ccVersion "7.0";
+        icelake-server = versionAtLeast ccVersion "7.0";
+        knm            = versionAtLeast ccVersion "7.0";
+        # AMD
+        znver1         = versionAtLeast ccVersion "4.0";
+        znver2         = versionAtLeast ccVersion "9.0";
+      }.${arch} or true
+    else
+      false;
+
+
+  darwinPlatformForCC = optionalString stdenv.targetPlatform.isDarwin (
+    if (targetPlatform.darwinPlatform == "macos" && isGNU) then "macosx"
+    else targetPlatform.darwinPlatform
+  );
+
+  darwinMinVersion = optionalString stdenv.targetPlatform.isDarwin (
+    stdenv.targetPlatform.darwinMinVersion
+  );
+
+  darwinMinVersionVariable = optionalString stdenv.targetPlatform.isDarwin
+    stdenv.targetPlatform.darwinMinVersionVariable;
+in
+
+# Ensure bintools matches
+assert libc_bin == bintools.libc_bin;
+assert libc_dev == bintools.libc_dev;
+assert libc_lib == bintools.libc_lib;
+assert nativeTools == bintools.nativeTools;
+assert nativeLibc == bintools.nativeLibc;
+assert nativePrefix == bintools.nativePrefix;
+
+stdenv.mkDerivation {
+  pname = targetPrefix
+    + (if name != "" then name else "${ccName}-wrapper");
+  version = if cc == null then null else ccVersion;
+
+  preferLocalBuild = true;
+
+  inherit cc libc_bin libc_dev libc_lib bintools coreutils_bin;
+  shell = getBin shell + shell.shellPath or "";
+  gnugrep_bin = if nativeTools then "" else gnugrep;
+
+  inherit targetPrefix suffixSalt;
+  inherit darwinPlatformForCC darwinMinVersion darwinMinVersionVariable;
+
+  outputs = [ "out" ] ++ optionals propagateDoc [ "man" "info" ];
+
+  passthru = {
+    # "cc" is the generic name for a C compiler, but there is no one for package
+    # providing the linker and related tools. The two we use now are GNU
+    # Binutils, and Apple's "cctools"; "bintools" as an attempt to find an
+    # unused middle-ground name that evokes both.
+    inherit bintools;
+    inherit libc nativeTools nativeLibc nativePrefix isGNU isClang;
+
+    emacsBufferSetup = pkgs: ''
+      ; We should handle propagation here too
+      (mapc
+        (lambda (arg)
+          (when (file-directory-p (concat arg "/include"))
+            (setenv "NIX_CFLAGS_COMPILE_${suffixSalt}" (concat (getenv "NIX_CFLAGS_COMPILE_${suffixSalt}") " -isystem " arg "/include"))))
+        '(${concatStringsSep " " (map (pkg: "\"${pkg}\"") pkgs)}))
+    '';
+  };
+
+  dontBuild = true;
+  dontConfigure = true;
+
+  unpackPhase = ''
+    src=$PWD
+  '';
+
+  wrapper = ./cc-wrapper.sh;
+
+  installPhase =
+    ''
+      mkdir -p $out/bin $out/nix-support
+
+      wrap() {
+        local dst="$1"
+        local wrapper="$2"
+        export prog="$3"
+        export use_response_file_by_default=${if isClang then "1" else "0"}
+        substituteAll "$wrapper" "$out/bin/$dst"
+        chmod +x "$out/bin/$dst"
+      }
+    ''
+
+    + (if nativeTools then ''
+      echo ${if targetPlatform.isDarwin then cc else nativePrefix} > $out/nix-support/orig-cc
+
+      ccPath="${if targetPlatform.isDarwin then cc else nativePrefix}/bin"
+    '' else ''
+      echo $cc > $out/nix-support/orig-cc
+
+      ccPath="${cc}/bin"
+    '')
+
+    # Create symlinks to everything in the bintools wrapper.
+    + ''
+      for bbin in $bintools/bin/*; do
+        mkdir -p "$out/bin"
+        ln -s "$bbin" "$out/bin/$(basename $bbin)"
+      done
+    ''
+
+    # We export environment variables pointing to the wrapped nonstandard
+    # cmds, lest some lousy configure script use those to guess compiler
+    # version.
+    + ''
+      export named_cc=${targetPrefix}cc
+      export named_cxx=${targetPrefix}c++
+
+      if [ -e $ccPath/${targetPrefix}gcc ]; then
+        wrap ${targetPrefix}gcc $wrapper $ccPath/${targetPrefix}gcc
+        ln -s ${targetPrefix}gcc $out/bin/${targetPrefix}cc
+        export named_cc=${targetPrefix}gcc
+        export named_cxx=${targetPrefix}g++
+      elif [ -e $ccPath/clang ]; then
+        wrap ${targetPrefix}clang $wrapper $ccPath/clang
+        ln -s ${targetPrefix}clang $out/bin/${targetPrefix}cc
+        export named_cc=${targetPrefix}clang
+        export named_cxx=${targetPrefix}clang++
+      fi
+
+      if [ -e $ccPath/${targetPrefix}g++ ]; then
+        wrap ${targetPrefix}g++ $wrapper $ccPath/${targetPrefix}g++
+        ln -s ${targetPrefix}g++ $out/bin/${targetPrefix}c++
+      elif [ -e $ccPath/clang++ ]; then
+        wrap ${targetPrefix}clang++ $wrapper $ccPath/clang++
+        ln -s ${targetPrefix}clang++ $out/bin/${targetPrefix}c++
+      fi
+
+      if [ -e $ccPath/cpp ]; then
+        wrap ${targetPrefix}cpp $wrapper $ccPath/cpp
+      fi
+    ''
+
+    + optionalString cc.langAda or false ''
+      wrap ${targetPrefix}gnatmake ${./gnat-wrapper.sh} $ccPath/${targetPrefix}gnatmake
+      wrap ${targetPrefix}gnatbind ${./gnat-wrapper.sh} $ccPath/${targetPrefix}gnatbind
+      wrap ${targetPrefix}gnatlink ${./gnat-wrapper.sh} $ccPath/${targetPrefix}gnatlink
+    ''
+
+    + optionalString cc.langD or false ''
+      wrap ${targetPrefix}gdc $wrapper $ccPath/${targetPrefix}gdc
+    ''
+
+    + optionalString cc.langFortran or false ''
+      wrap ${targetPrefix}gfortran $wrapper $ccPath/${targetPrefix}gfortran
+      ln -sv ${targetPrefix}gfortran $out/bin/${targetPrefix}g77
+      ln -sv ${targetPrefix}gfortran $out/bin/${targetPrefix}f77
+      export named_fc=${targetPrefix}gfortran
+    ''
+
+    + optionalString cc.langJava or false ''
+      wrap ${targetPrefix}gcj $wrapper $ccPath/${targetPrefix}gcj
+    ''
+
+    + optionalString cc.langGo or false ''
+      wrap ${targetPrefix}gccgo $wrapper $ccPath/${targetPrefix}gccgo
+    '';
+
+  strictDeps = true;
+  propagatedBuildInputs = [ bintools ] ++ extraTools ++ optionals cc.langD or false [ zlib ];
+  depsTargetTargetPropagated = optional (libcxx != null) libcxx ++ extraPackages;
+
+  wrapperName = "CC_WRAPPER";
+
+  setupHooks = [
+    ../setup-hooks/role.bash
+  ] ++ lib.optional (cc.langC or true) ./setup-hook.sh
+    ++ lib.optional (cc.langFortran or false) ./fortran-hook.sh;
+
+  postFixup =
+    # Ensure the flags files exist, as some other programs cat them. (That these
+    # are considered an exposed interface is a bit dubious, but fine for now.)
+    ''
+      touch "$out/nix-support/cc-cflags"
+      touch "$out/nix-support/cc-ldflags"
+    ''
+
+    # Backwards compatibility for packages expecting this file, e.g. with
+    # `$NIX_CC/nix-support/dynamic-linker`.
+    #
+    # TODO(@Ericson2314): Remove this after stable release and force
+    # everyone to refer to bintools-wrapper directly.
+    + ''
+      if [[ -f "$bintools/nix-support/dynamic-linker" ]]; then
+        ln -s "$bintools/nix-support/dynamic-linker" "$out/nix-support"
+      fi
+      if [[ -f "$bintools/nix-support/dynamic-linker-m32" ]]; then
+        ln -s "$bintools/nix-support/dynamic-linker-m32" "$out/nix-support"
+      fi
+    ''
+
+    ##
+    ## General Clang support
+    ##
+    + optionalString isClang ''
+
+      echo "-target ${targetPlatform.config}" >> $out/nix-support/cc-cflags
+    ''
+
+    ##
+    ## GCC libs for non-GCC support
+    ##
+    + optionalString useGccForLibs ''
+
+      echo "-B${gccForLibs}/lib/gcc/${targetPlatform.config}/${gccForLibs.version}" >> $out/nix-support/cc-cflags
+      echo "-L${gccForLibs}/lib/gcc/${targetPlatform.config}/${gccForLibs.version}" >> $out/nix-support/cc-ldflags
+      echo "-L${gccForLibs.lib}/${targetPlatform.config}/lib" >> $out/nix-support/cc-ldflags
+    ''
+
+    # TODO We would like to connect this to `useGccForLibs`, but we cannot yet
+    # because `libcxxStdenv` on linux still needs this. Maybe someday we'll
+    # always set `useLLVM` on Darwin, and maybe also break down `useLLVM` into
+    # fine-grained use flags (libgcc vs compiler-rt, ld.lld vs legacy, libc++
+    # vs libstdc++, etc.) since Darwin isn't `useLLVM` on all counts. (See
+    # https://clang.llvm.org/docs/Toolchain.html for all the axes one might
+    # break `useLLVM` into.)
+    + optionalString (isClang
+                      && targetPlatform.isLinux
+                      && !(stdenv.targetPlatform.useAndroidPrebuilt or false)
+                      && !(stdenv.targetPlatform.useLLVM or false)
+                      && gccForLibs != null) ''
+      echo "--gcc-toolchain=${gccForLibs}" >> $out/nix-support/cc-cflags
+    ''
+
+    ##
+    ## General libc support
+    ##
+
+    # The "-B${libc_lib}/lib/" flag is a quick hack to force gcc to link
+    # against the crt1.o from our own glibc, rather than the one in
+    # /usr/lib.  (This is only an issue when using an `impure'
+    # compiler/linker, i.e., one that searches /usr/lib and so on.)
+    #
+    # Unfortunately, setting -B appears to override the default search
+    # path. Thus, the gcc-specific "../includes-fixed" directory is
+    # no longer searched and glibc's <limits.h> header fails to
+    # compile, because it uses "#include_next <limits.h>" to find the
+    # limits.h file in ../includes-fixed. To remedy the problem,
+    # another -idirafter is necessary to add that directory again.
+    + optionalString (libc != null) (''
+      touch "$out/nix-support/libc-cflags"
+      touch "$out/nix-support/libc-ldflags"
+      echo "-B${libc_lib}${libc.libdir or "/lib/"}" >> $out/nix-support/libc-crt1-cflags
+    '' + optionalString (!(cc.langD or false)) ''
+      echo "-idirafter ${libc_dev}${libc.incdir or "/include"}" >> $out/nix-support/libc-cflags
+    '' + optionalString (isGNU && (!(cc.langD or false))) ''
+      for dir in "${cc}"/lib/gcc/*/*/include-fixed; do
+        echo '-idirafter' ''${dir} >> $out/nix-support/libc-cflags
+      done
+    '' + ''
+
+      echo "${libc_lib}" > $out/nix-support/orig-libc
+      echo "${libc_dev}" > $out/nix-support/orig-libc-dev
+    '')
+
+    ##
+    ## General libc++ support
+    ##
+
+    # We have a libc++ directly, we have one via "smuggled" GCC, or we have one
+    # bundled with the C compiler because it is GCC
+    + optionalString (libcxx != null || (useGccForLibs && gccForLibs.langCC or false) || (isGNU && cc.langCC or false)) ''
+      touch "$out/nix-support/libcxx-cxxflags"
+      touch "$out/nix-support/libcxx-ldflags"
+    ''
+    + optionalString (libcxx == null && (useGccForLibs && gccForLibs.langCC or false)) ''
+      for dir in ${gccForLibs}/include/c++/*; do
+        echo "-isystem $dir" >> $out/nix-support/libcxx-cxxflags
+      done
+      for dir in ${gccForLibs}/include/c++/*/${targetPlatform.config}; do
+        echo "-isystem $dir" >> $out/nix-support/libcxx-cxxflags
+      done
+    ''
+    + optionalString (libcxx.isLLVM or false) (''
+      echo "-isystem ${lib.getDev libcxx}/include/c++/v1" >> $out/nix-support/libcxx-cxxflags
+      echo "-stdlib=libc++" >> $out/nix-support/libcxx-ldflags
+    '' + lib.optionalString stdenv.targetPlatform.isLinux ''
+      echo "-lc++abi" >> $out/nix-support/libcxx-ldflags
+    '')
+
+    ##
+    ## Initial CFLAGS
+    ##
+
+    # GCC shows ${cc_solib}/lib in `gcc -print-search-dirs', but not
+    # ${cc_solib}/lib64 (even though it does actually search there...).
+    # This confuses libtool.  So add it to the compiler tool search
+    # path explicitly.
+    + optionalString (!nativeTools) ''
+      if [ -e "${cc_solib}/lib64" -a ! -L "${cc_solib}/lib64" ]; then
+        ccLDFlags+=" -L${cc_solib}/lib64"
+        ccCFlags+=" -B${cc_solib}/lib64"
+      fi
+      ccLDFlags+=" -L${cc_solib}/lib"
+      ccCFlags+=" -B${cc_solib}/lib"
+
+    '' + optionalString cc.langAda or false ''
+      touch "$out/nix-support/gnat-cflags"
+      touch "$out/nix-support/gnat-ldflags"
+      basePath=$(echo $cc/lib/*/*/*)
+      ccCFlags+=" -B$basePath -I$basePath/adainclude"
+      gnatCFlags="-I$basePath/adainclude -I$basePath/adalib"
+
+      echo "$gnatCFlags" >> $out/nix-support/gnat-cflags
+    '' + ''
+      echo "$ccLDFlags" >> $out/nix-support/cc-ldflags
+      echo "$ccCFlags" >> $out/nix-support/cc-cflags
+    '' + optionalString (targetPlatform.isDarwin && (libcxx != null) && (cc.isClang or false)) ''
+      echo " -L${lib.getLib libcxx}/lib" >> $out/nix-support/cc-ldflags
+    ''
+
+    ##
+    ## Man page and info support
+    ##
+    + optionalString propagateDoc ''
+      ln -s ${cc.man} $man
+      ln -s ${cc.info} $info
+    '' + optionalString (cc.langD or false) ''
+      echo "-B${zlib}${zlib.libdir or "/lib/"}" >> $out/nix-support/libc-cflags
+    ''
+
+    ##
+    ## Hardening support
+    ##
+    + ''
+      export hardening_unsupported_flags="${builtins.concatStringsSep " " (cc.hardeningUnsupportedFlags or [])}"
+    ''
+
+    # Machine flags. These are necessary to support the machine
+    # configuration of the target platform (-march, -mcpu, and friends).
+
+    # TODO: We should make a way to support miscellaneous machine
+    # flags and other gcc flags as well.
+
+    # Always add -march based on cpu in triple. Sometimes there is a
+    # discrepancy (x86_64 vs. x86-64), so we provide an "arch" arg in
+    # that case.
+    # TODO: aarch64-darwin has mcpu incompatible with gcc
+    + optionalString ((targetPlatform ? gcc.arch) && (isClang || !(stdenv.isDarwin && stdenv.isAarch64)) &&
+                      isGccArchSupported targetPlatform.gcc.arch) ''
+      echo "-march=${targetPlatform.gcc.arch}" >> $out/nix-support/cc-cflags-before
+    ''
+
+    # -mcpu is not very useful; -mtune and -march should be used
+    # instead. It’s provided here for backwards compatibility.
+    # TODO: aarch64-darwin has mcpu incompatible with gcc
+    + optionalString ((targetPlatform ? gcc.cpu) && (isClang || !(stdenv.isDarwin && stdenv.isAarch64))) ''
+      echo "-mcpu=${targetPlatform.gcc.cpu}" >> $out/nix-support/cc-cflags-before
+    ''
+
+    # -mfloat-abi only matters on arm32, but we set it here
+    # unconditionally just in case. If the ABI specifically selects hard
+    # vs. soft floats, we pass that choice along here.
+    + optionalString (targetPlatform ? gcc.float-abi) ''
+      echo "-mfloat-abi=${targetPlatform.gcc.float-abi}" >> $out/nix-support/cc-cflags-before
+    ''
+    + optionalString (targetPlatform ? gcc.fpu) ''
+      echo "-mfpu=${targetPlatform.gcc.fpu}" >> $out/nix-support/cc-cflags-before
+    ''
+    + optionalString (targetPlatform ? gcc.mode) ''
+      echo "-mmode=${targetPlatform.gcc.mode}" >> $out/nix-support/cc-cflags-before
+    ''
+    + optionalString (targetPlatform ? gcc.tune &&
+                      isGccArchSupported targetPlatform.gcc.tune) ''
+      echo "-mtune=${targetPlatform.gcc.tune}" >> $out/nix-support/cc-cflags-before
+    ''
+
+    # TODO: categorize these and figure out a better place for them
+    + optionalString hostPlatform.isCygwin ''
+      hardening_unsupported_flags+=" pic"
+    '' + optionalString targetPlatform.isMinGW ''
+      hardening_unsupported_flags+=" stackprotector"
+    '' + optionalString targetPlatform.isAvr ''
+      hardening_unsupported_flags+=" stackprotector pic"
+    '' + optionalString (targetPlatform.libc == "newlib") ''
+      hardening_unsupported_flags+=" stackprotector fortify pie pic"
+    '' + optionalString (targetPlatform.libc == "musl" && targetPlatform.isx86_32) ''
+      hardening_unsupported_flags+=" stackprotector"
+    '' + optionalString targetPlatform.isNetBSD ''
+      hardening_unsupported_flags+=" stackprotector fortify"
+    '' + optionalString cc.langAda or false ''
+      hardening_unsupported_flags+=" format stackprotector strictoverflow"
+    '' + optionalString cc.langD or false ''
+      hardening_unsupported_flags+=" format"
+    '' + optionalString targetPlatform.isWasm ''
+      hardening_unsupported_flags+=" stackprotector fortify pie pic"
+    ''
+
+    + optionalString (libc != null && targetPlatform.isAvr) ''
+      for isa in avr5 avr3 avr4 avr6 avr25 avr31 avr35 avr51 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 tiny-stack; do
+        echo "-B${getLib libc}/avr/lib/$isa" >> $out/nix-support/libc-crt1-cflags
+      done
+    ''
+
+    + optionalString stdenv.targetPlatform.isDarwin ''
+        echo "-arch ${targetPlatform.darwinArch}" >> $out/nix-support/cc-cflags
+    ''
+
+    + optionalString targetPlatform.isAndroid ''
+      echo "-D__ANDROID_API__=${targetPlatform.sdkVer}" >> $out/nix-support/cc-cflags
+    ''
+
+    # There are a few tools (libstdcxx5, to name one) which do not work
+    # well with multi-line flags, so make the flags single-line again.
+    + ''
+      for flags in "$out/nix-support"/*flags*; do
+        substituteInPlace "$flags" --replace $'\n' ' '
+      done
+
+      substituteAll ${./add-flags.sh} $out/nix-support/add-flags.sh
+      substituteAll ${./add-hardening.sh} $out/nix-support/add-hardening.sh
+      substituteAll ${../wrapper-common/utils.bash} $out/nix-support/utils.bash
+    ''
+
+    ##
+    ## Extra custom steps
+    ##
+    + extraBuildCommands;
+
+  inherit expand-response-params;
+
+  # for substitution in utils.bash
+  expandResponseParams = "${expand-response-params}/bin/expand-response-params";
+
+  meta =
+    let cc_ = if cc != null then cc else {}; in
+    (if cc_ ? meta then removeAttrs cc.meta ["priority"] else {}) //
+    { description =
+        lib.attrByPath ["meta" "description"] "System C compiler" cc_
+        + " (wrapper script)";
+      priority = 10;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/fortran-hook.sh b/nixpkgs/pkgs/build-support/cc-wrapper/fortran-hook.sh
new file mode 100644
index 000000000000..d72f314c01ce
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/cc-wrapper/fortran-hook.sh
@@ -0,0 +1,11 @@
+getTargetRole
+getTargetRoleWrapper
+
+export FC${role_post}=@named_fc@
+
+# If unset, assume the default hardening flags.
+# These are different for fortran.
+: ${NIX_HARDENING_ENABLE="stackprotector pic strictoverflow relro bindnow"}
+export NIX_HARDENING_ENABLE
+
+unset -v role_post
diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/gnat-wrapper.sh b/nixpkgs/pkgs/build-support/cc-wrapper/gnat-wrapper.sh
new file mode 100644
index 000000000000..5714b228c595
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/cc-wrapper/gnat-wrapper.sh
@@ -0,0 +1,167 @@
+#! @shell@
+set -eu -o pipefail +o posix
+shopt -s nullglob
+
+if (( "${NIX_DEBUG:-0}" >= 7 )); then
+    set -x
+fi
+
+path_backup="$PATH"
+
+# That @-vars are substituted separately from bash evaluation makes
+# shellcheck think this, and others like it, are useless conditionals.
+# shellcheck disable=SC2157
+if [[ -n "@coreutils_bin@" && -n "@gnugrep_bin@" ]]; then
+    PATH="@coreutils_bin@/bin:@gnugrep_bin@/bin"
+fi
+
+cInclude=0
+
+source @out@/nix-support/utils.bash
+
+# Flirting with a layer violation here.
+if [ -z "${NIX_BINTOOLS_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then
+    source @bintools@/nix-support/add-flags.sh
+fi
+
+# Put this one second so libc ldflags take priority.
+if [ -z "${NIX_CC_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then
+    source @out@/nix-support/add-flags.sh
+fi
+
+
+# Parse command line options and set several variables.
+# For instance, figure out if linker flags should be passed.
+# GCC prints annoying warnings when they are not needed.
+dontLink=0
+nonFlagArgs=0
+# shellcheck disable=SC2193
+
+expandResponseParams "$@"
+declare -i n=0
+nParams=${#params[@]}
+while (( "$n" < "$nParams" )); do
+    p=${params[n]}
+    p2=${params[n+1]:-} # handle `p` being last one
+    if [ "$p" = -c ]; then
+        dontLink=1
+    elif [ "$p" = -S ]; then
+        dontLink=1
+    elif [ "$p" = -E ]; then
+        dontLink=1
+    elif [ "$p" = -E ]; then
+        dontLink=1
+    elif [ "$p" = -M ]; then
+        dontLink=1
+    elif [ "$p" = -MM ]; then
+        dontLink=1
+    elif [[ "$p" = -x && "$p2" = *-header ]]; then
+        dontLink=1
+    elif [[ "$p" != -?* ]]; then
+        # A dash alone signifies standard input; it is not a flag
+        nonFlagArgs=1
+    fi
+    n+=1
+done
+
+# If we pass a flag like -Wl, then gcc will call the linker unless it
+# can figure out that it has to do something else (e.g., because of a
+# "-c" flag).  So if no non-flag arguments are given, don't pass any
+# linker flags.  This catches cases like "gcc" (should just print
+# "gcc: no input files") and "gcc -v" (should print the version).
+if [ "$nonFlagArgs" = 0 ]; then
+    dontLink=1
+fi
+
+# Optionally filter out paths not referring to the store.
+if [[ "${NIX_ENFORCE_PURITY:-}" = 1 && -n "$NIX_STORE" ]]; then
+    rest=()
+    nParams=${#params[@]}
+    declare -i n=0
+    while (( "$n" < "$nParams" )); do
+        p=${params[n]}
+        p2=${params[n+1]:-} # handle `p` being last one
+        if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then
+            skip "${p:2}"
+        elif [ "$p" = -L ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "${p:0:3}" = -I/ ] && badPath "${p:2}"; then
+            skip "${p:2}"
+        elif [ "$p" = -I ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "${p:0:4}" = -aI/ ] && badPath "${p:3}"; then
+            skip "${p:3}"
+        elif [ "$p" = -aI ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "${p:0:4}" = -aO/ ] && badPath "${p:3}"; then
+            skip "${p:3}"
+        elif [ "$p" = -aO ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        elif [ "$p" = -isystem ] && badPath "$p2"; then
+            n+=1; skip "$p2"
+        else
+            rest+=("$p")
+        fi
+        n+=1
+    done
+    # Old bash empty array hack
+    params=(${rest+"${rest[@]}"})
+fi
+
+
+# Clear march/mtune=native -- they bring impurity.
+if [ "$NIX_ENFORCE_NO_NATIVE_@suffixSalt@" = 1 ]; then
+    rest=()
+    # Old bash empty array hack
+    for p in ${params+"${params[@]}"}; do
+        if [[ "$p" = -m*=native ]]; then
+            skip "$p"
+        else
+            rest+=("$p")
+        fi
+    done
+    # Old bash empty array hack
+    params=(${rest+"${rest[@]}"})
+fi
+
+if [ "$(basename $0)x" = "gnatmakex" ]; then
+    extraBefore=("--GNATBIND=@out@/bin/gnatbind" "--GNATLINK=@out@/bin/gnatlink")
+    extraAfter=($NIX_GNATFLAGS_COMPILE_@suffixSalt@)
+fi
+
+if [ "$(basename $0)x" = "gnatbindx" ]; then
+    extraBefore=()
+    extraAfter=($NIX_GNATFLAGS_COMPILE_@suffixSalt@)
+fi
+
+if [ "$(basename $0)x" = "gnatlinkx" ]; then
+    extraBefore=()
+    extraAfter=("--GCC=@out@/bin/gcc")
+fi
+
+# As a very special hack, if the arguments are just `-v', then don't
+# add anything.  This is to prevent `gcc -v' (which normally prints
+# out the version number and returns exit code 0) from printing out
+# `No input files specified' and returning exit code 1.
+if [ "$*" = -v ]; then
+    extraAfter=()
+    extraBefore=()
+fi
+
+# Optionally print debug info.
+if (( "${NIX_DEBUG:-0}" >= 1 )); then
+    # Old bash workaround, see ld-wrapper for explanation.
+    echo "extra flags before to @prog@:" >&2
+    printf "  %q\n" ${extraBefore+"${extraBefore[@]}"}  >&2
+    echo "original flags to @prog@:" >&2
+    printf "  %q\n" ${params+"${params[@]}"} >&2
+    echo "extra flags after to @prog@:" >&2
+    printf "  %q\n" ${extraAfter+"${extraAfter[@]}"} >&2
+fi
+
+PATH="$path_backup"
+# Old bash workaround, see above.
+exec @prog@ \
+    ${extraBefore+"${extraBefore[@]}"} \
+    ${params+"${params[@]}"} \
+    ${extraAfter+"${extraAfter[@]}"}
diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/setup-hook.sh b/nixpkgs/pkgs/build-support/cc-wrapper/setup-hook.sh
new file mode 100644
index 000000000000..6a913cc4eac7
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/cc-wrapper/setup-hook.sh
@@ -0,0 +1,120 @@
+# CC Wrapper hygiene
+#
+# For at least cross compilation, we need to depend on multiple cc-wrappers at
+# once---specifically up to one per sort of dependency. This follows from having
+# different tools targeting different platforms, and different flags for those
+# tools. For example:
+#
+#   # Flags for compiling (whether or not linking) C code for the...
+#   NIX_CFLAGS_COMPILE_FOR_BUILD  # ...build platform
+#   NIX_CFLAGS_COMPILE            # ...host platform
+#   NIX_CFLAGS_COMPILE_FOR_TARGET # ...target platform
+#
+# Notice that these are the three platforms *relative* to the package using
+# cc-wrapper, not absolute platforms like `x86_64-pc-linux-gnu`.
+#
+# The simplest solution would be to have separate cc-wrappers per (3 intended
+# use-cases * n absolute concrete platforms). For the use-case axis, we would
+# @-splice in 'BUILD_' '' 'TARGET_' to use the right environment variables when
+# building the cc-wrapper, and likewise prefix the binaries' names so they wouldn't
+# clobber each other on the PATH. But the need for 3x cc-wrappers, along with
+# non-standard name prefixes, is annoying and liable to break packages' build
+# systems.
+#
+# Instead, we opt to have just one cc-wrapper per absolute platform. Matching
+# convention, the binaries' names can just be prefixed with their target
+# platform. On the other hand, that means packages will depend on not just
+# multiple cc-wrappers, but the exact same cc-wrapper derivation multiple ways.
+# That means the exact same cc-wrapper derivation must be able to avoid
+# conflicting with itself, despite the fact that `setup-hook.sh`, the `addCvars`
+# function, and `add-flags.sh` are all communicating with each other with
+# environment variables. Yuck.
+#
+# The basic strategy is:
+#
+#  - Everyone exclusively *adds information* to relative-platform-specific
+#    environment variables, like `NIX_CFLAGS_COMPILE_FOR_TARGET`, to communicate
+#    with the wrapped binaries.
+#
+#  - The wrapped binaries will exclusively *read* cc-wrapper-derivation-specific
+#    environment variables distinguished with `suffixSalt`, like
+#    `NIX_CFLAGS_COMPILE_@suffixSalt@`.
+#
+#  - `add-flags`, beyond its old task of reading extra flags stuck inside the
+#    cc-wrapper derivation, will convert the relative-platform-specific
+#    variables to cc-wrapper-derivation-specific variables. This conversion is
+#    the only time all but one of the cc-wrapper-derivation-specific variables
+#    are set.
+#
+# This ensures the flow of information is exclusive from
+# relative-platform-specific variables to cc-wrapper-derivation-specific
+# variables. This allows us to support the general case of a many--many relation
+# between relative platforms and cc-wrapper derivations.
+#
+# For more details, read the individual files where the mechanisms used to
+# accomplish this will be individually documented.
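+#
+# A sketch of the conversion described above (variable names assumed;
+# <suffixSalt> stands for the salt of the matching cc-wrapper derivation):
+#
+#   NIX_CFLAGS_COMPILE_FOR_TARGET="-O2"    # written by env hooks (relative)
+#   # ...which add-flags.sh turns, for that one wrapper only, into:
+#   NIX_CFLAGS_COMPILE_<suffixSalt>="-O2"  # read by the wrapped binaries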
+
+# Skip setup hook if we're neither a build-time dep, nor, temporarily, doing a
+# native compile.
+#
+# TODO(@Ericson2314): No native exception
+[[ -z ${strictDeps-} ]] || (( "$hostOffset" < 0 )) || return 0
+
+# It's fine that any other cc-wrapper will redefine this. Bash functions close
+# over no state, and there's no @-substitutions within, so any redefined
+# function is guaranteed to be exactly the same.
+ccWrapper_addCVars () {
+    # See ../setup-hooks/role.bash
+    local role_post
+    getHostRoleEnvHook
+
+    if [ -d "$1/include" ]; then
+        export NIX_CFLAGS_COMPILE${role_post}+=" -isystem $1/include"
+    fi
+
+    if [ -d "$1/Library/Frameworks" ]; then
+        export NIX_CFLAGS_COMPILE${role_post}+=" -iframework $1/Library/Frameworks"
+    fi
+}
+
+# See ../setup-hooks/role.bash
+getTargetRole
+getTargetRoleWrapper
+
+# We use the `targetOffset` to choose the right env hook to accumulate the right
+# sort of deps (those with that offset).
+addEnvHooks "$targetOffset" ccWrapper_addCVars
+
+# Note 1: these come *after* $out in the PATH (see setup.sh).
+# Note 2: phase separation makes this look useless to shellcheck.
+
+# shellcheck disable=SC2157
+if [ -n "@cc@" ]; then
+    addToSearchPath _PATH @cc@/bin
+fi
+
+# shellcheck disable=SC2157
+if [ -n "@libc_bin@" ]; then
+    addToSearchPath _PATH @libc_bin@/bin
+fi
+
+# shellcheck disable=SC2157
+if [ -n "@coreutils_bin@" ]; then
+    addToSearchPath _PATH @coreutils_bin@/bin
+fi
+
+# Export tool environment variables so various build systems use the right ones.
+
+export NIX_CC${role_post}=@out@
+
+export CC${role_post}=@named_cc@
+export CXX${role_post}=@named_cxx@
+
+# If unset, assume the default hardening flags.
+: ${NIX_HARDENING_ENABLE="fortify stackprotector pic strictoverflow format relro bindnow"}
+export NIX_HARDENING_ENABLE
+
+# No local scope in sourced file
+unset -v role_post
diff --git a/nixpkgs/pkgs/build-support/closure-info.nix b/nixpkgs/pkgs/build-support/closure-info.nix
new file mode 100644
index 000000000000..6b3ff6fd62b0
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/closure-info.nix
@@ -0,0 +1,36 @@
+# This derivation builds two files containing information about the
+# closure of 'rootPaths': $out/store-paths contains the paths in the
+# closure, and $out/registration contains a file suitable for use with
+# "nix-store --load-db" and "nix-store --register-validity
+# --hash-given".
+
+{ stdenv, buildPackages }:
+
+{ rootPaths }:
+
+assert builtins.langVersion >= 5;
+
+stdenv.mkDerivation {
+  name = "closure-info";
+
+  __structuredAttrs = true;
+
+  exportReferencesGraph.closure = rootPaths;
+
+  preferLocalBuild = true;
+
+  PATH = "${buildPackages.coreutils}/bin:${buildPackages.jq}/bin";
+
+  builder = builtins.toFile "builder"
+    ''
+      . .attrs.sh
+
+      out=''${outputs[out]}
+
+      mkdir $out
+
+      jq -r ".closure | map(.narSize) | add" < .attrs.json > $out/total-nar-size
+      jq -r '.closure | map([.path, .narHash, .narSize, "", (.references | length)] + .references) | add | map("\(.)\n") | add' < .attrs.json | head -n -1 > $out/registration
+      jq -r .closure[].path < .attrs.json > $out/store-paths
+    '';
+}
diff --git a/nixpkgs/pkgs/build-support/coq/default.nix b/nixpkgs/pkgs/build-support/coq/default.nix
new file mode 100644
index 000000000000..ba300f2f8cf5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/coq/default.nix
@@ -0,0 +1,109 @@
+{ lib, stdenv, coqPackages, coq, fetchzip }@args:
+let lib = import ./extra-lib.nix {inherit (args) lib;}; in
+with builtins; with lib;
+let
+  isGitHubDomain = d: match "^github.*" d != null;
+  isGitLabDomain = d: match "^gitlab.*" d != null;
+in
+{ pname,
+  version ? null,
+  fetcher ? null,
+  owner ? "coq-community",
+  domain ? "github.com",
+  repo ? pname,
+  defaultVersion ? null,
+  releaseRev ? (v: v),
+  displayVersion ? {},
+  release ? {},
+  extraBuildInputs ? [],
+  namePrefix ? [],
+  enableParallelBuilding ? true,
+  extraInstallFlags ? [],
+  setCOQBIN ? true,
+  mlPlugin ? false,
+  useMelquiondRemake ? null,
+  dropAttrs ? [],
+  keepAttrs ? [],
+  dropDerivationAttrs ? [],
+  useDune2ifVersion ? (x: false),
+  useDune2 ? false,
+  ...
+}@args:
+let
+  args-to-remove = foldl (flip remove) ([
+    "version" "fetcher" "repo" "owner" "domain" "releaseRev"
+    "displayVersion" "defaultVersion" "useMelquiondRemake"
+    "release" "extraBuildInputs" "extraPropagatedBuildInputs" "namePrefix"
+    "meta" "useDune2ifVersion" "useDune2"
+    "extraInstallFlags" "setCOQBIN" "mlPlugin"
+    "dropAttrs" "dropDerivationAttrs" "keepAttrs" ] ++ dropAttrs) keepAttrs;
+  fetch = import ../coq/meta-fetch/default.nix
+    { inherit lib stdenv fetchzip; } ({
+      inherit release releaseRev;
+      location = { inherit domain owner repo; };
+    } // optionalAttrs (args?fetcher) {inherit fetcher;});
+  fetched = fetch (if !isNull version then version else defaultVersion);
+  namePrefix = args.namePrefix or [ "coq" ];
+  display-pkg = n: sep: v:
+    let d = displayVersion.${n} or (if sep == "" then ".." else true); in
+    n + optionalString (v != "" && v != null) (switch d [
+      { case = true;       out = sep + v; }
+      { case = ".";        out = sep + versions.major v; }
+      { case = "..";       out = sep + versions.majorMinor v; }
+      { case = "...";      out = sep + versions.majorMinorPatch v; }
+      { case = isFunction; out = optionalString (d v != "") (sep + d v); }
+      { case = isString;   out = optionalString (d != "") (sep + d); }
+    ] "") + optionalString (v == null) "-broken";
+  append-version = p: n: p + display-pkg n "" coqPackages.${n}.version + "-";
+  prefix-name = foldl append-version "" namePrefix;
+  var-coqlib-install = (optionalString (versions.isGe "8.7" coq.coq-version) "COQMF_") + "COQLIB";
+  useDune2 = args.useDune2 or (useDune2ifVersion fetched.version);
+in
+
+stdenv.mkDerivation (removeAttrs ({
+
+  name = prefix-name + (display-pkg pname "-" fetched.version);
+
+  inherit (fetched) version src;
+
+  buildInputs = [ coq ]
+    ++ optionals mlPlugin coq.ocamlBuildInputs
+    ++ optionals useDune2 [coq.ocaml coq.ocamlPackages.dune_2]
+    ++ extraBuildInputs;
+  inherit enableParallelBuilding;
+
+  meta = ({ platforms = coq.meta.platforms; } //
+    (switch domain [{
+        case = pred.union isGitHubDomain isGitLabDomain;
+        out = { homepage = "https://${domain}/${owner}/${repo}"; };
+      }] {}) //
+    optionalAttrs (fetched.broken or false) { coqFilter = true; broken = true; }) //
+    (args.meta or {}) ;
+
+}
+// (optionalAttrs setCOQBIN { COQBIN = "${coq}/bin/"; })
+// (optionalAttrs (!args?installPhase && !args?useMelquiondRemake) {
+  installFlags =
+    [ "${var-coqlib-install}=$(out)/lib/coq/${coq.coq-version}/" ] ++
+    optional (match ".*doc$" (args.installTargets or "") != null)
+      "DOCDIR=$(out)/share/coq/${coq.coq-version}/" ++
+    extraInstallFlags;
+})
+// (optionalAttrs useDune2 {
+  installPhase = ''
+    runHook preInstall
+    dune install --prefix=$out
+    mv $out/lib/coq $out/lib/TEMPORARY
+    mkdir $out/lib/coq/
+    mv $out/lib/TEMPORARY $out/lib/coq/${coq.coq-version}
+    runHook postInstall
+  '';
+})
+// (optionalAttrs (args?useMelquiondRemake) rec {
+  COQUSERCONTRIB = "$out/lib/coq/${coq.coq-version}/user-contrib";
+  preConfigurePhases = "autoconf";
+  configureFlags = [ "--libdir=${COQUSERCONTRIB}/${useMelquiondRemake.logpath or ""}" ];
+  buildPhase = "./remake -j$NIX_BUILD_CORES";
+  installPhase = "./remake install";
+})
+// (removeAttrs args args-to-remove)) dropDerivationAttrs)
diff --git a/nixpkgs/pkgs/build-support/coq/extra-lib.nix b/nixpkgs/pkgs/build-support/coq/extra-lib.nix
new file mode 100644
index 000000000000..65b48f511267
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/coq/extra-lib.nix
@@ -0,0 +1,145 @@
+{ lib }:
+with builtins; with lib; recursiveUpdate lib (rec {
+
+  versions =
+    let
+      truncate = n: v: concatStringsSep "." (take n (splitVersion v));
+      opTruncate = op: v0: v: let n = length (splitVersion v0); in
+         op (truncate n v) (truncate n v0);
+    in rec {
+
+    /* Get string of the first n parts of a version string.
+
+       Example:
+       - truncate 2 "1.2.3-stuff"
+         => "1.2"
+
+       - truncate 4 "1.2.3-stuff"
+         => "1.2.3.stuff"
+    */
+
+    inherit truncate;
+
+    /* Get string of the first three parts (major, minor and patch)
+       of a version string.
+
+       Example:
+         majorMinorPatch "1.2.3-stuff"
+         => "1.2.3"
+    */
+    majorMinorPatch = truncate 3;
+
+    /* Version comparison predicates,
+      - isGe v0 v <-> v is greater than or equal to v0 [*]
+      - isLe v0 v <-> v is less than or equal to v0    [*]
+      - isGt v0 v <-> v is strictly greater than v0    [*]
+      - isLt v0 v <-> v is strictly less than v0       [*]
+      - isEq v0 v <-> v is equal to v0                [*]
+      - range low high v <-> v is between low and high [**]
+
+    [*]  truncating v to the same number of digits as v0
+    [**] truncating v to low for the lower bound and high for the upper bound
+
+      Examples:
+      - isGe "8.10" "8.10.1"
+        => true
+      - isLe "8.10" "8.10.1"
+        => true
+      - isGt "8.10" "8.10.1"
+        => false
+      - isGt "8.10.0" "8.10.1"
+        => true
+      - isEq "8.10" "8.10.1"
+        => true
+      - range "8.10" "8.11" "8.11.1"
+        => true
+      - range "8.10" "8.11+" "8.11.0"
+        => false
+      - range "8.10" "8.11+" "8.11+beta1"
+        => false
+
+    */
+    isGe = opTruncate versionAtLeast;
+    isGt = opTruncate (flip versionOlder);
+    isLe = opTruncate (flip versionAtLeast);
+    isLt = opTruncate versionOlder;
+    isEq = opTruncate pred.equal;
+    range = low: high: pred.inter (versions.isGe low) (versions.isLe high);
+  };
+
+  /* Returns a list of lists, splitting a list using a predicate.
+     This is analogous to builtins.split sep s,
+     with a predicate as the separator and a list instead of a string.
+
+    Type: splitList :: (a -> bool) -> [a] -> [[a]]
+
+    Example:
+      splitList (x: x == "x") [ "y" "x" "z" "t" ]
+      => [ [ "y" ] "x" [ "z" "t" ] ]
+  */
+  splitList = pred: l: # put in file lists
+    let loop = (vv: v: l: if l == [] then vv ++ [v]
+      else let hd = head l; tl = tail l; in
+      if pred hd then loop (vv ++ [ v hd ]) [] tl else loop vv (v ++ [hd]) tl);
+    in loop [] [] l;
+
+  pred = {
+    /* Predicate intersection, union, and complement */
+    inter = p: q: x: p x && q x;
+    union = p: q: x: p x || q x;
+    compl = p:    x: ! p x;
+    true  = p: true;
+    false = p: false;
+
+    /* predicate "being equal to y" */
+    equal = y:    x: x == y;
+  };
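+
+  /* Examples (evaluated against the definitions above):
+     pred.inter (versions.isGe "8.10") (versions.isLe "8.13") "8.11.2"
+     => true
+     pred.compl (pred.equal 1) 1
+     => false
+  */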
+
+  /* Emulate a "switch - case" construct,
+   instead of relying on `if then else if ...` */
+  /* Usage:
+  ```nix
+  switch-if [
+    if-clause-1
+    ..
+    if-clause-k
+  ] default-out
+  ```
+  where an if-clause has the form `{ cond = b; out = r; }`;
+  the first branch such that `b` is true is taken */
+
+  switch-if = c: d: (findFirst (getAttr "cond") {} c).out or d;
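+
+  /* Example (evaluated against the definition above):
+     switch-if [
+       { cond = false; out = 1; }
+       { cond = true;  out = 2; }
+     ] 0
+     => 2
+  */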
+
+  /* Usage:
+  ```nix
+  switch x [
+    simple-clause-1
+    ..
+    simple-clause-k
+  ] default-out
+  ```
+  where a simple-clause has the form `{ case = p; out = r; }`;
+  the first branch such that `p x` is true is taken,
+  or
+  ```nix
+  switch [ x1 .. xn ] [
+    complex-clause-1
+    ..
+    complex-clause-k
+  ] default-out
+  ```
+  where a complex-clause is either a simple-clause
+  or has the form { cases = [ p1 .. pn ]; out = r; },
+  in which case the first branch such that every `pi xi` is true is taken
+
+  if the values p are not functions,
+  they are converted to `equal p`;
+  if out is missing, the default-out is taken */
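+  /* Examples (evaluated against the definition below):
+     switch "8.10.2" [
+       { case = versions.isGe "8.10"; out = "ge"; }
+       { case = isString;             out = "str"; }
+     ] "default"
+     => "ge"
+     switch 5 [ { case = 3; out = "three"; } ] "other"
+     => "other"
+  */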
+
+  switch = var: clauses: default: with pred; let
+      compare = f:  if isFunction f then f else equal f;
+      combine = cl: var:
+        if cl?case then compare cl.case var
+        else all (equal true) (zipListsWith compare cl.cases var); in
+    switch-if (map (cl: { cond = combine cl var; inherit (cl) out; }) clauses) default;
+})
diff --git a/nixpkgs/pkgs/build-support/coq/meta-fetch/default.nix b/nixpkgs/pkgs/build-support/coq/meta-fetch/default.nix
new file mode 100644
index 000000000000..e7b15af4f06e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/coq/meta-fetch/default.nix
@@ -0,0 +1,70 @@
+{ lib, stdenv, fetchzip }@args:
+let lib' = lib; in
+let lib = import ../extra-lib.nix {lib = lib';}; in
+with builtins; with lib;
+let
+  default-fetcher = {domain ? "github.com", owner ? "", repo, rev, name ? "source", sha256 ? null, ...}@args:
+    let ext = if args?sha256 then "zip" else "tar.gz";
+        fmt = if args?sha256 then "zip" else "tarball";
+        pr  = match "^#(.*)$" rev;
+        url = switch-if [
+          { cond = isNull pr && !isNull (match "^github.*" domain);
+            out = "https://${domain}/${owner}/${repo}/archive/${rev}.${ext}"; }
+          { cond = !isNull pr && !isNull (match "^github.*" domain);
+            out = "https://api.${domain}/repos/${owner}/${repo}/${fmt}/pull/${head pr}/head"; }
+          { cond = isNull pr && !isNull (match "^gitlab.*" domain);
+            out = "https://${domain}/${owner}/${repo}/-/archive/${rev}/${repo}-${rev}.${ext}"; }
+          { cond = !isNull (match "(www.)?mpi-sws.org" domain);
+            out = "https://www.mpi-sws.org/~${owner}/${repo}/download/${repo}-${rev}.${ext}";}
+        ] (throw "meta-fetch: no fetcher found for domain ${domain} on ${rev}");
+        fetch = x: if args?sha256 then fetchzip (x // { inherit sha256; }) else fetchTarball x;
+    in fetch { inherit url ; };
+in
+{
+  fetcher ? default-fetcher,
+  location,
+  release ? {},
+  releaseRev ? (v: v),
+}:
+let isVersion      = x: isString x && match "^/.*" x == null && release?${x};
+    shortVersion   = x: if (isString x && match "^/.*" x == null)
+      then findFirst (v: versions.majorMinor v == x) null
+        (sort versionAtLeast (attrNames release))
+      else null;
+    isShortVersion = x: shortVersion x != null;
+    isPathString   = x: isString x && match "^/.*" x != null && pathExists x; in
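+# A sketch of the `arg` forms dispatched below (the concrete values are
+# illustrative):
+#   null                  -> marked broken
+#   "1.2.0"               -> a released version, looked up in `release`
+#   "1.2"                 -> a short version, resolved against `release`
+#   "/home/me/dev"        -> an existing local path, version "dev"
+#   "#97" or "owner:rev"  -> a pull request or git revision, version "dev"
+#   a path or attrset     -> used as (or to fetch) the source directly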
+arg:
+switch arg [
+  { case = isNull;       out = { version = "broken"; src = ""; broken = true; }; }
+  { case = isPathString; out = { version = "dev"; src = arg; }; }
+  { case = pred.union isVersion isShortVersion;
+    out = let v = if isVersion arg then arg else shortVersion arg; in
+      let
+        given-sha256 = release.${v}.sha256 or "";
+        sha256 = if given-sha256 == "" then lib.fakeSha256 else given-sha256;
+        rv = release.${v} // { inherit sha256; }; in
+      {
+        version = rv.version or v;
+        src = rv.src or fetcher (location // { rev = releaseRev v; } // rv);
+      };
+    }
+  { case = isString;
+    out = let
+        splitted  = filter isString (split ":" arg);
+        rev       = last splitted;
+        has-owner = length splitted > 1;
+        version   = "dev"; in {
+      inherit version;
+      src = fetcher (location // { inherit rev; } //
+        (optionalAttrs has-owner { owner = head splitted; }));
+    }; }
+  { case = isAttrs;
+    out = {
+      version = arg.version or "dev";
+      src = (arg.fetcher or fetcher) (location // (arg.location or {}));
+    }; }
+  { case = isPath;
+    out = {
+      version = "dev" ;
+      src = builtins.path {path = arg; name = location.name or "source";}; }; }
+] (throw "not a valid source description")
diff --git a/nixpkgs/pkgs/build-support/dhall-to-nix.nix b/nixpkgs/pkgs/build-support/dhall-to-nix.nix
new file mode 100644
index 000000000000..3805656dfa0e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dhall-to-nix.nix
@@ -0,0 +1,38 @@
+/* `dhallToNix` is a utility function to convert expressions in the Dhall
+    configuration language to their corresponding Nix expressions.
+
+    Example:
+      dhallToNix "{ foo = 1, bar = True }"
+      => { foo = 1; bar = true; }
+      dhallToNix "λ(x : Bool) → x == False"
+      => x : x == false
+      dhallToNix "λ(x : Bool) → x == False" false
+      => true
+
+    See https://hackage.haskell.org/package/dhall-nix/docs/Dhall-Nix.html for
+    a longer tutorial
+
+    Note that this uses "import from derivation", meaning that Nix will perform
+    a build during the evaluation phase if you use this `dhallToNix` utility.
+*/
+{ stdenv, dhall-nix }:
+
+let
+  dhallToNix = code :
+    let
+      file = builtins.toFile "dhall-expression" code;
+
+      drv = stdenv.mkDerivation {
+        name = "dhall-compiled.nix";
+
+        buildCommand = ''
+          dhall-to-nix <<< "${file}" > $out
+        '';
+
+        buildInputs = [ dhall-nix ];
+      };
+
+    in
+      import drv;
+in
+  dhallToNix
diff --git a/nixpkgs/pkgs/build-support/docker/default.nix b/nixpkgs/pkgs/build-support/docker/default.nix
new file mode 100644
index 000000000000..9369e7d3158f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/docker/default.nix
@@ -0,0 +1,965 @@
+{
+  bashInteractive,
+  buildPackages,
+  cacert,
+  callPackage,
+  closureInfo,
+  coreutils,
+  docker,
+  e2fsprogs,
+  fakeroot,
+  findutils,
+  go,
+  jq,
+  jshon,
+  lib,
+  makeWrapper,
+  moreutils,
+  nix,
+  pigz,
+  rsync,
+  runCommand,
+  runtimeShell,
+  shadow,
+  skopeo,
+  storeDir ? builtins.storeDir,
+  substituteAll,
+  symlinkJoin,
+  util-linux,
+  vmTools,
+  writeReferencesToFile,
+  writeScript,
+  writeText,
+  writeTextDir,
+  writePython3,
+  system,  # Note: This is the cross system we're compiling for
+}:
+
+let
+
+  inherit (lib)
+    optionals
+    ;
+
+  mkDbExtraCommand = contents: let
+    contentsList = if builtins.isList contents then contents else [ contents ];
+  in ''
+    echo "Generating the nix database..."
+    echo "Warning: only the database of the deepest Nix layer is loaded."
+    echo "         If you want to use nix commands in the container, it would"
+    echo "         be better to only have one layer that contains a nix store."
+
+    export NIX_REMOTE=local?root=$PWD
+    # A user is required by nix
+    # https://github.com/NixOS/nix/blob/9348f9291e5d9e4ba3c4347ea1b235640f54fd79/src/libutil/util.cc#L478
+    export USER=nobody
+    ${buildPackages.nix}/bin/nix-store --load-db < ${closureInfo {rootPaths = contentsList;}}/registration
+
+    mkdir -p nix/var/nix/gcroots/docker/
+    for i in ${lib.concatStringsSep " " contentsList}; do
+      ln -s $i nix/var/nix/gcroots/docker/$(basename $i)
+    done;
+  '';
+
+  # The OCI Image specification recommends that configurations use values listed
+  # in the Go Language document for GOARCH.
+  # Reference: https://github.com/opencontainers/image-spec/blob/master/config.md#properties
+  # For the mapping from Nixpkgs system parameters to GOARCH, we can reuse the
+  # mapping from the go package.
+  defaultArch = go.GOARCH;
+
+in
+rec {
+
+  examples = callPackage ./examples.nix {
+    inherit buildImage buildLayeredImage fakeNss pullImage shadowSetup buildImageWithNixDb;
+  };
+
+  pullImage = let
+    fixName = name: builtins.replaceStrings ["/" ":"] ["-" "-"] name;
+  in
+    { imageName
+      # To find the digest of an image, you can use skopeo:
+      # see doc/functions.xml
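+      # e.g. (a sketch; network access and the image name are assumed):
+      #   skopeo inspect docker://docker.io/library/nginx:latest | jq -r .Digest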
+    , imageDigest
+    , sha256
+    , os ? "linux"
+    , arch ? defaultArch
+
+      # This is used to set the name of the pulled image
+    , finalImageName ? imageName
+      # This is used to set the tag of the pulled image
+    , finalImageTag ? "latest"
+      # This is used to disable TLS certificate verification, allowing access to http registries on (hopefully) trusted networks
+    , tlsVerify ? true
+
+    , name ? fixName "docker-image-${finalImageName}-${finalImageTag}.tar"
+    }:
+
+    runCommand name {
+      inherit imageDigest;
+      imageName = finalImageName;
+      imageTag = finalImageTag;
+      impureEnvVars = lib.fetchers.proxyImpureEnvVars;
+      outputHashMode = "flat";
+      outputHashAlgo = "sha256";
+      outputHash = sha256;
+
+      nativeBuildInputs = lib.singleton skopeo;
+      SSL_CERT_FILE = "${cacert.out}/etc/ssl/certs/ca-bundle.crt";
+
+      sourceURL = "docker://${imageName}@${imageDigest}";
+      destNameTag = "${finalImageName}:${finalImageTag}";
+    } ''
+      skopeo \
+        --src-tls-verify=${lib.boolToString tlsVerify} \
+        --insecure-policy \
+        --tmpdir=$TMPDIR \
+        --override-os ${os} \
+        --override-arch ${arch} \
+        copy "$sourceURL" "docker-archive://$out:$destNameTag"
+    '';
+
+  # We need to sum layer.tar, not a directory, hence tarsum instead of nix-hash.
+  # And we cannot untar it, because then we cannot preserve permissions etc.
+  tarsum = runCommand "tarsum" {
+    nativeBuildInputs = [ go ];
+  } ''
+    mkdir tarsum
+    cd tarsum
+
+    cp ${./tarsum.go} tarsum.go
+    export GOPATH=$(pwd)
+    export GOCACHE="$TMPDIR/go-cache"
+    mkdir -p src/github.com/docker/docker/pkg
+    ln -sT ${docker.moby-src}/pkg/tarsum src/github.com/docker/docker/pkg/tarsum
+    go build
+
+    mkdir -p $out/bin
+
+    cp tarsum $out/bin/
+  '';
+
+  # buildEnv creates symlinks to dirs, which is hard to edit inside the overlay VM
+  mergeDrvs = {
+    derivations,
+    onlyDeps ? false
+  }:
+    runCommand "merge-drvs" {
+      inherit derivations onlyDeps;
+    } ''
+      if [[ -n "$onlyDeps" ]]; then
+        echo $derivations > $out
+        exit 0
+      fi
+
+      mkdir $out
+      for derivation in $derivations; do
+        echo "Merging $derivation..."
+        if [[ -d "$derivation" ]]; then
+          # If it's a directory, copy all of its contents into $out.
+          cp -drf --preserve=mode -f $derivation/* $out/
+        else
+          # Otherwise treat the derivation as a tarball and extract it
+          # into $out.
+          tar -C $out -xpf $derivation || true
+        fi
+      done
+    '';
+
+  # Helper for setting up the base files for managing users and
+  # groups, only if such files don't exist already. It is suitable for
+  # being used in a runAsRoot script.
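+  #
+  # A usage sketch (the user and group names are illustrative):
+  #
+  #   buildImage {
+  #     name = "shadow-example";
+  #     runAsRoot = ''
+  #       ${shadowSetup}
+  #       groupadd -r web
+  #       useradd -r -g web web
+  #     '';
+  #   }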
+  shadowSetup = ''
+    export PATH=${shadow}/bin:$PATH
+    mkdir -p /etc/pam.d
+    if [[ ! -f /etc/passwd ]]; then
+      echo "root:x:0:0::/root:${runtimeShell}" > /etc/passwd
+      echo "root:!x:::::::" > /etc/shadow
+    fi
+    if [[ ! -f /etc/group ]]; then
+      echo "root:x:0:" > /etc/group
+      echo "root:x::" > /etc/gshadow
+    fi
+    if [[ ! -f /etc/pam.d/other ]]; then
+      cat > /etc/pam.d/other <<EOF
+    account sufficient pam_unix.so
+    auth sufficient pam_rootok.so
+    password requisite pam_unix.so nullok sha512
+    session required pam_unix.so
+    EOF
+    fi
+    if [[ ! -f /etc/login.defs ]]; then
+      touch /etc/login.defs
+    fi
+  '';
+
+  # Run commands in a virtual machine.
+  runWithOverlay = {
+    name,
+    fromImage ? null,
+    fromImageName ? null,
+    fromImageTag ? null,
+    diskSize ? 1024,
+    preMount ? "",
+    postMount ? "",
+    postUmount ? ""
+  }:
+  let
+    result = vmTools.runInLinuxVM (
+      runCommand name {
+        preVM = vmTools.createEmptyImage {
+          size = diskSize;
+          fullName = "docker-run-disk";
+        };
+        inherit fromImage fromImageName fromImageTag;
+
+        nativeBuildInputs = [ util-linux e2fsprogs jshon rsync jq ];
+      } ''
+      mkdir disk
+      mkfs /dev/${vmTools.hd}
+      mount /dev/${vmTools.hd} disk
+      cd disk
+
+      if [[ -n "$fromImage" ]]; then
+        echo "Unpacking base image..."
+        mkdir image
+        tar -C image -xpf "$fromImage"
+
+        if [[ -n "$fromImageName" ]] && [[ -n "$fromImageTag" ]]; then
+          parentID="$(
+            cat "image/manifest.json" |
+              jq -r '.[] | select(.RepoTags | contains([$desiredTag])) | .Config | rtrimstr(".json")' \
+                --arg desiredTag "$fromImageName:$fromImageTag"
+          )"
+        else
+          echo "From-image name or tag wasn't set. Reading the first ID."
+          parentID="$(cat "image/manifest.json" | jq -r '.[0].Config | rtrimstr(".json")')"
+        fi
+
+        cat ./image/manifest.json  | jq -r '.[0].Layers | .[]' > layer-list
+      else
+        touch layer-list
+      fi
+
+      # Unpack all of the parent layers into the image.
+      lowerdir=""
+      extractionID=0
+      for layerTar in $(tac layer-list); do
+        echo "Unpacking layer $layerTar"
+        extractionID=$((extractionID + 1))
+
+        mkdir -p image/$extractionID/layer
+        tar -C image/$extractionID/layer -xpf image/$layerTar
+        rm image/$layerTar
+
+        find image/$extractionID/layer -name ".wh.*" -exec bash -c 'name="$(basename {}|sed "s/^.wh.//")"; mknod "$(dirname {})/$name" c 0 0; rm {}' \;
+
+        # Get the next lower directory and continue the loop.
+        lowerdir=image/$extractionID/layer''${lowerdir:+:}$lowerdir
+      done
+
+      mkdir work
+      mkdir layer
+      mkdir mnt
+
+      ${lib.optionalString (preMount != "") ''
+        # Execute pre-mount steps
+        echo "Executing pre-mount steps..."
+        ${preMount}
+      ''}
+
+      if [ -n "$lowerdir" ]; then
+        mount -t overlay overlay -olowerdir=$lowerdir,workdir=work,upperdir=layer mnt
+      else
+        mount --bind layer mnt
+      fi
+
+      ${lib.optionalString (postMount != "") ''
+        # Execute post-mount steps
+        echo "Executing post-mount steps..."
+        ${postMount}
+      ''}
+
+      umount mnt
+
+      (
+        cd layer
+        cmd='name="$(basename {})"; touch "$(dirname {})/.wh.$name"; rm "{}"'
+        find . -type c -exec bash -c "$cmd" \;
+      )
+
+      ${postUmount}
+      '');
+    in
+    runCommand name {} ''
+      mkdir -p $out
+      cd ${result}
+      cp layer.tar json VERSION $out
+    '';
+
+  exportImage = { name ? fromImage.name, fromImage, fromImageName ? null, fromImageTag ? null, diskSize ? 1024 }:
+    runWithOverlay {
+      inherit name fromImage fromImageName fromImageTag diskSize;
+
+      postMount = ''
+        echo "Packing raw image..."
+        tar -C mnt --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" -cf $out .
+      '';
+    };
+
+
+  # Create an executable shell script which has the coreutils in its
+  # PATH. Since root scripts are executed in a blank environment, even
+  # things like `ls` or `echo` will be missing.
+  shellScript = name: text:
+    writeScript name ''
+      #!${runtimeShell}
+      set -e
+      export PATH=${coreutils}/bin:/bin
+      ${text}
+    '';
+
+  # Create a "layer" (set of files).
+  mkPureLayer = {
+    # Name of the layer
+    name,
+    # JSON containing configuration and metadata for this layer.
+    baseJson,
+    # Files to add to the layer.
+    contents ? null,
+    # When copying the contents into the image, preserve symlinks to
+    # directories (see `rsync -K`).  Otherwise, transform those symlinks
+    # into directories.
+    keepContentsDirlinks ? false,
+    # Additional commands to run on the layer before it is tar'd up.
+    extraCommands ? "", uid ? 0, gid ? 0
+  }:
+    runCommand "docker-layer-${name}" {
+      inherit baseJson contents extraCommands;
+      nativeBuildInputs = [ jshon rsync tarsum ];
+    }
+    ''
+      mkdir layer
+      if [[ -n "$contents" ]]; then
+        echo "Adding contents..."
+        for item in $contents; do
+          echo "Adding $item"
+          rsync -a${if keepContentsDirlinks then "K" else "k"} --chown=0:0 $item/ layer/
+        done
+      else
+        echo "No contents to add to layer."
+      fi
+
+      chmod ug+w layer
+
+      if [[ -n "$extraCommands" ]]; then
+        (cd layer; eval "$extraCommands")
+      fi
+
+      # Tar up the layer and throw it into 'layer.tar'.
+      echo "Packing layer..."
+      mkdir $out
+      tarhash=$(tar -C layer --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=${toString uid} --group=${toString gid} -cf - . | tee -p $out/layer.tar | tarsum)
+
+      # Add a 'checksum' field to the JSON, with the value set to the
+      # checksum of the tarball.
+      cat ${baseJson} | jshon -s "$tarhash" -i checksum > $out/json
+
+      # Indicate to docker that we're using schema version 1.0.
+      echo -n "1.0" > $out/VERSION
+
+      echo "Finished building layer '${name}'"
+    '';
+
+  # Make a "root" layer; required if we need to execute commands as a
+  # privileged user on the image. The commands themselves will be
+  # performed in a virtual machine sandbox.
+  mkRootLayer = {
+    # Name of the image.
+    name,
+    # Script to run as root. Bash.
+    runAsRoot,
+    # Files to add to the layer. If null, an empty layer will be created.
+    contents ? null,
+    # When copying the contents into the image, preserve symlinks to
+    # directories (see `rsync -K`).  Otherwise, transform those symlinks
+    # into directories.
+    keepContentsDirlinks ? false,
+    # JSON containing configuration and metadata for this layer.
+    baseJson,
+    # Existing image onto which to append the new layer.
+    fromImage ? null,
+    # Name of the image we're appending onto.
+    fromImageName ? null,
+    # Tag of the image we're appending onto.
+    fromImageTag ? null,
+    # How much disk to allocate for the temporary virtual machine.
+    diskSize ? 1024,
+    # Commands (bash) to run on the layer; these do not require sudo.
+    extraCommands ? ""
+  }:
+    # Generate an executable script from the `runAsRoot` text.
+    let
+      runAsRootScript = shellScript "run-as-root.sh" runAsRoot;
+      extraCommandsScript = shellScript "extra-commands.sh" extraCommands;
+    in runWithOverlay {
+      name = "docker-layer-${name}";
+
+      inherit fromImage fromImageName fromImageTag diskSize;
+
+      preMount = lib.optionalString (contents != null && contents != []) ''
+        echo "Adding contents..."
+        for item in ${toString contents}; do
+          echo "Adding $item..."
+          rsync -a${if keepContentsDirlinks then "K" else "k"} --chown=0:0 $item/ layer/
+        done
+
+        chmod ug+w layer
+      '';
+
+      postMount = ''
+        mkdir -p mnt/{dev,proc,sys} mnt${storeDir}
+
+        # Mount /dev, /sys and the nix store as shared folders.
+        mount --rbind /dev mnt/dev
+        mount --rbind /sys mnt/sys
+        mount --rbind ${storeDir} mnt${storeDir}
+
+        # Execute the run as root script. See 'man unshare' for
+        # details on what's going on here; basically this command
+        # means that the runAsRootScript will be executed in a nearly
+        # completely isolated environment.
+        #
+        # Ideally we would use --mount-proc=mnt/proc or similar, but this
+        # doesn't work. The workaround is to setup proc after unshare.
+        # See: https://github.com/karelzak/util-linux/issues/648
+        unshare -imnpuf --mount-proc sh -c 'mount --rbind /proc mnt/proc && chroot mnt ${runAsRootScript}'
+
+        # Unmount directories and remove them.
+        umount -R mnt/dev mnt/sys mnt${storeDir}
+        rmdir --ignore-fail-on-non-empty \
+          mnt/dev mnt/proc mnt/sys mnt${storeDir} \
+          mnt$(dirname ${storeDir})
+      '';
+
+      postUmount = ''
+        (cd layer; ${extraCommandsScript})
+
+        echo "Packing layer..."
+        mkdir -p $out
+        tarhash=$(tar -C layer --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" -cf - . |
+                    tee -p $out/layer.tar |
+                    ${tarsum}/bin/tarsum)
+
+        cat ${baseJson} | jshon -s "$tarhash" -i checksum > $out/json
+        # Indicate to docker that we're using schema version 1.0.
+        echo -n "1.0" > $out/VERSION
+
+        echo "Finished building layer '${name}'"
+      '';
+    };
+
+  buildLayeredImage = {name, ...}@args:
+    let
+      stream = streamLayeredImage args;
+    in
+      runCommand "${baseNameOf name}.tar.gz" {
+        inherit (stream) imageName;
+        passthru = { inherit (stream) imageTag; };
+        nativeBuildInputs = [ pigz ];
+      } "${stream} | pigz -nT > $out";
+
+  # 1. extract the base image
+  # 2. create the layer
+  # 3. add layer deps to the layer itself, diffing with the base image
+  # 4. compute the layer id
+  # 5. put the layer in the image
+  # 6. repack the image
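+  #
+  # A minimal invocation sketch (the package and command are illustrative):
+  #
+  #   buildImage {
+  #     name = "redis";
+  #     tag = "latest";
+  #     contents = [ pkgs.redis ];
+  #     config.Cmd = [ "/bin/redis-server" ];
+  #   }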
+  buildImage = args@{
+    # Image name.
+    name,
+    # Image tag; when null, the nix output hash will be used.
+    tag ? null,
+    # Parent image, to append to.
+    fromImage ? null,
+    # Name of the parent image; will be read from the image otherwise.
+    fromImageName ? null,
+    # Tag of the parent image; will be read from the image otherwise.
+    fromImageTag ? null,
+    # Files to put on the image (a nix store path or list of paths).
+    contents ? null,
+    # When copying the contents into the image, preserve symlinks to
+    # directories (see `rsync -K`).  Otherwise, transform those symlinks
+    # into directories.
+    keepContentsDirlinks ? false,
+    # Docker config; e.g. what command to run on the container.
+    config ? null,
+    # Optional bash script to run on the files prior to fixturizing the layer.
+    extraCommands ? "", uid ? 0, gid ? 0,
+    # Optional bash script to run as root on the image when provisioning.
+    runAsRoot ? null,
+    # Size of the virtual machine disk to provision when building the image.
+    diskSize ? 1024,
+    # Time of creation of the image.
+    created ? "1970-01-01T00:00:01Z",
+  }:
+
+    let
+      baseName = baseNameOf name;
+
+      # Create a JSON blob of the configuration. Set the date to unix zero.
+      baseJson = let
+          pure = writeText "${baseName}-config.json" (builtins.toJSON {
+            inherit created config;
+            architecture = defaultArch;
+            os = "linux";
+          });
+          impure = runCommand "${baseName}-config.json"
+            { nativeBuildInputs = [ jq ]; }
+            ''
+               jq ".created = \"$(TZ=utc date --iso-8601="seconds")\"" ${pure} > $out
+            '';
+        in if created == "now" then impure else pure;
+
+      layer =
+        if runAsRoot == null
+        then mkPureLayer {
+          name = baseName;
+          inherit baseJson contents keepContentsDirlinks extraCommands uid gid;
+        } else mkRootLayer {
+          name = baseName;
+          inherit baseJson fromImage fromImageName fromImageTag
+                  contents keepContentsDirlinks runAsRoot diskSize
+                  extraCommands;
+        };
+      result = runCommand "docker-image-${baseName}.tar.gz" {
+        nativeBuildInputs = [ jshon pigz coreutils findutils jq moreutils ];
+        # Image name must be lowercase
+        imageName = lib.toLower name;
+        imageTag = if tag == null then "" else tag;
+        inherit fromImage baseJson;
+        layerClosure = writeReferencesToFile layer;
+        passthru.buildArgs = args;
+        passthru.layer = layer;
+        passthru.imageTag =
+          if tag != null
+            then tag
+            else
+              lib.head (lib.strings.splitString "-" (baseNameOf result.outPath));
+        # Docker can't be made to run darwin binaries
+        meta.badPlatforms = lib.platforms.darwin;
+      } ''
+        ${lib.optionalString (tag == null) ''
+          outName="$(basename "$out")"
+          outHash=$(echo "$outName" | cut -d - -f 1)
+
+          imageTag=$outHash
+        ''}
+
+        # Print tar contents:
+        # 1: Interpreted as relative to the root directory
+        # 2: With no trailing slashes on directories
+        # This is useful for ensuring that the output matches the
+        # values generated by the "find" command
+        ls_tar() {
+          for f in $(tar -tf $1 | xargs realpath -ms --relative-to=.); do
+            if [[ "$f" != "." ]]; then
+              echo "/$f"
+            fi
+          done
+        }
+
+        mkdir image
+        touch baseFiles
+        baseEnvs='[]'
+        if [[ -n "$fromImage" ]]; then
+          echo "Unpacking base image..."
+          tar -C image -xpf "$fromImage"
+
+          # Store the layers and the environment variables from the base image
+          cat ./image/manifest.json  | jq -r '.[0].Layers | .[]' > layer-list
+          configName="$(cat ./image/manifest.json | jq -r '.[0].Config')"
+          baseEnvs="$(cat "./image/$configName" | jq '.config.Env // []')"
+
+          # Extract the parentID from the manifest
+          if [[ -n "$fromImageName" ]] && [[ -n "$fromImageTag" ]]; then
+            parentID="$(
+              cat "image/manifest.json" |
+                jq -r '.[] | select(.RepoTags | contains([$desiredTag])) | .Config | rtrimstr(".json")' \
+                  --arg desiredTag "$fromImageName:$fromImageTag"
+            )"
+          else
+            echo "From-image name or tag wasn't set. Reading the first ID."
+            parentID="$(cat "image/manifest.json" | jq -r '.[0].Config | rtrimstr(".json")')"
+          fi
+
+          # Do not import the base image configuration and manifest.
+          chmod a+w image image/*.json
+          rm -f image/*.json
+
+          for l in image/*/layer.tar; do
+            ls_tar $l >> baseFiles
+          done
+        else
+          touch layer-list
+        fi
+
+        chmod -R ug+rw image
+
+        mkdir temp
+        cp ${layer}/* temp/
+        chmod ug+w temp/*
+
+        for dep in $(cat $layerClosure); do
+          find $dep >> layerFiles
+        done
+
+        echo "Adding layer..."
+        # Record the contents of the tarball with ls_tar.
+        ls_tar temp/layer.tar >> baseFiles
+
+        # Append nix/store directory to the layer so that when the layer is loaded in the
+        # image /nix/store has read permissions for non-root users.
+        # nix/store is added only if the layer has /nix/store paths in it.
+        if [ $(wc -l < $layerClosure) -gt 1 ] && [ $(grep -c -e "^/nix/store$" baseFiles) -eq 0 ]; then
+          mkdir -p nix/store
+          chmod -R 555 nix
+          echo "./nix" >> layerFiles
+          echo "./nix/store" >> layerFiles
+        fi
+
+        # Get the files in the new layer which were *not* present in
+        # the old layer, and record them as newFiles.
+        comm <(sort -n baseFiles|uniq) \
+             <(sort -n layerFiles|uniq|grep -v ${layer}) -1 -3 > newFiles
+        # Append the new files to the layer.
+        tar -rpf temp/layer.tar --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" \
+          --owner=0 --group=0 --no-recursion --files-from newFiles
+
+        echo "Adding meta..."
+
+        # If we have a parentID, add it to the json metadata.
+        if [[ -n "$parentID" ]]; then
+          cat temp/json | jshon -s "$parentID" -i parent > tmpjson
+          mv tmpjson temp/json
+        fi
+
+        # Take the sha256 sum of the generated json and use it as the layer ID.
+        # Compute the size and add it to the json under the 'Size' field.
+        layerID=$(sha256sum temp/json|cut -d ' ' -f 1)
+        size=$(stat --printf="%s" temp/layer.tar)
+        cat temp/json | jshon -s "$layerID" -i id -n $size -i Size > tmpjson
+        mv tmpjson temp/json
+
+        # Use the temp folder we've been working on to create a new image.
+        mv temp image/$layerID
+
+        # Add the new layer ID to the end of the layer list
+        (
+          cat layer-list
+          # originally this used `sed -i "1i$layerID" layer-list`, but
+          # would fail if layer-list was completely empty.
+          echo "$layerID/layer.tar"
+        ) | sponge layer-list
+
+        # Create image json and image manifest
+        imageJson=$(cat ${baseJson} | jq '.config.Env = $baseenv + .config.Env' --argjson baseenv "$baseEnvs")
+        imageJson=$(echo "$imageJson" | jq ". + {\"rootfs\": {\"diff_ids\": [], \"type\": \"layers\"}}")
+        manifestJson=$(jq -n "[{\"RepoTags\":[\"$imageName:$imageTag\"]}]")
+
+        for layerTar in $(cat ./layer-list); do
+          layerChecksum=$(sha256sum image/$layerTar | cut -d ' ' -f1)
+          imageJson=$(echo "$imageJson" | jq ".history |= . + [{\"created\": \"$(jq -r .created ${baseJson})\"}]")
+          # diff_ids order is from the bottom-most to top-most layer
+          imageJson=$(echo "$imageJson" | jq ".rootfs.diff_ids |= . + [\"sha256:$layerChecksum\"]")
+          manifestJson=$(echo "$manifestJson" | jq ".[0].Layers |= . + [\"$layerTar\"]")
+        done
+
+        imageJsonChecksum=$(echo "$imageJson" | sha256sum | cut -d ' ' -f1)
+        echo "$imageJson" > "image/$imageJsonChecksum.json"
+        manifestJson=$(echo "$manifestJson" | jq ".[0].Config = \"$imageJsonChecksum.json\"")
+        echo "$manifestJson" > image/manifest.json
+
+        # Store the json under the name image/repositories.
+        jshon -n object \
+          -n object -s "$layerID" -i "$imageTag" \
+          -i "$imageName" > image/repositories
+
+        # Make the image read-only.
+        chmod -R a-w image
+
+        echo "Cooking the image..."
+        tar -C image --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=0 --group=0 --xform s:'^./':: -c . | pigz -nT > $out
+
+        echo "Finished."
+      '';
+
+    in
+    result;
+
+  # Merge the tarballs of images built with buildImage into a single
+  # tarball that contains all images. Running `docker load` on the resulting
+  # tarball will load the images into the docker daemon.
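+  # Usage sketch (imageA and imageB stand for any images built with
+  # buildImage, e.g. the bash and redis examples in examples.nix):
+  #   merged = dockerTools.mergeImages [ imageA imageB ];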
+  mergeImages = images: runCommand "merge-docker-images"
+    {
+      inherit images;
+      nativeBuildInputs = [ pigz jq ];
+    } ''
+    mkdir image inputs
+    # Extract images
+    repos=()
+    manifests=()
+    for item in $images; do
+      name=$(basename $item)
+      mkdir inputs/$name
+      tar -I pigz -xf $item -C inputs/$name
+      if [ -f inputs/$name/repositories ]; then
+        repos+=(inputs/$name/repositories)
+      fi
+      if [ -f inputs/$name/manifest.json ]; then
+        manifests+=(inputs/$name/manifest.json)
+      fi
+    done
+    # Copy all layers from input images to output image directory
+    cp -R --no-clobber inputs/*/* image/
+    # Merge repositories objects and manifests
+    jq -s add "''${repos[@]}" > repositories
+    jq -s add "''${manifests[@]}" > manifest.json
+    # Replace output image repositories and manifest with merged versions
+    mv repositories image/repositories
+    mv manifest.json image/manifest.json
+    # Create tarball and gzip
+    tar -C image --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=0 --group=0 --xform s:'^./':: -c . | pigz -nT > $out
+  '';
+
+
+  # Provide a /etc/passwd and /etc/group that contain root and nobody.
+  # Useful when packaging binaries that insist on using nss to look up
+  # username/groups (like nginx).
+  # It's fine for /bin/sh not to exist; it is provided separately by the
+  # binSh shim below.
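+  # Usage sketch, e.g. as part of an image's contents:
+  #   contents = [ fakeNss pkgs.nginx ];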
+  fakeNss = symlinkJoin {
+    name = "fake-nss";
+    paths = [
+      (writeTextDir "etc/passwd" ''
+        root:x:0:0:root user:/var/empty:/bin/sh
+        nobody:x:65534:65534:nobody:/var/empty:/bin/sh
+      '')
+      (writeTextDir "etc/group" ''
+        root:x:0:
+        nobody:x:65534:
+      '')
+      (runCommand "var-empty" {} ''
+        mkdir -p $out/var/empty
+      '')
+    ];
+  };
+
+  # This provides /bin/sh, pointing to bashInteractive.
+  binSh = runCommand "bin-sh" {} ''
+    mkdir -p $out/bin
+    ln -s ${bashInteractive}/bin/bash $out/bin/sh
+  '';
+
+  # Build an image and populate its nix database with the provided
+  # contents. The main purpose is to be able to use nix commands in
+  # the container.
+  # Be careful: this doesn't work well with multi-layer images.
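+  # Usage sketch (see the "nix" example in examples.nix):
+  #   buildImageWithNixDb { name = "nix"; contents = [ pkgs.nix pkgs.coreutils ]; }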
+  buildImageWithNixDb = args@{ contents ? null, extraCommands ? "", ... }: (
+    buildImage (args // {
+      extraCommands = (mkDbExtraCommand contents) + extraCommands;
+    })
+  );
+
+  buildLayeredImageWithNixDb = args@{ contents ? null, extraCommands ? "", ... }: (
+    buildLayeredImage (args // {
+      extraCommands = (mkDbExtraCommand contents) + extraCommands;
+    })
+  );
+
+  streamLayeredImage = {
+    # Image Name
+    name,
+    # Image tag; if null, the Nix output hash will be used.
+    tag ? null,
+    # Parent image, to append to.
+    fromImage ? null,
+    # Files to put on the image (a nix store path or list of paths).
+    contents ? [],
+    # Docker config; e.g. what command to run on the container.
+    config ? {},
+    # Time of creation of the image. Passing "now" will make the
+    # created date be the time of building.
+    created ? "1970-01-01T00:00:01Z",
+    # Optional bash script to run on the files prior to fixturizing the layer.
+    extraCommands ? "",
+    # Optional bash script to run inside fakeroot environment.
+    # Could be used for changing ownership of files in customisation layer.
+    fakeRootCommands ? "",
+    # We pick 100 to ensure there is plenty of room for extension. I
+    # believe the actual maximum is 128.
+    maxLayers ? 100,
+    # Whether to include store paths in the image. You generally want to leave
+    # this on, but tooling may disable this to insert the store paths more
+    # efficiently via other means, such as bind mounting the host store.
+    includeStorePaths ? true,
+  }:
+    assert
+      (lib.assertMsg (maxLayers > 1)
+      "the maxLayers argument of dockerTools.buildLayeredImage function must be greather than 1 (current value: ${toString maxLayers})");
+    let
+      baseName = baseNameOf name;
+
+      streamScript = writePython3 "stream" {} ./stream_layered_image.py;
+      baseJson = writeText "${baseName}-base.json" (builtins.toJSON {
+         inherit config;
+         architecture = defaultArch;
+         os = "linux";
+      });
+
+      contentsList = if builtins.isList contents then contents else [ contents ];
+
+      # We store the customisation layer as a tarball, to make sure that
+      # things like permissions set on 'extraCommands' are not overridden
+      # by Nix. Then we precompute the sha256 for performance.
+      customisationLayer = symlinkJoin {
+        name = "${baseName}-customisation-layer";
+        paths = contentsList;
+        inherit extraCommands fakeRootCommands;
+        nativeBuildInputs = [ fakeroot ];
+        postBuild = ''
+          mv $out old_out
+          (cd old_out; eval "$extraCommands" )
+
+          mkdir $out
+
+          fakeroot bash -c '
+            source $stdenv/setup
+            cd old_out
+            eval "$fakeRootCommands"
+            tar \
+              --sort name \
+              --numeric-owner --mtime "@$SOURCE_DATE_EPOCH" \
+              --hard-dereference \
+              -cf $out/layer.tar .
+          '
+
+          sha256sum $out/layer.tar \
+            | cut -f 1 -d ' ' \
+            > $out/checksum
+        '';
+      };
+
+      closureRoots = optionals includeStorePaths /* normally true */ (
+        [ baseJson ] ++ contentsList
+      );
+      overallClosure = writeText "closure" (lib.concatStringsSep " " closureRoots);
+
+      # These derivations are only created as implementation details of docker-tools,
+      # so they'll be excluded from the created images.
+      unnecessaryDrvs = [ baseJson overallClosure ];
+
+      conf = runCommand "${baseName}-conf.json" {
+        inherit fromImage maxLayers created;
+        imageName = lib.toLower name;
+        passthru.imageTag =
+          if tag != null
+            then tag
+            else
+              lib.head (lib.strings.splitString "-" (baseNameOf conf.outPath));
+        paths = buildPackages.referencesByPopularity overallClosure;
+        nativeBuildInputs = [ jq ];
+      } ''
+        ${if (tag == null) then ''
+          outName="$(basename "$out")"
+          outHash=$(echo "$outName" | cut -d - -f 1)
+
+          imageTag=$outHash
+        '' else ''
+          imageTag="${tag}"
+        ''}
+
+        # convert "created" to iso format
+        if [[ "$created" != "now" ]]; then
+            created="$(date -Iseconds -d "$created")"
+        fi
+
+        paths() {
+          cat $paths ${lib.concatMapStringsSep " "
+                         (path: "| (grep -v ${path} || true)")
+                         unnecessaryDrvs}
+        }
+
+        # Compute the number of layers that are already used by a potential
+        # 'fromImage' as well as the customization layer. Ensure that there is
+        # still at least one layer available to store the image contents.
+        usedLayers=0
+
+        # count the layers that the base image already uses
+        if [[ -n "$fromImage" ]]; then
+          (( usedLayers += $(tar -xOf "$fromImage" manifest.json | jq '.[0].Layers | length') ))
+        fi
+
+        # one layer will be taken up by the customisation layer
+        (( usedLayers += 1 ))
+
+        if ! (( $usedLayers < $maxLayers )); then
+          echo >&2 "Error: usedLayers $usedLayers layers to store 'fromImage' and" \
+                    "'extraCommands', but only maxLayers=$maxLayers were" \
+                    "allowed. At least 1 layer is required to store contents."
+          exit 1
+        fi
+        availableLayers=$(( maxLayers - usedLayers ))
+
+        # Create $maxLayers worth of Docker Layers, one layer per store path
+        # unless there are more paths than $maxLayers. In that case, create
+        # $maxLayers-1 layers for the most popular store paths, and smush the
+        # remaining store paths into one final layer.
+        #
+        # The following code is fiddly w.r.t. ensuring every layer is
+        # created, and that no paths are missed. If you change the
+        # following lines, double-check that your code behaves properly
+        # when the number of layers equals:
+        #      0, maxLayers-1, maxLayers, and maxLayers+1
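+        # For example (illustrative), with availableLayers=3 and paths
+        # p1..p5 the result is: [["p1"], ["p2"], ["p3","p4","p5"]].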
+        store_layers="$(
+          paths |
+            jq -sR '
+              rtrimstr("\n") | split("\n")
+                | (.[:$maxLayers-1] | map([.])) + [ .[$maxLayers-1:] ]
+                | map(select(length > 0))
+            ' \
+              --argjson maxLayers "$availableLayers"
+        )"
+
+        cat ${baseJson} | jq '
+          . + {
+            "store_dir": $store_dir,
+            "from_image": $from_image,
+            "store_layers": $store_layers,
+            "customisation_layer", $customisation_layer,
+            "repo_tag": $repo_tag,
+            "created": $created
+          }
+          ' --arg store_dir "${storeDir}" \
+            --argjson from_image ${if fromImage == null then "null" else "'\"${fromImage}\"'"} \
+            --argjson store_layers "$store_layers" \
+            --arg customisation_layer ${customisationLayer} \
+            --arg repo_tag "$imageName:$imageTag" \
+            --arg created "$created" |
+          tee $out
+      '';
+      result = runCommand "stream-${baseName}" {
+        inherit (conf) imageName;
+        passthru = {
+          inherit (conf) imageTag;
+
+          # Distinguish tarballs and exes at the Nix level so functions that
+          # take images can know in advance how the image is supposed to be used.
+          isExe = true;
+        };
+        nativeBuildInputs = [ makeWrapper ];
+      } ''
+        makeWrapper ${streamScript} $out --add-flags ${conf}
+      '';
+    in result;
+}
diff --git a/nixpkgs/pkgs/build-support/docker/detjson.py b/nixpkgs/pkgs/build-support/docker/detjson.py
new file mode 100644
index 000000000000..fe82cbea11bb
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/docker/detjson.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+# Deterministic layer json: https://github.com/docker/hub-feedback/issues/488
+
+import sys
+import json
+
+# If any of the keys below are equal to a certain value
+# then we can delete it because it's the default value
+SAFEDELS = {
+    "Size": 0,
+    "config": {
+        "ExposedPorts": None,
+        "MacAddress": "",
+        "NetworkDisabled": False,
+        "PortSpecs": None,
+        "VolumeDriver": ""
+    }
+}
+SAFEDELS["container_config"] = SAFEDELS["config"]
+
+def makedet(j, safedels):
+    for k,v in safedels.items():
+        if k not in j:
+            continue
+        if isinstance(v, dict):
+            makedet(j[k], v)
+        elif j[k] == v:
+            del j[k]
+
+def main():
+    j = json.load(sys.stdin)
+    makedet(j, SAFEDELS)
+    json.dump(j, sys.stdout, sort_keys=True)
+
+if __name__ == '__main__':
+    main()
diff --git a/nixpkgs/pkgs/build-support/docker/examples.nix b/nixpkgs/pkgs/build-support/docker/examples.nix
new file mode 100644
index 000000000000..f890d0a77a26
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/docker/examples.nix
@@ -0,0 +1,544 @@
+# Examples of using the docker tools to build packages.
+#
+# This file defines several docker images. In order to use an image,
+# build its derivation with `nix-build`, and then load the result with
+# `docker load`. For example:
+#
+#  $ nix-build '<nixpkgs>' -A dockerTools.examples.redis
+#  $ docker load < result
+
+{ pkgs, buildImage, buildLayeredImage, fakeNss, pullImage, shadowSetup, buildImageWithNixDb, pkgsCross }:
+
+rec {
+  # 1. basic example
+  bash = buildImage {
+    name = "bash";
+    tag = "latest";
+    contents = pkgs.bashInteractive;
+  };
+
+  # 2. service example, layered on another image
+  redis = buildImage {
+    name = "redis";
+    tag = "latest";
+
+    # for example's sake, we can layer redis on top of bash or debian
+    fromImage = bash;
+    # fromImage = debian;
+
+    contents = pkgs.redis;
+    runAsRoot = ''
+      mkdir -p /data
+    '';
+
+    config = {
+      Cmd = [ "/bin/redis-server" ];
+      WorkingDir = "/data";
+      Volumes = {
+        "/data" = {};
+      };
+    };
+  };
+
+  # 3. another service example
+  nginx = let
+    nginxPort = "80";
+    nginxConf = pkgs.writeText "nginx.conf" ''
+      user nobody nobody;
+      daemon off;
+      error_log /dev/stdout info;
+      pid /dev/null;
+      events {}
+      http {
+        access_log /dev/stdout;
+        server {
+          listen ${nginxPort};
+          index index.html;
+          location / {
+            root ${nginxWebRoot};
+          }
+        }
+      }
+    '';
+    nginxWebRoot = pkgs.writeTextDir "index.html" ''
+      <html><body><h1>Hello from NGINX</h1></body></html>
+    '';
+  in
+  buildLayeredImage {
+    name = "nginx-container";
+    tag = "latest";
+    contents = [
+      fakeNss
+      pkgs.nginx
+    ];
+
+    extraCommands = ''
+      # nginx still tries to read this directory even if the error_log
+      # directive specifies another file :/
+      mkdir -p var/log/nginx
+      mkdir -p var/cache/nginx
+    '';
+
+    config = {
+      Cmd = [ "nginx" "-c" nginxConf ];
+      ExposedPorts = {
+        "${nginxPort}/tcp" = {};
+      };
+    };
+  };
+
+  # 4. example of pulling an image. could be used as a base for other images
+  nixFromDockerHub = pullImage {
+    imageName = "nixos/nix";
+    imageDigest = "sha256:85299d86263a3059cf19f419f9d286cc9f06d3c13146a8ebbb21b3437f598357";
+    sha256 = "19fw0n3wmddahzr20mhdqv6jkjn1kanh6n2mrr08ai53dr8ph5n7";
+    finalImageTag = "2.2.1";
+    finalImageName = "nix";
+  };
+
+  # 5. example of multiple contents, emacs and vi happily coexisting
+  editors = buildImage {
+    name = "editors";
+    contents = [
+      pkgs.coreutils
+      pkgs.bash
+      pkgs.emacs
+      pkgs.vim
+      pkgs.nano
+    ];
+  };
+
+  # 6. nix example to play with the container nix store
+  # docker run -it --rm nix nix-store -qR $(nix-build '<nixpkgs>' -A nix)
+  nix = buildImageWithNixDb {
+    name = "nix";
+    tag = "latest";
+    contents = [
+      # nix-store uses the cat program to display results, as specified
+      # by the image env variable NIX_PAGER.
+      pkgs.coreutils
+      pkgs.nix
+      pkgs.bash
+    ];
+    config = {
+      Env = [
+        "NIX_PAGER=cat"
+        # A user is required by nix
+        # https://github.com/NixOS/nix/blob/9348f9291e5d9e4ba3c4347ea1b235640f54fd79/src/libutil/util.cc#L478
+        "USER=nobody"
+      ];
+    };
+  };
+
+  # 7. example of adding something on top of an image pulled by our
+  # dockerTools chain.
+  onTopOfPulledImage = buildImage {
+    name = "onTopOfPulledImage";
+    tag = "latest";
+    fromImage = nixFromDockerHub;
+    contents = [ pkgs.hello ];
+  };
+
+  # 8. regression test for erroneous use of eval and string expansion.
+  # See issue #34779 and PR #40947 for details.
+  runAsRootExtraCommands = pkgs.dockerTools.buildImage {
+    name = "runAsRootExtraCommands";
+    tag = "latest";
+    contents = [ pkgs.coreutils ];
+    # The parens here are to create problematic bash to embed and eval. If
+    # this is *embedded* into the script (with nix expansion), the initial
+    # quotes will close the string and the following parens are unexpected.
+    runAsRoot = ''echo "(runAsRoot)" > runAsRoot'';
+    extraCommands = ''echo "(extraCommand)" > extraCommands'';
+  };
+
+  # 9. Ensure that setting created to now results in a date which
+  # isn't the epoch + 1
+  unstableDate = pkgs.dockerTools.buildImage {
+    name = "unstable-date";
+    tag = "latest";
+    contents = [ pkgs.coreutils ];
+    created = "now";
+  };
+
+  # 10. Create a layered image
+  layered-image = pkgs.dockerTools.buildLayeredImage {
+    name = "layered-image";
+    tag = "latest";
+    extraCommands = ''echo "(extraCommand)" > extraCommands'';
+    config.Cmd = [ "${pkgs.hello}/bin/hello" ];
+    contents = [ pkgs.hello pkgs.bash pkgs.coreutils ];
+  };
+
+  # 11. Create an image on top of a layered image
+  layered-on-top = pkgs.dockerTools.buildImage {
+    name = "layered-on-top";
+    tag = "latest";
+    fromImage = layered-image;
+    extraCommands = ''
+      mkdir ./example-output
+      chmod 777 ./example-output
+    '';
+    config = {
+      Env = [ "PATH=${pkgs.coreutils}/bin/" ];
+      WorkingDir = "/example-output";
+      Cmd = [
+        "${pkgs.bash}/bin/bash" "-c" "echo hello > foo; cat foo"
+      ];
+    };
+  };
+
+  # 12 Create a layered image on top of a layered image
+  layered-on-top-layered = pkgs.dockerTools.buildLayeredImage {
+    name = "layered-on-top-layered";
+    tag = "latest";
+    fromImage = layered-image;
+    extraCommands = ''
+      mkdir ./example-output
+      chmod 777 ./example-output
+    '';
+    config = {
+      Env = [ "PATH=${pkgs.coreutils}/bin/" ];
+      WorkingDir = "/example-output";
+      Cmd = [
+        "${pkgs.bash}/bin/bash" "-c" "echo hello > foo; cat foo"
+      ];
+    };
+  };
+
+  # 13. example of running something as root on top of a parent image
+  # Regression test related to PR #52109
+  runAsRootParentImage = buildImage {
+    name = "runAsRootParentImage";
+    tag = "latest";
+    runAsRoot = "touch /example-file";
+    fromImage = bash;
+  };
+
+  # 14. example of a 3-layer image. This image is used to verify that
+  # the order of layers is correct.
+  # It allows validating that
+  # - the layers of the parent are below
+  # - the order of the parent layers is preserved at image build time
+  #   (this is why there are 3 images)
+  layersOrder = let
+    l1 = pkgs.dockerTools.buildImage {
+      name = "l1";
+      tag = "latest";
+      extraCommands = ''
+        mkdir -p tmp
+        echo layer1 > tmp/layer1
+        echo layer1 > tmp/layer2
+        echo layer1 > tmp/layer3
+      '';
+    };
+    l2 = pkgs.dockerTools.buildImage {
+      name = "l2";
+      fromImage = l1;
+      tag = "latest";
+      extraCommands = ''
+        mkdir -p tmp
+        echo layer2 > tmp/layer2
+        echo layer2 > tmp/layer3
+      '';
+    };
+  in pkgs.dockerTools.buildImage {
+    name = "l3";
+    fromImage = l2;
+    tag = "latest";
+    contents = [ pkgs.coreutils ];
+    extraCommands = ''
+      mkdir -p tmp
+      echo layer3 > tmp/layer3
+    '';
+  };
+
+  # 15. Environment variable inheritance.
+  # Child image should inherit the parent's environment variables,
+  # optionally overriding them.
+  environmentVariablesParent = pkgs.dockerTools.buildImage {
+    name = "parent";
+    tag = "latest";
+    config = {
+      Env = [
+        "FROM_PARENT=true"
+        "LAST_LAYER=parent"
+      ];
+    };
+  };
+
+  environmentVariables = pkgs.dockerTools.buildImage {
+    name = "child";
+    fromImage = environmentVariablesParent;
+    tag = "latest";
+    contents = [ pkgs.coreutils ];
+    config = {
+      Env = [
+        "FROM_CHILD=true"
+        "LAST_LAYER=child"
+      ];
+    };
+  };
+
+  environmentVariablesLayered = pkgs.dockerTools.buildLayeredImage {
+    name = "child";
+    fromImage = environmentVariablesParent;
+    tag = "latest";
+    contents = [ pkgs.coreutils ];
+    config = {
+      Env = [
+        "FROM_CHILD=true"
+        "LAST_LAYER=child"
+      ];
+    };
+  };
+
+  # 16. Create another layered image, for comparing layers with image 10.
+  another-layered-image = pkgs.dockerTools.buildLayeredImage {
+    name = "another-layered-image";
+    tag = "latest";
+    config.Cmd = [ "${pkgs.hello}/bin/hello" ];
+  };
+
+  # 17. Create a layered image with only 2 layers
+  two-layered-image = pkgs.dockerTools.buildLayeredImage {
+    name = "two-layered-image";
+    tag = "latest";
+    config.Cmd = [ "${pkgs.hello}/bin/hello" ];
+    contents = [ pkgs.bash pkgs.hello ];
+    maxLayers = 2;
+  };
+
+  # 18. Create a layered image with more packages than max layers.
+  # coreutils and hello are part of the same layer
+  bulk-layer = pkgs.dockerTools.buildLayeredImage {
+    name = "bulk-layer";
+    tag = "latest";
+    contents = with pkgs; [
+      coreutils hello
+    ];
+    maxLayers = 2;
+  };
+
+  # 19. Create a layered image with a base image and more packages than max
+  # layers. coreutils and hello are part of the same layer
+  layered-bulk-layer = pkgs.dockerTools.buildLayeredImage {
+    name = "layered-bulk-layer";
+    tag = "latest";
+    fromImage = two-layered-image;
+    contents = with pkgs; [
+      coreutils hello
+    ];
+    maxLayers = 4;
+  };
+
+  # 20. Create a "layered" image without nix store layers. This is not
+  # recommended, but can be useful for base images in rare cases.
+  no-store-paths = pkgs.dockerTools.buildLayeredImage {
+    name = "no-store-paths";
+    tag = "latest";
+    extraCommands = ''
+      # This removes sharing of busybox and is not recommended. We do this
+      # to make the example suitable as a test case with working binaries.
+      cp -r ${pkgs.pkgsStatic.busybox}/* .
+    '';
+  };
+
+  nixLayered = pkgs.dockerTools.buildLayeredImageWithNixDb {
+    name = "nix-layered";
+    tag = "latest";
+    contents = [
+      # nix-store uses the cat program to display results, as specified
+      # by the image env variable NIX_PAGER.
+      pkgs.coreutils
+      pkgs.nix
+      pkgs.bash
+    ];
+    config = {
+      Env = [
+        "NIX_PAGER=cat"
+        # A user is required by nix
+        # https://github.com/NixOS/nix/blob/9348f9291e5d9e4ba3c4347ea1b235640f54fd79/src/libutil/util.cc#L478
+        "USER=nobody"
+      ];
+    };
+  };
+
+  # 21. Support files in the store on buildLayeredImage
+  # See: https://github.com/NixOS/nixpkgs/pull/91084#issuecomment-653496223
+  filesInStore = pkgs.dockerTools.buildLayeredImageWithNixDb {
+    name = "file-in-store";
+    tag = "latest";
+    contents = [
+      pkgs.coreutils
+      pkgs.nix
+      (pkgs.writeScriptBin "myscript" ''
+        #!${pkgs.runtimeShell}
+        cat ${pkgs.writeText "somefile" "some data"}
+      '')
+    ];
+    config = {
+      Cmd = [ "myscript" ];
+      # For some reason 'nix-store --verify' requires this environment variable
+      Env = [ "USER=root" ];
+    };
+  };
+
+  # 22. Ensure that setting created to now results in a date which
+  # isn't the epoch + 1 for layered images.
+  unstableDateLayered = pkgs.dockerTools.buildLayeredImage {
+    name = "unstable-date-layered";
+    tag = "latest";
+    contents = [ pkgs.coreutils ];
+    created = "now";
+  };
+
+  # buildImage without explicit tag
+  bashNoTag = pkgs.dockerTools.buildImage {
+    name = "bash-no-tag";
+    contents = pkgs.bashInteractive;
+  };
+
+  # buildLayeredImage without explicit tag
+  bashNoTagLayered = pkgs.dockerTools.buildLayeredImage {
+    name = "bash-no-tag-layered";
+    contents = pkgs.bashInteractive;
+  };
+
+  # buildImage without explicit tag
+  bashNoTagStreamLayered = pkgs.dockerTools.streamLayeredImage {
+    name = "bash-no-tag-stream-layered";
+    contents = pkgs.bashInteractive;
+  };
+
+  # buildLayeredImage with non-root user
+  bashLayeredWithUser =
+  let
+    nonRootShadowSetup = { user, uid, gid ? uid }: with pkgs; [
+      (
+      writeTextDir "etc/shadow" ''
+        root:!x:::::::
+        ${user}:!:::::::
+      ''
+      )
+      (
+      writeTextDir "etc/passwd" ''
+        root:x:0:0::/root:${runtimeShell}
+        ${user}:x:${toString uid}:${toString gid}::/home/${user}:
+      ''
+      )
+      (
+      writeTextDir "etc/group" ''
+        root:x:0:
+        ${user}:x:${toString gid}:
+      ''
+      )
+      (
+      writeTextDir "etc/gshadow" ''
+        root:x::
+        ${user}:x::
+      ''
+      )
+    ];
+  in
+    pkgs.dockerTools.buildLayeredImage {
+      name = "bash-layered-with-user";
+      tag = "latest";
+      contents = [ pkgs.bash pkgs.coreutils ] ++ nonRootShadowSetup { uid = 999; user = "somebody"; };
+    };
+
+  # basic example, with cross compilation
+  cross = let
+    # Cross compile for x86_64 if on aarch64
+    crossPkgs =
+      if pkgs.system == "aarch64-linux" then pkgsCross.gnu64
+      else pkgsCross.aarch64-multiplatform;
+  in crossPkgs.dockerTools.buildImage {
+    name = "hello-cross";
+    tag = "latest";
+    contents = crossPkgs.hello;
+  };
+
+  # layered image where a store path is itself a symlink
+  layeredStoreSymlink =
+  let
+    target = pkgs.writeTextDir "dir/target" "Content doesn't matter.";
+    symlink = pkgs.runCommandNoCC "symlink" {} "ln -s ${target} $out";
+  in
+    pkgs.dockerTools.buildLayeredImage {
+      name = "layeredstoresymlink";
+      tag = "latest";
+      contents = [ pkgs.bash symlink ];
+    } // { passthru = { inherit symlink; }; };
+
+  # image with registry/ prefix
+  prefixedImage = pkgs.dockerTools.buildImage {
+    name = "registry-1.docker.io/image";
+    tag = "latest";
+    config.Cmd = [ "${pkgs.hello}/bin/hello" ];
+  };
+
+  # layered image with registry/ prefix
+  prefixedLayeredImage = pkgs.dockerTools.buildLayeredImage {
+    name = "registry-1.docker.io/layered-image";
+    tag = "latest";
+    config.Cmd = [ "${pkgs.hello}/bin/hello" ];
+  };
+
+  # layered image with files owned by a user other than root
+  layeredImageWithFakeRootCommands = pkgs.dockerTools.buildLayeredImage {
+    name = "layered-image-with-fake-root-commands";
+    tag = "latest";
+    contents = [
+      pkgs.pkgsStatic.busybox
+    ];
+    fakeRootCommands = ''
+      mkdir -p ./home/jane
+      chown 1000 ./home/jane
+    '';
+  };
+
+  # tarball consisting of both bash and redis images
+  mergedBashAndRedis = pkgs.dockerTools.mergeImages [
+    bash
+    redis
+  ];
+
+  # tarball consisting of bash (without tag) and redis images
+  mergedBashNoTagAndRedis = pkgs.dockerTools.mergeImages [
+    bashNoTag
+    redis
+  ];
+
+  # tarball consisting of bash and layered image with different owner of the
+  # /home/jane directory
+  mergedBashFakeRoot = pkgs.dockerTools.mergeImages [
+    bash
+    layeredImageWithFakeRootCommands
+  ];
+
+  helloOnRoot = pkgs.dockerTools.streamLayeredImage {
+    name = "hello";
+    tag = "latest";
+    contents = [
+      (pkgs.buildEnv {
+        name = "hello-root";
+        paths = [ pkgs.hello ];
+      })
+    ];
+    config.Cmd = [ "hello" ];
+  };
+
+  helloOnRootNoStore = pkgs.dockerTools.streamLayeredImage {
+    name = "hello";
+    tag = "latest";
+    contents = [
+      (pkgs.buildEnv {
+        name = "hello-root";
+        paths = [ pkgs.hello ];
+      })
+    ];
+    config.Cmd = [ "hello" ];
+    includeStorePaths = false;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker b/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker
new file mode 100755
index 000000000000..5798ab5984f1
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker
@@ -0,0 +1,173 @@
+#! /usr/bin/env bash
+
+set -e -o pipefail
+
+os=
+arch=
+imageName=
+imageTag=
+imageDigest=
+finalImageName=
+finalImageTag=
+hashType=$NIX_HASH_ALGO
+hashFormat=$hashFormat
+format=nix
+
+usage(){
+    echo  >&2 "syntax: nix-prefetch-docker [options] [IMAGE_NAME [IMAGE_TAG|IMAGE_DIGEST]]
+
+Options:
+      --os os                   OS to fetch image for
+      --arch arch               Arch to fetch image for
+      --image-name name         Name of the image to fetch
+      --image-tag tag           Image tag
+      --image-digest digest     Image digest
+      --final-image-name name   Desired name of the image
+      --final-image-tag tag     Desired image tag
+      --json                    Output result in json format instead of nix
+      --quiet                   Only print the final result
+"
+    exit 1
+}
+
+get_image_digest(){
+    local imageName=$1
+    local imageTag=$2
+
+    if test -z "$imageTag"; then
+        imageTag="latest"
+    fi
+
+    skopeo --insecure-policy --tmpdir=$TMPDIR inspect "docker://$imageName:$imageTag" | jq '.Digest' -r
+}
+
+get_name() {
+    local imageName=$1
+    local imageTag=$2
+
+    echo "docker-image-$(echo "$imageName:$imageTag" | tr '/:' '-').tar"
+}
+
+argi=0
+argfun=""
+for arg; do
+    if test -z "$argfun"; then
+        case $arg in
+            --os) argfun=set_os;;
+            --arch) argfun=set_arch;;
+            --image-name) argfun=set_imageName;;
+            --image-tag) argfun=set_imageTag;;
+            --image-digest) argfun=set_imageDigest;;
+            --final-image-name) argfun=set_finalImageName;;
+            --final-image-tag) argfun=set_finalImageTag;;
+            --quiet) QUIET=true;;
+            --json) format=json;;
+            --help) usage; exit;;
+            *)
+                : $((++argi))
+                case $argi in
+                    1) imageName=$arg;;
+                    2) [[ $arg == *"sha256"*  ]] && imageDigest=$arg || imageTag=$arg;;
+                    *) exit 1;;
+                esac
+                ;;
+        esac
+    else
+        case $argfun in
+            set_*)
+                var=${argfun#set_}
+                eval $var=$arg
+                ;;
+        esac
+        argfun=""
+    fi
+done
+
+if test -z "$imageName"; then
+    usage
+fi
+
+if test -z "$os"; then
+    os=linux
+fi
+
+if test -z "$arch"; then
+    arch=amd64
+fi
+
+if test -z "$hashType"; then
+    hashType=sha256
+fi
+
+if test -z "$hashFormat"; then
+    hashFormat=base32
+fi
+
+if test -z "$finalImageName"; then
+    finalImageName="$imageName"
+fi
+
+if test -z "$finalImageTag"; then
+    if test -z "$imageTag"; then
+        finalImageTag="latest"
+    else
+        finalImageTag="$imageTag"
+    fi
+fi
+
+if test -z "$imageDigest"; then
+    imageDigest=$(get_image_digest $imageName $imageTag)
+fi
+
+sourceUrl="docker://$imageName@$imageDigest"
+
+tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/skopeo-copy-tmp-XXXXXXXX")"
+trap "rm -rf \"$tmpPath\"" EXIT
+
+tmpFile="$tmpPath/$(get_name $finalImageName $finalImageTag)"
+
+if test -z "$QUIET"; then
+    skopeo --insecure-policy --tmpdir=$TMPDIR --override-os ${os} --override-arch ${arch} copy "$sourceUrl" "docker-archive://$tmpFile:$finalImageName:$finalImageTag" >&2
+else
+    skopeo --insecure-policy --tmpdir=$TMPDIR --override-os ${os} --override-arch ${arch} copy "$sourceUrl" "docker-archive://$tmpFile:$finalImageName:$finalImageTag" > /dev/null
+fi
+
+# Compute the hash.
+imageHash=$(nix-hash --flat --type $hashType --base32 "$tmpFile")
+
+# Add the downloaded file to Nix store.
+finalPath=$(nix-store --add-fixed "$hashType" "$tmpFile")
+
+if test -z "$QUIET"; then
+    echo "-> ImageName: $imageName" >&2
+    echo "-> ImageDigest: $imageDigest" >&2
+    echo "-> FinalImageName: $finalImageName" >&2
+    echo "-> FinalImageTag: $finalImageTag" >&2
+    echo "-> ImagePath: $finalPath" >&2
+    echo "-> ImageHash: $imageHash" >&2
+fi
+
+if [ "$format" == "nix" ]; then
+cat <<EOF
+{
+  imageName = "$imageName";
+  imageDigest = "$imageDigest";
+  sha256 = "$imageHash";
+  finalImageName = "$finalImageName";
+  finalImageTag = "$finalImageTag";
+}
+EOF
+
+else
+
+cat <<EOF
+{
+  "imageName": "$imageName",
+  "imageDigest": "$imageDigest",
+  "sha256": "$imageHash",
+  "finalImageName": "$finalImageName",
+  "finalImageTag": "$finalImageTag"
+}
+EOF
+
+fi
diff --git a/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker.nix b/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker.nix
new file mode 100644
index 000000000000..61e917461ed9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker.nix
@@ -0,0 +1,24 @@
+{ lib, stdenv, makeWrapper, nix, skopeo, jq }:
+
+stdenv.mkDerivation {
+  name = "nix-prefetch-docker";
+
+  nativeBuildInputs = [ makeWrapper ];
+
+  dontUnpack = true;
+
+  installPhase = ''
+    install -vD ${./nix-prefetch-docker} $out/bin/$name;
+    wrapProgram $out/bin/$name \
+      --prefix PATH : ${lib.makeBinPath [ nix skopeo jq ]} \
+      --set HOME /homeless-shelter
+  '';
+
+  preferLocalBuild = true;
+
+  meta = with lib; {
+    description = "Script used to obtain source hashes for dockerTools.pullImage";
+    maintainers = with maintainers; [ offline ];
+    platforms = platforms.unix;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/docker/stream_layered_image.py b/nixpkgs/pkgs/build-support/docker/stream_layered_image.py
new file mode 100644
index 000000000000..d7c63eb43a78
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/docker/stream_layered_image.py
@@ -0,0 +1,391 @@
+"""
+This script generates a Docker image from a set of store paths. Uses
+Docker Image Specification v1.2 as reference [1].
+
+It expects a JSON file with the following properties and writes the
+image as an uncompressed tarball to stdout:
+
+* "architecture", "config", "os", "created", "repo_tag" correspond to
+  the fields with the same name on the image spec [2].
+* "created" can be "now".
+* "created" is also used as mtime for files added to the image.
+* "store_layers" is a list of layers in ascending order, where each
+  layer is the list of store paths to include in that layer.
+
+The main challenge for this script is to create the final image in a
+streaming fashion, without dumping any intermediate data to disk,
+for performance reasons.
+
+A docker image has each layer's contents archived as a separate tarball,
+and they later all get enveloped into a single big tarball in a
+content-addressed fashion. However, because of how the "tar" format
+works, we have to know the name (which includes the checksum in our
+case) and the size of a tarball before we can start adding it to the
+outer tarball.  We achieve that by creating the layer tarballs twice;
+on the first iteration we calculate the file size and the checksum,
+and on the second one we actually stream the contents. The
+'add_layer_dir' function does all this.
+
+[1]: https://github.com/moby/moby/blob/master/image/spec/v1.2.md
+[2]: https://github.com/moby/moby/blob/4fb59c20a4fb54f944fe170d0ff1d00eb4a24d6f/image/spec/v1.2.md#image-json-field-descriptions
+"""  # noqa: E501
+
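+# For illustration, the conf file consumed by this script looks roughly
+# like this (values are examples, not a spec):
+#
+#   {
+#     "architecture": "amd64",
+#     "os": "linux",
+#     "config": {"Cmd": ["/bin/hello"]},
+#     "created": "1970-01-01T00:00:01Z",
+#     "repo_tag": "hello:latest",
+#     "store_dir": "/nix/store",
+#     "from_image": null,
+#     "store_layers": [["/nix/store/aaa...-hello"]],
+#     "customisation_layer": "/nix/store/bbb...-customisation-layer"
+#   }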
+
+import io
+import os
+import re
+import sys
+import json
+import hashlib
+import pathlib
+import tarfile
+import itertools
+import threading
+from datetime import datetime, timezone
+from collections import namedtuple
+
+
+def archive_paths_to(obj, paths, mtime):
+    """
+    Writes the given store paths as a tar file to the given stream.
+
+    obj: Stream to write to. Should have a 'write' method.
+    paths: List of store paths.
+    """
+
+    # gettarinfo makes the paths relative, this makes them
+    # absolute again
+    def append_root(ti):
+        ti.name = "/" + ti.name
+        return ti
+
+    def apply_filters(ti):
+        ti.mtime = mtime
+        ti.uid = 0
+        ti.gid = 0
+        ti.uname = "root"
+        ti.gname = "root"
+        return ti
+
+    def nix_root(ti):
+        ti.mode = 0o0555  # r-xr-xr-x
+        return ti
+
+    def dir(path):
+        ti = tarfile.TarInfo(path)
+        ti.type = tarfile.DIRTYPE
+        return ti
+
+    with tarfile.open(fileobj=obj, mode="w|") as tar:
+        # To be consistent with the docker utilities, we need to have
+        # these directories first when building layer tarballs.
+        tar.addfile(apply_filters(nix_root(dir("/nix"))))
+        tar.addfile(apply_filters(nix_root(dir("/nix/store"))))
+
+        for path in paths:
+            path = pathlib.Path(path)
+            if path.is_symlink():
+                files = [path]
+            else:
+                files = itertools.chain([path], path.rglob("*"))
+
+            for filename in sorted(files):
+                ti = append_root(tar.gettarinfo(filename))
+
+                # copy hardlinks as regular files
+                if ti.islnk():
+                    ti.type = tarfile.REGTYPE
+                    ti.linkname = ""
+                    ti.size = filename.stat().st_size
+
+                ti = apply_filters(ti)
+                if ti.isfile():
+                    with open(filename, "rb") as f:
+                        tar.addfile(ti, f)
+                else:
+                    tar.addfile(ti)
+
+
+class ExtractChecksum:
+    """
+    A writable stream which only calculates the final file size and
+    sha256sum, while discarding the actual contents.
+    """
+
+    def __init__(self):
+        self._digest = hashlib.sha256()
+        self._size = 0
+
+    def write(self, data):
+        self._digest.update(data)
+        self._size += len(data)
+
+    def extract(self):
+        """
+        Returns: Hex-encoded sha256sum and size as a tuple.
+        """
+        return (self._digest.hexdigest(), self._size)
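+
+# For illustration, the first pass described in the module docstring
+# amounts to:
+#   ec = ExtractChecksum()
+#   archive_paths_to(ec, paths, mtime=0)   # discards data, keeps stats
+#   checksum, size = ec.extract()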
+
+
+FromImage = namedtuple("FromImage", ["tar", "manifest_json", "image_json"])
+# Some metadata for a layer
+LayerInfo = namedtuple("LayerInfo", ["size", "checksum", "path", "paths"])
+
+
+def load_from_image(from_image_str):
+    """
+    Loads the given base image, if any.
+
+    from_image_str: Path to the base image archive.
+
+    Returns: A 'FromImage' object with references to the loaded base image,
+             or 'None' if no base image was provided.
+    """
+    if from_image_str is None:
+        return None
+
+    base_tar = tarfile.open(from_image_str)
+
+    manifest_json_tarinfo = base_tar.getmember("manifest.json")
+    with base_tar.extractfile(manifest_json_tarinfo) as f:
+        manifest_json = json.load(f)
+
+    image_json_tarinfo = base_tar.getmember(manifest_json[0]["Config"])
+    with base_tar.extractfile(image_json_tarinfo) as f:
+        image_json = json.load(f)
+
+    return FromImage(base_tar, manifest_json, image_json)
+
+
+def add_base_layers(tar, from_image):
+    """
+    Adds the layers from the given base image to the final image.
+
+    tar: 'tarfile.TarFile' object for new layers to be added to.
+    from_image: 'FromImage' object with references to the loaded base image.
+    """
+    if from_image is None:
+        print("No 'fromImage' provided", file=sys.stderr)
+        return []
+
+    layers = from_image.manifest_json[0]["Layers"]
+    checksums = from_image.image_json["rootfs"]["diff_ids"]
+    layers_checksums = zip(layers, checksums)
+
+    for num, (layer, checksum) in enumerate(layers_checksums, start=1):
+        layer_tarinfo = from_image.tar.getmember(layer)
+        checksum = re.sub(r"^sha256:", "", checksum)
+
+        tar.addfile(layer_tarinfo, from_image.tar.extractfile(layer_tarinfo))
+        path = layer_tarinfo.path
+        size = layer_tarinfo.size
+
+        print("Adding base layer", num, "from", path, file=sys.stderr)
+        yield LayerInfo(size=size, checksum=checksum, path=path, paths=[path])
+
+    from_image.tar.close()
+
+
+def overlay_base_config(from_image, final_config):
+    """
+    Overlays the final image 'config' JSON on top of selected defaults from the
+    base image 'config' JSON.
+
+    from_image: 'FromImage' object with references to the loaded base image.
+    final_config: 'dict' object of the final image 'config' JSON.
+    """
+    if from_image is None:
+        return final_config
+
+    base_config = from_image.image_json["config"]
+
+    # Preserve environment from base image
+    final_env = base_config.get("Env", []) + final_config.get("Env", [])
+    if final_env:
+        # Resolve duplicates (last one wins) and format back as list
+        resolved_env = {entry.split("=", 1)[0]: entry for entry in final_env}
+        final_config["Env"] = list(resolved_env.values())
+    return final_config
+
+
+def add_layer_dir(tar, paths, store_dir, mtime):
+    """
+    Appends given store paths to a TarFile object as a new layer.
+
+    tar: 'tarfile.TarFile' object for the new layer to be added to.
+    paths: List of store paths.
+    store_dir: the root directory of the nix store
+    mtime: 'mtime' of the added files and the layer tarball.
+           Should be an integer representing a POSIX time.
+
+    Returns: A 'LayerInfo' object containing some metadata of
+             the layer added.
+    """
+
+    invalid_paths = [i for i in paths if not i.startswith(store_dir)]
+    assert len(invalid_paths) == 0, \
+        f"Expecting absolute paths from {store_dir}, but got: {invalid_paths}"
+
+    # First, calculate the tarball checksum and the size.
+    extract_checksum = ExtractChecksum()
+    archive_paths_to(
+        extract_checksum,
+        paths,
+        mtime=mtime,
+    )
+    (checksum, size) = extract_checksum.extract()
+
+    path = f"{checksum}/layer.tar"
+    layer_tarinfo = tarfile.TarInfo(path)
+    layer_tarinfo.size = size
+    layer_tarinfo.mtime = mtime
+
+    # Then actually stream the contents to the outer tarball.
+    read_fd, write_fd = os.pipe()
+    with open(read_fd, "rb") as read, open(write_fd, "wb") as write:
+        def producer():
+            archive_paths_to(
+                write,
+                paths,
+                mtime=mtime,
+            )
+            write.close()
+
+        # Closing the write end of the fifo also closes the read end,
+        # so we don't need to wait until this thread is finished.
+        #
+        # Any exception from the thread will get printed by the default
+        # exception handler, and the 'addfile' call will fail since it
+        # won't be able to read required amount of bytes.
+        threading.Thread(target=producer).start()
+        tar.addfile(layer_tarinfo, read)
+
+    return LayerInfo(size=size, checksum=checksum, path=path, paths=paths)
+
+
+def add_customisation_layer(target_tar, customisation_layer, mtime):
+    """
+    Adds the customisation layer as a new layer. This layer is structured
+    differently; the given store path has the 'layer.tar' and the
+    corresponding sha256sum ready.
+
+    target_tar: 'tarfile.TarFile' object for the new layer to be added to.
+    customisation_layer: Path containing the layer archive.
+    mtime: 'mtime' of the added layer tarball.
+    """
+
+    checksum_path = os.path.join(customisation_layer, "checksum")
+    with open(checksum_path) as f:
+        checksum = f.read().strip()
+    assert len(checksum) == 64, f"Invalid sha256 at {checksum_path}."
+
+    layer_path = os.path.join(customisation_layer, "layer.tar")
+
+    path = f"{checksum}/layer.tar"
+    tarinfo = target_tar.gettarinfo(layer_path)
+    tarinfo.name = path
+    tarinfo.mtime = mtime
+
+    with open(layer_path, "rb") as f:
+        target_tar.addfile(tarinfo, f)
+
+    return LayerInfo(
+      size=None,
+      checksum=checksum,
+      path=path,
+      paths=[customisation_layer]
+    )
+
+
+def add_bytes(tar, path, content, mtime):
+    """
+    Adds a file to the tarball with given path and contents.
+
+    tar: 'tarfile.TarFile' object.
+    path: Path of the file as a string.
+    content: Contents of the file.
+    mtime: 'mtime' of the file. Should be an integer representing a POSIX time.
+    """
+    assert type(content) is bytes
+
+    ti = tarfile.TarInfo(path)
+    ti.size = len(content)
+    ti.mtime = mtime
+    tar.addfile(ti, io.BytesIO(content))
+
+
+def main():
+    with open(sys.argv[1], "r") as f:
+        conf = json.load(f)
+
+    created = (
+      datetime.now(tz=timezone.utc)
+      if conf["created"] == "now"
+      else datetime.fromisoformat(conf["created"])
+    )
+    mtime = int(created.timestamp())
+    store_dir = conf["store_dir"]
+
+    from_image = load_from_image(conf["from_image"])
+
+    with tarfile.open(mode="w|", fileobj=sys.stdout.buffer) as tar:
+        layers = []
+        layers.extend(add_base_layers(tar, from_image))
+
+        start = len(layers) + 1
+        for num, store_layer in enumerate(conf["store_layers"], start=start):
+            print("Creating layer", num, "from paths:", store_layer,
+                  file=sys.stderr)
+            info = add_layer_dir(tar, store_layer, store_dir, mtime=mtime)
+            layers.append(info)
+
+        print("Creating layer", len(layers) + 1, "with customisation...",
+              file=sys.stderr)
+        layers.append(
+          add_customisation_layer(
+            tar,
+            conf["customisation_layer"],
+            mtime=mtime
+          )
+        )
+
+        print("Adding manifests...", file=sys.stderr)
+
+        image_json = {
+            "created": datetime.isoformat(created),
+            "architecture": conf["architecture"],
+            "os": "linux",
+            "config": overlay_base_config(from_image, conf["config"]),
+            "rootfs": {
+                "diff_ids": [f"sha256:{layer.checksum}" for layer in layers],
+                "type": "layers",
+            },
+            "history": [
+                {
+                  "created": datetime.isoformat(created),
+                  "comment": f"store paths: {layer.paths}"
+                }
+                for layer in layers
+            ],
+        }
+
+        image_json = json.dumps(image_json, indent=4).encode("utf-8")
+        image_json_checksum = hashlib.sha256(image_json).hexdigest()
+        image_json_path = f"{image_json_checksum}.json"
+        add_bytes(tar, image_json_path, image_json, mtime=mtime)
+
+        manifest_json = [
+            {
+                "Config": image_json_path,
+                "RepoTags": [conf["repo_tag"]],
+                "Layers": [layer.path for layer in layers],
+            }
+        ]
+        manifest_json = json.dumps(manifest_json, indent=4).encode("utf-8")
+        add_bytes(tar, "manifest.json", manifest_json, mtime=mtime)
+
+        print("Done.", file=sys.stderr)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/nixpkgs/pkgs/build-support/docker/tarsum.go b/nixpkgs/pkgs/build-support/docker/tarsum.go
new file mode 100644
index 000000000000..f91a90bdbdab
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/docker/tarsum.go
@@ -0,0 +1,24 @@
+package main
+
+import (
+	"fmt"
+	"io"
+	"io/ioutil"
+	"os"
+	"github.com/docker/docker/pkg/tarsum"
+)
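+
+// Usage sketch: reads a tar archive from stdin and prints its tarsum,
+// e.g. `go run tarsum.go < layer.tar`.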
+
+func main() {
+	ts, err := tarsum.NewTarSum(os.Stdin, true, tarsum.Version1)
+	if err != nil {
+		fmt.Println(err)
+		os.Exit(1)
+	}
+
+	if _, err = io.Copy(ioutil.Discard, ts); err != nil {
+		fmt.Println(err)
+		os.Exit(1)
+	}
+
+	fmt.Println(ts.Sum(nil))
+}
diff --git a/nixpkgs/pkgs/build-support/dotnetbuildhelpers/create-pkg-config-for-dll.sh b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/create-pkg-config-for-dll.sh
new file mode 100644
index 000000000000..379141704523
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/create-pkg-config-for-dll.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+
+targetDir="$1"
+dllFullPath="$2"
+
+dllVersion="$(monodis --assembly "$dllFullPath" | grep ^Version: | cut -f 2 -d : | xargs)"
+[ -z "$dllVersion" ] && echo "Defaulting dllVersion to 0.0.0" && dllVersion="0.0.0"
+dllFileName="$(basename $dllFullPath)"
+dllRootName="$(basename -s .dll $dllFileName)"
+targetPcFile="$targetDir"/"$dllRootName".pc
+
+mkdir -p "$targetDir"
+
+cat > $targetPcFile << EOF
+Libraries=$dllFullPath
+
+Name: $dllRootName
+Description: $dllRootName
+Version: $dllVersion
+Libs: -r:$dllFileName
+EOF
+
+echo "Created $targetPcFile"
diff --git a/nixpkgs/pkgs/build-support/dotnetbuildhelpers/default.nix b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/default.nix
new file mode 100644
index 000000000000..4348832ac04c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/default.nix
@@ -0,0 +1,18 @@
+{ runCommand, mono, pkg-config }:
+  runCommand
+    "dotnetbuildhelpers"
+    { preferLocalBuild = true; }
+    ''
+      target="$out/bin"
+      mkdir -p "$target"
+
+      for script in ${./create-pkg-config-for-dll.sh} ${./patch-fsharp-targets.sh} ${./remove-duplicated-dlls.sh} ${./placate-nuget.sh} ${./placate-paket.sh}
+      do
+        scriptName="$(basename "$script" | cut -f 2- -d -)"
+        cp -v "$script" "$target"/"$scriptName"
+        chmod 755 "$target"/"$scriptName"
+        patchShebangs "$target"/"$scriptName"
+        substituteInPlace "$target"/"$scriptName" --replace pkg-config ${pkg-config}/bin/${pkg-config.targetPrefix}pkg-config
+        substituteInPlace "$target"/"$scriptName" --replace monodis ${mono}/bin/monodis
+      done
+    ''
diff --git a/nixpkgs/pkgs/build-support/dotnetbuildhelpers/patch-fsharp-targets.sh b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/patch-fsharp-targets.sh
new file mode 100755
index 000000000000..3f81cc73e801
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/patch-fsharp-targets.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+# Some project files look for F# targets in $(FSharpTargetsPath)
+# so it's a good idea to add something like this to your ~/.bash_profile:
+
+# export FSharpTargetsPath=$(dirname $(which fsharpc))/../lib/mono/4.0/Microsoft.FSharp.Targets
+
+# In build scripts, you would add something like this:
+
+# export FSharpTargetsPath="${fsharp}/lib/mono/4.0/Microsoft.FSharp.Targets"
+
+# However, some project files look for F# targets in the main Mono directory. When that happens
+# patch the project files using this script so they will look in $(FSharpTargetsPath) instead.
+
+echo "Patching F# targets in fsproj files..."
+
+find -iname \*.fsproj -print -exec \
+  sed --in-place=.bak \
+    -e 's,<FSharpTargetsPath>\([^<]*\)</FSharpTargetsPath>,<FSharpTargetsPath Condition="Exists('\'\\1\'')">\1</FSharpTargetsPath>,'g \
+    {} \;
diff --git a/nixpkgs/pkgs/build-support/dotnetbuildhelpers/placate-nuget.sh b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/placate-nuget.sh
new file mode 100644
index 000000000000..8a7f36522a3d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/placate-nuget.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+echo Placating Nuget in nuget.targets
+find -iname nuget.targets -print -exec sed --in-place=bak -e 's,mono --runtime[^<]*,true NUGET PLACATED BY buildDotnetPackage,g' {} \;
+
+echo Just to be sure, replacing Nuget executables by empty files.
+find . -iname nuget.exe \! -size 0 -exec mv -v {} {}.bak \; -exec touch {} \;
diff --git a/nixpkgs/pkgs/build-support/dotnetbuildhelpers/placate-paket.sh b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/placate-paket.sh
new file mode 100644
index 000000000000..0dbf1eecbad8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/placate-paket.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+echo Placating Paket in paket.targets
+find -iname paket.targets -print -exec sed --in-place=bak -e 's,mono --runtime[^<]*,true PAKET PLACATED BY buildDotnetPackage,g' {} \;
+
+echo Just to be sure, replacing Paket executables by empty files.
+find . -iname paket\*.exe \! -size 0 -exec mv -v {} {}.bak \; -exec touch {} \;
diff --git a/nixpkgs/pkgs/build-support/dotnetbuildhelpers/remove-duplicated-dlls.sh b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/remove-duplicated-dlls.sh
new file mode 100644
index 000000000000..d8d29912c8fa
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetbuildhelpers/remove-duplicated-dlls.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+
+IFS="
+"
+
+for dll in $(find -iname \*.dll)
+do
+    baseName="$(basename "$dll" | sed "s/.dll$//i")"
+    if pkg-config "$baseName"
+    then
+        candidateDll="$(pkg-config "$baseName" --variable=Libraries)"
+
+        if diff "$dll" "$candidateDll" >/dev/null
+        then
+            echo "$dll is identical to $candidateDll. Substituting..."
+            rm -vf "$dll"
+            ln -sv "$candidateDll" "$dll"
+        else
+            echo "$dll and $candidateDll share the same name but have different contents, leaving alone."
+        fi
+    fi
+done
diff --git a/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper.sln b/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper.sln
new file mode 100644
index 000000000000..f2e7d4cf8b2c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper.sln
@@ -0,0 +1,20 @@
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Wrapper", "Wrapper\Wrapper.csproj", "{D01B3597-E85E-42F4-940A-EF5AE712942F}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|x86 = Debug|x86
+		Release|x86 = Release|x86
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{D01B3597-E85E-42F4-940A-EF5AE712942F}.Debug|x86.ActiveCfg = Debug|x86
+		{D01B3597-E85E-42F4-940A-EF5AE712942F}.Debug|x86.Build.0 = Debug|x86
+		{D01B3597-E85E-42F4-940A-EF5AE712942F}.Release|x86.ActiveCfg = Release|x86
+		{D01B3597-E85E-42F4-940A-EF5AE712942F}.Release|x86.Build.0 = Release|x86
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal
diff --git a/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Properties/AssemblyInfo.cs b/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Properties/AssemblyInfo.cs
new file mode 100644
index 000000000000..633d23c05ff2
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("Wrapper")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Philips Healthcare")]
+[assembly: AssemblyProduct("Wrapper")]
+[assembly: AssemblyCopyright("Copyright © Philips Healthcare 2011")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components.  If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("2045ce22-78c7-4cd6-ad0a-9367f8a49738")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version
+//      Build Number
+//      Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Wrapper.cs.in b/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Wrapper.cs.in
new file mode 100644
index 000000000000..abad090ebcbf
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Wrapper.cs.in
@@ -0,0 +1,66 @@
+using System;
+using System.Reflection;
+using System.IO;
+
+namespace @NAMESPACE@Wrapper
+{
+    class @MAINCLASSNAME@Wrapper
+    {
+        private String[] AssemblySearchPaths = { @ASSEMBLYSEARCHPATH@ };
+
+        private String ExePath = @"@EXEPATH@";
+
+        private String MainClassName = "@NAMESPACE@.@MAINCLASSNAME@";
+
+        private Assembly exeAssembly;
+
+        public @MAINCLASSNAME@Wrapper(string[] args)
+        {
+            // Attach the resolve event handler to the AppDomain so that missing library assemblies will be searched
+            AppDomain currentDomain = AppDomain.CurrentDomain;
+            currentDomain.AssemblyResolve += new ResolveEventHandler(MyResolveEventHandler);
+
+            // Dynamically load the executable assembly
+            exeAssembly = Assembly.LoadFrom(ExePath);
+
+            // Lookup the main class
+            Type mainClass = exeAssembly.GetType(MainClassName);
+
+            // Lookup the main method
+            MethodInfo mainMethod = mainClass.GetMethod("Main");
+
+            // Invoke the main method
+            mainMethod.Invoke(this, new Object[] {args});
+        }
+
+        static void Main(string[] args)
+        {
+            new @MAINCLASSNAME@Wrapper(args);
+        }
+
+        private Assembly MyResolveEventHandler(object sender, ResolveEventArgs args)
+        {
+            // This handler is called only when the common language runtime tries to bind to the assembly and fails.
+
+            Assembly MyAssembly;
+            String assemblyPath = "";
+            String requestedAssemblyName = args.Name.Substring(0, args.Name.IndexOf(","));
+
+            // Search for the right path of the library assembly
+            foreach (String currentAssemblyPath in AssemblySearchPaths)
+            {
+                assemblyPath = currentAssemblyPath + "/" + requestedAssemblyName + ".dll";
+
+                if (File.Exists(assemblyPath))
+                    break;
+            }
+
+            // Load the assembly from the specified path.
+            MyAssembly = Assembly.LoadFrom(assemblyPath);
+
+            // Return the loaded assembly.
+            return MyAssembly;
+        }
+
+    }
+}
diff --git a/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Wrapper.csproj.in b/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Wrapper.csproj.in
new file mode 100644
index 000000000000..a991bcb6933a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetenv/Wrapper/Wrapper/Wrapper.csproj.in
@@ -0,0 +1,57 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">x86</Platform>
+    <ProductVersion>8.0.30703</ProductVersion>
+    <SchemaVersion>2.0</SchemaVersion>
+    <ProjectGuid>{D01B3597-E85E-42F4-940A-EF5AE712942F}</ProjectGuid>
+    <OutputType>Exe</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>@ROOTNAMESPACE@</RootNamespace>
+    <AssemblyName>@ASSEMBLYNAME@</AssemblyName>
+    <TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
+    <TargetFrameworkProfile>Client</TargetFrameworkProfile>
+    <FileAlignment>512</FileAlignment>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x86' ">
+    <PlatformTarget>x86</PlatformTarget>
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>bin\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|x86' ">
+    <PlatformTarget>x86</PlatformTarget>
+    <DebugType>pdbonly</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>bin\Release\</OutputPath>
+    <DefineConstants>TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+    <Reference Include="System.Xml.Linq" />
+    <Reference Include="System.Data.DataSetExtensions" />
+    <Reference Include="Microsoft.CSharp" />
+    <Reference Include="System.Data" />
+    <Reference Include="System.Xml" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="Wrapper.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it.
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file
diff --git a/nixpkgs/pkgs/build-support/dotnetenv/build-solution.nix b/nixpkgs/pkgs/build-support/dotnetenv/build-solution.nix
new file mode 100644
index 000000000000..b3372b942177
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetenv/build-solution.nix
@@ -0,0 +1,85 @@
+{ lib, stdenv, dotnetfx }:
+{ name
+, src
+, baseDir ? "."
+, slnFile
+, targets ? "ReBuild"
+, verbosity ? "detailed"
+, options ? "/p:Configuration=Debug;Platform=Win32"
+, assemblyInputs ? []
+, preBuild ? ""
+, modifyPublicMain ? false
+, mainClassFile ? null
+}:
+
+assert modifyPublicMain -> mainClassFile != null;
+
+stdenv.mkDerivation {
+  inherit name src;
+
+  buildInputs = [ dotnetfx ];
+
+  preConfigure = ''
+    cd ${baseDir}
+  '';
+
+  preBuild = ''
+    ${lib.optionalString modifyPublicMain ''
+      sed -i -e "s|static void Main|public static void Main|" ${mainClassFile}
+    ''}
+    ${preBuild}
+  '';
+
+  installPhase = ''
+    addDeps()
+    {
+	if [ -f $1/nix-support/dotnet-assemblies ]
+	then
+	    for i in $(cat $1/nix-support/dotnet-assemblies)
+	    do
+		windowsPath=$(cygpath --windows $i)
+		assemblySearchPaths="$assemblySearchPaths;$windowsPath"
+
+		addDeps $i
+	    done
+	fi
+    }
+
+    for i in ${toString assemblyInputs}
+    do
+	windowsPath=$(cygpath --windows $i)
+	echo "Using assembly path: $windowsPath"
+
+	if [ "$assemblySearchPaths" = "" ]
+	then
+	    assemblySearchPaths="$windowsPath"
+	else
+	    assemblySearchPaths="$assemblySearchPaths;$windowsPath"
+	fi
+
+	addDeps $i
+    done
+
+    echo "Assembly search paths are: $assemblySearchPaths"
+
+    if [ "$assemblySearchPaths" != "" ]
+    then
+	echo "Using assembly search paths: $assemblySearchPaths"
+	export AssemblySearchPaths=$assemblySearchPaths
+    fi
+
+    mkdir -p $out
+    MSBuild.exe ${toString slnFile} /nologo /t:${targets} /p:IntermediateOutputPath=$(cygpath --windows $out)\\ /p:OutputPath=$(cygpath --windows $out)\\ /verbosity:${verbosity} ${options}
+
+    # Because .NET assemblies store strings as UTF-16 internally, we cannot detect
+    # hashes. Therefore a text file containing the proper paths is created.
+    # We can also use this file to propagate transitive dependencies.
+
+    mkdir -p $out/nix-support
+
+    for i in ${toString assemblyInputs}
+    do
+        echo $i >> $out/nix-support/dotnet-assemblies
+    done
+  '';
+}
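For orientation, a minimal sketch of calling this builder (all concrete values below are illustrative assumptions, not taken from the diff):

    dotnetenv.buildSolution {
      name = "my-app";
      src = ./.;                   # hypothetical source tree
      slnFile = "MyApp.sln";       # hypothetical solution file
      assemblyInputs = [ ];        # other assembly-providing derivations
    }

Each assembly input (and, transitively, anything listed in its nix-support/dotnet-assemblies file) is converted with cygpath and joined into the semicolon-separated AssemblySearchPaths variable that MSBuild.exe consumes.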
diff --git a/nixpkgs/pkgs/build-support/dotnetenv/default.nix b/nixpkgs/pkgs/build-support/dotnetenv/default.nix
new file mode 100644
index 000000000000..3015db42b07b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetenv/default.nix
@@ -0,0 +1,17 @@
+{ lib, stdenv, dotnetfx }:
+
+let dotnetenv =
+{
+  buildSolution = import ./build-solution.nix {
+    inherit lib stdenv;
+    dotnetfx = dotnetfx.pkg;
+  };
+
+  buildWrapper = import ./wrapper.nix {
+    inherit dotnetenv;
+  };
+
+  inherit (dotnetfx) assembly20Path wcfPath referenceAssembly30Path referenceAssembly35Path;
+};
+in
+dotnetenv
diff --git a/nixpkgs/pkgs/build-support/dotnetenv/wrapper.nix b/nixpkgs/pkgs/build-support/dotnetenv/wrapper.nix
new file mode 100644
index 000000000000..423303c3084a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/dotnetenv/wrapper.nix
@@ -0,0 +1,64 @@
+{dotnetenv}:
+
+{ name
+, src
+, baseDir ? "."
+, slnFile
+, targets ? "ReBuild"
+, verbosity ? "detailed"
+, options ? "/p:Configuration=Debug;Platform=Win32"
+, assemblyInputs ? []
+, preBuild ? ""
+, namespace
+, mainClassName
+, mainClassFile
+, modifyPublicMain ? true
+}:
+
+let
+  application = dotnetenv.buildSolution {
+    inherit name src baseDir slnFile targets verbosity;
+    inherit options assemblyInputs preBuild;
+    inherit modifyPublicMain mainClassFile;
+  };
+in
+dotnetenv.buildSolution {
+  name = "${name}-wrapper";
+  src = ./Wrapper;
+  slnFile = "Wrapper.sln";
+  assemblyInputs = [ application ];
+  preBuild = ''
+    addRuntimeDeps()
+    {
+	if [ -f $1/nix-support/dotnet-assemblies ]
+	then
+	    for i in $(cat $1/nix-support/dotnet-assemblies)
+	    do
+		windowsPath=$(cygpath --windows $i | sed 's|\\|\\\\|g')
+		assemblySearchArray="$assemblySearchArray @\"$windowsPath\""
+
+		addRuntimeDeps $i
+	    done
+	fi
+    }
+
+    export exePath=$(cygpath --windows $(find ${application} -name \*.exe) | sed 's|\\|\\\\|g')
+
+    # Generate assemblySearchPaths string array contents
+    for path in ${toString assemblyInputs}
+    do
+        assemblySearchArray="$assemblySearchArray @\"$(cygpath --windows $path | sed 's|\\|\\\\|g')\", "
+	addRuntimeDeps $path
+    done
+
+    sed -e "s|@ROOTNAMESPACE@|${namespace}Wrapper|" \
+        -e "s|@ASSEMBLYNAME@|${namespace}|" \
+        Wrapper/Wrapper.csproj.in > Wrapper/Wrapper.csproj
+
+    sed -e "s|@NAMESPACE@|${namespace}|g" \
+        -e "s|@MAINCLASSNAME@|${mainClassName}|g" \
+	-e "s|@EXEPATH@|$exePath|g" \
+	-e "s|@ASSEMBLYSEARCHPATH@|$assemblySearchArray|" \
+        Wrapper/Wrapper.cs.in > Wrapper/Wrapper.cs
+  '';
+}
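A hedged sketch of calling the wrapper builder defined above (every concrete value is illustrative):

    dotnetenv.buildWrapper {
      name = "my-app";
      src = ./.;                       # hypothetical
      slnFile = "MyApp.sln";           # hypothetical
      namespace = "MyCompany.MyApp";   # hypothetical
      mainClassName = "Program";       # hypothetical
      mainClassFile = "MyApp/Program.cs";
    }

This first builds the application with buildSolution, then compiles the Wrapper project with @NAMESPACE@, @MAINCLASSNAME@, @EXEPATH@, and @ASSEMBLYSEARCHPATH@ substituted into the .cs.in and .csproj.in templates shown earlier.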
diff --git a/nixpkgs/pkgs/build-support/emacs/buffer.nix b/nixpkgs/pkgs/build-support/emacs/buffer.nix
new file mode 100644
index 000000000000..8f824f4e1a57
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/buffer.nix
@@ -0,0 +1,79 @@
+# Functions to build elisp files to locally configure emacs buffers.
+# See https://github.com/shlevy/nix-buffer
+
+{ lib, writeText, inherit-local }:
+
+rec {
+  withPackages = pkgs': let
+      pkgs = builtins.filter (x: x != null) pkgs';
+      extras = map (x: x.emacsBufferSetup pkgs) (builtins.filter (builtins.hasAttr "emacsBufferSetup") pkgs);
+    in writeText "dir-locals.el" ''
+      (require 'inherit-local "${inherit-local}/share/emacs/site-lisp/elpa/inherit-local-${inherit-local.version}/inherit-local.elc")
+
+      ; Only set up nixpkgs buffer handling when we have some buffers active
+      (defvar nixpkgs--buffer-count 0)
+      (when (eq nixpkgs--buffer-count 0)
+        (make-variable-buffer-local 'nixpkgs--is-nixpkgs-buffer)
+        ; When generating a new temporary buffer (one whose name starts with a space), do inherit-local inheritance and make it a nixpkgs buffer
+        (defun nixpkgs--around-generate (orig name)
+          (if (and nixpkgs--is-nixpkgs-buffer (eq (aref name 0) ?\s))
+              (let ((buf (funcall orig name)))
+                (progn
+                  (inherit-local-inherit-child buf)
+                  (with-current-buffer buf
+                    (setq nixpkgs--buffer-count (1+ nixpkgs--buffer-count))
+                    (add-hook 'kill-buffer-hook 'nixpkgs--decrement-buffer-count nil t)))
+                buf)
+            (funcall orig name)))
+        (advice-add 'generate-new-buffer :around #'nixpkgs--around-generate)
+        ; When we have no more nixpkgs buffers, tear down the buffer handling
+        (defun nixpkgs--decrement-buffer-count ()
+          (setq nixpkgs--buffer-count (1- nixpkgs--buffer-count))
+          (when (eq nixpkgs--buffer-count 0)
+            (advice-remove 'generate-new-buffer #'nixpkgs--around-generate)
+            (fmakunbound 'nixpkgs--around-generate)
+            (fmakunbound 'nixpkgs--decrement-buffer-count))))
+      (setq nixpkgs--buffer-count (1+ nixpkgs--buffer-count))
+      (add-hook 'kill-buffer-hook 'nixpkgs--decrement-buffer-count nil t)
+
+      ; Add packages to PATH and exec-path
+      (make-local-variable 'process-environment)
+      (put 'process-environment 'permanent-local t)
+      (inherit-local 'process-environment)
+      ; setenv modifies in place, so copy the environment first
+      (setq process-environment (copy-tree process-environment))
+      (setenv "PATH" (concat "${lib.makeSearchPath "bin" pkgs}:" (getenv "PATH")))
+      (inherit-local-permanent exec-path (append '(${builtins.concatStringsSep " " (map (p: "\"${p}/bin\"") pkgs)}) exec-path))
+
+      (inherit-local-permanent eshell-path-env (concat "${lib.makeSearchPath "bin" pkgs}:" eshell-path-env))
+
+      (setq nixpkgs--is-nixpkgs-buffer t)
+      (inherit-local 'nixpkgs--is-nixpkgs-buffer)
+
+      ${lib.concatStringsSep "\n" extras}
+    '';
+  # nix-buffer function for a project with a bunch of haskell packages
+  # in one directory
+  haskellMonoRepo = { project-root # The monorepo root
+                    , haskellPackages # The composed haskell packages set that contains all of the packages
+                    }: { root }:
+    let # The haskell paths.
+        haskell-paths = lib.filesystem.haskellPathsInDir project-root;
+        # Find the haskell package that the 'root' is in, if any.
+        haskell-path-parent =
+          let filtered = builtins.filter (name:
+            lib.hasPrefix (toString (project-root + "/${name}")) (toString root)
+          ) (builtins.attrNames haskell-paths);
+          in
+            if filtered == [] then null else builtins.head filtered;
+        # We're in the directory of a haskell package
+        is-haskell-package = haskell-path-parent != null;
+        haskell-package = haskellPackages.${haskell-path-parent};
+        # GHC environment with all needed deps for the haskell package
+        haskell-package-env =
+          builtins.head haskell-package.env.nativeBuildInputs;
+    in
+      if is-haskell-package
+        then withPackages [ haskell-package-env ]
+        else {};
+}
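As a sketch, haskellMonoRepo can serve directly as a nix-buffer dir.nix, since applying it to its configuration yields the { root }: function nix-buffer expects. The wiring and all paths below are assumptions:

    # dir.nix at the monorepo root; nix-buffer calls the result with { root }
    let
      pkgs = import <nixpkgs> { };
      # assumed wiring of this file; inherit-local must come from an Emacs package set
      bufferFns = pkgs.callPackage ./buffer.nix { inherit (pkgs.emacs.pkgs) inherit-local; };
    in bufferFns.haskellMonoRepo {
      project-root = /home/user/monorepo;       # hypothetical path
      haskellPackages = pkgs.haskellPackages;
    }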
diff --git a/nixpkgs/pkgs/build-support/emacs/elpa.nix b/nixpkgs/pkgs/build-support/emacs/elpa.nix
new file mode 100644
index 000000000000..41a0670d0c84
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/elpa.nix
@@ -0,0 +1,41 @@
+# builder for Emacs packages built for package.el
+
+{ lib, stdenv, emacs, texinfo, writeText }:
+
+with lib;
+
+{ pname
+, version
+, src
+, meta ? {}
+, ...
+}@args:
+
+let
+
+  defaultMeta = {
+    homepage = args.src.meta.homepage or "https://elpa.gnu.org/packages/${pname}.html";
+  };
+
+in
+
+import ./generic.nix { inherit lib stdenv emacs texinfo writeText; } ({
+
+  phases = "installPhase fixupPhase distPhase";
+
+  installPhase = ''
+    runHook preInstall
+
+    emacs --batch -Q -l ${./elpa2nix.el} \
+        -f elpa2nix-install-package \
+        "${src}" "$out/share/emacs/site-lisp/elpa"
+
+    runHook postInstall
+  '';
+
+  meta = defaultMeta // meta;
+}
+
+// removeAttrs args [ "files" "fileSpecs"
+                      "meta"
+                    ])
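A hedged usage sketch, assuming this file is exposed as elpaBuild and with placeholder URL/hash values:

    elpaBuild {
      pname = "auctex";
      version = "13.0";
      src = fetchurl {
        url = "https://elpa.gnu.org/packages/auctex-13.0.tar";
        sha256 = "<hash>";   # placeholder
      };
    }

elpa2nix.el then unpacks the archive with package.el into $out/share/emacs/site-lisp/elpa.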
diff --git a/nixpkgs/pkgs/build-support/emacs/elpa2nix.el b/nixpkgs/pkgs/build-support/emacs/elpa2nix.el
new file mode 100644
index 000000000000..64587c0fad1a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/elpa2nix.el
@@ -0,0 +1,33 @@
+(require 'package)
+(package-initialize)
+
+(defun elpa2nix-install-package ()
+  (if (not noninteractive)
+      (error "`elpa2nix-install-package' is to be used only with -batch"))
+  (pcase command-line-args-left
+    (`(,archive ,elpa)
+     (progn (setq package-user-dir elpa)
+            (elpa2nix-install-file archive)))))
+
+(defun elpa2nix-install-from-buffer ()
+  "Install a package from the current buffer."
+  (let ((pkg-desc (if (derived-mode-p 'tar-mode)
+                      (package-tar-file-info)
+                    (package-buffer-info))))
+    ;; Install the package itself.
+    (package-unpack pkg-desc)
+    pkg-desc))
+
+(defun elpa2nix-install-file (file)
+  "Install a package from a file.
+The file can either be a tar file or an Emacs Lisp file."
+  (let ((is-tar (string-match "\\.tar\\'" file)))
+    (with-temp-buffer
+      (if is-tar
+          (insert-file-contents-literally file)
+        (insert-file-contents file))
+      (when is-tar (tar-mode))
+      (elpa2nix-install-from-buffer))))
+
+;; Allow installing package tarfiles larger than 10MB
+(setq large-file-warning-threshold nil)
diff --git a/nixpkgs/pkgs/build-support/emacs/emacs-funcs.sh b/nixpkgs/pkgs/build-support/emacs/emacs-funcs.sh
new file mode 100644
index 000000000000..e1e6a3b62208
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/emacs-funcs.sh
@@ -0,0 +1,34 @@
+addToEmacsLoadPath() {
+  local lispDir="$1"
+  if [[ -d $lispDir && ${EMACSLOADPATH-} != *"$lispDir":* ]] ; then
+    # It turns out, that the trailing : is actually required
+    # see https://www.gnu.org/software/emacs/manual/html_node/elisp/Library-Search.html
+    export EMACSLOADPATH="$lispDir:${EMACSLOADPATH-}"
+  fi
+}
+
+addToEmacsNativeLoadPath() {
+  local nativeDir="$1"
+  if [[ -d $nativeDir && ${EMACSNATIVELOADPATH-} != *"$nativeDir":* ]]; then
+    export EMACSNATIVELOADPATH="$nativeDir:${EMACSNATIVELOADPATH-}"
+  fi
+}
+
+addEmacsVars () {
+  addToEmacsLoadPath "$1/share/emacs/site-lisp"
+
+  if [ -n "${addEmacsNativeLoadPath:-}" ]; then
+    addToEmacsNativeLoadPath "$1/share/emacs/native-lisp"
+  fi
+
+  # Add sub paths to the Emacs load path if it is a directory
+  # containing .el files. This is necessary to build some packages,
+  # e.g., using trivialBuild.
+  for lispDir in \
+      "$1/share/emacs/site-lisp/"* \
+      "$1/share/emacs/site-lisp/elpa/"*; do
+    if [[ -d $lispDir && "$(echo "$lispDir"/*.el)" ]] ; then
+      addToEmacsLoadPath "$lispDir"
+    fi
+  done
+}
diff --git a/nixpkgs/pkgs/build-support/emacs/generic.nix b/nixpkgs/pkgs/build-support/emacs/generic.nix
new file mode 100644
index 000000000000..1456d9e423d8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/generic.nix
@@ -0,0 +1,93 @@
+# generic builder for Emacs packages
+
+{ lib, stdenv, emacs, texinfo, writeText, ... }:
+
+with lib;
+
+{ pname
+, version ? null
+
+, buildInputs ? []
+, packageRequires ? []
+
+, meta ? {}
+
+, ...
+}@args:
+
+let
+
+  defaultMeta = {
+    broken = false;
+    platforms = emacs.meta.platforms;
+  } // optionalAttrs ((args.src.meta.homepage or "") != "") {
+    homepage = args.src.meta.homepage;
+  };
+
+in
+
+stdenv.mkDerivation ({
+  name = "emacs-${pname}${optionalString (version != null) "-${version}"}";
+
+  unpackCmd = ''
+    case "$curSrc" in
+      *.el)
+        # keep original source filename without the hash
+        local filename=$(basename "$curSrc")
+        filename="''${filename:33}"
+        cp $curSrc $filename
+        chmod +w $filename
+        sourceRoot="."
+        ;;
+      *)
+        _defaultUnpack "$curSrc"
+        ;;
+    esac
+  '';
+
+  buildInputs = [emacs texinfo] ++ packageRequires ++ buildInputs;
+  propagatedBuildInputs = packageRequires;
+  propagatedUserEnvPkgs = packageRequires;
+
+  setupHook = writeText "setup-hook.sh" ''
+    source ${./emacs-funcs.sh}
+
+    if [[ ! -v emacsHookDone ]]; then
+      emacsHookDone=1
+
+      # If this is for a wrapper derivation, emacs and the dependencies are all
+      # run-time dependencies. If this is for precompiling packages into bytecode,
+      # emacs is a compile-time dependency of the package.
+      addEnvHooks "$hostOffset" addEmacsVars
+      addEnvHooks "$targetOffset" addEmacsVars
+    fi
+  '';
+
+  doCheck = false;
+
+  meta = defaultMeta // meta;
+}
+
+// lib.optionalAttrs (emacs.nativeComp or false) {
+
+  LIBRARY_PATH = "${lib.getLib stdenv.cc.libc}/lib";
+
+  addEmacsNativeLoadPath = true;
+
+  postInstall = ''
+    # Besides adding the output directory to the native load path, make sure
+    # the current package's elisp files are in the load path, otherwise
+    # (require 'file-b) from file-a.el in the same package will fail.
+    mkdir -p $out/share/emacs/native-lisp
+    source ${./emacs-funcs.sh}
+    addEmacsVars "$out"
+
+    find $out/share/emacs -type f -name '*.el' -print0 \
+      | xargs -0 -n 1 -I {} -P $NIX_BUILD_CORES sh -c \
+          "emacs --batch -f batch-native-compile {} || true"
+  '';
+}
+
+// removeAttrs args [ "buildInputs" "packageRequires"
+                      "meta"
+                    ])
diff --git a/nixpkgs/pkgs/build-support/emacs/melpa.nix b/nixpkgs/pkgs/build-support/emacs/melpa.nix
new file mode 100644
index 000000000000..824611b20c8a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/melpa.nix
@@ -0,0 +1,97 @@
+# builder for Emacs packages built for package.el
+# using MELPA package-build.el
+
+{ lib, stdenv, fetchFromGitHub, emacs, texinfo, writeText }:
+
+with lib;
+
+{ /*
+    pname: Nix package name without special symbols and without version or
+    "emacs-" prefix.
+  */
+  pname
+  /*
+    ename: Original Emacs package name, possibly containing special symbols.
+  */
+, ename ? null
+, version
+, recipe
+, meta ? {}
+, ...
+}@args:
+
+let
+
+  defaultMeta = {
+    homepage = args.src.meta.homepage or "https://melpa.org/#/${pname}";
+  };
+
+in
+
+import ./generic.nix { inherit lib stdenv emacs texinfo writeText; } ({
+
+  ename =
+    if ename == null
+    then pname
+    else ename;
+
+  packageBuild = fetchFromGitHub {
+    owner = "melpa";
+    repo = "package-build";
+    rev = "0a22c3fbbf661822ec1791739953b937a12fa623";
+    sha256 = "0dpy5p34il600sc8ic5jdgb3glya9si3lrvhxab0swks8fdydjgs";
+  };
+
+  elpa2nix = ./elpa2nix.el;
+  melpa2nix = ./melpa2nix.el;
+
+  preUnpack = ''
+    mkdir -p "$NIX_BUILD_TOP/recipes"
+    if [ -n "$recipe" ]; then
+      cp "$recipe" "$NIX_BUILD_TOP/recipes/$ename"
+    fi
+
+    ln -s "$packageBuild" "$NIX_BUILD_TOP/package-build"
+
+    mkdir -p "$NIX_BUILD_TOP/packages"
+  '';
+
+  postUnpack = ''
+    mkdir -p "$NIX_BUILD_TOP/working"
+    ln -s "$NIX_BUILD_TOP/$sourceRoot" "$NIX_BUILD_TOP/working/$ename"
+  '';
+
+  buildPhase = ''
+    runHook preBuild
+
+    cd "$NIX_BUILD_TOP"
+
+    emacs --batch -Q \
+        -L "$NIX_BUILD_TOP/package-build" \
+        -l "$melpa2nix" \
+        -f melpa2nix-build-package \
+        $ename $version
+
+    runHook postBuild
+    '';
+
+  installPhase = ''
+    runHook preInstall
+
+    archive="$NIX_BUILD_TOP/packages/$ename-$version.el"
+    if [ ! -f "$archive" ]; then
+        archive="$NIX_BUILD_TOP/packages/$ename-$version.tar"
+    fi
+
+    emacs --batch -Q \
+        -l "$elpa2nix" \
+        -f elpa2nix-install-package \
+        "$archive" "$out/share/emacs/site-lisp/elpa"
+
+    runHook postInstall
+  '';
+
+  meta = defaultMeta // meta;
+}
+
+// removeAttrs args [ "meta" ])
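A sketch of a call, assuming this builder is exposed as melpaBuild; the recipe uses MELPA's s-expression format and every concrete value below is a placeholder:

    melpaBuild {
      pname = "example-mode";
      version = "20210101.1";
      src = fetchFromGitHub {
        owner = "example"; repo = "example-mode";
        rev = "<commit>"; sha256 = "<hash>";   # placeholders
      };
      recipe = writeText "recipe" ''
        (example-mode :repo "example/example-mode" :fetcher github)
      '';
    }

package-build.el (pinned above by rev/sha256) produces the archive under packages/, which elpa2nix.el then installs.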
diff --git a/nixpkgs/pkgs/build-support/emacs/melpa2nix.el b/nixpkgs/pkgs/build-support/emacs/melpa2nix.el
new file mode 100644
index 000000000000..99c755e2afcb
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/melpa2nix.el
@@ -0,0 +1,16 @@
+(require 'package)
+(package-initialize)
+
+(require 'package-recipe)
+(require 'package-build)
+
+(setq package-build-working-dir (expand-file-name "working/"))
+(setq package-build-archive-dir (expand-file-name "packages/"))
+(setq package-build-recipes-dir (expand-file-name "recipes/"))
+
+(defun melpa2nix-build-package ()
+  (if (not noninteractive)
+      (error "`melpa2nix-build-package' is to be used only with -batch"))
+  (pcase command-line-args-left
+    (`(,package ,version)
+     (package-build--package (package-recipe-lookup package) version))))
diff --git a/nixpkgs/pkgs/build-support/emacs/trivial.nix b/nixpkgs/pkgs/build-support/emacs/trivial.nix
new file mode 100644
index 000000000000..f1aa078df272
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/trivial.nix
@@ -0,0 +1,29 @@
+# trivial builder for Emacs packages
+
+{ callPackage, lib, ... }@envargs:
+
+with lib;
+
+args:
+
+callPackage ./generic.nix envargs ({
+  buildPhase = ''
+    runHook preBuild
+
+    emacs -L . --batch -f batch-byte-compile *.el
+
+    runHook postBuild
+  '';
+
+  installPhase = ''
+    runHook preInstall
+
+    LISPDIR=$out/share/emacs/site-lisp
+    install -d $LISPDIR
+    install *.el *.elc $LISPDIR
+
+    runHook postInstall
+  '';
+}
+
+// args)
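For example (assuming the conventional exposure as emacs.pkgs.trivialBuild; values are placeholders):

    emacs.pkgs.trivialBuild {
      pname = "my-site-lisp";
      version = "0.1";
      src = ./elisp;   # hypothetical directory of *.el files
    }

Every *.el in the source is byte-compiled and installed, together with its .elc output, into $out/share/emacs/site-lisp.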
diff --git a/nixpkgs/pkgs/build-support/emacs/wrapper.nix b/nixpkgs/pkgs/build-support/emacs/wrapper.nix
new file mode 100644
index 000000000000..571d0eb687ce
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/wrapper.nix
@@ -0,0 +1,219 @@
+/*
+
+# Usage
+
+`emacs.pkgs.withPackages` takes a single argument: a function from a package
+set to a list of packages (the packages that will be available in
+Emacs). For example,
+```
+emacs.pkgs.withPackages (epkgs: [ epkgs.evil epkgs.magit ])
+```
+All the packages in the list should come from the provided package
+set. It is possible to add any package to the list, but the provided
+set is guaranteed to have consistent dependencies and be built with
+the correct version of Emacs.
+
+# Overriding
+
+`emacs.pkgs.withPackages` inherits the package set which contains it, so the
+correct way to override the provided package set is to override the
+set which contains `emacs.pkgs.withPackages`. For example, to override
+`emacs.pkgs.emacs.pkgs.withPackages`,
+```
+let customEmacsPackages =
+      emacs.pkgs.overrideScope' (self: super: {
+        # use a custom version of emacs
+        emacs = ...;
+        # use the unstable MELPA version of magit
+        magit = self.melpaPackages.magit;
+      });
+in customEmacsPackages.emacs.pkgs.withPackages (epkgs: [ epkgs.evil epkgs.magit ])
+```
+
+*/
+
+{ lib, lndir, makeWrapper, runCommand }: self:
+
+with lib;
+
+let
+
+  inherit (self) emacs;
+
+  nativeComp = emacs.nativeComp or false;
+
+in
+
+packagesFun: # packages explicitly requested by the user
+
+let
+  explicitRequires =
+    if lib.isFunction packagesFun
+      then packagesFun self
+    else packagesFun;
+in
+
+runCommand
+  (appendToName "with-packages" emacs).name
+  {
+    nativeBuildInputs = [ emacs lndir makeWrapper ];
+    inherit emacs explicitRequires;
+
+    preferLocalBuild = true;
+    allowSubstitutes = false;
+
+    # Store all paths we want to add to emacs here, so that we only need to add
+    # one path to the load lists
+    deps = runCommand "emacs-packages-deps"
+      { inherit explicitRequires lndir emacs; }
+      ''
+        findInputsOld() {
+          local pkg="$1"; shift
+          local var="$1"; shift
+          local propagatedBuildInputsFiles=("$@")
+
+          # TODO(@Ericson2314): Restore using associative array once Darwin
+          # nix-shell doesn't use impure bash. This should replace the O(n)
+          # case with an O(1) hash map lookup, assuming bash is implemented
+          # well :D.
+          local varSlice="$var[*]"
+          # ''${..-} to hack around old bash empty array problem
+          case "''${!varSlice-}" in
+              *" $pkg "*) return 0 ;;
+          esac
+          unset -v varSlice
+
+          eval "$var"'+=("$pkg")'
+
+          if ! [ -e "$pkg" ]; then
+              echo "build input $pkg does not exist" >&2
+              exit 1
+          fi
+
+          local file
+          for file in "''${propagatedBuildInputsFiles[@]}"; do
+              file="$pkg/nix-support/$file"
+              [[ -f "$file" ]] || continue
+
+              local pkgNext
+              for pkgNext in $(< "$file"); do
+                  findInputsOld "$pkgNext" "$var" "''${propagatedBuildInputsFiles[@]}"
+              done
+          done
+        }
+        mkdir -p $out/bin
+        mkdir -p $out/share/emacs/site-lisp
+        ${optionalString nativeComp ''
+          mkdir -p $out/share/emacs/native-lisp
+        ''}
+
+        local requires
+        for pkg in $explicitRequires; do
+          findInputsOld $pkg requires propagated-user-env-packages
+        done
+        # requires now holds all requested packages and their transitive dependencies
+
+        linkPath() {
+          local pkg=$1
+          local origin_path=$2
+          local dest_path=$3
+
+          # Add the path to the search path list, but only if it exists
+          if [[ -d "$pkg/$origin_path" ]]; then
+            $lndir/bin/lndir -silent "$pkg/$origin_path" "$out/$dest_path"
+          fi
+        }
+
+        linkEmacsPackage() {
+          linkPath "$1" "bin" "bin"
+          linkPath "$1" "share/emacs/site-lisp" "share/emacs/site-lisp"
+          ${optionalString nativeComp ''
+            linkPath "$1" "share/emacs/native-lisp" "share/emacs/native-lisp"
+          ''}
+        }
+
+        # Iterate over the array of inputs (avoiding nix's own interpolation)
+        for pkg in "''${requires[@]}"; do
+          linkEmacsPackage $pkg
+        done
+
+        siteStart="$out/share/emacs/site-lisp/site-start.el"
+        siteStartByteCompiled="$siteStart"c
+        subdirs="$out/share/emacs/site-lisp/subdirs.el"
+        subdirsByteCompiled="$subdirs"c
+
+        # A dependency may have brought the original siteStart or subdirs, delete
+        # it and create our own
+        # Begin the new site-start.el by loading the original, which sets some
+        # NixOS-specific paths. Paths are searched in the reverse of the order
+        # they are specified in, so user and system profile paths are searched last.
+        #
+        # NOTE: Avoid displaying messages early at startup by binding
+        # inhibit-message to t. This would prevent the Emacs GUI from showing up
+        # prematurely. The messages would still be logged to the *Messages*
+        # buffer.
+        rm -f $siteStart $siteStartByteCompiled $subdirs $subdirsByteCompiled
+        cat >"$siteStart" <<EOF
+        (let ((inhibit-message t))
+          (load-file "$emacs/share/emacs/site-lisp/site-start.el"))
+        (add-to-list 'load-path "$out/share/emacs/site-lisp")
+        (add-to-list 'exec-path "$out/bin")
+        ${optionalString nativeComp ''
+          (add-to-list 'native-comp-eln-load-path "$out/share/emacs/native-lisp/")
+        ''}
+        EOF
+        # Link subdirs.el from the emacs distribution
+        ln -s $emacs/share/emacs/site-lisp/subdirs.el -T $subdirs
+
+        # Byte-compiling improves start-up time only slightly, but costs nothing.
+        $emacs/bin/emacs --batch -f batch-byte-compile "$siteStart" "$subdirs"
+
+        ${optionalString nativeComp ''
+          $emacs/bin/emacs --batch \
+            --eval "(add-to-list 'native-comp-eln-load-path \"$out/share/emacs/native-lisp/\")" \
+            -f batch-native-compile "$siteStart" "$subdirs"
+        ''}
+      '';
+
+    inherit (emacs) meta;
+  }
+  ''
+    mkdir -p "$out/bin"
+
+    # Wrap emacs and friends so they find our site-start.el before the original.
+    for prog in $emacs/bin/*; do # */
+      local progname=$(basename "$prog")
+      rm -f "$out/bin/$progname"
+
+      substitute ${./wrapper.sh} $out/bin/$progname \
+        --subst-var-by bash ${emacs.stdenv.shell} \
+        --subst-var-by wrapperSiteLisp "$deps/share/emacs/site-lisp" \
+        --subst-var-by wrapperSiteLispNative "$deps/share/emacs/native-lisp:" \
+        --subst-var prog
+      chmod +x $out/bin/$progname
+    done
+
+    # Wrap MacOS app
+    # this has to pick up resources and metadata
+    # to recognize it as an "app"
+    if [ -d "$emacs/Applications/Emacs.app" ]; then
+      mkdir -p $out/Applications/Emacs.app/Contents/MacOS
+      cp -r $emacs/Applications/Emacs.app/Contents/Info.plist \
+            $emacs/Applications/Emacs.app/Contents/PkgInfo \
+            $emacs/Applications/Emacs.app/Contents/Resources \
+            $out/Applications/Emacs.app/Contents
+
+
+      substitute ${./wrapper.sh} $out/Applications/Emacs.app/Contents/MacOS/Emacs \
+        --subst-var-by bash ${emacs.stdenv.shell} \
+        --subst-var-by wrapperSiteLisp "$deps/share/emacs/site-lisp" \
+        --subst-var-by prog "$emacs/Applications/Emacs.app/Contents/MacOS/Emacs"
+      chmod +x $out/Applications/Emacs.app/Contents/MacOS/Emacs
+    fi
+
+    mkdir -p $out/share
+    # Link icons and desktop files into place
+    for dir in applications icons info man emacs; do
+      ln -s $emacs/share/$dir $out/share/$dir
+    done
+  ''
diff --git a/nixpkgs/pkgs/build-support/emacs/wrapper.sh b/nixpkgs/pkgs/build-support/emacs/wrapper.sh
new file mode 100644
index 000000000000..e8eecb8c8696
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/emacs/wrapper.sh
@@ -0,0 +1,47 @@
+#!@bash@
+
+IFS=:
+
+newLoadPath=()
+newNativeLoadPath=()
+added=
+
+if [[ -n $EMACSLOADPATH ]]
+then
+    while read -rd: entry
+    do
+        if [[ -z $entry && -z $added ]]
+        then
+            newLoadPath+=(@wrapperSiteLisp@)
+            added=1
+        fi
+        newLoadPath+=("$entry")
+    done <<< "$EMACSLOADPATH:"
+else
+    newLoadPath+=(@wrapperSiteLisp@)
+    newLoadPath+=("")
+fi
+
+if [[ -n $EMACSNATIVELOADPATH ]]
+then
+    while read -rd: entry
+    do
+        if [[ -z $entry && -z $added ]]
+        then
+            newNativeLoadPath+=(@wrapperSiteLispNative@)
+            added=1
+        fi
+        newNativeLoadPath+=("$entry")
+    done <<< "$EMACSNATIVELOADPATH:"
+else
+    newNativeLoadPath+=(@wrapperSiteLispNative@)
+    newNativeLoadPath+=("")
+fi
+
+export EMACSLOADPATH="${newLoadPath[*]}"
+export emacsWithPackages_siteLisp=@wrapperSiteLisp@
+
+export EMACSNATIVELOADPATH="${newNativeLoadPath[*]}"
+export emacsWithPackages_siteLispNative=@wrapperSiteLispNative@
+
+exec @prog@ "$@"
diff --git a/nixpkgs/pkgs/build-support/expand-response-params/default.nix b/nixpkgs/pkgs/build-support/expand-response-params/default.nix
new file mode 100644
index 000000000000..402f0071a533
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/expand-response-params/default.nix
@@ -0,0 +1,19 @@
+{ stdenv }:
+
+stdenv.mkDerivation {
+  name = "expand-response-params";
+  src = ./expand-response-params.c;
+  # Work around "stdenv-darwin-boot-2 is not allowed to refer to path
+  # /nix/store/...-expand-response-params.c"
+  unpackPhase = ''
+    cp "$src" expand-response-params.c
+    src=$PWD
+  '';
+  buildPhase = ''
+    NIX_CC_USE_RESPONSE_FILE=0 "$CC" -std=c99 -O3 -o "expand-response-params" expand-response-params.c
+  '';
+  installPhase = ''
+    mkdir -p $prefix/bin
+    mv expand-response-params $prefix/bin/
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/expand-response-params/expand-response-params.c b/nixpkgs/pkgs/build-support/expand-response-params/expand-response-params.c
new file mode 100644
index 000000000000..05b9c62b1e8d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/expand-response-params/expand-response-params.c
@@ -0,0 +1,84 @@
+#include <assert.h>
+#include <ctype.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+typedef struct { char *data; size_t len, cap; } String;
+
+void resize(String *s, size_t len) {
+    s->len = len;
+    if (s->cap < s->len) {
+        s->cap = s->len * 2;
+        s->data = (char *)realloc(s->data, s->cap);
+        assert(s->data);
+    }
+}
+
+void append(String *s, const char *data, size_t len) {
+    resize(s, s->len + len);
+    memcpy(s->data + s->len - len, data, len);
+}
+
+typedef enum { space = 0, other = 1, backslash = 2, apostrophe = 3, quotation_mark = 4 } CharClass;
+typedef enum { outside, unq, unq_esc, sq, sq_esc, dq, dq_esc } State;
+
+// current State -> CharClass -> next State
+const State transitions[][5] = {
+    [outside] = {outside, unq, unq_esc, sq,  dq},
+    [unq]     = {outside, unq, unq_esc, sq,  dq},
+    [unq_esc] = {unq,     unq, unq,     unq, unq},
+    [sq]      = {sq,      sq,  sq_esc,  unq, sq},
+    [sq_esc]  = {sq,      sq,  sq,      sq,  sq},
+    [dq]      = {dq,      dq,  dq_esc,  dq,  unq},
+    [dq_esc]  = {dq,      dq,  dq,      dq,  dq},
+};
+
+CharClass charClass(int c) {
+    return c == '\\' ? backslash : c == '\'' ? apostrophe : c == '"' ? quotation_mark :
+            isspace(c) ? space : other;
+}
+
+// expandArg writes NULL-terminated expansions of `arg', a NULL-terminated
+// string, to stdout.  If arg does not begin with `@' or does not refer to a
+// file, it is written as is.  Otherwise the contents of the file are
+// recursively expanded.  On unexpected EOF in malformed response files an
+// incomplete final argument is written, even if it is empty, to parse like GCC.
+void expandArg(String *arg) {
+    FILE *f;
+    if (arg->data[0] != '@' || !(f = fopen(&arg->data[1], "r"))) {
+        fwrite(arg->data, 1, arg->len, stdout);
+        return;
+    }
+
+    resize(arg, 0);
+    State cur = outside;
+    int c;
+    do {
+        c = fgetc(f);
+        State next = transitions[cur][charClass(c)];
+        if ((cur == unq && next == outside) || (cur != outside && c == EOF)) {
+            append(arg, "", 1);
+            expandArg(arg);
+            resize(arg, 0);
+        } else if (cur == unq_esc || cur == sq_esc || cur == dq_esc ||
+                   (cur == outside ? next == unq : cur == next)) {
+            char s = c;
+            append(arg, &s, 1);
+        }
+        cur = next;
+    } while (c != EOF);
+
+    fclose(f);
+}
+
+int main(int argc, char **argv) {
+    String arg = { 0 };
+    while (*++argv) {
+        resize(&arg, 0);
+        append(&arg, *argv, strlen(*argv) + 1);
+        expandArg(&arg);
+    }
+    free(arg.data);
+    return EXIT_SUCCESS;
+}
diff --git a/nixpkgs/pkgs/build-support/fetchbitbucket/default.nix b/nixpkgs/pkgs/build-support/fetchbitbucket/default.nix
new file mode 100644
index 000000000000..e6e40c4379bb
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchbitbucket/default.nix
@@ -0,0 +1,9 @@
+{ fetchzip }:
+
+{ owner, repo, rev, name ? "source"
+, ... # For hash agility
+}@args: fetchzip ({
+  inherit name;
+  url = "https://bitbucket.org/${owner}/${repo}/get/${rev}.tar.gz";
+  meta.homepage = "https://bitbucket.org/${owner}/${repo}/";
+} // removeAttrs args [ "owner" "repo" "rev" ]) // { inherit rev; }
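This fetcher is exposed in nixpkgs as fetchFromBitbucket; a sketch with placeholder values:

    fetchFromBitbucket {
      owner = "owner";
      repo = "repo";
      rev = "v1.0";          # tag, bookmark, or commit id
      sha256 = "<hash>";     # placeholder
    }

Unrecognized attributes such as sha256 pass straight through to fetchzip via the // removeAttrs merge.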
diff --git a/nixpkgs/pkgs/build-support/fetchbower/default.nix b/nixpkgs/pkgs/build-support/fetchbower/default.nix
new file mode 100644
index 000000000000..fd971d431df8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchbower/default.nix
@@ -0,0 +1,28 @@
+{ stdenvNoCC, lib, bower2nix, cacert }:
+let
+  bowerVersion = version:
+    let
+      components = lib.splitString "#" version;
+      hash = lib.last components;
+      ver = if builtins.length components == 1 then (cleanName version) else hash;
+    in ver;
+
+  cleanName = name: lib.replaceStrings ["/" ":"] ["-" "-"] name;
+
+  fetchbower = name: version: target: outputHash: stdenvNoCC.mkDerivation {
+    name = "${cleanName name}-${bowerVersion version}";
+    buildCommand = ''
+      fetch-bower --quiet --out=$PWD/out "${name}" "${target}" "${version}"
+      # In some cases, the result of fetchBower is different depending
+      # on the output directory (e.g. if the bower package contains
+      # symlinks). So use a local output directory before copying to
+      # $out.
+      cp -R out $out
+    '';
+    outputHashMode = "recursive";
+    outputHashAlgo = "sha256";
+    inherit outputHash;
+    nativeBuildInputs = [ bower2nix cacert ];
+  };
+
+in fetchbower
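Unlike most fetchers, fetchbower is curried rather than attrset-based; a sketch with placeholder values:

    fetchbower "jquery" "3.4.1" "jquery#3.4.1" "<hash>"

i.e. package name, resolved version, the bower version spec, and the expected recursive sha256 of the fetched tree.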
diff --git a/nixpkgs/pkgs/build-support/fetchbzr/builder.sh b/nixpkgs/pkgs/build-support/fetchbzr/builder.sh
new file mode 100644
index 000000000000..e424fd92d51e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchbzr/builder.sh
@@ -0,0 +1,9 @@
+source "$stdenv/setup"
+
+header "exporting \`$url' (revision $rev) into \`$out'"
+
+# Perform a lightweight checkout so that we don't end up importing
+# all the repository's history.
+BZR_LOG=/dev/null bzr -Ossl.cert_reqs=none export -r "$rev" --format=dir "$out" "$url"
+
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchbzr/default.nix b/nixpkgs/pkgs/build-support/fetchbzr/default.nix
new file mode 100644
index 000000000000..b7db9e9274da
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchbzr/default.nix
@@ -0,0 +1,15 @@
+{ stdenvNoCC, breezy }:
+{ url, rev, sha256 }:
+
+stdenvNoCC.mkDerivation {
+  name = "bzr-export";
+
+  builder = ./builder.sh;
+  nativeBuildInputs = [ breezy ];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  inherit url rev;
+}
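A sketch of a fetchbzr call (the URL and hash are placeholders):

    fetchbzr {
      url = "https://bazaar.launchpad.net/~owner/project/trunk";   # hypothetical
      rev = "42";
      sha256 = "<hash>";
    }

The output hash is taken recursively over the exported tree, so the revision must yield bit-identical content for reproducibility.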
diff --git a/nixpkgs/pkgs/build-support/fetchbzr/nix-prefetch-bzr b/nixpkgs/pkgs/build-support/fetchbzr/nix-prefetch-bzr
new file mode 100755
index 000000000000..dbe8a7ef8013
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchbzr/nix-prefetch-bzr
@@ -0,0 +1,74 @@
+#! /bin/sh -e
+
+url=$1
+rev=$2
+expHash=$3
+
+hashType=$NIX_HASH_ALGO
+if test -z "$hashType"; then
+    hashType=sha256
+fi
+if test -z "$hashFormat"; then
+    hashFormat=--base32
+fi
+
+if test -z "$url"; then
+    echo "syntax: nix-prefetch-bzr URL [REVISION [EXPECTED-HASH]]" >&2
+    exit 1
+fi
+
+revarg="-r $rev"
+test -n "$rev" || revarg=""
+
+repoName=$(echo $url | sed '
+  s,.*/\([^/]\+\)/trunk/*$,\1,;t
+  s,.*/\([^/]\+\)/branches/\([^/]\+\)/*$,\1-\2,;t
+  s,.*/\([^/]\+\)/tags/\([^/]\+\)/*$,\1-\2,;t
+  s,.*/\([^/]\+\)/*$,\1,;t
+')
+dstFile="bzr-export"
+
+# If the hash was given, a file with that hash may already be in the
+# store.
+if test -n "$expHash"; then
+    finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" $dstFile)
+    if ! nix-store --check-validity "$finalPath" 2> /dev/null; then
+        finalPath=
+    fi
+    hash=$expHash
+fi
+
+
+# If we don't know the hash or a path with that hash doesn't exist,
+# download the file and add it to the store.
+if test -z "$finalPath"; then
+    tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/bzr-checkout-tmp-XXXXXXXX")"
+    trap "rm -rf \"$tmpPath\"" EXIT
+
+    tmpFile="$tmpPath/$dstFile"
+
+    # Perform the checkout.
+    bzr -Ossl.cert_reqs=none export $revarg --format=dir "$tmpFile" "$url"
+
+    echo "bzr revision is $(bzr revno $revarg "$url")"
+
+    # Compute the hash.
+    hash=$(nix-hash --type $hashType $hashFormat $tmpFile)
+    if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
+
+    # Add the downloaded file to the Nix store.
+    finalPath=$(nix-store --add-fixed --recursive "$hashType" $tmpFile)
+
+    if test -n "$expHash" -a "$expHash" != "$hash"; then
+        echo "hash mismatch for URL \`$url'"
+        exit 1
+    fi
+fi
+
+if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi
+
+echo $hash
+
+if test -n "$PRINT_PATH"; then
+    echo $finalPath
+fi
diff --git a/nixpkgs/pkgs/build-support/fetchcvs/builder.sh b/nixpkgs/pkgs/build-support/fetchcvs/builder.sh
new file mode 100644
index 000000000000..fe1019aafc2f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchcvs/builder.sh
@@ -0,0 +1,27 @@
+source $stdenv/setup
+
+(echo "#!$SHELL"; \
+ echo 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "$@"') > ssh
+chmod +x ssh
+export CVS_RSH=$PWD/ssh
+
+# Create the export directory and check out there, only so that the
+# content can be moved into $out without the root directory.
+# cvs -f -d "$url" export $tag -d "$out" "$module"
+# should work (but didn't - got no response on #cvs).
+# See also the man page for these options.
+
+mkdir -p export
+if [ -n "$tag" ]; then
+    tag="-r $tag"
+else
+    if [ -n "$date" ]; then
+        tag="-D $date"
+    else
+        tag="-D NOW"
+    fi
+fi
+(cd export && cvs -f -z0 -d "$cvsRoot" export $tag "$module")
+mv export/* $out
+
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchcvs/default.nix b/nixpkgs/pkgs/build-support/fetchcvs/default.nix
new file mode 100644
index 000000000000..1f7947d4d452
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchcvs/default.nix
@@ -0,0 +1,20 @@
+# Example tags:
+# date="2007-10-20"; (get the last version before the given date)
+# tag="<tagname>" (get the version by tag name)
+# If you specify neither, date="NOW" will be used (get the latest)
+
+{stdenvNoCC, cvs, openssh}:
+
+{cvsRoot, module, tag ? null, date ? null, sha256}:
+
+stdenvNoCC.mkDerivation {
+  name = "cvs-export";
+  builder = ./builder.sh;
+  nativeBuildInputs = [cvs openssh];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  inherit cvsRoot module sha256 tag date;
+}
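A sketch of a fetchcvs call (the CVSROOT and hash are placeholders):

    fetchcvs {
      cvsRoot = ":pserver:anonymous@cvs.example.org:/cvsroot/project";   # hypothetical
      module = "project";
      tag = "RELEASE_1_0";   # or date = "2007-10-20"; omit both to fetch NOW
      sha256 = "<hash>";
    }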
diff --git a/nixpkgs/pkgs/build-support/fetchcvs/nix-prefetch-cvs b/nixpkgs/pkgs/build-support/fetchcvs/nix-prefetch-cvs
new file mode 100755
index 000000000000..f9ed8ffa066f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchcvs/nix-prefetch-cvs
@@ -0,0 +1,82 @@
+#! /bin/sh -e
+
+cvsRoot=$1
+module=$2
+tag=$3
+expHash=$4
+
+hashType=$NIX_HASH_ALGO
+if test -z "$hashType"; then
+    hashType=sha256
+fi
+
+if test -z "$cvsRoot"; then
+    echo "syntax: nix-prefetch-cvs CVSROOT MODULE [TAG [HASH]]" >&2
+    exit 1
+elif test -z "$module"; then
+    echo "syntax: nix-prefetch-cvs CVSROOT MODULE [TAG [HASH]]" >&2
+    exit 1
+fi
+
+
+mkTempDir() {
+    tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/nix-prefetch-cvs-XXXXXXXX")"
+    trap removeTempDir EXIT SIGINT SIGQUIT
+}
+
+removeTempDir() {
+    if test -n "$tmpPath"; then
+        rm -rf "$tmpPath" || true
+    fi
+}
+
+
+# If the hash was given, a file with that hash may already be in the
+# store.
+if test -n "$expHash"; then
+    finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" cvs-export)
+    if ! nix-store --check-validity "$finalPath" 2> /dev/null; then
+        finalPath=
+    fi
+    hash=$expHash
+fi
+
+
+# If we don't know the hash or a path with that hash doesn't exist,
+# download the file and add it to the store.
+if test -z "$finalPath"; then
+
+    mkTempDir
+    tmpFile=$tmpPath/cvs-export
+    #mkdir $tmpPath
+
+    # Perform the checkout.
+    if test -z "$tag"; then
+        args=(-D "now")
+    elif test "$USE_DATE" = "1"; then
+        args=(-D "$tag")
+    else
+        args=(-r "$tag")
+    fi
+    (cd "$tmpPath" && cvs -f -z0 -d $cvsRoot export "${args[@]}" -d cvs-export $module >&2)
+
+    # Compute the hash.
+    hash=$(nix-hash --type $hashType $hashFormat $tmpFile)
+    if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
+
+    # Add the downloaded file to the Nix store.
+    finalPath=$(nix-store --add-fixed --recursive "$hashType" $tmpFile)
+
+    if test -n "$expHash" -a "$expHash" != "$hash"; then
+        echo "hash mismatch for CVS root \`$cvsRoot'"
+        exit 1
+    fi
+fi
+
+if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi
+
+echo $hash
+
+if test -n "$PRINT_PATH"; then
+    echo $finalPath
+fi
diff --git a/nixpkgs/pkgs/build-support/fetchdarcs/builder.sh b/nixpkgs/pkgs/build-support/fetchdarcs/builder.sh
new file mode 100644
index 000000000000..301deb98307f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchdarcs/builder.sh
@@ -0,0 +1,19 @@
+source $stdenv/setup
+
+tagtext=""
+tagflags=""
+if test -n "$rev"; then
+    tagtext="(tag $rev) "
+    tagflags="--tag=$rev"
+elif test -n "$context"; then
+    tagtext="(context) "
+    tagflags="--context=$context"
+fi
+
+header "getting $url $partial ${tagtext} into $out"
+
+darcs get --lazy $tagflags "$url" "$out"
+# remove metadata, because it can change
+rm -rf "$out/_darcs"
+
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchdarcs/default.nix b/nixpkgs/pkgs/build-support/fetchdarcs/default.nix
new file mode 100644
index 000000000000..02777c9900dc
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchdarcs/default.nix
@@ -0,0 +1,18 @@
+{stdenvNoCC, darcs, cacert}:
+
+{url, rev ? null, context ? null, md5 ? "", sha256 ? ""}:
+
+if md5 != "" then
+  throw "fetchdarcs does not support md5 anymore, please use sha256"
+else
+stdenvNoCC.mkDerivation {
+  name = "fetchdarcs";
+  builder = ./builder.sh;
+  nativeBuildInputs = [cacert darcs];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  inherit url rev context;
+}
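A sketch of a fetchdarcs call (the URL and hash are placeholders):

    fetchdarcs {
      url = "http://hub.darcs.net/owner/project";   # hypothetical
      rev = "v1.0";                                 # darcs tag
      sha256 = "<hash>";
    }

Because the _darcs metadata is removed after the lazy get, the fixed-output hash covers only the working tree.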
diff --git a/nixpkgs/pkgs/build-support/fetchdocker/credentials.nix b/nixpkgs/pkgs/build-support/fetchdocker/credentials.nix
new file mode 100644
index 000000000000..6d084d3c77ed
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchdocker/credentials.nix
@@ -0,0 +1,38 @@
+# We provide three paths to get the credentials into the builder's
+# environment:
+#
+# 1. Via impureEnvVars. This method is difficult for multi-user Nix
+#    installations (but works very well for single-user Nix
+#    installations!) because it requires setting the environment
+#    variables on the nix-daemon which is either complicated or unsafe
+#    (i.e: configuring via Nix means the secrets will be persisted
+#    into the store)
+#
+# 2. If the DOCKER_CREDENTIALS key with a path to a credentials file
+#    is added to the NIX_PATH (usually via the '-I ' argument to most
+#    Nix tools) then an attempt will be made to read credentials from
+#    it. The semantics are simple, the file should contain two lines
+#    for the username and password based authentication:
+#
+# $ cat ./credentials-file.txt
+# DOCKER_USER=myusername
+# DOCKER_PASS=mypassword
+#
+#    ... and a single line for the token based authentication:
+#
+# $ cat ./credentials-file.txt
+# DOCKER_TOKEN=mytoken
+#
+# 3. A credential file at /etc/nix-docker-credentials.txt with the
+#    same format as the file described in #2 can also be used to
+#    communicate credentials to the builder. This is necessary for
+#    situations (like Hydra) where you cannot customize the NIX_PATH
+#    given to the nix-build invocation to provide it with the
+#    DOCKER_CREDENTIALS path
+let
+  pathParts =
+   (builtins.filter
+    ({prefix, path}: "DOCKER_CREDENTIALS" == prefix)
+    builtins.nixPath);
+in
+  if (pathParts != []) then (builtins.head pathParts).path else ""
diff --git a/nixpkgs/pkgs/build-support/fetchdocker/default.nix b/nixpkgs/pkgs/build-support/fetchdocker/default.nix
new file mode 100644
index 000000000000..57d2e4ad82d2
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchdocker/default.nix
@@ -0,0 +1,61 @@
+{ stdenv, lib, coreutils, bash, gnutar, writeText }:
+let
+  stripScheme =
+    builtins.replaceStrings [ "https://" "http://" ] [ "" "" ];
+  stripNixStore =
+    s: lib.removePrefix "/nix/store/" s;
+in
+{ name
+, registry         ? "https://registry-1.docker.io/v2/"
+, repository       ? "library"
+, imageName
+, tag
+, imageLayers
+, imageConfig
+, image            ? "${stripScheme registry}/${repository}/${imageName}:${tag}"
+}:
+
+# Make sure there are *no* slashes in the repository or container
+# names since we use these to make the output derivation name for the
+# nix-store path.
+assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters repository);
+assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters imageName);
+
+let
+  # Abuse paths to collapse possible double slashes
+  repoTag0 = builtins.toString (/. + "/${stripScheme registry}/${repository}/${imageName}");
+  repoTag1 = lib.removePrefix "/" repoTag0;
+
+  layers = builtins.map stripNixStore imageLayers;
+
+  manifest =
+    writeText "manifest.json" (builtins.toJSON [
+      { Config   = stripNixStore imageConfig;
+        Layers   = layers;
+        RepoTags = [ "${repoTag1}:${tag}" ];
+      }]);
+
+  repositories =
+    writeText "repositories" (builtins.toJSON {
+      ${repoTag1} = {
+        ${tag} = lib.last layers;
+      };
+    });
+
+  imageFileStorePaths =
+    writeText "imageFileStorePaths.txt"
+      (lib.concatStringsSep "\n" ((lib.unique imageLayers) ++ [imageConfig]));
+in
+stdenv.mkDerivation {
+  builder     = ./fetchdocker-builder.sh;
+  buildInputs = [ coreutils ];
+  preferLocalBuild = true;
+
+  inherit name imageName repository tag;
+  inherit bash gnutar manifest repositories;
+  inherit imageFileStorePaths;
+
+  passthru = {
+    inherit image;
+  };
+}
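Putting the pieces together, a hedged sketch of fetching an image; digests and hashes are placeholders, and fetchDockerConfig/fetchDockerLayer are the sibling fetchers introduced just below:

    fetchdocker rec {
      name = "debian";
      imageName = "debian";
      tag = "latest";
      imageConfig = fetchDockerConfig {
        inherit imageName tag;
        sha256 = "<hash>";
      };
      imageLayers = [
        (fetchDockerLayer {
          inherit imageName;
          layerDigest = "<digest>";   # without the "sha256:" prefix, per the assert below
          sha256 = "<hash>";
        })
      ];
    }

The result carries $out/compositeImage.sh, which streams a `docker load`-compatible tarball of the layers, image config, manifest, and repositories files.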
diff --git a/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerConfig.nix b/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerConfig.nix
new file mode 100644
index 000000000000..9fd813bfa575
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerConfig.nix
@@ -0,0 +1,13 @@
+pkgargs@{ stdenv, lib, haskellPackages, writeText, gawk }:
+let
+  generic-fetcher =
+    import ./generic-fetcher.nix pkgargs;
+in
+
+args@{ repository ? "library", imageName, tag, ... }:
+
+generic-fetcher ({
+  fetcher = "hocker-config";
+  name    = "${repository}_${imageName}_${tag}-config.json";
+  tag     = "unused";
+} // args)
diff --git a/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerLayer.nix b/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerLayer.nix
new file mode 100644
index 000000000000..869ba637429c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerLayer.nix
@@ -0,0 +1,13 @@
+pkgargs@{ stdenv, lib, haskellPackages, writeText, gawk }:
+let
+  generic-fetcher =
+    import ./generic-fetcher.nix pkgargs;
+in
+
+args@{ layerDigest, ... }:
+
+generic-fetcher ({
+  fetcher = "hocker-layer";
+  name    = "docker-layer-${layerDigest}.tar.gz";
+  tag     = "unused";
+} // args)
diff --git a/nixpkgs/pkgs/build-support/fetchdocker/fetchdocker-builder.sh b/nixpkgs/pkgs/build-support/fetchdocker/fetchdocker-builder.sh
new file mode 100644
index 000000000000..7443591e6569
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchdocker/fetchdocker-builder.sh
@@ -0,0 +1,28 @@
+source "${stdenv}/setup"
+header "exporting ${repository}/${imageName} (tag: ${tag}) into ${out}"
+mkdir -p "${out}"
+
+cat <<EOF > "${out}/compositeImage.sh"
+#! ${bash}/bin/bash
+#
+# Create a tar archive of a docker image's layers, docker image config
+# json, manifest.json, and repositories json; this streams directly to
+# stdout and is intended to be used in concert with docker load, i.e:
+# 
+# ${out}/compositeImage.sh | docker load
+
+# The first character following the 's' command for sed becomes the
+# delimiter sed will use; this makes the transformation regex easy to
+# read. We feed tar a file listing the files we want in the archive;
+# because the paths are absolute and docker load wants them flattened in
+# the archive, we transform all of the paths going in by stripping
+# everything *including* the last solidus so that we end up with the
+# basename of the path.
+${gnutar}/bin/tar \
+  --transform='s=.*/==' \
+  --transform="s=.*-manifest.json=manifest.json=" \
+  --transform="s=.*-repositories=repositories=" \
+  -c "${manifest}" "${repositories}" -T "${imageFileStorePaths}"
+EOF
+chmod +x "${out}/compositeImage.sh"
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchdocker/generic-fetcher.nix b/nixpkgs/pkgs/build-support/fetchdocker/generic-fetcher.nix
new file mode 100644
index 000000000000..3b0c33770467
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchdocker/generic-fetcher.nix
@@ -0,0 +1,95 @@
+{ stdenv, lib, haskellPackages, writeText, gawk }:
+let
+  awk                   = "${gawk}/bin/awk";
+  dockerCredentialsFile = import ./credentials.nix;
+in
+{ fetcher
+, name
+, registry    ? "https://registry-1.docker.io/v2/"
+, repository  ? "library"
+, imageName
+, sha256
+, tag         ? ""
+, layerDigest ? ""
+}:
+
+# There must be no slashes in the repository or container names since
+# we use these to make the output derivation name for the nix store
+# path
+assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters repository);
+assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters imageName);
+
+# Only allow hocker-config and hocker-layer as fetchers for now
+assert (builtins.elem fetcher ["hocker-config" "hocker-layer"]);
+
+# If layerDigest is non-empty then it must not have a 'sha256:' prefix!
+assert
+  (if layerDigest != ""
+   then !lib.hasPrefix "sha256:" layerDigest
+   else true);
+
+let
+  layerDigestFlag =
+    lib.optionalString (layerDigest != "") "--layer ${layerDigest}";
+in
+stdenv.mkDerivation {
+  inherit name;
+  builder = writeText "${fetcher}-builder.sh" ''
+    source "$stdenv/setup"
+    header "${fetcher} exporting to $out"
+
+    declare -A creds
+
+    # This is a hack for Hydra since we have no way of adding values
+    # to the NIX_PATH for Hydra jobsets!!
+    staticCredentialsFile="/etc/nix-docker-credentials.txt"
+    if [ ! -f "$dockerCredentialsFile" -a -f "$staticCredentialsFile" ]; then
+      echo "credentials file not set, falling back on static credentials file at: $staticCredentialsFile"
+      dockerCredentialsFile=$staticCredentialsFile
+    fi
+
+    if [ -f "$dockerCredentialsFile" ]; then
+      header "using credentials from $dockerCredentialsFile"
+
+      CREDSFILE=$(cat "$dockerCredentialsFile")
+      creds[token]=$(${awk} -F'=' '/DOCKER_TOKEN/ {print $2}' <<< "$CREDSFILE" | head -n1)
+
+      # Prefer DOCKER_TOKEN over the username and password
+      # authentication method
+      if [ -z "''${creds[token]}" ]; then
+        creds[user]=$(${awk} -F'=' '/DOCKER_USER/  {print $2}' <<< "$CREDSFILE" | head -n1)
+        creds[pass]=$(${awk} -F'=' '/DOCKER_PASS/  {print $2}' <<< "$CREDSFILE" | head -n1)
+      fi
+    fi
+
+    # These variables are filled in first from the impureEnvVars; if
+    # they are empty then they fall back to the credentials that may
+    # have been read in from the credentials file above
+    DOCKER_USER="''${DOCKER_USER:-''${creds[user]}}"
+    DOCKER_PASS="''${DOCKER_PASS:-''${creds[pass]}}"
+    DOCKER_TOKEN="''${DOCKER_TOKEN:-''${creds[token]}}"
+
+    ${fetcher} --out="$out" \
+      ''${registry:+--registry "$registry"} \
+      ''${DOCKER_USER:+--username "$DOCKER_USER"} \
+      ''${DOCKER_PASS:+--password "$DOCKER_PASS"} \
+      ''${DOCKER_TOKEN:+--token "$DOCKER_TOKEN"} \
+      ${layerDigestFlag} \
+      "${repository}/${imageName}" \
+      "${tag}"
+
+    stopNest
+  '';
+
+  buildInputs = [ haskellPackages.hocker ];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "flat";
+  outputHash = sha256;
+
+  preferLocalBuild = true;
+
+  impureEnvVars = [ "DOCKER_USER" "DOCKER_PASS" "DOCKER_TOKEN" ];
+
+  inherit registry dockerCredentialsFile;
+}
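+
+# The credentials file read above is a plain KEY=VALUE file, matching
+# the awk extraction of DOCKER_TOKEN, DOCKER_USER, and DOCKER_PASS; a
+# sketch with hypothetical values (a token takes precedence over the
+# username/password pair):
+#
+#   DOCKER_TOKEN=0123456789abcdef
+#   DOCKER_USER=example-user
+#   DOCKER_PASS=example-password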
diff --git a/nixpkgs/pkgs/build-support/fetchfirefoxaddon/default.nix b/nixpkgs/pkgs/build-support/fetchfirefoxaddon/default.nix
new file mode 100644
index 000000000000..127f32dd61b6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchfirefoxaddon/default.nix
@@ -0,0 +1,41 @@
+{ stdenv, lib, coreutils, unzip, jq, zip, fetchurl, writeScript, ... }:
+
+{
+  name
+, url
+, md5 ? ""
+, sha1 ? ""
+, sha256 ? ""
+, sha512 ? ""
+, fixedExtid ? null
+, hash ? ""
+}:
+
+stdenv.mkDerivation rec {
+
+  inherit name;
+  extid = if fixedExtid == null then "nixos@${name}" else fixedExtid;
+  passthru = {
+    inherit extid;
+  };
+
+  builder = writeScript "xpibuilder" ''
+    source $stdenv/setup
+
+    header "firefox addon $name into $out"
+
+    UUID="${extid}"
+    mkdir -p "$out/$UUID"
+    unzip -q ${src} -d "$out/$UUID"
+    NEW_MANIFEST=$(jq '. + {"applications": { "gecko": { "id": "${extid}" }}, "browser_specific_settings":{"gecko":{"id": "${extid}"}}}' "$out/$UUID/manifest.json")
+    echo "$NEW_MANIFEST" > "$out/$UUID/manifest.json"
+    cd "$out/$UUID"
+    zip -r -q -FS "$out/$UUID.xpi" *
+    rm -r "$out/$UUID"
+  '';
+  src = fetchurl {
+    url = url;
+    inherit md5 sha1 sha256 sha512 hash;
+  };
+  nativeBuildInputs = [ coreutils unzip zip jq ];
+}
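+
+# Usage sketch (hypothetical addon URL and placeholder hash):
+#
+#   fetchFirefoxAddon {
+#     name   = "ublock";
+#     url    = "https://addons.example.org/file/ublock.xpi";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }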
diff --git a/nixpkgs/pkgs/build-support/fetchfossil/builder.sh b/nixpkgs/pkgs/build-support/fetchfossil/builder.sh
new file mode 100644
index 000000000000..5f08aca424fe
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchfossil/builder.sh
@@ -0,0 +1,22 @@
+source $stdenv/setup
+header "Cloning Fossil $url [$rev] into $out"
+
+# Fossil, bless its adorable little heart, wants to write global configuration
+# to $HOME/.fossil. AFAICT, there is no way to disable this functionality.
+# Instead, we'll let it write to the build directory.
+export HOME=$(pwd)
+
+# We must explicitly set the admin user for the clone to something reasonable.
+fossil clone -A nobody "$url" fossil-clone.fossil
+
+mkdir fossil-clone
+WORKDIR=$(pwd)
+mkdir $out
+pushd $out
+fossil open "$WORKDIR/fossil-clone.fossil" "$rev"
+popd
+
+# Just nuke the checkout file.
+rm $out/.fslckout
+
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchfossil/default.nix b/nixpkgs/pkgs/build-support/fetchfossil/default.nix
new file mode 100644
index 000000000000..3a4876bc5de3
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchfossil/default.nix
@@ -0,0 +1,20 @@
+{stdenv, fossil, cacert}:
+
+{name ? null, url, rev, sha256}:
+
+stdenv.mkDerivation {
+  name = "fossil-archive" + (if name != null then "-${name}" else "");
+  builder = ./builder.sh;
+  nativeBuildInputs = [fossil cacert];
+
+  # Envvar docs are hard to find. A link for the future:
+  # https://www.fossil-scm.org/index.html/doc/trunk/www/env-opts.md
+  impureEnvVars = [ "http_proxy" ];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  inherit url rev;
+  preferLocalBuild = true;
+}
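+
+# Usage sketch (hypothetical repository; rev can be any check-in name
+# accepted by `fossil open`, e.g. a tag or artifact hash; placeholder
+# hash shown):
+#
+#   fetchfossil {
+#     url    = "https://fossil.example.org/project";
+#     rev    = "trunk";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }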
diff --git a/nixpkgs/pkgs/build-support/fetchgit/builder.sh b/nixpkgs/pkgs/build-support/fetchgit/builder.sh
new file mode 100644
index 000000000000..0047a335c76c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgit/builder.sh
@@ -0,0 +1,17 @@
+# tested so far with:
+# - no revision specified and remote has a HEAD which is used
+# - revision specified and remote has a HEAD
+# - revision specified and remote without HEAD
+source $stdenv/setup
+
+header "exporting $url (rev $rev) into $out"
+
+$SHELL $fetcher --builder --url "$url" --out "$out" --rev "$rev" \
+  ${leaveDotGit:+--leave-dotGit} \
+  ${fetchLFS:+--fetch-lfs} \
+  ${deepClone:+--deepClone} \
+  ${fetchSubmodules:+--fetch-submodules} \
+  ${branchName:+--branch-name "$branchName"}
+
+runHook postFetch
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchgit/default.nix b/nixpkgs/pkgs/build-support/fetchgit/default.nix
new file mode 100644
index 000000000000..3222866dc781
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgit/default.nix
@@ -0,0 +1,74 @@
+{lib, stdenvNoCC, git, git-lfs, cacert}: let
+  urlToName = url: rev: let
+    inherit (lib) removeSuffix splitString last;
+    base = last (splitString ":" (baseNameOf (removeSuffix "/" url)));
+
+    matched = builtins.match "(.*)\\.git" base;
+
+    short = builtins.substring 0 7 rev;
+
+    appendShort = if (builtins.match "[a-f0-9]*" rev) != null
+      then "-${short}"
+      else "";
+  in "${if matched == null then base else builtins.head matched}${appendShort}";
+in
+{ url, rev ? "HEAD", md5 ? "", sha256 ? "", leaveDotGit ? deepClone
+, fetchSubmodules ? true, deepClone ? false
+, branchName ? null
+, name ? urlToName url rev
+, # Shell code executed after the file has been fetched
+  # successfully. This can do things like check or transform the file.
+  postFetch ? ""
+, preferLocalBuild ? true
+, fetchLFS ? false
+}:
+
+/* NOTE:
+   fetchgit has one problem: git fetch only works for refs.
+   This is because fetching arbitrary (possibly dangling) commits may be a
+   security risk, and checking whether a commit belongs to a ref is
+   expensive. This may change in the future when some caching is added to
+   git (?).
+   Usually refs are either tags (refs/tags/*) or branches (refs/heads/*).
+   Cloning branches will make the hash check fail when there is an update,
+   but not all commits we want can be reached via tags.
+
+   The workaround is to fetch the last n commits, so that it is likely
+   they still contain the hash we want.
+
+   For now: increase the depth iteratively (TODO).
+
+   The real fix would be for git to grow a
+   "git fetch $HASH contained in $BRANCH"
+   facility, because checking that $HASH is contained in $BRANCH is less
+   expensive than fetching with --depth $N. But even if the git developers
+   implemented this feature soon, it could take years until server admins
+   start running the new version.
+*/
+
+assert deepClone -> leaveDotGit;
+
+if md5 != "" then
+  throw "fetchgit does not support md5 anymore, please use sha256"
+else
+stdenvNoCC.mkDerivation {
+  inherit name;
+  builder = ./builder.sh;
+  fetcher = ./nix-prefetch-git;  # This must be a string to ensure it's called with bash.
+
+  nativeBuildInputs = [ git ]
+    ++ lib.optionals fetchLFS [ git-lfs ];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  inherit url rev leaveDotGit fetchLFS fetchSubmodules deepClone branchName postFetch;
+
+  GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";
+
+  impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ [
+    "GIT_PROXY_COMMAND" "SOCKS_SERVER"
+  ];
+
+  inherit preferLocalBuild;
+}
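+
+# Usage sketch (hypothetical repository; rev is a full commit hash or
+# a ref such as "refs/tags/v1.0"; placeholder hash):
+#
+#   fetchgit {
+#     url    = "https://example.org/project.git";
+#     rev    = "refs/tags/v1.0";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }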
diff --git a/nixpkgs/pkgs/build-support/fetchgit/nix-prefetch-git b/nixpkgs/pkgs/build-support/fetchgit/nix-prefetch-git
new file mode 100755
index 000000000000..661e0d674c58
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgit/nix-prefetch-git
@@ -0,0 +1,467 @@
+#! /usr/bin/env bash
+
+set -e -o pipefail
+
+url=
+rev=
+expHash=
+hashType=$NIX_HASH_ALGO
+deepClone=$NIX_PREFETCH_GIT_DEEP_CLONE
+leaveDotGit=$NIX_PREFETCH_GIT_LEAVE_DOT_GIT
+fetchSubmodules=
+fetchLFS=
+builder=
+branchName=$NIX_PREFETCH_GIT_BRANCH_NAME
+
+# ENV params
+out=${out:-}
+http_proxy=${http_proxy:-}
+
+# populated by clone_user_rev()
+fullRev=
+humanReadableRev=
+commitDate=
+commitDateStrict8601=
+
+if test -n "$deepClone"; then
+    deepClone=true
+else
+    deepClone=
+fi
+
+if test "$leaveDotGit" != 1; then
+    leaveDotGit=
+else
+    leaveDotGit=true
+fi
+
+usage(){
+    echo  >&2 "syntax: nix-prefetch-git [options] [URL [REVISION [EXPECTED-HASH]]]
+
+Options:
+      --out path      Path where the output will be stored.
+      --url url       Any url understood by 'git clone'.
+      --rev ref       Any sha1 or reference (such as refs/heads/master).
+      --hash h        Expected hash.
+      --branch-name   Branch name to check out into.
+      --deepClone     Clone the entire repository.
+      --no-deepClone  Make a shallow clone of just the required ref.
+      --leave-dotGit  Keep the .git directories.
+      --fetch-lfs     Fetch git Large File Storage (LFS) files.
+      --fetch-submodules Fetch submodules.
+      --builder       Clone as fetchgit does; the url, rev, and out options are mandatory.
+      --quiet         Only print the final JSON summary.
+"
+    exit 1
+}
+
+# some git commands print to stdout, which would contaminate our JSON output
+clean_git(){
+    git "$@" >&2
+}
+
+argi=0
+argfun=""
+for arg; do
+    if test -z "$argfun"; then
+        case $arg in
+            --out) argfun=set_out;;
+            --url) argfun=set_url;;
+            --rev) argfun=set_rev;;
+            --hash) argfun=set_hashType;;
+            --branch-name) argfun=set_branchName;;
+            --deepClone) deepClone=true;;
+            --quiet) QUIET=true;;
+            --no-deepClone) deepClone=;;
+            --leave-dotGit) leaveDotGit=true;;
+            --fetch-lfs) fetchLFS=true;;
+            --fetch-submodules) fetchSubmodules=true;;
+            --builder) builder=true;;
+            -h|--help) usage; exit;;
+            *)
+                : $((++argi))
+                case $argi in
+                    1) url=$arg;;
+                    2) rev=$arg;;
+                    3) expHash=$arg;;
+                    *) exit 1;;
+                esac
+                ;;
+        esac
+    else
+        case $argfun in
+            set_*)
+                var=${argfun#set_}
+                eval $var=$arg
+                ;;
+        esac
+        argfun=""
+    fi
+done
+
+if test -z "$url"; then
+    usage
+fi
+
+
+init_remote(){
+    local url=$1
+    clean_git init --initial-branch=master
+    clean_git remote add origin "$url"
+    ( [ -n "$http_proxy" ] && clean_git config http.proxy "$http_proxy" ) || true
+}
+
+# Return the reference of a hash if it exists on the remote repository.
+ref_from_hash(){
+    local hash=$1
+    git ls-remote origin | sed -n "\,$hash\t, { s,\(.*\)\t\(.*\),\2,; p; q}"
+}
+
+# Return the hash of a reference if it exists on the remote repository.
+hash_from_ref(){
+    local ref=$1
+    git ls-remote origin | sed -n "\,\t$ref, { s,\(.*\)\t\(.*\),\1,; p; q}"
+}
+
+# Returns a name based on the url and reference
+#
+# This function needs to be in sync with nix's fetchgit implementation
+# of urlToName() to re-use the same nix store paths.
+url_to_name(){
+    local url=$1
+    local ref=$2
+    local base
+    base=$(basename "$url" .git | cut -d: -f2)
+
+    if [[ $ref =~ ^[a-z0-9]+$ ]]; then
+        echo "$base-${ref:0:7}"
+    else
+        echo "$base"
+    fi
+}
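+# For example (illustrative values):
+#   url_to_name "https://example.org/foo.git" "1234abcdef" -> "foo-1234abc"
+#   url_to_name "https://example.org/foo.git" "refs/heads/master" -> "foo"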
+
+# Fetch everything and check out the right sha1
+checkout_hash(){
+    local hash="$1"
+    local ref="$2"
+
+    if test -z "$hash"; then
+        hash=$(hash_from_ref "$ref")
+    fi
+
+    clean_git fetch -t ${builder:+--progress} origin || return 1
+
+    local object_type=$(git cat-file -t "$hash")
+    if [[ "$object_type" == "commit" ]]; then
+        clean_git checkout -b "$branchName" "$hash" || return 1
+    elif [[ "$object_type" == "tree" ]]; then
+        clean_git config user.email "nix-prefetch-git@localhost"
+        clean_git config user.name "nix-prefetch-git"
+        local commit_id=$(git commit-tree "$hash" -m "Commit created from tree hash $hash")
+        clean_git checkout -b "$branchName" "$commit_id" || return 1
+    else
+        echo "Unrecognized git object type: $object_type"
+        return 1
+    fi
+}
+
+# Fetch only a branch/tag and check it out.
+checkout_ref(){
+    local hash="$1"
+    local ref="$2"
+
+    if [[ -n "$deepClone" ]]; then
+        # The caller explicitly asked for a deep clone.  Deep clones
+        # allow "git describe" and similar tools to work.  See
+        # https://marc.info/?l=nix-dev&m=139641582514772
+        # for a discussion.
+        return 1
+    fi
+
+    if test -z "$ref"; then
+        ref=$(ref_from_hash "$hash")
+    fi
+
+    if test -n "$ref"; then
+        # --depth option is ignored on http repository.
+        clean_git fetch ${builder:+--progress} --depth 1 origin +"$ref" || return 1
+        clean_git checkout -b "$branchName" FETCH_HEAD || return 1
+    else
+        return 1
+    fi
+}
+
+# Update submodules
+init_submodules(){
+    # Add urls into .git/config file
+    clean_git submodule init
+
+    # list submodule directories and their hashes
+    git submodule status |
+    while read -r l; do
+        local hash
+        local dir
+        local name
+        local url
+
+        # checkout each submodule
+        hash=$(echo "$l" | awk '{print $1}' | tr -d '-')
+        dir=$(echo "$l" | sed -n 's/^.[0-9a-f]\+ \(.*[^)]*\)\( (.*)\)\?$/\1/p')
+        name=$(
+            git config -f .gitmodules --get-regexp submodule\..*\.path |
+            sed -n "s,^\(.*\)\.path $dir\$,\\1,p")
+        url=$(git config --get "${name}.url")
+
+        clone "$dir" "$url" "$hash" ""
+    done
+}
+
+clone(){
+    local top=$PWD
+    local dir="$1"
+    local url="$2"
+    local hash="$3"
+    local ref="$4"
+
+    cd "$dir"
+
+    # Initialize the repository.
+    init_remote "$url"
+
+    # Download data from the repository.
+    checkout_ref "$hash" "$ref" ||
+    checkout_hash "$hash" "$ref" || (
+        echo 1>&2 "Unable to checkout $hash$ref from $url."
+        exit 1
+    )
+
+    # Checkout linked sources.
+    if test -n "$fetchSubmodules"; then
+        init_submodules
+    fi
+
+    if [ -z "$builder" ] && [ -f .topdeps ]; then
+        if tg help &>/dev/null; then
+            echo "populating TopGit branches..."
+            tg remote --populate origin
+        else
+            echo "WARNING: would populate TopGit branches but TopGit is not available" >&2
+            echo "WARNING: install TopGit to fix the problem" >&2
+        fi
+    fi
+
+    cd "$top"
+}
+
+# Remove all remote branches, remove tags not reachable from HEAD, do a full
+# repack and then garbage collect unreferenced objects.
+make_deterministic_repo(){
+    local repo="$1"
+
+    # run in sub-shell to not touch current working directory
+    (
+    cd "$repo"
+    # Remove files that contain timestamps or otherwise have non-deterministic
+    # properties.
+    rm -rf .git/logs/ .git/hooks/ .git/index .git/FETCH_HEAD .git/ORIG_HEAD \
+        .git/refs/remotes/origin/HEAD .git/config
+
+    # Remove all remote branches.
+    git branch -r | while read -r branch; do
+        clean_git branch -rD "$branch"
+    done
+
+    # Remove tags not reachable from HEAD. If we're exactly on a tag, don't
+    # delete it.
+    maybe_tag=$(git tag --points-at HEAD)
+    git tag --contains HEAD | while read -r tag; do
+        if [ "$tag" != "$maybe_tag" ]; then
+            clean_git tag -d "$tag"
+        fi
+    done
+
+    # Do a full repack. Must run single-threaded, or else we lose determinism.
+    clean_git config pack.threads 1
+    clean_git repack -A -d -f
+    rm -f .git/config
+
+    # Garbage collect unreferenced objects.
+    # Note: --keep-largest-pack prevents non-deterministic ordering of packs
+    #   listed in .git/objects/info/packs by only using a single pack
+    clean_git gc --prune=all --keep-largest-pack
+    )
+}
+
+
+clone_user_rev() {
+    local dir="$1"
+    local url="$2"
+    local rev="${3:-HEAD}"
+
+    if [ -n "$fetchLFS" ]; then
+        HOME=$TMPDIR
+        git lfs install
+    fi
+
+    # Perform the checkout.
+    case "$rev" in
+        HEAD|refs/*)
+            clone "$dir" "$url" "" "$rev" 1>&2;;
+        *)
+            if test -z "$(echo "$rev" | tr -d 0123456789abcdef)"; then
+                clone "$dir" "$url" "$rev" "" 1>&2
+            else
+                # if revision is not hexadecimal it might be a tag
+                clone "$dir" "$url" "" "refs/tags/$rev" 1>&2
+            fi;;
+    esac
+
+    pushd "$dir" >/dev/null
+    fullRev=$( (git rev-parse "$rev" 2>/dev/null || git rev-parse "refs/heads/$branchName") | tail -n1)
+    humanReadableRev=$(git describe "$fullRev" 2> /dev/null || git describe --tags "$fullRev" 2> /dev/null || echo -- none --)
+    commitDate=$(git show -1 --no-patch --pretty=%ci "$fullRev")
+    commitDateStrict8601=$(git show -1 --no-patch --pretty=%cI "$fullRev")
+    popd >/dev/null
+
+    # Allow doing additional processing before .git removal
+    eval "$NIX_PREFETCH_GIT_CHECKOUT_HOOK"
+    if test -z "$leaveDotGit"; then
+        echo "removing \`.git'..." >&2
+        find "$dir" -name .git -print0 | xargs -0 rm -rf
+    else
+        find "$dir" -name .git | while read -r gitdir; do
+            make_deterministic_repo "$(readlink -f "$gitdir/..")"
+        done
+    fi
+}
+
+exit_handlers=()
+
+run_exit_handlers() {
+    exit_status=$?
+    for handler in "${exit_handlers[@]}"; do
+        eval "$handler $exit_status"
+    done
+}
+
+trap run_exit_handlers EXIT
+
+quiet_exit_handler() {
+    exec 2>&3 3>&-
+    if [ $1 -ne 0 ]; then
+        cat "$errfile" >&2
+    fi
+    rm -f "$errfile"
+}
+
+quiet_mode() {
+    errfile="$(mktemp "${TMPDIR:-/tmp}/git-checkout-err-XXXXXXXX")"
+    exit_handlers+=(quiet_exit_handler)
+    exec 3>&2 2>"$errfile"
+}
+
+json_escape() {
+    local s="$1"
+    s="${s//\\/\\\\}" # \
+    s="${s//\"/\\\"}" # "
+    s="${s//^H/\\\b}" # \b (backspace)
+    s="${s//^L/\\\f}" # \f (form feed)
+    s="${s//
+/\\\n}" # \n (newline)
+    s="${s//^M/\\\r}" # \r (carriage return)
+    s="${s//   /\\t}" # \t (tab)
+    echo "$s"
+}
+
+print_results() {
+    hash="$1"
+    if ! test -n "$QUIET"; then
+        echo "" >&2
+        echo "git revision is $fullRev" >&2
+        if test -n "$finalPath"; then
+            echo "path is $finalPath" >&2
+        fi
+        echo "git human-readable version is $humanReadableRev" >&2
+        echo "Commit date is $commitDate" >&2
+        if test -n "$hash"; then
+            echo "hash is $hash" >&2
+        fi
+    fi
+    if test -n "$hash"; then
+        cat <<EOF
+{
+  "url": "$(json_escape "$url")",
+  "rev": "$(json_escape "$fullRev")",
+  "date": "$(json_escape "$commitDateStrict8601")",
+  "path": "$(json_escape "$finalPath")",
+  "$(json_escape "$hashType")": "$(json_escape "$hash")",
+  "fetchSubmodules": $([[ -n "$fetchSubmodules" ]] && echo true || echo false),
+  "deepClone": $([[ -n "$deepClone" ]] && echo true || echo false),
+  "leaveDotGit": $([[ -n "$leaveDotGit" ]] && echo true || echo false)
+}
+EOF
+    fi
+}
+
+remove_tmpPath() {
+    rm -rf "$tmpPath"
+}
+
+if test -n "$QUIET"; then
+    quiet_mode
+fi
+
+if test -z "$branchName"; then
+    branchName=fetchgit
+fi
+
+if test -n "$builder"; then
+    test -n "$out" -a -n "$url" -a -n "$rev" || usage
+    mkdir -p "$out"
+    clone_user_rev "$out" "$url" "$rev"
+else
+    if test -z "$hashType"; then
+        hashType=sha256
+    fi
+
+    # If the hash was given, a file with that hash may already be in the
+    # store.
+    if test -n "$expHash"; then
+        finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" "$(url_to_name "$url" "$rev")")
+        if ! nix-store --check-validity "$finalPath" 2> /dev/null; then
+            finalPath=
+        fi
+        hash=$expHash
+    fi
+
+    # If we don't know the hash or a path with that hash doesn't exist,
+    # download the file and add it to the store.
+    if test -z "$finalPath"; then
+
+        tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/git-checkout-tmp-XXXXXXXX")"
+        exit_handlers+=(remove_tmpPath)
+
+        tmpFile="$tmpPath/$(url_to_name "$url" "$rev")"
+        mkdir -p "$tmpFile"
+
+        # Perform the checkout.
+        clone_user_rev "$tmpFile" "$url" "$rev"
+
+        # Compute the hash.
+        hash=$(nix-hash --type $hashType --base32 "$tmpFile")
+
+        # Add the downloaded file to the Nix store.
+        finalPath=$(nix-store --add-fixed --recursive "$hashType" "$tmpFile")
+
+        if test -n "$expHash" -a "$expHash" != "$hash"; then
+            echo "hash mismatch for URL \`$url'. Got \`$hash'; expected \`$expHash'." >&2
+            exit 1
+        fi
+    fi
+
+    print_results "$hash"
+
+    if test -n "$PRINT_PATH"; then
+        echo "$finalPath"
+    fi
+fi
diff --git a/nixpkgs/pkgs/build-support/fetchgitea/default.nix b/nixpkgs/pkgs/build-support/fetchgitea/default.nix
new file mode 100644
index 000000000000..79804588cfe5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgitea/default.nix
@@ -0,0 +1,7 @@
+# Gitea's URLs are compatible with GitHub
+
+{ lib, fetchFromGitHub }:
+
+{ domain, ... }@args:
+
+fetchFromGitHub ((removeAttrs args [ "domain" ]) // { githubBase = domain; })
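+
+# Usage sketch (hypothetical Gitea instance; placeholder hash):
+#
+#   fetchFromGitea {
+#     domain = "gitea.example.org";
+#     owner  = "example";
+#     repo   = "project";
+#     rev    = "v1.0";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }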
diff --git a/nixpkgs/pkgs/build-support/fetchgithub/default.nix b/nixpkgs/pkgs/build-support/fetchgithub/default.nix
new file mode 100644
index 000000000000..3f355d10f8a1
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgithub/default.nix
@@ -0,0 +1,41 @@
+{ lib, fetchgit, fetchzip }:
+
+{ owner, repo, rev, name ? "source"
+, fetchSubmodules ? false, leaveDotGit ? null
+, deepClone ? false, private ? false
+, githubBase ? "github.com", varPrefix ? null
+, ... # For hash agility
+}@args:
+let
+  baseUrl = "https://${githubBase}/${owner}/${repo}";
+  passthruAttrs = removeAttrs args [ "owner" "repo" "rev" "fetchSubmodules" "private" "githubBase" "varPrefix" ];
+  varBase = "NIX${if varPrefix == null then "" else "_${varPrefix}"}_GITHUB_PRIVATE_";
+  useFetchGit = fetchSubmodules || (leaveDotGit == true) || deepClone;
+  # We prefer fetchzip in cases we don't need submodules as the hash
+  # is more stable in that case.
+  fetcher = if useFetchGit then fetchgit else fetchzip;
+  privateAttrs = lib.optionalAttrs private {
+    netrcPhase = ''
+      if [ -z "''$${varBase}USERNAME" -o -z "''$${varBase}PASSWORD" ]; then
+        echo "Error: Private fetchFromGitHub requires the nix building process (nix-daemon in multi user mode) to have the ${varBase}USERNAME and ${varBase}PASSWORD env vars set." >&2
+        exit 1
+      fi
+      cat > netrc <<EOF
+      machine ${githubBase}
+              login ''$${varBase}USERNAME
+              password ''$${varBase}PASSWORD
+      EOF
+    '';
+    netrcImpureEnvVars = [ "${varBase}USERNAME" "${varBase}PASSWORD" ];
+  };
+  fetcherArgs = (if useFetchGit
+    then {
+      inherit rev deepClone fetchSubmodules; url = "${baseUrl}.git";
+    } // lib.optionalAttrs (leaveDotGit != null) { inherit leaveDotGit; }
+    else ({ url = "${baseUrl}/archive/${rev}.tar.gz"; } // privateAttrs)
+  ) // passthruAttrs // { inherit name; };
+in
+
+assert private -> !useFetchGit;
+
+fetcher fetcherArgs // { meta.homepage = baseUrl; inherit rev; }
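+
+# Usage sketch (hypothetical repository; with the default arguments
+# this fetches the rev tarball via fetchzip; placeholder hash):
+#
+#   fetchFromGitHub {
+#     owner  = "example";
+#     repo   = "project";
+#     rev    = "v1.0.0";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }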
diff --git a/nixpkgs/pkgs/build-support/fetchgitiles/default.nix b/nixpkgs/pkgs/build-support/fetchgitiles/default.nix
new file mode 100644
index 000000000000..827680992d69
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgitiles/default.nix
@@ -0,0 +1,10 @@
+{ fetchzip, lib }:
+
+{ url, rev, name ? "source", ... } @ args:
+
+fetchzip ({
+  inherit name;
+  url = "${url}/+archive/${rev}.tar.gz";
+  stripRoot = false;
+  meta.homepage = url;
+} // removeAttrs args [ "url" "rev" ]) // { inherit rev; }
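+
+# Usage sketch (hypothetical gitiles instance; placeholder hash):
+#
+#   fetchFromGitiles {
+#     url    = "https://example.googlesource.com/project";
+#     rev    = "refs/heads/master";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }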
diff --git a/nixpkgs/pkgs/build-support/fetchgitlab/default.nix b/nixpkgs/pkgs/build-support/fetchgitlab/default.nix
new file mode 100644
index 000000000000..77512510a7c6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgitlab/default.nix
@@ -0,0 +1,22 @@
+{ fetchzip, lib }:
+
+# gitlab example
+{ owner, repo, rev, domain ? "gitlab.com", name ? "source", group ? null
+, ... # For hash agility
+} @ args:
+
+with lib;
+
+let
+  slug = concatStringsSep "/"
+    ((optional (group != null) group) ++ [ owner repo ]);
+
+  escapedSlug = replaceStrings ["." "/"] ["%2E" "%2F"] slug;
+  escapedRev = replaceStrings ["+" "%" "/"] ["%2B" "%25" "%2F"] rev;
+in
+
+fetchzip ({
+  inherit name;
+  url = "https://${domain}/api/v4/projects/${escapedSlug}/repository/archive.tar.gz?sha=${escapedRev}";
+  meta.homepage = "https://${domain}/${slug}/";
+} // removeAttrs args [ "domain" "owner" "group" "repo" "rev" ]) // { inherit rev; }
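+
+# Usage sketch (hypothetical project; group is only needed when the
+# project lives in a subgroup; placeholder hash):
+#
+#   fetchFromGitLab {
+#     owner  = "example";
+#     repo   = "project";
+#     rev    = "v1.0";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }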
diff --git a/nixpkgs/pkgs/build-support/fetchgitlocal/default.nix b/nixpkgs/pkgs/build-support/fetchgitlocal/default.nix
new file mode 100644
index 000000000000..fa1945775537
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgitlocal/default.nix
@@ -0,0 +1,40 @@
+{ runCommand, git }: src:
+
+let
+  srcStr = toString src;
+
+  # Adds the current directory (respecting ignored files) to the git store, and returns the hash
+  gitHashFile = runCommand "put-in-git" {
+      nativeBuildInputs = [ git ];
+      dummy = builtins.currentTime; # impure, do every time
+      preferLocalBuild = true;
+    } ''
+      cd ${srcStr}
+      DOT_GIT=$(git rev-parse --resolve-git-dir .git) # path to repo
+
+      cp $DOT_GIT/index $DOT_GIT/index-user # backup index
+      git reset # reset index
+      git add . # add current directory
+
+      # hash of current directory
+      # remove trailing newline
+      git rev-parse $(git write-tree) \
+        | tr -d '\n' > $out
+
+      mv $DOT_GIT/index-user $DOT_GIT/index # restore index
+    '';
+
+  gitHash = builtins.readFile gitHashFile; # cache against git hash
+
+  nixPath = runCommand "put-in-nix" {
+      nativeBuildInputs = [ git ];
+      preferLocalBuild = true;
+    } ''
+      mkdir $out
+
+      # dump tar of *current directory* at given revision
+      git -C ${srcStr} archive --format=tar ${gitHash} \
+        | tar xf - -C $out
+    '';
+
+in nixPath
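+
+# Usage sketch: point it at a local git working tree; ignored files
+# are excluded because only the result of `git add .` is archived
+# (hypothetical call):
+#
+#   src = fetchgitlocal ./.;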
diff --git a/nixpkgs/pkgs/build-support/fetchgx/default.nix b/nixpkgs/pkgs/build-support/fetchgx/default.nix
new file mode 100644
index 000000000000..3ccf5d273fc5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchgx/default.nix
@@ -0,0 +1,28 @@
+{ stdenvNoCC, gx, gx-go, go, cacert }:
+
+{ name, src, sha256 }:
+
+stdenvNoCC.mkDerivation {
+  name = "${name}-gxdeps";
+  inherit src;
+
+  nativeBuildInputs = [ cacert go gx gx-go ];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  phases = [ "unpackPhase" "buildPhase" "installPhase" ];
+
+  buildPhase = ''
+    export GOPATH=$(pwd)/vendor
+    mkdir -p vendor
+    gx install
+  '';
+
+  installPhase = ''
+    mv vendor $out
+  '';
+
+  preferLocalBuild = true;
+}
diff --git a/nixpkgs/pkgs/build-support/fetchhg/builder.sh b/nixpkgs/pkgs/build-support/fetchhg/builder.sh
new file mode 100644
index 000000000000..847f18fa5975
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchhg/builder.sh
@@ -0,0 +1,9 @@
+source $stdenv/setup
+header "getting $url${rev:+ ($rev)} into $out"
+
+hg clone --insecure "$url" hg-clone
+
+hg archive -q$subrepoClause -y ${rev:+-r "$rev"} --cwd hg-clone $out
+rm -f $out/.hg_archival.txt
+
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchhg/default.nix b/nixpkgs/pkgs/build-support/fetchhg/default.nix
new file mode 100644
index 000000000000..15309d0a1950
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchhg/default.nix
@@ -0,0 +1,29 @@
+{ lib, stdenvNoCC, mercurial }:
+{ name ? null
+, url
+, rev ? null
+, md5 ? null
+, sha256 ? null
+, fetchSubrepos ? false
+, preferLocalBuild ? true }:
+
+if md5 != null then
+  throw "fetchhg does not support md5 anymore, please use sha256"
+else
+# TODO: statically check if mercurial has https support if the url starts with https.
+stdenvNoCC.mkDerivation {
+  name = "hg-archive" + (if name != null then "-${name}" else "");
+  builder = ./builder.sh;
+  nativeBuildInputs = [mercurial];
+
+  impureEnvVars = lib.fetchers.proxyImpureEnvVars;
+
+  subrepoClause = if fetchSubrepos then "S" else "";
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  inherit url rev;
+  inherit preferLocalBuild;
+}
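+
+# Usage sketch (hypothetical repository; rev may be a changeset id,
+# tag, or branch; placeholder hash):
+#
+#   fetchhg {
+#     url    = "https://hg.example.org/project";
+#     rev    = "1.0";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }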
diff --git a/nixpkgs/pkgs/build-support/fetchhg/nix-prefetch-hg b/nixpkgs/pkgs/build-support/fetchhg/nix-prefetch-hg
new file mode 100755
index 000000000000..94c6b1ec6945
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchhg/nix-prefetch-hg
@@ -0,0 +1,83 @@
+#! /usr/bin/env bash
+set -e
+
+url=$1
+rev=$2
+expHash=$3
+
+hashType="${NIX_HASH_ALGO:-sha256}"
+hashFormat=${hashFormat:-"--base32"}
+rev="${rev:-tip}"
+
+LOG() {
+  echo "$@" >&2
+}
+
+die() {
+  LOG "$@"
+  exit 1
+}
+
+if [[ -z "$url" || "$url" == "--help" ]]; then
+    die "Usage: nix-prefetch-hg URL [rev [EXPECTED-HASH]]"
+fi
+
+if [[ "${fetchSubrepos:-0}" == 1 ]]; then
+    subrepoClause=S
+else
+    subrepoClause=
+fi
+
+# If the hash was given, a file with that hash may already be in the
+# store.
+if [[ -n "$expHash" ]]; then
+    finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" hg-archive)
+    if ! nix-store --check-validity "$finalPath" 2> /dev/null; then
+        finalPath=
+    fi
+    hash="$expHash"
+fi
+
+
+# If we don't know the hash or a path with that hash doesn't exist,
+# download the file and add it to the store.
+if [[ -z "$finalPath" ]]; then
+
+    tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/hg-checkout-tmp-XXXXXXXX")"
+    cleanup() { x=$?; rm -rf "$tmpPath"; exit $x; }; trap cleanup EXIT
+
+    tmpArchive="$tmpPath/hg-archive"
+
+    # Perform the checkout.
+    if [[ "$url" != /* ]]; then
+      tmpClone="$tmpPath/hg-clone"
+      hg clone -q -y -U "$url" "$tmpClone" >&2
+    else
+      tmpClone=$url
+    fi
+    hg archive -q$subrepoClause -y -r "$rev" --cwd "$tmpClone" "$tmpArchive"
+    rm -f "$tmpArchive/.hg_archival.txt"
+
+    LOG "hg revision is $(cd "$tmpClone"; hg id -r "$rev" -i)"
+
+    # Compute the hash.
+    hash=$(nix-hash --type "$hashType" "$hashFormat" "$tmpArchive")
+    if [[ -z "$QUIET" ]]; then LOG "hash is $hash"; fi
+
+    # Add the downloaded file to the Nix store.
+    finalPath=$(nix-store --add-fixed --recursive "$hashType" "$tmpArchive")
+
+    if [[ -n "$expHash" && "$expHash" != "$hash" ]]; then
+        die "ERROR: hash mismatch for URL \`$url'"
+    fi
+
+
+fi
+
+if [[ -z "$QUIET" ]]; then LOG "path is $finalPath"; fi
+
+echo "$hash"
+
+if [[ -n "$PRINT_PATH" ]]; then
+    echo "$finalPath"
+fi
diff --git a/nixpkgs/pkgs/build-support/fetchipfs/builder.sh b/nixpkgs/pkgs/build-support/fetchipfs/builder.sh
new file mode 100644
index 000000000000..7a6a517566f5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchipfs/builder.sh
@@ -0,0 +1,87 @@
+source $stdenv/setup
+
+# Curl flags to handle redirects, not use EPSV, handle cookies for
+# servers that need them during redirects, and work on SSL without a
+# certificate (this isn't a security problem because we check the
+# cryptographic hash of the output anyway).
+
+set -o noglob
+
+curl="curl            \
+ --location           \
+ --max-redirs 20      \
+ --retry 2            \
+ --disable-epsv       \
+ --cookie-jar cookies \
+ --insecure           \
+ --speed-time 5       \
+ -#                   \
+ --fail               \
+ $curlOpts            \
+ $NIX_CURL_FLAGS"
+
+finish() {
+    runHook postFetch
+    set +o noglob
+    exit 0
+}
+
+ipfs_add() {
+    if curl --retry 0 --head --silent "localhost:5001" > /dev/null; then
+        echo "=IPFS= add $ipfs"
+        tar --owner=root --group=root -cWf "source.tar" $(echo *)
+        res=$(curl -# -F "file=@source.tar" "localhost:5001/api/v0/tar/add" | sed 's/.*"Hash":"\(.*\)".*/\1/')
+        if [ "$ipfs" != "$res" ]; then
+            echo "\`ipfs tar add' results in $res when $ipfs is expected"
+            exit 1
+        fi
+        rm "source.tar"
+    fi
+}
+
+echo
+
+mkdir download
+cd download
+
+if curl --retry 0 --head --silent "localhost:5001" > /dev/null; then
+    curlexit=18;
+    echo "=IPFS= get $ipfs"
+    # if we get error code 18, resume partial download
+    while [ $curlexit -eq 18 ]; do
+        # keep this inside an if statement, since on failure it doesn't abort the script
+        if $curl -C - "http://localhost:5001/api/v0/tar/cat?arg=$ipfs" --output "$ipfs.tar"; then
+            unpackFile "$ipfs.tar"
+            rm "$ipfs.tar"
+            set +o noglob
+            mv $(echo *) "$out"
+            finish
+        else
+            curlexit=$?;
+        fi
+    done
+fi
+
+if test -n "$url"; then
+    curlexit=18;
+    echo "Downloading $url"
+    while [ $curlexit -eq 18 ]; do
+        # keep this inside an if statement, since on failure it doesn't abort the script
+        if $curl "$url" -O; then
+            set +o noglob
+            tmpfile=$(echo *)
+            unpackFile $tmpfile
+            rm $tmpfile
+            ipfs_add
+            mv $(echo *) "$out"
+            finish
+        else
+            curlexit=$?;
+        fi
+    done
+fi
+
+echo "error: cannot download $ipfs from ipfs or the given url"
+echo
+set +o noglob
+exit 1
diff --git a/nixpkgs/pkgs/build-support/fetchipfs/default.nix b/nixpkgs/pkgs/build-support/fetchipfs/default.nix
new file mode 100644
index 000000000000..7a66999ec569
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchipfs/default.nix
@@ -0,0 +1,52 @@
+{ stdenv
+, curl
+}:
+
+{ ipfs
+, url            ? ""
+, curlOpts       ? ""
+, outputHash     ? ""
+, outputHashAlgo ? ""
+, md5            ? ""
+, sha1           ? ""
+, sha256         ? ""
+, sha512         ? ""
+, meta           ? {}
+, port           ? "8080"
+, postFetch      ? ""
+, preferLocalBuild ? true
+}:
+
+assert sha512 != "" -> builtins.compareVersions "1.11" builtins.nixVersion <= 0;
+
+let
+
+  hasHash = (outputHash != "" && outputHashAlgo != "")
+    || md5 != "" || sha1 != "" || sha256 != "" || sha512 != "";
+
+in
+
+if (!hasHash) then
+  throw "Specify sha for fetchipfs fixed-output derivation"
+else stdenv.mkDerivation {
+  name = ipfs;
+  builder = ./builder.sh;
+  nativeBuildInputs = [ curl ];
+
+  # New-style output content requirements.
+  outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo
+    else if sha512 != "" then "sha512"
+    else if sha256 != "" then "sha256"
+    else if sha1 != "" then "sha1" else "md5";
+  outputHash = if outputHash != "" then outputHash
+    else if sha512 != "" then sha512
+    else if sha256 != "" then sha256
+    else if sha1 != "" then sha1 else md5;
+
+  outputHashMode = "recursive";
+
+  inherit curlOpts
+          postFetch
+          ipfs
+          url
+          port
+          meta;
+
+  # Doing the download on a remote machine just duplicates network
+  # traffic, so don't do that.
+  inherit preferLocalBuild;
+}
diff --git a/nixpkgs/pkgs/build-support/fetchmavenartifact/default.nix b/nixpkgs/pkgs/build-support/fetchmavenartifact/default.nix
new file mode 100644
index 000000000000..4274b4b52bfa
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchmavenartifact/default.nix
@@ -0,0 +1,76 @@
+# Adaptation of the MIT-licensed work on `sbt2nix` done by Charles O'Farrell
+
+{ lib, fetchurl, stdenv }:
+let
+  defaultRepos = [
+    "https://repo1.maven.org/maven2"
+    "https://oss.sonatype.org/content/repositories/releases"
+    "https://oss.sonatype.org/content/repositories/public"
+    "https://repo.typesafe.com/typesafe/releases"
+  ];
+in
+
+args@
+{ # Example: "org.apache.httpcomponents"
+  groupId
+, # Example: "httpclient"
+  artifactId
+, # Example: "4.3.6"
+  version
+, # Example: "jdk11"
+  classifier ? null
+, # List of maven repositories from where to fetch the artifact.
+  # Example: [ http://oss.sonatype.org/content/repositories/public ].
+  repos ? defaultRepos
+  # The `url` and `urls` parameters, if specified, should point to the JAR
+  # file and take precedence over the `repos` parameter. Only one of `url`
+  # and `urls` can be specified, not both.
+, url ? ""
+, urls ? []
+, # The rest of the arguments are just forwarded to `fetchurl`.
+  ...
+}:
+
+# only one of url and urls can be specified at a time.
+assert (url == "") || (urls == []);
+# if repos is empty, then url or urls must be specified.
+assert (repos != []) || (url != "") || (urls != []);
+
+let
+  name_ =
+    lib.concatStrings [
+      (lib.replaceChars ["."] ["_"] groupId) "_"
+      (lib.replaceChars ["."] ["_"] artifactId) "-"
+      version
+    ];
+  mkJarUrl = repoUrl:
+    lib.concatStringsSep "/" [
+      (lib.removeSuffix "/" repoUrl)
+      (lib.replaceChars ["."] ["/"] groupId)
+      artifactId
+      version
+      "${artifactId}-${version}${lib.optionalString (!isNull classifier) "-${classifier}"}.jar"
+    ];
+  urls_ =
+    if url != "" then [url]
+    else if urls != [] then urls
+    else map mkJarUrl repos;
+  jar =
+    fetchurl (
+      builtins.removeAttrs args ["groupId" "artifactId" "version" "classifier" "repos" "url" ]
+        // { urls = urls_; name = "${name_}.jar"; }
+    );
+in
+  stdenv.mkDerivation {
+    name = name_;
+    phases = "installPhase fixupPhase";
+    # By moving the jar to $out/share/java we make it discoverable by java
+    # packages that mention this derivation in their buildInputs.
+    installPhase = ''
+      mkdir -p $out/share/java
+      ln -s ${jar} $out/share/java/${artifactId}-${version}.jar
+    '';
+    # We also add a `jar` attribute that can be used to easily obtain the path
+    # to the downloaded jar file.
+    passthru.jar = jar;
+  }
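+
+# Usage sketch, reusing the parameter examples documented above
+# (placeholder hash):
+#
+#   fetchMavenArtifact {
+#     groupId    = "org.apache.httpcomponents";
+#     artifactId = "httpclient";
+#     version    = "4.3.6";
+#     sha256     = "0000000000000000000000000000000000000000000000000000";
+#   }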
diff --git a/nixpkgs/pkgs/build-support/fetchmtn/builder.sh b/nixpkgs/pkgs/build-support/fetchmtn/builder.sh
new file mode 100644
index 000000000000..c1b0db895bc1
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchmtn/builder.sh
@@ -0,0 +1,51 @@
+source $stdenv/setup
+
+set -x
+
+if ! [ -f "$cacheDB" ]; then 
+	echo "Creating cache DB $cacheDB"
+	mtn --db "$cacheDB" db init
+fi
+
+header "getting revision $selector";
+
+done=;
+for source in $dbs; do
+	if mtn pull --db "$cacheDB" "$source" "${branch}"; then
+		revision="$(mtn --db "$cacheDB" au toposort $(mtn --db "$cacheDB" au select "$selector") | tail -1)";
+		if [ -n "$revision" ]; then
+			if mtn --db "$cacheDB" au get_revision "$revision"; then
+				echo "found revision $revision"
+				done=1;
+			else
+				echo "revision $revision does not exist";
+			fi
+		else
+			echo "selector $selector does not match any revision";
+		fi
+	else
+		echo "pulling branch $branch wasn't successful";
+	fi;
+	if test -n "$done"; then
+		break;
+	fi;
+done;
+
+stopNest;
+
+header "checking out the revision $revision";
+
+if test -n "$done"; then
+	mtn checkout --db "$cacheDB" -r "$revision" "$out" -b "${branch}"
+else
+	echo "Needed revision still not found. Exiting";
+	exit 1;
+fi;
+
+stopNest
+
+header "clearing _MTN in the output"
+
+rm -rf "$out/_MTN"
+
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchmtn/default.nix b/nixpkgs/pkgs/build-support/fetchmtn/default.nix
new file mode 100644
index 000000000000..4aa134242aa7
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchmtn/default.nix
@@ -0,0 +1,25 @@
+# You can specify some extra mirrors and a cache DB via options
+{lib, stdenvNoCC, monotone, defaultDBMirrors ? [], cacheDB ? "./mtn-checkout.db"}:
+# dbs is a list of strings;
+# each is a URL for sync
+
+# selector is an mtn selector, like h:org.example.branch
+#
+{name ? "mtn-checkout", dbs ? [], sha256
+, selector ? "h:" + branch, branch}:
+
+stdenvNoCC.mkDerivation {
+  builder = ./builder.sh;
+  nativeBuildInputs = [monotone];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  dbs = defaultDBMirrors ++ dbs;
+  inherit branch cacheDB name selector;
+
+  impureEnvVars = lib.fetchers.proxyImpureEnvVars;
+
+}
+
diff --git a/nixpkgs/pkgs/build-support/fetchnuget/default.nix b/nixpkgs/pkgs/build-support/fetchnuget/default.nix
new file mode 100644
index 000000000000..ad61b9a51d2b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchnuget/default.nix
@@ -0,0 +1,43 @@
+{ fetchurl, buildDotnetPackage, unzip }:
+
+attrs @
+{ baseName
+, version
+, url ? "https://www.nuget.org/api/v2/package/${baseName}/${version}"
+, sha256 ? ""
+, md5 ? ""
+, ...
+}:
+if md5 != "" then
+  throw "fetchnuget does not support md5 anymore, please use sha256"
+else
+  buildDotnetPackage ({
+    src = fetchurl {
+      inherit url sha256;
+      name = "${baseName}.${version}.zip";
+    };
+
+    sourceRoot = ".";
+
+    nativeBuildInputs = [ unzip ];
+
+    dontBuild = true;
+
+    preInstall = ''
+      function traverseRename () {
+        for e in *
+        do
+          t="$(echo "$e" | sed -e "s/%20/\ /g" -e "s/%2B/+/g")"
+          [ "$t" != "$e" ] && mv -vn "$e" "$t"
+          if [ -d "$t" ]
+          then
+            cd "$t"
+            traverseRename
+            cd ..
+          fi
+        done
+      }
+
+      traverseRename
+   '';
+  } // attrs)
diff --git a/nixpkgs/pkgs/build-support/fetchpatch/default.nix b/nixpkgs/pkgs/build-support/fetchpatch/default.nix
new file mode 100644
index 000000000000..71c0d4664983
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchpatch/default.nix
@@ -0,0 +1,61 @@
+# This function downloads and normalizes a patch/diff file.
+# This is primarily useful for dynamically generated patches,
+# such as GitHub's or cgit's, where the non-significant content parts
+# often change with updating of git or cgit.
+# stripLen acts as the -p parameter when applying a patch.
+
+{ lib, fetchurl, buildPackages }:
+let
+  # 0.3.4 would change hashes: https://github.com/NixOS/nixpkgs/issues/25154
+  patchutils = buildPackages.patchutils_0_3_3;
+in
+{ stripLen ? 0, extraPrefix ? null, excludes ? [], includes ? [], revert ? false, ... }@args:
+
+let
+  # Make base-64 encoded SRI hash filename-safe using RFC 4648 §5
+  tmpname = lib.replaceStrings [ "+" "/" "=" ] [ "-" "_" "" ] args.sha256;
+in
+fetchurl ({
+  postFetch = ''
+    tmpfile="$TMPDIR/${tmpname}"
+    if [ ! -s "$out" ]; then
+      echo "error: Fetched patch file '$out' is empty!" 1>&2
+      exit 1
+    fi
+    "${patchutils}/bin/lsdiff" "$out" \
+      | sort -u | sed -e 's/[*?]/\\&/g' \
+      | xargs -I{} \
+        "${patchutils}/bin/filterdiff" \
+        --include={} \
+        --strip=${toString stripLen} \
+        ${lib.optionalString (extraPrefix != null) ''
+           --addoldprefix=a/${extraPrefix} \
+           --addnewprefix=b/${extraPrefix} \
+        ''} \
+        --clean "$out" > "$tmpfile"
+    if [ ! -s "$tmpfile" ]; then
+      echo "error: Normalized patch '$tmpfile' is empty (while the fetched file was not)!" 1>&2
+      echo "Did you maybe fetch a HTML representation of a patch instead of a raw patch?" 1>&2
+      echo "Fetched file was:" 1>&2
+      cat "$out" 1>&2
+      exit 1
+    fi
+    ${patchutils}/bin/filterdiff \
+      -p1 \
+      ${builtins.toString (builtins.map (x: "-x ${lib.escapeShellArg x}") excludes)} \
+      ${builtins.toString (builtins.map (x: "-i ${lib.escapeShellArg x}") includes)} \
+      "$tmpfile" > "$out"
+
+    if [ ! -s "$out" ]; then
+      echo "error: Filtered patch '$out$' is empty (while the original patch file was not)!" 1>&2
+      echo "Check your includes and excludes." 1>&2
+      echo "Normalizd patch file was:" 1>&2
+      cat "$tmpfile" 1>&2
+      exit 1
+    fi
+  '' + lib.optionalString revert ''
+    ${patchutils}/bin/interdiff "$out" /dev/null > "$tmpfile"
+    mv "$tmpfile" "$out"
+  '' + (args.postFetch or "");
+  meta.broken = excludes != [] && includes != [];
+} // builtins.removeAttrs args ["stripLen" "extraPrefix" "excludes" "includes" "revert" "postFetch"])
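+
+# Usage sketch (hypothetical patch URL; the sha256 covers the
+# *normalized* patch written by postFetch, not the raw download;
+# placeholder shown):
+#
+#   fetchpatch {
+#     url    = "https://example.org/project/commit/abc123.patch";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }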
diff --git a/nixpkgs/pkgs/build-support/fetchrepoorcz/default.nix b/nixpkgs/pkgs/build-support/fetchrepoorcz/default.nix
new file mode 100644
index 000000000000..3ac7cace0dcf
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchrepoorcz/default.nix
@@ -0,0 +1,10 @@
+{ fetchzip }:
+
+# gitweb example, snapshot support is optional in gitweb
+{ repo, rev, name ? "source"
+, ... # For hash agility
+}@args: fetchzip ({
+  inherit name;
+  url = "https://repo.or.cz/${repo}.git/snapshot/${rev}.tar.gz";
+  meta.homepage = "https://repo.or.cz/${repo}.git/";
+} // removeAttrs args [ "repo" "rev" ]) // { inherit rev; }
diff --git a/nixpkgs/pkgs/build-support/fetchrepoproject/default.nix b/nixpkgs/pkgs/build-support/fetchrepoproject/default.nix
new file mode 100644
index 000000000000..69b1bd1aef74
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchrepoproject/default.nix
@@ -0,0 +1,78 @@
+{ lib, stdenvNoCC, gitRepo, cacert, copyPathsToStore }:
+
+{ name, manifest, rev ? "HEAD", sha256
+# Optional parameters:
+, repoRepoURL ? "", repoRepoRev ? "", referenceDir ? "", manifestName ? ""
+, localManifests ? [], createMirror ? false, useArchive ? false
+}:
+
+assert repoRepoRev != "" -> repoRepoURL != "";
+assert createMirror -> !useArchive;
+
+with lib;
+
+let
+  extraRepoInitFlags = [
+    (optionalString (repoRepoURL != "") "--repo-url=${repoRepoURL}")
+    (optionalString (repoRepoRev != "") "--repo-branch=${repoRepoRev}")
+    (optionalString (referenceDir != "") "--reference=${referenceDir}")
+    (optionalString (manifestName != "") "--manifest-name=${manifestName}")
+  ];
+
+  repoInitFlags = [
+    "--manifest-url=${manifest}"
+    "--manifest-branch=${rev}"
+    "--depth=1"
+    (optionalString createMirror "--mirror")
+    (optionalString useArchive "--archive")
+  ] ++ extraRepoInitFlags;
+
+  local_manifests = copyPathsToStore localManifests;
+
+in stdenvNoCC.mkDerivation {
+  inherit name;
+
+  inherit cacert manifest rev repoRepoURL repoRepoRev referenceDir; # TODO
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  preferLocalBuild = true;
+  enableParallelBuilding = true;
+
+  impureEnvVars = fetchers.proxyImpureEnvVars ++ [
+    "GIT_PROXY_COMMAND" "SOCKS_SERVER"
+  ];
+
+  nativeBuildInputs = [ gitRepo cacert ];
+
+  GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";
+
+  buildCommand = ''
+    # Path must be absolute (e.g. for GnuPG: ~/.repoconfig/gnupg/pubring.kbx)
+    export HOME="$(pwd)"
+
+    mkdir $out
+    cd $out
+
+    mkdir .repo
+    ${optionalString (local_manifests != []) ''
+      mkdir .repo/local_manifests
+      for local_manifest in ${concatMapStringsSep " " toString local_manifests}; do
+        cp $local_manifest .repo/local_manifests/$(stripHash $local_manifest; echo $strippedName)
+      done
+    ''}
+
+    repo init ${concatStringsSep " " repoInitFlags}
+    repo sync --jobs=$NIX_BUILD_CORES --current-branch
+
+    # TODO: The git-index files (and probably the files in .repo as well) have
+    # different contents each time and will therefore change the final hash
+    # (i.e. creating a mirror probably won't work).
+    ${optionalString (!createMirror) ''
+      rm -rf .repo
+      find -type d -name '.git' -prune -exec rm -rf {} +
+    ''}
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/fetchs3/default.nix b/nixpkgs/pkgs/build-support/fetchs3/default.nix
new file mode 100644
index 000000000000..acad0749b663
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchs3/default.nix
@@ -0,0 +1,36 @@
+{ lib, runCommand, awscli }:
+
+{ s3url
+, name ? builtins.baseNameOf s3url
+, sha256
+, region ? "us-east-1"
+, credentials ? null # Default to looking at local EC2 metadata service
+, recursiveHash ? false
+, postFetch ? null
+}:
+
+let
+  mkCredentials = { access_key_id, secret_access_key, session_token ? null }: {
+    AWS_ACCESS_KEY_ID = access_key_id;
+    AWS_SECRET_ACCESS_KEY = secret_access_key;
+    AWS_SESSION_TOKEN = session_token;
+  };
+
+  credentialAttrs = lib.optionalAttrs (credentials != null) (mkCredentials credentials);
+in runCommand name ({
+  nativeBuildInputs = [ awscli ];
+
+  outputHashAlgo = "sha256";
+  outputHash = sha256;
+  outputHashMode = if recursiveHash then "recursive" else "flat";
+
+  preferLocalBuild = true;
+
+  AWS_DEFAULT_REGION = region;
+} // credentialAttrs) (if postFetch != null then ''
+  downloadedFile="$(mktemp)"
+  aws s3 cp ${s3url} $downloadedFile
+  ${postFetch}
+'' else  ''
+  aws s3 cp ${s3url} $out
+'')
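+
+# Usage sketch (hypothetical bucket and credentials; omit credentials
+# to fall back to the EC2 metadata service):
+#
+#   fetchs3 {
+#     s3url  = "s3://example-bucket/example.tar.gz";
+#     region = "eu-west-1";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#     credentials = {
+#       access_key_id     = "EXAMPLE-KEY-ID";
+#       secret_access_key = "EXAMPLE-SECRET";
+#     };
+#   }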
diff --git a/nixpkgs/pkgs/build-support/fetchsavannah/default.nix b/nixpkgs/pkgs/build-support/fetchsavannah/default.nix
new file mode 100644
index 000000000000..994922a58679
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsavannah/default.nix
@@ -0,0 +1,10 @@
+{ fetchzip }:
+
+# cgit example, snapshot support is optional in cgit
+{ repo, rev, name ? "source"
+, ... # For hash agility
+}@args: fetchzip ({
+  inherit name;
+  url = "https://git.savannah.gnu.org/cgit/${repo}.git/snapshot/${repo}-${rev}.tar.gz";
+  meta.homepage = "https://git.savannah.gnu.org/cgit/${repo}.git/";
+} // removeAttrs args [ "repo" "rev" ]) // { inherit rev; }
diff --git a/nixpkgs/pkgs/build-support/fetchsourcehut/default.nix b/nixpkgs/pkgs/build-support/fetchsourcehut/default.nix
new file mode 100644
index 000000000000..ed2f074200cd
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsourcehut/default.nix
@@ -0,0 +1,25 @@
+{ fetchzip, lib }:
+
+{ owner
+, repo, rev
+, domain ? "sr.ht"
+, vc ? "git"
+, name ? "source"
+, ... # For hash agility
+} @ args:
+
+with lib;
+
+assert (lib.assertOneOf "vc" vc [ "hg" "git" ]);
+
+let
+  baseUrl = "https://${vc}.${domain}/${owner}/${repo}";
+
+in fetchzip (recursiveUpdate {
+  inherit name;
+  url = "${baseUrl}/archive/${rev}.tar.gz";
+  meta.homepage = "${baseUrl}/";
+  extraPostFetch = optionalString (vc == "hg") ''
+    rm -f "$out/.hg_archival.txt"
+  ''; # impure file; see #12002
+} (removeAttrs args [ "owner" "repo" "rev" "domain" "vc" ])) // { inherit rev; }
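+
+# Usage sketch (hypothetical repository; the owner includes the
+# leading "~"; placeholder hash):
+#
+#   fetchFromSourcehut {
+#     owner  = "~example";
+#     repo   = "project";
+#     rev    = "v0.1";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }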
diff --git a/nixpkgs/pkgs/build-support/fetchsvn/builder.sh b/nixpkgs/pkgs/build-support/fetchsvn/builder.sh
new file mode 100644
index 000000000000..ed3e65f07695
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsvn/builder.sh
@@ -0,0 +1,25 @@
+source $stdenv/setup
+
+header "exporting $url (r$rev) into $out"
+
+if test -n "$http_proxy"; then
+    # Configure proxy
+    mkdir .subversion
+    proxy="${http_proxy#*://}"
+
+    echo '[global]' > .subversion/servers
+    echo "http-proxy-host = ${proxy%:*}" >> .subversion/servers
+    echo "http-proxy-port = ${proxy##*:}" >> .subversion/servers
+
+    export HOME="$PWD"
+fi;
+
+if test -z "$LC_ALL"; then
+    export LC_ALL="en_US.UTF-8"
+fi;
+
+svn export --trust-server-cert --non-interactive \
+    ${ignoreExternals:+--ignore-externals} ${ignoreKeywords:+--ignore-keywords} \
+    -r "$rev" "$url" "$out"
+
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchsvn/default.nix b/nixpkgs/pkgs/build-support/fetchsvn/default.nix
new file mode 100644
index 000000000000..82dececc124a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsvn/default.nix
@@ -0,0 +1,54 @@
+{ lib, stdenvNoCC, buildPackages
+, subversion, glibcLocales, sshSupport ? true, openssh ? null
+}:
+
+{ url, rev ? "HEAD", md5 ? "", sha256 ? ""
+, ignoreExternals ? false, ignoreKeywords ? false, name ? null
+, preferLocalBuild ? true
+}:
+
+assert sshSupport -> openssh != null;
+
+let
+  repoName = with lib;
+    let
+      fst = head;
+      snd = l: head (tail l);
+      trd = l: head (tail (tail l));
+      path_ =
+        (p: if head p == "" then tail p else p) # ~ drop final slash if any
+        (reverseList (splitString "/" url));
+      path = [ (removeSuffix "/" (head path_)) ] ++ (tail path_);
+    in
+      # ../repo/trunk -> repo
+      if fst path == "trunk" then snd path
+      # ../repo/branches/branch -> repo-branch
+      else if snd path == "branches" then "${trd path}-${fst path}"
+      # ../repo/tags/tag -> repo-tag
+      else if snd path == "tags" then     "${trd path}-${fst path}"
+      # ../repo (no trunk) -> repo
+      else fst path;
+
+  name_ = if name == null then "${repoName}-r${toString rev}" else name;
+in
+
+if md5 != "" then
+  throw "fetchsvn does not support md5 anymore, please use sha256"
+else
+stdenvNoCC.mkDerivation {
+  name = name_;
+  builder = ./builder.sh;
+  nativeBuildInputs = [ subversion glibcLocales ]
+    ++ lib.optional sshSupport openssh;
+
+  SVN_SSH = if sshSupport then "${buildPackages.openssh}/bin/ssh" else null;
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  inherit url rev ignoreExternals ignoreKeywords;
+
+  impureEnvVars = lib.fetchers.proxyImpureEnvVars;
+  inherit preferLocalBuild;
+}
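+
+# Usage sketch (hypothetical repository; per repoName above, this call
+# yields the store name "project-r1234"; placeholder hash):
+#
+#   fetchsvn {
+#     url    = "https://svn.example.org/repos/project/trunk";
+#     rev    = 1234;
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }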
diff --git a/nixpkgs/pkgs/build-support/fetchsvn/nix-prefetch-svn b/nixpkgs/pkgs/build-support/fetchsvn/nix-prefetch-svn
new file mode 100755
index 000000000000..03b9eb9a03df
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsvn/nix-prefetch-svn
@@ -0,0 +1,79 @@
+#! /bin/sh -e
+
+url=$1
+rev=$2
+expHash=$3
+
+hashType=$NIX_HASH_ALGO
+if test -z "$hashType"; then
+    hashType=sha256
+fi
+if test -z "$hashFormat"; then
+    hashFormat=--base32
+fi
+
+if test -z "$url"; then
+    echo "syntax: nix-prefetch-svn URL [REVISION [EXPECTED-HASH]]" >&2
+    exit 1
+fi
+
+test -n "$rev" || rev="HEAD"
+
+repoName=$(echo $url | sed '
+  s,.*/\([^/]\+\)/trunk/*$,\1,;t
+  s,.*/\([^/]\+\)/branches/\([^/]\+\)/*$,\1-\2,;t
+  s,.*/\([^/]\+\)/tags/\([^/]\+\)/*$,\1-\2,;t
+  s,.*/\([^/]\+\)/*$,\1,;t
+')
+dstFile=$repoName-r$rev
+
+# If the hash was given, a file with that hash may already be in the
+# store.
+if test -n "$expHash"; then
+    finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" $dstFile)
+    if ! nix-store --check-validity "$finalPath" 2> /dev/null; then
+        finalPath=
+    fi
+    hash=$expHash
+fi
+
+
+# If we don't know the hash or a path with that hash doesn't exist,
+# download the file and add it to the store.
+if test -z "$finalPath"; then
+    tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/svn-checkout-tmp-XXXXXXXX")"
+    trap "rm -rf \"$tmpPath\"" EXIT
+
+    tmpFile="$tmpPath/$dstFile"
+
+    # Perform the checkout.
+    if test "$NIX_PREFETCH_SVN_LEAVE_DOT_SVN" != 1
+    then
+	command="export"
+    else
+	command="checkout"
+    fi
+
+    echo p | svn "$command" --quiet -r "$rev" "$url" "$tmpFile" >&2
+    echo "svn revision is $(svn info -r "$rev" "$url" | grep "Revision: " | cut -d' ' -f2)"
+
+    # Compute the hash.
+    hash=$(nix-hash --type $hashType $hashFormat $tmpFile)
+    if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
+
+    # Add the downloaded file to the Nix store.
+    finalPath=$(nix-store --add-fixed --recursive "$hashType" $tmpFile)
+
+    if test -n "$expHash" -a "$expHash" != "$hash"; then
+        echo "hash mismatch for URL \`$url'"
+        exit 1
+    fi
+fi
+
+if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi
+
+echo $hash
+
+if test -n "$PRINT_PATH"; then
+    echo $finalPath
+fi
diff --git a/nixpkgs/pkgs/build-support/fetchsvnrevision/default.nix b/nixpkgs/pkgs/build-support/fetchsvnrevision/default.nix
new file mode 100644
index 000000000000..f2e2a11da8d5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsvnrevision/default.nix
@@ -0,0 +1,10 @@
+runCommand: subversion: repository:
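+  # Impure helper: it re-queries the repository's latest revision on every
+  # evaluation (note the `dummy = builtins.currentTime`) and evaluates to a
+  # one-element list, e.g. (hypothetical):
+  #   fetchsvnrevision runCommand subversion "https://svn.example.org/repos/myproject"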
+  import (runCommand "head-revision"
+    { buildInputs = [ subversion ];
+      dummy = builtins.currentTime;
+    }
+    ''
+      rev=$(echo p | svn ls -v --depth empty ${repository} | awk '{ print $1 }')
+      echo "[ \"$rev\" ]" > $out
+      echo Latest revision is $rev
+    '')
diff --git a/nixpkgs/pkgs/build-support/fetchsvnssh/builder.sh b/nixpkgs/pkgs/build-support/fetchsvnssh/builder.sh
new file mode 100644
index 000000000000..d9c6dc7da31a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsvnssh/builder.sh
@@ -0,0 +1,15 @@
+source $stdenv/setup
+
+header "exporting $url (r$rev) into $out"
+
+if test "$sshSupport"; then
+    export SVN_SSH="$openssh/bin/ssh"
+fi
+
+# Pipe the "p" character into Subversion to force it to accept the
+# server's certificate.  This is perfectly safe: we don't care
+# whether the server is being spoofed --- only the cryptographic
+# hash of the output matters.
+expect -f $sshSubversion "$username" "$password" "$rev" "$url" $out
+
+stopNest
diff --git a/nixpkgs/pkgs/build-support/fetchsvnssh/default.nix b/nixpkgs/pkgs/build-support/fetchsvnssh/default.nix
new file mode 100644
index 000000000000..fbd74efd750a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsvnssh/default.nix
@@ -0,0 +1,20 @@
+{stdenvNoCC, subversion, sshSupport ? true, openssh ? null, expect}:
+{username, password, url, rev ? "HEAD", md5 ? "", sha256 ? ""}:
+
+
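+# A minimal usage sketch (all values hypothetical). Note that the url is
+# interpolated into `svn+ssh://$username@$url` by sshsubversion.exp, so it
+# carries no scheme of its own:
+#
+#   fetchsvnssh {
+#     username = "svnuser";
+#     password = "secret";
+#     url = "svn.example.org/repos/myproject/trunk";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }
+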
+if md5 != "" then
+  throw "fetchsvnssh does not support md5 anymore, please use sha256"
+else
+stdenvNoCC.mkDerivation {
+  name = "svn-export-ssh";
+  builder = ./builder.sh;
+  nativeBuildInputs = [subversion expect];
+
+  outputHashAlgo = "sha256";
+  outputHashMode = "recursive";
+  outputHash = sha256;
+
+  sshSubversion = ./sshsubversion.exp;
+
+  inherit username password url rev sshSupport openssh;
+}
diff --git a/nixpkgs/pkgs/build-support/fetchsvnssh/sshsubversion.exp b/nixpkgs/pkgs/build-support/fetchsvnssh/sshsubversion.exp
new file mode 100755
index 000000000000..c00f39714e5b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchsvnssh/sshsubversion.exp
@@ -0,0 +1,22 @@
+#!/nix/var/nix/profiles/default/bin/expect -f
+
+# Set variables
+set username [lindex $argv 0]
+set password [lindex $argv 1]
+set rev [lindex $argv 2]
+set url [lindex $argv 3]
+set out [lindex $argv 4]
+set timeout -1
+
+spawn svn export -r$rev svn+ssh://$username@$url $out
+match_max 100000
+
+expect "*continue connecting*" { send -- "yes\r"; expect "*?assword:*"; send -- "$password\r" } \
+       "*?assword:*" { send -- "$password\r" }
+
+expect "*?assword:*"
+send -- "$password\r"
+
+# Send blank line
+send -- "\r"
+expect eof
diff --git a/nixpkgs/pkgs/build-support/fetchurl/boot.nix b/nixpkgs/pkgs/build-support/fetchurl/boot.nix
new file mode 100644
index 000000000000..bd71f93c5291
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchurl/boot.nix
@@ -0,0 +1,20 @@
+let mirrors = import ./mirrors.nix; in
+
+{ system }:
+
+{ url ? builtins.head urls
+, urls ? []
+, sha256
+, name ? baseNameOf (toString url)
+}:
+
+import <nix/fetchurl.nix> {
+  inherit system sha256 name;
+
+  url =
+    # Handle mirror:// URIs. Since <nix/fetchurl.nix> currently
+    # supports only one URI, use the first listed mirror.
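+    # E.g. a (hypothetical) "mirror://gnu/hello/hello-2.10.tar.gz" becomes
+    # the first entry of mirrors.gnu with "hello/hello-2.10.tar.gz" appended.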
+    let m = builtins.match "mirror://([a-z]+)/(.*)" url; in
+    if m == null then url
+    else builtins.head (mirrors.${builtins.elemAt m 0}) + (builtins.elemAt m 1);
+}
diff --git a/nixpkgs/pkgs/build-support/fetchurl/builder.sh b/nixpkgs/pkgs/build-support/fetchurl/builder.sh
new file mode 100644
index 000000000000..5b04a702aff4
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchurl/builder.sh
@@ -0,0 +1,170 @@
+source $stdenv/setup
+
+source $mirrorsFile
+
+curlVersion=$(curl -V | head -1 | cut -d' ' -f2)
+
+# Curl flags to handle redirects, disable EPSV, handle cookies for
+# servers that need them during redirects, and work on SSL without a
+# certificate (this isn't a security problem because we check the
+# cryptographic hash of the output anyway).
+curl=(
+    curl
+    --location
+    --max-redirs 20
+    --retry 3
+    --disable-epsv
+    --cookie-jar cookies
+    --user-agent "curl/$curlVersion Nixpkgs/$nixpkgsVersion"
+)
+
+if ! [ -f "$SSL_CERT_FILE" ]; then
+    curl+=(--insecure)
+fi
+
+curl+=(
+    $curlOpts
+    $NIX_CURL_FLAGS
+)
+
+downloadedFile="$out"
+if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
+
+
+tryDownload() {
+    local url="$1"
+    echo
+    header "trying $url"
+    local curlexit=18;
+
+    success=
+
+    # if we get error code 18, resume partial download
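+    # (exit code 18 is curl's CURLE_PARTIAL_FILE, i.e. the connection
+    # dropped before the transfer finished)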
+    while [ $curlexit -eq 18 ]; do
+       # keep this inside an if statement, since on failure it doesn't abort the script
+       if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then
+          success=1
+          break
+       else
+          curlexit=$?;
+       fi
+    done
+}
+
+
+finish() {
+    local skipPostFetch="$1"
+
+    set +o noglob
+
+    if [[ $executable == "1" ]]; then
+      chmod +x $downloadedFile
+    fi
+
+    if [ -z "$skipPostFetch" ]; then
+        runHook postFetch
+    fi
+
+    exit 0
+}
+
+
+tryHashedMirrors() {
+    if test -n "$NIX_HASHED_MIRRORS"; then
+        hashedMirrors="$NIX_HASHED_MIRRORS"
+    fi
+
+    for mirror in $hashedMirrors; do
+        url="$mirror/$outputHashAlgo/$outputHash"
+        if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
+            --fail --silent --show-error --head "$url" \
+            --write-out "%{http_code}" --output /dev/null > code 2> log; then
+            tryDownload "$url"
+
+            # We skip postFetch here, because hashed-mirrors are
+            # already content addressed. So if $outputHash is in the
+            # hashed-mirror, changes from ‘postFetch’ would already be
+            # made. So, running postFetch will end up applying the
+            # change /again/, which we don’t want.
+            if test -n "$success"; then finish skipPostFetch; fi
+        else
+            # Be quiet about 404 errors, which we interpret as the file
+            # not being present on this particular mirror.
+            if test "$(cat code)" != 404; then
+                echo "error checking the existence of $url:"
+                cat log
+            fi
+        fi
+    done
+}
+
+
+# URL list may contain ?. No glob expansion for that, please
+set -o noglob
+
+urls2=
+for url in $urls; do
+    if test "${url:0:9}" != "mirror://"; then
+        urls2="$urls2 $url"
+    else
+        url2="${url:9}"; echo "${url2/\// }" > split; read site fileName < split
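+        # e.g. a (hypothetical) mirror://sourceforge/foo/foo-1.0.tar.gz
+        # yields site=sourceforge and fileName=foo/foo-1.0.tar.gz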
+        #varName="mirror_$site"
+        varName="$site" # !!! danger of name clash, fix this
+        if test -z "${!varName}"; then
+            echo "warning: unknown mirror:// site \`$site'"
+        else
+            mirrors=${!varName}
+
+            # Allow command-line override by setting NIX_MIRRORS_$site.
+            varName="NIX_MIRRORS_$site"
+            if test -n "${!varName}"; then mirrors="${!varName}"; fi
+
+            for url3 in $mirrors; do
+                urls2="$urls2 $url3$fileName";
+            done
+        fi
+    fi
+done
+urls="$urls2"
+
+# Restore globbing settings
+set +o noglob
+
+if test -n "$showURLs"; then
+    echo "$urls" > $out
+    exit 0
+fi
+
+if test -n "$preferHashedMirrors"; then
+    tryHashedMirrors
+fi
+
+# URL list may contain ?. No glob expansion for that, please
+set -o noglob
+
+success=
+for url in $urls; do
+    if [ -z "$postFetch" ]; then
+       case "$url" in
+           https://github.com/*/archive/*)
+               echo "warning: archives from GitHub revisions should use fetchFromGitHub"
+               ;;
+           https://gitlab.com/*/-/archive/*)
+               echo "warning: archives from GitLab revisions should use fetchFromGitLab"
+               ;;
+       esac
+    fi
+    tryDownload "$url"
+    if test -n "$success"; then finish; fi
+done
+
+# Restore globbing settings
+set +o noglob
+
+if test -z "$preferHashedMirrors"; then
+    tryHashedMirrors
+fi
+
+
+echo "error: cannot download $name from any mirror"
+exit 1
diff --git a/nixpkgs/pkgs/build-support/fetchurl/default.nix b/nixpkgs/pkgs/build-support/fetchurl/default.nix
new file mode 100644
index 000000000000..8ce69a7f1878
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchurl/default.nix
@@ -0,0 +1,163 @@
+{ lib, buildPackages ? { inherit stdenvNoCC; }, stdenvNoCC
+, curl # Note that `curl' may be `null', e.g. in the case of the native stdenvNoCC.
+, cacert ? null }:
+
+let
+
+  mirrors = import ./mirrors.nix;
+
+  # Write the list of mirrors to a file that we can reuse between
+  # fetchurl instantiations, instead of passing the mirrors to
+  # fetchurl instantiations via environment variables.  This makes the
+  # resulting store derivations (.drv files) much smaller, which in
+  # turn makes nix-env/nix-instantiate faster.
+  mirrorsFile =
+    buildPackages.stdenvNoCC.mkDerivation ({
+      name = "mirrors-list";
+      builder = ./write-mirror-list.sh;
+      preferLocalBuild = true;
+    } // mirrors);
+
+  # Names of the master sites that are mirrored (i.e., "sourceforge",
+  # "gnu", etc.).
+  sites = builtins.attrNames mirrors;
+
+  impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ [
+    # This variable allows the user to pass additional options to curl
+    "NIX_CURL_FLAGS"
+
+    # This variable allows the user to override hashedMirrors from the
+    # command-line.
+    "NIX_HASHED_MIRRORS"
+
+    # This variable allows overriding the timeout for connecting to
+    # the hashed mirrors.
+    "NIX_CONNECT_TIMEOUT"
+  ] ++ (map (site: "NIX_MIRRORS_${site}") sites);
+
+in
+
+{ # URL to fetch.
+  url ? ""
+
+, # Alternatively, a list of URLs specifying alternative download
+  # locations.  They are tried in order.
+  urls ? []
+
+, # Additional curl options needed for the download to succeed.
+  curlOpts ? ""
+
+, # Name of the file.  If empty, use the basename of `url' (or of the
+  # first element of `urls').
+  name ? ""
+
+, # SRI hash.
+  hash ? ""
+
+, # Legacy ways of specifying the hash.
+  outputHash ? ""
+, outputHashAlgo ? ""
+, md5 ? ""
+, sha1 ? ""
+, sha256 ? ""
+, sha512 ? ""
+
+, recursiveHash ? false
+
+, # Shell code to build a netrc file for BASIC auth
+  netrcPhase ? null
+
+, # Impure env vars (https://nixos.org/nix/manual/#sec-advanced-attributes)
+  # needed for netrcPhase
+  netrcImpureEnvVars ? []
+
+, # Shell code executed after the file has been fetched
+  # successfully. This can do things like check or transform the file.
+  postFetch ? ""
+
+, # Whether to download to a temporary path rather than $out. Useful
+  # in conjunction with postFetch. The location of the temporary file
+  # is communicated to postFetch via $downloadedFile.
+  downloadToTemp ? false
+
+, # If true, set executable bit on downloaded file
+  executable ? false
+
+, # If set, don't download the file, but write a list of all possible
+  # URLs (resulting from resolving mirror:// URLs) to $out.
+  showURLs ? false
+
+, # Meta information, if any.
+  meta ? {}
+
+  # Passthru information, if any.
+, passthru ? {}
+  # Doing the download on a remote machine just duplicates network
+  # traffic, so don't do that by default
+, preferLocalBuild ? true
+}:
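+
+# A minimal usage sketch (URL and hash are hypothetical):
+#
+#   fetchurl {
+#     url = "mirror://gnu/hello/hello-2.10.tar.gz";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }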
+
+assert sha512 != "" -> builtins.compareVersions "1.11" builtins.nixVersion <= 0;
+
+let
+  urls_ =
+    if urls != [] && url == "" then
+      (if lib.isList urls then urls
+       else throw "`urls` is not a list")
+    else if urls == [] && url != "" then
+      (if lib.isString url then [url]
+       else throw "`url` is not a string")
+    else throw "fetchurl requires either `url` or `urls` to be set";
+
+  hash_ =
+    if hash != "" then { outputHashAlgo = null; outputHash = hash; }
+    else if md5 != "" then throw "fetchurl does not support md5 anymore, please use sha256 or sha512"
+    else if (outputHash != "" && outputHashAlgo != "") then { inherit outputHashAlgo outputHash; }
+    else if sha512 != "" then { outputHashAlgo = "sha512"; outputHash = sha512; }
+    else if sha256 != "" then { outputHashAlgo = "sha256"; outputHash = sha256; }
+    else if sha1   != "" then { outputHashAlgo = "sha1";   outputHash = sha1; }
+    else if cacert != null then { outputHashAlgo = "sha256"; outputHash = ""; }
+    else throw "fetchurl requires a hash for fixed-output derivation: ${lib.concatStringsSep ", " urls_}";
+in
+
+stdenvNoCC.mkDerivation {
+  name =
+    if showURLs then "urls"
+    else if name != "" then name
+    else baseNameOf (toString (builtins.head urls_));
+
+  builder = ./builder.sh;
+
+  nativeBuildInputs = [ curl ];
+
+  urls = urls_;
+
+  # If set, prefer the content-addressable mirrors
+  # (http://tarballs.nixos.org) over the original URLs.
+  preferHashedMirrors = true;
+
+  # New-style output content requirements.
+  inherit (hash_) outputHashAlgo outputHash;
+
+  SSL_CERT_FILE = if hash_.outputHash == ""
+                  then "${cacert}/etc/ssl/certs/ca-bundle.crt"
+                  else "/no-cert-file.crt";
+
+  outputHashMode = if (recursiveHash || executable) then "recursive" else "flat";
+
+  inherit curlOpts showURLs mirrorsFile postFetch downloadToTemp executable;
+
+  impureEnvVars = impureEnvVars ++ netrcImpureEnvVars;
+
+  nixpkgsVersion = lib.trivial.release;
+
+  inherit preferLocalBuild;
+
+  postHook = if netrcPhase == null then null else ''
+    ${netrcPhase}
+    curlOpts="$curlOpts --netrc-file $PWD/netrc"
+  '';
+
+  inherit meta;
+  inherit passthru;
+}
diff --git a/nixpkgs/pkgs/build-support/fetchurl/mirrors.nix b/nixpkgs/pkgs/build-support/fetchurl/mirrors.nix
new file mode 100644
index 000000000000..292baed20dfe
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchurl/mirrors.nix
@@ -0,0 +1,444 @@
+{
+
+  # Content-addressable Nix mirrors.
+  hashedMirrors = [
+    "http://tarballs.nixos.org"
+  ];
+
+  # Mirrors for mirror://site/filename URIs, where "site" is
+  # "sourceforge", "gnu", etc.
+
+  luarocks = [
+    "https://luarocks.org/"
+    "https://raw.githubusercontent.com/rocks-moonscript-org/moonrocks-mirror/master/"
+    "http://luafr.org/moonrocks/"
+    "http://luarocks.logiceditor.com/rocks/"
+  ];
+
+  # SourceForge.
+  sourceforge = [
+    "https://downloads.sourceforge.net/"
+    "https://prdownloads.sourceforge.net/"
+    "https://heanet.dl.sourceforge.net/sourceforge/"
+    "https://surfnet.dl.sourceforge.net/sourceforge/"
+    "https://dfn.dl.sourceforge.net/sourceforge/"
+    "https://osdn.dl.sourceforge.net/sourceforge/"
+    "https://kent.dl.sourceforge.net/sourceforge/"
+  ];
+
+  # OSDN (formerly SourceForge.jp).
+  osdn = [
+    "https://osdn.dl.osdn.jp/"
+    "https://osdn.mirror.constant.com/"
+    "https://mirrors.gigenet.com/OSDN/"
+    "https://osdn.dl.sourceforge.jp/"
+    "https://jaist.dl.sourceforge.jp/"
+  ];
+
+  # GNU (https://www.gnu.org/prep/ftp.html).
+  gnu = [
+    # This one redirects to a (supposedly) nearby and (supposedly) up-to-date
+    # mirror.
+    "https://ftpmirror.gnu.org/"
+
+    "http://ftp.nluug.nl/pub/gnu/"
+    "http://mirrors.kernel.org/gnu/"
+    "ftp://mirror.cict.fr/gnu/"
+    "ftp://ftp.cs.tu-berlin.de/pub/gnu/"
+    "ftp://ftp.chg.ru/pub/gnu/"
+    "ftp://ftp.funet.fi/pub/mirrors/ftp.gnu.org/gnu/"
+
+    # This one is the master repository, and thus it's always up-to-date.
+    "http://ftp.gnu.org/pub/gnu/"
+  ];
+
+  # GCC.
+  gcc = [
+    "https://bigsearcher.com/mirrors/gcc/"
+    "http://mirror.koddos.net/gcc/"
+    "ftp://ftp.nluug.nl/mirror/languages/gcc/"
+    "ftp://ftp.fu-berlin.de/unix/languages/gcc/"
+    "ftp://ftp.irisa.fr/pub/mirrors/gcc.gnu.org/gcc/"
+    "ftp://gcc.gnu.org/pub/gcc/"
+  ];
+
+  # GnuPG.
+  gnupg = [
+    "https://gnupg.org/ftp/gcrypt/"
+    "http://www.ring.gr.jp/pub/net/"
+    "http://gd.tuwien.ac.at/privacy/"
+    "http://mirrors.dotsrc.org/gcrypt/"
+    "http://ftp.heanet.ie/mirrors/ftp.gnupg.org/gcrypt/"
+    "http://www.mirrorservice.org/sites/ftp.gnupg.org/gcrypt/"
+  ];
+
+  # kernel.org's /pub (/pub/{linux,software}) tree.
+  kernel = [
+    "http://cdn.kernel.org/pub/"
+    "http://www.all.kernel.org/pub/"
+    "http://ramses.wh2.tu-dresden.de/pub/mirrors/kernel.org/"
+    "http://linux-kernel.uio.no/pub/"
+    "http://kernel.osuosl.org/pub/"
+    "ftp://ftp.funet.fi/pub/mirrors/ftp.kernel.org/pub/"
+  ];
+
+  # Mirrors from https://download.kde.org/extra/download-mirrors.html
+  kde = [
+    "https://download.kde.org/download.php?url="
+    "https://ftp.gwdg.de/pub/linux/kde/"
+    "https://mirrors.ocf.berkeley.edu/kde/"
+    "http://mirrors.mit.edu/kde/"
+    "https://mirrors.ustc.edu.cn/kde/"
+    "http://ftp.funet.fi/pub/mirrors/ftp.kde.org/pub/kde/"
+    "ftp://ftp.kde.org/pub/kde/"
+  ];
+
+  # Gentoo files.
+  gentoo = [
+    "http://ftp.snt.utwente.nl/pub/os/linux/gentoo/"
+    "http://distfiles.gentoo.org/"
+    "ftp://mirrors.kernel.org/gentoo/"
+  ];
+
+  savannah = [
+    # Mirrors from https://download-mirror.savannah.gnu.org/releases/00_MIRRORS.html
+    "http://mirror.easyname.at/nongnu/"
+    "http://mirror2.klaus-uwe.me/nongnu/"
+    "http://savannah.c3sl.ufpr.br/"
+    "http://mirror.csclub.uwaterloo.ca/nongnu/"
+    "http://mirror.cedia.org.ec/nongnu/"
+    "http://ftp.igh.cnrs.fr/pub/nongnu/"
+    "http://mirror6.layerjet.com/nongnu"
+    "http://mirror.netcologne.de/savannah/"
+    "http://ftp.cc.uoc.gr/mirrors/nongnu.org/"
+    "http://nongnu.uib.no/"
+    "http://mirrors.fe.up.pt/pub/nongnu/"
+    "http://mirror.lihnidos.org/GNU/savannah/"
+    "http://savannah.mirror.si/"
+    "http://ftp.acc.umu.se/mirror/gnu.org/savannah/"
+    "http://ftp.twaren.net/Unix/NonGNU/"
+    "http://ftp.yzu.edu.tw/pub/nongnu/"
+    "http://mirror.rackdc.com/savannah/"
+    "http://savannah-nongnu-org.ip-connect.vn.ua/"
+    "http://www.mirrorservice.org/sites/download.savannah.gnu.org/releases/"
+    "http://savannah.spinellicreations.com/"
+    "http://gnu.mirrors.pair.com/savannah/savannah/"
+    "ftp://mirror.easyname.at/nongnu/"
+    "ftp://mirror2.klaus-uwe.me/nongnu/"
+    "ftp://savannah.c3sl.ufpr.br/savannah-nongnu/"
+    "ftp://mirror.csclub.uwaterloo.ca/nongnu/"
+    "ftp://mirror.cedia.org.ec/nongnu"
+    "ftp://ftp.igh.cnrs.fr/pub/nongnu/"
+    "ftp://mirror6.layerjet.com/nongnu/"
+    "ftp://mirror.netcologne.de/savannah/"
+    "ftp://nongnu.uib.no/pub/nongnu/"
+    "ftp://mirrors.fe.up.pt/pub/nongnu/"
+    "ftp://savannah.mirror.si/savannah/"
+    "ftp://ftp.twaren.net/Unix/NonGNU/"
+    "ftp://ftp.yzu.edu.tw/pub/nongnu/"
+    "ftp://savannah-nongnu-org.ip-connect.vn.ua/mirror/savannah.nongnu.org/"
+    "ftp://ftp.mirrorservice.org/sites/download.savannah.gnu.org/releases/"
+    "ftp://spinellicreations.com/gnu_dot_org_savannah_mirror/"
+  ];
+
+  samba = [
+    "https://www.samba.org/ftp/"
+    "http://www.samba.org/ftp/"
+  ];
+
+  # BitlBee mirrors, see https://www.bitlbee.org/main.php/mirrors.html .
+  bitlbee = [
+    "http://get.bitlbee.org/"
+    "http://get.bitlbee.be/"
+    "http://get.us.bitlbee.org/"
+    "http://ftp.snt.utwente.nl/pub/software/bitlbee/"
+    "http://bitlbee.intergenia.de/"
+  ];
+
+  # ImageMagick mirrors, see https://www.imagemagick.org/script/mirror.php
+  imagemagick = [
+    "https://www.imagemagick.org/download/"
+    "https://mirror.checkdomain.de/imagemagick/"
+    "https://ftp.nluug.nl/ImageMagick/"
+    "ftp://ftp.sunet.se/pub/multimedia/graphics/ImageMagick/" # also contains older versions removed from most mirrors
+    "http://ftp.sunet.se/pub/multimedia/graphics/ImageMagick/"
+    "ftp://ftp.imagemagick.org/pub/ImageMagick/"
+    "http://ftp.fifi.org/ImageMagick/"
+    "ftp://ftp.fifi.org/ImageMagick/"
+    "http://imagemagick.mirrorcatalogs.com/"
+    "ftp://imagemagick.mirrorcatalogs.com/imagemagick"
+  ];
+
+  # CPAN mirrors.
+  cpan = [
+    "https://cpan.metacpan.org/"
+    "https://cpan.perl.org/"
+    "http://backpan.perl.org/"  # for old releases
+  ];
+
+  # CentOS.
+  centos = [
+    "http://mirror.centos.org/centos/"
+    # For old releases
+    "http://vault.centos.org/"
+    "https://archive.kernel.org/centos-vault/"
+    "http://ftp.jaist.ac.jp/pub/Linux/CentOS-vault/"
+    "http://mirrors.aliyun.com/centos-vault/"
+    "https://mirror.chpc.utah.edu/pub/vault.centos.org/"
+    "https://mirror.its.sfu.ca/mirror/CentOS-vault/"
+    "https://mirror.math.princeton.edu/pub/centos-vault/"
+    "https://mirrors.tripadvisor.com/centos-vault/"
+  ];
+
+  # Debian.
+  debian = [
+    "http://httpredir.debian.org/debian/"
+    "ftp://ftp.au.debian.org/debian/"
+    "ftp://ftp.de.debian.org/debian/"
+    "ftp://ftp.es.debian.org/debian/"
+    "ftp://ftp.fr.debian.org/debian/"
+    "ftp://ftp.it.debian.org/debian/"
+    "ftp://ftp.nl.debian.org/debian/"
+    "ftp://ftp.ru.debian.org/debian/"
+    "ftp://ftp.debian.org/debian/"
+    "http://ftp.debian.org/debian/"
+    "http://archive.debian.org/debian-archive/debian/"
+    "ftp://ftp.funet.fi/pub/mirrors/ftp.debian.org/debian/"
+  ];
+
+  # Ubuntu.
+  ubuntu = [
+    "http://nl.archive.ubuntu.com/ubuntu/"
+    "http://de.archive.ubuntu.com/ubuntu/"
+    "http://archive.ubuntu.com/ubuntu/"
+    "http://old-releases.ubuntu.com/ubuntu/"
+  ];
+
+  # Fedora (please only add full mirrors that carry old Fedora distributions as well).
+  # See: https://mirrors.fedoraproject.org/publiclist (but not all carry old content).
+  fedora = [
+    "http://archives.fedoraproject.org/pub/fedora/"
+    "http://fedora.osuosl.org/"
+    "http://ftp.nluug.nl/pub/os/Linux/distr/fedora/"
+    "http://ftp.funet.fi/pub/mirrors/ftp.redhat.com/pub/fedora/"
+    "http://fedora.bhs.mirrors.ovh.net/"
+    "http://mirror.csclub.uwaterloo.ca/fedora/"
+    "http://ftp.linux.cz/pub/linux/fedora/"
+    "http://ftp.heanet.ie/pub/fedora/"
+    "http://mirror.1000mbps.com/fedora/"
+    "http://archives.fedoraproject.org/pub/archive/fedora/"
+  ];
+
+  # Old SUSE distributions.  Unfortunately there is no master site,
+  # since SUSE actually delete their old distributions (see
+  # ftp://ftp.suse.com/pub/suse/discontinued/deleted-20070817/README.txt).
+  oldsuse = [
+    "ftp://ftp.gmd.de/ftp.suse.com-discontinued/"
+  ];
+
+  # openSUSE.
+  opensuse = [
+    "http://opensuse.hro.nl/opensuse/distribution/"
+    "http://ftp.funet.fi/pub/linux/mirrors/opensuse/distribution/"
+    "http://ftp.belnet.be/mirror/ftp.opensuse.org/distribution/"
+    "http://ftp.uni-kassel.de/opensuse/distribution/"
+    "http://ftp.opensuse.org/pub/opensuse/distribution/"
+    "http://ftp5.gwdg.de/pub/opensuse/discontinued/distribution/"
+    "http://ftp.hosteurope.de/mirror/ftp.opensuse.org/discontinued/"
+    "http://opensuse.mirror.server4you.net/distribution/"
+    "http://ftp.nsysu.edu.tw/Linux/OpenSuSE/distribution/"
+  ];
+
+  # Gnome (see http://ftp.gnome.org/pub/GNOME/MIRRORS).
+  gnome = [
+    # This one redirects to some mirror closeby, so it should be all you need.
+    "http://download.gnome.org/"
+
+    "http://ftp.unina.it/pub/linux/GNOME/"
+    "http://fr2.rpmfind.net/linux/gnome.org/"
+    "ftp://ftp.dit.upm.es/pub/GNOME/"
+    "ftp://ftp.no.gnome.org/pub/GNOME/"
+    "http://ftp.acc.umu.se/pub/GNOME/"
+    "http://ftp.belnet.be/mirror/ftp.gnome.org/"
+    "http://ftp.df.lth.se/pub/gnome/"
+    "http://linorg.usp.br/gnome/"
+    "http://mirror.aarnet.edu.au/pub/GNOME/"
+    "ftp://ftp.cse.buffalo.edu/pub/Gnome/"
+    "ftp://ftp.nara.wide.ad.jp/pub/X11/GNOME/"
+  ];
+
+  xfce = [
+    "http://archive.xfce.org/"
+    "http://mirror.netcologne.de/xfce/"
+    "http://archive.se.xfce.org/xfce/"
+    "http://archive.be.xfce.org/xfce/"
+    "http://mirror.perldude.de/archive.xfce.org/"
+    "http://archive.be2.xfce.org/"
+    "http://ftp.udc.es/xfce/"
+    "http://archive.al-us.xfce.org/"
+    "http://mirror.yongbok.net/X11/xfce-mirror/"
+    "http://mirrors.tummy.com/pub/archive.xfce.org/"
+    "http://xfce.mirror.uber.com.au/"
+  ];
+
+  # X.org.
+  xorg = [
+    "https://xorg.freedesktop.org/releases/"
+    "https://ftp.x.org/archive/"
+  ];
+
+  # Apache mirrors (see http://www.apache.org/mirrors/).
+  apache = [
+    "https://www-eu.apache.org/dist/"
+    "https://www-us.apache.org/dist/"
+    "http://www.eu.apache.org/dist/"
+    "ftp://ftp.fu-berlin.de/unix/www/apache/"
+    "http://ftp.tudelft.nl/apache/"
+    "http://mirror.cc.columbia.edu/pub/software/apache/"
+    "https://www.apache.org/dist/"
+    "https://archive.apache.org/dist/" # fallback for old releases
+    "ftp://ftp.funet.fi/pub/mirrors/apache.org/"
+    "http://apache.cs.uu.nl/"
+    "http://apache.cs.utah.edu/"
+  ];
+
+  postgresql = [
+    "http://ftp.postgresql.org/pub/"
+    "ftp://ftp.postgresql.org/pub/"
+    "ftp://ftp-archives.postgresql.org/pub/"
+  ];
+
+  metalab = [
+    "ftp://mirrors.kernel.org/metalab/"
+    "ftp://ftp.gwdg.de/pub/linux/metalab/"
+    "ftp://ftp.xemacs.org/sites/metalab.unc.edu/"
+  ];
+
+  # Bioconductor mirrors (from http://bioconductor.org/about/mirrors)
+  # The commented-out ones don't seem to allow direct package downloads;
+  # they serve error messages that result in hash mismatches instead.
+  bioc = [
+    # http://bioc.ism.ac.jp/
+    # http://bioc.openanalytics.eu/
+    # http://bioconductor.fmrp.usp.br/
+    # http://mirror.aarnet.edu.au/pub/bioconductor/
+    # http://watson.nci.nih.gov/bioc_mirror/
+    "http://bioconductor.jp/packages/"
+    "http://bioconductor.statistik.tu-dortmund.de/packages/"
+    "http://mirrors.ebi.ac.uk/bioconductor/packages/"
+    "http://mirrors.ustc.edu.cn/bioc/"
+  ];
+
+  # Hackage mirrors
+  hackage = [
+    "http://hackage.haskell.org/package/"
+    "http://hdiff.luite.com/packages/archive/package/"
+    "http://hackage.fpcomplete.com/package/"
+    "http://objects-us-east-1.dream.io/hackage-mirror/package/"
+  ];
+
+  # Roy marples mirrors
+  roy = [
+    "http://roy.marples.name/downloads/"
+    "http://roy.aydogan.net/"
+    "http://cflags.cc/roy/"
+  ];
+
+  # Sage mirrors (http://www.sagemath.org/mirrors.html)
+  sageupstream = [
+    # Africa
+    "http://sagemath.polytechnic.edu.na/spkg/upstream/"
+    "ftp://ftp.sun.ac.za/pub/mirrors/www.sagemath.org/spkg/upstream/"
+    "http://sagemath.mirror.ac.za/spkg/upstream/"
+    "https://ftp.leg.uct.ac.za/pub/packages/sage/spkg/upstream/"
+    "http://mirror.ufs.ac.za/sagemath/spkg/upstream/"
+
+    # America, North
+    "http://mirrors-usa.go-parts.com/sage/sagemath/spkg/upstream/"
+    "http://mirrors.mit.edu/sage/spkg/upstream/"
+    "http://www.cecm.sfu.ca/sage/spkg/upstream/"
+    "http://files.sagemath.org/spkg/upstream/"
+    "http://mirror.clibre.uqam.ca/sage/spkg/upstream/"
+    "https://mirrors.xmission.com/sage/spkg/upstream/"
+
+    # America, South
+    "http://sagemath.c3sl.ufpr.br/spkg/upstream/"
+    "http://linorg.usp.br/sage/spkg/upstream"
+
+    # Asia
+    "http://sage.asis.io/spkg/upstream/"
+    "http://mirror.hust.edu.cn/sagemath/spkg/upstream/"
+    "https://ftp.iitm.ac.in/sage/spkg/upstream/"
+    "http://ftp.kaist.ac.kr/sage/spkg/upstream/"
+    "http://ftp.riken.jp/sagemath/spkg/upstream/"
+    "https://mirrors.tuna.tsinghua.edu.cn/sagemath/spkg/upstream/"
+    "https://mirrors.ustc.edu.cn/sagemath/spkg/upstream/"
+    "http://ftp.tsukuba.wide.ad.jp/software/sage/spkg/upstream/"
+    "http://ftp.yz.yamagata-u.ac.jp/pub/math/sage/spkg/upstream/"
+    "https://mirror.yandex.ru/mirrors/sage.math.washington.edu/spkg/upstream/"
+
+    # Australia
+    "http://echidna.maths.usyd.edu.au/sage/spkg/upstream/"
+
+    # Europe
+    "http://sage.mirror.garr.it/mirrors/sage/spkg/upstream/"
+    "http://sunsite.rediris.es/mirror/sagemath/spkg/upstream/"
+    "http://mirror.switch.ch/mirror/sagemath/spkg/upstream/"
+    "http://mirrors.fe.up.pt/pub/sage/spkg/upstream/"
+    "http://www-ftp.lip6.fr/pub/math/sagemath/spkg/upstream/"
+    "http://ftp.ntua.gr/pub/sagemath/spkg/upstream/"
+  ];
+
+  # MySQL mirrors
+  mysql = [
+    "http://cdn.mysql.com/Downloads/"
+  ];
+
+  # OpenBSD mirrors
+  openbsd = [
+    "http://ftp.openbsd.org/pub/OpenBSD/"
+    "ftp://ftp.nluug.nl/pub/OpenBSD/"
+    "ftp://ftp-stud.fht-esslingen.de/pub/OpenBSD/"
+    "ftp://ftp.halifax.rwth-aachen.de/pub/OpenBSD/"
+    "ftp://mirror.switch.ch/pub/OpenBSD/"
+  ];
+
+  # Steam Runtime mirrors
+  steamrt = [
+    "http://repo.steampowered.com/steamrt/"
+    "https://public.abbradar.moe/steamrt/"
+  ];
+
+  # Python PyPI mirrors
+  pypi = [
+    "https://files.pythonhosted.org/packages/source/"
+    # pypi.io is a more semantic link, but at the moment it reaches
+    # files.pythonhosted.org via two redirects
+    "https://pypi.io/packages/source/"
+  ];
+
+  # Python Test-PyPI mirror
+  testpypi = [
+    "https://test.pypi.io/packages/source/"
+  ];
+
+  # Mozilla projects.
+  mozilla = [
+    "http://download.cdn.mozilla.net/pub/mozilla.org/"
+    "https://archive.mozilla.org/pub/"
+  ];
+
+  # Maven Central
+  maven = [
+    "https://repo1.maven.org/maven2/"
+  ];
+
+  # Alsa Project
+  alsa = [
+    "https://www.alsa-project.org/files/pub/"
+    "ftp://ftp.alsa-project.org/pub/"
+    "http://alsa.cybermirror.org/"
+    "http://www.mirrorservice.org/sites/ftp.alsa-project.org/pub/"
+    "http://alsa.mirror.fr/"
+  ];
+}
diff --git a/nixpkgs/pkgs/build-support/fetchurl/write-mirror-list.sh b/nixpkgs/pkgs/build-support/fetchurl/write-mirror-list.sh
new file mode 100644
index 000000000000..2dabd2e722be
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchurl/write-mirror-list.sh
@@ -0,0 +1,4 @@
+source $stdenv/setup
+
+# !!! this is kinda hacky.
+set | grep -E '^[a-zA-Z]+=.*://' > $out
diff --git a/nixpkgs/pkgs/build-support/fetchzip/default.nix b/nixpkgs/pkgs/build-support/fetchzip/default.nix
new file mode 100644
index 000000000000..cde4d4f579f4
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/fetchzip/default.nix
@@ -0,0 +1,62 @@
+# This function downloads and unpacks an archive file, such as a zip
+# or tar file. This is primarily useful for dynamically generated
+# archives, such as GitHub's /archive URLs, where the unpacked content
+# of the zip file doesn't change, but the zip file itself may
+# (e.g. due to minor changes in the compression algorithm, or changes
+# in timestamps).
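+#
+# A minimal usage sketch (URL and hash are hypothetical):
+#
+#   fetchzip {
+#     url = "https://github.com/example/project/archive/v1.0.zip";
+#     sha256 = "0000000000000000000000000000000000000000000000000000";
+#   }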
+
+{ lib, fetchurl, unzip }:
+
+{ # Optionally move the contents of the unpacked tree up one level.
+  stripRoot ? true
+, url ? ""
+, urls ? []
+, extraPostFetch ? ""
+, name ? "source"
+, ... } @ args:
+
+(fetchurl (let
+  basename = baseNameOf (if url != "" then url else builtins.head urls);
+in {
+  inherit name;
+
+  recursiveHash = true;
+
+  downloadToTemp = true;
+
+  postFetch =
+    ''
+      unpackDir="$TMPDIR/unpack"
+      mkdir "$unpackDir"
+      cd "$unpackDir"
+
+      renamed="$TMPDIR/${basename}"
+      mv "$downloadedFile" "$renamed"
+      unpackFile "$renamed"
+    ''
+    + (if stripRoot then ''
+      if [ $(ls "$unpackDir" | wc -l) != 1 ]; then
+        echo "error: zip file must contain a single file or directory."
+        echo "hint: Pass stripRoot=false; to fetchzip to assume flat list of files."
+        exit 1
+      fi
+      fn=$(cd "$unpackDir" && echo *)
+      if [ -f "$unpackDir/$fn" ]; then
+        mkdir $out
+      fi
+      mv "$unpackDir/$fn" "$out"
+    '' else ''
+      mv "$unpackDir" "$out"
+    '')
+    + ''
+      ${extraPostFetch}
+    ''
+    # Remove non-owner write permissions
+    # Fixes https://github.com/NixOS/nixpkgs/issues/38649
+    + ''
+      chmod 755 "$out"
+    '';
+} // removeAttrs args [ "stripRoot" "extraPostFetch" ])).overrideAttrs (x: {
+  # Hackety-hack: we actually need unzip hooks, too
+  nativeBuildInputs = x.nativeBuildInputs ++ [ unzip ];
+})
diff --git a/nixpkgs/pkgs/build-support/go/garble.nix b/nixpkgs/pkgs/build-support/go/garble.nix
new file mode 100644
index 000000000000..da1e3152ba4c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/go/garble.nix
@@ -0,0 +1,34 @@
+{ stdenv
+, buildGoModule
+, fetchFromGitHub
+, lib
+}:
+buildGoModule rec {
+  pname = "garble";
+  version = "20200107";
+
+  src = fetchFromGitHub {
+    owner = "burrowers";
+    repo = pname;
+    rev = "835f4aadf321521acf06aac4d5068473dc4b2ac1";
+    sha256 = "sha256-NodsVHRll2YZoxrhmniJvelQOStG82u3kJyc0t8OXD8=";
+  };
+
+  vendorSha256 = "sha256-x2fk2QmZDK2yjyfYdK7x+sQjvt7tuggmm8ieVjsNKek=";
+
+  preBuild = ''
+    # https://github.com/burrowers/garble/issues/184
+    substituteInPlace testdata/scripts/tiny.txt \
+      --replace "{6,8}" "{4,8}"
+  '' + lib.optionalString (!stdenv.isx86_64) ''
+    # The test assumes amd64 assembly
+    rm testdata/scripts/asm.txt
+  '';
+
+  meta = {
+    description = "Obfuscate Go code by wrapping the Go toolchain";
+    homepage = "https://github.com/burrowers/garble/";
+    maintainers = with lib.maintainers; [ davhau ];
+    license = lib.licenses.bsd3;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/icon-conv-tools/bin/extractWinRscIconsToStdFreeDesktopDir.sh b/nixpkgs/pkgs/build-support/icon-conv-tools/bin/extractWinRscIconsToStdFreeDesktopDir.sh
new file mode 100755
index 000000000000..994adbd91dae
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/icon-conv-tools/bin/extractWinRscIconsToStdFreeDesktopDir.sh
@@ -0,0 +1,74 @@
+#!/bin/sh
+
+# The file from which to extract *.ico files or a particular *.ico file.
+# (e.g.: './KeePass.exe', './myLibrary.dll', './my/path/to/app.ico').
+# As you may have noticed, the utility can extract icons from a Windows
+# executable or DLL.
+rscFile=$1
+
+# A regexp that can extract the image size from the file name. Because we
+# use 'icotool', this value should usually be set to something like
+# '[^\.]+\.exe_[0-9]+_[0-9]+_[0-9]+_[0-9]+_([0-9]+x[0-9]+)x[0-9]+\.png'.
+# A regexp may be written at some point that relegates this to
+# an implementation detail.
+sizeRegex=$2
+
+# A regexp replace expression that will be used with 'sizeRegex' to create
+# a proper size directory (e.g.: '48x48'). Usually this is left to '\1'.
+sizeReplaceExp=$3
+
+# A regexp that can extract the name of the target image from the file name
+# of the image (usually png) extracted from the *.ico file(s). A good
+# default is '([^\.]+).+' which gets the basename without extension.
+nameRegex=$4
+
+# A regexp replace expression that will be used alongside 'nameRegex' to create
+# an icon file name. Note that you usually put your icon name here directly
+# without any extension (e.g.: 'my-app'). But in case you've got something
+# fancy, it will usually be '\1'.
+nameReplaceExp=$5
+
+# The output directory where the extracted icons will be installed, e.g.:
+# out=./myOut
+out=$6
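+
+# Example invocation (hypothetical file and icon names):
+#   extractWinRscIconsToStdFreeDesktopDir.sh ./KeePass.exe \
+#     '[^\.]+\.exe_[0-9]+_[0-9]+_[0-9]+_[0-9]+_([0-9]+x[0-9]+)x[0-9]+\.png' \
+#     '\1' '([^\.]+).+' my-app ./myOut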
+
+# An optional temp dir.
+if [ "" != "$7" ]; then
+  tmp=$7
+  isOwnerOfTmpDir=false
+else
+  tmp=`mktemp -d`
+  isOwnerOfTmpDir=true
+fi
+
+rm -rf $tmp/png $tmp/ico
+mkdir -p $tmp/png $tmp/ico
+
+# Extract the resource file's extension.
+rscFileExt=`echo "$rscFile" | sed -re 's/.+\.(.+)$/\1/'`
+
+if [ "ico" = "$rscFileExt" ]; then
+  cp -p $rscFile $tmp/ico
+else
+  wrestool -x --output=$tmp/ico -t14 $rscFile
+fi
+
+icotool --icon -x --palette-size=0 -o $tmp/png $tmp/ico/*.ico
+
+mkdir -p $out
+
+for i in $tmp/png/*.png; do
+  fn=`basename "$i"`
+  size=$(echo $fn | sed -re 's/'${sizeRegex}'/'${sizeReplaceExp}'/')
+  name=$(echo $fn | sed -re 's/'${nameRegex}'/'${nameReplaceExp}'/')
+  targetDir=$out/share/icons/hicolor/$size/apps
+  targetFile=$targetDir/$name.png
+  mkdir -p $targetDir
+  mv $i $targetFile
+done
+
+rm -rf "$tmp/png" "$tmp/ico"
+
+if $isOwnerOfTmpDir; then
+  rm -rf "$tmp"
+fi
diff --git a/nixpkgs/pkgs/build-support/icon-conv-tools/bin/icoFileToHiColorTheme b/nixpkgs/pkgs/build-support/icon-conv-tools/bin/icoFileToHiColorTheme
new file mode 100755
index 000000000000..192f3bb54c29
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/icon-conv-tools/bin/icoFileToHiColorTheme
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+SCRIPT_DIR=`cd "$(dirname $0)" && pwd`
+
+# The '*.ico' file that needs to be converted (e.g.: "./my/path/to/file.ico").
+icoFile="$1"
+
+# The desired name of created icon files without extension. (e.g.: "my-app").
+targetIconName="$2"
+
+# The output directory where the free desktop hierarchy will be created.
+# (e.g.: "./path/to/my/out" or usually in nix "$out"). Note that the
+# whole directory hierarchy to the icon will be created in the specified
+# output directory (e.g.: "$out/share/icons/hicolor/48x48/apps/my-app.png").
+out="$3"
+
+# An optional temp directory location (e.g.: ./tmp). If not specified
+# a random '/tmp' directory will be created.
+tmp="$4"
+
+$SCRIPT_DIR/extractWinRscIconsToStdFreeDesktopDir.sh \
+  "$icoFile" \
+  '[^\.]+_[0-9]+_([0-9]+x[0-9]+)x[0-9]+\.png' \
+  '\1' \
+  '([^\.]+).+' \
+  "$targetIconName" \
+  "$out" \
+  "$tmp"
diff --git a/nixpkgs/pkgs/build-support/icon-conv-tools/default.nix b/nixpkgs/pkgs/build-support/icon-conv-tools/default.nix
new file mode 100644
index 000000000000..79d3838e6884
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/icon-conv-tools/default.nix
@@ -0,0 +1,31 @@
+{ lib, stdenv, icoutils }:
+
+stdenv.mkDerivation {
+  name = "icon-conv-tools-0.0.0";
+
+  src = ./bin;
+
+  buildInputs = [ icoutils ];
+
+  patchPhase = ''
+    substituteInPlace extractWinRscIconsToStdFreeDesktopDir.sh \
+      --replace "icotool" "${icoutils}/bin/icotool" \
+      --replace "wrestool" "${icoutils}/bin/wrestool"
+  '';
+
+  buildPhase = ''
+    mkdir -p "$out/bin"
+    cp -p * "$out/bin"
+  '';
+
+  installPhase = "true";
+
+  dontPatchELF = true;
+  dontStrip = true;
+
+  meta = with lib; {
+    description = "Tools for icon conversion specific to nix package manager";
+    maintainers = with maintainers; [ jraygauthier ];
+    platforms = platforms.linux;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/install-shell-files/default.nix b/nixpkgs/pkgs/build-support/install-shell-files/default.nix
new file mode 100644
index 000000000000..d50661ddc65d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/install-shell-files/default.nix
@@ -0,0 +1,12 @@
+{ makeSetupHook, tests }:
+
+# See the header comment in ../setup-hooks/install-shell-files.sh for example usage.
+let
+  setupHook = makeSetupHook { name = "install-shell-files"; } ../setup-hooks/install-shell-files.sh;
+in
+
+setupHook.overrideAttrs (oldAttrs: {
+  passthru = (oldAttrs.passthru or {}) // {
+    tests = tests.install-shell-files;
+  };
+})
diff --git a/nixpkgs/pkgs/build-support/kernel/initrd-compressor-meta.nix b/nixpkgs/pkgs/build-support/kernel/initrd-compressor-meta.nix
new file mode 100644
index 000000000000..443e599a239e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/kernel/initrd-compressor-meta.nix
@@ -0,0 +1,53 @@
+rec {
+  cat = {
+    executable = pkgs: "cat";
+    ubootName = "none";
+    extension = ".cpio";
+  };
+  gzip = {
+    executable = pkgs: "${pkgs.gzip}/bin/gzip";
+    defaultArgs = ["-9n"];
+    ubootName = "gzip";
+    extension = ".gz";
+  };
+  bzip2 = {
+    executable = pkgs: "${pkgs.bzip2}/bin/bzip2";
+    ubootName = "bzip2";
+    extension = ".bz2";
+  };
+  xz = {
+    executable = pkgs: "${pkgs.xz}/bin/xz";
+    defaultArgs = ["--check=crc32" "--lzma2=dict=512KiB"];
+    extension = ".xz";
+  };
+  lzma = {
+    executable = pkgs: "${pkgs.xz}/bin/lzma";
+    defaultArgs = ["--check=crc32" "--lzma1=dict=512KiB"];
+    ubootName = "lzma";
+    extension = ".lzma";
+  };
+  lz4 = {
+    executable = pkgs: "${pkgs.lz4}/bin/lz4";
+    defaultArgs = ["-l"];
+    ubootName = "lz4";
+    extension = ".lz4";
+  };
+  lzop = {
+    executable = pkgs: "${pkgs.lzop}/bin/lzop";
+    ubootName = "lzo";
+    extension = ".lzo";
+  };
+  zstd = {
+    executable = pkgs: "${pkgs.zstd}/bin/zstd";
+    defaultArgs = ["-10"];
+    ubootName = "zstd";
+    extension = ".zst";
+  };
+  pigz = gzip // {
+    executable = pkgs: "${pkgs.pigz}/bin/pigz";
+  };
+  pixz = xz // {
+    executable = pkgs: "${pkgs.pixz}/bin/pixz";
+    defaultArgs = [];
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd.nix b/nixpkgs/pkgs/build-support/kernel/make-initrd.nix
new file mode 100644
index 000000000000..83d3bb65baec
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/kernel/make-initrd.nix
@@ -0,0 +1,113 @@
+# Create an initramfs containing the closure of the specified
+# file system objects.  An initramfs is used during the initial
+# stages of booting a Linux system.  It is loaded by the boot loader
+# along with the kernel image.  It's supposed to contain everything
+# (such as kernel modules) necessary to allow us to mount the root
+# file system.  Once the root file system is mounted, the `real' boot
+# script can be called.
+#
+# An initramfs is a cpio archive, and may be compressed with a number
+# of algorithms.
+let
+  # Some metadata on various compression programs, relevant to naming
+  # the initramfs file and, if applicable, generating a u-boot image
+  # from it.
+  compressors = import ./initrd-compressor-meta.nix;
+  # Get the basename of the actual compression program from the whole
+  # compression command, for the purpose of guessing the u-boot
+  # compression type and filename extension.
+  compressorName = fullCommand: builtins.elemAt (builtins.match "([^ ]*/)?([^ ]+).*" fullCommand) 1;
+in
+{ stdenvNoCC, perl, cpio, ubootTools, lib, pkgsBuildHost
+# Name of the derivation (not of the resulting file!)
+, name ? "initrd"
+
+# Program used to compress the cpio archive; use "cat" for no compression.
+# This can also be a function which takes a package set and returns the path to the compressor,
+# such as `pkgs: "${pkgs.lzop}/bin/lzop"`.
+, compressor ? "gzip"
+, _compressorFunction ?
+  if lib.isFunction compressor then compressor
+  else if ! builtins.hasContext compressor && builtins.hasAttr compressor compressors then compressors.${compressor}.executable
+  else _: compressor
+, _compressorExecutable ? _compressorFunction pkgsBuildHost
+, _compressorName ? compressorName _compressorExecutable
+, _compressorMeta ? compressors.${_compressorName} or {}
+
+# List of arguments to pass to the compressor program, or null to use its defaults
+, compressorArgs ? null
+, _compressorArgsReal ? if compressorArgs == null then _compressorMeta.defaultArgs or [] else compressorArgs
+
+# Filename extension to use for the compressed initramfs. This is
+# included for clarity, but $out/initrd will always be a symlink to
+# the final image.
+# If this isn't guessed, you may want to complete the metadata above and send a PR :)
+, extension ? _compressorMeta.extension or
+    (throw "Unrecognised compressor ${_compressorName}, please specify filename extension")
+
+# List of { object = path_or_derivation; symlink = "/path"; }
+# The paths are copied into the initramfs in their nix store path
+# form, then linked at the root according to `symlink`.
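+# E.g. (hypothetical): contents = [ { object = initScript; symlink = "/init"; } ];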
+, contents
+
+# List of uncompressed cpio files to prepend to the initramfs. This
+# can be used to add files in specified paths without them becoming
+# symlinks to store paths.
+, prepend ? []
+
+# Whether to wrap the initramfs in a u-boot image.
+, makeUInitrd ? stdenvNoCC.hostPlatform.linux-kernel.target == "uImage"
+
+# If generating a u-boot image, the architecture to use. The default
+# guess may not align with u-boot's nomenclature correctly, so it can
+# be overridden.
+# See https://gitlab.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L81-106 for a list.
+, uInitrdArch ? stdenvNoCC.hostPlatform.linuxArch
+
+# The name of the compression, as recognised by u-boot.
+# See https://gitlab.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L195-204 for a list.
+# If this isn't guessed, you may want to complete the metadata above and send a PR :)
+, uInitrdCompression ? _compressorMeta.ubootName or
+    (throw "Unrecognised compressor ${_compressorName}, please specify uInitrdCompression")
+}:
+let
+  # !!! Move this into a public lib function, it is probably useful for others
+  toValidStoreName = x: with builtins;
+    lib.concatStringsSep "-" (filter (x: !(isList x)) (split "[^a-zA-Z0-9_=.?-]+" x));
+
+in stdenvNoCC.mkDerivation rec {
+  inherit name makeUInitrd extension uInitrdArch prepend;
+
+  ${if makeUInitrd then "uinitrdCompression" else null} = uInitrdCompression;
+
+  builder = ./make-initrd.sh;
+
+  nativeBuildInputs = [ perl cpio ]
+    ++ lib.optional makeUInitrd ubootTools;
+
+  compress = "${_compressorExecutable} ${lib.escapeShellArgs _compressorArgsReal}";
+
+  # Pass the function through, for reuse in append-initrd-secrets. The
+  # function is used instead of the string, in order to support
+  # cross-compilation (append-initrd-secrets running on a different
+  # architecture than what the main initramfs is built on).
+  passthru = {
+    compressorExecutableFunction = _compressorFunction;
+    compressorArgs = _compressorArgsReal;
+  };
+
+  # !!! should use XML.
+  objects = map (x: x.object) contents;
+  symlinks = map (x: x.symlink) contents;
+  suffices = map (x: if x ? suffix then x.suffix else "none") contents;
+
+  # For obtaining the closure of `contents'.
+  # Note: we don't use closureInfo yet, as that won't build with nix-1.x.
+  # See #36268.
+  exportReferencesGraph =
+    lib.zipListsWith
+      (x: i: [("closure-${toValidStoreName (baseNameOf x.symlink)}-${toString i}") x.object])
+      contents
+      (lib.range 0 (lib.length contents - 1));
+  pathsFromGraph = ./paths-from-graph.pl;
+}
diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd.sh b/nixpkgs/pkgs/build-support/kernel/make-initrd.sh
new file mode 100644
index 000000000000..c0619ef14ae0
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/kernel/make-initrd.sh
@@ -0,0 +1,51 @@
+source $stdenv/setup
+
+set -o pipefail
+
+objects=($objects)
+symlinks=($symlinks)
+suffices=($suffices)
+
+mkdir root
+
+# Needed for splash_helper, which gets run before init.
+mkdir root/dev
+mkdir root/sys
+mkdir root/proc
+
+
+for ((n = 0; n < ${#objects[*]}; n++)); do
+    object=${objects[$n]}
+    symlink=${symlinks[$n]}
+    suffix=${suffices[$n]}
+    if test "$suffix" = none; then suffix=; fi
+
+    mkdir -p $(dirname root/$symlink)
+    ln -s $object$suffix root/$symlink
+done
+
+
+# Get the paths in the closure of `object'.
+storePaths=$(perl $pathsFromGraph closure-*)
+
+
+# Paths in cpio archives *must* be relative, otherwise the kernel
+# won't unpack 'em.
+(cd root && cp -prd --parents $storePaths .)
+
+
+# Put the closure in a gzipped cpio archive.
+mkdir -p $out
+for PREP in $prepend; do
+  cat $PREP >> $out/initrd$extension
+done
+(cd root && find * .[^.*] -exec touch -h -d '@1' '{}' +)
+(cd root && find * .[^.*] -print0 | sort -z | cpio -o -H newc -R +0:+0 --reproducible --null | eval -- $compress >> "$out/initrd")
+
+if [ -n "$makeUInitrd" ]; then
+    mkimage -A $uInitrdArch -O linux -T ramdisk -C "$uInitrdCompression" -d $out/initrd"$extension" $out/initrd.img
+    # Compatibility symlink
+    ln -s "initrd.img" "$out/initrd"
+else
+    ln -s "initrd" "$out/initrd$extension"
+fi
diff --git a/nixpkgs/pkgs/build-support/kernel/modules-closure.nix b/nixpkgs/pkgs/build-support/kernel/modules-closure.nix
new file mode 100644
index 000000000000..d82e279799ba
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/kernel/modules-closure.nix
@@ -0,0 +1,15 @@
+# Given a kernel build (with modules in $kernel/lib/modules/VERSION),
+# produce a module tree in $out/lib/modules/VERSION that contains only
+# the modules identified by `rootModules', plus their dependencies.
+# Also generate an appropriate modules.dep.
+
+{ stdenvNoCC, kernel, firmware, nukeReferences, rootModules
+, kmod, allowMissing ? false }:
+
+stdenvNoCC.mkDerivation {
+  name = kernel.name + "-shrunk";
+  builder = ./modules-closure.sh;
+  nativeBuildInputs = [ nukeReferences kmod ];
+  inherit kernel firmware rootModules allowMissing;
+  allowedReferences = ["out"];
+}
diff --git a/nixpkgs/pkgs/build-support/kernel/modules-closure.sh b/nixpkgs/pkgs/build-support/kernel/modules-closure.sh
new file mode 100644
index 000000000000..3b3a38ea1d33
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/kernel/modules-closure.sh
@@ -0,0 +1,97 @@
+source $stdenv/setup
+
+# When no modules are built, the $out/lib/modules directory will not
+# exist. Because the rest of the script assumes it does exist, we
+# handle this special case first.
+if ! test -d "$kernel/lib/modules"; then
+    if test -z "$rootModules" || test -n "$allowMissing"; then
+        mkdir -p "$out"
+        exit 0
+    else
+        echo "Required modules: $rootModules"
+        echo "Can not derive a closure of kernel modules because no modules were provided."
+        exit 1
+    fi
+fi
+
+version=$(cd $kernel/lib/modules && ls -d *)
+
+echo "kernel version is $version"
+
+# Determine the dependencies of each root module.
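+#
+# `modprobe --show-depends` prints lines such as (hypothetical paths)
+#   insmod /lib/modules/<version>/kernel/fs/foo.ko
+#   builtin bar
+# which the case statement below dissects.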
+mkdir -p $out/lib/modules/"$version"
+touch closure
+for module in $rootModules; do
+    echo "root module: $module"
+    modprobe --config no-config -d $kernel --set-version "$version" --show-depends "$module" \
+    | while read cmd module args; do
+        case "$cmd" in
+            builtin)
+                touch found
+                echo "$module" >>closure
+                echo "  builtin dependency: $module";;
+            insmod)
+                touch found
+                if ! test -e "$module"; then
+                    echo "  dependency not found: $module"
+                    exit 1
+                fi
+                target=$(echo "$module" | sed "s^$NIX_STORE.*/lib/modules/^$out/lib/modules/^")
+                if test -e "$target"; then
+                    echo "  dependency already copied: $module"
+                    continue
+                fi
+                echo "$module" >>closure
+                echo "  copying dependency: $module"
+                mkdir -p $(dirname $target)
+                cp "$module" "$target"
+                # If the kernel is compiled with coverage instrumentation, it
+                # contains the paths of the *.gcda coverage data output files
+                # (which it doesn't actually use...).  Get rid of them to prevent
+                # the whole kernel from being included in the initrd.
+                nuke-refs "$target"
+                echo "$target" >> $out/insmod-list;;
+             *)
+                echo "  unexpected modprobe output: $cmd $module"
+                exit 1;;
+        esac
+    done || test -n "$allowMissing"
+    if ! test -e found; then
+        echo "  not found"
+        if test -z "$allowMissing"; then
+            exit 1
+        fi
+    else
+        rm found
+    fi
+done
+
+mkdir -p $out/lib/firmware
+for module in $(cat closure); do
+    # for builtin modules, modinfo will reply with wrong output looking like:
+    #   $ modinfo -F firmware unix
+    #   name:           unix
+    #
+    # There is a pending attempt to fix this:
+    #   https://github.com/NixOS/nixpkgs/pull/96153
+    #   https://lore.kernel.org/linux-modules/20200823215433.j5gc5rnsmahpf43v@blumerang/T/#u
+    #
+    # For now, the workaround is just to filter the extraneous lines out
+    # of its output.
+    for i in $(modinfo -b $kernel --set-version "$version" -F firmware $module | grep -v '^name:'); do
+        mkdir -p "$out/lib/firmware/$(dirname "$i")"
+        echo "firmware for $module: $i"
+        cp "$firmware/lib/firmware/$i" "$out/lib/firmware/$i" 2>/dev/null \
+            || echo "WARNING: missing firmware $i for module $module"
+    done
+done
+
+# copy module ordering hints for depmod
+cp $kernel/lib/modules/"$version"/modules.order $out/lib/modules/"$version"/.
+cp $kernel/lib/modules/"$version"/modules.builtin $out/lib/modules/"$version"/.
+
+depmod -b $out -a $version
+
+# remove original hints from final derivation
+rm $out/lib/modules/"$version"/modules.order
+rm $out/lib/modules/"$version"/modules.builtin
diff --git a/nixpkgs/pkgs/build-support/kernel/paths-from-graph.pl b/nixpkgs/pkgs/build-support/kernel/paths-from-graph.pl
new file mode 100644
index 000000000000..1465b73fddb6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/kernel/paths-from-graph.pl
@@ -0,0 +1,68 @@
+# NOTE: this script is deprecated. Use closureInfo instead.
+
+# Parses a /nix/store/*-closure file and prints
+# various information.
+# By default, the nodes in the graph are printed to stdout.
+# If printRegistration is set, then the graph is written
+# as a registration file (for a manifest) in the
+# `nix-store --load-db' format.
+
+use strict;
+use File::Basename;
+
+my %storePaths;
+my %refs;
+
+# Each argument on the command line is a graph file.
+# The graph file contains line-triples and a variable
+# number of references:
+# <store-path>
+# <deriver>
+# <count>
+# <ref-#1>
+# ...
+# <ref-#count>
+foreach my $graph (@ARGV) {
+    open GRAPH, "<$graph" or die;
+
+    while (<GRAPH>) {
+        chomp;
+        my $storePath = "$_";
+        $storePaths{$storePath} = 1;
+
+        my $deriver = <GRAPH>; chomp $deriver;
+        my $count = <GRAPH>; chomp $count;
+
+        my @refs = ();
+        for (my $i = 0; $i < $count; ++$i) {
+            my $ref = <GRAPH>; chomp $ref;
+            push @refs, $ref;
+        }
+        $refs{$storePath} = \@refs;
+
+    }
+
+    close GRAPH;
+}
+
+
+if ($ENV{"printRegistration"} eq "1") {
+    # This is the format used by `nix-store --register-validity
+    # --hash-given' / `nix-store --load-db'.
+    foreach my $storePath (sort (keys %storePaths)) {
+        print "$storePath\n";
+        print "0000000000000000000000000000000000000000000000000000000000000000\n"; # !!! fix
+        print "0\n"; # !!! fix	
+        print "\n"; # don't care about preserving the deriver
+        print scalar(@{$refs{$storePath}}), "\n";
+        foreach my $ref (@{$refs{$storePath}}) {
+            print "$ref\n";
+        }
+    }
+}
+
+else {
+    foreach my $storePath (sort (keys %storePaths)) {
+        print "$storePath\n";
+    }
+}
diff --git a/nixpkgs/pkgs/build-support/libredirect/default.nix b/nixpkgs/pkgs/build-support/libredirect/default.nix
new file mode 100644
index 000000000000..70da5bf5b5fb
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/libredirect/default.nix
@@ -0,0 +1,58 @@
+{ stdenv, lib, coreutils }:
+
+stdenv.mkDerivation {
+  name = "libredirect-0";
+
+  unpackPhase = ''
+    cp ${./libredirect.c} libredirect.c
+    cp ${./test.c} test.c
+  '';
+
+  libName = "libredirect" + stdenv.targetPlatform.extensions.sharedLibrary;
+
+  outputs = ["out" "hook"];
+
+  buildPhase = ''
+    $CC -Wall -std=c99 -O3 -fPIC -ldl -shared \
+      ${lib.optionalString stdenv.isDarwin "-Wl,-install_name,$out/lib/$libName"} \
+      -o "$libName" \
+      libredirect.c
+
+    if [ -n "$doInstallCheck" ]; then
+      $CC -Wall -std=c99 -O3 test.c -o test
+    fi
+  '';
+
+  installPhase = ''
+    install -vD "$libName" "$out/lib/$libName"
+
+    mkdir -p "$hook/nix-support"
+    cat <<SETUP_HOOK > "$hook/nix-support/setup-hook"
+    ${if stdenv.isDarwin then ''
+    export DYLD_INSERT_LIBRARIES="$out/lib/$libName"
+    export DYLD_FORCE_FLAT_NAMESPACE=1
+    '' else ''
+    export LD_PRELOAD="$out/lib/$libName"
+    ''}
+    SETUP_HOOK
+  '';
+
+  doInstallCheck = true;
+
+  installCheckPhase = ''
+    (
+      source "$hook/nix-support/setup-hook"
+      NIX_REDIRECTS="/foo/bar/test=${coreutils}/bin/true" ./test
+    )
+  '';
+
+  meta = with lib; {
+    platforms = platforms.unix;
+    description = "An LD_PRELOAD library to intercept and rewrite the paths in glibc calls";
+    longDescription = ''
+      libredirect is an LD_PRELOAD library to intercept and rewrite the paths in
+      glibc calls based on the value of $NIX_REDIRECTS, a colon-separated list
+      of path prefixes to be rewritten, e.g. "/src=/dst:/usr/=/nix/store/".
+    '';
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/libredirect/libredirect.c b/nixpkgs/pkgs/build-support/libredirect/libredirect.c
new file mode 100644
index 000000000000..dfa2978e9f44
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/libredirect/libredirect.c
@@ -0,0 +1,274 @@
+#define _GNU_SOURCE
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <dlfcn.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <string.h>
+#include <spawn.h>
+#include <dirent.h>
+
+#define MAX_REDIRECTS 128
+
+static int nrRedirects = 0;
+static char * from[MAX_REDIRECTS];
+static char * to[MAX_REDIRECTS];
+
+// FIXME: might run too late.
+static void init() __attribute__((constructor));
+
+static void init()
+{
+    char * spec = getenv("NIX_REDIRECTS");
+    if (!spec) return;
+
+    unsetenv("NIX_REDIRECTS");
+
+    char * spec2 = malloc(strlen(spec) + 1);
+    strcpy(spec2, spec);
+
+    char * pos = spec2, * eq;
+    while ((eq = strchr(pos, '='))) {
+        *eq = 0;
+        from[nrRedirects] = pos;
+        pos = eq + 1;
+        to[nrRedirects] = pos;
+        nrRedirects++;
+        if (nrRedirects == MAX_REDIRECTS) break;
+        char * end = strchr(pos, ':');
+        if (!end) break;
+        *end = 0;
+        pos = end + 1;
+    }
+
+}
+
+static const char * rewrite(const char * path, char * buf)
+{
+    if (path == NULL) return path;
+    for (int n = 0; n < nrRedirects; ++n) {
+        int len = strlen(from[n]);
+        if (strncmp(path, from[n], len) != 0) continue;
+        if (snprintf(buf, PATH_MAX, "%s%s", to[n], path + len) >= PATH_MAX)
+            abort();
+        return buf;
+    }
+
+    return path;
+}
+
+static int open_needs_mode(int flags)
+{
+#ifdef O_TMPFILE
+    return (flags & O_CREAT) || (flags & O_TMPFILE) == O_TMPFILE;
+#else
+    return flags & O_CREAT;
+#endif
+}
+
+/* The following set of Glibc library functions is very incomplete -
+   it contains only what we needed for programs in Nixpkgs. Just add
+   more functions as needed. */
+
+int open(const char * path, int flags, ...)
+{
+    int (*open_real) (const char *, int, mode_t) = dlsym(RTLD_NEXT, "open");
+    mode_t mode = 0;
+    if (open_needs_mode(flags)) {
+        va_list ap;
+        va_start(ap, flags);
+        mode = va_arg(ap, mode_t);
+        va_end(ap);
+    }
+    char buf[PATH_MAX];
+    return open_real(rewrite(path, buf), flags, mode);
+}
+
+int open64(const char * path, int flags, ...)
+{
+    int (*open64_real) (const char *, int, mode_t) = dlsym(RTLD_NEXT, "open64");
+    mode_t mode = 0;
+    if (open_needs_mode(flags)) {
+        va_list ap;
+        va_start(ap, flags);
+        mode = va_arg(ap, mode_t);
+        va_end(ap);
+    }
+    char buf[PATH_MAX];
+    return open64_real(rewrite(path, buf), flags, mode);
+}
+
+int openat(int dirfd, const char * path, int flags, ...)
+{
+    int (*openat_real) (int, const char *, int, mode_t) = dlsym(RTLD_NEXT, "openat");
+    mode_t mode = 0;
+    if (open_needs_mode(flags)) {
+        va_list ap;
+        va_start(ap, flags);
+        mode = va_arg(ap, mode_t);
+        va_end(ap);
+    }
+    char buf[PATH_MAX];
+    return openat_real(dirfd, rewrite(path, buf), flags, mode);
+}
+
+FILE * fopen(const char * path, const char * mode)
+{
+    FILE * (*fopen_real) (const char *, const char *) = dlsym(RTLD_NEXT, "fopen");
+    char buf[PATH_MAX];
+    return fopen_real(rewrite(path, buf), mode);
+}
+
+FILE * __nss_files_fopen(const char * path)
+{
+    FILE * (*__nss_files_fopen_real) (const char *) = dlsym(RTLD_NEXT, "__nss_files_fopen");
+    char buf[PATH_MAX];
+    return __nss_files_fopen_real(rewrite(path, buf));
+}
+
+FILE * fopen64(const char * path, const char * mode)
+{
+    FILE * (*fopen64_real) (const char *, const char *) = dlsym(RTLD_NEXT, "fopen64");
+    char buf[PATH_MAX];
+    return fopen64_real(rewrite(path, buf), mode);
+}
+
+int __xstat(int ver, const char * path, struct stat * st)
+{
+    int (*__xstat_real) (int ver, const char *, struct stat *) = dlsym(RTLD_NEXT, "__xstat");
+    char buf[PATH_MAX];
+    return __xstat_real(ver, rewrite(path, buf), st);
+}
+
+int __xstat64(int ver, const char * path, struct stat64 * st)
+{
+    int (*__xstat64_real) (int ver, const char *, struct stat64 *) = dlsym(RTLD_NEXT, "__xstat64");
+    char buf[PATH_MAX];
+    return __xstat64_real(ver, rewrite(path, buf), st);
+}
+
+int stat(const char * path, struct stat * st)
+{
+    int (*__stat_real) (const char *, struct stat *) = dlsym(RTLD_NEXT, "stat");
+    char buf[PATH_MAX];
+    return __stat_real(rewrite(path, buf), st);
+}
+
+int access(const char * path, int mode)
+{
+    int (*access_real) (const char *, int mode) = dlsym(RTLD_NEXT, "access");
+    char buf[PATH_MAX];
+    return access_real(rewrite(path, buf), mode);
+}
+
+int posix_spawn(pid_t * pid, const char * path,
+    const posix_spawn_file_actions_t * file_actions,
+    const posix_spawnattr_t * attrp,
+    char * const argv[], char * const envp[])
+{
+    int (*posix_spawn_real) (pid_t *, const char *,
+        const posix_spawn_file_actions_t *,
+        const posix_spawnattr_t *,
+        char * const argv[], char * const envp[]) = dlsym(RTLD_NEXT, "posix_spawn");
+    char buf[PATH_MAX];
+    return posix_spawn_real(pid, rewrite(path, buf), file_actions, attrp, argv, envp);
+}
+
+int posix_spawnp(pid_t * pid, const char * file,
+    const posix_spawn_file_actions_t * file_actions,
+    const posix_spawnattr_t * attrp,
+    char * const argv[], char * const envp[])
+{
+    int (*posix_spawnp_real) (pid_t *, const char *,
+        const posix_spawn_file_actions_t *,
+        const posix_spawnattr_t *,
+        char * const argv[], char * const envp[]) = dlsym(RTLD_NEXT, "posix_spawnp");
+    char buf[PATH_MAX];
+    return posix_spawnp_real(pid, rewrite(file, buf), file_actions, attrp, argv, envp);
+}
+
+int execv(const char * path, char * const argv[])
+{
+    int (*execv_real) (const char * path, char * const argv[]) = dlsym(RTLD_NEXT, "execv");
+    char buf[PATH_MAX];
+    return execv_real(rewrite(path, buf), argv);
+}
+
+int execvp(const char * path, char * const argv[])
+{
+    int (*_execvp) (const char *, char * const argv[]) = dlsym(RTLD_NEXT, "execvp");
+    char buf[PATH_MAX];
+    return _execvp(rewrite(path, buf), argv);
+}
+
+int execve(const char * path, char * const argv[], char * const envp[])
+{
+    int (*_execve) (const char *, char * const argv[], char * const envp[]) = dlsym(RTLD_NEXT, "execve");
+    char buf[PATH_MAX];
+    return _execve(rewrite(path, buf), argv, envp);
+}
+
+DIR * opendir(const char * path)
+{
+    char buf[PATH_MAX];
+    DIR * (*_opendir) (const char*) = dlsym(RTLD_NEXT, "opendir");
+
+    return _opendir(rewrite(path, buf));
+}
+
+#define SYSTEM_CMD_MAX 512
+
+char *replace_substring(char * source, char * buf, char * replace_string, char * start_ptr, char * suffix_ptr) {
+    char head[SYSTEM_CMD_MAX] = {0};
+    strncpy(head, source, start_ptr - source);
+
+    char tail[SYSTEM_CMD_MAX] = {0};
+    if(suffix_ptr < source + strlen(source)) {
+       strcpy(tail, suffix_ptr);
+    }
+
+    sprintf(buf, "%s%s%s", head, replace_string, tail);
+    return buf;
+}
+
+char *replace_string(char * buf, char * from, char * to) {
+    int num_matches = 0;
+    char * matches[SYSTEM_CMD_MAX];
+    int from_len = strlen(from);
+    for(int i=0; i<strlen(buf); i++){
+       char *cmp_start = buf + i;
+       if(strncmp(from, cmp_start, from_len) == 0){
+          matches[num_matches] = cmp_start;
+          num_matches++;
+       }
+    }
+    int len_diff = strlen(to) - strlen(from);
+    for(int n = 0; n < num_matches; n++) {
+       char replaced[SYSTEM_CMD_MAX];
+       replace_substring(buf, replaced, to, matches[n], matches[n]+from_len);
+       strcpy(buf, replaced);
+       for(int nn = n+1; nn < num_matches; nn++) {
+          matches[nn] += len_diff;
+       }
+    }
+    return buf;
+}
+
+void rewriteSystemCall(const char * command, char * buf) {
+    strcpy(buf, command);
+    for (int n = 0; n < nrRedirects; ++n) {
+       replace_string(buf, from[n], to[n]);
+    }
+}
+
+int system(const char *command)
+{
+    int (*_system) (const char*) = dlsym(RTLD_NEXT, "system");
+
+    char newCommand[SYSTEM_CMD_MAX];
+    rewriteSystemCall(command, newCommand);
+    return _system(newCommand);
+}
diff --git a/nixpkgs/pkgs/build-support/libredirect/test.c b/nixpkgs/pkgs/build-support/libredirect/test.c
new file mode 100644
index 000000000000..722d1303771c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/libredirect/test.c
@@ -0,0 +1,65 @@
+#include <assert.h>
+#include <fcntl.h>
+#include <spawn.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+
+#define TESTPATH "/foo/bar/test"
+
+extern char **environ;
+
+void test_spawn(void) {
+    pid_t pid;
+    int ret;
+    posix_spawn_file_actions_t file_actions;
+    char *argv[] = {"true", NULL};
+
+    assert(posix_spawn_file_actions_init(&file_actions) == 0);
+
+    ret = posix_spawn(&pid, TESTPATH, &file_actions, NULL, argv, environ);
+
+    assert(ret == 0);
+    assert(waitpid(pid, NULL, 0) != -1);
+}
+
+void test_execv(void) {
+    char *argv[] = {"true", NULL};
+    assert(execv(TESTPATH, argv) == 0);
+}
+
+void test_system(void) {
+    assert(system(TESTPATH) == 0);
+}
+
+int main(void)
+{
+    FILE *testfp;
+    int testfd;
+    struct stat testsb;
+
+    testfp = fopen(TESTPATH, "r");
+    assert(testfp != NULL);
+    fclose(testfp);
+
+    testfd = open(TESTPATH, O_RDONLY);
+    assert(testfd != -1);
+    close(testfd);
+
+    assert(access(TESTPATH, X_OK) == 0);
+
+    assert(stat(TESTPATH, &testsb) != -1);
+
+    test_spawn();
+    test_system();
+    test_execv();
+
+    /* If all goes well, this is never reached because test_execv() replaces
+     * the current process.
+     */
+    return 0;
+}
diff --git a/nixpkgs/pkgs/build-support/make-desktopitem/default.nix b/nixpkgs/pkgs/build-support/make-desktopitem/default.nix
new file mode 100644
index 000000000000..1491a3ad9119
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/make-desktopitem/default.nix
@@ -0,0 +1,67 @@
+{ lib, runCommandLocal, desktop-file-utils }:
+
+# See https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html
+{ name # The name of the desktop file
+, type ? "Application"
+, exec
+, icon ? null
+, comment ? null
+, terminal ? false
+, desktopName # The name of the application
+, genericName ? null
+, mimeType ? null
+, categories ? null
+, startupNotify ? null
+, noDisplay ? null
+, prefersNonDefaultGPU ? null
+, extraDesktopEntries ? { } # Extra key-value pairs to add to the [Desktop Entry] section. This may override other values
+, extraEntries ? "" # Extra configuration. Will be appended to the end of the file and may thus contain extra sections
+, fileValidation ? true # whether to validate resulting desktop file.
+}:
+let
+  # like builtins.toString, but null -> null instead of null -> ""
+  nullableToString = value:
+    if value == null then null
+    else if builtins.isBool value then lib.boolToString value
+    else builtins.toString value;
+
+  # The [Desktop entry] section of the desktop file, as attribute set.
+  mainSection = {
+    "Type" = toString type;
+    "Exec" = nullableToString exec;
+    "Icon" = nullableToString icon;
+    "Comment" = nullableToString comment;
+    "Terminal" = nullableToString terminal;
+    "Name" = toString desktopName;
+    "GenericName" = nullableToString genericName;
+    "MimeType" = nullableToString mimeType;
+    "Categories" = nullableToString categories;
+    "StartupNotify" = nullableToString startupNotify;
+    "NoDisplay" = nullableToString noDisplay;
+    "PrefersNonDefaultGPU" = nullableToString prefersNonDefaultGPU;
+  } // extraDesktopEntries;
+
+  # Map all entries to a list of lines
+  desktopFileStrings =
+    [ "[Desktop Entry]" ]
+    ++ builtins.filter
+      (v: v != null)
+      (lib.mapAttrsToList
+        (name: value: if value != null then "${name}=${value}" else null)
+        mainSection
+      )
+    ++ (if extraEntries == "" then [ ] else [ "${extraEntries}" ]);
+in
+runCommandLocal "${name}.desktop"
+{
+  nativeBuildInputs = [ desktop-file-utils ];
+}
+  (''
+    mkdir -p "$out/share/applications"
+    cat > "$out/share/applications/${name}.desktop" <<EOF
+    ${builtins.concatStringsSep "\n" desktopFileStrings}
+    EOF
+  '' + lib.optionalString fileValidation ''
+    echo "Running desktop-file validation"
+    desktop-file-validate "$out/share/applications/${name}.desktop"
+  '')
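For reference, a hedged usage sketch (assuming this file is wired up as
`makeDesktopItem`, its conventional nixpkgs name; the application is made up):

    makeDesktopItem {
      name = "my-editor";
      exec = "my-editor %U";
      icon = "my-editor";
      desktopName = "My Editor";
      genericName = "Text Editor";
      categories = "Utility;TextEditor;";
      mimeType = "text/plain;";
    }

This yields a derivation containing share/applications/my-editor.desktop,
checked by desktop-file-validate since fileValidation defaults to true.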
diff --git a/nixpkgs/pkgs/build-support/make-startupitem/default.nix b/nixpkgs/pkgs/build-support/make-startupitem/default.nix
new file mode 100644
index 000000000000..da1d4105c89f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/make-startupitem/default.nix
@@ -0,0 +1,35 @@
+# Given a package with a $name.desktop file, this makes a copy
+# as an autostart item.
+
+{stdenv, lib}:
+{ name            # name of the desktop file (without .desktop)
+, package         # package where the desktop file resides in
+, srcPrefix ? ""  # additional prefix that the desktop file may have in the 'package'
+, after ? null
+, condition ? null
+, phase ? "2"
+}:
+
+# the builder requires that
+#   $package/share/applications/$name.desktop
+# exists as a file.
+
+stdenv.mkDerivation {
+  name = "autostart-${name}";
+  priority = 5;
+
+  buildCommand = ''
+    mkdir -p $out/etc/xdg/autostart
+    target=${name}.desktop
+    cp ${package}/share/applications/${srcPrefix}${name}.desktop $target
+    chmod +rw $target
+    echo "X-KDE-autostart-phase=${phase}" >> $target
+    ${lib.optionalString (after != null) ''echo "${after}" >> $target''}
+    ${lib.optionalString (condition != null) ''echo "${condition}" >> $target''}
+    cp $target $out/etc/xdg/autostart
+  '';
+
+  # this will automatically put 'package' in the environment when you
+  # put its startup item in there.
+  propagatedBuildInputs = [ package ];
+}
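A hedged usage sketch (assuming the builder is exposed as
`makeAutostartItem`; the package is hypothetical):

    makeAutostartItem {
      name = "my-applet";   # expects ${myApplet}/share/applications/my-applet.desktop
      package = myApplet;
      phase = "1";
    }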
diff --git a/nixpkgs/pkgs/build-support/make-symlinks/builder.sh b/nixpkgs/pkgs/build-support/make-symlinks/builder.sh
new file mode 100644
index 000000000000..70f1d2ca1b25
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/make-symlinks/builder.sh
@@ -0,0 +1,9 @@
+source $stdenv/setup
+
+mkdir $out
+for file in $files
+do
+  subdir=$(dirname $file)
+  mkdir -p $out/$subdir
+  ln -s $dir/$file $out/$file
+done
diff --git a/nixpkgs/pkgs/build-support/make-symlinks/default.nix b/nixpkgs/pkgs/build-support/make-symlinks/default.nix
new file mode 100644
index 000000000000..30584aceb9f8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/make-symlinks/default.nix
@@ -0,0 +1,7 @@
+{name ? "", stdenv, dir, files}:
+
+stdenv.mkDerivation {
+  inherit dir files;
+  name = if name == "" then dir.name else name;
+  builder = ./builder.sh;
+}
diff --git a/nixpkgs/pkgs/build-support/mkshell/default.nix b/nixpkgs/pkgs/build-support/mkshell/default.nix
new file mode 100644
index 000000000000..7ca4cc23c1d5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/mkshell/default.nix
@@ -0,0 +1,49 @@
+{ lib, stdenv }:
+
+# A special kind of derivation that is only meant to be consumed by
+# nix-shell.
+{
+  # a list of packages to add to the shell environment
+  packages ? [ ]
+, # propagate all the inputs from the given derivations
+  inputsFrom ? [ ]
+, buildInputs ? [ ]
+, nativeBuildInputs ? [ ]
+, propagatedBuildInputs ? [ ]
+, propagatedNativeBuildInputs ? [ ]
+, ...
+}@attrs:
+let
+  mergeInputs = name: lib.concatLists (lib.catAttrs name
+    ([ attrs ] ++ inputsFrom));
+
+  rest = builtins.removeAttrs attrs [
+    "packages"
+    "inputsFrom"
+    "buildInputs"
+    "nativeBuildInputs"
+    "propagatedBuildInputs"
+    "propagatedNativeBuildInputs"
+    "shellHook"
+  ];
+in
+
+stdenv.mkDerivation ({
+  name = "nix-shell";
+  phases = [ "nobuildPhase" ];
+
+  buildInputs = mergeInputs "buildInputs";
+  nativeBuildInputs = packages ++ (mergeInputs "nativeBuildInputs");
+  propagatedBuildInputs = mergeInputs "propagatedBuildInputs";
+  propagatedNativeBuildInputs = mergeInputs "propagatedNativeBuildInputs";
+
+  shellHook = lib.concatStringsSep "\n" (lib.catAttrs "shellHook"
+    (lib.reverseList inputsFrom ++ [ attrs ]));
+
+  nobuildPhase = ''
+    echo
+    echo "This derivation is not meant to be built, aborting";
+    echo
+    exit 1
+  '';
+} // rest)
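A typical shell.nix consuming this builder (assuming the conventional
`pkgs.mkShell` attribute; package choices are illustrative):

    { pkgs ? import <nixpkgs> {} }:
    pkgs.mkShell {
      packages = [ pkgs.go ];        # tools to put on PATH
      inputsFrom = [ pkgs.hello ];   # reuse hello's build inputs and shellHook
      shellHook = ''
        echo "entering dev shell"
      '';
    }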
diff --git a/nixpkgs/pkgs/build-support/mono-dll-fixer/builder.sh b/nixpkgs/pkgs/build-support/mono-dll-fixer/builder.sh
new file mode 100644
index 000000000000..67abc465a9bd
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/mono-dll-fixer/builder.sh
@@ -0,0 +1,4 @@
+source $stdenv/setup
+
+substitute $dllFixer $out --subst-var-by perl $perl/bin/perl
+chmod +x $out
diff --git a/nixpkgs/pkgs/build-support/mono-dll-fixer/default.nix b/nixpkgs/pkgs/build-support/mono-dll-fixer/default.nix
new file mode 100644
index 000000000000..8f7d1e795d79
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/mono-dll-fixer/default.nix
@@ -0,0 +1,8 @@
+{stdenv, perl}:
+
+stdenv.mkDerivation {
+  name = "mono-dll-fixer";
+  builder = ./builder.sh;
+  dllFixer = ./dll-fixer.pl;
+  inherit perl;
+}
diff --git a/nixpkgs/pkgs/build-support/mono-dll-fixer/dll-fixer.pl b/nixpkgs/pkgs/build-support/mono-dll-fixer/dll-fixer.pl
new file mode 100644
index 000000000000..4a8b468692f0
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/mono-dll-fixer/dll-fixer.pl
@@ -0,0 +1,32 @@
+#! @perl@ -w
+
+use strict;
+
+my @paths = split ' ', $ENV{"ALL_INPUTS"};
+
+open IN, "<$ARGV[0]" or die;
+open OUT, ">$ARGV[0].tmp" or die;
+
+while (<IN>) {
+    # !!! should use a real XML library here.
+    if (!/<dllmap dll="(.*)" target="(.*)"\/>/) {
+        print OUT;
+        next;
+    }
+    my $dll = $1;
+    my $target = $2;
+
+    foreach my $path (@paths) {
+        my $fullPath = "$path/lib/$target";
+        if (-e "$fullPath") {
+            $target = $fullPath;
+            last;
+        }
+    }
+
+    print OUT "  <dllmap dll=\"$dll\" target=\"$target\"/>\n";
+}
+
+close IN;
+
+rename "$ARGV[0].tmp", "$ARGV[0]" or die "cannot rename $ARGV[0]";
diff --git a/nixpkgs/pkgs/build-support/nix-gitignore/default.nix b/nixpkgs/pkgs/build-support/nix-gitignore/default.nix
new file mode 100644
index 000000000000..5d7b945bf1b1
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/nix-gitignore/default.nix
@@ -0,0 +1,178 @@
+# https://github.com/siers/nix-gitignore/
+
+{ lib, runCommand }:
+
+# An interesting bit from gitignore(5):
+#   A slash followed by two consecutive asterisks then a slash matches
+#   zero or more directories. For example, "a/**/b" matches "a/b",
+#   "a/x/b", "a/x/y/b" and so on.
+
+with builtins;
+
+let
+  debug = a: trace a a;
+  last = l: elemAt l ((length l) - 1);
+
+  throwIfOldNix = let required = "2.0"; in
+    if compareVersions nixVersion required == -1
+    then throw "nix (v${nixVersion} =< v${required}) is too old for nix-gitignore"
+    else true;
+in rec {
+  # [["good/relative/source/file" true] ["bad.tmpfile" false]] -> root -> path
+  filterPattern = patterns: root:
+    (name: _type:
+      let
+        relPath = lib.removePrefix ((toString root) + "/") name;
+        matches = pair: (match (head pair) relPath) != null;
+        matched = map (pair: [(matches pair) (last pair)]) patterns;
+      in
+        last (last ([[true true]] ++ (filter head matched)))
+    );
+
+  # string -> [[regex bool]]
+  gitignoreToPatterns = gitignore:
+    assert throwIfOldNix;
+    let
+      # ignore -> bool
+      isComment = i: (match "^(#.*|$)" i) != null;
+
+      # ignore -> [ignore bool]
+      computeNegation = l:
+        let split = match "^(!?)(.*)" l;
+        in [(elemAt split 1) (head split == "!")];
+
+      # ignore -> regex
+      substWildcards =
+        let
+          special = "^$.+{}()";
+          escs = "\\*?";
+          splitString =
+            let recurse = str : [(substring 0 1 str)] ++
+                                 (if str == "" then [] else (recurse (substring 1 (stringLength(str)) str) ));
+            in str : recurse str;
+          chars = s: filter (c: c != "" && !isList c) (splitString s);
+          escape = s: map (c: "\\" + c) (chars s);
+        in
+          replaceStrings
+            ((chars special)  ++ (escape escs) ++ ["**/"    "**" "*"     "?"])
+            ((escape special) ++ (escape escs) ++ ["(.*/)?" ".*" "[^/]*" "[^/]"]);
+
+      # (regex -> regex) -> regex -> regex
+      mapAroundCharclass = f: r: # rl = regex or list
+        let slightFix = replaceStrings ["\\]"] ["]"];
+        in
+          concatStringsSep ""
+          (map (rl: if isList rl then slightFix (elemAt rl 0) else f rl)
+          (split "(\\[([^\\\\]|\\\\.)+])" r));
+
+      # regex -> regex
+      handleSlashPrefix = l:
+        let
+          split = (match "^(/?)(.*)" l);
+          findSlash = l: if (match ".+/.+" l) != null then "" else l;
+          hasSlash = mapAroundCharclass findSlash l != l;
+        in
+          (if (elemAt split 0) == "/" || hasSlash
+          then "^"
+          else "(^|.*/)"
+          ) + (elemAt split 1);
+
+      # regex -> regex
+      handleSlashSuffix = l:
+        let split = (match "^(.*)/$" l);
+        in if split != null then (elemAt split 0) + "($|/.*)" else l;
+
+      # (regex -> regex) -> [regex, bool] -> [regex, bool]
+      mapPat = f: l: [(f (head l)) (last l)];
+    in
+      map (l: # `l' for "line"
+        mapPat (l: handleSlashSuffix (handleSlashPrefix (mapAroundCharclass substWildcards l)))
+        (computeNegation l))
+      (filter (l: !isList l && !isComment l)
+      (split "\n" gitignore));
+
+  gitignoreFilter = ign: root: filterPattern (gitignoreToPatterns ign) root;
+
+  # string|[string|file] (→ [string|file] → [string]) -> string
+  gitignoreCompileIgnore = file_str_patterns: root:
+    let
+      onPath = f: a: if typeOf a == "path" then f a else a;
+      str_patterns = map (onPath readFile) (lib.toList file_str_patterns);
+    in concatStringsSep "\n" str_patterns;
+
+  gitignoreFilterPure = filter: patterns: root: name: type:
+    gitignoreFilter (gitignoreCompileIgnore patterns root) root name type
+    && filter name type;
+
+  # This is a very hacky way of programming this!
+  # A better way would be to reuse existing filtering by making multiple gitignore functions per each root.
+  # Then for each file find the set of roots with gitignores (and functions).
+  # This would make gitignoreFilterSource very different from gitignoreFilterPure.
+  # rootPath → gitignoresConcatenated
+  compileRecursiveGitignore = root:
+    let
+      dirOrIgnore = file: type: baseNameOf file == ".gitignore" || type == "directory";
+      ignores = builtins.filterSource dirOrIgnore root;
+    in readFile (
+      runCommand "${baseNameOf root}-recursive-gitignore" {} ''
+        cd ${ignores}
+
+        find -type f -exec sh -c '
+          rel="$(realpath --relative-to=. "$(dirname "$1")")/"
+          if [ "$rel" = "./" ]; then rel=""; fi
+
+          awk -v prefix="$rel" -v root="$1" -v top="$(test -z "$rel" && echo 1)" "
+            BEGIN { print \"# \"root }
+
+            /^!?[^\\/]+\/?$/ {
+              match(\$0, /^!?/, negation)
+              sub(/^!?/, \"\")
+
+              if (top) { middle = \"\" } else { middle = \"**/\" }
+
+              print negation[0] prefix middle \$0
+            }
+
+            /^!?(\\/|.*\\/.+$)/ {
+              match(\$0, /^!?/, negation)
+              sub(/^!?/, \"\")
+
+              if (!top) sub(/^\//, \"\")
+
+              print negation[0] prefix \$0
+            }
+
+            END { print \"\" }
+          " "$1"
+        ' sh {} \; > $out
+      '');
+
+  withGitignoreFile = patterns: root:
+    lib.toList patterns ++ [ ".git" ] ++ [(root + "/.gitignore")];
+
+  withRecursiveGitignoreFile = patterns: root:
+    lib.toList patterns ++ [ ".git" ] ++ [(compileRecursiveGitignore root)];
+
+  # filterSource derivatives
+
+  gitignoreFilterSourcePure = filter: patterns: root:
+    filterSource (gitignoreFilterPure filter patterns root) root;
+
+  gitignoreFilterSource = filter: patterns: root:
+    gitignoreFilterSourcePure filter (withGitignoreFile patterns root) root;
+
+  gitignoreFilterRecursiveSource = filter: patterns: root:
+    gitignoreFilterSourcePure filter (withRecursiveGitignoreFile patterns root) root;
+
+  # "Filter"-less alternatives
+
+  gitignoreSourcePure = gitignoreFilterSourcePure (_: _: true);
+  gitignoreSource = patterns: let type = typeOf patterns; in
+    if (type == "string" && pathExists patterns) || type == "path"
+    then throw
+      ("type error in gitignoreSource(patterns -> source -> path), "
+      + "use [] or \"\" if there are no additional patterns")
+    else gitignoreFilterSource (_: _: true) patterns;
+
+  gitignoreRecursiveSource = gitignoreFilterSourcePure (_: _: true);
+}
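A hedged usage sketch (assuming the set is exposed as `pkgs.nix-gitignore`,
its conventional nixpkgs name):

    stdenv.mkDerivation {
      name = "my-tool";
      # respect ./.gitignore and .git, plus one extra pattern
      src = pkgs.nix-gitignore.gitignoreSource [ "*.nix" ] ./.;
    }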
diff --git a/nixpkgs/pkgs/build-support/nuke-references/darwin-sign-fixup.sh b/nixpkgs/pkgs/build-support/nuke-references/darwin-sign-fixup.sh
new file mode 100644
index 000000000000..940c18e5a627
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/nuke-references/darwin-sign-fixup.sh
@@ -0,0 +1,5 @@
+# Fixup hook for nukeReferences, not stdenv
+
+source @signingUtils@
+
+fixupHooks+=(signIfRequired)
diff --git a/nixpkgs/pkgs/build-support/nuke-references/default.nix b/nixpkgs/pkgs/build-support/nuke-references/default.nix
new file mode 100644
index 000000000000..03f6fe53b544
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/nuke-references/default.nix
@@ -0,0 +1,37 @@
+# The program `nuke-refs' created by this derivation replaces all
+# references to the Nix store in the specified files by a non-existent
+# path (/nix/store/eeee...).  This is useful for getting rid of
+# dependencies that you know are not actually needed at runtime.
+
+{ lib, stdenvNoCC, perl, signingUtils, shell ? stdenvNoCC.shell }:
+
+let
+  stdenv = stdenvNoCC;
+
+  darwinCodeSign = stdenv.targetPlatform.isDarwin && stdenv.targetPlatform.isAarch64;
+in
+
+stdenvNoCC.mkDerivation {
+  name = "nuke-references";
+
+  dontUnpack = true;
+  dontConfigure = true;
+  dontBuild = true;
+
+  installPhase = ''
+    mkdir -p $out/bin
+    substituteAll ${./nuke-refs.sh} $out/bin/nuke-refs
+    chmod a+x $out/bin/nuke-refs
+  '';
+
+  postFixup = lib.optionalString darwinCodeSign ''
+    mkdir -p $out/nix-support
+    substituteAll ${./darwin-sign-fixup.sh} $out/nix-support/setup-hooks.sh
+  '';
+
+  # FIXME: get rid of perl dependency.
+  inherit perl;
+  inherit (builtins) storeDir;
+  shell = lib.getBin shell + (shell.shellPath or "");
+  signingUtils = if darwinCodeSign then signingUtils else null;
+}
diff --git a/nixpkgs/pkgs/build-support/nuke-references/nuke-refs.sh b/nixpkgs/pkgs/build-support/nuke-references/nuke-refs.sh
new file mode 100644
index 000000000000..21eb855cbad9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/nuke-references/nuke-refs.sh
@@ -0,0 +1,33 @@
+#! @shell@
+
+fixupHooks=()
+
+if [ -e @out@/nix-support/setup-hooks.sh ]; then
+    source @out@/nix-support/setup-hooks.sh
+fi
+
+excludes=""
+while getopts e: o; do
+    case "$o" in
+        e) storeId=$(echo "$OPTARG" | @perl@/bin/perl -ne "print \"\$1\" if m|^\Q@storeDir@\E/([a-z0-9]{32})-.*|")
+           if [ -z "$storeId" ]; then
+               echo "-e argument must be a Nix store path"
+               exit 1
+           fi
+           excludes="$excludes(?!$storeId)"
+        ;;
+    esac
+done
+shift $(($OPTIND-1))
+
+for i in "$@"; do
+    if test ! -L "$i" -a -f "$i"; then
+        cat "$i" | @perl@/bin/perl -pe "s|\Q@storeDir@\E/$excludes[a-z0-9]{32}-|@storeDir@/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-|g" > "$i.tmp"
+        if test -x "$i"; then chmod +x "$i.tmp"; fi
+        mv "$i.tmp" "$i"
+
+        for hook in "${fixupHooks[@]}"; do
+            eval "$hook" "$i"
+        done
+    fi
+done
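A hedged usage sketch (assuming the package is exposed as `nukeReferences`,
its conventional nixpkgs name; the derivation is illustrative):

    stdenv.mkDerivation {
      # ...
      nativeBuildInputs = [ nukeReferences ];
      postFixup = ''
        # zero out store references in the man pages, but keep those to $out itself
        nuke-refs -e $out $out/share/man/*/*
      '';
    }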
diff --git a/nixpkgs/pkgs/build-support/ocaml/default.nix b/nixpkgs/pkgs/build-support/ocaml/default.nix
new file mode 100644
index 000000000000..88ed3dfc2c2f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/ocaml/default.nix
@@ -0,0 +1,34 @@
+{ lib, stdenv, writeText, ocaml, findlib, ocamlbuild, camlp4 }:
+
+{ name, version, buildInputs ? [],
+  createFindlibDestdir ?  true,
+  dontStrip ? true,
+  minimumSupportedOcamlVersion ? null,
+  hasSharedObjects ? false,
+  setupHook ? null,
+  meta ? {}, ...
+}@args:
+let
+  defaultMeta = {
+    platforms = ocaml.meta.platforms or [];
+  };
+in
+  assert minimumSupportedOcamlVersion != null ->
+          lib.versionOlder minimumSupportedOcamlVersion ocaml.version;
+
+stdenv.mkDerivation (args // {
+  name = "ocaml-${name}-${version}";
+
+  buildInputs = [ ocaml findlib ocamlbuild camlp4 ] ++ buildInputs;
+
+  setupHook = if setupHook == null && hasSharedObjects
+  then writeText "setupHook.sh" ''
+    export CAML_LD_LIBRARY_PATH="''${CAML_LD_LIBRARY_PATH-}''${CAML_LD_LIBRARY_PATH:+:}''$1/lib/ocaml/${ocaml.version}/site-lib/${name}/"
+    ''
+  else setupHook;
+
+  inherit createFindlibDestdir;
+  inherit dontStrip;
+
+  meta = defaultMeta // meta;
+})
diff --git a/nixpkgs/pkgs/build-support/ocaml/dune.nix b/nixpkgs/pkgs/build-support/ocaml/dune.nix
new file mode 100644
index 000000000000..c049878d0131
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/ocaml/dune.nix
@@ -0,0 +1,40 @@
+{ lib, stdenv, ocaml, findlib, dune_1, dune_2 }:
+
+{ pname, version, buildInputs ? [], enableParallelBuilding ? true, ... }@args:
+
+let Dune = if args.useDune2 or false then dune_2 else dune_1; in
+
+if (args ? minimumOCamlVersion && ! lib.versionAtLeast ocaml.version args.minimumOCamlVersion) ||
+   (args ? minimalOCamlVersion && ! lib.versionAtLeast ocaml.version args.minimalOCamlVersion)
+then throw "${pname}-${version} is not available for OCaml ${ocaml.version}"
+else
+
+stdenv.mkDerivation ({
+
+  inherit enableParallelBuilding;
+
+  buildPhase = ''
+    runHook preBuild
+    dune build -p ${pname} ''${enableParallelBuilding:+-j $NIX_BUILD_CORES}
+    runHook postBuild
+  '';
+  checkPhase = ''
+    runHook preCheck
+    dune runtest -p ${pname} ''${enableParallelBuilding:+-j $NIX_BUILD_CORES}
+    runHook postCheck
+  '';
+  installPhase = ''
+    runHook preInstall
+    dune install --prefix $out --libdir $OCAMLFIND_DESTDIR ${pname}
+    runHook postInstall
+  '';
+
+} // (builtins.removeAttrs args [ "minimalOCamlVersion" ]) // {
+
+  name = "ocaml${ocaml.version}-${pname}-${version}";
+
+  buildInputs = [ ocaml Dune findlib ] ++ buildInputs;
+
+  meta = (args.meta or {}) // { platforms = args.meta.platforms or ocaml.meta.platforms; };
+
+})
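A hedged usage sketch (assuming this file is exposed as
`ocamlPackages.buildDunePackage`; the package is made up):

    buildDunePackage {
      pname = "mylib";
      version = "1.0.0";
      useDune2 = true;
      minimumOCamlVersion = "4.07";
      src = ./.;   # local sources, for the sketch
    }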
diff --git a/nixpkgs/pkgs/build-support/ocaml/oasis.nix b/nixpkgs/pkgs/build-support/ocaml/oasis.nix
new file mode 100644
index 000000000000..ee231a6e258c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/ocaml/oasis.nix
@@ -0,0 +1,44 @@
+{ lib, stdenv, ocaml_oasis, ocaml, findlib, ocamlbuild }:
+
+{ pname, version, buildInputs ? [], meta ? { platforms = ocaml.meta.platforms or []; },
+  minimumOCamlVersion ? null,
+  createFindlibDestdir ? true,
+  dontStrip ? true,
+  ...
+}@args:
+
+if args ? minimumOCamlVersion &&
+   ! lib.versionAtLeast ocaml.version args.minimumOCamlVersion
+then throw "${pname}-${version} is not available for OCaml ${ocaml.version}"
+else
+
+stdenv.mkDerivation (args // {
+  name = "ocaml${ocaml.version}-${pname}-${version}";
+
+  buildInputs = [ ocaml findlib ocamlbuild ocaml_oasis ] ++ buildInputs;
+
+  inherit createFindlibDestdir;
+  inherit dontStrip;
+
+  buildPhase = ''
+    runHook preBuild
+    oasis setup
+    ocaml setup.ml -configure --prefix $OCAMLFIND_DESTDIR --exec-prefix $out
+    ocaml setup.ml -build
+    runHook postBuild
+  '';
+
+  checkPhase = ''
+    runHook preCheck
+    ocaml setup.ml -test
+    runHook postCheck
+  '';
+
+  installPhase = ''
+    runHook preInstall
+    mkdir -p $out
+    ocaml setup.ml -install
+    runHook postInstall
+  '';
+
+})
diff --git a/nixpkgs/pkgs/build-support/oci-tools/default.nix b/nixpkgs/pkgs/build-support/oci-tools/default.nix
new file mode 100644
index 000000000000..18b238033ffd
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/oci-tools/default.nix
@@ -0,0 +1,78 @@
+{ lib, writeText, runCommand, writeReferencesToFile }:
+
+{
+  buildContainer =
+    { args
+    , mounts ? {}
+    , os ? "linux"
+    , arch ? "x86_64"
+    , readonly ? false
+    }:
+  let
+    sysMounts = {
+      "/proc" = {
+        type = "proc";
+        source = "proc";
+      };
+      "/dev" = {
+        type = "tmpfs";
+        source = "tmpfs";
+        options = [ "nosuid" "strictatime" "mode=755" "size=65536k" ];
+      };
+      "/dev/pts" = {
+        type = "devpts";
+        source = "devpts";
+        options = [ "nosuid" "noexec" "newinstance" "ptmxmode=0666" "mode=755" "gid=5" ];
+      };
+      "/dev/shm" = {
+        type = "tmpfs";
+        source = "shm";
+        options = [ "nosuid" "noexec" "nodev" "mode=1777" "size=65536k" ];
+      };
+      "/dev/mqueue" = {
+        type = "mqueue";
+        source = "mqueue";
+        options = [ "nosuid" "noexec" "nodev" ];
+      };
+      "/sys" = {
+        type = "sysfs";
+        source = "sysfs";
+        options = [ "nosuid" "noexec" "nodev" "ro" ];
+      };
+      "/sys/fs/cgroup" = {
+        type = "cgroup";
+        source = "cgroup";
+        options = [ "nosuid" "noexec" "nodev" "realatime" "ro" ];
+      };
+    };
+    config = writeText "config.json" (builtins.toJSON {
+      ociVersion = "1.0.0";
+      platform = {
+        inherit os arch;
+      };
+
+      linux = {
+        namespaces = map (type: { inherit type; }) [ "pid" "network" "mount" "ipc" "uts" ];
+      };
+
+      root = { path = "rootfs"; inherit readonly; };
+
+      process = {
+        inherit args;
+        user = { uid = 0; gid = 0; };
+        cwd = "/";
+      };
+
+      mounts = lib.mapAttrsToList (destination: { type, source, options ? null }: {
+        inherit destination type source options;
+      }) sysMounts;
+    });
+  in
+    runCommand "join" {} ''
+      set -o pipefail
+      mkdir -p $out/rootfs/{dev,proc,sys}
+      cp ${config} $out/config.json
+      xargs tar c < ${writeReferencesToFile args} | tar -xC $out/rootfs/
+    '';
+}
+
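A hedged usage sketch (assuming the set is wired up as `pkgs.ociTools`):

    ociTools.buildContainer {
      args = [ "${pkgs.bash}/bin/bash" ];   # process to run inside the container
      readonly = true;
    }

The result is a directory with config.json and a populated rootfs/, which an
OCI runtime such as runc can start directly.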
diff --git a/nixpkgs/pkgs/build-support/pkg-config-wrapper/add-flags.sh b/nixpkgs/pkgs/build-support/pkg-config-wrapper/add-flags.sh
new file mode 100644
index 000000000000..90aee712be63
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/pkg-config-wrapper/add-flags.sh
@@ -0,0 +1,12 @@
+# See cc-wrapper for comments.
+var_templates_list=(
+    PKG_CONFIG_PATH
+)
+
+accumulateRoles
+
+for var in "${var_templates_list[@]}"; do
+    mangleVarListGeneric ":" "$var" ${role_suffixes[@]+"${role_suffixes[@]}"}
+done
+
+export NIX_PKG_CONFIG_WRAPPER_FLAGS_SET_@suffixSalt@=1
diff --git a/nixpkgs/pkgs/build-support/pkg-config-wrapper/default.nix b/nixpkgs/pkgs/build-support/pkg-config-wrapper/default.nix
new file mode 100644
index 000000000000..bbc49d6728c9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/pkg-config-wrapper/default.nix
@@ -0,0 +1,127 @@
+# The wrapper script ensures variables like PKG_CONFIG_PATH and
+# PKG_CONFIG_PATH_FOR_BUILD work properly.
+
+{ stdenvNoCC
+, lib
+, buildPackages
+, pkg-config
+, baseBinName ? "pkg-config"
+, propagateDoc ? pkg-config != null && pkg-config ? man
+, extraPackages ? [], extraBuildCommands ? ""
+}:
+
+with lib;
+
+let
+  stdenv = stdenvNoCC;
+  inherit (stdenv) hostPlatform targetPlatform;
+
+  # Prefix for binaries. Customarily ends with a dash separator.
+  #
+  # TODO(@Ericson2314) Make unconditional, or optional but always true by
+  # default.
+  targetPrefix = lib.optionalString (targetPlatform != hostPlatform)
+                                        (targetPlatform.config + "-");
+
+  # See description in cc-wrapper.
+  suffixSalt = replaceStrings ["-" "."] ["_" "_"] targetPlatform.config;
+
+in
+
+stdenv.mkDerivation {
+  pname = targetPrefix + pkg-config.pname + "-wrapper";
+  inherit (pkg-config) version;
+
+  preferLocalBuild = true;
+
+  shell = getBin stdenvNoCC.shell + stdenvNoCC.shell.shellPath or "";
+
+  inherit targetPrefix suffixSalt baseBinName;
+
+  outputs = [ "out" ] ++ optionals propagateDoc ([ "man" ] ++ optional (pkg-config ? doc) "doc");
+
+  passthru = {
+    inherit pkg-config;
+  };
+
+  dontBuild = true;
+  dontConfigure = true;
+
+  unpackPhase = ''
+    src=$PWD
+  '';
+
+  installPhase =
+    ''
+      mkdir -p $out/bin $out/nix-support
+
+      wrap() {
+        local dst="$1"
+        local wrapper="$2"
+        export prog="$3"
+        substituteAll "$wrapper" "$out/bin/$dst"
+        chmod +x "$out/bin/$dst"
+      }
+
+      echo ${pkg-config} > $out/nix-support/orig-pkg-config
+
+      wrap ${targetPrefix}${baseBinName} ${./pkg-config-wrapper.sh} "${getBin pkg-config}/bin/${baseBinName}"
+    ''
+    # symlink in share for autoconf to find macros
+
+    # TODO(@Ericson2314): in the future just make the unwrapped pkg-config a
+    # propagated dep once we can rely on downstream deps coming first in
+    # search paths. (https://github.com/NixOS/nixpkgs/pull/31414 took a crack
+    # at this.)
+    + ''
+      ln -s ${pkg-config}/share $out/share
+    '';
+
+  strictDeps = true;
+
+  wrapperName = "PKG_CONFIG_WRAPPER";
+
+  setupHooks = [
+    ../setup-hooks/role.bash
+    ./setup-hook.sh
+  ];
+
+  postFixup =
+    ##
+    ## User env support
+    ##
+
+    # Propagate the underlying unwrapped pkg-config so that if you
+    # install the wrapper, you get anything else it might provide.
+    ''
+      printWords ${pkg-config} > $out/nix-support/propagated-user-env-packages
+    ''
+
+    ##
+    ## Man page and doc support
+    ##
+    + optionalString propagateDoc (''
+      ln -s ${pkg-config.man} $man
+    '' + optionalString (pkg-config ? doc) ''
+      ln -s ${pkg-config.doc} $doc
+    '')
+
+    + ''
+      substituteAll ${./add-flags.sh} $out/nix-support/add-flags.sh
+      substituteAll ${../wrapper-common/utils.bash} $out/nix-support/utils.bash
+    ''
+
+    ##
+    ## Extra custom steps
+    ##
+    + extraBuildCommands;
+
+  meta =
+    let pkg-config_ = if pkg-config != null then pkg-config else {}; in
+    (if pkg-config_ ? meta then removeAttrs pkg-config.meta ["priority"] else {}) //
+    { description =
+        lib.attrByPath ["meta" "description"] "pkg-config" pkg-config_
+        + " (wrapper script)";
+      priority = 10;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/pkg-config-wrapper/pkg-config-wrapper.sh b/nixpkgs/pkgs/build-support/pkg-config-wrapper/pkg-config-wrapper.sh
new file mode 100644
index 000000000000..f7c7429eb0b3
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/pkg-config-wrapper/pkg-config-wrapper.sh
@@ -0,0 +1,21 @@
+#! @shell@
+set -eu -o pipefail +o posix
+shopt -s nullglob
+
+if (( "${NIX_DEBUG:-0}" >= 7 )); then
+    set -x
+fi
+
+source @out@/nix-support/utils.bash
+
+if [ -z "${NIX_PKG_CONFIG_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then
+    source @out@/nix-support/add-flags.sh
+fi
+
+if (( ${#role_suffixes[@]} > 0 )); then
+    # replace the env var with the Nix-modified one
+    PKG_CONFIG_PATH=$PKG_CONFIG_PATH_@suffixSalt@ exec @prog@ "$@"
+else
+    # pkg-config isn't a bona fide dependency, so ignore the setup hook entirely
+    exec @prog@ "$@"
+fi
diff --git a/nixpkgs/pkgs/build-support/pkg-config-wrapper/setup-hook.sh b/nixpkgs/pkgs/build-support/pkg-config-wrapper/setup-hook.sh
new file mode 100644
index 000000000000..34f1a999a82e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/pkg-config-wrapper/setup-hook.sh
@@ -0,0 +1,29 @@
+# pkg-config Wrapper hygiene
+#
+# See comments in cc-wrapper's setup hook. This works exactly the same way.
+
+# Skip setup hook if we're neither a build-time dep, nor, temporarily, doing a
+# native compile.
+#
+# TODO(@Ericson2314): No native exception
+[[ -z ${strictDeps-} ]] || (( "$hostOffset" < 0 )) || return 0
+
+pkgConfigWrapper_addPkgConfigPath () {
+    # See ../setup-hooks/role.bash
+    local role_post
+    getHostRoleEnvHook
+
+    addToSearchPath "PKG_CONFIG_PATH${role_post}" "$1/lib/pkgconfig"
+    addToSearchPath "PKG_CONFIG_PATH${role_post}" "$1/share/pkgconfig"
+}
+
+# See ../setup-hooks/role.bash
+getTargetRole
+getTargetRoleWrapper
+
+addEnvHooks "$targetOffset" pkgConfigWrapper_addPkgConfigPath
+
+export PKG_CONFIG${role_post}=@targetPrefix@@baseBinName@
+
+# No local scope in sourced file
+unset -v role_post
diff --git a/nixpkgs/pkgs/build-support/plugins.nix b/nixpkgs/pkgs/build-support/plugins.nix
new file mode 100644
index 000000000000..31b478c6c0de
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/plugins.nix
@@ -0,0 +1,29 @@
+{ lib }:
+# helper functions for packaging programs with plugin systems
+{
+
+  /* Takes a list of expected plugin names
+   * and compares it to the plugins found in the given file,
+   * one plugin per line.
+   * If the lists differ, the build fails with a nice message.
+   *
+   * This is helpful to ensure maintainers don’t miss
+   * the addition or removal of a plugin.
+   */
+  diffPlugins = expectedPlugins: foundPluginsFilePath: ''
+     # sort both lists first
+     plugins_expected=$(mktemp)
+     (${lib.concatMapStrings (s: "echo \"${s}\";") expectedPlugins}) \
+       | sort -u > "$plugins_expected"
+     plugins_found=$(mktemp)
+     sort -u "${foundPluginsFilePath}" > "$plugins_found"
+
+     if ! mismatches="$(diff -y "$plugins_expected" "$plugins_found")"; then
+       echo "The the list of expected plugins (left side) doesn't match" \
+           "the list of plugins we found (right side):" >&2
+       echo "$mismatches" >&2
+       exit 1
+     fi
+   '';
+
+}
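A hedged usage sketch (assuming `diffPlugins` is in scope; the program and
its --list-plugins flag are made up):

    installCheckPhase = ''
      $out/bin/myprog --list-plugins > found-plugins.txt
      ${diffPlugins [ "alpha" "beta" ] "found-plugins.txt"}
    '';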
diff --git a/nixpkgs/pkgs/build-support/prefer-remote-fetch/default.nix b/nixpkgs/pkgs/build-support/prefer-remote-fetch/default.nix
new file mode 100644
index 000000000000..2e55e3707421
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/prefer-remote-fetch/default.nix
@@ -0,0 +1,19 @@
+# An overlay that downloads sources on the remote builder.
+# This is useful when the evaluating machine has a slow
+# upload speed while the builder can fetch faster directly from the source.
+# Usage: Put the following snippet in your usual overlay definition:
+#
+#   self: super:
+#     (super.prefer-remote-fetch self super)
+# Full configuration example for your own account:
+#
+# $ mkdir ~/.config/nixpkgs/overlays/
+# $ echo 'self: super: super.prefer-remote-fetch self super' > ~/.config/nixpkgs/overlays/prefer-remote-fetch.nix
+#
+self: super: {
+  fetchurl = args: super.fetchurl (args // { preferLocalBuild = false; });
+  fetchgit = args: super.fetchgit (args // { preferLocalBuild = false; });
+  fetchhg = args: super.fetchhg (args // { preferLocalBuild = false; });
+  fetchsvn = args: super.fetchsvn (args // { preferLocalBuild = false; });
+  fetchipfs = args: super.fetchipfs (args // { preferLocalBuild = false; });
+}
diff --git a/nixpkgs/pkgs/build-support/references-by-popularity/closure-graph.py b/nixpkgs/pkgs/build-support/references-by-popularity/closure-graph.py
new file mode 100644
index 000000000000..579f3b041fa8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/references-by-popularity/closure-graph.py
@@ -0,0 +1,567 @@
+# IMPORTANT: Making changes?
+#
+# Validate your changes with python3 ./closure-graph.py --test
+
+
+# Using a simple algorithm, convert the references to a path into a
+# sorted list of dependent paths based on how often they're referenced
+# and how deep in the tree they live. Equally-"popular" paths are then
+# sorted by name.
+#
+# The existing writeReferencesToFile prints the paths in a simple
+# ASCII-based sort order.
+#
+# Sorting the paths by graph improves the chances that the difference
+# between two builds appears near the end of the list, instead of near
+# the beginning. This matters for Nix builds which export a
+# closure for another program to consume, if that program implements its
+# own level of binary diffing.
+#
+# For example, Docker images: if each store path is a separate layer,
+# then Docker images can be transferred between systems very efficiently,
+# and we get very good cache reuse between images built with the same
+# version of Nixpkgs. However, since Docker only reliably supports a
+# small number of layers (42), it is important to pick the individual
+# layers carefully. By storing very popular store paths in the first 40
+# layers, we improve the chances that the next Docker image will share
+# many of those layers.*
+#
+# Given the dependency tree:
+#
+#     A - B - C - D -\
+#      \   \   \      \
+#       \   \   \      \
+#        \   \ - E ---- F
+#         \- G
+#
+# Nodes which have multiple references are duplicated:
+#
+#     A - B - C - D - F
+#      \   \   \
+#       \   \   \- E - F
+#        \   \
+#         \   \- E - F
+#          \
+#           \- G
+#
+# Each leaf node is now replaced by a counter defaulted to 1:
+#
+#     A - B - C - D - (F:1)
+#      \   \   \
+#       \   \   \- E - (F:1)
+#        \   \
+#         \   \- E - (F:1)
+#          \
+#           \- (G:1)
+#
+# Then each leaf counter is merged with its parent node, replacing the
+# parent node with a counter of 1, and each existing counter being
+# incremented by 1. That is to say `- D - (F:1)` becomes `- (D:1, F:2)`:
+#
+#     A - B - C - (D:1, F:2)
+#      \   \   \
+#       \   \   \- (E:1, F:2)
+#        \   \
+#         \   \- (E:1, F:2)
+#          \
+#           \- (G:1)
+#
+# Then each leaf counter is merged with its parent node again, merging
+# any counters, then incrementing each:
+#
+#     A - B - (C:1, D:2, E:2, F:5)
+#      \   \
+#       \   \- (E:1, F:2)
+#        \
+#         \- (G:1)
+#
+# And again:
+#
+#     A - (B:1, C:2, D:3, E:4, F:8)
+#      \
+#       \- (G:1)
+#
+# And again:
+#
+#     (A:1, B:2, C:3, D:4, E:5, F:9, G:2)
+#
+# and then paths have the following "popularity":
+#
+#     A     1
+#     B     2
+#     C     3
+#     D     4
+#     E     5
+#     F     9
+#     G     2
+#
+# and the popularity contest would result in the paths being printed as:
+#
+#     F
+#     E
+#     D
+#     C
+#     B
+#     G
+#     A
+#
+# * Note: People who have used a Dockerfile before assume Docker's
+# layers are inherently ordered. However, this is not true -- Docker
+# layers are content-addressable and are not explicitly layered until
+# they are composed into an image.
+
+import sys
+import json
+import unittest
+
+from pprint import pprint
+from collections import defaultdict
+
+
+def debug(msg, *args, **kwargs):
+    if False:
+        print(
+            "DEBUG: {}".format(
+                msg.format(*args, **kwargs)
+            ),
+            file=sys.stderr
+        )
+
+
+# Find paths in the original dataset which are never referenced by
+# any other paths
+def find_roots(closures):
+    roots = []
+
+    for closure in closures:
+        path = closure['path']
+        if not any_refer_to(path, closures):
+            roots.append(path)
+
+    return roots
+
+class TestFindRoots(unittest.TestCase):
+    def test_find_roots(self):
+        self.assertCountEqual(
+            find_roots([
+                {
+                    "path": "/nix/store/foo",
+                    "references": [
+                        "/nix/store/foo",
+                        "/nix/store/bar"
+                    ]
+                },
+                {
+                    "path": "/nix/store/bar",
+                    "references": [
+                        "/nix/store/bar",
+                        "/nix/store/tux"
+                    ]
+                },
+                {
+                    "path": "/nix/store/hello",
+                    "references": [
+                    ]
+                }
+            ]),
+            ["/nix/store/foo", "/nix/store/hello"]
+        )
+
+
+def any_refer_to(path, closures):
+    for closure in closures:
+        if path != closure['path']:
+            if path in closure['references']:
+                return True
+    return False
+
+class TestAnyReferTo(unittest.TestCase):
+    def test_has_references(self):
+        self.assertTrue(
+            any_refer_to(
+                "/nix/store/bar",
+                [
+                    {
+                        "path": "/nix/store/foo",
+                        "references": [
+                            "/nix/store/bar"
+                        ]
+                    },
+                ]
+            ),
+        )
+    def test_no_references(self):
+        self.assertFalse(
+            any_refer_to(
+                "/nix/store/foo",
+                [
+                    {
+                        "path": "/nix/store/foo",
+                        "references": [
+                            "/nix/store/foo",
+                            "/nix/store/bar"
+                        ]
+                    },
+                ]
+            ),
+        )
+
+def all_paths(closures):
+    paths = []
+    for closure in closures:
+        paths.append(closure['path'])
+        paths.extend(closure['references'])
+    paths.sort()
+    return list(set(paths))
+
+
+class TestAllPaths(unittest.TestCase):
+    def test_returns_all_paths(self):
+        self.assertCountEqual(
+            all_paths([
+                {
+                    "path": "/nix/store/foo",
+                    "references": [
+                        "/nix/store/foo",
+                        "/nix/store/bar"
+                    ]
+                },
+                {
+                    "path": "/nix/store/bar",
+                    "references": [
+                        "/nix/store/bar",
+                        "/nix/store/tux"
+                    ]
+                },
+                {
+                    "path": "/nix/store/hello",
+                    "references": [
+                    ]
+                }
+            ]),
+            ["/nix/store/foo", "/nix/store/bar", "/nix/store/hello", "/nix/store/tux",]
+        )
+    def test_no_references(self):
+        self.assertFalse(
+            any_refer_to(
+                "/nix/store/foo",
+                [
+                    {
+                        "path": "/nix/store/foo",
+                        "references": [
+                            "/nix/store/foo",
+                            "/nix/store/bar"
+                        ]
+                    },
+                ]
+            ),
+        )
+
+# Convert:
+#
+# [
+#    { path: /nix/store/foo, references: [ /nix/store/foo, /nix/store/bar, /nix/store/baz ] },
+#    { path: /nix/store/bar, references: [ /nix/store/bar, /nix/store/baz ] },
+#    { path: /nix/store/baz, references: [ /nix/store/baz, /nix/store/tux ] },
+#    { path: /nix/store/tux, references: [ /nix/store/tux ] }
+#  ]
+#
+# To:
+#    {
+#      /nix/store/foo: [ /nix/store/bar, /nix/store/baz ],
+#      /nix/store/bar: [ /nix/store/baz ],
+#      /nix/store/baz: [ /nix/store/tux ] },
+#      /nix/store/tux: [ ]
+#    }
+#
+# Note that it drops self-references to avoid loops.
+def make_lookup(closures):
+    lookup = {}
+
+    for closure in closures:
+        # paths often self-refer
+        nonreferential_paths = [ref for ref in closure['references'] if ref != closure['path']]
+        lookup[closure['path']] = nonreferential_paths
+
+    return lookup
+
+class TestMakeLookup(unittest.TestCase):
+    def test_returns_lookup(self):
+        self.assertDictEqual(
+            make_lookup([
+                {
+                    "path": "/nix/store/foo",
+                    "references": [
+                        "/nix/store/foo",
+                        "/nix/store/bar"
+                    ]
+                },
+                {
+                    "path": "/nix/store/bar",
+                    "references": [
+                        "/nix/store/bar",
+                        "/nix/store/tux"
+                    ]
+                },
+                {
+                    "path": "/nix/store/hello",
+                    "references": [
+                    ]
+                }
+            ]),
+            {
+                "/nix/store/foo": [ "/nix/store/bar" ],
+                "/nix/store/bar": [ "/nix/store/tux" ],
+                "/nix/store/hello": [ ],
+            }
+        )
+
+# Convert:
+#
+# /nix/store/foo with
+#  {
+#    /nix/store/foo: [ /nix/store/bar, /nix/store/baz ],
+#    /nix/store/bar: [ /nix/store/baz ],
+#    /nix/store/baz: [ /nix/store/tux ] },
+#    /nix/store/tux: [ ]
+#  }
+#
+# To:
+#
+# {
+#   /nix/store/bar: {
+#                    /nix/store/baz: {
+#                                     /nix/store/tux: {}
+#                    }
+#   },
+#   /nix/store/baz: {
+#                   /nix/store/tux: {}
+#   }
+# }
+subgraphs_cache = {}
+def make_graph_segment_from_root(root, lookup):
+    global subgraphs_cache
+    children = {}
+    for ref in lookup[root]:
+        # make_graph_segment_from_root is a pure function, and will
+        # always return the same result based on a given input. Thus,
+        # cache computation.
+        #
+        # Python's assignment will use a pointer, preventing memory
+        # bloat for large graphs.
+        if ref not in subgraphs_cache:
+            debug("Subgraph Cache miss on {}".format(ref))
+            subgraphs_cache[ref] = make_graph_segment_from_root(ref, lookup)
+        else:
+            debug("Subgraph Cache hit on {}".format(ref))
+        children[ref] = subgraphs_cache[ref]
+    return children
+
+class TestMakeGraphSegmentFromRoot(unittest.TestCase):
+    def test_returns_graph(self):
+        self.assertDictEqual(
+            make_graph_segment_from_root("/nix/store/foo", {
+                "/nix/store/foo": [ "/nix/store/bar" ],
+                "/nix/store/bar": [ "/nix/store/tux" ],
+                "/nix/store/tux": [ ],
+                "/nix/store/hello": [ ],
+            }),
+            {
+                "/nix/store/bar": {
+                    "/nix/store/tux": {}
+                }
+            }
+        )
+    def test_returns_graph_tiny(self):
+        self.assertDictEqual(
+            make_graph_segment_from_root("/nix/store/tux", {
+                "/nix/store/foo": [ "/nix/store/bar" ],
+                "/nix/store/bar": [ "/nix/store/tux" ],
+                "/nix/store/tux": [ ],
+            }),
+            {}
+        )
+
+# Convert a graph segment into a popularity-counted dictionary:
+#
+# From:
+# {
+#    /nix/store/foo: {
+#                      /nix/store/bar: {
+#                                        /nix/store/baz: {
+#                                                           /nix/store/tux: {}
+#                                        }
+#                      }
+#                      /nix/store/baz: {
+#                                         /nix/store/tux: {}
+#                      }
+#    }
+# }
+#
+# to:
+# [
+#   /nix/store/foo: 1
+#   /nix/store/bar: 2
+#   /nix/store/baz: 4
+#   /nix/store/tux: 6
+# ]
+popularity_cache = {}
+def graph_popularity_contest(full_graph):
+    global popularity_cache
+    popularity = defaultdict(int)
+    for path, subgraph in full_graph.items():
+        popularity[path] += 1
+        # graph_popularity_contest is a pure function, and will
+        # always return the same result based on a given input. Thus,
+        # cache computation.
+        #
+        # Python's assignment will use a pointer, preventing memory
+        # bloat for large graphs.
+        if path not in popularity_cache:
+            debug("Popularity Cache miss on {}", path)
+            popularity_cache[path] = graph_popularity_contest(subgraph)
+        else:
+            debug("Popularity Cache hit on {}", path)
+
+        subcontest = popularity_cache[path]
+        for subpath, subpopularity in subcontest.items():
+            debug("Calculating popularity for {}", subpath)
+            popularity[subpath] += subpopularity + 1
+
+    return popularity
+
+class TestGraphPopularityContest(unittest.TestCase):
+    def test_counts_popularity(self):
+        self.assertDictEqual(
+            graph_popularity_contest({
+                "/nix/store/foo": {
+                    "/nix/store/bar": {
+                        "/nix/store/baz": {
+                            "/nix/store/tux": {}
+                        }
+                    },
+                    "/nix/store/baz": {
+                        "/nix/store/tux": {}
+                    }
+                }
+            }),
+            {
+                   "/nix/store/foo": 1,
+                   "/nix/store/bar": 2,
+                   "/nix/store/baz": 4,
+                   "/nix/store/tux": 6,
+            }
+        )
+
+# Emit a list of packages by popularity, most first:
+#
+# From:
+# [
+#   /nix/store/foo: 1
+#   /nix/store/bar: 1
+#   /nix/store/baz: 2
+#   /nix/store/tux: 2
+# ]
+#
+# To:
+# [ /nix/store/baz /nix/store/tux /nix/store/bar /nix/store/foo ]
+def order_by_popularity(paths):
+    paths_by_popularity = defaultdict(list)
+    popularities = []
+    for path, popularity in paths.items():
+        popularities.append(popularity)
+        paths_by_popularity[popularity].append(path)
+
+    popularities = list(set(popularities))
+    popularities.sort()
+
+    flat_ordered = []
+    for popularity in popularities:
+        paths = paths_by_popularity[popularity]
+        paths.sort(key=package_name)
+
+        flat_ordered.extend(reversed(paths))
+    return list(reversed(flat_ordered))
+
+
+class TestOrderByPopularity(unittest.TestCase):
+    def test_returns_in_order(self):
+        self.assertEqual(
+            order_by_popularity({
+                   "/nix/store/foo": 1,
+                   "/nix/store/bar": 1,
+                   "/nix/store/baz": 2,
+                   "/nix/store/tux": 2,
+            }),
+            [
+                "/nix/store/baz",
+                "/nix/store/tux",
+                "/nix/store/bar",
+                "/nix/store/foo"
+            ]
+        )
+
+def package_name(path):
+    parts = path.split('-')
+    start = parts.pop(0)
+    # Don't throw away any data, so the order is always the same
+    # even in cases where only the hash at the start has changed.
+    parts.append(start)
+    return '-'.join(parts)
+
+def main():
+    filename = sys.argv[1]
+    key = sys.argv[2]
+
+    debug("Loading from {}", filename)
+    with open(filename) as f:
+        data = json.load(f)
+
+    # Data comes in as:
+    # [
+    #    { path: /nix/store/foo, references: [ /nix/store/foo, /nix/store/bar, /nix/store/baz ] },
+    #    { path: /nix/store/bar, references: [ /nix/store/bar, /nix/store/baz ] },
+    #    { path: /nix/store/baz, references: [ /nix/store/baz, /nix/store/tux ] },
+    #    { path: /nix/store/tux, references: [ /nix/store/tux ] }
+    #  ]
+    #
+    # and we want to get out a list of paths ordered by how universally
+    # important they are, i.e. tux is referenced by every path, transitively,
+    # so it should be #1
+    #
+    # [
+    #   /nix/store/tux,
+    #   /nix/store/baz,
+    #   /nix/store/bar,
+    #   /nix/store/foo,
+    # ]
+    graph = data[key]
+
+    debug("Finding roots from {}", key)
+    roots = find_roots(graph)
+    debug("Making lookup for {}", key)
+    lookup = make_lookup(graph)
+
+    full_graph = {}
+    for root in roots:
+        debug("Making full graph for {}", root)
+        full_graph[root] = make_graph_segment_from_root(root, lookup)
+
+    debug("Running contest")
+    contest = graph_popularity_contest(full_graph)
+    debug("Ordering by popularity")
+    ordered = order_by_popularity(contest)
+    debug("Checking for missing paths")
+    missing = []
+    for path in all_paths(graph):
+        if path not in ordered:
+            missing.append(path)
+
+    ordered.extend(missing)
+    print("\n".join(ordered))
+
+if "--test" in sys.argv:
+    # Don't pass --test otherwise unittest gets mad
+    unittest.main(argv = [f for f in sys.argv if f != "--test" ])
+else:
+    main()
diff --git a/nixpkgs/pkgs/build-support/references-by-popularity/default.nix b/nixpkgs/pkgs/build-support/references-by-popularity/default.nix
new file mode 100644
index 000000000000..4cae2dcf3ca9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/references-by-popularity/default.nix
@@ -0,0 +1,15 @@
+{ runCommand, python3, coreutils }:
+# Write the references of `path' to a file, in order of how "popular" each
+# reference is. Nix 2 only.
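+#
+# A usage sketch (assuming this file is imported as
+# `referencesByPopularity'; the package is illustrative):
+#
+#   referencesByPopularity pkgs.hello
+#
+# builds a derivation whose output lists hello's closure, one store
+# path per line, most-referenced paths first.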
+path: runCommand "closure-paths"
+{
+  exportReferencesGraph.graph = path;
+  __structuredAttrs = true;
+  PATH = "${coreutils}/bin:${python3}/bin";
+  builder = builtins.toFile "builder"
+    ''
+      . .attrs.sh
+      python3 ${./closure-graph.py} .attrs.json graph > ''${outputs[out]}
+    '';
+  }
+  ""
diff --git a/nixpkgs/pkgs/build-support/release/ant-build.nix b/nixpkgs/pkgs/build-support/release/ant-build.nix
new file mode 100644
index 000000000000..6b59241e01ed
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/ant-build.nix
@@ -0,0 +1,123 @@
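+# Build a Java project with Apache Ant and install the resulting jars
+# under $out/share/java, optionally generating wrapper scripts in
+# $out/bin for them.
+#
+# A usage sketch via the antBuild wrapper in ./default.nix (attribute
+# values are illustrative):
+#
+#   antBuild {
+#     name = "my-app";
+#     src = ./.;
+#     antTargets = [ "dist" ];
+#     jarWrappers = [ { name = "my-app"; mainClass = "com.example.Main"; } ];
+#   }
+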
+{ src
+, pkgs
+, lib
+, stdenv ? pkgs.stdenv
+, name
+, antTargets ? []
+, jars ? []
+, jarWrappers ? []
+, antProperties ? []
+, antBuildInputs ? []
+, buildfile ? "build.xml"
+, ant ? pkgs.ant
+, jre ? pkgs.jdk
+, hydraAntLogger ? pkgs.hydraAntLogger
+, zip ? pkgs.zip
+, unzip ? pkgs.unzip
+, ... } @ args:
+
+let
+  antFlags = "-f ${buildfile} " + lib.concatMapStrings ({name, value}: "-D${name}=${value} " ) antProperties ;
+in
+stdenv.mkDerivation (
+
+  {
+    inherit jre ant;
+    showBuildStats = true;
+
+    postPhases =
+      ["generateWrappersPhase" "finalPhase"];
+
+    prePhases =
+      ["antSetupPhase"];
+
+    antSetupPhase = with lib; ''
+      if test "$hydraAntLogger" != "" ; then
+        export ANT_ARGS="-logger org.hydra.ant.HydraLogger -lib `ls $hydraAntLogger/share/java/*.jar | head -1`"
+      fi
+      for abi in ${concatStringsSep " " (map (f: "`find ${f} -name '*.jar'`") antBuildInputs)}; do
+        export ANT_ARGS="$ANT_ARGS -lib $abi"
+      done
+    '';
+
+    installPhase = ''
+      runHook preInstall
+
+      mkdir -p $out/share/java
+      ${ if jars == [] then ''
+           find . -name "*.jar" | xargs -I{} cp -v {} $out/share/java
+         '' else lib.concatMapStrings (j: ''
+           cp -v ${j} $out/share/java
+         '') jars }
+
+      . ${./functions.sh}
+      for j in $out/share/java/*.jar ; do
+        canonicalizeJar $j
+        echo file jar $j >> $out/nix-support/hydra-build-products
+      done
+
+      runHook postInstall
+    '';
+
+    generateWrappersPhase =
+      let
+        cp = w: "-cp '${lib.optionalString (w ? classPath) w.classPath}${lib.optionalString (w ? mainClass) ":$out/share/java/*"}'";
+      in
+      ''
+      header "Generating jar wrappers"
+    '' + (lib.concatMapStrings (w: ''
+
+      mkdir -p $out/bin
+      cat >> $out/bin/${w.name} <<EOF
+      #!${pkgs.runtimeShell}
+      export JAVA_HOME=$jre
+      $jre/bin/java ${cp w} ${if w ? mainClass then w.mainClass else "-jar ${w.jar}"} "\$@"
+      EOF
+
+      chmod a+x $out/bin/${w.name} || exit 1
+    '') jarWrappers) + ''
+      closeNest
+    '';
+
+    buildPhase = ''
+      runHook preBuild
+    '' + (if antTargets == [] then ''
+      header "Building default ant target"
+      ant ${antFlags}
+      closeNest
+    '' else lib.concatMapStrings (t: ''
+      header "Building '${t}' target"
+      ant ${antFlags} ${t}
+      closeNest
+    '') antTargets) + ''
+      runHook postBuild
+    '';
+
+    finalPhase =
+      ''
+        # Propagate the release name of the source tarball.  This is
+        # to get nice package names in channels.
+        if test -e $origSrc/nix-support/hydra-release-name; then
+          cp $origSrc/nix-support/hydra-release-name $out/nix-support/hydra-release-name
+        fi
+      '';
+  }
+
+  // removeAttrs args ["antProperties" "buildInputs" "pkgs" "lib" "jarWrappers"] //
+
+  {
+    name = name + (if src ? version then "-" + src.version else "");
+
+    nativeBuildInputs = [ unzip ];
+    buildInputs = [ant jre zip] ++ lib.optionals (args ? buildInputs) args.buildInputs;
+
+    postHook = ''
+      mkdir -p $out/nix-support
+      echo "$system" > $out/nix-support/system
+      . ${./functions.sh}
+
+      origSrc=$src
+      src=$(findTarball $src)
+    '';
+  }
+)
diff --git a/nixpkgs/pkgs/build-support/release/binary-tarball.nix b/nixpkgs/pkgs/build-support/release/binary-tarball.nix
new file mode 100644
index 000000000000..168343c80821
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/binary-tarball.nix
@@ -0,0 +1,78 @@
+/* This function builds a binary tarball.  The resulting binaries are
+   usually only useful if they don't have any runtime dependencies
+   on any paths in the Nix store, since those aren't distributed in
+   the tarball.  For instance, the binaries should be statically
+   linked: they can't depend on dynamic libraries in the store
+   (including Glibc).
+
+   The binaries are built and installed with a prefix of /usr/local by
+   default.  They are installed by setting DESTDIR to a temporary
+   directory, so the Makefile of the package should support DESTDIR.
+*/
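+
+# A usage sketch via the binaryTarball wrapper in ./default.nix (the
+# src value is illustrative; it is typically a source-tarball job):
+#
+#   binaryTarball {
+#     name = "my-package-binary-tarball";
+#     src = mySourceTarball;
+#   }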
+
+{ src, stdenv
+, name ? "binary-tarball"
+, ... } @ args:
+
+stdenv.mkDerivation (
+
+  {
+    # Also run a `make check'.
+    doCheck = true;
+
+    showBuildStats = true;
+
+    prefix = "/usr/local";
+
+    postPhases = "finalPhase";
+  }
+
+  // args //
+
+  {
+    name = name + (if src ? version then "-" + src.version else "");
+
+    postHook = ''
+      mkdir -p $out/nix-support
+      echo "$system" > $out/nix-support/system
+      . ${./functions.sh}
+
+      origSrc=$src
+      src=$(findTarball $src)
+
+      if test -e $origSrc/nix-support/hydra-release-name; then
+          releaseName=$(cat $origSrc/nix-support/hydra-release-name)
+      fi
+
+      installFlagsArray=(DESTDIR=$TMPDIR/inst)
+
+      # Prefix hackery because of a bug in stdenv (it tries to `mkdir
+      # $prefix', which doesn't work due to the DESTDIR).
+      configureFlags="--prefix=$prefix $configureFlags"
+      dontAddPrefix=1
+      prefix=$TMPDIR/inst$prefix
+    '';
+
+    doDist = true;
+
+    distPhase = ''
+      mkdir -p $out/tarballs
+      tar cvfj $out/tarballs/''${releaseName:-binary-dist}.tar.bz2 -C $TMPDIR/inst .
+    '';
+
+    finalPhase = ''
+      for i in $out/tarballs/*; do
+          echo "file binary-dist $i" >> $out/nix-support/hydra-build-products
+      done
+
+      # Propagate the release name of the source tarball.  This is
+      # to get nice package names in channels.
+      test -n "$releaseName" && (echo "$releaseName" >> $out/nix-support/hydra-release-name)
+    '';
+
+    meta = (if args ? meta then args.meta else {}) // {
+      description = "Build of a generic binary distribution";
+    };
+
+  }
+)
diff --git a/nixpkgs/pkgs/build-support/release/debian-build.nix b/nixpkgs/pkgs/build-support/release/debian-build.nix
new file mode 100644
index 000000000000..bd54401e2356
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/debian-build.nix
@@ -0,0 +1,96 @@
+# This function compiles a source tarball in a virtual machine image
+# that contains a Debian-like (i.e. dpkg-based) OS.
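+#
+# A usage sketch via the debBuild wrapper in ./default.nix (the disk
+# image attribute is hypothetical; any dpkg-based image from vmTools
+# works):
+#
+#   debBuild {
+#     name = "my-package";
+#     src = mySourceTarball;
+#     diskImage = vmTools.diskImages.ubuntu2004x86_64;
+#   }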
+
+{ name ? "debian-build"
+, diskImage
+, src, lib, stdenv, vmTools, checkinstall
+, fsTranslation ? false
+, # Features provided by this package.
+  debProvides ? []
+, # Features required by this package.
+  debRequires ? []
+, ... } @ args:
+
+vmTools.runInLinuxImage (stdenv.mkDerivation (
+
+  {
+    doCheck = true;
+
+    prefix = "/usr";
+
+    prePhases = "installExtraDebsPhase sysInfoPhase";
+  }
+
+  // removeAttrs args ["vmTools" "lib"] //
+
+  {
+    name = name + "-" + diskImage.name + (if src ? version then "-" + src.version else "");
+
+    # !!! cut&paste from rpm-build.nix
+    postHook = ''
+      . ${./functions.sh}
+      propagateImageName
+      src=$(findTarball $src)
+    '';
+
+    installExtraDebsPhase = ''
+      for i in $extraDebs; do
+        dpkg --install $(ls $i/debs/*.deb | sort | head -1)
+      done
+    '';
+
+    sysInfoPhase = ''
+      [ ! -f /etc/lsb-release ] || (source /etc/lsb-release; echo "OS release: $DISTRIB_DESCRIPTION")
+      echo "System/kernel: $(uname -a)"
+      if test -e /etc/debian_version; then echo "Debian release: $(cat /etc/debian_version)"; fi
+      header "installed Debian packages"
+      dpkg-query --list
+      stopNest
+    '';
+
+    installPhase = ''
+      eval "$preInstall"
+      export LOGNAME=root
+
+      # otherwise build hangs when it wants to display
+      # the log file
+      export PAGER=cat
+      ${checkinstall}/sbin/checkinstall --nodoc -y -D \
+        --fstrans=${if fsTranslation then "yes" else "no"} \
+        --requires="${lib.concatStringsSep "," debRequires}" \
+        --provides="${lib.concatStringsSep "," debProvides}" \
+        ${if (src ? version) then "--pkgversion=$(echo ${src.version} | tr _ -)"
+                             else "--pkgversion=0.0.0"} \
+        ''${debMaintainer:+--maintainer="'$debMaintainer'"} \
+        ''${debName:+--pkgname="'$debName'"} \
+        $checkInstallFlags \
+        -- \
+        $SHELL -c "''${installCommand:-make install}"
+
+      mkdir -p $out/debs
+      find . -name "*.deb" -exec cp {} $out/debs \;
+
+      [ "$(echo $out/debs/*.deb)" != "" ]
+
+      for i in $out/debs/*.deb; do
+        header "Generated DEB package: $i"
+        dpkg-deb --info "$i"
+        pkgName=$(dpkg-deb -W "$i" | awk '{print $1}')
+        echo "file deb $i" >> $out/nix-support/hydra-build-products
+        stopNest
+      done
+      dpkg -i $out/debs/*.deb
+
+      for i in $extraDebs; do
+        echo "file deb-extra $(ls $i/debs/*.deb | sort | head -1)" >> $out/nix-support/hydra-build-products
+      done
+
+      eval "$postInstall"
+    '';
+
+    meta = (if args ? meta then args.meta else {}) // {
+      description = "Deb package for ${diskImage.fullName}";
+    };
+  }
+
+))
diff --git a/nixpkgs/pkgs/build-support/release/default.nix b/nixpkgs/pkgs/build-support/release/default.nix
new file mode 100644
index 000000000000..83f755b2bece
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/default.nix
@@ -0,0 +1,124 @@
+{ lib, pkgs }:
+
+with pkgs;
+
+rec {
+
+  sourceTarball = args: import ./source-tarball.nix (
+    { inherit stdenv autoconf automake libtool;
+    } // args);
+
+  makeSourceTarball = sourceTarball; # compatibility
+
+  binaryTarball = args: import ./binary-tarball.nix (
+    { inherit stdenv;
+    } // args);
+
+  antBuild = args: import ./ant-build.nix (
+    { inherit lib pkgs;
+    } // args);
+
+  mvnBuild = args: import ./maven-build.nix (
+    { inherit stdenv;
+    } // args);
+
+  nixBuild = args: import ./nix-build.nix (
+    { inherit lib stdenv;
+    } // args);
+
+  coverageAnalysis = args: nixBuild (
+    { inherit lcov enableGCOVInstrumentation makeGCOVReport;
+      doCoverageAnalysis = true;
+    } // args);
+
+  clangAnalysis = args: nixBuild (
+    { inherit clang-analyzer;
+      doClangAnalysis = true;
+    } // args);
+
+  coverityAnalysis = args: nixBuild (
+    { inherit cov-build xz;
+      doCoverityAnalysis = true;
+    } // args);
+
+  rpmBuild = args: import ./rpm-build.nix (
+    { inherit vmTools;
+    } // args);
+
+  debBuild = args: import ./debian-build.nix (
+    { inherit lib stdenv vmTools checkinstall;
+    } // args);
+
+  aggregate =
+    { name, constituents, meta ? { } }:
+    pkgs.runCommand name
+      { inherit constituents meta;
+        preferLocalBuild = true;
+        _hydraAggregate = true;
+      }
+      ''
+        mkdir -p $out/nix-support
+        touch $out/nix-support/hydra-build-products
+        echo $constituents > $out/nix-support/hydra-aggregate-constituents
+
+        # Propagate build failures.
+        for i in $constituents; do
+          if [ -e $i/nix-support/failed ]; then
+            touch $out/nix-support/failed
+          fi
+        done
+      '';
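+
+  # A usage sketch for `aggregate' (job names are illustrative); the
+  # resulting Hydra job propagates the failure of any constituent:
+  #
+  #   aggregate {
+  #     name = "tested";
+  #     constituents = [ jobs.build jobs.manual ];
+  #   }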
+
+  /* Create a channel job whose success depends on the success of all of
+     its constituents. Channel jobs are a special type of job; they are
+     listed in the channel tab of Hydra and can be subscribed to.
+     A tarball of the src attribute is distributed via the channel.
+
+     - constituents: a list of derivations on which the channel success depends.
+     - name: the channel name that will be used in the hydra interface.
+     - src: should point to the root folder of the nix-expressions used by the
+            channel, typically a folder containing a `default.nix`.
+
+       channel {
+         constituents = [ foo bar baz ];
+         name = "my-channel";
+         src = ./.;
+       };
+
+  */
+  channel =
+    { name, src, constituents ? [], meta ? {}, isNixOS ? true, ... }@args:
+    stdenv.mkDerivation ({
+      preferLocalBuild = true;
+      _hydraAggregate = true;
+
+      phases = [ "unpackPhase" "patchPhase" "installPhase" ];
+
+      patchPhase = lib.optionalString isNixOS ''
+        touch .update-on-nixos-rebuild
+      '';
+
+      installPhase = ''
+        mkdir -p $out/{tarballs,nix-support}
+
+        tar cJf "$out/tarballs/nixexprs.tar.xz" \
+          --owner=0 --group=0 --mtime="1970-01-01 00:00:00 UTC" \
+          --transform='s!^\.!${name}!' .
+
+        echo "channel - $out/tarballs/nixexprs.tar.xz" > "$out/nix-support/hydra-build-products"
+        echo $constituents > "$out/nix-support/hydra-aggregate-constituents"
+
+        # Propagate build failures.
+        for i in $constituents; do
+          if [ -e "$i/nix-support/failed" ]; then
+            touch "$out/nix-support/failed"
+          fi
+        done
+      '';
+
+      meta = meta // {
+        isHydraChannel = true;
+      };
+    } // removeAttrs args [ "meta" ]);
+
+}
diff --git a/nixpkgs/pkgs/build-support/release/functions.sh b/nixpkgs/pkgs/build-support/release/functions.sh
new file mode 100644
index 000000000000..120d19685427
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/functions.sh
@@ -0,0 +1,40 @@
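+# Echo one tarball from $1/tarballs/ (the lexicographically first match
+# across a list of common archive suffixes), or echo $1 itself when it
+# has no tarballs/ subdirectory.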
+findTarball() {
+    local suffix i
+    if [ -d "$1/tarballs/" ]; then
+        for suffix in tar.gz tgz tar.bz2 tbz2 tbz tar.xz txz tar.lzma; do
+            for i in $1/tarballs/*.$suffix; do echo $i; break; done
+        done | sort | head -1
+        return
+    else
+        echo "$1"
+        return
+    fi
+}
+
+canonicalizeJarManifest() {
+    local input=$1
+    # http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#Notes_on_Manifest_and_Signature_Files
+    (head -n 1 $input && tail -n +2 $input | sort | grep -v '^\s*$') > $input-tmp
+    mv $input-tmp $input
+}
+
+# Post-process a jar file to contain canonical timestamps and metadata ordering
+canonicalizeJar() {
+    local input=$1
+    local outer=$(pwd)
+    unzip -qq $input -d $input-tmp
+    canonicalizeJarManifest $input-tmp/META-INF/MANIFEST.MF
+    # Set all timestamps to Jan 1 1980, which is the earliest date the zip format supports...
+    find $input-tmp -exec touch -t 198001010000.00 {} +
+    rm $input
+    pushd $input-tmp
+    zip -q -r -o -X $outer/tmp-out.jar . 2> /dev/null
+    popd
+    rm -rf $input-tmp
+    mv $outer/tmp-out.jar $input
+}
+
+propagateImageName() {
+    mkdir -p $out/nix-support
+    cat "$diskImage"/nix-support/full-name > $out/nix-support/full-name
+}
diff --git a/nixpkgs/pkgs/build-support/release/maven-build.nix b/nixpkgs/pkgs/build-support/release/maven-build.nix
new file mode 100644
index 000000000000..eaa47647287c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/maven-build.nix
@@ -0,0 +1,98 @@
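+# Build a Maven project by running the individual mvn goals as separate
+# phases, with a local repository kept under $TMPDIR.
+#
+# A usage sketch via the mvnBuild wrapper in ./default.nix (attribute
+# values are illustrative):
+#
+#   mvnBuild {
+#     name = "my-maven-project";
+#     src = ./.;
+#     doJavadoc = true;
+#   }
+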
+{ stdenv
+, name
+, src
+, doTest ? true
+, doTestCompile ? true
+, doJavadoc ? false
+, doCheckstyle ? false
+, doRelease ? false
+, includeTestClasses ? true
+, extraMvnFlags ? ""
+, ...
+} @ args:
+
+let
+  mvnFlags = "-Dmaven.repo.local=$M2_REPO ${if doTest then "" else "-Dmaven.test.skip.exec=true"} ${extraMvnFlags}";
+in
+
+stdenv.mkDerivation ( {
+  inherit name src;
+  phases = "setupPhase unpackPhase patchPhase mvnCompile ${if doTestCompile then "mvnTestCompile mvnTestJar" else ""} ${if doTest then "mvnTest" else ""} ${if doJavadoc then "mvnJavadoc" else ""} ${if doCheckstyle then "mvnCheckstyle" else ""} mvnJar mvnAssembly mvnRelease finalPhase";
+
+  setupPhase = ''
+    runHook preSetupPhase
+
+    mkdir -p $out/nix-support
+    export LANG="en_US.UTF-8"
+    export LOCALE_ARCHIVE=$glibcLocales/lib/locale/locale-archive
+    export M2_REPO=$TMPDIR/repository
+
+    runHook postSetupPhase
+  '';
+
+  mvnCompile = ''
+    mvn compile ${mvnFlags}
+  '';
+
+  mvnTestCompile = ''
+    mvn test-compile ${mvnFlags}
+  '';
+
+  mvnTestJar = ''
+    mvn jar:test-jar ${mvnFlags}
+  '';
+
+  mvnTest = ''
+    mvn test ${mvnFlags}
+
+    if [ -d target/site/cobertura ] ; then
+      echo "report coverage $out/site/cobertura" >> $out/nix-support/hydra-build-products
+    fi
+
+    if [ -d target/surefire-reports ] ; then
+      mvn surefire-report:report-only
+      echo "report coverage $out/site/surefire-report.html" >> $out/nix-support/hydra-build-products
+    fi
+  '';
+
+  mvnJavadoc = ''
+    mvn javadoc:javadoc ${mvnFlags}
+    echo "report javadoc $out/site/apidocs" >> $out/nix-support/hydra-build-products
+  '';
+
+  mvnCheckstyle = ''
+    mvn checkstyle:checkstyle ${mvnFlags}
+    echo "report checkstyle $out/site/checkstyle.html" >> $out/nix-support/hydra-build-products
+  '';
+
+  mvnJar = ''
+    mvn jar:jar ${mvnFlags}
+  '';
+
+  mvnAssembly = ''
+    mvn assembly:assembly -Dmaven.test.skip=true ${mvnFlags}
+  '';
+
+  mvnRelease = ''
+    mkdir -p $out/release
+
+    zip=$(ls target/*.zip | head -1)
+    releaseName=$(basename $zip .zip)
+    releaseName="$releaseName-r${toString src.rev or "0"}"
+    cp $zip $out/release/$releaseName.zip
+
+    echo "$releaseName" > $out/nix-support/hydra-release-name
+
+    ${if doRelease then  ''
+    echo "file zip $out/release/$releaseName.zip" >> $out/nix-support/hydra-build-products
+    '' else ""}
+  '';
+
+  finalPhase = ''
+    if [ -d target/site ] ; then
+      cp -R target/site $out/
+      echo "report site $out/site" >> $out/nix-support/hydra-build-products
+    fi
+  '';
+} // args
+)
diff --git a/nixpkgs/pkgs/build-support/release/nix-build.nix b/nixpkgs/pkgs/build-support/release/nix-build.nix
new file mode 100644
index 000000000000..ac51b90e0163
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/nix-build.nix
@@ -0,0 +1,175 @@
+# This function builds and tests an Autoconf-style source tarball.
+# The result can be installed normally in an environment (e.g., after
+# making it available through a channel).  If `doCoverageAnalysis' is
+# true, it does an ordinary build from a source tarball, except that
+# it turns on GCC's coverage analysis feature.  It then runs `make
+# check' and produces a coverage analysis report using `lcov'.
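+#
+# A usage sketch via the wrappers in ./default.nix (names are
+# illustrative):
+#
+#   nixBuild { name = "my-project"; src = mySourceTarball; }
+#   coverageAnalysis { name = "my-project"; src = mySourceTarball; }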
+
+{ buildOutOfSourceTree ? false
+, preConfigure ? null
+, doCoverageAnalysis ? false
+, doClangAnalysis ? false
+, doCoverityAnalysis ? false
+, lcovFilter ? []
+, lcovExtraTraceFiles ? []
+, src, lib, stdenv
+, name ? if doCoverageAnalysis then "nix-coverage" else "nix-build"
+, failureHook ? null
+, prePhases ? []
+, postPhases ? []
+, buildInputs ? []
+, preHook ? ""
+, postHook ? ""
+, ... } @ args:
+
+let
+  doingAnalysis = doCoverageAnalysis || doClangAnalysis || doCoverityAnalysis;
+in
+stdenv.mkDerivation (
+
+  {
+    # Also run a `make check'.
+    doCheck = true;
+
+    # When doing coverage analysis, we don't care about the result.
+    dontInstall = doingAnalysis;
+    useTempPrefix = doingAnalysis;
+
+    showBuildStats = true;
+
+    finalPhase =
+      ''
+        # Propagate the release name of the source tarball.  This is
+        # to get nice package names in channels.
+        if test -e $origSrc/nix-support/hydra-release-name; then
+          cp $origSrc/nix-support/hydra-release-name $out/nix-support/hydra-release-name
+        fi
+
+        # Package up Coverity analysis results
+        if [ ! -z "${toString doCoverityAnalysis}" ]; then
+          if [ -d "_coverity_$name/cov-int" ]; then
+            mkdir -p $out/tarballs
+            NAME=`cat $out/nix-support/hydra-release-name`
+            cd _coverity_$name
+            tar caf $out/tarballs/$NAME-coverity-int.xz cov-int
+            echo "file cov-build $out/tarballs/$NAME-coverity-int.xz" >> $out/nix-support/hydra-build-products
+          fi
+        fi
+
+        # Package up Clang analysis results
+        if [ ! -z "${toString doClangAnalysis}" ]; then
+          if [ ! -z "`ls _clang_analyze_$name`" ]; then
+            mkdir -p $out/analysis && cd _clang_analyze_$name && mv * $out/analysis
+          else
+            mkdir -p $out/analysis
+            echo "No bugs found." >> $out/analysis/index.html
+          fi
+
+          echo "report analysis $out/analysis" >> $out/nix-support/hydra-build-products
+        fi
+      '';
+
+    failureHook = (lib.optionalString (failureHook != null) failureHook) +
+    ''
+      if test -n "$succeedOnFailure"; then
+          if test -n "$keepBuildDirectory"; then
+              KEEPBUILDDIR="$out/`basename $TMPDIR`"
+              header "Copying build directory to $KEEPBUILDDIR"
+              mkdir -p $KEEPBUILDDIR
+              cp -R "$TMPDIR/"* $KEEPBUILDDIR
+              stopNest
+          fi
+      fi
+    '';
+  }
+
+  // removeAttrs args [ "lib" ] # Propagating lib causes the evaluation to fail, because lib is a function that can't be converted to a string
+
+  // {
+    name = name + (if src ? version then "-" + src.version else "");
+
+    postHook = ''
+      . ${./functions.sh}
+      origSrc=$src
+      src=$(findTarball $src)
+      ${postHook}
+    '';
+
+    preHook = ''
+      # Perform Coverity Analysis
+      if [ ! -z "${toString doCoverityAnalysis}" ]; then
+        shopt -s expand_aliases
+        mkdir _coverity_$name
+        alias make="cov-build --dir _coverity_$name/cov-int make"
+      fi
+
+      # Perform Clang Analysis
+      if [ ! -z "${toString doClangAnalysis}" ]; then
+        shopt -s expand_aliases
+        alias make="scan-build -o _clang_analyze_$name --html-title='Scan results for $name' make"
+      fi
+
+      ${preHook}
+    '';
+
+    # Clean up after analysis
+    postBuild = ''
+      if [ ! -z "${toString (doCoverityAnalysis || doClangAnalysis)}" ]; then
+        unalias make
+      fi
+    '';
+
+    initPhase = ''
+      mkdir -p $out/nix-support
+      echo "$system" > $out/nix-support/system
+
+      if [ -z "${toString doingAnalysis}" ]; then
+          for i in $outputs; do
+              if [ "$i" = out ]; then j=none; else j="$i"; fi
+              mkdir -p ''${!i}/nix-support
+              echo "nix-build $j ''${!i}" >> ''${!i}/nix-support/hydra-build-products
+          done
+      fi
+    '';
+
+    prePhases = ["initPhase"] ++ prePhases;
+
+    buildInputs =
+      buildInputs ++
+      (lib.optional doCoverageAnalysis args.makeGCOVReport) ++
+      (lib.optional doClangAnalysis args.clang-analyzer) ++
+      (lib.optional doCoverityAnalysis args.cov-build) ++
+      (lib.optional doCoverityAnalysis args.xz);
+
+    lcovFilter = ["/nix/store/*"] ++ lcovFilter;
+
+    inherit lcovExtraTraceFiles;
+
+    postPhases = postPhases ++ ["finalPhase"];
+
+    meta = (if args ? meta then args.meta else {}) // {
+      description = if doCoverageAnalysis then "Coverage analysis" else "Nix package for ${stdenv.hostPlatform.system}";
+    };
+
+  }
+
+  //
+
+  (if buildOutOfSourceTree
+   then {
+     preConfigure =
+       # Build out of source tree and make the source tree read-only.  This
+       # helps catch violations of the GNU Coding Standards (info
+       # "(standards) Configuration"), like `make distcheck' does.
+       '' mkdir "../build"
+          cd "../build"
+          configureScript="../$sourceRoot/configure"
+          chmod -R a-w "../$sourceRoot"
+
+          echo "building out of source tree, from \`$PWD'..."
+
+          ${if preConfigure != null then preConfigure else ""}
+       '';
+   }
+   else {})
+)
diff --git a/nixpkgs/pkgs/build-support/release/rpm-build.nix b/nixpkgs/pkgs/build-support/release/rpm-build.nix
new file mode 100644
index 000000000000..47c01f2e66b2
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/rpm-build.nix
@@ -0,0 +1,54 @@
+# This function builds an RPM from a source tarball that contains an
+# RPM spec file (i.e., one that can be built using `rpmbuild -ta').
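+#
+# A usage sketch via the rpmBuild wrapper in ./default.nix (the disk
+# image attribute is hypothetical; any RPM-based image from vmTools
+# works):
+#
+#   rpmBuild {
+#     name = "my-package";
+#     src = mySourceTarball;
+#     diskImage = vmTools.diskImages.fedora27x86_64;
+#   }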
+
+{ name ? "rpm-build"
+, diskImage
+, src, vmTools
+, ... } @ args:
+
+vmTools.buildRPM (
+
+  removeAttrs args ["vmTools"] //
+
+  {
+    name = name + "-" + diskImage.name + (if src ? version then "-" + src.version else "");
+
+    preBuild = ''
+      . ${./functions.sh}
+      propagateImageName
+      src=$(findTarball $src)
+    '';
+
+    postInstall = ''
+      declare -a rpms rpmNames
+      for i in $out/rpms/*/*.rpm; do
+        if echo $i | grep -vq "\.src\.rpm$"; then
+          echo "file rpm $i" >> $out/nix-support/hydra-build-products
+          rpms+=($i)
+          rpmNames+=("$(rpm -qp "$i")")
+        fi
+      done
+
+      echo "installing ''${rpms[*]}..."
+      rpm -Up ''${rpms[*]} --excludepath /nix/store
+
+      eval "$postRPMInstall"
+
+      echo "uninstalling ''${rpmNames[*]}..."
+      rpm -e ''${rpmNames[*]} --nodeps
+
+      for i in $out/rpms/*/*.src.rpm; do
+        echo "file srpm $i" >> $out/nix-support/hydra-build-products
+      done
+
+      for rpmdir in $extraRPMs ; do
+        echo "file rpm-extra $(ls $rpmdir/rpms/*/*.rpm | grep -v 'src\.rpm' | sort | head -1)" >> $out/nix-support/hydra-build-products
+      done
+    '';
+
+    meta = (if args ? meta then args.meta else {}) // {
+      description = "RPM package for ${diskImage.fullName}";
+    };
+  }
+
+)
diff --git a/nixpkgs/pkgs/build-support/release/source-tarball.nix b/nixpkgs/pkgs/build-support/release/source-tarball.nix
new file mode 100644
index 000000000000..c7129ae83f92
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/release/source-tarball.nix
@@ -0,0 +1,130 @@
+# This function converts an un-Autoconfed source tree (typically a
+# checkout from a Subversion or CVS repository) into a source tarball
+# by running `autoreconf', `configure' and `make dist'.
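+#
+# A usage sketch via the sourceTarball wrapper in ./default.nix
+# (attribute values are illustrative):
+#
+#   sourceTarball {
+#     name = "my-project-tarball";
+#     version = "1.0";
+#     src = ./.;
+#   }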
+
+{ officialRelease ? false
+, buildInputs ? []
+, name ? "source-tarball"
+, version ? "0"
+, versionSuffix ?
+    if officialRelease
+    then ""
+    else "pre${toString (src.rev or src.revCount or "")}"
+, src, stdenv, autoconf, automake, libtool
+, # By default, provide all the GNU Build System as input.
+  bootstrapBuildInputs ? [ autoconf automake libtool ]
+, ... } @ args:
+
+stdenv.mkDerivation (
+
+  # First, attributes that can be overridden by the caller (via args):
+  {
+    # By default, only configure and build a source distribution.
+    # Some packages can only build a distribution after a general
+    # `make' (or even `make install').
+    dontBuild = true;
+    dontInstall = true;
+    doDist = true;
+
+    # If we do install, install to a dummy location.
+    useTempPrefix = true;
+
+    showBuildStats = true;
+
+    preConfigurePhases = "autoconfPhase";
+    postPhases = "finalPhase";
+
+    # Autoconfiscate the sources.
+    autoconfPhase = ''
+      export VERSION=${version}
+      export VERSION_SUFFIX=${versionSuffix}
+
+      # `svn-revision' is set for backwards compatibility with the old
+      # Nix buildfarm.  (Stratego/XT's autoxt uses it.  We should
+      # update it eventually.)
+      echo ${versionSuffix} | sed -e s/pre// > svn-revision
+
+      eval "$preAutoconf"
+
+      if test -x ./bootstrap && test -f ./bootstrap; then ./bootstrap
+      elif test -x ./bootstrap.sh; then ./bootstrap.sh
+      elif test -x ./autogen.sh; then ./autogen.sh
+      elif test -x ./autogen ; then ./autogen
+      elif test -x ./reconf; then ./reconf
+      elif test -f ./configure.in || test -f ./configure.ac; then
+          autoreconf --install --force --verbose
+      else
+          echo "No bootstrap, bootstrap.sh, configure.in or configure.ac. Assuming this is not an GNU Autotools package."
+      fi
+
+      eval "$postAutoconf"
+    '';
+
+    failureHook = ''
+      if test -n "$succeedOnFailure"; then
+          if test -n "$keepBuildDirectory"; then
+              KEEPBUILDDIR="$out/`basename $TMPDIR`"
+              header "Copying build directory to $KEEPBUILDDIR"
+              mkdir -p $KEEPBUILDDIR
+              cp -R "$TMPDIR/"* $KEEPBUILDDIR
+              stopNest
+          fi
+      fi
+    '';
+  }
+
+  # Then, the caller-supplied attributes.
+  // args //
+
+  # And finally, our own stuff.
+  {
+    name = name + "-" + version + versionSuffix;
+
+    buildInputs = buildInputs ++ bootstrapBuildInputs;
+
+    preUnpack = ''
+      mkdir -p $out/nix-support
+    '';
+
+    postUnpack = ''
+      # Set all source files to the current date.  This is because Nix
+      # resets the timestamp on all files to 0 (1/1/1970), which some
+      # people don't like (in particular GNU tar prints harmless but
+      # frightening warnings about it).
+      touch now
+      touch -d "1970-01-01 00:00:00 UTC" then
+      find $sourceRoot ! -newer then -print0 | xargs -0r touch --reference now
+      rm now then
+      eval "$nextPostUnpack"
+    '';
+
+    nextPostUnpack = if args ? postUnpack then args.postUnpack else "";
+
+    # Cause distPhase to copy tar.bz2 in addition to tar.gz.
+    tarballs = "*.tar.gz *.tar.bz2 *.tar.xz";
+
+    finalPhase = ''
+      for i in "$out/tarballs/"*; do
+          echo "file source-dist $i" >> $out/nix-support/hydra-build-products
+      done
+
+      # Try to figure out the release name.
+      releaseName=$( (cd $out/tarballs && ls) | head -n 1 | sed -e 's^\.[a-z].*^^')
+      test -n "$releaseName" && (echo "$releaseName" >> $out/nix-support/hydra-release-name)
+    '';
+
+    passthru = {
+      inherit src;
+      version = version + versionSuffix;
+    };
+
+    meta = (if args ? meta then args.meta else {}) // {
+      description = "Source distribution";
+
+      # Tarball builds are generally important, so give them a high
+      # default priority.
+      schedulingPriority = 200;
+    };
+  }
+
+)
diff --git a/nixpkgs/pkgs/build-support/remove-references-to/darwin-sign-fixup.sh b/nixpkgs/pkgs/build-support/remove-references-to/darwin-sign-fixup.sh
new file mode 100644
index 000000000000..940c18e5a627
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/remove-references-to/darwin-sign-fixup.sh
@@ -0,0 +1,5 @@
+# Fixup hook for nukeReferences, not stdenv
+
+source @signingUtils@
+
+fixupHooks+=(signIfRequired)
diff --git a/nixpkgs/pkgs/build-support/remove-references-to/default.nix b/nixpkgs/pkgs/build-support/remove-references-to/default.nix
new file mode 100644
index 000000000000..f022611ef913
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/remove-references-to/default.nix
@@ -0,0 +1,35 @@
+# The program `remove-references-to' created by this derivation replaces all
+# references to the given Nix store paths in the specified files by a
+# non-existent path (/nix/store/eeee...).  This is useful for getting rid of
+# dependencies that you know are not actually needed at runtime.
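+#
+# A usage sketch (the target and file are illustrative); typically run
+# from a derivation's fixup phase:
+#
+#   remove-references-to -t ${openssl.dev} $out/bin/my-program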
+
+{ lib, stdenvNoCC, signingUtils, shell ? stdenvNoCC.shell }:
+
+let
+  stdenv = stdenvNoCC;
+
+  darwinCodeSign = stdenv.targetPlatform.isDarwin && stdenv.targetPlatform.isAarch64;
+in
+
+stdenv.mkDerivation {
+  name = "remove-references-to";
+
+  dontUnpack = true;
+  dontConfigure = true;
+  dontBuild = true;
+
+  installPhase = ''
+    mkdir -p $out/bin
+    substituteAll ${./remove-references-to.sh} $out/bin/remove-references-to
+    chmod a+x $out/bin/remove-references-to
+  '';
+
+  postFixup = lib.optionalString darwinCodeSign ''
+    mkdir -p $out/nix-support
+    substituteAll ${./darwin-sign-fixup.sh} $out/nix-support/setup-hooks.sh
+  '';
+
+  inherit (builtins) storeDir;
+  shell = lib.getBin shell + (shell.shellPath or "");
+  signingUtils = if darwinCodeSign then signingUtils else null;
+}
diff --git a/nixpkgs/pkgs/build-support/remove-references-to/remove-references-to.sh b/nixpkgs/pkgs/build-support/remove-references-to/remove-references-to.sh
new file mode 100644
index 000000000000..d8d38dbd80a9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/remove-references-to/remove-references-to.sh
@@ -0,0 +1,37 @@
+#! @shell@ -e
+
+fixupHooks=()
+
+if [ -e @out@/nix-support/setup-hooks.sh ]; then
+    source @out@/nix-support/setup-hooks.sh
+fi
+
+# References to remove
+targets=()
+while getopts t: o; do
+    case "$o" in
+        t) storeId=$(echo "$OPTARG" | sed -n "s|^@storeDir@/\\([a-z0-9]\{32\}\\)-.*|\1|p")
+           if [ -z "$storeId" ]; then
+               echo "-t argument must be a Nix store path"
+               exit 1
+           fi
+           targets+=("$storeId")
+    esac
+done
+shift $(($OPTIND-1))
+
+# Files to remove the references from
+regions=()
+for i in "$@"; do
+    test ! -L "$i" -a -f "$i" && regions+=("$i")
+done
+
+for target in "${targets[@]}" ; do
+    sed -i -e "s|@storeDir@/$target-|@storeDir@/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-|g" "${regions[@]}"
+done
+
+for region in "${regions[@]}"; do
+    for hook in "${fixupHooks[@]}"; do
+        eval "$hook" "$i"
+    done
+done
diff --git a/nixpkgs/pkgs/build-support/replace-dependency.nix b/nixpkgs/pkgs/build-support/replace-dependency.nix
new file mode 100644
index 000000000000..15ab50bf3974
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/replace-dependency.nix
@@ -0,0 +1,83 @@
+{ runCommand, nix, lib }:
+
+# Replace a single dependency in the requisites tree of drv, propagating
+# the change all the way up the tree, without a full rebuild. This can be
+# useful, for example, to patch a security hole in libc and still use your
+# system safely without rebuilding the world. This should be a short term
+# solution, as soon as a rebuild can be done the properly rebuild derivation
+# should be used. The old dependency and new dependency MUST have the same-length
+# name, and ideally should have close-to-identical directory layout.
+#
+# Example: safeFirefox = replaceDependency {
+#   drv = firefox;
+#   oldDependency = glibc;
+#   newDependency = overrideDerivation glibc (attrs: {
+#     patches  = attrs.patches ++ [ ./fix-glibc-hole.patch ];
+#   });
+# };
+# This will rebuild glibc with your security patch, then copy over firefox
+# (and all of its dependencies) without rebuilding further.
+{ drv, oldDependency, newDependency, verbose ? true }:
+
+with lib;
+
+let
+  warn = if verbose then builtins.trace else (x: y: y);
+  references = import (runCommand "references.nix" { exportReferencesGraph = [ "graph" drv ]; } ''
+    (echo {
+    while read path
+    do
+        echo "  \"$path\" = ["
+        read count
+        read count
+        while [ "0" != "$count" ]
+        do
+            read ref_path
+            if [ "$ref_path" != "$path" ]
+            then
+                echo "    (builtins.storePath $ref_path)"
+            fi
+            count=$(($count - 1))
+        done
+        echo "  ];"
+    done < graph
+    echo }) > $out
+  '').outPath;
+
+  discard = builtins.unsafeDiscardStringContext;
+
+  oldStorepath = builtins.storePath (discard (toString oldDependency));
+
+  referencesOf = drv: references.${discard (toString drv)};
+
+  dependsOnOldMemo = listToAttrs (map
+    (drv: { name = discard (toString drv);
+            value = elem oldStorepath (referencesOf drv) ||
+                    any dependsOnOld (referencesOf drv);
+          }) (builtins.attrNames references));
+
+  dependsOnOld = drv: dependsOnOldMemo.${discard (toString drv)};
+
+  drvName = drv:
+    discard (substring 33 (stringLength (builtins.baseNameOf drv)) (builtins.baseNameOf drv));
+
+  rewriteHashes = drv: hashes: runCommand (drvName drv) { nixStore = "${nix.out}/bin/nix-store"; } ''
+    $nixStore --dump ${drv} | sed 's|${baseNameOf drv}|'$(basename $out)'|g' | sed -e ${
+      concatStringsSep " -e " (mapAttrsToList (name: value:
+        "'s|${baseNameOf name}|${baseNameOf value}|g'"
+      ) hashes)
+    } | $nixStore --restore $out
+  '';
+
+  rewrittenDeps = listToAttrs [ {name = discard (toString oldDependency); value = newDependency;} ];
+
+  rewriteMemo = listToAttrs (map
+    (drv: { name = discard (toString drv);
+            value = rewriteHashes (builtins.storePath drv)
+              (filterAttrs (n: v: builtins.elem (builtins.storePath (discard (toString n))) (referencesOf drv)) rewriteMemo);
+          })
+    (filter dependsOnOld (builtins.attrNames references))) // rewrittenDeps;
+
+  drvHash = discard (toString drv);
+in assert (stringLength (drvName (toString oldDependency)) == stringLength (drvName (toString newDependency)));
+rewriteMemo.${drvHash} or (warn "replace-dependency.nix: Derivation ${drvHash} does not depend on ${discard (toString oldDependency)}" drv)
diff --git a/nixpkgs/pkgs/build-support/replace-secret/replace-secret.nix b/nixpkgs/pkgs/build-support/replace-secret/replace-secret.nix
new file mode 100644
index 000000000000..e04d1aed5f70
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/replace-secret/replace-secret.nix
@@ -0,0 +1,35 @@
+{ stdenv, lib, python3 }:
+
+stdenv.mkDerivation {
+  name = "replace-secret";
+  buildInputs = [ python3 ];
+  phases = [ "installPhase" "checkPhase" ];
+  installPhase = ''
+    install -D ${./replace-secret.py} $out/bin/replace-secret
+    patchShebangs $out
+  '';
+  doCheck = true;
+  checkPhase = ''
+    install -m 0600 ${./test/input_file} long_test
+    $out/bin/replace-secret "replace this" ${./test/passwd} long_test
+    $out/bin/replace-secret "and this" ${./test/rsa} long_test
+    diff ${./test/expected_long_output} long_test
+
+    install -m 0600 ${./test/input_file} short_test
+    $out/bin/replace-secret "replace this" <(echo "a") short_test
+    $out/bin/replace-secret "and this" <(echo "b") short_test
+    diff ${./test/expected_short_output} short_test
+  '';
+  meta = with lib; {
+    platforms = platforms.all;
+    maintainers = with maintainers; [ talyz ];
+    license = licenses.mit;
+    description = "Replace a string in one file with a secret from a second file";
+    longDescription = ''
+      Replace a string in one file with a secret from a second file.
+
+      Since the secret is read from a file, it won't be leaked through
+      '/proc/<pid>/cmdline', unlike when 'sed' or 'replace' is used.
+    '';
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/replace-secret/replace-secret.py b/nixpkgs/pkgs/build-support/replace-secret/replace-secret.py
new file mode 100755
index 000000000000..30ff41d491ba
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/replace-secret/replace-secret.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+import argparse
+from argparse import RawDescriptionHelpFormatter
+
+description = """
+Replace a string in one file with a secret from a second file.
+
+Since the secret is read from a file, it won't be leaked through
+'/proc/<pid>/cmdline', unlike when 'sed' or 'replace' is used.
+"""
+
+parser = argparse.ArgumentParser(
+    description=description,
+    formatter_class=RawDescriptionHelpFormatter
+)
+parser.add_argument("string_to_replace", help="the string to replace")
+parser.add_argument("secret_file", help="the file containing the secret")
+parser.add_argument("file", help="the file to perform the replacement on")
+args = parser.parse_args()
+
+with open(args.secret_file) as sf, open(args.file, 'r+') as f:
+    old = f.read()
+    secret = sf.read().strip("\n")
+    new_content = old.replace(args.string_to_replace, secret)
+    f.seek(0)
+    f.write(new_content)
+    f.truncate()
diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/expected_long_output b/nixpkgs/pkgs/build-support/replace-secret/test/expected_long_output
new file mode 100644
index 000000000000..37bd66b905f5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/replace-secret/test/expected_long_output
@@ -0,0 +1,30 @@
+beginning
+middle $6$UcbJUl5g$HRMfKNKsLTfVbcQb.P5o0bmZUfHDYkWseMSuZ8F5jSIGZZcI3Jnit23f8ZeZOGi4KL86HVM9RYqrpYySOu/fl0 not this
+-----BEGIN RSA PRIVATE KEY-----
+MIIEowIBAAKCAQEAzrru6v5tfwQl6L+rOUjtLo8kbhMUlCLXP7TYngSGrkzPMWe+
+0gB04UAmiPZXfBmvj5fPqYiFjIaEDHE/SD41vJB/RJKKtId2gCAIHhBLkbr+4+60
+yEbLkJci5i4kJC1dt8OKFEzXkaVnwOSgjH+0NwO3bstZ+E70zMXS9+NS71qGsIEb
+5J1TnacwW/u6CdFyakLljWOXOR14rLIpiPBBFLf+oZiepjIhlWXWHqsxZOb7zMI0
+T4W5WJ2dwGFsJ8rkYaGZ+A5qzYbi/KmHqaSPaNDsyoi7yJhAhKPByALJU916+8QO
+xOnqZxWGki3PDzCslRwW4i3mGbZlBQMnlfbN3QIDAQABAoIBAHDn1W7QkFrLmCy6
+6bf6pVdFZF8d2qJhOPAZRClhTXFKj+pqv+QPzcXr9F/fMr6bhK/G+Oqdnlq2aM4m
+16oMF+spe+impEyeo1CsreJFghBQcb9o8qFjUPBiKvROBP0hLcscZ4BYy29HSBgo
+harWYEWfqQJA251q+fYQoP0z0WrZKddOZbRRnJ0ICRxAE7IEtDT6EYt8R9oGi2j4
+/rpdW+rYGjW3TcmzdR7lpVMJRLlbMbSdR8n6cI6rnfySygcoE5tFX5t/YZSNbBPg
+GebKCbEHYNTTG8bC1qjUyzlbEQ6XYWvFO7HTKU7105XpjYTQFByeo0IVkin0o5KW
+t7eQWb0CgYEA6zZUWsYoQ13nXEU6Ky89Q9uhesMfaJ/F2X5ikQSRqRvrR3QR+ULe
+eNnCl10O9SiFpR4b5gSbLSHMffxGN60P1nEO4CiIKE+gOii8Kdk5htIJFy/dcZUc
+PuPM+zD9/6Is5sAWUZo45bnT6685h6EjM2+6zNZtx/XMjSfWbHaY+HMCgYEA4QAy
+6ZEgd6FHnNfM/q2o8XU3d6OCdhcu26u6ydnCalbSpPSKWOi6gnHK4ZnGdryXgIYw
+hRkvYINfiONkShYytotIh4YxUbgpwdvJRyKa2ZdWhcMmtFzZOcEVzQTKBasFT74C
+Wo0iybZ++XZh3M0+n7oyyx39aR7diZ+/zq6PnG8CgYB8B1QH4cHNdDDRqPd5WhmW
+NLQ7xbREOSvc+hYDnkMoxz4TmZL4u1gQpdNEeZ+visSeQvg3HGqvK8lnDaYBKdLW
+IxvS+8yAZSx6PoyqDI+XFh4RCf5dLGGOkBTAyB7Hs761lsiuEwK5sHmdJ/LQIBot
+v1bjOJb/AA/yxvT8kLUtHQKBgGIA9iwqXJv/EfRNQytDdS0HQ4vHGtJZMr3YRVoa
+kcZD3yieo4wqguLCsf4mPv4FE3CWAphW6f39+yTi9xIWLSy56nOtjdnsf7PDCh8E
+AbL5amSFJly1fKDda6OLjHt/jKa5Osk6ZIa8CP6cA/BrLfXg4rL6cyDQouqJPMDH
+5CHdAoGBAIChjbTyoYvANkoANCK4SuqLUYeiYREfiM3sqHe1xirK1PPHw03ZLITl
+ltjo9qE6kPXWcTBVckTKGFlntyCT283FC0/vMmHo8dTdtxF4/wSbkqs3ORuJ3p5J
+cNtLYGD3vgwLmg6tTur4U60XN+tYDzWGteez8J9GwTMfKJmuS9af
+-----END RSA PRIVATE KEY-----
+end
diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/expected_short_output b/nixpkgs/pkgs/build-support/replace-secret/test/expected_short_output
new file mode 100644
index 000000000000..3c81b2e2f991
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/replace-secret/test/expected_short_output
@@ -0,0 +1,4 @@
+beginning
+middle a not this
+b
+end
diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/input_file b/nixpkgs/pkgs/build-support/replace-secret/test/input_file
new file mode 100644
index 000000000000..1e7eadfaab20
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/replace-secret/test/input_file
@@ -0,0 +1,4 @@
+beginning
+middle replace this not this
+and this
+end
diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/passwd b/nixpkgs/pkgs/build-support/replace-secret/test/passwd
new file mode 100644
index 000000000000..68f266226e4a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/replace-secret/test/passwd
@@ -0,0 +1 @@
+$6$UcbJUl5g$HRMfKNKsLTfVbcQb.P5o0bmZUfHDYkWseMSuZ8F5jSIGZZcI3Jnit23f8ZeZOGi4KL86HVM9RYqrpYySOu/fl0
diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/rsa b/nixpkgs/pkgs/build-support/replace-secret/test/rsa
new file mode 100644
index 000000000000..138cc99ed225
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/replace-secret/test/rsa
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEowIBAAKCAQEAzrru6v5tfwQl6L+rOUjtLo8kbhMUlCLXP7TYngSGrkzPMWe+
+0gB04UAmiPZXfBmvj5fPqYiFjIaEDHE/SD41vJB/RJKKtId2gCAIHhBLkbr+4+60
+yEbLkJci5i4kJC1dt8OKFEzXkaVnwOSgjH+0NwO3bstZ+E70zMXS9+NS71qGsIEb
+5J1TnacwW/u6CdFyakLljWOXOR14rLIpiPBBFLf+oZiepjIhlWXWHqsxZOb7zMI0
+T4W5WJ2dwGFsJ8rkYaGZ+A5qzYbi/KmHqaSPaNDsyoi7yJhAhKPByALJU916+8QO
+xOnqZxWGki3PDzCslRwW4i3mGbZlBQMnlfbN3QIDAQABAoIBAHDn1W7QkFrLmCy6
+6bf6pVdFZF8d2qJhOPAZRClhTXFKj+pqv+QPzcXr9F/fMr6bhK/G+Oqdnlq2aM4m
+16oMF+spe+impEyeo1CsreJFghBQcb9o8qFjUPBiKvROBP0hLcscZ4BYy29HSBgo
+harWYEWfqQJA251q+fYQoP0z0WrZKddOZbRRnJ0ICRxAE7IEtDT6EYt8R9oGi2j4
+/rpdW+rYGjW3TcmzdR7lpVMJRLlbMbSdR8n6cI6rnfySygcoE5tFX5t/YZSNbBPg
+GebKCbEHYNTTG8bC1qjUyzlbEQ6XYWvFO7HTKU7105XpjYTQFByeo0IVkin0o5KW
+t7eQWb0CgYEA6zZUWsYoQ13nXEU6Ky89Q9uhesMfaJ/F2X5ikQSRqRvrR3QR+ULe
+eNnCl10O9SiFpR4b5gSbLSHMffxGN60P1nEO4CiIKE+gOii8Kdk5htIJFy/dcZUc
+PuPM+zD9/6Is5sAWUZo45bnT6685h6EjM2+6zNZtx/XMjSfWbHaY+HMCgYEA4QAy
+6ZEgd6FHnNfM/q2o8XU3d6OCdhcu26u6ydnCalbSpPSKWOi6gnHK4ZnGdryXgIYw
+hRkvYINfiONkShYytotIh4YxUbgpwdvJRyKa2ZdWhcMmtFzZOcEVzQTKBasFT74C
+Wo0iybZ++XZh3M0+n7oyyx39aR7diZ+/zq6PnG8CgYB8B1QH4cHNdDDRqPd5WhmW
+NLQ7xbREOSvc+hYDnkMoxz4TmZL4u1gQpdNEeZ+visSeQvg3HGqvK8lnDaYBKdLW
+IxvS+8yAZSx6PoyqDI+XFh4RCf5dLGGOkBTAyB7Hs761lsiuEwK5sHmdJ/LQIBot
+v1bjOJb/AA/yxvT8kLUtHQKBgGIA9iwqXJv/EfRNQytDdS0HQ4vHGtJZMr3YRVoa
+kcZD3yieo4wqguLCsf4mPv4FE3CWAphW6f39+yTi9xIWLSy56nOtjdnsf7PDCh8E
+AbL5amSFJly1fKDda6OLjHt/jKa5Osk6ZIa8CP6cA/BrLfXg4rL6cyDQouqJPMDH
+5CHdAoGBAIChjbTyoYvANkoANCK4SuqLUYeiYREfiM3sqHe1xirK1PPHw03ZLITl
+ltjo9qE6kPXWcTBVckTKGFlntyCT283FC0/vMmHo8dTdtxF4/wSbkqs3ORuJ3p5J
+cNtLYGD3vgwLmg6tTur4U60XN+tYDzWGteez8J9GwTMfKJmuS9af
+-----END RSA PRIVATE KEY-----
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/build-crate.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/build-crate.nix
new file mode 100644
index 000000000000..df3129d536dc
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/build-crate.nix
@@ -0,0 +1,112 @@
+{ lib, stdenv, mkRustcDepArgs, mkRustcFeatureArgs, rust }:
+{ crateName,
+  dependencies,
+  crateFeatures, crateRenames, libName, release, libPath,
+  crateType, metadata, crateBin, hasCrateBin,
+  extraRustcOpts, verbose, colors,
+  buildTests
+}:
+
+  let
+    baseRustcOpts =
+      [
+        (if release then "-C opt-level=3" else "-C debuginfo=2")
+        "-C codegen-units=$NIX_BUILD_CORES"
+        "-C incremental=no"
+        "--remap-path-prefix=$NIX_BUILD_TOP=/"
+        (mkRustcDepArgs dependencies crateRenames)
+        (mkRustcFeatureArgs crateFeatures)
+      ] ++ extraRustcOpts
+      ++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "--target ${rust.toRustTargetSpec stdenv.hostPlatform} -C linker=${stdenv.hostPlatform.config}-gcc"
+      # since rustc 1.42 the "proc_macro" crate is part of the default crate prelude
+      # https://github.com/rust-lang/cargo/commit/4d64eb99a4#diff-7f98585dbf9d30aa100c8318e2c77e79R1021-R1022
+      ++ lib.optional (lib.elem "proc-macro" crateType) "--extern proc_macro"
+    ;
+    rustcMeta = "-C metadata=${metadata} -C extra-filename=-${metadata}";
+
+
+    # build the final rustc arguments that can be different between different
+    # crates
+    libRustcOpts = lib.concatStringsSep " " (
+      baseRustcOpts
+      ++ [rustcMeta]
+      ++ (map (x: "--crate-type ${x}") crateType)
+    );
+
+    binRustcOpts = lib.concatStringsSep " " (
+      baseRustcOpts
+    );
+
+    build_bin = if buildTests then "build_bin_test" else "build_bin";
+  in ''
+    runHook preBuild
+
+    # configure & source common build functions
+    LIB_RUSTC_OPTS="${libRustcOpts}"
+    BIN_RUSTC_OPTS="${binRustcOpts}"
+    LIB_EXT="${stdenv.hostPlatform.extensions.sharedLibrary}"
+    LIB_PATH="${libPath}"
+    LIB_NAME="${libName}"
+
+    CRATE_NAME='${lib.replaceStrings ["-"] ["_"] libName}'
+
+    setup_link_paths
+
+    if [[ -e "$LIB_PATH" ]]; then
+       build_lib "$LIB_PATH"
+       ${lib.optionalString buildTests ''build_lib_test "$LIB_PATH"''}
+    elif [[ -e src/lib.rs ]]; then
+       build_lib src/lib.rs
+       ${lib.optionalString buildTests "build_lib_test src/lib.rs"}
+    fi
+
+
+
+    ${lib.optionalString (lib.length crateBin > 0) (lib.concatMapStringsSep "\n" (bin: ''
+      mkdir -p target/bin
+      BIN_NAME='${bin.name or crateName}'
+      ${if !bin ? path then ''
+        BIN_PATH=""
+        search_for_bin_path "$BIN_NAME"
+      '' else ''
+        BIN_PATH='${bin.path}'
+      ''}
+        ${build_bin} "$BIN_NAME" "$BIN_PATH"
+    '') crateBin)}
+
+    ${lib.optionalString buildTests ''
+    # When tests are enabled build all the files in the `tests` directory as
+    # test binaries.
+    if [ -d tests ]; then
+      # find all the .rs files (or symlinks to those) in the tests directory, no subdirectories
+      find tests -maxdepth 1 \( -type f -o -type l \) -a -name '*.rs' -print0 | while IFS= read -r -d ''' file; do
+        mkdir -p target/bin
+        build_bin_test_file "$file"
+      done
+
+      # find all the subdirectories of tests/ that contain a main.rs file as
+      # that is also a test according to cargo
+      find tests/ -mindepth 1 -maxdepth 2 \( -type f -o -type l \) -a -name 'main.rs' -print0 | while IFS= read -r -d ''' file; do
+        mkdir -p target/bin
+        build_bin_test_file "$file"
+      done
+
+    fi
+    ''}
+
+    # If crateBin is empty and hasCrateBin is not set then we must try to
+    # detect some kind of bin target based on some files that might exist.
+    ${lib.optionalString (lib.length crateBin == 0 && !hasCrateBin) ''
+      if [[ -e src/main.rs ]]; then
+        mkdir -p target/bin
+        ${build_bin} ${crateName} src/main.rs
+      fi
+      for i in src/bin/*.rs; do #*/
+        mkdir -p target/bin
+        ${build_bin} "$(basename $i .rs)" "$i"
+      done
+    ''}
+    # Remove object files to avoid "wrong ELF type"
+    find target -type f -name "*.o" -print0 | xargs -0 rm -f
+    runHook postBuild
+  ''
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/configure-crate.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/configure-crate.nix
new file mode 100644
index 000000000000..3eaba1736fcd
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/configure-crate.nix
@@ -0,0 +1,201 @@
+{ lib, stdenv, rust, echo_colored, noisily, mkRustcDepArgs, mkRustcFeatureArgs }:
+{
+  build
+, buildDependencies
+, colors
+, completeBuildDeps
+, completeDeps
+, crateAuthors
+, crateDescription
+, crateHomepage
+, crateFeatures
+, crateName
+, crateRenames
+, crateVersion
+, extraLinkFlags
+, extraRustcOpts
+, libName
+, libPath
+, release
+, verbose
+, workspace_member }:
+let version_ = lib.splitString "-" crateVersion;
+    versionPre = if lib.tail version_ == [] then "" else lib.elemAt version_ 1;
+    version = lib.splitVersion (lib.head version_);
+    rustcOpts = lib.foldl' (opts: opt: opts + " " + opt)
+        (if release then "-C opt-level=3" else "-C debuginfo=2")
+        (["-C codegen-units=$NIX_BUILD_CORES -C incremental=no"] ++ extraRustcOpts);
+    buildDeps = mkRustcDepArgs buildDependencies crateRenames;
+    authors = lib.concatStringsSep ":" crateAuthors;
+    optLevel = if release then 3 else 0;
+    completeDepsDir = lib.concatStringsSep " " completeDeps;
+    completeBuildDepsDir = lib.concatStringsSep " " completeBuildDeps;
+    envFeatures = lib.concatStringsSep " " (
+      map (f: lib.replaceChars ["-"] ["_"] (lib.toUpper f)) crateFeatures
+    );
+in ''
+  ${echo_colored colors}
+  ${noisily colors verbose}
+  source ${./lib.sh}
+
+  ${lib.optionalString (workspace_member != null) ''
+  noisily cd "${workspace_member}"
+''}
+  ${lib.optionalString (workspace_member == null) ''
+  echo_colored "Searching for matching Cargo.toml (${crateName})"
+  local cargo_toml_dir=$(matching_cargo_toml_dir "${crateName}")
+  if [ -z "$cargo_toml_dir" ]; then
+    echo_error "ERROR configuring ${crateName}: No matching Cargo.toml in $(pwd) found." >&2
+    exit 23
+  fi
+  noisily cd "$cargo_toml_dir"
+''}
+
+  runHook preConfigure
+
+  symlink_dependency() {
+    # $1 is the nix-store path of a dependency
+    # $2 is the target path
+    i=$1
+    ln -s -f $i/lib/*.rlib $2 #*/
+    ln -s -f $i/lib/*.so $i/lib/*.dylib $2 #*/
+    if [ -e $i/env ]; then
+        source $i/env
+    fi
+  }
+
+  # The following steps set up the dependencies of the crate. Two
+  # kinds of dependencies are distinguished: build dependencies
+  # (used by the build script) and crate dependencies. For each
+  # dependency we have to:
+  #
+  # - Make its Rust library available to rustc. This is done by
+  #   symlinking all library dependencies into a directory that
+  #   can be provided to rustc.
+  # - Accumulate linking flags. These flags are largely used for
+  #   linking native libraries.
+  #
+  # The crate link flags are added to the `link` and `link.final`
+  # files. The `link` file is used for linkage in the current
+  # crate. The `link.final` file will be copied to the output and can
+  # be used by downstream crates to get the linker flags of this
+  # crate.
+
+  mkdir -p target/{deps,lib,build,buildDeps}
+  chmod uga+w target -R
+  echo ${extraLinkFlags} > target/link
+  echo ${extraLinkFlags} > target/link.final
+
+  # Prepare crate dependencies
+  for i in ${completeDepsDir}; do
+    symlink_dependency $i target/deps
+    if [ -e "$i/lib/link" ]; then
+      cat $i/lib/link >> target/link
+      cat $i/lib/link >> target/link.final
+    fi
+  done
+
+  # Prepare crate build dependencies that are used for the build script.
+  for i in ${completeBuildDepsDir}; do
+    symlink_dependency $i target/buildDeps
+    if [ -e "$i/lib/link" ]; then
+      cat $i/lib/link >> target/link.build
+    fi
+  done
+
+  # Remove duplicate linker flags from the build dependencies.
+  if [[ -e target/link.build ]]; then
+    sort -uo target/link.build target/link.build
+  fi
+
+  # Remove duplicate linker flags from the dependencies.
+  sort -uo target/link target/link
+  tr '\n' ' ' < target/link > target/link_
+
+  # Remove duplicate linker flags from the flags that are written
+  # to the derivation's output.
+  sort -uo target/link.final target/link.final
+
+  EXTRA_BUILD=""
+  BUILD_OUT_DIR=""
+  export CARGO_PKG_NAME=${crateName}
+  export CARGO_PKG_VERSION=${crateVersion}
+  export CARGO_PKG_AUTHORS="${authors}"
+  export CARGO_PKG_DESCRIPTION="${crateDescription}"
+
+  export CARGO_CFG_TARGET_ARCH=${rust.toTargetArch stdenv.hostPlatform}
+  export CARGO_CFG_TARGET_OS=${rust.toTargetOs stdenv.hostPlatform}
+  export CARGO_CFG_TARGET_FAMILY="unix"
+  export CARGO_CFG_UNIX=1
+  export CARGO_CFG_TARGET_ENV="gnu"
+  export CARGO_CFG_TARGET_ENDIAN=${if stdenv.hostPlatform.parsed.cpu.significantByte.name == "littleEndian" then "little" else "big"}
+  export CARGO_CFG_TARGET_POINTER_WIDTH=${toString stdenv.hostPlatform.parsed.cpu.bits}
+  export CARGO_CFG_TARGET_VENDOR=${stdenv.hostPlatform.parsed.vendor.name}
+
+  export CARGO_MANIFEST_DIR=$(pwd)
+  export DEBUG="${toString (!release)}"
+  export OPT_LEVEL="${toString optLevel}"
+  export TARGET="${rust.toRustTargetSpec stdenv.hostPlatform}"
+  export HOST="${rust.toRustTargetSpec stdenv.buildPlatform}"
+  export PROFILE=${if release then "release" else "debug"}
+  export OUT_DIR=$(pwd)/target/build/${crateName}.out
+  export CARGO_PKG_VERSION_MAJOR=${lib.elemAt version 0}
+  export CARGO_PKG_VERSION_MINOR=${lib.elemAt version 1}
+  export CARGO_PKG_VERSION_PATCH=${lib.elemAt version 2}
+  export CARGO_PKG_VERSION_PRE="${versionPre}"
+  export CARGO_PKG_HOMEPAGE="${crateHomepage}"
+  export NUM_JOBS=$NIX_BUILD_CORES
+  export RUSTC="rustc"
+  export RUSTDOC="rustdoc"
+
+  BUILD=""
+  if [[ ! -z "${build}" ]] ; then
+     BUILD=${build}
+  elif [[ -e "build.rs" ]]; then
+     BUILD="build.rs"
+  fi
+
+  # Compile and run the build script, when available.
+  if [[ ! -z "$BUILD" ]] ; then
+     echo_build_heading "$BUILD" ${libName}
+     mkdir -p target/build/${crateName}
+     EXTRA_BUILD_FLAGS=""
+     if [ -e target/link.build ]; then
+       EXTRA_BUILD_FLAGS="$EXTRA_BUILD_FLAGS $(tr '\n' ' ' < target/link.build)"
+     fi
+     noisily rustc --crate-name build_script_build $BUILD --crate-type bin ${rustcOpts} \
+       ${mkRustcFeatureArgs crateFeatures} --out-dir target/build/${crateName} --emit=dep-info,link \
+       -L dependency=target/buildDeps ${buildDeps} --cap-lints allow $EXTRA_BUILD_FLAGS --color ${colors}
+
+     mkdir -p target/build/${crateName}.out
+     export RUST_BACKTRACE=1
+     BUILD_OUT_DIR="-L $OUT_DIR"
+     mkdir -p $OUT_DIR
+
+     (
+       # Features should be set as environment variable for build scripts:
+       # https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts
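+       #
+       # As an illustrative example (assuming envFeatures has already been
+       # uppercased with dashes replaced by underscores), a feature
+       # "serde" is exported as CARGO_FEATURE_SERDE=1.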
+       for feature in ${envFeatures}; do
+         export CARGO_FEATURE_$feature=1
+       done
+
+       target/build/${crateName}/build_script_build > target/build/${crateName}.opt
+     )
+
+     set +e
+     EXTRA_BUILD=$(sed -n "s/^cargo:rustc-flags=\(.*\)/\1/p" target/build/${crateName}.opt | sort -u | tr '\n' ' ')
+     EXTRA_FEATURES=$(sed -n "s/^cargo:rustc-cfg=\(.*\)/--cfg \1/p" target/build/${crateName}.opt | tr '\n' ' ')
+     EXTRA_LINK=$(sed -n "s/^cargo:rustc-link-lib=\(.*\)/\1/p" target/build/${crateName}.opt | tr '\n' ' ')
+     EXTRA_LINK_SEARCH=$(sed -n "s/^cargo:rustc-link-search=\(.*\)/\1/p" target/build/${crateName}.opt | sort -u | tr '\n' ' ')
+
+     for env in $(sed -n "s/^cargo:rustc-env=\(.*\)/\1/p" target/build/${crateName}.opt); do
+       export $env
+     done
+
+     CRATENAME=$(echo ${crateName} | sed -e "s/\(.*\)-sys$/\U\1/" -e "s/-/_/g")
+     grep -P "^cargo:(?!(rustc-|warning=|rerun-if-changed=|rerun-if-env-changed))" target/build/${crateName}.opt \
+       | awk -F= "/^cargo:/ { sub(/^cargo:/, \"\", \$1); gsub(/-/, \"_\", \$1); print \"export \" toupper(\"DEP_$(echo $CRATENAME)_\" \$1) \"=\" \$2 }" > target/env
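+     # As an illustration (hypothetical values): for crateName "foo-sys",
+     # a build script line "cargo:root=/some/path" is rewritten into
+     # "export DEP_FOO_ROOT=/some/path" in target/env.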
+     set -e
+  fi
+  runHook postConfigure
+''
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/default.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/default.nix
new file mode 100644
index 000000000000..e605c9550e53
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/default.nix
@@ -0,0 +1,325 @@
+# Code for buildRustCrate, a Nix function that builds Rust code, just
+# like Cargo, but using Nix instead.
+#
+# This can be useful for deploying packages with NixOps, and for
+# sharing binary dependencies between projects.
+
+{ lib, stdenv, defaultCrateOverrides, fetchCrate, pkgsBuildBuild, rustc, rust
+, cargo, jq }:
+
+let
+    # Create rustc arguments to link against the given list of dependencies
+    # and renames.
+    #
+    # See docs for crateRenames below.
+    mkRustcDepArgs = dependencies: crateRenames:
+      lib.concatMapStringsSep " " (dep:
+        let
+          normalizeName = lib.replaceStrings ["-"] ["_"];
+          extern = normalizeName dep.libName;
+          # Find the first rename whose (optional) version matches
+          # this dependency; fall back to the plain extern name.
+          findMatchOrUseExtern = choices:
+            lib.findFirst (choice:
+              (!(choice ? version)
+                 || choice.version == dep.version or ""))
+            { rename = extern; }
+            choices;
+          name = if lib.hasAttr dep.crateName crateRenames then
+            let choices = crateRenames.${dep.crateName};
+            in
+            normalizeName (
+              if builtins.isList choices
+              then (findMatchOrUseExtern choices).rename
+              else choices
+            )
+          else
+            extern;
+        in (if lib.any (x: x == "lib" || x == "rlib") dep.crateType then
+           " --extern ${name}=${dep.lib}/lib/lib${extern}-${dep.metadata}.rlib"
+         else
+           " --extern ${name}=${dep.lib}/lib/lib${extern}-${dep.metadata}${stdenv.hostPlatform.extensions.sharedLibrary}")
+      ) dependencies;
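+    #
+    # For example (hypothetical values), a dependency with
+    # libName = "foo-bar" and metadata = "0123456789" contributes
+    # " --extern foo_bar=${dep.lib}/lib/libfoo_bar-0123456789.rlib".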
+
+   # Create feature arguments for rustc.
+   mkRustcFeatureArgs = lib.concatMapStringsSep " " (f: ''--cfg feature=\"${f}\"'');
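+   #
+   # For example, mkRustcFeatureArgs [ "std" "serde" ] yields
+   #   --cfg feature=\"std\" --cfg feature=\"serde\"
+   # (the quotes are escaped because the result is spliced into a shell command).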
+
+   inherit (import ./log.nix { inherit lib; }) noisily echo_colored;
+
+   configureCrate = import ./configure-crate.nix {
+     inherit lib stdenv rust echo_colored noisily mkRustcDepArgs mkRustcFeatureArgs;
+   };
+
+   buildCrate = import ./build-crate.nix {
+     inherit lib stdenv mkRustcDepArgs mkRustcFeatureArgs rust;
+   };
+
+   installCrate = import ./install-crate.nix { inherit stdenv; };
+
+   # Allow access to the rust attribute set from inside buildRustCrate, which
+   # has a parameter that shadows the name.
+   rustAttrs = rust;
+in
+
+/* The overridable pkgs.buildRustCrate function.
+ *
+ * Any unrecognized parameters will be passed as-is to
+ * the underlying stdenv.mkDerivation.
+ */
+ crate_: lib.makeOverridable (
+   # The rust compiler to use.
+   #
+   # Default: pkgs.rustc
+   { rust
+   # Whether to build a release version (`true`) or a debug
+   # version (`false`). Debug versions are faster to build
+   # but might be much slower at runtime.
+   , release
+   # Whether to print rustc invocations etc.
+   #
+   # Example: false
+   # Default: true
+   , verbose
+   # A list of rust/cargo features to enable while building the crate.
+   # Example: [ "std" "async" ]
+   , features
+   # Additional native build inputs for building this crate.
+   , nativeBuildInputs
+   # Additional build inputs for building this crate.
+   #
+   # Example: [ pkgs.openssl ]
+   , buildInputs
+   # Allows overriding the parameters to buildRustCrate
+   # for any Rust dependency in the transitive build tree.
+   #
+   # Default: pkgs.defaultCrateOverrides
+   #
+   # Example:
+   #
+   # pkgs.defaultCrateOverrides // {
+   #   hello = attrs: { buildInputs = [ openssl ]; };
+   # }
+   , crateOverrides
+   # Rust library dependencies, i.e. other libraries that were built
+   # with buildRustCrate.
+   , dependencies
+   # Rust build dependencies, i.e. other libraries that were built
+   # with buildRustCrate and are used by a build script.
+   , buildDependencies
+   # Specify the "extern" name of a library if it differs from the library target.
+   # See above for an extended explanation.
+   #
+   # Default: no renames.
+   #
+   # Example:
+   #
+   # `crateRenames` supports two formats.
+   #
+   # The simple version is an attrset that maps the
+   # `crateName`s of the dependencies to their alternative
+   # names.
+   #
+   # ```nix
+   # {
+   #   my_crate_name = "my_alternative_name";
+   #   # ...
+   # }
+   # ```
+   #
+   # The extended version is also keyed by the `crateName`s but allows
+   # different names for different crate versions:
+   #
+   # ```nix
+   # {
+   #   my_crate_name = [
+   #       { version = "1.2.3"; rename = "my_alternative_name01"; }
+   #       { version = "3.2.3"; rename = "my_alternative_name03"; }
+   #   ]
+   #   # ...
+   # }
+   # ```
+   #
+   # This roughly corresponds to the following snippet in Cargo.toml:
+   #
+   # ```toml
+   # [dependencies]
+   # my_alternative_name01 = { package = "my_crate_name", version = "0.1" }
+   # my_alternative_name03 = { package = "my_crate_name", version = "0.3" }
+   # ```
+   #
+   # Dependencies that use the lib target name as their extern name do not
+   # need to be specified in crateRenames, even if their crate name differs.
+   #
+   # Including multiple versions of a crate is common during
+   # ecosystem transitions, e.g. from futures 0.1 to futures 0.3.
+   , crateRenames
+   # A list of extra options to pass to rustc.
+   #
+   # Example: [ "-Z debuginfo=2" ]
+   # Default: []
+   , extraRustcOpts
+   # Whether to enable building tests.
+   # Use true to enable.
+   # Default: false
+   , buildTests
+   # Passed to stdenv.mkDerivation.
+   , preUnpack
+   # Passed to stdenv.mkDerivation.
+   , postUnpack
+   # Passed to stdenv.mkDerivation.
+   , prePatch
+   # Passed to stdenv.mkDerivation.
+   , patches
+   # Passed to stdenv.mkDerivation.
+   , postPatch
+   # Passed to stdenv.mkDerivation.
+   , preConfigure
+   # Passed to stdenv.mkDerivation.
+   , postConfigure
+   # Passed to stdenv.mkDerivation.
+   , preBuild
+   # Passed to stdenv.mkDerivation.
+   , postBuild
+   # Passed to stdenv.mkDerivation.
+   , preInstall
+   # Passed to stdenv.mkDerivation.
+   , postInstall
+   }:
+
+let crate = crate_ // (lib.attrByPath [ crate_.crateName ] (attr: {}) crateOverrides crate_);
+    dependencies_ = dependencies;
+    buildDependencies_ = buildDependencies;
+    processedAttrs = [
+      "src" "nativeBuildInputs" "buildInputs" "crateBin" "crateLib" "libName" "libPath"
+      "buildDependencies" "dependencies" "features" "crateRenames"
+      "crateName" "version" "build" "authors" "colors" "edition"
+      "buildTests"
+    ];
+    extraDerivationAttrs = builtins.removeAttrs crate processedAttrs;
+    nativeBuildInputs_ = nativeBuildInputs;
+    buildInputs_ = buildInputs;
+    extraRustcOpts_ = extraRustcOpts;
+    buildTests_ = buildTests;
+
+    # crate2nix has a hack for the old bash-based build script that split
+    # entries at `,`. Now we have to work around that hack.
+    # https://github.com/kolloch/crate2nix/blame/5b19c1b14e1b0e5522c3e44e300d0b332dc939e7/crate2nix/templates/build.nix.tera#L89
+    crateBin = lib.filter (bin: !(bin ? name && bin.name == ",")) (crate.crateBin or []);
+    hasCrateBin = crate ? crateBin;
+in
+stdenv.mkDerivation (rec {
+
+    inherit (crate) crateName;
+    inherit
+      preUnpack
+      postUnpack
+      prePatch
+      patches
+      postPatch
+      preConfigure
+      postConfigure
+      preBuild
+      postBuild
+      preInstall
+      postInstall
+      buildTests
+    ;
+
+    src = crate.src or (fetchCrate { inherit (crate) crateName version sha256; });
+    name = "rust_${crate.crateName}-${crate.version}${lib.optionalString buildTests_ "-test"}";
+    version = crate.version;
+    depsBuildBuild = [ pkgsBuildBuild.stdenv.cc ];
+    nativeBuildInputs = [ rust stdenv.cc cargo jq ] ++ (crate.nativeBuildInputs or []) ++ nativeBuildInputs_;
+    buildInputs = (crate.buildInputs or []) ++ buildInputs_;
+    dependencies = map lib.getLib dependencies_;
+    buildDependencies = map lib.getLib buildDependencies_;
+
+    completeDeps = lib.unique (dependencies ++ lib.concatMap (dep: dep.completeDeps) dependencies);
+    completeBuildDeps = lib.unique (
+      buildDependencies
+      ++ lib.concatMap (dep: dep.completeBuildDeps ++ dep.completeDeps) buildDependencies
+    );
+
+    # Create a list of features that are enabled by the crate itself and
+    # through the features argument of buildRustCrate. Exclude features
+    # with a forward slash, since they are passed through to dependencies.
+    crateFeatures = lib.optionals (crate ? features)
+      (builtins.filter (f: !lib.hasInfix "/" f) (crate.features ++ features));
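+    # For example, crate.features = [ "std" ] combined with
+    # features = [ "serde/derive" ] results in just [ "std" ].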
+
+    libName = if crate ? libName then crate.libName else crate.crateName;
+    libPath = if crate ? libPath then crate.libPath else "";
+
+    # Seed the symbol hashes with something unique every time.
+    # https://doc.rust-lang.org/1.0.0/rustc/metadata/loader/index.html#frobbing-symbols
+    metadata = let
+      depsMetadata = lib.foldl' (str: dep: str + dep.metadata) "" (dependencies ++ buildDependencies);
+      hashedMetadata = builtins.hashString "sha256"
+        (crateName + "-" + crateVersion + "___" + toString (mkRustcFeatureArgs crateFeatures) +
+          "___" + depsMetadata + "___" + rustAttrs.toRustTarget stdenv.hostPlatform);
+      in lib.substring 0 10 hashedMetadata;
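+    #
+    # The result is a short suffix such as (hypothetically) "90a85e6cd8",
+    # which also ends up in library file names like libfoo-90a85e6cd8.rlib.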
+
+    build = crate.build or "";
+    # Either a concrete sub-path to the crate root,
+    # or `null` to auto-detect it.
+    workspace_member = crate.workspace_member or ".";
+    crateVersion = crate.version;
+    crateDescription = crate.description or "";
+    crateAuthors = if crate ? authors && lib.isList crate.authors then crate.authors else [];
+    crateHomepage = crate.homepage or "";
+    crateType =
+      if lib.attrByPath ["procMacro"] false crate then ["proc-macro"] else
+      if lib.attrByPath ["plugin"] false crate then ["dylib"] else
+        (crate.type or ["lib"]);
+    colors = lib.attrByPath [ "colors" ] "always" crate;
+    extraLinkFlags = lib.concatStringsSep " " (crate.extraLinkFlags or []);
+    edition = crate.edition or null;
+    extraRustcOpts =
+      lib.optionals (crate ? extraRustcOpts) crate.extraRustcOpts
+      ++ extraRustcOpts_
+      ++ (lib.optional (edition != null) "--edition ${edition}");
+
+
+    configurePhase = configureCrate {
+      inherit crateName buildDependencies completeDeps completeBuildDeps crateDescription
+              crateFeatures crateRenames libName build workspace_member release libPath crateVersion
+              extraLinkFlags extraRustcOpts
+              crateAuthors crateHomepage verbose colors;
+    };
+    buildPhase = buildCrate {
+      inherit crateName dependencies
+              crateFeatures crateRenames libName release libPath crateType
+              metadata hasCrateBin crateBin verbose colors
+              extraRustcOpts buildTests;
+    };
+    installPhase = installCrate crateName metadata buildTests;
+
+    # depending on the test setting we are either producing something with bins
+    # and libs or just test binaries
+    outputs = if buildTests then [ "out" ] else [ "out" "lib" ];
+    outputDev = if buildTests then [ "out" ] else  [ "lib" ];
+
+} // extraDerivationAttrs
+)) {
+  rust = rustc;
+  release = crate_.release or true;
+  verbose = crate_.verbose or true;
+  extraRustcOpts = [];
+  features = [];
+  nativeBuildInputs = [];
+  buildInputs = [];
+  crateOverrides = defaultCrateOverrides;
+  preUnpack = crate_.preUnpack or "";
+  postUnpack = crate_.postUnpack or "";
+  prePatch = crate_.prePatch or "";
+  patches = crate_.patches or [];
+  postPatch = crate_.postPatch or "";
+  preConfigure = crate_.preConfigure or "";
+  postConfigure = crate_.postConfigure or "";
+  preBuild = crate_.preBuild or "";
+  postBuild = crate_.postBuild or "";
+  preInstall = crate_.preInstall or "";
+  postInstall = crate_.postInstall or "";
+  dependencies = crate_.dependencies or [];
+  buildDependencies = crate_.buildDependencies or [];
+  crateRenames = crate_.crateRenames or {};
+  buildTests = crate_.buildTests or false;
+}
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/helpers.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/helpers.nix
new file mode 100644
index 000000000000..386d0ce7084f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/helpers.nix
@@ -0,0 +1,26 @@
+{stdenv, lib}:
+{
+  kernel = stdenv.hostPlatform.parsed.kernel.name;
+  abi = stdenv.hostPlatform.parsed.abi.name;
+  cpu = stdenv.hostPlatform.parsed.cpu.name;
+  updateFeatures = f: up: functions: lib.deepSeq f (lib.foldl' (features: fun: fun features) (lib.attrsets.recursiveUpdate f up) functions);
+  mapFeatures = features: map (fun: fun { features = features; });
+  mkFeatures = feat: lib.foldl (features: featureName:
+    if feat.${featureName} or false then
+      [ featureName ] ++ features
+    else
+      features
+  ) [] (lib.attrNames feat);
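+  # For example, mkFeatures { default = true; std = false; }
+  # evaluates to [ "default" ].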
+  include = includedFiles: src: builtins.filterSource (path: type:
+     lib.any (f:
+       let p = toString (src + ("/" + f));
+       in
+       p == path || (lib.strings.hasPrefix (p + "/") path)
+     ) includedFiles
+  ) src;
+  exclude = excludedFiles: src: builtins.filterSource (path: type:
+    lib.all (f:
+       !lib.strings.hasPrefix (toString (src + ("/" + f))) path
+    ) excludedFiles
+  ) src;
+}
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/install-crate.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/install-crate.nix
new file mode 100644
index 000000000000..f4a4dcdb0d94
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/install-crate.nix
@@ -0,0 +1,51 @@
+{ stdenv }:
+crateName: metadata: buildTests:
+if !buildTests then ''
+  runHook preInstall
+  # Always create $out even if we do not have binaries. Binary targets are
+  # detected during compilation; if they are missing, there is no way to
+  # produce only $lib.
+  mkdir $out
+  if [[ -s target/env ]]; then
+    mkdir -p $lib
+    cp target/env $lib/env
+  fi
+  if [[ -s target/link.final ]]; then
+    mkdir -p $lib/lib
+    cp target/link.final $lib/lib/link
+  fi
+  if [[ "$(ls -A target/lib)" ]]; then
+    mkdir -p $lib/lib
+    cp -r target/lib/* $lib/lib #*/
+    for library in $lib/lib/*.so $lib/lib/*.dylib; do #*/
+      ln -s $library $(echo $library | sed -e "s/-${metadata}//")
+    done
+  fi
+  if [[ "$(ls -A target/build)" ]]; then # */
+    mkdir -p $lib/lib
+    cp -r target/build/* $lib/lib # */
+  fi
+  if [[ -d target/bin ]]; then
+    if [[ "$(ls -A target/bin)" ]]; then
+      mkdir -p $out/bin
+      cp -rP target/bin/* $out/bin # */
+    fi
+  fi
+  runHook postInstall
+'' else
+# for tests we just put them all in the output. No execution.
+''
+  runHook preInstall
+
+  mkdir -p $out/tests
+  if [ -e target/bin ]; then
+    find target/bin/ -type f -executable -exec cp {} $out/tests \;
+  fi
+  if [ -e target/lib ]; then
+    find target/lib/ -type f \! -name '*.rlib' \
+      -a \! -name '*${stdenv.hostPlatform.extensions.sharedLibrary}' \
+      -a \! -name '*.d' \
+      -executable \
+      -print0 | xargs --no-run-if-empty --null install --target $out/tests;
+  fi
+
+  runHook postInstall
+''
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/lib.sh b/nixpkgs/pkgs/build-support/rust/build-rust-crate/lib.sh
new file mode 100644
index 000000000000..d4927b025aa8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/lib.sh
@@ -0,0 +1,174 @@
+echo_build_heading() {
+  if (( $# == 1 )); then
+    echo_colored "Building $1"
+  else
+    echo_colored "Building $1 ($2)"
+  fi
+}
+
+build_lib() {
+  lib_src=$1
+  echo_build_heading $lib_src ${libName}
+
+  noisily rustc \
+    --crate-name $CRATE_NAME \
+    $lib_src \
+    --out-dir target/lib \
+    -L dependency=target/deps \
+    --cap-lints allow \
+    $LIB_RUSTC_OPTS \
+    $BUILD_OUT_DIR \
+    $EXTRA_BUILD \
+    $EXTRA_FEATURES \
+    $EXTRA_RUSTC_FLAGS \
+    --color $colors
+
+  EXTRA_LIB=" --extern $CRATE_NAME=target/lib/lib$CRATE_NAME-$metadata.rlib"
+  if [ -e target/deps/lib$CRATE_NAME-$metadata$LIB_EXT ]; then
+     EXTRA_LIB="$EXTRA_LIB --extern $CRATE_NAME=target/lib/lib$CRATE_NAME-$metadata$LIB_EXT"
+  fi
+}
+
+build_bin() {
+  local crate_name=$1
+  local crate_name_=$(echo $crate_name | tr '-' '_')
+  local main_file=""
+
+  if [[ ! -z $2 ]]; then
+    main_file=$2
+  fi
+  echo_build_heading $@
+  noisily rustc \
+    --crate-name $crate_name_ \
+    $main_file \
+    --crate-type bin \
+    $BIN_RUSTC_OPTS \
+    --out-dir target/bin \
+    -L dependency=target/deps \
+    $LINK \
+    $EXTRA_LIB \
+    --cap-lints allow \
+    $BUILD_OUT_DIR \
+    $EXTRA_BUILD \
+    $EXTRA_FEATURES \
+    $EXTRA_RUSTC_FLAGS \
+    --color ${colors} \
+
+  if [ "$crate_name_" != "$crate_name" ]; then
+    mv target/bin/$crate_name_ target/bin/$crate_name
+  fi
+}
+
+build_lib_test() {
+    local file="$1"
+    EXTRA_RUSTC_FLAGS="--test $EXTRA_RUSTC_FLAGS" build_lib "$1" "$2"
+}
+
+build_bin_test() {
+    local crate="$1"
+    local file="$2"
+    EXTRA_RUSTC_FLAGS="--test $EXTRA_RUSTC_FLAGS" build_bin "$1" "$2"
+}
+
+build_bin_test_file() {
+    local file="$1"
+    local derived_crate_name="${file//\//_}"
+    derived_crate_name="${derived_crate_name%.rs}"
+    build_bin_test "$derived_crate_name" "$file"
+}
+
+# Add additional link options that were provided by the build script.
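+#
+# For example (hypothetical paths, with $(pwd) being /build), a build
+# script line
+#   cargo:rustc-link-search=/build/target/build/foo.out
+# becomes "-L /build/target/build/foo.out" in target/link and
+# "-L $lib/lib/foo.out" in target/link.final, while
+#   cargo:rustc-link-lib=z
+# becomes "-l z" in both files.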
+setup_link_paths() {
+  EXTRA_LIB=""
+  if [[ -e target/link_ ]]; then
+    EXTRA_BUILD="$(cat target/link_) $EXTRA_BUILD"
+  fi
+
+  echo "$EXTRA_LINK_SEARCH" | while read i; do
+     if [[ ! -z "$i" ]]; then
+       for library in $i; do
+         echo "-L $library" >> target/link
+         L=$(echo $library | sed -e "s#$(pwd)/target/build#$lib/lib#")
+         echo "-L $L" >> target/link.final
+       done
+     fi
+  done
+  echo "$EXTRA_LINK" | while read i; do
+     if [[ ! -z "$i" ]]; then
+       for library in $i; do
+         echo "-l $library" >> target/link
+         echo "-l $library" >> target/link.final
+       done
+     fi
+  done
+
+  if [[ -e target/link ]]; then
+     tr '\n' ' ' < target/link > target/link_
+     LINK=$(cat target/link_)
+  fi
+}
+
+search_for_bin_path() {
+  # heuristic to "guess" the correct source file as found in cargo:
+  # https://github.com/rust-lang/cargo/blob/90fc9f620190d5fa3c80b0c8c65a1e1361e6b8ae/src/cargo/util/toml/targets.rs#L308-L325
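+  #
+  # For example, for BIN_NAME "my-tool" the candidates are, in order,
+  # src/bin/my-tool.rs, src/bin/my-tool/main.rs, src/bin/my_tool.rs,
+  # src/bin/my_tool/main.rs, src/bin/main.rs and src/main.rs; if the
+  # crate is not also a library, src/my-tool.rs and src/my_tool.rs are
+  # tried first.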
+
+  BIN_NAME=$1
+  BIN_NAME_=$(echo $BIN_NAME | tr '-' '_')
+
+  # the first two cases are the "new" default IIRC
+  FILES=( "src/bin/$BIN_NAME.rs" "src/bin/$BIN_NAME/main.rs" "src/bin/$BIN_NAME_.rs" "src/bin/$BIN_NAME_/main.rs" "src/bin/main.rs" "src/main.rs" )
+
+  if ! [ -e "$LIB_PATH" -o -e src/lib.rs -o -e "src/$LIB_NAME.rs" ]; then
+    # if this is not a library the following path is also valid
+    FILES=( "src/$BIN_NAME.rs" "src/$BIN_NAME_.rs" "${FILES[@]}" )
+  fi
+
+  for file in "${FILES[@]}";
+  do
+    echo "checking file $file"
+    # first file that exists wins
+    if [[ -e "$file" ]]; then
+            BIN_PATH="$file"
+            break
+    fi
+  done
+
+  if [[ -z "$BIN_PATH" ]]; then
+    echo_error "ERROR: failed to find file for binary target: $BIN_NAME" >&2
+    exit 1
+  fi
+}
+
+# Extracts cargo_toml_path of the matching crate.
+matching_cargo_toml_path() {
+  local manifest_path="$1"
+  local expected_crate_name="$2"
+
+  # If the Cargo.toml is not a workspace root,
+  # it will only contain one package in ".packages"
+  # because "--no-deps" suppressed dependency resolution.
+  #
+  # But to make it more general, we search for a matching
+  # crate in all packages and use the manifest path that
+  # is referenced there.
+  cargo metadata --no-deps --format-version 1 \
+    --manifest-path "$manifest_path" \
+    | jq -r '.packages[]
+            | select( .name == "'$expected_crate_name'")
+            | .manifest_path'
+}
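+
+# Illustrative usage (hypothetical layout):
+#   matching_cargo_toml_path ./Cargo.toml my_crate
+# might print /build/source/members/my_crate/Cargo.toml.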
+
+# Find a Cargo.toml in the current or any sub directory
+# with a matching crate name.
+matching_cargo_toml_dir() {
+  local expected_crate_name="$1"
+
+  find -L -name Cargo.toml | sort | while read manifest_path; do
+    echo "...checking manifest_path $manifest_path" >&2
+    local matching_path="$(matching_cargo_toml_path "$manifest_path" "$expected_crate_name")"
+    if [ -n "${matching_path}" ]; then
+      echo "$(dirname $matching_path)"
+      break
+    fi
+  done
+}
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/log.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/log.nix
new file mode 100644
index 000000000000..9054815f4a1b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/log.nix
@@ -0,0 +1,59 @@
+{ lib }:
+
+let echo_colored_body = start_escape:
+      # Body of a function that behaves like "echo" but
+      # has the output colored by the given start_escape
+      # sequence. E.g.
+      #
+      # * echo_x "Building ..."
+      # * echo_x -n "Running "
+      #
+      # This is more complicated than it appears at first sight
+      # because:
+      #   * The color markers and the text must be printed
+      #     in the same echo statement. Otherwise, other
+      #     intermingled text from concurrent builds would
+      #     be colored as well.
+      #   * We need to preserve the trailing newline of the
+      #     echo if and only if it is present. Bash likes
+      #     to strip those if we capture the output of echo
+      #     in a variable.
+      #   * A leading "-" would be interpreted by test as
+      #     one of its own options. Therefore, we prefix the
+      #     argument with an x in `[[ "x$1" =~ ^x- ]]`.
+      ''
+      local echo_args="";
+      while [[ "x$1" =~ ^x- ]]; do
+        echo_args+=" $1"
+        shift
+      done
+
+      local start_escape="$(printf '${start_escape}')"
+      local reset="$(printf '\033[0m')"
+      echo $echo_args $start_escape"$@"$reset
+      '';
+  echo_conditional_colored_body = colors: start_escape:
+      if colors == "always"
+      then (echo_colored_body start_escape)
+      else ''echo "$@"'';
+in {
+  echo_colored = colors: ''
+    echo_colored() {
+      ${echo_conditional_colored_body colors ''\033[0;1;32m''}
+    }
+
+    echo_error() {
+      ${echo_conditional_colored_body colors ''\033[0;1;31m''}
+    }
+   '';
+
+  noisily = colors: verbose: ''
+    noisily() {
+      ${lib.optionalString verbose ''
+        echo_colored -n "Running "
+        echo $@
+      ''}
+      $@
+    }
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/brotli-crates.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/brotli-crates.nix
new file mode 100644
index 000000000000..ab9b0a13ae77
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/brotli-crates.nix
@@ -0,0 +1,95 @@
+{ lib, buildPlatform, buildRustCrate, fetchgit }:
+let kernel = buildPlatform.parsed.kernel.name;
+    abi = buildPlatform.parsed.abi.name;
+    include = includedFiles: src: builtins.filterSource (path: type:
+      lib.lists.any (f:
+        let p = toString (src + ("/" + f)); in
+        (path == p) || (type == "directory" && lib.strings.hasPrefix path p)
+      ) includedFiles
+    ) src;
+    updateFeatures = f: up: functions: builtins.deepSeq f (lib.lists.foldl' (features: fun: fun features) (lib.attrsets.recursiveUpdate f up) functions);
+    mapFeatures = features: map (fun: fun { features = features; });
+    mkFeatures = feat: lib.lists.foldl (features: featureName:
+      if feat.${featureName} or false then
+        [ featureName ] ++ features
+      else
+        features
+    ) [] (builtins.attrNames feat);
+in
+rec {
+    alloc_no_stdlib_1_3_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
+    crateName = "alloc-no-stdlib";
+    version = "1.3.0";
+    authors = [ "Daniel Reiter Horn <danielrh@dropbox.com>" ];
+    sha256 = "1jcp27pzmqdszgp80y484g4kwbjbg7x8a589drcwbxg0i8xwkir9";
+    crateBin = [ {  name = "example"; } ];
+    inherit dependencies buildDependencies features;
+  };
+  brotli_2_5_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
+    crateName = "brotli";
+    version = "2.5.0";
+    authors = [ "Daniel Reiter Horn <danielrh@dropbox.com>" "The Brotli Authors" ];
+    sha256 = "1ynw4hkdwnp0kj30p86ls44ahv4s99258s019bqrq4mya8hlsb5b";
+    crateBin = [ {  name = "brotli"; } ];
+    inherit dependencies buildDependencies features;
+  };
+  brotli_decompressor_1_3_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
+    crateName = "brotli-decompressor";
+    version = "1.3.1";
+    authors = [ "Daniel Reiter Horn <danielrh@dropbox.com>" "The Brotli Authors" ];
+    sha256 = "022g69q1xzwdj0130qm3fa4qwpn4q1jx3lc8yz0v0v201p7bm8fb";
+    crateBin = [ {  name = "brotli-decompressor"; } ];
+    inherit dependencies buildDependencies features;
+  };
+  alloc_no_stdlib_1_3_0 = { features?(alloc_no_stdlib_1_3_0_features {}) }: alloc_no_stdlib_1_3_0_ {
+    features = mkFeatures (features.alloc_no_stdlib_1_3_0 or {});
+  };
+  alloc_no_stdlib_1_3_0_features = f: updateFeatures f ({
+    alloc_no_stdlib_1_3_0.default = (f.alloc_no_stdlib_1_3_0.default or true);
+  }) [];
+  brotli_2_5_0 = { features?(brotli_2_5_0_features {}) }: brotli_2_5_0_ {
+    dependencies = mapFeatures features ([ alloc_no_stdlib_1_3_0 brotli_decompressor_1_3_1 ]);
+    features = mkFeatures (features.brotli_2_5_0 or {});
+  };
+  brotli_2_5_0_features = f: updateFeatures f (rec {
+    alloc_no_stdlib_1_3_0.no-stdlib =
+      (f.alloc_no_stdlib_1_3_0.no-stdlib or false) ||
+      (brotli_2_5_0.no-stdlib or false) ||
+      (f.brotli_2_5_0.no-stdlib or false);
+    alloc_no_stdlib_1_3_0.default = true;
+    brotli_2_5_0.default = (f.brotli_2_5_0.default or true);
+    brotli_decompressor_1_3_1.disable-timer =
+      (f.brotli_decompressor_1_3_1.disable-timer or false) ||
+      (brotli_2_5_0.disable-timer or false) ||
+      (f.brotli_2_5_0.disable-timer or false);
+    brotli_decompressor_1_3_1.no-stdlib =
+      (f.brotli_decompressor_1_3_1.no-stdlib or false) ||
+      (brotli_2_5_0.no-stdlib or false) ||
+      (f.brotli_2_5_0.no-stdlib or false);
+    brotli_decompressor_1_3_1.benchmark =
+      (f.brotli_decompressor_1_3_1.benchmark or false) ||
+      (brotli_2_5_0.benchmark or false) ||
+      (f.brotli_2_5_0.benchmark or false);
+    brotli_decompressor_1_3_1.default = true;
+    brotli_decompressor_1_3_1.seccomp =
+      (f.brotli_decompressor_1_3_1.seccomp or false) ||
+      (brotli_2_5_0.seccomp or false) ||
+      (f.brotli_2_5_0.seccomp or false);
+  }) [ alloc_no_stdlib_1_3_0_features brotli_decompressor_1_3_1_features ];
+  brotli_decompressor_1_3_1 = { features?(brotli_decompressor_1_3_1_features {}) }: brotli_decompressor_1_3_1_ {
+    dependencies = mapFeatures features ([ alloc_no_stdlib_1_3_0 ]);
+    features = mkFeatures (features.brotli_decompressor_1_3_1 or {});
+  };
+  brotli_decompressor_1_3_1_features = f: updateFeatures f (rec {
+    alloc_no_stdlib_1_3_0.no-stdlib =
+      (f.alloc_no_stdlib_1_3_0.no-stdlib or false) ||
+      (brotli_decompressor_1_3_1.no-stdlib or false) ||
+      (f.brotli_decompressor_1_3_1.no-stdlib or false);
+    alloc_no_stdlib_1_3_0.default = true;
+    alloc_no_stdlib_1_3_0.unsafe =
+      (f.alloc_no_stdlib_1_3_0.unsafe or false) ||
+      (brotli_decompressor_1_3_1.unsafe or false) ||
+      (f.brotli_decompressor_1_3_1.unsafe or false);
+    brotli_decompressor_1_3_1.default = (f.brotli_decompressor_1_3_1.default or true);
+  }) [ alloc_no_stdlib_1_3_0_features ];
+}
diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/default.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/default.nix
new file mode 100644
index 000000000000..65c8880b134d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/default.nix
@@ -0,0 +1,657 @@
+{ lib
+, buildPackages
+, buildRustCrate
+, callPackage
+, releaseTools
+, runCommand
+, runCommandCC
+, stdenv
+, symlinkJoin
+, writeTextFile
+}:
+
+let
+  mkCrate = buildRustCrate: args: let
+    p = {
+      crateName = "nixtestcrate";
+      version = "0.1.0";
+      authors = [ "Test <test@example.com>" ];
+    } // args;
+  in buildRustCrate p;
+  mkHostCrate = mkCrate buildRustCrate;
+
+  mkCargoToml =
+    { name, crateVersion ? "0.1.0", path ? "Cargo.toml" }:
+      mkFile path ''
+        [package]
+        name = ${builtins.toJSON name}
+        version = ${builtins.toJSON crateVersion}
+      '';
+
+  mkFile = destination: text: writeTextFile {
+    name = "src";
+    destination = "/${destination}";
+    inherit text;
+  };
+
+  mkBin = name: mkFile name ''
+    use std::env;
+    fn main() {
+      let name: String = env::args().nth(0).unwrap();
+      println!("executed {}", name);
+    }
+  '';
+
+  mkBinExtern = name: extern: mkFile name ''
+    extern crate ${extern};
+    fn main() {
+      assert_eq!(${extern}::test(), 23);
+    }
+  '';
+
+  mkTestFile = name: functionName: mkFile name ''
+    #[cfg(test)]
+    #[test]
+    fn ${functionName}() {
+      assert!(true);
+    }
+  '';
+  mkTestFileWithMain = name: functionName: mkFile name ''
+    #[cfg(test)]
+    #[test]
+    fn ${functionName}() {
+      assert!(true);
+    }
+
+    fn main() {}
+  '';
+
+
+  mkLib = name: mkFile name "pub fn test() -> i32 { return 23; }";
+
+  mkTest = crateArgs: let
+    crate = mkHostCrate (builtins.removeAttrs crateArgs ["expectedTestOutputs"]);
+    hasTests = crateArgs.buildTests or false;
+    expectedTestOutputs = crateArgs.expectedTestOutputs or null;
+    binaries = map (v: lib.escapeShellArg v.name) (crateArgs.crateBin or []);
+    isLib = crateArgs ? libName || crateArgs ? libPath;
+    crateName = crateArgs.crateName or "nixtestcrate";
+    libName = crateArgs.libName or crateName;
+
+    libTestBinary = if !isLib then null else mkHostCrate {
+      crateName = "run-test-${crateName}";
+      dependencies = [ crate ];
+      src = mkBinExtern "src/main.rs" libName;
+    };
+
+    in
+      assert expectedTestOutputs != null -> hasTests;
+      assert hasTests -> expectedTestOutputs != null;
+
+      runCommand "run-buildRustCrate-${crateName}-test" {
+        nativeBuildInputs = [ crate ];
+      } (if !hasTests then ''
+          ${lib.concatMapStringsSep "\n" (binary:
+            # Can't actually run the binary when cross-compiling
+            (lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) "type ") + binary
+          ) binaries}
+          ${lib.optionalString isLib ''
+              test -e ${crate}/lib/*.rlib || exit 1
+              ${lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) "test -x "} \
+                ${libTestBinary}/bin/run-test-${crateName}
+          ''}
+          touch $out
+        '' else if stdenv.hostPlatform == stdenv.buildPlatform then ''
+          for file in ${crate}/tests/*; do
+            $file 2>&1 >> $out
+          done
+          set -e
+          ${lib.concatMapStringsSep "\n" (o: "grep '${o}' $out || {  echo 'output \"${o}\" not found in:'; cat $out; exit 23; }") expectedTestOutputs}
+        '' else ''
+          for file in ${crate}/tests/*; do
+            test -x "$file"
+          done
+          touch "$out"
+        ''
+      );
+
+    /* Returns a derivation that asserts that the crate specified by `crateArgs`
+       has the specified files as output.
+
+       `name` is used as part of the derivation name that performs the checking.
+
+       `crateArgs` is passed to `mkHostCrate` to build the crate with `buildRustCrate`.
+
+       `expectedFiles` contains a list of expected file paths in the output. E.g.
+       `[ "./bin/my_binary" ]`.
+
+       `output` specifies the name of the output to use. By default, the
+       default output is used, but e.g. `output = "lib";` will cause the lib
+       output to be checked instead. Only files need to be listed;
+       directories do not.
+     */
+    assertOutputs = { name, crateArgs, expectedFiles, output ? null }:
+      assert (builtins.isString name);
+      assert (builtins.isAttrs crateArgs);
+      assert (builtins.isList expectedFiles);
+
+      let
+        crate = mkHostCrate (builtins.removeAttrs crateArgs ["expectedTestOutputs"]);
+        crateOutput = if output == null then crate else crate."${output}";
+        expectedFilesFile = writeTextFile {
+          name = "expected-files-${name}";
+          text =
+            let sorted = builtins.sort (a: b: a<b) expectedFiles;
+                concatenated = builtins.concatStringsSep "\n" sorted;
+            in "${concatenated}\n";
+        };
+      in
+      runCommand "assert-outputs-${name}" {
+      } (''
+      local actualFiles=$(mktemp)
+
+      cd "${crateOutput}"
+      find . -type f \
+        | sort \
+      ''
+      # sed out the hash because it differs per platform
+      + ''
+        | sed -E -e 's/-[0-9a-fA-F]{10}\.rlib/-HASH.rlib/g' \
+        > "$actualFiles"
+      diff -q ${expectedFilesFile} "$actualFiles" > /dev/null || {
+        echo -e "\033[0;1;31mERROR: Difference in expected output files in ${crateOutput} \033[0m" >&2
+        echo === Got:
+        sed -e 's/^/  /' $actualFiles
+        echo === Expected:
+        sed -e 's/^/  /' ${expectedFilesFile}
+        echo === Diff:
+        diff -u ${expectedFilesFile} $actualFiles |\
+          tail -n +3 |\
+          sed -e 's/^/  /'
+        exit 1
+      }
+      touch $out
+      '')
+      ;
+
+  in rec {
+
+  tests = let
+    cases = rec {
+      libPath =  { libPath = "src/my_lib.rs"; src = mkLib "src/my_lib.rs"; };
+      srcLib =  { src = mkLib "src/lib.rs"; };
+
+      # This used to be supported by cargo, but as of 1.40.0 we could not make it work with plain cargo anymore.
+      # This might be a regression, or a deprecated feature that was finally removed…
+      # customLibName =  { libName = "test_lib"; src = mkLib "src/test_lib.rs"; };
+      # rustLibTestsCustomLibName = {
+      #   libName = "test_lib";
+      #   src = mkTestFile "src/test_lib.rs" "foo";
+      #   buildTests = true;
+      #   expectedTestOutputs = [ "test foo ... ok" ];
+      # };
+
+      customLibNameAndLibPath =  { libName = "test_lib"; libPath = "src/best-lib.rs"; src = mkLib "src/best-lib.rs"; };
+      crateBinWithPath =  { crateBin = [{ name = "test_binary1"; path = "src/foobar.rs"; }]; src = mkBin "src/foobar.rs"; };
+      crateBinNoPath1 =  { crateBin = [{ name = "my-binary2"; }]; src = mkBin "src/my_binary2.rs"; };
+      crateBinNoPath2 =  {
+        crateBin = [{ name = "my-binary3"; } { name = "my-binary4"; }];
+        src = symlinkJoin {
+          name = "buildRustCrateMultipleBinariesCase";
+          paths = [ (mkBin "src/bin/my_binary3.rs") (mkBin "src/bin/my_binary4.rs") ];
+        };
+      };
+      crateBinNoPath3 =  { crateBin = [{ name = "my-binary5"; }]; src = mkBin "src/bin/main.rs"; };
+      crateBinNoPath4 =  { crateBin = [{ name = "my-binary6"; }]; src = mkBin "src/main.rs";};
+      crateBinRename1 = {
+        crateBin = [{ name = "my-binary-rename1"; }];
+        src = mkBinExtern "src/main.rs" "foo_renamed";
+        dependencies = [ (mkHostCrate { crateName = "foo"; src = mkLib "src/lib.rs"; }) ];
+        crateRenames = { "foo" = "foo_renamed"; };
+      };
+      crateBinRename2 = {
+        crateBin = [{ name = "my-binary-rename2"; }];
+        src = mkBinExtern "src/main.rs" "foo_renamed";
+        dependencies = [ (mkHostCrate { crateName = "foo"; libName = "foolib"; src = mkLib "src/lib.rs"; }) ];
+        crateRenames = { "foo" = "foo_renamed"; };
+      };
+      crateBinRenameMultiVersion = let
+        crateWithVersion = version: mkHostCrate {
+          crateName = "my_lib";
+          inherit version;
+          src = mkFile "src/lib.rs" ''
+            pub const version: &str = "${version}";
+          '';
+        };
+        depCrate01 = crateWithVersion "0.1.2";
+        depCrate02 = crateWithVersion "0.2.1";
+      in {
+        crateName = "my_bin";
+        src = symlinkJoin {
+          name = "my_bin_src";
+          paths = [
+            (mkFile  "src/main.rs" ''
+              #[test]
+              fn my_lib_01() { assert_eq!(lib01::version, "0.1.2"); }
+
+              #[test]
+              fn my_lib_02() { assert_eq!(lib02::version, "0.2.1"); }
+
+              fn main() { }
+            '')
+          ];
+        };
+        dependencies = [ depCrate01 depCrate02 ];
+        crateRenames = {
+          "my_lib" = [
+            {
+              version = "0.1.2";
+              rename = "lib01";
+            }
+            {
+              version = "0.2.1";
+              rename = "lib02";
+            }
+          ];
+        };
+        buildTests = true;
+        expectedTestOutputs = [
+          "test my_lib_01 ... ok"
+          "test my_lib_02 ... ok"
+        ];
+      };
+      rustLibTestsDefault = {
+        src = mkTestFile "src/lib.rs" "baz";
+        buildTests = true;
+        expectedTestOutputs = [ "test baz ... ok" ];
+      };
+      rustLibTestsCustomLibPath = {
+        libPath = "src/test_path.rs";
+        src = mkTestFile "src/test_path.rs" "bar";
+        buildTests = true;
+        expectedTestOutputs = [ "test bar ... ok" ];
+      };
+      rustLibTestsCustomLibPathWithTests = {
+        libPath = "src/test_path.rs";
+        src = symlinkJoin {
+          name = "rust-lib-tests-custom-lib-path-with-tests-dir";
+          paths = [
+            (mkTestFile "src/test_path.rs" "bar")
+            (mkTestFile "tests/something.rs" "something")
+          ];
+        };
+        buildTests = true;
+        expectedTestOutputs = [
+          "test bar ... ok"
+          "test something ... ok"
+        ];
+      };
+      rustBinTestsCombined = {
+        src = symlinkJoin {
+          name = "rust-bin-tests-combined";
+          paths = [
+            (mkTestFileWithMain "src/main.rs" "src_main")
+            (mkTestFile "tests/foo.rs" "tests_foo")
+            (mkTestFile "tests/bar.rs" "tests_bar")
+          ];
+        };
+        buildTests = true;
+        expectedTestOutputs = [
+          "test src_main ... ok"
+          "test tests_foo ... ok"
+          "test tests_bar ... ok"
+        ];
+      };
+      rustBinTestsSubdirCombined = {
+        src = symlinkJoin {
+          name = "rust-bin-tests-subdir-combined";
+          paths = [
+            (mkTestFileWithMain "src/main.rs" "src_main")
+            (mkTestFile "tests/foo/main.rs" "tests_foo")
+            (mkTestFile "tests/bar/main.rs" "tests_bar")
+          ];
+        };
+        buildTests = true;
+        expectedTestOutputs = [
+          "test src_main ... ok"
+          "test tests_foo ... ok"
+          "test tests_bar ... ok"
+        ];
+      };
+      linkAgainstRlibCrate = {
+        crateName = "foo";
+        src = mkFile  "src/main.rs" ''
+          extern crate somerlib;
+          fn main() {}
+        '';
+        dependencies = [
+          (mkHostCrate {
+            crateName = "somerlib";
+            type = [ "rlib" ];
+            src = mkLib "src/lib.rs";
+          })
+        ];
+      };
+      buildScriptDeps = let
+        depCrate = buildRustCrate: boolVal: mkCrate buildRustCrate {
+          crateName = "bar";
+          src = mkFile "src/lib.rs" ''
+            pub const baz: bool = ${boolVal};
+          '';
+        };
+      in {
+        crateName = "foo";
+        src = symlinkJoin {
+          name = "build-script-and-main";
+          paths = [
+            (mkFile  "src/main.rs" ''
+              extern crate bar;
+              #[cfg(test)]
+              #[test]
+              fn baz_false() { assert!(!bar::baz); }
+              fn main() { }
+            '')
+            (mkFile  "build.rs" ''
+              extern crate bar;
+              fn main() { assert!(bar::baz); }
+            '')
+          ];
+        };
+        buildDependencies = [ (depCrate buildPackages.buildRustCrate "true") ];
+        dependencies = [ (depCrate buildRustCrate "false") ];
+        buildTests = true;
+        expectedTestOutputs = [ "test baz_false ... ok" ];
+      };
+      buildScriptFeatureEnv = {
+        crateName = "build-script-feature-env";
+        features = [ "some-feature" "crate/another_feature" ];
+        src = symlinkJoin {
+          name = "build-script-feature-env";
+          paths = [
+            (mkFile  "src/main.rs" ''
+              #[cfg(test)]
+              #[test]
+              fn feature_not_visible() {
+                assert!(std::env::var("CARGO_FEATURE_SOME_FEATURE").is_err());
+                assert!(option_env!("CARGO_FEATURE_SOME_FEATURE").is_none());
+              }
+              fn main() {}
+            '')
+            (mkFile  "build.rs" ''
+              fn main() {
+                assert!(std::env::var("CARGO_FEATURE_SOME_FEATURE").is_ok());
+                assert!(option_env!("CARGO_FEATURE_SOME_FEATURE").is_none());
+              }
+            '')
+          ];
+        };
+        buildTests = true;
+        expectedTestOutputs = [ "test feature_not_visible ... ok" ];
+      };
+      # Regression test for https://github.com/NixOS/nixpkgs/pull/88054
+      # Build script output should be rewritten as valid env vars.
+      buildScriptIncludeDirDeps = let
+        depCrate = mkHostCrate {
+          crateName = "bar";
+          src = symlinkJoin {
+            name = "build-script-and-include-dir-bar";
+            paths = [
+              (mkFile  "src/lib.rs" ''
+                fn main() { }
+              '')
+              (mkFile  "build.rs" ''
+                use std::path::PathBuf;
+                fn main() { println!("cargo:include-dir={}/src", std::env::current_dir().unwrap_or(PathBuf::from(".")).to_str().unwrap()); }
+              '')
+            ];
+          };
+        };
+      in {
+        crateName = "foo";
+        src = symlinkJoin {
+          name = "build-script-and-include-dir-foo";
+          paths = [
+            (mkFile  "src/main.rs" ''
+              fn main() { }
+            '')
+            (mkFile  "build.rs" ''
+              fn main() { assert!(std::env::var_os("DEP_BAR_INCLUDE_DIR").is_some()); }
+            '')
+          ];
+        };
+        buildDependencies = [ depCrate ];
+        dependencies = [ depCrate ];
+      };
+      # Regression test for https://github.com/NixOS/nixpkgs/issues/74071
+      # Whenever a build.rs file generates files, those should not be overlaid onto the actual source dir
+      buildRsOutDirOverlay = {
+        src = symlinkJoin {
+          name = "buildrs-out-dir-overlay";
+          paths = [
+            (mkLib "src/lib.rs")
+            (mkFile "build.rs" ''
+              use std::env;
+              use std::ffi::OsString;
+              use std::fs;
+              use std::path::Path;
+              fn main() {
+                let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR not set");
+                let out_file = Path::new(&out_dir).join("lib.rs");
+                fs::write(out_file, "invalid rust code!").expect("failed to write lib.rs");
+              }
+            '')
+          ];
+        };
+      };
+      # Regression test for https://github.com/NixOS/nixpkgs/pull/83379
+      # link flag order should be preserved
+      linkOrder = {
+        src = symlinkJoin {
+          name = "buildrs-out-dir-overlay";
+          paths = [
+            (mkFile "build.rs" ''
+              fn main() {
+                // in the other order, linkage will fail
+                println!("cargo:rustc-link-lib=b");
+                println!("cargo:rustc-link-lib=a");
+              }
+            '')
+            (mkFile "src/main.rs" ''
+              extern "C" {
+                fn hello_world();
+              }
+              fn main() {
+                unsafe {
+                  hello_world();
+                }
+              }
+            '')
+          ];
+        };
+        buildInputs = let
+          compile = name: text: let
+            src = writeTextFile {
+              name = "${name}-src.c";
+              inherit text;
+            };
+          in runCommandCC name {} ''
+            mkdir -p $out/lib
+            # Note: On darwin (which defaults to clang) we have to add
+            # `-undefined dynamic_lookup` as otherwise the compilation fails.
+            $CC -shared \
+              ${lib.optionalString stdenv.isDarwin "-undefined dynamic_lookup"} \
+              -o $out/lib/${name}${stdenv.hostPlatform.extensions.sharedLibrary} ${src}
+          '';
+          b = compile "libb" ''
+            #include <stdio.h>
+
+            void hello();
+
+            void hello_world() {
+              hello();
+              printf(" world!\n");
+            }
+          '';
+          a = compile "liba" ''
+            #include <stdio.h>
+
+            void hello() {
+              printf("hello");
+            }
+          '';
+        in [ a b ];
+      };
+      rustCargoTomlInSubDir = {
+        # The "workspace_member" can be set to the sub directory with the crate to build.
+        # By default ".", meaning the top level directory is assumed.
+        # Using null will trigger a search.
+        workspace_member = null;
+        src = symlinkJoin rec {
+          name = "find-cargo-toml";
+          paths = [
+            (mkCargoToml { name = "ignoreMe"; })
+            (mkTestFileWithMain "src/main.rs" "ignore_main")
+
+            (mkCargoToml { name = "rustCargoTomlInSubDir"; path = "subdir/Cargo.toml"; })
+            (mkTestFileWithMain "subdir/src/main.rs" "src_main")
+            (mkTestFile "subdir/tests/foo/main.rs" "tests_foo")
+            (mkTestFile "subdir/tests/bar/main.rs" "tests_bar")
+          ];
+        };
+        buildTests = true;
+        expectedTestOutputs = [
+          "test src_main ... ok"
+          "test tests_foo ... ok"
+          "test tests_bar ... ok"
+        ];
+      };
+
+      rustCargoTomlInTopDir =
+        let
+          withoutCargoTomlSearch = builtins.removeAttrs rustCargoTomlInSubDir [ "workspace_member" ];
+        in
+          withoutCargoTomlSearch // {
+            expectedTestOutputs = [
+              "test ignore_main ... ok"
+            ];
+          };
+      procMacroInPrelude = {
+        procMacro = true;
+        edition = "2018";
+        src = symlinkJoin {
+          name = "proc-macro-in-prelude";
+          paths = [
+            (mkFile "src/lib.rs" ''
+              use proc_macro::TokenTree;
+            '')
+          ];
+        };
+      };
+    };
+    brotliCrates = (callPackage ./brotli-crates.nix {});
+    tests = lib.mapAttrs (key: value: mkTest (value // lib.optionalAttrs (!value?crateName) { crateName = key; })) cases;
+  in tests // rec {
+
+    crateBinWithPathOutputs = assertOutputs {
+      name="crateBinWithPath";
+      crateArgs = {
+        crateBin = [{ name = "test_binary1"; path = "src/foobar.rs"; }];
+        src = mkBin "src/foobar.rs";
+      };
+      expectedFiles = [
+        "./bin/test_binary1"
+      ];
+    };
+
+    crateBinWithPathOutputsDebug = assertOutputs {
+      name="crateBinWithPath";
+      crateArgs = {
+        release = false;
+        crateBin = [{ name = "test_binary1"; path = "src/foobar.rs"; }];
+        src = mkBin "src/foobar.rs";
+      };
+      expectedFiles = [
+        "./bin/test_binary1"
+      ] ++ lib.optionals stdenv.isDarwin [
+        # On Darwin, the debug symbols are in a separate directory.
+        "./bin/test_binary1.dSYM/Contents/Info.plist"
+        "./bin/test_binary1.dSYM/Contents/Resources/DWARF/test_binary1"
+      ];
+    };
+
+    crateBinNoPath1Outputs = assertOutputs {
+      name="crateBinNoPath1";
+      crateArgs = {
+        crateBin = [{ name = "my-binary2"; }];
+        src = mkBin "src/my_binary2.rs";
+      };
+      expectedFiles = [
+        "./bin/my-binary2"
+      ];
+    };
+
+    crateLibOutputs = assertOutputs {
+      name="crateLib";
+      output="lib";
+      crateArgs = {
+        libName = "test_lib";
+        type = [ "rlib" ];
+        libPath = "src/lib.rs";
+        src = mkLib "src/lib.rs";
+      };
+      expectedFiles = [
+        "./nix-support/propagated-build-inputs"
+        "./lib/libtest_lib-HASH.rlib"
+        "./lib/link"
+      ];
+    };
+
+    crateLibOutputsDebug = assertOutputs {
+      name="crateLib";
+      output="lib";
+      crateArgs = {
+        release = false;
+        libName = "test_lib";
+        type = [ "rlib" ];
+        libPath = "src/lib.rs";
+        src = mkLib "src/lib.rs";
+      };
+      expectedFiles = [
+        "./nix-support/propagated-build-inputs"
+        "./lib/libtest_lib-HASH.rlib"
+        "./lib/link"
+      ];
+    };
+
+    brotliTest = let
+      pkg = brotliCrates.brotli_2_5_0 {};
+    in runCommand "run-brotli-test-cmd" {
+      nativeBuildInputs = [ pkg ];
+    } (if stdenv.hostPlatform == stdenv.buildPlatform then ''
+      ${pkg}/bin/brotli -c ${pkg}/bin/brotli > /dev/null && touch $out
+    '' else ''
+      test -x '${pkg}/bin/brotli' && touch $out
+    '');
+    allocNoStdLibTest = let
+      pkg = brotliCrates.alloc_no_stdlib_1_3_0 {};
+    in runCommand "run-alloc-no-stdlib-test-cmd" {
+      nativeBuildInputs = [ pkg ];
+    } ''
+      test -e ${pkg}/bin/example && touch $out
+    '';
+    brotliDecompressorTest = let
+      pkg = brotliCrates.brotli_decompressor_1_3_1 {};
+    in runCommand "run-brotli-decompressor-test-cmd" {
+      nativeBuildInputs = [ pkg ];
+    } ''
+      test -e ${pkg}/bin/brotli-decompressor && touch $out
+    '';
+  };
+  test = releaseTools.aggregate {
+    name = "buildRustCrate-tests";
+    meta = {
+      description = "Test cases for buildRustCrate";
+      maintainers = [ lib.maintainers.andir ];
+    };
+    constituents = builtins.attrValues tests;
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/rust/cargo-vendor-normalise.py b/nixpkgs/pkgs/build-support/rust/cargo-vendor-normalise.py
new file mode 100755
index 000000000000..2d7a18957184
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/cargo-vendor-normalise.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+import sys
+
+import toml
+
+
+def quote(s: str) -> str:
+    # Escape backslashes first so the escapes added below are not doubled.
+    escaped = s.replace("\\", "\\\\").replace('"', r"\"").replace("\n", r"\n")
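+    # For example, quote("crates-io") returns '"crates-io"'.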
+    return '"{}"'.format(escaped)
+
+
+def main() -> None:
+    data = toml.load(sys.stdin)
+
+    assert list(data.keys()) == ["source"]
+
+    # this value is non-deterministic
+    data["source"]["vendored-sources"]["directory"] = "@vendor@"
+
+    lines = []
+    inner = data["source"]
+    for source, attrs in sorted(inner.items()):
+        lines.append("[source.{}]".format(quote(source)))
+        if source == "vendored-sources":
+            lines.append('"directory" = "@vendor@"\n')
+        else:
+            for key, value in sorted(attrs.items()):
+                attr = "{} = {}".format(quote(key), quote(value))
+                lines.append(attr)
+        lines.append("")
+
+    result = "\n".join(lines)
+    real = toml.loads(result)
+    assert real == data, "output = {} while input = {}".format(real, data)
+
+    print(result)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/nixpkgs/pkgs/build-support/rust/carnix.nix b/nixpkgs/pkgs/build-support/rust/carnix.nix
new file mode 100644
index 000000000000..46bbff92a9cc
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/carnix.nix
@@ -0,0 +1,259 @@
+# Generated by carnix 0.9.10: carnix generate-nix
+{ lib, buildPlatform, buildRustCrate, buildRustCrateHelpers, cratesIO, fetchgit }:
+with buildRustCrateHelpers;
+let inherit (lib.lists) fold;
+    inherit (lib.attrsets) recursiveUpdate;
+in
+rec {
+  crates = cratesIO;
+  carnix = crates.crates.carnix."0.10.0" deps;
+  __all = [ (carnix {}) ];
+  deps.aho_corasick."0.6.10" = {
+    memchr = "2.2.0";
+  };
+  deps.ansi_term."0.11.0" = {
+    winapi = "0.3.6";
+  };
+  deps.argon2rs."0.2.5" = {
+    blake2_rfc = "0.2.18";
+    scoped_threadpool = "0.1.9";
+  };
+  deps.arrayvec."0.4.10" = {
+    nodrop = "0.1.13";
+  };
+  deps.atty."0.2.11" = {
+    termion = "1.5.1";
+    libc = "0.2.50";
+    winapi = "0.3.6";
+  };
+  deps.autocfg."0.1.2" = {};
+  deps.backtrace."0.3.14" = {
+    cfg_if = "0.1.7";
+    rustc_demangle = "0.1.13";
+    autocfg = "0.1.2";
+    backtrace_sys = "0.1.28";
+    libc = "0.2.50";
+    winapi = "0.3.6";
+  };
+  deps.backtrace_sys."0.1.28" = {
+    libc = "0.2.50";
+    cc = "1.0.32";
+  };
+  deps.bitflags."1.0.4" = {};
+  deps.blake2_rfc."0.2.18" = {
+    arrayvec = "0.4.10";
+    constant_time_eq = "0.1.3";
+  };
+  deps.carnix."0.10.0" = {
+    clap = "2.32.0";
+    dirs = "1.0.5";
+    env_logger = "0.6.1";
+    failure = "0.1.5";
+    failure_derive = "0.1.5";
+    itertools = "0.8.0";
+    log = "0.4.6";
+    nom = "3.2.1";
+    regex = "1.1.2";
+    serde = "1.0.89";
+    serde_derive = "1.0.89";
+    serde_json = "1.0.39";
+    tempdir = "0.3.7";
+    toml = "0.5.0";
+    url = "1.7.2";
+  };
+  deps.cc."1.0.32" = {};
+  deps.cfg_if."0.1.7" = {};
+  deps.clap."2.32.0" = {
+    atty = "0.2.11";
+    bitflags = "1.0.4";
+    strsim = "0.7.0";
+    textwrap = "0.10.0";
+    unicode_width = "0.1.5";
+    vec_map = "0.8.1";
+    ansi_term = "0.11.0";
+  };
+  deps.cloudabi."0.0.3" = {
+    bitflags = "1.0.4";
+  };
+  deps.constant_time_eq."0.1.3" = {};
+  deps.dirs."1.0.5" = {
+    redox_users = "0.3.0";
+    libc = "0.2.50";
+    winapi = "0.3.6";
+  };
+  deps.either."1.5.1" = {};
+  deps.env_logger."0.6.1" = {
+    atty = "0.2.11";
+    humantime = "1.2.0";
+    log = "0.4.6";
+    regex = "1.1.2";
+    termcolor = "1.0.4";
+  };
+  deps.failure."0.1.5" = {
+    backtrace = "0.3.14";
+    failure_derive = "0.1.5";
+  };
+  deps.failure_derive."0.1.5" = {
+    proc_macro2 = "0.4.27";
+    quote = "0.6.11";
+    syn = "0.15.29";
+    synstructure = "0.10.1";
+  };
+  deps.fuchsia_cprng."0.1.1" = {};
+  deps.humantime."1.2.0" = {
+    quick_error = "1.2.2";
+  };
+  deps.idna."0.1.5" = {
+    matches = "0.1.8";
+    unicode_bidi = "0.3.4";
+    unicode_normalization = "0.1.8";
+  };
+  deps.itertools."0.8.0" = {
+    either = "1.5.1";
+  };
+  deps.itoa."0.4.3" = {};
+  deps.lazy_static."1.3.0" = {};
+  deps.libc."0.2.50" = {};
+  deps.log."0.4.6" = {
+    cfg_if = "0.1.7";
+  };
+  deps.matches."0.1.8" = {};
+  deps.memchr."1.0.2" = {
+    libc = "0.2.50";
+  };
+  deps.memchr."2.2.0" = {};
+  deps.nodrop."0.1.13" = {};
+  deps.nom."3.2.1" = {
+    memchr = "1.0.2";
+  };
+  deps.percent_encoding."1.0.1" = {};
+  deps.proc_macro2."0.4.27" = {
+    unicode_xid = "0.1.0";
+  };
+  deps.quick_error."1.2.2" = {};
+  deps.quote."0.6.11" = {
+    proc_macro2 = "0.4.27";
+  };
+  deps.rand."0.4.6" = {
+    rand_core = "0.3.1";
+    rdrand = "0.4.0";
+    fuchsia_cprng = "0.1.1";
+    libc = "0.2.50";
+    winapi = "0.3.6";
+  };
+  deps.rand_core."0.3.1" = {
+    rand_core = "0.4.0";
+  };
+  deps.rand_core."0.4.0" = {};
+  deps.rand_os."0.1.3" = {
+    rand_core = "0.4.0";
+    rdrand = "0.4.0";
+    cloudabi = "0.0.3";
+    fuchsia_cprng = "0.1.1";
+    libc = "0.2.50";
+    winapi = "0.3.6";
+  };
+  deps.rdrand."0.4.0" = {
+    rand_core = "0.3.1";
+  };
+  deps.redox_syscall."0.1.51" = {};
+  deps.redox_termios."0.1.1" = {
+    redox_syscall = "0.1.51";
+  };
+  deps.redox_users."0.3.0" = {
+    argon2rs = "0.2.5";
+    failure = "0.1.5";
+    rand_os = "0.1.3";
+    redox_syscall = "0.1.51";
+  };
+  deps.regex."1.1.2" = {
+    aho_corasick = "0.6.10";
+    memchr = "2.2.0";
+    regex_syntax = "0.6.5";
+    thread_local = "0.3.6";
+    utf8_ranges = "1.0.2";
+  };
+  deps.regex_syntax."0.6.5" = {
+    ucd_util = "0.1.3";
+  };
+  deps.remove_dir_all."0.5.1" = {
+    winapi = "0.3.6";
+  };
+  deps.rustc_demangle."0.1.13" = {};
+  deps.ryu."0.2.7" = {};
+  deps.scoped_threadpool."0.1.9" = {};
+  deps.serde."1.0.89" = {};
+  deps.serde_derive."1.0.89" = {
+    proc_macro2 = "0.4.27";
+    quote = "0.6.11";
+    syn = "0.15.29";
+  };
+  deps.serde_json."1.0.39" = {
+    itoa = "0.4.3";
+    ryu = "0.2.7";
+    serde = "1.0.89";
+  };
+  deps.smallvec."0.6.9" = {};
+  deps.strsim."0.7.0" = {};
+  deps.syn."0.15.29" = {
+    proc_macro2 = "0.4.27";
+    quote = "0.6.11";
+    unicode_xid = "0.1.0";
+  };
+  deps.synstructure."0.10.1" = {
+    proc_macro2 = "0.4.27";
+    quote = "0.6.11";
+    syn = "0.15.29";
+    unicode_xid = "0.1.0";
+  };
+  deps.tempdir."0.3.7" = {
+    rand = "0.4.6";
+    remove_dir_all = "0.5.1";
+  };
+  deps.termcolor."1.0.4" = {
+    wincolor = "1.0.1";
+  };
+  deps.termion."1.5.1" = {
+    libc = "0.2.50";
+    redox_syscall = "0.1.51";
+    redox_termios = "0.1.1";
+  };
+  deps.textwrap."0.10.0" = {
+    unicode_width = "0.1.5";
+  };
+  deps.thread_local."0.3.6" = {
+    lazy_static = "1.3.0";
+  };
+  deps.toml."0.5.0" = {
+    serde = "1.0.89";
+  };
+  deps.ucd_util."0.1.3" = {};
+  deps.unicode_bidi."0.3.4" = {
+    matches = "0.1.8";
+  };
+  deps.unicode_normalization."0.1.8" = {
+    smallvec = "0.6.9";
+  };
+  deps.unicode_width."0.1.5" = {};
+  deps.unicode_xid."0.1.0" = {};
+  deps.url."1.7.2" = {
+    idna = "0.1.5";
+    matches = "0.1.8";
+    percent_encoding = "1.0.1";
+  };
+  deps.utf8_ranges."1.0.2" = {};
+  deps.vec_map."0.8.1" = {};
+  deps.winapi."0.3.6" = {
+    winapi_i686_pc_windows_gnu = "0.4.0";
+    winapi_x86_64_pc_windows_gnu = "0.4.0";
+  };
+  deps.winapi_i686_pc_windows_gnu."0.4.0" = {};
+  deps.winapi_util."0.1.2" = {
+    winapi = "0.3.6";
+  };
+  deps.winapi_x86_64_pc_windows_gnu."0.4.0" = {};
+  deps.wincolor."1.0.1" = {
+    winapi = "0.3.6";
+    winapi_util = "0.1.2";
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/rust/crates-io.nix b/nixpkgs/pkgs/build-support/rust/crates-io.nix
new file mode 100644
index 000000000000..66f98cd99121
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/crates-io.nix
@@ -0,0 +1,7756 @@
+{ lib, buildRustCrate, buildRustCrateHelpers }:
+with buildRustCrateHelpers;
+let inherit (lib.lists) fold;
+    inherit (lib.attrsets) recursiveUpdate;
+in
+rec {
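+# Each vendored crate is described twice: crates.<name>."<version>" is a
+# function from a deps attrset (and an optional feature set) to a
+# buildRustCrate derivation, while features_.<name>."<version>" computes
+# which Cargo features to enable, propagating defaults and feature
+# implications through the dependency graph.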
+# aho-corasick-0.6.10
+
+  crates.aho_corasick."0.6.10" = deps: { features?(features_.aho_corasick."0.6.10" deps {}) }: buildRustCrate {
+    crateName = "aho-corasick";
+    version = "0.6.10";
+    description = "Fast multiple substring searching with finite state machines.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "0bhasxfpmfmz1460chwsx59vdld05axvmk1nbp3sd48xav3d108p";
+    libName = "aho_corasick";
+    crateBin =
+      [{  name = "aho-corasick-dot";  path = "src/main.rs"; }];
+    dependencies = mapFeatures features ([
+      (crates."memchr"."${deps."aho_corasick"."0.6.10"."memchr"}" deps)
+    ]);
+  };
+  features_.aho_corasick."0.6.10" = deps: f: updateFeatures f ({
+    aho_corasick."0.6.10".default = (f.aho_corasick."0.6.10".default or true);
+    memchr."${deps.aho_corasick."0.6.10".memchr}".default = true;
+  }) [
+    (features_.memchr."${deps."aho_corasick"."0.6.10"."memchr"}" deps)
+  ];
+
+
+# end
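+# As a usage sketch (a hypothetical minimal call, versions taken from the
+# entries above): the builder is applied first to a deps attrset and then to
+# a feature-override set, e.g.
+#   (crates.aho_corasick."0.6.10" { aho_corasick."0.6.10".memchr = "2.2.0"; } {})
+# which mirrors how carnix.nix applies `crates.crates.carnix."0.10.0" deps`
+# and then calls `(carnix {})`.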
+# aho-corasick-0.6.8
+
+  crates.aho_corasick."0.6.8" = deps: { features?(features_.aho_corasick."0.6.8" deps {}) }: buildRustCrate {
+    crateName = "aho-corasick";
+    version = "0.6.8";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "04bz5m32ykyn946iwxgbrl8nwca7ssxsqma140hgmkchaay80nfr";
+    libName = "aho_corasick";
+    crateBin =
+      [{  name = "aho-corasick-dot";  path = "src/main.rs"; }];
+    dependencies = mapFeatures features ([
+      (crates."memchr"."${deps."aho_corasick"."0.6.8"."memchr"}" deps)
+    ]);
+  };
+  features_.aho_corasick."0.6.8" = deps: f: updateFeatures f ({
+    aho_corasick."0.6.8".default = (f.aho_corasick."0.6.8".default or true);
+    memchr."${deps.aho_corasick."0.6.8".memchr}".default = true;
+  }) [
+    (features_.memchr."${deps."aho_corasick"."0.6.8"."memchr"}" deps)
+  ];
+
+
+# end
+# ansi_term-0.11.0
+
+  crates.ansi_term."0.11.0" = deps: { features?(features_.ansi_term."0.11.0" deps {}) }: buildRustCrate {
+    crateName = "ansi_term";
+    version = "0.11.0";
+    authors = [ "ogham@bsago.me" "Ryan Scheel (Havvy) <ryan.havvy@gmail.com>" "Josh Triplett <josh@joshtriplett.org>" ];
+    sha256 = "08fk0p2xvkqpmz3zlrwnf6l8sj2vngw464rvzspzp31sbgxbwm4v";
+    dependencies = (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."ansi_term"."0.11.0"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.ansi_term."0.11.0" = deps: f: updateFeatures f ({
+    ansi_term."0.11.0".default = (f.ansi_term."0.11.0".default or true);
+    winapi = fold recursiveUpdate {} [
+      { "${deps.ansi_term."0.11.0".winapi}"."consoleapi" = true; }
+      { "${deps.ansi_term."0.11.0".winapi}"."errhandlingapi" = true; }
+      { "${deps.ansi_term."0.11.0".winapi}"."processenv" = true; }
+      { "${deps.ansi_term."0.11.0".winapi}".default = true; }
+    ];
+  }) [
+    (features_.winapi."${deps."ansi_term"."0.11.0"."winapi"}" deps)
+  ];
+
+
+# end
+# argon2rs-0.2.5
+
+  crates.argon2rs."0.2.5" = deps: { features?(features_.argon2rs."0.2.5" deps {}) }: buildRustCrate {
+    crateName = "argon2rs";
+    version = "0.2.5";
+    authors = [ "bryant <bryant@defrag.in>" ];
+    sha256 = "1byl9b3wwyrarn8qack21v5fi2qsnn3y5clvikk2apskhmnih1rw";
+    dependencies = mapFeatures features ([
+      (crates."blake2_rfc"."${deps."argon2rs"."0.2.5"."blake2_rfc"}" deps)
+      (crates."scoped_threadpool"."${deps."argon2rs"."0.2.5"."scoped_threadpool"}" deps)
+    ]);
+    features = mkFeatures (features."argon2rs"."0.2.5" or {});
+  };
+  features_.argon2rs."0.2.5" = deps: f: updateFeatures f (rec {
+    argon2rs."0.2.5".default = (f.argon2rs."0.2.5".default or true);
+    blake2_rfc = fold recursiveUpdate {} [
+      { "${deps.argon2rs."0.2.5".blake2_rfc}"."simd_asm" =
+        (f.blake2_rfc."${deps.argon2rs."0.2.5".blake2_rfc}"."simd_asm" or false) ||
+        (argon2rs."0.2.5"."simd" or false) ||
+        (f."argon2rs"."0.2.5"."simd" or false); }
+      { "${deps.argon2rs."0.2.5".blake2_rfc}".default = true; }
+    ];
+    scoped_threadpool."${deps.argon2rs."0.2.5".scoped_threadpool}".default = true;
+  }) [
+    (features_.blake2_rfc."${deps."argon2rs"."0.2.5"."blake2_rfc"}" deps)
+    (features_.scoped_threadpool."${deps."argon2rs"."0.2.5"."scoped_threadpool"}" deps)
+  ];
+
+
+# end
+# arrayvec-0.4.10
+
+  crates.arrayvec."0.4.10" = deps: { features?(features_.arrayvec."0.4.10" deps {}) }: buildRustCrate {
+    crateName = "arrayvec";
+    version = "0.4.10";
+    description = "A vector with fixed capacity, backed by an array (it can be stored on the stack too). Implements fixed capacity ArrayVec and ArrayString.";
+    authors = [ "bluss" ];
+    sha256 = "0qbh825i59w5wfdysqdkiwbwkrsy7lgbd4pwbyb8pxx8wc36iny8";
+    dependencies = mapFeatures features ([
+      (crates."nodrop"."${deps."arrayvec"."0.4.10"."nodrop"}" deps)
+    ]);
+    features = mkFeatures (features."arrayvec"."0.4.10" or {});
+  };
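+  # The fold of recursiveUpdate entries below encodes Cargo feature
+  # implications as disjunctions: e.g. requesting "serde-1" switches on
+  # "serde", and the crate's "default" feature switches on "std".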
+  features_.arrayvec."0.4.10" = deps: f: updateFeatures f (rec {
+    arrayvec = fold recursiveUpdate {} [
+      { "0.4.10"."serde" =
+        (f.arrayvec."0.4.10"."serde" or false) ||
+        (f.arrayvec."0.4.10".serde-1 or false) ||
+        (arrayvec."0.4.10"."serde-1" or false); }
+      { "0.4.10"."std" =
+        (f.arrayvec."0.4.10"."std" or false) ||
+        (f.arrayvec."0.4.10".default or false) ||
+        (arrayvec."0.4.10"."default" or false); }
+      { "0.4.10".default = (f.arrayvec."0.4.10".default or true); }
+    ];
+    nodrop."${deps.arrayvec."0.4.10".nodrop}".default = (f.nodrop."${deps.arrayvec."0.4.10".nodrop}".default or false);
+  }) [
+    (features_.nodrop."${deps."arrayvec"."0.4.10"."nodrop"}" deps)
+  ];
+
+
+# end
+# arrayvec-0.4.7
+
+  crates.arrayvec."0.4.7" = deps: { features?(features_.arrayvec."0.4.7" deps {}) }: buildRustCrate {
+    crateName = "arrayvec";
+    version = "0.4.7";
+    authors = [ "bluss" ];
+    sha256 = "0fzgv7z1x1qnyd7j32vdcadk4k9wfx897y06mr3bw1yi52iqf4z4";
+    dependencies = mapFeatures features ([
+      (crates."nodrop"."${deps."arrayvec"."0.4.7"."nodrop"}" deps)
+    ]);
+    features = mkFeatures (features."arrayvec"."0.4.7" or {});
+  };
+  features_.arrayvec."0.4.7" = deps: f: updateFeatures f (rec {
+    arrayvec = fold recursiveUpdate {} [
+      { "0.4.7".default = (f.arrayvec."0.4.7".default or true); }
+      { "0.4.7".serde =
+        (f.arrayvec."0.4.7".serde or false) ||
+        (f.arrayvec."0.4.7".serde-1 or false) ||
+        (arrayvec."0.4.7"."serde-1" or false); }
+      { "0.4.7".std =
+        (f.arrayvec."0.4.7".std or false) ||
+        (f.arrayvec."0.4.7".default or false) ||
+        (arrayvec."0.4.7"."default" or false); }
+    ];
+    nodrop."${deps.arrayvec."0.4.7".nodrop}".default = (f.nodrop."${deps.arrayvec."0.4.7".nodrop}".default or false);
+  }) [
+    (features_.nodrop."${deps."arrayvec"."0.4.7"."nodrop"}" deps)
+  ];
+
+
+# end
+# atty-0.2.11
+
+  crates.atty."0.2.11" = deps: { features?(features_.atty."0.2.11" deps {}) }: buildRustCrate {
+    crateName = "atty";
+    version = "0.2.11";
+    authors = [ "softprops <d.tangren@gmail.com>" ];
+    sha256 = "0by1bj2km9jxi4i4g76zzi76fc2rcm9934jpnyrqd95zw344pb20";
+    dependencies = (if kernel == "redox" then mapFeatures features ([
+      (crates."termion"."${deps."atty"."0.2.11"."termion"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."atty"."0.2.11"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."atty"."0.2.11"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.atty."0.2.11" = deps: f: updateFeatures f ({
+    atty."0.2.11".default = (f.atty."0.2.11".default or true);
+    libc."${deps.atty."0.2.11".libc}".default = (f.libc."${deps.atty."0.2.11".libc}".default or false);
+    termion."${deps.atty."0.2.11".termion}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.atty."0.2.11".winapi}"."consoleapi" = true; }
+      { "${deps.atty."0.2.11".winapi}"."minwinbase" = true; }
+      { "${deps.atty."0.2.11".winapi}"."minwindef" = true; }
+      { "${deps.atty."0.2.11".winapi}"."processenv" = true; }
+      { "${deps.atty."0.2.11".winapi}"."winbase" = true; }
+      { "${deps.atty."0.2.11".winapi}".default = true; }
+    ];
+  }) [
+    (features_.termion."${deps."atty"."0.2.11"."termion"}" deps)
+    (features_.libc."${deps."atty"."0.2.11"."libc"}" deps)
+    (features_.winapi."${deps."atty"."0.2.11"."winapi"}" deps)
+  ];
+
+
+# end
+# autocfg-0.1.2
+
+  crates.autocfg."0.1.2" = deps: { features?(features_.autocfg."0.1.2" deps {}) }: buildRustCrate {
+    crateName = "autocfg";
+    version = "0.1.2";
+    description = "Automatic cfg for Rust compiler features";
+    authors = [ "Josh Stone <cuviper@gmail.com>" ];
+    sha256 = "0dv81dwnp1al3j4ffz007yrjv4w1c7hw09gnf0xs3icxiw6qqfs3";
+  };
+  features_.autocfg."0.1.2" = deps: f: updateFeatures f ({
+    autocfg."0.1.2".default = (f.autocfg."0.1.2".default or true);
+  }) [];
+
+
+# end
+# backtrace-0.3.14
+
+  crates.backtrace."0.3.14" = deps: { features?(features_.backtrace."0.3.14" deps {}) }: buildRustCrate {
+    crateName = "backtrace";
+    version = "0.3.14";
+    description = "A library to acquire a stack trace (backtrace) at runtime in a Rust program.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" "The Rust Project Developers" ];
+    sha256 = "0sp0ib8r5w9sv1g2nkm9yclp16j46yjglw0yhkmh0snf355633mz";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."backtrace"."0.3.14"."cfg_if"}" deps)
+      (crates."rustc_demangle"."${deps."backtrace"."0.3.14"."rustc_demangle"}" deps)
+    ])
+      ++ (if (kernel == "linux" || kernel == "darwin") && !(kernel == "fuchsia") && !(kernel == "emscripten") && !(kernel == "darwin") && !(kernel == "ios") then mapFeatures features ([
+    ]
+      ++ (if features.backtrace."0.3.14".backtrace-sys or false then [ (crates.backtrace_sys."${deps."backtrace"."0.3.14".backtrace_sys}" deps) ] else [])) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") || abi == "sgx" then mapFeatures features ([
+      (crates."libc"."${deps."backtrace"."0.3.14"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."backtrace"."0.3.14"."winapi"}" deps)
+    ]) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."autocfg"."${deps."backtrace"."0.3.14"."autocfg"}" deps)
+    ]);
+    features = mkFeatures (features."backtrace"."0.3.14" or {});
+  };
+  features_.backtrace."0.3.14" = deps: f: updateFeatures f (rec {
+    autocfg."${deps.backtrace."0.3.14".autocfg}".default = true;
+    backtrace = fold recursiveUpdate {} [
+      { "0.3.14"."addr2line" =
+        (f.backtrace."0.3.14"."addr2line" or false) ||
+        (f.backtrace."0.3.14".gimli-symbolize or false) ||
+        (backtrace."0.3.14"."gimli-symbolize" or false); }
+      { "0.3.14"."backtrace-sys" =
+        (f.backtrace."0.3.14"."backtrace-sys" or false) ||
+        (f.backtrace."0.3.14".libbacktrace or false) ||
+        (backtrace."0.3.14"."libbacktrace" or false); }
+      { "0.3.14"."coresymbolication" =
+        (f.backtrace."0.3.14"."coresymbolication" or false) ||
+        (f.backtrace."0.3.14".default or false) ||
+        (backtrace."0.3.14"."default" or false); }
+      { "0.3.14"."dbghelp" =
+        (f.backtrace."0.3.14"."dbghelp" or false) ||
+        (f.backtrace."0.3.14".default or false) ||
+        (backtrace."0.3.14"."default" or false); }
+      { "0.3.14"."dladdr" =
+        (f.backtrace."0.3.14"."dladdr" or false) ||
+        (f.backtrace."0.3.14".default or false) ||
+        (backtrace."0.3.14"."default" or false); }
+      { "0.3.14"."findshlibs" =
+        (f.backtrace."0.3.14"."findshlibs" or false) ||
+        (f.backtrace."0.3.14".gimli-symbolize or false) ||
+        (backtrace."0.3.14"."gimli-symbolize" or false); }
+      { "0.3.14"."gimli" =
+        (f.backtrace."0.3.14"."gimli" or false) ||
+        (f.backtrace."0.3.14".gimli-symbolize or false) ||
+        (backtrace."0.3.14"."gimli-symbolize" or false); }
+      { "0.3.14"."libbacktrace" =
+        (f.backtrace."0.3.14"."libbacktrace" or false) ||
+        (f.backtrace."0.3.14".default or false) ||
+        (backtrace."0.3.14"."default" or false); }
+      { "0.3.14"."libunwind" =
+        (f.backtrace."0.3.14"."libunwind" or false) ||
+        (f.backtrace."0.3.14".default or false) ||
+        (backtrace."0.3.14"."default" or false); }
+      { "0.3.14"."memmap" =
+        (f.backtrace."0.3.14"."memmap" or false) ||
+        (f.backtrace."0.3.14".gimli-symbolize or false) ||
+        (backtrace."0.3.14"."gimli-symbolize" or false); }
+      { "0.3.14"."object" =
+        (f.backtrace."0.3.14"."object" or false) ||
+        (f.backtrace."0.3.14".gimli-symbolize or false) ||
+        (backtrace."0.3.14"."gimli-symbolize" or false); }
+      { "0.3.14"."rustc-serialize" =
+        (f.backtrace."0.3.14"."rustc-serialize" or false) ||
+        (f.backtrace."0.3.14".serialize-rustc or false) ||
+        (backtrace."0.3.14"."serialize-rustc" or false); }
+      { "0.3.14"."serde" =
+        (f.backtrace."0.3.14"."serde" or false) ||
+        (f.backtrace."0.3.14".serialize-serde or false) ||
+        (backtrace."0.3.14"."serialize-serde" or false); }
+      { "0.3.14"."serde_derive" =
+        (f.backtrace."0.3.14"."serde_derive" or false) ||
+        (f.backtrace."0.3.14".serialize-serde or false) ||
+        (backtrace."0.3.14"."serialize-serde" or false); }
+      { "0.3.14"."std" =
+        (f.backtrace."0.3.14"."std" or false) ||
+        (f.backtrace."0.3.14".default or false) ||
+        (backtrace."0.3.14"."default" or false) ||
+        (f.backtrace."0.3.14".libbacktrace or false) ||
+        (backtrace."0.3.14"."libbacktrace" or false); }
+      { "0.3.14".default = (f.backtrace."0.3.14".default or true); }
+    ];
+    backtrace_sys."${deps.backtrace."0.3.14".backtrace_sys}".default = true;
+    cfg_if."${deps.backtrace."0.3.14".cfg_if}".default = true;
+    libc."${deps.backtrace."0.3.14".libc}".default = (f.libc."${deps.backtrace."0.3.14".libc}".default or false);
+    rustc_demangle."${deps.backtrace."0.3.14".rustc_demangle}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.backtrace."0.3.14".winapi}"."dbghelp" = true; }
+      { "${deps.backtrace."0.3.14".winapi}"."minwindef" = true; }
+      { "${deps.backtrace."0.3.14".winapi}"."processthreadsapi" = true; }
+      { "${deps.backtrace."0.3.14".winapi}"."winnt" = true; }
+      { "${deps.backtrace."0.3.14".winapi}".default = true; }
+    ];
+  }) [
+    (features_.cfg_if."${deps."backtrace"."0.3.14"."cfg_if"}" deps)
+    (features_.rustc_demangle."${deps."backtrace"."0.3.14"."rustc_demangle"}" deps)
+    (features_.autocfg."${deps."backtrace"."0.3.14"."autocfg"}" deps)
+    (features_.backtrace_sys."${deps."backtrace"."0.3.14"."backtrace_sys"}" deps)
+    (features_.libc."${deps."backtrace"."0.3.14"."libc"}" deps)
+    (features_.winapi."${deps."backtrace"."0.3.14"."winapi"}" deps)
+  ];
+
+
+# end
+# backtrace-0.3.9
+
+  crates.backtrace."0.3.9" = deps: { features?(features_.backtrace."0.3.9" deps {}) }: buildRustCrate {
+    crateName = "backtrace";
+    version = "0.3.9";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" "The Rust Project Developers" ];
+    sha256 = "137pjkcn89b7fqk78w65ggj92pynmf1hkr1sjz53aga4b50lkmwm";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."backtrace"."0.3.9"."cfg_if"}" deps)
+      (crates."rustc_demangle"."${deps."backtrace"."0.3.9"."rustc_demangle"}" deps)
+    ])
+      ++ (if (kernel == "linux" || kernel == "darwin") && !(kernel == "fuchsia") && !(kernel == "emscripten") && !(kernel == "darwin") && !(kernel == "ios") then mapFeatures features ([
+    ]
+      ++ (if features.backtrace."0.3.9".backtrace-sys or false then [ (crates.backtrace_sys."${deps."backtrace"."0.3.9".backtrace_sys}" deps) ] else [])) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."backtrace"."0.3.9"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+    ]
+      ++ (if features.backtrace."0.3.9".winapi or false then [ (crates.winapi."${deps."backtrace"."0.3.9".winapi}" deps) ] else [])) else []);
+    features = mkFeatures (features."backtrace"."0.3.9" or {});
+  };
+  features_.backtrace."0.3.9" = deps: f: updateFeatures f (rec {
+    backtrace = fold recursiveUpdate {} [
+      { "0.3.9".addr2line =
+        (f.backtrace."0.3.9".addr2line or false) ||
+        (f.backtrace."0.3.9".gimli-symbolize or false) ||
+        (backtrace."0.3.9"."gimli-symbolize" or false); }
+      { "0.3.9".backtrace-sys =
+        (f.backtrace."0.3.9".backtrace-sys or false) ||
+        (f.backtrace."0.3.9".libbacktrace or false) ||
+        (backtrace."0.3.9"."libbacktrace" or false); }
+      { "0.3.9".coresymbolication =
+        (f.backtrace."0.3.9".coresymbolication or false) ||
+        (f.backtrace."0.3.9".default or false) ||
+        (backtrace."0.3.9"."default" or false); }
+      { "0.3.9".dbghelp =
+        (f.backtrace."0.3.9".dbghelp or false) ||
+        (f.backtrace."0.3.9".default or false) ||
+        (backtrace."0.3.9"."default" or false); }
+      { "0.3.9".default = (f.backtrace."0.3.9".default or true); }
+      { "0.3.9".dladdr =
+        (f.backtrace."0.3.9".dladdr or false) ||
+        (f.backtrace."0.3.9".default or false) ||
+        (backtrace."0.3.9"."default" or false); }
+      { "0.3.9".findshlibs =
+        (f.backtrace."0.3.9".findshlibs or false) ||
+        (f.backtrace."0.3.9".gimli-symbolize or false) ||
+        (backtrace."0.3.9"."gimli-symbolize" or false); }
+      { "0.3.9".gimli =
+        (f.backtrace."0.3.9".gimli or false) ||
+        (f.backtrace."0.3.9".gimli-symbolize or false) ||
+        (backtrace."0.3.9"."gimli-symbolize" or false); }
+      { "0.3.9".libbacktrace =
+        (f.backtrace."0.3.9".libbacktrace or false) ||
+        (f.backtrace."0.3.9".default or false) ||
+        (backtrace."0.3.9"."default" or false); }
+      { "0.3.9".libunwind =
+        (f.backtrace."0.3.9".libunwind or false) ||
+        (f.backtrace."0.3.9".default or false) ||
+        (backtrace."0.3.9"."default" or false); }
+      { "0.3.9".memmap =
+        (f.backtrace."0.3.9".memmap or false) ||
+        (f.backtrace."0.3.9".gimli-symbolize or false) ||
+        (backtrace."0.3.9"."gimli-symbolize" or false); }
+      { "0.3.9".object =
+        (f.backtrace."0.3.9".object or false) ||
+        (f.backtrace."0.3.9".gimli-symbolize or false) ||
+        (backtrace."0.3.9"."gimli-symbolize" or false); }
+      { "0.3.9".rustc-serialize =
+        (f.backtrace."0.3.9".rustc-serialize or false) ||
+        (f.backtrace."0.3.9".serialize-rustc or false) ||
+        (backtrace."0.3.9"."serialize-rustc" or false); }
+      { "0.3.9".serde =
+        (f.backtrace."0.3.9".serde or false) ||
+        (f.backtrace."0.3.9".serialize-serde or false) ||
+        (backtrace."0.3.9"."serialize-serde" or false); }
+      { "0.3.9".serde_derive =
+        (f.backtrace."0.3.9".serde_derive or false) ||
+        (f.backtrace."0.3.9".serialize-serde or false) ||
+        (backtrace."0.3.9"."serialize-serde" or false); }
+      { "0.3.9".winapi =
+        (f.backtrace."0.3.9".winapi or false) ||
+        (f.backtrace."0.3.9".dbghelp or false) ||
+        (backtrace."0.3.9"."dbghelp" or false); }
+    ];
+    backtrace_sys."${deps.backtrace."0.3.9".backtrace_sys}".default = true;
+    cfg_if."${deps.backtrace."0.3.9".cfg_if}".default = true;
+    libc."${deps.backtrace."0.3.9".libc}".default = true;
+    rustc_demangle."${deps.backtrace."0.3.9".rustc_demangle}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.backtrace."0.3.9".winapi}"."dbghelp" = true; }
+      { "${deps.backtrace."0.3.9".winapi}"."minwindef" = true; }
+      { "${deps.backtrace."0.3.9".winapi}"."processthreadsapi" = true; }
+      { "${deps.backtrace."0.3.9".winapi}"."std" = true; }
+      { "${deps.backtrace."0.3.9".winapi}"."winnt" = true; }
+      { "${deps.backtrace."0.3.9".winapi}".default = true; }
+    ];
+  }) [
+    (features_.cfg_if."${deps."backtrace"."0.3.9"."cfg_if"}" deps)
+    (features_.rustc_demangle."${deps."backtrace"."0.3.9"."rustc_demangle"}" deps)
+    (features_.backtrace_sys."${deps."backtrace"."0.3.9"."backtrace_sys"}" deps)
+    (features_.libc."${deps."backtrace"."0.3.9"."libc"}" deps)
+    (features_.winapi."${deps."backtrace"."0.3.9"."winapi"}" deps)
+  ];
+
+
+# end
+# backtrace-sys-0.1.24
+
+  crates.backtrace_sys."0.1.24" = deps: { features?(features_.backtrace_sys."0.1.24" deps {}) }: buildRustCrate {
+    crateName = "backtrace-sys";
+    version = "0.1.24";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "15d6jlknykiijcin3vqbx33760w24ss5qw3l1xd3hms5k4vc8305";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."backtrace_sys"."0.1.24"."libc"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."backtrace_sys"."0.1.24"."cc"}" deps)
+    ]);
+  };
+  features_.backtrace_sys."0.1.24" = deps: f: updateFeatures f ({
+    backtrace_sys."0.1.24".default = (f.backtrace_sys."0.1.24".default or true);
+    cc."${deps.backtrace_sys."0.1.24".cc}".default = true;
+    libc."${deps.backtrace_sys."0.1.24".libc}".default = true;
+  }) [
+    (features_.libc."${deps."backtrace_sys"."0.1.24"."libc"}" deps)
+    (features_.cc."${deps."backtrace_sys"."0.1.24"."cc"}" deps)
+  ];
+
+
+# end
+# backtrace-sys-0.1.28
+
+  crates.backtrace_sys."0.1.28" = deps: { features?(features_.backtrace_sys."0.1.28" deps {}) }: buildRustCrate {
+    crateName = "backtrace-sys";
+    version = "0.1.28";
+    description = "Bindings to the libbacktrace gcc library\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1bbw8chs0wskxwzz7f3yy7mjqhyqj8lslq8pcjw1rbd2g23c34xl";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."backtrace_sys"."0.1.28"."libc"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."backtrace_sys"."0.1.28"."cc"}" deps)
+    ]);
+  };
+  features_.backtrace_sys."0.1.28" = deps: f: updateFeatures f ({
+    backtrace_sys."0.1.28".default = (f.backtrace_sys."0.1.28".default or true);
+    cc."${deps.backtrace_sys."0.1.28".cc}".default = true;
+    libc."${deps.backtrace_sys."0.1.28".libc}".default = (f.libc."${deps.backtrace_sys."0.1.28".libc}".default or false);
+  }) [
+    (features_.libc."${deps."backtrace_sys"."0.1.28"."libc"}" deps)
+    (features_.cc."${deps."backtrace_sys"."0.1.28"."cc"}" deps)
+  ];
+
+
+# end
+# bitflags-1.0.4
+
+  crates.bitflags."1.0.4" = deps: { features?(features_.bitflags."1.0.4" deps {}) }: buildRustCrate {
+    crateName = "bitflags";
+    version = "1.0.4";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1g1wmz2001qmfrd37dnd5qiss5njrw26aywmg6yhkmkbyrhjxb08";
+    features = mkFeatures (features."bitflags"."1.0.4" or {});
+  };
+  features_.bitflags."1.0.4" = deps: f: updateFeatures f ({
+    bitflags."1.0.4".default = (f.bitflags."1.0.4".default or true);
+  }) [];
+
+
+# end
+# blake2-rfc-0.2.18
+
+  crates.blake2_rfc."0.2.18" = deps: { features?(features_.blake2_rfc."0.2.18" deps {}) }: buildRustCrate {
+    crateName = "blake2-rfc";
+    version = "0.2.18";
+    authors = [ "Cesar Eduardo Barros <cesarb@cesarb.eti.br>" ];
+    sha256 = "0pyqrik4471ljk16prs0iwb2sam39z0z6axyyjxlqxdmf4wprf0l";
+    dependencies = mapFeatures features ([
+      (crates."arrayvec"."${deps."blake2_rfc"."0.2.18"."arrayvec"}" deps)
+      (crates."constant_time_eq"."${deps."blake2_rfc"."0.2.18"."constant_time_eq"}" deps)
+    ]);
+    features = mkFeatures (features."blake2_rfc"."0.2.18" or {});
+  };
+  features_.blake2_rfc."0.2.18" = deps: f: updateFeatures f (rec {
+    arrayvec."${deps.blake2_rfc."0.2.18".arrayvec}".default = (f.arrayvec."${deps.blake2_rfc."0.2.18".arrayvec}".default or false);
+    blake2_rfc = fold recursiveUpdate {} [
+      { "0.2.18".default = (f.blake2_rfc."0.2.18".default or true); }
+      { "0.2.18".simd =
+        (f.blake2_rfc."0.2.18".simd or false) ||
+        (f.blake2_rfc."0.2.18".simd_opt or false) ||
+        (blake2_rfc."0.2.18"."simd_opt" or false); }
+      { "0.2.18".simd_opt =
+        (f.blake2_rfc."0.2.18".simd_opt or false) ||
+        (f.blake2_rfc."0.2.18".simd_asm or false) ||
+        (blake2_rfc."0.2.18"."simd_asm" or false); }
+      { "0.2.18".std =
+        (f.blake2_rfc."0.2.18".std or false) ||
+        (f.blake2_rfc."0.2.18".default or false) ||
+        (blake2_rfc."0.2.18"."default" or false); }
+    ];
+    constant_time_eq."${deps.blake2_rfc."0.2.18".constant_time_eq}".default = true;
+  }) [
+    (features_.arrayvec."${deps."blake2_rfc"."0.2.18"."arrayvec"}" deps)
+    (features_.constant_time_eq."${deps."blake2_rfc"."0.2.18"."constant_time_eq"}" deps)
+  ];
+
+
+# end
+# carnix-0.10.0
+
+  crates.carnix."0.10.0" = deps: { features?(features_.carnix."0.10.0" deps {}) }: buildRustCrate {
+    crateName = "carnix";
+    version = "0.10.0";
+    description = "Generate Nix expressions from Cargo.lock files (in order to use Nix as a build system for crates).";
+    authors = [ "pe@pijul.org <pe@pijul.org>" ];
+    sha256 = "0hrp22yvrqnhaanr0ckrwihx9j3irhzd2cmb19sp49ksdi25d8ri";
+    crateBin =
+      [{  name = "cargo-generate-nixfile";  path = "src/cargo-generate-nixfile.rs"; }] ++
+      [{  name = "carnix";  path = "src/main.rs"; }];
+    dependencies = mapFeatures features ([
+      (crates."clap"."${deps."carnix"."0.10.0"."clap"}" deps)
+      (crates."dirs"."${deps."carnix"."0.10.0"."dirs"}" deps)
+      (crates."env_logger"."${deps."carnix"."0.10.0"."env_logger"}" deps)
+      (crates."failure"."${deps."carnix"."0.10.0"."failure"}" deps)
+      (crates."failure_derive"."${deps."carnix"."0.10.0"."failure_derive"}" deps)
+      (crates."itertools"."${deps."carnix"."0.10.0"."itertools"}" deps)
+      (crates."log"."${deps."carnix"."0.10.0"."log"}" deps)
+      (crates."nom"."${deps."carnix"."0.10.0"."nom"}" deps)
+      (crates."regex"."${deps."carnix"."0.10.0"."regex"}" deps)
+      (crates."serde"."${deps."carnix"."0.10.0"."serde"}" deps)
+      (crates."serde_derive"."${deps."carnix"."0.10.0"."serde_derive"}" deps)
+      (crates."serde_json"."${deps."carnix"."0.10.0"."serde_json"}" deps)
+      (crates."tempdir"."${deps."carnix"."0.10.0"."tempdir"}" deps)
+      (crates."toml"."${deps."carnix"."0.10.0"."toml"}" deps)
+      (crates."url"."${deps."carnix"."0.10.0"."url"}" deps)
+    ]);
+  };
+  features_.carnix."0.10.0" = deps: f: updateFeatures f ({
+    carnix."0.10.0".default = (f.carnix."0.10.0".default or true);
+    clap."${deps.carnix."0.10.0".clap}".default = true;
+    dirs."${deps.carnix."0.10.0".dirs}".default = true;
+    env_logger."${deps.carnix."0.10.0".env_logger}".default = true;
+    failure."${deps.carnix."0.10.0".failure}".default = true;
+    failure_derive."${deps.carnix."0.10.0".failure_derive}".default = true;
+    itertools."${deps.carnix."0.10.0".itertools}".default = true;
+    log."${deps.carnix."0.10.0".log}".default = true;
+    nom."${deps.carnix."0.10.0".nom}".default = true;
+    regex."${deps.carnix."0.10.0".regex}".default = true;
+    serde."${deps.carnix."0.10.0".serde}".default = true;
+    serde_derive."${deps.carnix."0.10.0".serde_derive}".default = true;
+    serde_json."${deps.carnix."0.10.0".serde_json}".default = true;
+    tempdir."${deps.carnix."0.10.0".tempdir}".default = true;
+    toml."${deps.carnix."0.10.0".toml}".default = true;
+    url."${deps.carnix."0.10.0".url}".default = true;
+  }) [
+    (features_.clap."${deps."carnix"."0.10.0"."clap"}" deps)
+    (features_.dirs."${deps."carnix"."0.10.0"."dirs"}" deps)
+    (features_.env_logger."${deps."carnix"."0.10.0"."env_logger"}" deps)
+    (features_.failure."${deps."carnix"."0.10.0"."failure"}" deps)
+    (features_.failure_derive."${deps."carnix"."0.10.0"."failure_derive"}" deps)
+    (features_.itertools."${deps."carnix"."0.10.0"."itertools"}" deps)
+    (features_.log."${deps."carnix"."0.10.0"."log"}" deps)
+    (features_.nom."${deps."carnix"."0.10.0"."nom"}" deps)
+    (features_.regex."${deps."carnix"."0.10.0"."regex"}" deps)
+    (features_.serde."${deps."carnix"."0.10.0"."serde"}" deps)
+    (features_.serde_derive."${deps."carnix"."0.10.0"."serde_derive"}" deps)
+    (features_.serde_json."${deps."carnix"."0.10.0"."serde_json"}" deps)
+    (features_.tempdir."${deps."carnix"."0.10.0"."tempdir"}" deps)
+    (features_.toml."${deps."carnix"."0.10.0"."toml"}" deps)
+    (features_.url."${deps."carnix"."0.10.0"."url"}" deps)
+  ];
+
+
+# end
+# carnix-0.9.1
+
+  crates.carnix."0.9.1" = deps: { features?(features_.carnix."0.9.1" deps {}) }: buildRustCrate {
+    crateName = "carnix";
+    version = "0.9.1";
+    authors = [ "pe@pijul.org <pe@pijul.org>" ];
+    sha256 = "0dn292d4mjlxif0kclrljzff8rm35cd9d92vycjbzklyhz5d62wi";
+    crateBin =
+      [{  name = "cargo-generate-nixfile";  path = "src/cargo-generate-nixfile.rs"; }] ++
+      [{  name = "carnix";  path = "src/main.rs"; }];
+    dependencies = mapFeatures features ([
+      (crates."clap"."${deps."carnix"."0.9.1"."clap"}" deps)
+      (crates."dirs"."${deps."carnix"."0.9.1"."dirs"}" deps)
+      (crates."env_logger"."${deps."carnix"."0.9.1"."env_logger"}" deps)
+      (crates."error_chain"."${deps."carnix"."0.9.1"."error_chain"}" deps)
+      (crates."itertools"."${deps."carnix"."0.9.1"."itertools"}" deps)
+      (crates."log"."${deps."carnix"."0.9.1"."log"}" deps)
+      (crates."nom"."${deps."carnix"."0.9.1"."nom"}" deps)
+      (crates."regex"."${deps."carnix"."0.9.1"."regex"}" deps)
+      (crates."serde"."${deps."carnix"."0.9.1"."serde"}" deps)
+      (crates."serde_derive"."${deps."carnix"."0.9.1"."serde_derive"}" deps)
+      (crates."serde_json"."${deps."carnix"."0.9.1"."serde_json"}" deps)
+      (crates."tempdir"."${deps."carnix"."0.9.1"."tempdir"}" deps)
+      (crates."toml"."${deps."carnix"."0.9.1"."toml"}" deps)
+    ]);
+  };
+  features_.carnix."0.9.1" = deps: f: updateFeatures f ({
+    carnix."0.9.1".default = (f.carnix."0.9.1".default or true);
+    clap."${deps.carnix."0.9.1".clap}".default = true;
+    dirs."${deps.carnix."0.9.1".dirs}".default = true;
+    env_logger."${deps.carnix."0.9.1".env_logger}".default = true;
+    error_chain."${deps.carnix."0.9.1".error_chain}".default = true;
+    itertools."${deps.carnix."0.9.1".itertools}".default = true;
+    log."${deps.carnix."0.9.1".log}".default = true;
+    nom."${deps.carnix."0.9.1".nom}".default = true;
+    regex."${deps.carnix."0.9.1".regex}".default = true;
+    serde."${deps.carnix."0.9.1".serde}".default = true;
+    serde_derive."${deps.carnix."0.9.1".serde_derive}".default = true;
+    serde_json."${deps.carnix."0.9.1".serde_json}".default = true;
+    tempdir."${deps.carnix."0.9.1".tempdir}".default = true;
+    toml."${deps.carnix."0.9.1".toml}".default = true;
+  }) [
+    (features_.clap."${deps."carnix"."0.9.1"."clap"}" deps)
+    (features_.dirs."${deps."carnix"."0.9.1"."dirs"}" deps)
+    (features_.env_logger."${deps."carnix"."0.9.1"."env_logger"}" deps)
+    (features_.error_chain."${deps."carnix"."0.9.1"."error_chain"}" deps)
+    (features_.itertools."${deps."carnix"."0.9.1"."itertools"}" deps)
+    (features_.log."${deps."carnix"."0.9.1"."log"}" deps)
+    (features_.nom."${deps."carnix"."0.9.1"."nom"}" deps)
+    (features_.regex."${deps."carnix"."0.9.1"."regex"}" deps)
+    (features_.serde."${deps."carnix"."0.9.1"."serde"}" deps)
+    (features_.serde_derive."${deps."carnix"."0.9.1"."serde_derive"}" deps)
+    (features_.serde_json."${deps."carnix"."0.9.1"."serde_json"}" deps)
+    (features_.tempdir."${deps."carnix"."0.9.1"."tempdir"}" deps)
+    (features_.toml."${deps."carnix"."0.9.1"."toml"}" deps)
+  ];
+
+
+# end
+# carnix-0.9.2
+
+  crates.carnix."0.9.2" = deps: { features?(features_.carnix."0.9.2" deps {}) }: buildRustCrate {
+    crateName = "carnix";
+    version = "0.9.2";
+    authors = [ "pe@pijul.org <pe@pijul.org>" ];
+    sha256 = "1r668rjqcwsxjpz2hrr7j3k099c1xsb8vfq1w7y1ps9hap9af42z";
+    crateBin =
+      [{  name = "cargo-generate-nixfile";  path = "src/cargo-generate-nixfile.rs"; }] ++
+      [{  name = "carnix";  path = "src/main.rs"; }];
+    dependencies = mapFeatures features ([
+      (crates."clap"."${deps."carnix"."0.9.2"."clap"}" deps)
+      (crates."dirs"."${deps."carnix"."0.9.2"."dirs"}" deps)
+      (crates."env_logger"."${deps."carnix"."0.9.2"."env_logger"}" deps)
+      (crates."error_chain"."${deps."carnix"."0.9.2"."error_chain"}" deps)
+      (crates."itertools"."${deps."carnix"."0.9.2"."itertools"}" deps)
+      (crates."log"."${deps."carnix"."0.9.2"."log"}" deps)
+      (crates."nom"."${deps."carnix"."0.9.2"."nom"}" deps)
+      (crates."regex"."${deps."carnix"."0.9.2"."regex"}" deps)
+      (crates."serde"."${deps."carnix"."0.9.2"."serde"}" deps)
+      (crates."serde_derive"."${deps."carnix"."0.9.2"."serde_derive"}" deps)
+      (crates."serde_json"."${deps."carnix"."0.9.2"."serde_json"}" deps)
+      (crates."tempdir"."${deps."carnix"."0.9.2"."tempdir"}" deps)
+      (crates."toml"."${deps."carnix"."0.9.2"."toml"}" deps)
+    ]);
+  };
+  features_.carnix."0.9.2" = deps: f: updateFeatures f ({
+    carnix."0.9.2".default = (f.carnix."0.9.2".default or true);
+    clap."${deps.carnix."0.9.2".clap}".default = true;
+    dirs."${deps.carnix."0.9.2".dirs}".default = true;
+    env_logger."${deps.carnix."0.9.2".env_logger}".default = true;
+    error_chain."${deps.carnix."0.9.2".error_chain}".default = true;
+    itertools."${deps.carnix."0.9.2".itertools}".default = true;
+    log."${deps.carnix."0.9.2".log}".default = true;
+    nom."${deps.carnix."0.9.2".nom}".default = true;
+    regex."${deps.carnix."0.9.2".regex}".default = true;
+    serde."${deps.carnix."0.9.2".serde}".default = true;
+    serde_derive."${deps.carnix."0.9.2".serde_derive}".default = true;
+    serde_json."${deps.carnix."0.9.2".serde_json}".default = true;
+    tempdir."${deps.carnix."0.9.2".tempdir}".default = true;
+    toml."${deps.carnix."0.9.2".toml}".default = true;
+  }) [
+    (features_.clap."${deps."carnix"."0.9.2"."clap"}" deps)
+    (features_.dirs."${deps."carnix"."0.9.2"."dirs"}" deps)
+    (features_.env_logger."${deps."carnix"."0.9.2"."env_logger"}" deps)
+    (features_.error_chain."${deps."carnix"."0.9.2"."error_chain"}" deps)
+    (features_.itertools."${deps."carnix"."0.9.2"."itertools"}" deps)
+    (features_.log."${deps."carnix"."0.9.2"."log"}" deps)
+    (features_.nom."${deps."carnix"."0.9.2"."nom"}" deps)
+    (features_.regex."${deps."carnix"."0.9.2"."regex"}" deps)
+    (features_.serde."${deps."carnix"."0.9.2"."serde"}" deps)
+    (features_.serde_derive."${deps."carnix"."0.9.2"."serde_derive"}" deps)
+    (features_.serde_json."${deps."carnix"."0.9.2"."serde_json"}" deps)
+    (features_.tempdir."${deps."carnix"."0.9.2"."tempdir"}" deps)
+    (features_.toml."${deps."carnix"."0.9.2"."toml"}" deps)
+  ];
+
+
+# end
+# carnix-0.9.8
+
+  crates.carnix."0.9.8" = deps: { features?(features_.carnix."0.9.8" deps {}) }: buildRustCrate {
+    crateName = "carnix";
+    version = "0.9.8";
+    authors = [ "pe@pijul.org <pe@pijul.org>" ];
+    sha256 = "0c2k98qjm1yyx5wl0wqs0rrjczp6h62ri1x8a99442clxsyvp4n9";
+    crateBin =
+      [{  name = "cargo-generate-nixfile";  path = "src/cargo-generate-nixfile.rs"; }] ++
+      [{  name = "carnix";  path = "src/main.rs"; }];
+    dependencies = mapFeatures features ([
+      (crates."clap"."${deps."carnix"."0.9.8"."clap"}" deps)
+      (crates."dirs"."${deps."carnix"."0.9.8"."dirs"}" deps)
+      (crates."env_logger"."${deps."carnix"."0.9.8"."env_logger"}" deps)
+      (crates."error_chain"."${deps."carnix"."0.9.8"."error_chain"}" deps)
+      (crates."itertools"."${deps."carnix"."0.9.8"."itertools"}" deps)
+      (crates."log"."${deps."carnix"."0.9.8"."log"}" deps)
+      (crates."nom"."${deps."carnix"."0.9.8"."nom"}" deps)
+      (crates."regex"."${deps."carnix"."0.9.8"."regex"}" deps)
+      (crates."serde"."${deps."carnix"."0.9.8"."serde"}" deps)
+      (crates."serde_derive"."${deps."carnix"."0.9.8"."serde_derive"}" deps)
+      (crates."serde_json"."${deps."carnix"."0.9.8"."serde_json"}" deps)
+      (crates."tempdir"."${deps."carnix"."0.9.8"."tempdir"}" deps)
+      (crates."toml"."${deps."carnix"."0.9.8"."toml"}" deps)
+      (crates."url"."${deps."carnix"."0.9.8"."url"}" deps)
+    ]);
+  };
+  features_.carnix."0.9.8" = deps: f: updateFeatures f ({
+    carnix."0.9.8".default = (f.carnix."0.9.8".default or true);
+    clap."${deps.carnix."0.9.8".clap}".default = true;
+    dirs."${deps.carnix."0.9.8".dirs}".default = true;
+    env_logger."${deps.carnix."0.9.8".env_logger}".default = true;
+    error_chain."${deps.carnix."0.9.8".error_chain}".default = true;
+    itertools."${deps.carnix."0.9.8".itertools}".default = true;
+    log."${deps.carnix."0.9.8".log}".default = true;
+    nom."${deps.carnix."0.9.8".nom}".default = true;
+    regex."${deps.carnix."0.9.8".regex}".default = true;
+    serde."${deps.carnix."0.9.8".serde}".default = true;
+    serde_derive."${deps.carnix."0.9.8".serde_derive}".default = true;
+    serde_json."${deps.carnix."0.9.8".serde_json}".default = true;
+    tempdir."${deps.carnix."0.9.8".tempdir}".default = true;
+    toml."${deps.carnix."0.9.8".toml}".default = true;
+    url."${deps.carnix."0.9.8".url}".default = true;
+  }) [
+    (features_.clap."${deps."carnix"."0.9.8"."clap"}" deps)
+    (features_.dirs."${deps."carnix"."0.9.8"."dirs"}" deps)
+    (features_.env_logger."${deps."carnix"."0.9.8"."env_logger"}" deps)
+    (features_.error_chain."${deps."carnix"."0.9.8"."error_chain"}" deps)
+    (features_.itertools."${deps."carnix"."0.9.8"."itertools"}" deps)
+    (features_.log."${deps."carnix"."0.9.8"."log"}" deps)
+    (features_.nom."${deps."carnix"."0.9.8"."nom"}" deps)
+    (features_.regex."${deps."carnix"."0.9.8"."regex"}" deps)
+    (features_.serde."${deps."carnix"."0.9.8"."serde"}" deps)
+    (features_.serde_derive."${deps."carnix"."0.9.8"."serde_derive"}" deps)
+    (features_.serde_json."${deps."carnix"."0.9.8"."serde_json"}" deps)
+    (features_.tempdir."${deps."carnix"."0.9.8"."tempdir"}" deps)
+    (features_.toml."${deps."carnix"."0.9.8"."toml"}" deps)
+    (features_.url."${deps."carnix"."0.9.8"."url"}" deps)
+  ];
+
+
+# end
+# cc-1.0.25
+
+  crates.cc."1.0.25" = deps: { features?(features_.cc."1.0.25" deps {}) }: buildRustCrate {
+    crateName = "cc";
+    version = "1.0.25";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0pd8fhjlpr5qan984frkf1c8nxrqp6827wmmfzhm2840229z2hq0";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."cc"."1.0.25" or {});
+  };
+  features_.cc."1.0.25" = deps: f: updateFeatures f (rec {
+    cc = fold recursiveUpdate {} [
+      { "1.0.25".default = (f.cc."1.0.25".default or true); }
+      { "1.0.25".rayon =
+        (f.cc."1.0.25".rayon or false) ||
+        (f.cc."1.0.25".parallel or false) ||
+        (cc."1.0.25"."parallel" or false); }
+    ];
+  }) [];
+
+
+# end
+# cc-1.0.32
+
+  crates.cc."1.0.32" = deps: { features?(features_.cc."1.0.32" deps {}) }: buildRustCrate {
+    crateName = "cc";
+    version = "1.0.32";
+    description = "A build-time dependency for Cargo build scripts to assist in invoking the native\nC compiler to compile native C code into a static archive to be linked into Rust\ncode.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0mq4ma94yis74dnn98w2wkaad195dr6qwlma4fs590xiv0j15ldx";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."cc"."1.0.32" or {});
+  };
+  features_.cc."1.0.32" = deps: f: updateFeatures f (rec {
+    cc = fold recursiveUpdate {} [
+      { "1.0.32"."rayon" =
+        (f.cc."1.0.32"."rayon" or false) ||
+        (f.cc."1.0.32".parallel or false) ||
+        (cc."1.0.32"."parallel" or false); }
+      { "1.0.32".default = (f.cc."1.0.32".default or true); }
+    ];
+  }) [];
+
+
+# end
+# cfg-if-0.1.6
+
+  crates.cfg_if."0.1.6" = deps: { features?(features_.cfg_if."0.1.6" deps {}) }: buildRustCrate {
+    crateName = "cfg-if";
+    version = "0.1.6";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "11qrix06wagkplyk908i3423ps9m9np6c4vbcq81s9fyl244xv3n";
+  };
+  features_.cfg_if."0.1.6" = deps: f: updateFeatures f ({
+    cfg_if."0.1.6".default = (f.cfg_if."0.1.6".default or true);
+  }) [];
+
+
+# end
+# cfg-if-0.1.7
+
+  crates.cfg_if."0.1.7" = deps: { features?(features_.cfg_if."0.1.7" deps {}) }: buildRustCrate {
+    crateName = "cfg-if";
+    version = "0.1.7";
+    description = "A macro to ergonomically define an item depending on a large number of #[cfg]\nparameters. Structured like an if-else chain, the first matching branch is the\nitem that gets emitted.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "13gvcx1dxjq4mpmpj26hpg3yc97qffkx2zi58ykr1dwr8q2biiig";
+  };
+  features_.cfg_if."0.1.7" = deps: f: updateFeatures f ({
+    cfg_if."0.1.7".default = (f.cfg_if."0.1.7".default or true);
+  }) [];
+
+
+# end
+# clap-2.32.0
+
+  crates.clap."2.32.0" = deps: { features?(features_.clap."2.32.0" deps {}) }: buildRustCrate {
+    crateName = "clap";
+    version = "2.32.0";
+    authors = [ "Kevin K. <kbknapp@gmail.com>" ];
+    sha256 = "1hdjf0janvpjkwrjdjx1mm2aayzr54k72w6mriyr0n5anjkcj1lx";
+    dependencies = mapFeatures features ([
+      (crates."bitflags"."${deps."clap"."2.32.0"."bitflags"}" deps)
+      (crates."textwrap"."${deps."clap"."2.32.0"."textwrap"}" deps)
+      (crates."unicode_width"."${deps."clap"."2.32.0"."unicode_width"}" deps)
+    ]
+      ++ (if features.clap."2.32.0".atty or false then [ (crates.atty."${deps."clap"."2.32.0".atty}" deps) ] else [])
+      ++ (if features.clap."2.32.0".strsim or false then [ (crates.strsim."${deps."clap"."2.32.0".strsim}" deps) ] else [])
+      ++ (if features.clap."2.32.0".vec_map or false then [ (crates.vec_map."${deps."clap"."2.32.0".vec_map}" deps) ] else []))
+      ++ (if !(kernel == "windows") then mapFeatures features ([
+    ]
+      ++ (if features.clap."2.32.0".ansi_term or false then [ (crates.ansi_term."${deps."clap"."2.32.0".ansi_term}" deps) ] else [])) else []);
+    features = mkFeatures (features."clap"."2.32.0" or {});
+  };
+  features_.clap."2.32.0" = deps: f: updateFeatures f (rec {
+    ansi_term."${deps.clap."2.32.0".ansi_term}".default = true;
+    atty."${deps.clap."2.32.0".atty}".default = true;
+    bitflags."${deps.clap."2.32.0".bitflags}".default = true;
+    clap = fold recursiveUpdate {} [
+      { "2.32.0".ansi_term =
+        (f.clap."2.32.0".ansi_term or false) ||
+        (f.clap."2.32.0".color or false) ||
+        (clap."2.32.0"."color" or false); }
+      { "2.32.0".atty =
+        (f.clap."2.32.0".atty or false) ||
+        (f.clap."2.32.0".color or false) ||
+        (clap."2.32.0"."color" or false); }
+      { "2.32.0".clippy =
+        (f.clap."2.32.0".clippy or false) ||
+        (f.clap."2.32.0".lints or false) ||
+        (clap."2.32.0"."lints" or false); }
+      { "2.32.0".color =
+        (f.clap."2.32.0".color or false) ||
+        (f.clap."2.32.0".default or false) ||
+        (clap."2.32.0"."default" or false); }
+      { "2.32.0".default = (f.clap."2.32.0".default or true); }
+      { "2.32.0".strsim =
+        (f.clap."2.32.0".strsim or false) ||
+        (f.clap."2.32.0".suggestions or false) ||
+        (clap."2.32.0"."suggestions" or false); }
+      { "2.32.0".suggestions =
+        (f.clap."2.32.0".suggestions or false) ||
+        (f.clap."2.32.0".default or false) ||
+        (clap."2.32.0"."default" or false); }
+      { "2.32.0".term_size =
+        (f.clap."2.32.0".term_size or false) ||
+        (f.clap."2.32.0".wrap_help or false) ||
+        (clap."2.32.0"."wrap_help" or false); }
+      { "2.32.0".vec_map =
+        (f.clap."2.32.0".vec_map or false) ||
+        (f.clap."2.32.0".default or false) ||
+        (clap."2.32.0"."default" or false); }
+      { "2.32.0".yaml =
+        (f.clap."2.32.0".yaml or false) ||
+        (f.clap."2.32.0".doc or false) ||
+        (clap."2.32.0"."doc" or false); }
+      { "2.32.0".yaml-rust =
+        (f.clap."2.32.0".yaml-rust or false) ||
+        (f.clap."2.32.0".yaml or false) ||
+        (clap."2.32.0"."yaml" or false); }
+    ];
+    strsim."${deps.clap."2.32.0".strsim}".default = true;
+    textwrap = fold recursiveUpdate {} [
+      { "${deps.clap."2.32.0".textwrap}"."term_size" =
+        (f.textwrap."${deps.clap."2.32.0".textwrap}"."term_size" or false) ||
+        (clap."2.32.0"."wrap_help" or false) ||
+        (f."clap"."2.32.0"."wrap_help" or false); }
+      { "${deps.clap."2.32.0".textwrap}".default = true; }
+    ];
+    unicode_width."${deps.clap."2.32.0".unicode_width}".default = true;
+    vec_map."${deps.clap."2.32.0".vec_map}".default = true;
+  }) [
+    (features_.atty."${deps."clap"."2.32.0"."atty"}" deps)
+    (features_.bitflags."${deps."clap"."2.32.0"."bitflags"}" deps)
+    (features_.strsim."${deps."clap"."2.32.0"."strsim"}" deps)
+    (features_.textwrap."${deps."clap"."2.32.0"."textwrap"}" deps)
+    (features_.unicode_width."${deps."clap"."2.32.0"."unicode_width"}" deps)
+    (features_.vec_map."${deps."clap"."2.32.0"."vec_map"}" deps)
+    (features_.ansi_term."${deps."clap"."2.32.0"."ansi_term"}" deps)
+  ];
+
+
+# end
+# cloudabi-0.0.3
+
+  crates.cloudabi."0.0.3" = deps: { features?(features_.cloudabi."0.0.3" deps {}) }: buildRustCrate {
+    crateName = "cloudabi";
+    version = "0.0.3";
+    description = "Low level interface to CloudABI. Contains all syscalls and related types.";
+    authors = [ "Nuxi (https://nuxi.nl/) and contributors" ];
+    sha256 = "1z9lby5sr6vslfd14d6igk03s7awf91mxpsfmsp3prxbxlk0x7h5";
+    libPath = "cloudabi.rs";
+    dependencies = mapFeatures features ([
+    ]
+      ++ (if features.cloudabi."0.0.3".bitflags or false then [ (crates.bitflags."${deps."cloudabi"."0.0.3".bitflags}" deps) ] else []));
+    features = mkFeatures (features."cloudabi"."0.0.3" or {});
+  };
+  features_.cloudabi."0.0.3" = deps: f: updateFeatures f (rec {
+    bitflags."${deps.cloudabi."0.0.3".bitflags}".default = true;
+    cloudabi = fold recursiveUpdate {} [
+      { "0.0.3"."bitflags" =
+        (f.cloudabi."0.0.3"."bitflags" or false) ||
+        (f.cloudabi."0.0.3".default or false) ||
+        (cloudabi."0.0.3"."default" or false); }
+      { "0.0.3".default = (f.cloudabi."0.0.3".default or true); }
+    ];
+  }) [
+    (features_.bitflags."${deps."cloudabi"."0.0.3"."bitflags"}" deps)
+  ];
+
+
+# end
+# constant_time_eq-0.1.3
+
+  crates.constant_time_eq."0.1.3" = deps: { features?(features_.constant_time_eq."0.1.3" deps {}) }: buildRustCrate {
+    crateName = "constant_time_eq";
+    version = "0.1.3";
+    authors = [ "Cesar Eduardo Barros <cesarb@cesarb.eti.br>" ];
+    sha256 = "03qri9hjf049gwqg9q527lybpg918q6y5q4g9a5lma753nff49wd";
+  };
+  features_.constant_time_eq."0.1.3" = deps: f: updateFeatures f ({
+    constant_time_eq."0.1.3".default = (f.constant_time_eq."0.1.3".default or true);
+  }) [];
+
+
+# end
+# dirs-1.0.4
+
+  crates.dirs."1.0.4" = deps: { features?(features_.dirs."1.0.4" deps {}) }: buildRustCrate {
+    crateName = "dirs";
+    version = "1.0.4";
+    authors = [ "Simon Ochsenreither <simon@ochsenreither.de>" ];
+    sha256 = "1hp3nz0350b0gpavb3w5ajqc9l1k59cfrcsr3hcavwlkizdnpv1y";
+    dependencies = (if kernel == "redox" then mapFeatures features ([
+      (crates."redox_users"."${deps."dirs"."1.0.4"."redox_users"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."dirs"."1.0.4"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."dirs"."1.0.4"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.dirs."1.0.4" = deps: f: updateFeatures f ({
+    dirs."1.0.4".default = (f.dirs."1.0.4".default or true);
+    libc."${deps.dirs."1.0.4".libc}".default = true;
+    redox_users."${deps.dirs."1.0.4".redox_users}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.dirs."1.0.4".winapi}"."knownfolders" = true; }
+      { "${deps.dirs."1.0.4".winapi}"."objbase" = true; }
+      { "${deps.dirs."1.0.4".winapi}"."shlobj" = true; }
+      { "${deps.dirs."1.0.4".winapi}"."winbase" = true; }
+      { "${deps.dirs."1.0.4".winapi}"."winerror" = true; }
+      { "${deps.dirs."1.0.4".winapi}".default = true; }
+    ];
+  }) [
+    (features_.redox_users."${deps."dirs"."1.0.4"."redox_users"}" deps)
+    (features_.libc."${deps."dirs"."1.0.4"."libc"}" deps)
+    (features_.winapi."${deps."dirs"."1.0.4"."winapi"}" deps)
+  ];
+
+
+# end
+# dirs-1.0.5
+
+  crates.dirs."1.0.5" = deps: { features?(features_.dirs."1.0.5" deps {}) }: buildRustCrate {
+    crateName = "dirs";
+    version = "1.0.5";
+    description = "A tiny low-level library that provides platform-specific standard locations of directories for config, cache and other data on Linux, Windows, macOS and Redox by leveraging the mechanisms defined by the XDG base/user directory specifications on Linux, the Known Folder API on Windows, and the Standard Directory guidelines on macOS.";
+    authors = [ "Simon Ochsenreither <simon@ochsenreither.de>" ];
+    sha256 = "1py68zwwrhlj5vbz9f9ansjmhc8y4gs5bpamw9ycmqz030pprwf3";
+    dependencies = (if kernel == "redox" then mapFeatures features ([
+      (crates."redox_users"."${deps."dirs"."1.0.5"."redox_users"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."dirs"."1.0.5"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."dirs"."1.0.5"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.dirs."1.0.5" = deps: f: updateFeatures f ({
+    dirs."1.0.5".default = (f.dirs."1.0.5".default or true);
+    libc."${deps.dirs."1.0.5".libc}".default = true;
+    redox_users."${deps.dirs."1.0.5".redox_users}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.dirs."1.0.5".winapi}"."knownfolders" = true; }
+      { "${deps.dirs."1.0.5".winapi}"."objbase" = true; }
+      { "${deps.dirs."1.0.5".winapi}"."shlobj" = true; }
+      { "${deps.dirs."1.0.5".winapi}"."winbase" = true; }
+      { "${deps.dirs."1.0.5".winapi}"."winerror" = true; }
+      { "${deps.dirs."1.0.5".winapi}".default = true; }
+    ];
+  }) [
+    (features_.redox_users."${deps."dirs"."1.0.5"."redox_users"}" deps)
+    (features_.libc."${deps."dirs"."1.0.5"."libc"}" deps)
+    (features_.winapi."${deps."dirs"."1.0.5"."winapi"}" deps)
+  ];
+
+
+# end
+# either-1.5.0
+
+  crates.either."1.5.0" = deps: { features?(features_.either."1.5.0" deps {}) }: buildRustCrate {
+    crateName = "either";
+    version = "1.5.0";
+    authors = [ "bluss" ];
+    sha256 = "1f7kl2ln01y02m8fpd2zrdjiwqmgfvl9nxxrfry3k19d1gd2bsvz";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."either"."1.5.0" or {});
+  };
+  features_.either."1.5.0" = deps: f: updateFeatures f (rec {
+    either = fold recursiveUpdate {} [
+      { "1.5.0".default = (f.either."1.5.0".default or true); }
+      { "1.5.0".use_std =
+        (f.either."1.5.0".use_std or false) ||
+        (f.either."1.5.0".default or false) ||
+        (either."1.5.0"."default" or false); }
+    ];
+  }) [];
+
+
+# end
+# either-1.5.1
+
+  crates.either."1.5.1" = deps: { features?(features_.either."1.5.1" deps {}) }: buildRustCrate {
+    crateName = "either";
+    version = "1.5.1";
+    description = "The enum `Either` with variants `Left` and `Right` is a general purpose sum type with two cases.\n";
+    authors = [ "bluss" ];
+    sha256 = "049dmvnyrrhf0fw955jrfazdapdl84x32grwwxllh8in39yv3783";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."either"."1.5.1" or {});
+  };
+  features_.either."1.5.1" = deps: f: updateFeatures f (rec {
+    either = fold recursiveUpdate {} [
+      { "1.5.1"."use_std" =
+        (f.either."1.5.1"."use_std" or false) ||
+        (f.either."1.5.1".default or false) ||
+        (either."1.5.1"."default" or false); }
+      { "1.5.1".default = (f.either."1.5.1".default or true); }
+    ];
+  }) [];
+
+
+# end
+# env_logger-0.5.13
+
+  crates.env_logger."0.5.13" = deps: { features?(features_.env_logger."0.5.13" deps {}) }: buildRustCrate {
+    crateName = "env_logger";
+    version = "0.5.13";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1q6vylngcz4bn088b4hvsl879l8yz1k2bma75waljb5p4h4kbb72";
+    dependencies = mapFeatures features ([
+      (crates."atty"."${deps."env_logger"."0.5.13"."atty"}" deps)
+      (crates."humantime"."${deps."env_logger"."0.5.13"."humantime"}" deps)
+      (crates."log"."${deps."env_logger"."0.5.13"."log"}" deps)
+      (crates."termcolor"."${deps."env_logger"."0.5.13"."termcolor"}" deps)
+    ]
+      ++ (if features.env_logger."0.5.13".regex or false then [ (crates.regex."${deps."env_logger"."0.5.13".regex}" deps) ] else []));
+    features = mkFeatures (features."env_logger"."0.5.13" or {});
+  };
+  features_.env_logger."0.5.13" = deps: f: updateFeatures f (rec {
+    atty."${deps.env_logger."0.5.13".atty}".default = true;
+    env_logger = fold recursiveUpdate {} [
+      { "0.5.13".default = (f.env_logger."0.5.13".default or true); }
+      { "0.5.13".regex =
+        (f.env_logger."0.5.13".regex or false) ||
+        (f.env_logger."0.5.13".default or false) ||
+        (env_logger."0.5.13"."default" or false); }
+    ];
+    humantime."${deps.env_logger."0.5.13".humantime}".default = true;
+    log = fold recursiveUpdate {} [
+      { "${deps.env_logger."0.5.13".log}"."std" = true; }
+      { "${deps.env_logger."0.5.13".log}".default = true; }
+    ];
+    regex."${deps.env_logger."0.5.13".regex}".default = true;
+    termcolor."${deps.env_logger."0.5.13".termcolor}".default = true;
+  }) [
+    (features_.atty."${deps."env_logger"."0.5.13"."atty"}" deps)
+    (features_.humantime."${deps."env_logger"."0.5.13"."humantime"}" deps)
+    (features_.log."${deps."env_logger"."0.5.13"."log"}" deps)
+    (features_.regex."${deps."env_logger"."0.5.13"."regex"}" deps)
+    (features_.termcolor."${deps."env_logger"."0.5.13"."termcolor"}" deps)
+  ];
+
+
+# end
+# env_logger-0.6.1
+
+  crates.env_logger."0.6.1" = deps: { features?(features_.env_logger."0.6.1" deps {}) }: buildRustCrate {
+    crateName = "env_logger";
+    version = "0.6.1";
+    description = "A logging implementation for `log` which is configured via an environment\nvariable.\n";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1d02i2yaqpnmbgw42pf0hd56ddd9jr4zq5yypbmfvc8rs13x0jql";
+    dependencies = mapFeatures features ([
+      (crates."log"."${deps."env_logger"."0.6.1"."log"}" deps)
+    ]
+      ++ (if features.env_logger."0.6.1".atty or false then [ (crates.atty."${deps."env_logger"."0.6.1".atty}" deps) ] else [])
+      ++ (if features.env_logger."0.6.1".humantime or false then [ (crates.humantime."${deps."env_logger"."0.6.1".humantime}" deps) ] else [])
+      ++ (if features.env_logger."0.6.1".regex or false then [ (crates.regex."${deps."env_logger"."0.6.1".regex}" deps) ] else [])
+      ++ (if features.env_logger."0.6.1".termcolor or false then [ (crates.termcolor."${deps."env_logger"."0.6.1".termcolor}" deps) ] else []));
+    features = mkFeatures (features."env_logger"."0.6.1" or {});
+  };
+  features_.env_logger."0.6.1" = deps: f: updateFeatures f (rec {
+    atty."${deps.env_logger."0.6.1".atty}".default = true;
+    env_logger = fold recursiveUpdate {} [
+      { "0.6.1"."atty" =
+        (f.env_logger."0.6.1"."atty" or false) ||
+        (f.env_logger."0.6.1".default or false) ||
+        (env_logger."0.6.1"."default" or false); }
+      { "0.6.1"."humantime" =
+        (f.env_logger."0.6.1"."humantime" or false) ||
+        (f.env_logger."0.6.1".default or false) ||
+        (env_logger."0.6.1"."default" or false); }
+      { "0.6.1"."regex" =
+        (f.env_logger."0.6.1"."regex" or false) ||
+        (f.env_logger."0.6.1".default or false) ||
+        (env_logger."0.6.1"."default" or false); }
+      { "0.6.1"."termcolor" =
+        (f.env_logger."0.6.1"."termcolor" or false) ||
+        (f.env_logger."0.6.1".default or false) ||
+        (env_logger."0.6.1"."default" or false); }
+      { "0.6.1".default = (f.env_logger."0.6.1".default or true); }
+    ];
+    humantime."${deps.env_logger."0.6.1".humantime}".default = true;
+    log = fold recursiveUpdate {} [
+      { "${deps.env_logger."0.6.1".log}"."std" = true; }
+      { "${deps.env_logger."0.6.1".log}".default = true; }
+    ];
+    regex."${deps.env_logger."0.6.1".regex}".default = true;
+    termcolor."${deps.env_logger."0.6.1".termcolor}".default = true;
+  }) [
+    (features_.atty."${deps."env_logger"."0.6.1"."atty"}" deps)
+    (features_.humantime."${deps."env_logger"."0.6.1"."humantime"}" deps)
+    (features_.log."${deps."env_logger"."0.6.1"."log"}" deps)
+    (features_.regex."${deps."env_logger"."0.6.1"."regex"}" deps)
+    (features_.termcolor."${deps."env_logger"."0.6.1"."termcolor"}" deps)
+  ];
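+
+  # Relative to env_logger 0.5.13 above, 0.6.1 makes every heavy dependency
+  # (atty, humantime, regex, termcolor) optional behind a like-named feature,
+  # and the `default` feature switches all four on. A consumer can therefore
+  # drop them by disabling defaults; a sketch of such an override (hypothetical
+  # call site, assuming the usual `deps` pins are in scope):
+  #
+  #   crates.env_logger."0.6.1" deps {
+  #     features = features_.env_logger."0.6.1" deps {
+  #       env_logger."0.6.1".default = false;  # leaves only the `log` dependency
+  #     };
+  #   }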
+
+
+# end
+# error-chain-0.12.0
+
+  crates.error_chain."0.12.0" = deps: { features?(features_.error_chain."0.12.0" deps {}) }: buildRustCrate {
+    crateName = "error-chain";
+    version = "0.12.0";
+    authors = [ "Brian Anderson <banderson@mozilla.com>" "Paul Colomiets <paul@colomiets.name>" "Colin Kiegel <kiegel@gmx.de>" "Yamakaky <yamakaky@yamaworld.fr>" ];
+    sha256 = "1m6wk1r6wqg1mn69bxxvk5k081cb4xy6bfhsxb99rv408x9wjcnl";
+    dependencies = mapFeatures features ([
+    ]
+      ++ (if features.error_chain."0.12.0".backtrace or false then [ (crates.backtrace."${deps."error_chain"."0.12.0".backtrace}" deps) ] else []));
+    features = mkFeatures (features."error_chain"."0.12.0" or {});
+  };
+  features_.error_chain."0.12.0" = deps: f: updateFeatures f (rec {
+    backtrace."${deps.error_chain."0.12.0".backtrace}".default = true;
+    error_chain = fold recursiveUpdate {} [
+      { "0.12.0".backtrace =
+        (f.error_chain."0.12.0".backtrace or false) ||
+        (f.error_chain."0.12.0".default or false) ||
+        (error_chain."0.12.0"."default" or false); }
+      { "0.12.0".default = (f.error_chain."0.12.0".default or true); }
+      { "0.12.0".example_generated =
+        (f.error_chain."0.12.0".example_generated or false) ||
+        (f.error_chain."0.12.0".default or false) ||
+        (error_chain."0.12.0"."default" or false); }
+    ];
+  }) [
+    (features_.backtrace."${deps."error_chain"."0.12.0"."backtrace"}" deps)
+  ];
+
+
+# end
+# failure-0.1.3
+
+  crates.failure."0.1.3" = deps: { features?(features_.failure."0.1.3" deps {}) }: buildRustCrate {
+    crateName = "failure";
+    version = "0.1.3";
+    authors = [ "Without Boats <boats@mozilla.com>" ];
+    sha256 = "0cibp01z0clyxrvkl7v7kq6jszsgcg9vwv6d9l6d1drk9jqdss4s";
+    dependencies = mapFeatures features ([
+    ]
+      ++ (if features.failure."0.1.3".backtrace or false then [ (crates.backtrace."${deps."failure"."0.1.3".backtrace}" deps) ] else [])
+      ++ (if features.failure."0.1.3".failure_derive or false then [ (crates.failure_derive."${deps."failure"."0.1.3".failure_derive}" deps) ] else []));
+    features = mkFeatures (features."failure"."0.1.3" or {});
+  };
+  features_.failure."0.1.3" = deps: f: updateFeatures f (rec {
+    backtrace."${deps.failure."0.1.3".backtrace}".default = true;
+    failure = fold recursiveUpdate {} [
+      { "0.1.3".backtrace =
+        (f.failure."0.1.3".backtrace or false) ||
+        (f.failure."0.1.3".std or false) ||
+        (failure."0.1.3"."std" or false); }
+      { "0.1.3".default = (f.failure."0.1.3".default or true); }
+      { "0.1.3".derive =
+        (f.failure."0.1.3".derive or false) ||
+        (f.failure."0.1.3".default or false) ||
+        (failure."0.1.3"."default" or false); }
+      { "0.1.3".failure_derive =
+        (f.failure."0.1.3".failure_derive or false) ||
+        (f.failure."0.1.3".derive or false) ||
+        (failure."0.1.3"."derive" or false); }
+      { "0.1.3".std =
+        (f.failure."0.1.3".std or false) ||
+        (f.failure."0.1.3".default or false) ||
+        (failure."0.1.3"."default" or false); }
+    ];
+    failure_derive."${deps.failure."0.1.3".failure_derive}".default = true;
+  }) [
+    (features_.backtrace."${deps."failure"."0.1.3"."backtrace"}" deps)
+    (features_.failure_derive."${deps."failure"."0.1.3"."failure_derive"}" deps)
+  ];
+
+
+# end
+# failure-0.1.5
+
+  crates.failure."0.1.5" = deps: { features?(features_.failure."0.1.5" deps {}) }: buildRustCrate {
+    crateName = "failure";
+    version = "0.1.5";
+    description = "Experimental error handling abstraction.";
+    authors = [ "Without Boats <boats@mozilla.com>" ];
+    sha256 = "1msaj1c0fg12dzyf4fhxqlx1gfx41lj2smdjmkc9hkrgajk2g3kx";
+    dependencies = mapFeatures features ([
+    ]
+      ++ (if features.failure."0.1.5".backtrace or false then [ (crates.backtrace."${deps."failure"."0.1.5".backtrace}" deps) ] else [])
+      ++ (if features.failure."0.1.5".failure_derive or false then [ (crates.failure_derive."${deps."failure"."0.1.5".failure_derive}" deps) ] else []));
+    features = mkFeatures (features."failure"."0.1.5" or {});
+  };
+  features_.failure."0.1.5" = deps: f: updateFeatures f (rec {
+    backtrace."${deps.failure."0.1.5".backtrace}".default = true;
+    failure = fold recursiveUpdate {} [
+      { "0.1.5"."backtrace" =
+        (f.failure."0.1.5"."backtrace" or false) ||
+        (f.failure."0.1.5".std or false) ||
+        (failure."0.1.5"."std" or false); }
+      { "0.1.5"."derive" =
+        (f.failure."0.1.5"."derive" or false) ||
+        (f.failure."0.1.5".default or false) ||
+        (failure."0.1.5"."default" or false); }
+      { "0.1.5"."failure_derive" =
+        (f.failure."0.1.5"."failure_derive" or false) ||
+        (f.failure."0.1.5".derive or false) ||
+        (failure."0.1.5"."derive" or false); }
+      { "0.1.5"."std" =
+        (f.failure."0.1.5"."std" or false) ||
+        (f.failure."0.1.5".default or false) ||
+        (failure."0.1.5"."default" or false); }
+      { "0.1.5".default = (f.failure."0.1.5".default or true); }
+    ];
+    failure_derive."${deps.failure."0.1.5".failure_derive}".default = true;
+  }) [
+    (features_.backtrace."${deps."failure"."0.1.5"."backtrace"}" deps)
+    (features_.failure_derive."${deps."failure"."0.1.5"."failure_derive"}" deps)
+  ];
+
+
+# end
+# failure_derive-0.1.3
+
+  crates.failure_derive."0.1.3" = deps: { features?(features_.failure_derive."0.1.3" deps {}) }: buildRustCrate {
+    crateName = "failure_derive";
+    version = "0.1.3";
+    authors = [ "Without Boats <woboats@gmail.com>" ];
+    sha256 = "1mh7ad2d17f13g0k29bskp0f9faws0w1q4a5yfzlzi75bw9kidgm";
+    procMacro = true;
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."failure_derive"."0.1.3"."proc_macro2"}" deps)
+      (crates."quote"."${deps."failure_derive"."0.1.3"."quote"}" deps)
+      (crates."syn"."${deps."failure_derive"."0.1.3"."syn"}" deps)
+      (crates."synstructure"."${deps."failure_derive"."0.1.3"."synstructure"}" deps)
+    ]);
+    features = mkFeatures (features."failure_derive"."0.1.3" or {});
+  };
+  features_.failure_derive."0.1.3" = deps: f: updateFeatures f ({
+    failure_derive."0.1.3".default = (f.failure_derive."0.1.3".default or true);
+    proc_macro2."${deps.failure_derive."0.1.3".proc_macro2}".default = true;
+    quote."${deps.failure_derive."0.1.3".quote}".default = true;
+    syn."${deps.failure_derive."0.1.3".syn}".default = true;
+    synstructure."${deps.failure_derive."0.1.3".synstructure}".default = true;
+  }) [
+    (features_.proc_macro2."${deps."failure_derive"."0.1.3"."proc_macro2"}" deps)
+    (features_.quote."${deps."failure_derive"."0.1.3"."quote"}" deps)
+    (features_.syn."${deps."failure_derive"."0.1.3"."syn"}" deps)
+    (features_.synstructure."${deps."failure_derive"."0.1.3"."synstructure"}" deps)
+  ];
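+
+  # failure_derive (this version and 0.1.5 below) is a procedural-macro crate:
+  # `procMacro = true` tells buildRustCrate to build it as a proc-macro library
+  # loaded by rustc at compile time, and `build = "build.rs"` points at its
+  # build script explicitly.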
+
+
+# end
+# failure_derive-0.1.5
+
+  crates.failure_derive."0.1.5" = deps: { features?(features_.failure_derive."0.1.5" deps {}) }: buildRustCrate {
+    crateName = "failure_derive";
+    version = "0.1.5";
+    description = "derives for the failure crate";
+    authors = [ "Without Boats <woboats@gmail.com>" ];
+    sha256 = "1wzk484b87r4qszcvdl2bkniv5ls4r2f2dshz7hmgiv6z4ln12g0";
+    procMacro = true;
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."failure_derive"."0.1.5"."proc_macro2"}" deps)
+      (crates."quote"."${deps."failure_derive"."0.1.5"."quote"}" deps)
+      (crates."syn"."${deps."failure_derive"."0.1.5"."syn"}" deps)
+      (crates."synstructure"."${deps."failure_derive"."0.1.5"."synstructure"}" deps)
+    ]);
+    features = mkFeatures (features."failure_derive"."0.1.5" or {});
+  };
+  features_.failure_derive."0.1.5" = deps: f: updateFeatures f ({
+    failure_derive."0.1.5".default = (f.failure_derive."0.1.5".default or true);
+    proc_macro2."${deps.failure_derive."0.1.5".proc_macro2}".default = true;
+    quote."${deps.failure_derive."0.1.5".quote}".default = true;
+    syn."${deps.failure_derive."0.1.5".syn}".default = true;
+    synstructure."${deps.failure_derive."0.1.5".synstructure}".default = true;
+  }) [
+    (features_.proc_macro2."${deps."failure_derive"."0.1.5"."proc_macro2"}" deps)
+    (features_.quote."${deps."failure_derive"."0.1.5"."quote"}" deps)
+    (features_.syn."${deps."failure_derive"."0.1.5"."syn"}" deps)
+    (features_.synstructure."${deps."failure_derive"."0.1.5"."synstructure"}" deps)
+  ];
+
+
+# end
+# fuchsia-cprng-0.1.1
+
+  crates.fuchsia_cprng."0.1.1" = deps: { features?(features_.fuchsia_cprng."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "fuchsia-cprng";
+    version = "0.1.1";
+    description = "Rust crate for the Fuchsia cryptographically secure pseudorandom number generator";
+    authors = [ "Erick Tryzelaar <etryzelaar@google.com>" ];
+    edition = "2018";
+    sha256 = "07apwv9dj716yjlcj29p94vkqn5zmfh7hlrqvrjx3wzshphc95h9";
+  };
+  features_.fuchsia_cprng."0.1.1" = deps: f: updateFeatures f ({
+    fuchsia_cprng."0.1.1".default = (f.fuchsia_cprng."0.1.1".default or true);
+  }) [];
+
+
+# end
+# fuchsia-zircon-0.3.3
+
+  crates.fuchsia_zircon."0.3.3" = deps: { features?(features_.fuchsia_zircon."0.3.3" deps {}) }: buildRustCrate {
+    crateName = "fuchsia-zircon";
+    version = "0.3.3";
+    authors = [ "Raph Levien <raph@google.com>" ];
+    sha256 = "0jrf4shb1699r4la8z358vri8318w4mdi6qzfqy30p2ymjlca4gk";
+    dependencies = mapFeatures features ([
+      (crates."bitflags"."${deps."fuchsia_zircon"."0.3.3"."bitflags"}" deps)
+      (crates."fuchsia_zircon_sys"."${deps."fuchsia_zircon"."0.3.3"."fuchsia_zircon_sys"}" deps)
+    ]);
+  };
+  features_.fuchsia_zircon."0.3.3" = deps: f: updateFeatures f ({
+    bitflags."${deps.fuchsia_zircon."0.3.3".bitflags}".default = true;
+    fuchsia_zircon."0.3.3".default = (f.fuchsia_zircon."0.3.3".default or true);
+    fuchsia_zircon_sys."${deps.fuchsia_zircon."0.3.3".fuchsia_zircon_sys}".default = true;
+  }) [
+    (features_.bitflags."${deps."fuchsia_zircon"."0.3.3"."bitflags"}" deps)
+    (features_.fuchsia_zircon_sys."${deps."fuchsia_zircon"."0.3.3"."fuchsia_zircon_sys"}" deps)
+  ];
+
+
+# end
+# fuchsia-zircon-sys-0.3.3
+
+  crates.fuchsia_zircon_sys."0.3.3" = deps: { features?(features_.fuchsia_zircon_sys."0.3.3" deps {}) }: buildRustCrate {
+    crateName = "fuchsia-zircon-sys";
+    version = "0.3.3";
+    authors = [ "Raph Levien <raph@google.com>" ];
+    sha256 = "08jp1zxrm9jbrr6l26bjal4dbm8bxfy57ickdgibsqxr1n9j3hf5";
+  };
+  features_.fuchsia_zircon_sys."0.3.3" = deps: f: updateFeatures f ({
+    fuchsia_zircon_sys."0.3.3".default = (f.fuchsia_zircon_sys."0.3.3".default or true);
+  }) [];
+
+
+# end
+# humantime-1.1.1
+
+  crates.humantime."1.1.1" = deps: { features?(features_.humantime."1.1.1" deps {}) }: buildRustCrate {
+    crateName = "humantime";
+    version = "1.1.1";
+    authors = [ "Paul Colomiets <paul@colomiets.name>" ];
+    sha256 = "1lzdfsfzdikcp1qb6wcdvnsdv16pmzr7p7cv171vnbnyz2lrwbgn";
+    libPath = "src/lib.rs";
+    dependencies = mapFeatures features ([
+      (crates."quick_error"."${deps."humantime"."1.1.1"."quick_error"}" deps)
+    ]);
+  };
+  features_.humantime."1.1.1" = deps: f: updateFeatures f ({
+    humantime."1.1.1".default = (f.humantime."1.1.1".default or true);
+    quick_error."${deps.humantime."1.1.1".quick_error}".default = true;
+  }) [
+    (features_.quick_error."${deps."humantime"."1.1.1"."quick_error"}" deps)
+  ];
+
+
+# end
+# humantime-1.2.0
+
+  crates.humantime."1.2.0" = deps: { features?(features_.humantime."1.2.0" deps {}) }: buildRustCrate {
+    crateName = "humantime";
+    version = "1.2.0";
+    description = "    A parser and formatter for std::time::{Duration, SystemTime}\n";
+    authors = [ "Paul Colomiets <paul@colomiets.name>" ];
+    sha256 = "0wlcxzz2mhq0brkfbjb12hc6jm17bgm8m6pdgblw4qjwmf26aw28";
+    libPath = "src/lib.rs";
+    dependencies = mapFeatures features ([
+      (crates."quick_error"."${deps."humantime"."1.2.0"."quick_error"}" deps)
+    ]);
+  };
+  features_.humantime."1.2.0" = deps: f: updateFeatures f ({
+    humantime."1.2.0".default = (f.humantime."1.2.0".default or true);
+    quick_error."${deps.humantime."1.2.0".quick_error}".default = true;
+  }) [
+    (features_.quick_error."${deps."humantime"."1.2.0"."quick_error"}" deps)
+  ];
+
+
+# end
+# idna-0.1.5
+
+  crates.idna."0.1.5" = deps: { features?(features_.idna."0.1.5" deps {}) }: buildRustCrate {
+    crateName = "idna";
+    version = "0.1.5";
+    authors = [ "The rust-url developers" ];
+    sha256 = "1gwgl19rz5vzi67rrhamczhxy050f5ynx4ybabfapyalv7z1qmjy";
+    dependencies = mapFeatures features ([
+      (crates."matches"."${deps."idna"."0.1.5"."matches"}" deps)
+      (crates."unicode_bidi"."${deps."idna"."0.1.5"."unicode_bidi"}" deps)
+      (crates."unicode_normalization"."${deps."idna"."0.1.5"."unicode_normalization"}" deps)
+    ]);
+  };
+  features_.idna."0.1.5" = deps: f: updateFeatures f ({
+    idna."0.1.5".default = (f.idna."0.1.5".default or true);
+    matches."${deps.idna."0.1.5".matches}".default = true;
+    unicode_bidi."${deps.idna."0.1.5".unicode_bidi}".default = true;
+    unicode_normalization."${deps.idna."0.1.5".unicode_normalization}".default = true;
+  }) [
+    (features_.matches."${deps."idna"."0.1.5"."matches"}" deps)
+    (features_.unicode_bidi."${deps."idna"."0.1.5"."unicode_bidi"}" deps)
+    (features_.unicode_normalization."${deps."idna"."0.1.5"."unicode_normalization"}" deps)
+  ];
+
+
+# end
+# itertools-0.7.8
+
+  crates.itertools."0.7.8" = deps: { features?(features_.itertools."0.7.8" deps {}) }: buildRustCrate {
+    crateName = "itertools";
+    version = "0.7.8";
+    authors = [ "bluss" ];
+    sha256 = "0ib30cd7d1icjxsa13mji1gry3grp72kx8p33yd84mphdbc3d357";
+    dependencies = mapFeatures features ([
+      (crates."either"."${deps."itertools"."0.7.8"."either"}" deps)
+    ]);
+    features = mkFeatures (features."itertools"."0.7.8" or {});
+  };
+  features_.itertools."0.7.8" = deps: f: updateFeatures f (rec {
+    either."${deps.itertools."0.7.8".either}".default = (f.either."${deps.itertools."0.7.8".either}".default or false);
+    itertools = fold recursiveUpdate {} [
+      { "0.7.8".default = (f.itertools."0.7.8".default or true); }
+      { "0.7.8".use_std =
+        (f.itertools."0.7.8".use_std or false) ||
+        (f.itertools."0.7.8".default or false) ||
+        (itertools."0.7.8"."default" or false); }
+    ];
+  }) [
+    (features_.either."${deps."itertools"."0.7.8"."either"}" deps)
+  ];
+
+
+# end
+# itertools-0.8.0
+
+  crates.itertools."0.8.0" = deps: { features?(features_.itertools."0.8.0" deps {}) }: buildRustCrate {
+    crateName = "itertools";
+    version = "0.8.0";
+    description = "Extra iterator adaptors, iterator methods, free functions, and macros.";
+    authors = [ "bluss" ];
+    sha256 = "0xpz59yf03vyj540i7sqypn2aqfid08c4vzyg0l6rqm08da77n7n";
+    dependencies = mapFeatures features ([
+      (crates."either"."${deps."itertools"."0.8.0"."either"}" deps)
+    ]);
+    features = mkFeatures (features."itertools"."0.8.0" or {});
+  };
+  features_.itertools."0.8.0" = deps: f: updateFeatures f (rec {
+    either."${deps.itertools."0.8.0".either}".default = (f.either."${deps.itertools."0.8.0".either}".default or false);
+    itertools = fold recursiveUpdate {} [
+      { "0.8.0"."use_std" =
+        (f.itertools."0.8.0"."use_std" or false) ||
+        (f.itertools."0.8.0".default or false) ||
+        (itertools."0.8.0"."default" or false); }
+      { "0.8.0".default = (f.itertools."0.8.0".default or true); }
+    ];
+  }) [
+    (features_.either."${deps."itertools"."0.8.0"."either"}" deps)
+  ];
+
+
+# end
+# itoa-0.4.3
+
+  crates.itoa."0.4.3" = deps: { features?(features_.itoa."0.4.3" deps {}) }: buildRustCrate {
+    crateName = "itoa";
+    version = "0.4.3";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "0zadimmdgvili3gdwxqg7ljv3r4wcdg1kkdfp9nl15vnm23vrhy1";
+    features = mkFeatures (features."itoa"."0.4.3" or {});
+  };
+  features_.itoa."0.4.3" = deps: f: updateFeatures f (rec {
+    itoa = fold recursiveUpdate {} [
+      { "0.4.3".default = (f.itoa."0.4.3".default or true); }
+      { "0.4.3".std =
+        (f.itoa."0.4.3".std or false) ||
+        (f.itoa."0.4.3".default or false) ||
+        (itoa."0.4.3"."default" or false); }
+    ];
+  }) [];
+
+
+# end
+# lazy_static-1.1.0
+
+  crates.lazy_static."1.1.0" = deps: { features?(features_.lazy_static."1.1.0" deps {}) }: buildRustCrate {
+    crateName = "lazy_static";
+    version = "1.1.0";
+    authors = [ "Marvin Löbel <loebel.marvin@gmail.com>" ];
+    sha256 = "1da2b6nxfc2l547qgl9kd1pn9sh1af96a6qx6xw8xdnv6hh5fag0";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."version_check"."${deps."lazy_static"."1.1.0"."version_check"}" deps)
+    ]);
+    features = mkFeatures (features."lazy_static"."1.1.0" or {});
+  };
+  features_.lazy_static."1.1.0" = deps: f: updateFeatures f (rec {
+    lazy_static = fold recursiveUpdate {} [
+      { "1.1.0".default = (f.lazy_static."1.1.0".default or true); }
+      { "1.1.0".nightly =
+        (f.lazy_static."1.1.0".nightly or false) ||
+        (f.lazy_static."1.1.0".spin_no_std or false) ||
+        (lazy_static."1.1.0"."spin_no_std" or false); }
+      { "1.1.0".spin =
+        (f.lazy_static."1.1.0".spin or false) ||
+        (f.lazy_static."1.1.0".spin_no_std or false) ||
+        (lazy_static."1.1.0"."spin_no_std" or false); }
+    ];
+    version_check."${deps.lazy_static."1.1.0".version_check}".default = true;
+  }) [
+    (features_.version_check."${deps."lazy_static"."1.1.0"."version_check"}" deps)
+  ];
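+
+  # lazy_static 1.1.0 carries a build-time dependency: version_check is used
+  # from build.rs to probe the compiler, so it is listed under
+  # `buildDependencies` rather than `dependencies`. (1.3.0 below drops both
+  # the build script and that dependency.)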
+
+
+# end
+# lazy_static-1.3.0
+
+  crates.lazy_static."1.3.0" = deps: { features?(features_.lazy_static."1.3.0" deps {}) }: buildRustCrate {
+    crateName = "lazy_static";
+    version = "1.3.0";
+    description = "A macro for declaring lazily evaluated statics in Rust.";
+    authors = [ "Marvin Löbel <loebel.marvin@gmail.com>" ];
+    sha256 = "1vv47va18ydk7dx5paz88g3jy1d3lwbx6qpxkbj8gyfv770i4b1y";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."lazy_static"."1.3.0" or {});
+  };
+  features_.lazy_static."1.3.0" = deps: f: updateFeatures f (rec {
+    lazy_static = fold recursiveUpdate {} [
+      { "1.3.0"."spin" =
+        (f.lazy_static."1.3.0"."spin" or false) ||
+        (f.lazy_static."1.3.0".spin_no_std or false) ||
+        (lazy_static."1.3.0"."spin_no_std" or false); }
+      { "1.3.0".default = (f.lazy_static."1.3.0".default or true); }
+    ];
+  }) [];
+
+
+# end
+# libc-0.2.43
+
+  crates.libc."0.2.43" = deps: { features?(features_.libc."0.2.43" deps {}) }: buildRustCrate {
+    crateName = "libc";
+    version = "0.2.43";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "0pshydmsq71kl9276zc2928ld50sp524ixcqkcqsgq410dx6c50b";
+    features = mkFeatures (features."libc"."0.2.43" or {});
+  };
+  features_.libc."0.2.43" = deps: f: updateFeatures f (rec {
+    libc = fold recursiveUpdate {} [
+      { "0.2.43".default = (f.libc."0.2.43".default or true); }
+      { "0.2.43".use_std =
+        (f.libc."0.2.43".use_std or false) ||
+        (f.libc."0.2.43".default or false) ||
+        (libc."0.2.43"."default" or false); }
+    ];
+  }) [];
+
+
+# end
+# libc-0.2.50
+
+  crates.libc."0.2.50" = deps: { features?(features_.libc."0.2.50" deps {}) }: buildRustCrate {
+    crateName = "libc";
+    version = "0.2.50";
+    description = "Raw FFI bindings to platform libraries like libc.\n";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "14y4zm0xp2xbj3l1kxqf2wpl58xb7hglxdbfx5dcxjlchbvk5dzs";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."libc"."0.2.50" or {});
+  };
+  features_.libc."0.2.50" = deps: f: updateFeatures f (rec {
+    libc = fold recursiveUpdate {} [
+      { "0.2.50"."align" =
+        (f.libc."0.2.50"."align" or false) ||
+        (f.libc."0.2.50".rustc-dep-of-std or false) ||
+        (libc."0.2.50"."rustc-dep-of-std" or false); }
+      { "0.2.50"."rustc-std-workspace-core" =
+        (f.libc."0.2.50"."rustc-std-workspace-core" or false) ||
+        (f.libc."0.2.50".rustc-dep-of-std or false) ||
+        (libc."0.2.50"."rustc-dep-of-std" or false); }
+      { "0.2.50"."use_std" =
+        (f.libc."0.2.50"."use_std" or false) ||
+        (f.libc."0.2.50".default or false) ||
+        (libc."0.2.50"."default" or false); }
+      { "0.2.50".default = (f.libc."0.2.50".default or true); }
+    ];
+  }) [];
+
+
+# end
+# log-0.4.5
+
+  crates.log."0.4.5" = deps: { features?(features_.log."0.4.5" deps {}) }: buildRustCrate {
+    crateName = "log";
+    version = "0.4.5";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1hdcj17al94ga90q7jx2y1rmxi68n3akra1awv3hr3s9b9zipgq6";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."log"."0.4.5"."cfg_if"}" deps)
+    ]);
+    features = mkFeatures (features."log"."0.4.5" or {});
+  };
+  features_.log."0.4.5" = deps: f: updateFeatures f ({
+    cfg_if."${deps.log."0.4.5".cfg_if}".default = true;
+    log."0.4.5".default = (f.log."0.4.5".default or true);
+  }) [
+    (features_.cfg_if."${deps."log"."0.4.5"."cfg_if"}" deps)
+  ];
+
+
+# end
+# log-0.4.6
+
+  crates.log."0.4.6" = deps: { features?(features_.log."0.4.6" deps {}) }: buildRustCrate {
+    crateName = "log";
+    version = "0.4.6";
+    description = "A lightweight logging facade for Rust\n";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1nd8dl9mvc9vd6fks5d4gsxaz990xi6rzlb8ymllshmwi153vngr";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."log"."0.4.6"."cfg_if"}" deps)
+    ]);
+    features = mkFeatures (features."log"."0.4.6" or {});
+  };
+  features_.log."0.4.6" = deps: f: updateFeatures f ({
+    cfg_if."${deps.log."0.4.6".cfg_if}".default = true;
+    log."0.4.6".default = (f.log."0.4.6".default or true);
+  }) [
+    (features_.cfg_if."${deps."log"."0.4.6"."cfg_if"}" deps)
+  ];
+
+
+# end
+# matches-0.1.8
+
+  crates.matches."0.1.8" = deps: { features?(features_.matches."0.1.8" deps {}) }: buildRustCrate {
+    crateName = "matches";
+    version = "0.1.8";
+    authors = [ "Simon Sapin <simon.sapin@exyr.org>" ];
+    sha256 = "03hl636fg6xggy0a26200xs74amk3k9n0908rga2szn68agyz3cv";
+    libPath = "lib.rs";
+  };
+  features_.matches."0.1.8" = deps: f: updateFeatures f ({
+    matches."0.1.8".default = (f.matches."0.1.8".default or true);
+  }) [];
+
+
+# end
+# memchr-1.0.2
+
+  crates.memchr."1.0.2" = deps: { features?(features_.memchr."1.0.2" deps {}) }: buildRustCrate {
+    crateName = "memchr";
+    version = "1.0.2";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" "bluss" ];
+    sha256 = "0dfb8ifl9nrc9kzgd5z91q6qg87sh285q1ih7xgrsglmqfav9lg7";
+    dependencies = mapFeatures features ([
+    ]
+      ++ (if features.memchr."1.0.2".libc or false then [ (crates.libc."${deps."memchr"."1.0.2".libc}" deps) ] else []));
+    features = mkFeatures (features."memchr"."1.0.2" or {});
+  };
+  features_.memchr."1.0.2" = deps: f: updateFeatures f (rec {
+    libc = fold recursiveUpdate {} [
+      { "${deps.memchr."1.0.2".libc}"."use_std" =
+        (f.libc."${deps.memchr."1.0.2".libc}"."use_std" or false) ||
+        (memchr."1.0.2"."use_std" or false) ||
+        (f."memchr"."1.0.2"."use_std" or false); }
+      { "${deps.memchr."1.0.2".libc}".default = (f.libc."${deps.memchr."1.0.2".libc}".default or false); }
+    ];
+    memchr = fold recursiveUpdate {} [
+      { "1.0.2".default = (f.memchr."1.0.2".default or true); }
+      { "1.0.2".libc =
+        (f.memchr."1.0.2".libc or false) ||
+        (f.memchr."1.0.2".default or false) ||
+        (memchr."1.0.2"."default" or false) ||
+        (f.memchr."1.0.2".use_std or false) ||
+        (memchr."1.0.2"."use_std" or false); }
+      { "1.0.2".use_std =
+        (f.memchr."1.0.2".use_std or false) ||
+        (f.memchr."1.0.2".default or false) ||
+        (memchr."1.0.2"."default" or false); }
+    ];
+  }) [
+    (features_.libc."${deps."memchr"."1.0.2"."libc"}" deps)
+  ];
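+
+  # memchr shows downward feature propagation in its raw form: the crate's own
+  # `use_std` is forwarded to libc's `use_std`, while libc's `default` feature
+  # is pinned off so the dependency stays no_std-friendly unless std support is
+  # requested. As boolean implications, the fold above computes:
+  #
+  #   memchr.default => memchr.use_std     # defaults imply std support
+  #   memchr.use_std => memchr.libc        # std support pulls in libc
+  #   memchr.use_std => libc.use_std       # ...built with its std API
+  #   libc.default    = false              # unless enabled elsewhere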
+
+
+# end
+# memchr-2.1.0
+
+  crates.memchr."2.1.0" = deps: { features?(features_.memchr."2.1.0" deps {}) }: buildRustCrate {
+    crateName = "memchr";
+    version = "2.1.0";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" "bluss" ];
+    sha256 = "02w1fc5z1ccx8fbzgcr0mpk0xf2i9g4vbx9q5c2g8pjddbaqvjjq";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."memchr"."2.1.0"."cfg_if"}" deps)
+    ]
+      ++ (if features.memchr."2.1.0".libc or false then [ (crates.libc."${deps."memchr"."2.1.0".libc}" deps) ] else []));
+
+    buildDependencies = mapFeatures features ([
+      (crates."version_check"."${deps."memchr"."2.1.0"."version_check"}" deps)
+    ]);
+    features = mkFeatures (features."memchr"."2.1.0" or {});
+  };
+  features_.memchr."2.1.0" = deps: f: updateFeatures f (rec {
+    cfg_if."${deps.memchr."2.1.0".cfg_if}".default = true;
+    libc = fold recursiveUpdate {} [
+      { "${deps.memchr."2.1.0".libc}"."use_std" =
+        (f.libc."${deps.memchr."2.1.0".libc}"."use_std" or false) ||
+        (memchr."2.1.0"."use_std" or false) ||
+        (f."memchr"."2.1.0"."use_std" or false); }
+      { "${deps.memchr."2.1.0".libc}".default = (f.libc."${deps.memchr."2.1.0".libc}".default or false); }
+    ];
+    memchr = fold recursiveUpdate {} [
+      { "2.1.0".default = (f.memchr."2.1.0".default or true); }
+      { "2.1.0".libc =
+        (f.memchr."2.1.0".libc or false) ||
+        (f.memchr."2.1.0".default or false) ||
+        (memchr."2.1.0"."default" or false) ||
+        (f.memchr."2.1.0".use_std or false) ||
+        (memchr."2.1.0"."use_std" or false); }
+      { "2.1.0".use_std =
+        (f.memchr."2.1.0".use_std or false) ||
+        (f.memchr."2.1.0".default or false) ||
+        (memchr."2.1.0"."default" or false); }
+    ];
+    version_check."${deps.memchr."2.1.0".version_check}".default = true;
+  }) [
+    (features_.cfg_if."${deps."memchr"."2.1.0"."cfg_if"}" deps)
+    (features_.libc."${deps."memchr"."2.1.0"."libc"}" deps)
+    (features_.version_check."${deps."memchr"."2.1.0"."version_check"}" deps)
+  ];
+
+
+# end
+# memchr-2.2.0
+
+  crates.memchr."2.2.0" = deps: { features?(features_.memchr."2.2.0" deps {}) }: buildRustCrate {
+    crateName = "memchr";
+    version = "2.2.0";
+    description = "Safe interface to memchr.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" "bluss" ];
+    sha256 = "11vwg8iig9jyjxq3n1cq15g29ikzw5l7ar87md54k1aisjs0997p";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."memchr"."2.2.0" or {});
+  };
+  features_.memchr."2.2.0" = deps: f: updateFeatures f (rec {
+    memchr = fold recursiveUpdate {} [
+      { "2.2.0"."use_std" =
+        (f.memchr."2.2.0"."use_std" or false) ||
+        (f.memchr."2.2.0".default or false) ||
+        (memchr."2.2.0"."default" or false); }
+      { "2.2.0".default = (f.memchr."2.2.0".default or true); }
+    ];
+  }) [];
+
+
+# end
+# nodrop-0.1.12
+
+  crates.nodrop."0.1.12" = deps: { features?(features_.nodrop."0.1.12" deps {}) }: buildRustCrate {
+    crateName = "nodrop";
+    version = "0.1.12";
+    authors = [ "bluss" ];
+    sha256 = "1b9rxvdg8061gxjc239l9slndf0ds3m6fy2sf3gs8f9kknqgl49d";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."nodrop"."0.1.12" or {});
+  };
+  features_.nodrop."0.1.12" = deps: f: updateFeatures f (rec {
+    nodrop = fold recursiveUpdate {} [
+      { "0.1.12".default = (f.nodrop."0.1.12".default or true); }
+      { "0.1.12".nodrop-union =
+        (f.nodrop."0.1.12".nodrop-union or false) ||
+        (f.nodrop."0.1.12".use_union or false) ||
+        (nodrop."0.1.12"."use_union" or false); }
+      { "0.1.12".std =
+        (f.nodrop."0.1.12".std or false) ||
+        (f.nodrop."0.1.12".default or false) ||
+        (nodrop."0.1.12"."default" or false); }
+    ];
+  }) [];
+
+
+# end
+# nodrop-0.1.13
+
+  crates.nodrop."0.1.13" = deps: { features?(features_.nodrop."0.1.13" deps {}) }: buildRustCrate {
+    crateName = "nodrop";
+    version = "0.1.13";
+    description = "A wrapper type to inhibit drop (destructor). Use std::mem::ManuallyDrop instead!";
+    authors = [ "bluss" ];
+    sha256 = "0gkfx6wihr9z0m8nbdhma5pyvbipznjpkzny2d4zkc05b0vnhinb";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."nodrop"."0.1.13" or {});
+  };
+  features_.nodrop."0.1.13" = deps: f: updateFeatures f (rec {
+    nodrop = fold recursiveUpdate {} [
+      { "0.1.13"."nodrop-union" =
+        (f.nodrop."0.1.13"."nodrop-union" or false) ||
+        (f.nodrop."0.1.13".use_union or false) ||
+        (nodrop."0.1.13"."use_union" or false); }
+      { "0.1.13"."std" =
+        (f.nodrop."0.1.13"."std" or false) ||
+        (f.nodrop."0.1.13".default or false) ||
+        (nodrop."0.1.13"."default" or false); }
+      { "0.1.13".default = (f.nodrop."0.1.13".default or true); }
+    ];
+  }) [];
+
+
+# end
+# nom-3.2.1
+
+  crates.nom."3.2.1" = deps: { features?(features_.nom."3.2.1" deps {}) }: buildRustCrate {
+    crateName = "nom";
+    version = "3.2.1";
+    authors = [ "contact@geoffroycouprie.com" ];
+    sha256 = "1vcllxrz9hdw6j25kn020ka3psz1vkaqh1hm3yfak2240zrxgi07";
+    dependencies = mapFeatures features ([
+      (crates."memchr"."${deps."nom"."3.2.1"."memchr"}" deps)
+    ]);
+    features = mkFeatures (features."nom"."3.2.1" or {});
+  };
+  features_.nom."3.2.1" = deps: f: updateFeatures f (rec {
+    memchr = fold recursiveUpdate {} [
+      { "${deps.nom."3.2.1".memchr}"."use_std" =
+        (f.memchr."${deps.nom."3.2.1".memchr}"."use_std" or false) ||
+        (nom."3.2.1"."std" or false) ||
+        (f."nom"."3.2.1"."std" or false); }
+      { "${deps.nom."3.2.1".memchr}".default = (f.memchr."${deps.nom."3.2.1".memchr}".default or false); }
+    ];
+    nom = fold recursiveUpdate {} [
+      { "3.2.1".compiler_error =
+        (f.nom."3.2.1".compiler_error or false) ||
+        (f.nom."3.2.1".nightly or false) ||
+        (nom."3.2.1"."nightly" or false); }
+      { "3.2.1".default = (f.nom."3.2.1".default or true); }
+      { "3.2.1".lazy_static =
+        (f.nom."3.2.1".lazy_static or false) ||
+        (f.nom."3.2.1".regexp_macros or false) ||
+        (nom."3.2.1"."regexp_macros" or false); }
+      { "3.2.1".regex =
+        (f.nom."3.2.1".regex or false) ||
+        (f.nom."3.2.1".regexp or false) ||
+        (nom."3.2.1"."regexp" or false); }
+      { "3.2.1".regexp =
+        (f.nom."3.2.1".regexp or false) ||
+        (f.nom."3.2.1".regexp_macros or false) ||
+        (nom."3.2.1"."regexp_macros" or false); }
+      { "3.2.1".std =
+        (f.nom."3.2.1".std or false) ||
+        (f.nom."3.2.1".default or false) ||
+        (nom."3.2.1"."default" or false); }
+      { "3.2.1".stream =
+        (f.nom."3.2.1".stream or false) ||
+        (f.nom."3.2.1".default or false) ||
+        (nom."3.2.1"."default" or false); }
+    ];
+  }) [
+    (features_.memchr."${deps."nom"."3.2.1"."memchr"}" deps)
+  ];
+
+
+# end
+# percent-encoding-1.0.1
+
+  crates.percent_encoding."1.0.1" = deps: { features?(features_.percent_encoding."1.0.1" deps {}) }: buildRustCrate {
+    crateName = "percent-encoding";
+    version = "1.0.1";
+    authors = [ "The rust-url developers" ];
+    sha256 = "04ahrp7aw4ip7fmadb0bknybmkfav0kk0gw4ps3ydq5w6hr0ib5i";
+    libPath = "lib.rs";
+  };
+  features_.percent_encoding."1.0.1" = deps: f: updateFeatures f ({
+    percent_encoding."1.0.1".default = (f.percent_encoding."1.0.1".default or true);
+  }) [];
+
+
+# end
+# proc-macro2-0.4.20
+
+  crates.proc_macro2."0.4.20" = deps: { features?(features_.proc_macro2."0.4.20" deps {}) }: buildRustCrate {
+    crateName = "proc-macro2";
+    version = "0.4.20";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0yr74b00d3wzg21kjvfln7vzzvf9aghbaff4c747i3grbd997ys2";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."unicode_xid"."${deps."proc_macro2"."0.4.20"."unicode_xid"}" deps)
+    ]);
+    features = mkFeatures (features."proc_macro2"."0.4.20" or {});
+  };
+  features_.proc_macro2."0.4.20" = deps: f: updateFeatures f (rec {
+    proc_macro2 = fold recursiveUpdate {} [
+      { "0.4.20".default = (f.proc_macro2."0.4.20".default or true); }
+      { "0.4.20".proc-macro =
+        (f.proc_macro2."0.4.20".proc-macro or false) ||
+        (f.proc_macro2."0.4.20".default or false) ||
+        (proc_macro2."0.4.20"."default" or false) ||
+        (f.proc_macro2."0.4.20".nightly or false) ||
+        (proc_macro2."0.4.20"."nightly" or false); }
+    ];
+    unicode_xid."${deps.proc_macro2."0.4.20".unicode_xid}".default = true;
+  }) [
+    (features_.unicode_xid."${deps."proc_macro2"."0.4.20"."unicode_xid"}" deps)
+  ];
+
+
+# end
+# proc-macro2-0.4.27
+
+  crates.proc_macro2."0.4.27" = deps: { features?(features_.proc_macro2."0.4.27" deps {}) }: buildRustCrate {
+    crateName = "proc-macro2";
+    version = "0.4.27";
+    description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1cp4c40p3hwn2sz72ssqa62gp5n8w4gbamdqvvadzp5l7gxnq95i";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."unicode_xid"."${deps."proc_macro2"."0.4.27"."unicode_xid"}" deps)
+    ]);
+    features = mkFeatures (features."proc_macro2"."0.4.27" or {});
+  };
+  features_.proc_macro2."0.4.27" = deps: f: updateFeatures f (rec {
+    proc_macro2 = fold recursiveUpdate {} [
+      { "0.4.27"."proc-macro" =
+        (f.proc_macro2."0.4.27"."proc-macro" or false) ||
+        (f.proc_macro2."0.4.27".default or false) ||
+        (proc_macro2."0.4.27"."default" or false); }
+      { "0.4.27".default = (f.proc_macro2."0.4.27".default or true); }
+    ];
+    unicode_xid."${deps.proc_macro2."0.4.27".unicode_xid}".default = true;
+  }) [
+    (features_.unicode_xid."${deps."proc_macro2"."0.4.27"."unicode_xid"}" deps)
+  ];
+
+
+# end
+# quick-error-1.2.2
+
+  crates.quick_error."1.2.2" = deps: { features?(features_.quick_error."1.2.2" deps {}) }: buildRustCrate {
+    crateName = "quick-error";
+    version = "1.2.2";
+    authors = [ "Paul Colomiets <paul@colomiets.name>" "Colin Kiegel <kiegel@gmx.de>" ];
+    sha256 = "192a3adc5phgpibgqblsdx1b421l5yg9bjbmv552qqq9f37h60k5";
+  };
+  features_.quick_error."1.2.2" = deps: f: updateFeatures f ({
+    quick_error."1.2.2".default = (f.quick_error."1.2.2".default or true);
+  }) [];
+
+
+# end
+# quote-0.6.11
+
+  crates.quote."0.6.11" = deps: { features?(features_.quote."0.6.11" deps {}) }: buildRustCrate {
+    crateName = "quote";
+    version = "0.6.11";
+    description = "Quasi-quoting macro quote!(...)";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "0agska77z58cypcq4knayzwx7r7n6m756z1cz9cp2z4sv0b846ga";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."quote"."0.6.11"."proc_macro2"}" deps)
+    ]);
+    features = mkFeatures (features."quote"."0.6.11" or {});
+  };
+  features_.quote."0.6.11" = deps: f: updateFeatures f (rec {
+    proc_macro2 = fold recursiveUpdate {} [
+      { "${deps.quote."0.6.11".proc_macro2}"."proc-macro" =
+        (f.proc_macro2."${deps.quote."0.6.11".proc_macro2}"."proc-macro" or false) ||
+        (quote."0.6.11"."proc-macro" or false) ||
+        (f."quote"."0.6.11"."proc-macro" or false); }
+      { "${deps.quote."0.6.11".proc_macro2}".default = (f.proc_macro2."${deps.quote."0.6.11".proc_macro2}".default or false); }
+    ];
+    quote = fold recursiveUpdate {} [
+      { "0.6.11"."proc-macro" =
+        (f.quote."0.6.11"."proc-macro" or false) ||
+        (f.quote."0.6.11".default or false) ||
+        (quote."0.6.11"."default" or false); }
+      { "0.6.11".default = (f.quote."0.6.11".default or true); }
+    ];
+  }) [
+    (features_.proc_macro2."${deps."quote"."0.6.11"."proc_macro2"}" deps)
+  ];
+
+
+# end
+# quote-0.6.8
+
+  crates.quote."0.6.8" = deps: { features?(features_.quote."0.6.8" deps {}) }: buildRustCrate {
+    crateName = "quote";
+    version = "0.6.8";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "0dq6j23w6pmc4l6v490arixdwypy0b82z76nrzaingqhqri4p3mh";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."quote"."0.6.8"."proc_macro2"}" deps)
+    ]);
+    features = mkFeatures (features."quote"."0.6.8" or {});
+  };
+  features_.quote."0.6.8" = deps: f: updateFeatures f (rec {
+    proc_macro2 = fold recursiveUpdate {} [
+      { "${deps.quote."0.6.8".proc_macro2}"."proc-macro" =
+        (f.proc_macro2."${deps.quote."0.6.8".proc_macro2}"."proc-macro" or false) ||
+        (quote."0.6.8"."proc-macro" or false) ||
+        (f."quote"."0.6.8"."proc-macro" or false); }
+      { "${deps.quote."0.6.8".proc_macro2}".default = (f.proc_macro2."${deps.quote."0.6.8".proc_macro2}".default or false); }
+    ];
+    quote = fold recursiveUpdate {} [
+      { "0.6.8".default = (f.quote."0.6.8".default or true); }
+      { "0.6.8".proc-macro =
+        (f.quote."0.6.8".proc-macro or false) ||
+        (f.quote."0.6.8".default or false) ||
+        (quote."0.6.8"."default" or false); }
+    ];
+  }) [
+    (features_.proc_macro2."${deps."quote"."0.6.8"."proc_macro2"}" deps)
+  ];
+
+
+# end
+# rand-0.4.3
+
+  crates.rand."0.4.3" = deps: { features?(features_.rand."0.4.3" deps {}) }: buildRustCrate {
+    crateName = "rand";
+    version = "0.4.3";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1644wri45l147822xy7dgdm4k7myxzs66cb795ga0x7dan11ci4f";
+    dependencies = (if kernel == "fuchsia" then mapFeatures features ([
+      (crates."fuchsia_zircon"."${deps."rand"."0.4.3"."fuchsia_zircon"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+    ]
+      ++ (if features.rand."0.4.3".libc or false then [ (crates.libc."${deps."rand"."0.4.3".libc}" deps) ] else [])) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."rand"."0.4.3"."winapi"}" deps)
+    ]) else []);
+    features = mkFeatures (features."rand"."0.4.3" or {});
+  };
+  features_.rand."0.4.3" = deps: f: updateFeatures f (rec {
+    fuchsia_zircon."${deps.rand."0.4.3".fuchsia_zircon}".default = true;
+    libc."${deps.rand."0.4.3".libc}".default = true;
+    rand = fold recursiveUpdate {} [
+      { "0.4.3".default = (f.rand."0.4.3".default or true); }
+      { "0.4.3".i128_support =
+        (f.rand."0.4.3".i128_support or false) ||
+        (f.rand."0.4.3".nightly or false) ||
+        (rand."0.4.3"."nightly" or false); }
+      { "0.4.3".libc =
+        (f.rand."0.4.3".libc or false) ||
+        (f.rand."0.4.3".std or false) ||
+        (rand."0.4.3"."std" or false); }
+      { "0.4.3".std =
+        (f.rand."0.4.3".std or false) ||
+        (f.rand."0.4.3".default or false) ||
+        (rand."0.4.3"."default" or false); }
+    ];
+    winapi = fold recursiveUpdate {} [
+      { "${deps.rand."0.4.3".winapi}"."minwindef" = true; }
+      { "${deps.rand."0.4.3".winapi}"."ntsecapi" = true; }
+      { "${deps.rand."0.4.3".winapi}"."profileapi" = true; }
+      { "${deps.rand."0.4.3".winapi}"."winnt" = true; }
+      { "${deps.rand."0.4.3".winapi}".default = true; }
+    ];
+  }) [
+    (features_.fuchsia_zircon."${deps."rand"."0.4.3"."fuchsia_zircon"}" deps)
+    (features_.libc."${deps."rand"."0.4.3"."libc"}" deps)
+    (features_.winapi."${deps."rand"."0.4.3"."winapi"}" deps)
+  ];
+
+
+# end
+# rand-0.4.6
+
+  crates.rand."0.4.6" = deps: { features?(features_.rand."0.4.6" deps {}) }: buildRustCrate {
+    crateName = "rand";
+    version = "0.4.6";
+    description = "Random number generators and other randomness functionality.\n";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "0c3rmg5q7d6qdi7cbmg5py9alm70wd3xsg0mmcawrnl35qv37zfs";
+    dependencies = (if abi == "sgx" then mapFeatures features ([
+      (crates."rand_core"."${deps."rand"."0.4.6"."rand_core"}" deps)
+      (crates."rdrand"."${deps."rand"."0.4.6"."rdrand"}" deps)
+    ]) else [])
+      ++ (if kernel == "fuchsia" then mapFeatures features ([
+      (crates."fuchsia_cprng"."${deps."rand"."0.4.6"."fuchsia_cprng"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+    ]
+      ++ (if features.rand."0.4.6".libc or false then [ (crates.libc."${deps."rand"."0.4.6".libc}" deps) ] else [])) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."rand"."0.4.6"."winapi"}" deps)
+    ]) else []);
+    features = mkFeatures (features."rand"."0.4.6" or {});
+  };
+  features_.rand."0.4.6" = deps: f: updateFeatures f (rec {
+    fuchsia_cprng."${deps.rand."0.4.6".fuchsia_cprng}".default = true;
+    libc."${deps.rand."0.4.6".libc}".default = true;
+    rand = fold recursiveUpdate {} [
+      { "0.4.6"."i128_support" =
+        (f.rand."0.4.6"."i128_support" or false) ||
+        (f.rand."0.4.6".nightly or false) ||
+        (rand."0.4.6"."nightly" or false); }
+      { "0.4.6"."libc" =
+        (f.rand."0.4.6"."libc" or false) ||
+        (f.rand."0.4.6".std or false) ||
+        (rand."0.4.6"."std" or false); }
+      { "0.4.6"."std" =
+        (f.rand."0.4.6"."std" or false) ||
+        (f.rand."0.4.6".default or false) ||
+        (rand."0.4.6"."default" or false); }
+      { "0.4.6".default = (f.rand."0.4.6".default or true); }
+    ];
+    rand_core."${deps.rand."0.4.6".rand_core}".default = (f.rand_core."${deps.rand."0.4.6".rand_core}".default or false);
+    rdrand."${deps.rand."0.4.6".rdrand}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.rand."0.4.6".winapi}"."minwindef" = true; }
+      { "${deps.rand."0.4.6".winapi}"."ntsecapi" = true; }
+      { "${deps.rand."0.4.6".winapi}"."profileapi" = true; }
+      { "${deps.rand."0.4.6".winapi}"."winnt" = true; }
+      { "${deps.rand."0.4.6".winapi}".default = true; }
+    ];
+  }) [
+    (features_.rand_core."${deps."rand"."0.4.6"."rand_core"}" deps)
+    (features_.rdrand."${deps."rand"."0.4.6"."rdrand"}" deps)
+    (features_.fuchsia_cprng."${deps."rand"."0.4.6"."fuchsia_cprng"}" deps)
+    (features_.libc."${deps."rand"."0.4.6"."libc"}" deps)
+    (features_.winapi."${deps."rand"."0.4.6"."winapi"}" deps)
+  ];
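+
+  # rand 0.4.6 selects its entropy backend at Nix evaluation time: the
+  # `dependencies` expression branches on `kernel` (and on `abi` for SGX
+  # targets), both presumably bound from the host platform earlier in this
+  # generated file. Stripped of the pin lookups, the shape is:
+  #
+  #   dependencies =
+  #        (if abi == "sgx" then [ rand_core rdrand ] else [])
+  #     ++ (if kernel == "fuchsia" then [ fuchsia_cprng ] else [])
+  #     ++ (if kernel == "linux" || kernel == "darwin" then [ libc ] else [])  # libc itself feature-gated
+  #     ++ (if kernel == "windows" then [ winapi ] else []);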
+
+
+# end
+# rand_core-0.3.1
+
+  crates.rand_core."0.3.1" = deps: { features?(features_.rand_core."0.3.1" deps {}) }: buildRustCrate {
+    crateName = "rand_core";
+    version = "0.3.1";
+    description = "Core random number generator traits and tools for implementation.\n";
+    authors = [ "The Rand Project Developers" "The Rust Project Developers" ];
+    sha256 = "0q0ssgpj9x5a6fda83nhmfydy7a6c0wvxm0jhncsmjx8qp8gw91m";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rand_core"."0.3.1"."rand_core"}" deps)
+    ]);
+    features = mkFeatures (features."rand_core"."0.3.1" or {});
+  };
+  features_.rand_core."0.3.1" = deps: f: updateFeatures f (rec {
+    rand_core = fold recursiveUpdate {} [
+      { "${deps.rand_core."0.3.1".rand_core}"."alloc" =
+        (f.rand_core."${deps.rand_core."0.3.1".rand_core}"."alloc" or false) ||
+        (rand_core."0.3.1"."alloc" or false) ||
+        (f."rand_core"."0.3.1"."alloc" or false); }
+      { "${deps.rand_core."0.3.1".rand_core}"."serde1" =
+        (f.rand_core."${deps.rand_core."0.3.1".rand_core}"."serde1" or false) ||
+        (rand_core."0.3.1"."serde1" or false) ||
+        (f."rand_core"."0.3.1"."serde1" or false); }
+      { "${deps.rand_core."0.3.1".rand_core}"."std" =
+        (f.rand_core."${deps.rand_core."0.3.1".rand_core}"."std" or false) ||
+        (rand_core."0.3.1"."std" or false) ||
+        (f."rand_core"."0.3.1"."std" or false); }
+      { "${deps.rand_core."0.3.1".rand_core}".default = true; }
+      { "0.3.1"."std" =
+        (f.rand_core."0.3.1"."std" or false) ||
+        (f.rand_core."0.3.1".default or false) ||
+        (rand_core."0.3.1"."default" or false); }
+      { "0.3.1".default = (f.rand_core."0.3.1".default or true); }
+    ];
+  }) [
+    (features_.rand_core."${deps."rand_core"."0.3.1"."rand_core"}" deps)
+  ];
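+
+  # rand_core 0.3.1 is a compatibility facade over the newer rand_core pinned
+  # above (0.4.x in this set): the fold forwards its own `alloc`, `serde1` and
+  # `std` features to the inner crate one-for-one, e.g.
+  #
+  #   inner.std = inner.std || rand_core."0.3.1".std
+  #
+  # so enabling a feature on the 0.3 facade enables it on the crate it
+  # re-exports.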
+
+
+# end
+# rand_core-0.4.0
+
+  crates.rand_core."0.4.0" = deps: { features?(features_.rand_core."0.4.0" deps {}) }: buildRustCrate {
+    crateName = "rand_core";
+    version = "0.4.0";
+    description = "Core random number generator traits and tools for implementation.\n";
+    authors = [ "The Rand Project Developers" "The Rust Project Developers" ];
+    sha256 = "0wb5iwhffibj0pnpznhv1g3i7h1fnhz64s3nz74fz6vsm3q6q3br";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."rand_core"."0.4.0" or {});
+  };
+  features_.rand_core."0.4.0" = deps: f: updateFeatures f (rec {
+    rand_core = fold recursiveUpdate {} [
+      { "0.4.0"."alloc" =
+        (f.rand_core."0.4.0"."alloc" or false) ||
+        (f.rand_core."0.4.0".std or false) ||
+        (rand_core."0.4.0"."std" or false); }
+      { "0.4.0"."serde" =
+        (f.rand_core."0.4.0"."serde" or false) ||
+        (f.rand_core."0.4.0".serde1 or false) ||
+        (rand_core."0.4.0"."serde1" or false); }
+      { "0.4.0"."serde_derive" =
+        (f.rand_core."0.4.0"."serde_derive" or false) ||
+        (f.rand_core."0.4.0".serde1 or false) ||
+        (rand_core."0.4.0"."serde1" or false); }
+      { "0.4.0".default = (f.rand_core."0.4.0".default or true); }
+    ];
+  }) [];
+
+
+# end
+# rand_os-0.1.3
+
+  crates.rand_os."0.1.3" = deps: { features?(features_.rand_os."0.1.3" deps {}) }: buildRustCrate {
+    crateName = "rand_os";
+    version = "0.1.3";
+    description = "OS backed Random Number Generator";
+    authors = [ "The Rand Project Developers" ];
+    sha256 = "0ywwspizgs9g8vzn6m5ix9yg36n15119d6n792h7mk4r5vs0ww4j";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rand_os"."0.1.3"."rand_core"}" deps)
+    ])
+      ++ (if abi == "sgx" then mapFeatures features ([
+      (crates."rdrand"."${deps."rand_os"."0.1.3"."rdrand"}" deps)
+    ]) else [])
+      ++ (if kernel == "cloudabi" then mapFeatures features ([
+      (crates."cloudabi"."${deps."rand_os"."0.1.3"."cloudabi"}" deps)
+    ]) else [])
+      ++ (if kernel == "fuchsia" then mapFeatures features ([
+      (crates."fuchsia_cprng"."${deps."rand_os"."0.1.3"."fuchsia_cprng"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."rand_os"."0.1.3"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."rand_os"."0.1.3"."winapi"}" deps)
+    ]) else [])
+      ++ (if kernel == "wasm32-unknown-unknown" then mapFeatures features ([
+]) else []);
+  };
+  features_.rand_os."0.1.3" = deps: f: updateFeatures f ({
+    cloudabi."${deps.rand_os."0.1.3".cloudabi}".default = true;
+    fuchsia_cprng."${deps.rand_os."0.1.3".fuchsia_cprng}".default = true;
+    libc."${deps.rand_os."0.1.3".libc}".default = true;
+    rand_core = fold recursiveUpdate {} [
+      { "${deps.rand_os."0.1.3".rand_core}"."std" = true; }
+      { "${deps.rand_os."0.1.3".rand_core}".default = true; }
+    ];
+    rand_os."0.1.3".default = (f.rand_os."0.1.3".default or true);
+    rdrand."${deps.rand_os."0.1.3".rdrand}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.rand_os."0.1.3".winapi}"."minwindef" = true; }
+      { "${deps.rand_os."0.1.3".winapi}"."ntsecapi" = true; }
+      { "${deps.rand_os."0.1.3".winapi}"."winnt" = true; }
+      { "${deps.rand_os."0.1.3".winapi}".default = true; }
+    ];
+  }) [
+    (features_.rand_core."${deps."rand_os"."0.1.3"."rand_core"}" deps)
+    (features_.rdrand."${deps."rand_os"."0.1.3"."rdrand"}" deps)
+    (features_.cloudabi."${deps."rand_os"."0.1.3"."cloudabi"}" deps)
+    (features_.fuchsia_cprng."${deps."rand_os"."0.1.3"."fuchsia_cprng"}" deps)
+    (features_.libc."${deps."rand_os"."0.1.3"."libc"}" deps)
+    (features_.winapi."${deps."rand_os"."0.1.3"."winapi"}" deps)
+  ];
+
+
+# end
+# rdrand-0.4.0
+
+  crates.rdrand."0.4.0" = deps: { features?(features_.rdrand."0.4.0" deps {}) }: buildRustCrate {
+    crateName = "rdrand";
+    version = "0.4.0";
+    description = "An implementation of random number generator based on rdrand and rdseed instructions";
+    authors = [ "Simonas Kazlauskas <rdrand@kazlauskas.me>" ];
+    sha256 = "15hrcasn0v876wpkwab1dwbk9kvqwrb3iv4y4dibb6yxnfvzwajk";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rdrand"."0.4.0"."rand_core"}" deps)
+    ]);
+    features = mkFeatures (features."rdrand"."0.4.0" or {});
+  };
+  features_.rdrand."0.4.0" = deps: f: updateFeatures f (rec {
+    rand_core."${deps.rdrand."0.4.0".rand_core}".default = (f.rand_core."${deps.rdrand."0.4.0".rand_core}".default or false);
+    rdrand = fold recursiveUpdate {} [
+      { "0.4.0"."std" =
+        (f.rdrand."0.4.0"."std" or false) ||
+        (f.rdrand."0.4.0".default or false) ||
+        (rdrand."0.4.0"."default" or false); }
+      { "0.4.0".default = (f.rdrand."0.4.0".default or true); }
+    ];
+  }) [
+    (features_.rand_core."${deps."rdrand"."0.4.0"."rand_core"}" deps)
+  ];
+
+
+# end
+# redox_syscall-0.1.40
+
+  crates.redox_syscall."0.1.40" = deps: { features?(features_.redox_syscall."0.1.40" deps {}) }: buildRustCrate {
+    crateName = "redox_syscall";
+    version = "0.1.40";
+    authors = [ "Jeremy Soller <jackpot51@gmail.com>" ];
+    sha256 = "132rnhrq49l3z7gjrwj2zfadgw6q0355s6a7id7x7c0d7sk72611";
+    libName = "syscall";
+  };
+  features_.redox_syscall."0.1.40" = deps: f: updateFeatures f ({
+    redox_syscall."0.1.40".default = (f.redox_syscall."0.1.40".default or true);
+  }) [];
+
+
+# end
+# redox_syscall-0.1.51
+
+  crates.redox_syscall."0.1.51" = deps: { features?(features_.redox_syscall."0.1.51" deps {}) }: buildRustCrate {
+    crateName = "redox_syscall";
+    version = "0.1.51";
+    description = "A Rust library to access raw Redox system calls";
+    authors = [ "Jeremy Soller <jackpot51@gmail.com>" ];
+    sha256 = "1a61cv7yydx64vpyvzr0z0hwzdvy4gcvcnfc6k70zpkngj5sz3ip";
+    libName = "syscall";
+  };
+  features_.redox_syscall."0.1.51" = deps: f: updateFeatures f ({
+    redox_syscall."0.1.51".default = (f.redox_syscall."0.1.51".default or true);
+  }) [];
+
+
+# end
+# redox_termios-0.1.1
+
+  crates.redox_termios."0.1.1" = deps: { features?(features_.redox_termios."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "redox_termios";
+    version = "0.1.1";
+    authors = [ "Jeremy Soller <jackpot51@gmail.com>" ];
+    sha256 = "04s6yyzjca552hdaqlvqhp3vw0zqbc304md5czyd3axh56iry8wh";
+    libPath = "src/lib.rs";
+    dependencies = mapFeatures features ([
+      (crates."redox_syscall"."${deps."redox_termios"."0.1.1"."redox_syscall"}" deps)
+    ]);
+  };
+  features_.redox_termios."0.1.1" = deps: f: updateFeatures f ({
+    redox_syscall."${deps.redox_termios."0.1.1".redox_syscall}".default = true;
+    redox_termios."0.1.1".default = (f.redox_termios."0.1.1".default or true);
+  }) [
+    (features_.redox_syscall."${deps."redox_termios"."0.1.1"."redox_syscall"}" deps)
+  ];
+
+
+# end
+# redox_users-0.2.0
+
+  crates.redox_users."0.2.0" = deps: { features?(features_.redox_users."0.2.0" deps {}) }: buildRustCrate {
+    crateName = "redox_users";
+    version = "0.2.0";
+    authors = [ "Jose Narvaez <goyox86@gmail.com>" "Wesley Hershberger <mggmugginsmc@gmail.com>" ];
+    sha256 = "0s9jrh378jk8rfi1xfwxvh2r1gv6rn3bq6n7sbajkrqqq0xzijvf";
+    dependencies = mapFeatures features ([
+      (crates."argon2rs"."${deps."redox_users"."0.2.0"."argon2rs"}" deps)
+      (crates."failure"."${deps."redox_users"."0.2.0"."failure"}" deps)
+      (crates."rand"."${deps."redox_users"."0.2.0"."rand"}" deps)
+      (crates."redox_syscall"."${deps."redox_users"."0.2.0"."redox_syscall"}" deps)
+    ]);
+  };
+  features_.redox_users."0.2.0" = deps: f: updateFeatures f ({
+    argon2rs."${deps.redox_users."0.2.0".argon2rs}".default = (f.argon2rs."${deps.redox_users."0.2.0".argon2rs}".default or false);
+    failure."${deps.redox_users."0.2.0".failure}".default = true;
+    rand."${deps.redox_users."0.2.0".rand}".default = true;
+    redox_syscall."${deps.redox_users."0.2.0".redox_syscall}".default = true;
+    redox_users."0.2.0".default = (f.redox_users."0.2.0".default or true);
+  }) [
+    (features_.argon2rs."${deps."redox_users"."0.2.0"."argon2rs"}" deps)
+    (features_.failure."${deps."redox_users"."0.2.0"."failure"}" deps)
+    (features_.rand."${deps."redox_users"."0.2.0"."rand"}" deps)
+    (features_.redox_syscall."${deps."redox_users"."0.2.0"."redox_syscall"}" deps)
+  ];
+
+
+# end
+# redox_users-0.3.0
+
+  crates.redox_users."0.3.0" = deps: { features?(features_.redox_users."0.3.0" deps {}) }: buildRustCrate {
+    crateName = "redox_users";
+    version = "0.3.0";
+    description = "A Rust library to access Redox users and groups functionality";
+    authors = [ "Jose Narvaez <goyox86@gmail.com>" "Wesley Hershberger <mggmugginsmc@gmail.com>" ];
+    sha256 = "051rzqgk5hn7rf24nwgbb32zfdn8qp2kwqvdp0772ia85p737p4j";
+    dependencies = mapFeatures features ([
+      (crates."argon2rs"."${deps."redox_users"."0.3.0"."argon2rs"}" deps)
+      (crates."failure"."${deps."redox_users"."0.3.0"."failure"}" deps)
+      (crates."rand_os"."${deps."redox_users"."0.3.0"."rand_os"}" deps)
+      (crates."redox_syscall"."${deps."redox_users"."0.3.0"."redox_syscall"}" deps)
+    ]);
+  };
+  features_.redox_users."0.3.0" = deps: f: updateFeatures f ({
+    argon2rs."${deps.redox_users."0.3.0".argon2rs}".default = (f.argon2rs."${deps.redox_users."0.3.0".argon2rs}".default or false);
+    failure."${deps.redox_users."0.3.0".failure}".default = true;
+    rand_os."${deps.redox_users."0.3.0".rand_os}".default = true;
+    redox_syscall."${deps.redox_users."0.3.0".redox_syscall}".default = true;
+    redox_users."0.3.0".default = (f.redox_users."0.3.0".default or true);
+  }) [
+    (features_.argon2rs."${deps."redox_users"."0.3.0"."argon2rs"}" deps)
+    (features_.failure."${deps."redox_users"."0.3.0"."failure"}" deps)
+    (features_.rand_os."${deps."redox_users"."0.3.0"."rand_os"}" deps)
+    (features_.redox_syscall."${deps."redox_users"."0.3.0"."redox_syscall"}" deps)
+  ];
+
+
+# end
+# regex-1.0.5
+
+  crates.regex."1.0.5" = deps: { features?(features_.regex."1.0.5" deps {}) }: buildRustCrate {
+    crateName = "regex";
+    version = "1.0.5";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1nb4dva9lhb3v76bdds9qcxldb2xy998sdraqnqaqdr6axfsfp02";
+    dependencies = mapFeatures features ([
+      (crates."aho_corasick"."${deps."regex"."1.0.5"."aho_corasick"}" deps)
+      (crates."memchr"."${deps."regex"."1.0.5"."memchr"}" deps)
+      (crates."regex_syntax"."${deps."regex"."1.0.5"."regex_syntax"}" deps)
+      (crates."thread_local"."${deps."regex"."1.0.5"."thread_local"}" deps)
+      (crates."utf8_ranges"."${deps."regex"."1.0.5"."utf8_ranges"}" deps)
+    ]);
+    features = mkFeatures (features."regex"."1.0.5" or {});
+  };
+  features_.regex."1.0.5" = deps: f: updateFeatures f (rec {
+    aho_corasick."${deps.regex."1.0.5".aho_corasick}".default = true;
+    memchr."${deps.regex."1.0.5".memchr}".default = true;
+    regex = fold recursiveUpdate {} [
+      { "1.0.5".default = (f.regex."1.0.5".default or true); }
+      { "1.0.5".pattern =
+        (f.regex."1.0.5".pattern or false) ||
+        (f.regex."1.0.5".unstable or false) ||
+        (regex."1.0.5"."unstable" or false); }
+      { "1.0.5".use_std =
+        (f.regex."1.0.5".use_std or false) ||
+        (f.regex."1.0.5".default or false) ||
+        (regex."1.0.5"."default" or false); }
+    ];
+    regex_syntax."${deps.regex."1.0.5".regex_syntax}".default = true;
+    thread_local."${deps.regex."1.0.5".thread_local}".default = true;
+    utf8_ranges."${deps.regex."1.0.5".utf8_ranges}".default = true;
+  }) [
+    (features_.aho_corasick."${deps."regex"."1.0.5"."aho_corasick"}" deps)
+    (features_.memchr."${deps."regex"."1.0.5"."memchr"}" deps)
+    (features_.regex_syntax."${deps."regex"."1.0.5"."regex_syntax"}" deps)
+    (features_.thread_local."${deps."regex"."1.0.5"."thread_local"}" deps)
+    (features_.utf8_ranges."${deps."regex"."1.0.5"."utf8_ranges"}" deps)
+  ];
+
+
+# end
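+
+# The fold/recursiveUpdate block above encodes Cargo feature implications:
+# a clause such as
+#
+#   { "1.0.5".use_std =
+#     (f.regex."1.0.5".use_std or false) ||
+#     (f.regex."1.0.5".default or false) ||
+#     (regex."1.0.5"."default" or false); }
+#
+# reads as "use_std is enabled if requested directly, or if default is
+# enabled", mirroring a manifest entry like default = ["use_std"] (inferred
+# here from the shape of the generated code rather than from regex's
+# Cargo.toml).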
+# regex-1.1.2
+
+  crates.regex."1.1.2" = deps: { features?(features_.regex."1.1.2" deps {}) }: buildRustCrate {
+    crateName = "regex";
+    version = "1.1.2";
+    description = "An implementation of regular expressions for Rust. This implementation uses\nfinite automata and guarantees linear time matching on all inputs.\n";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1c9nb031z1vw5l6lzfkfra2mah9hb2s1wgq9f1lmgcbkiiprj9xd";
+    dependencies = mapFeatures features ([
+      (crates."aho_corasick"."${deps."regex"."1.1.2"."aho_corasick"}" deps)
+      (crates."memchr"."${deps."regex"."1.1.2"."memchr"}" deps)
+      (crates."regex_syntax"."${deps."regex"."1.1.2"."regex_syntax"}" deps)
+      (crates."thread_local"."${deps."regex"."1.1.2"."thread_local"}" deps)
+      (crates."utf8_ranges"."${deps."regex"."1.1.2"."utf8_ranges"}" deps)
+    ]);
+    features = mkFeatures (features."regex"."1.1.2" or {});
+  };
+  features_.regex."1.1.2" = deps: f: updateFeatures f (rec {
+    aho_corasick."${deps.regex."1.1.2".aho_corasick}".default = true;
+    memchr."${deps.regex."1.1.2".memchr}".default = true;
+    regex = fold recursiveUpdate {} [
+      { "1.1.2"."pattern" =
+        (f.regex."1.1.2"."pattern" or false) ||
+        (f.regex."1.1.2".unstable or false) ||
+        (regex."1.1.2"."unstable" or false); }
+      { "1.1.2"."use_std" =
+        (f.regex."1.1.2"."use_std" or false) ||
+        (f.regex."1.1.2".default or false) ||
+        (regex."1.1.2"."default" or false); }
+      { "1.1.2".default = (f.regex."1.1.2".default or true); }
+    ];
+    regex_syntax."${deps.regex."1.1.2".regex_syntax}".default = true;
+    thread_local."${deps.regex."1.1.2".thread_local}".default = true;
+    utf8_ranges."${deps.regex."1.1.2".utf8_ranges}".default = true;
+  }) [
+    (features_.aho_corasick."${deps."regex"."1.1.2"."aho_corasick"}" deps)
+    (features_.memchr."${deps."regex"."1.1.2"."memchr"}" deps)
+    (features_.regex_syntax."${deps."regex"."1.1.2"."regex_syntax"}" deps)
+    (features_.thread_local."${deps."regex"."1.1.2"."thread_local"}" deps)
+    (features_.utf8_ranges."${deps."regex"."1.1.2"."utf8_ranges"}" deps)
+  ];
+
+
+# end
+# regex-syntax-0.6.2
+
+  crates.regex_syntax."0.6.2" = deps: { features?(features_.regex_syntax."0.6.2" deps {}) }: buildRustCrate {
+    crateName = "regex-syntax";
+    version = "0.6.2";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "109426mj7nhwr6szdzbcvn1a8g5zy52f9maqxjd9agm8wg87ylyw";
+    dependencies = mapFeatures features ([
+      (crates."ucd_util"."${deps."regex_syntax"."0.6.2"."ucd_util"}" deps)
+    ]);
+  };
+  features_.regex_syntax."0.6.2" = deps: f: updateFeatures f ({
+    regex_syntax."0.6.2".default = (f.regex_syntax."0.6.2".default or true);
+    ucd_util."${deps.regex_syntax."0.6.2".ucd_util}".default = true;
+  }) [
+    (features_.ucd_util."${deps."regex_syntax"."0.6.2"."ucd_util"}" deps)
+  ];
+
+
+# end
+# regex-syntax-0.6.5
+
+  crates.regex_syntax."0.6.5" = deps: { features?(features_.regex_syntax."0.6.5" deps {}) }: buildRustCrate {
+    crateName = "regex-syntax";
+    version = "0.6.5";
+    description = "A regular expression parser.";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "0aaaba1fan2qfyc31wzdmgmbmyirc27zgcbz41ba5wm1lb2d8kli";
+    dependencies = mapFeatures features ([
+      (crates."ucd_util"."${deps."regex_syntax"."0.6.5"."ucd_util"}" deps)
+    ]);
+  };
+  features_.regex_syntax."0.6.5" = deps: f: updateFeatures f ({
+    regex_syntax."0.6.5".default = (f.regex_syntax."0.6.5".default or true);
+    ucd_util."${deps.regex_syntax."0.6.5".ucd_util}".default = true;
+  }) [
+    (features_.ucd_util."${deps."regex_syntax"."0.6.5"."ucd_util"}" deps)
+  ];
+
+
+# end
+# remove_dir_all-0.5.1
+
+  crates.remove_dir_all."0.5.1" = deps: { features?(features_.remove_dir_all."0.5.1" deps {}) }: buildRustCrate {
+    crateName = "remove_dir_all";
+    version = "0.5.1";
+    authors = [ "Aaronepower <theaaronepower@gmail.com>" ];
+    sha256 = "1chx3yvfbj46xjz4bzsvps208l46hfbcy0sm98gpiya454n4rrl7";
+    dependencies = (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."remove_dir_all"."0.5.1"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.remove_dir_all."0.5.1" = deps: f: updateFeatures f ({
+    remove_dir_all."0.5.1".default = (f.remove_dir_all."0.5.1".default or true);
+    winapi = fold recursiveUpdate {} [
+      { "${deps.remove_dir_all."0.5.1".winapi}"."errhandlingapi" = true; }
+      { "${deps.remove_dir_all."0.5.1".winapi}"."fileapi" = true; }
+      { "${deps.remove_dir_all."0.5.1".winapi}"."std" = true; }
+      { "${deps.remove_dir_all."0.5.1".winapi}"."winbase" = true; }
+      { "${deps.remove_dir_all."0.5.1".winapi}"."winerror" = true; }
+      { "${deps.remove_dir_all."0.5.1".winapi}".default = true; }
+    ];
+  }) [
+    (features_.winapi."${deps."remove_dir_all"."0.5.1"."winapi"}" deps)
+  ];
+
+
+# end
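+
+# remove_dir_all above shows the platform-conditional dependency pattern:
+# a Cargo cfg(windows) dependency becomes
+#
+#   dependencies = (if kernel == "windows" then mapFeatures features ([
+#     ...
+#   ]) else []);
+#
+# (with kernel assumed to be the target kernel string bound earlier in this
+# file), so non-Windows builds see an empty dependency list while the winapi
+# feature flags are still declared unconditionally in features_.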
+# rustc-demangle-0.1.13
+
+  crates.rustc_demangle."0.1.13" = deps: { features?(features_.rustc_demangle."0.1.13" deps {}) }: buildRustCrate {
+    crateName = "rustc-demangle";
+    version = "0.1.13";
+    description = "Rust compiler symbol demangling.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0sr6cr02araqnlqwc5ghvnafjmkw11vzjswqaz757lvyrcl8xcy6";
+  };
+  features_.rustc_demangle."0.1.13" = deps: f: updateFeatures f ({
+    rustc_demangle."0.1.13".default = (f.rustc_demangle."0.1.13".default or true);
+  }) [];
+
+
+# end
+# rustc-demangle-0.1.9
+
+  crates.rustc_demangle."0.1.9" = deps: { features?(features_.rustc_demangle."0.1.9" deps {}) }: buildRustCrate {
+    crateName = "rustc-demangle";
+    version = "0.1.9";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "00ma4r9haq0zv5krps617mym6y74056pfcivyld0kpci156vfaax";
+  };
+  features_.rustc_demangle."0.1.9" = deps: f: updateFeatures f ({
+    rustc_demangle."0.1.9".default = (f.rustc_demangle."0.1.9".default or true);
+  }) [];
+
+
+# end
+# ryu-0.2.6
+
+  crates.ryu."0.2.6" = deps: { features?(features_.ryu."0.2.6" deps {}) }: buildRustCrate {
+    crateName = "ryu";
+    version = "0.2.6";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1vdh6z4aysc9kiiqhl7vxkqz3fykcnp24kgfizshlwfsz2j0p9dr";
+    build = "build.rs";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."ryu"."0.2.6" or {});
+  };
+  features_.ryu."0.2.6" = deps: f: updateFeatures f ({
+    ryu."0.2.6".default = (f.ryu."0.2.6".default or true);
+  }) [];
+
+
+# end
+# ryu-0.2.7
+
+  crates.ryu."0.2.7" = deps: { features?(features_.ryu."0.2.7" deps {}) }: buildRustCrate {
+    crateName = "ryu";
+    version = "0.2.7";
+    description = "Fast floating point to string conversion";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "0m8szf1m87wfqkwh1f9zp9bn2mb0m9nav028xxnd0hlig90b44bd";
+    build = "build.rs";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."ryu"."0.2.7" or {});
+  };
+  features_.ryu."0.2.7" = deps: f: updateFeatures f ({
+    ryu."0.2.7".default = (f.ryu."0.2.7".default or true);
+  }) [];
+
+
+# end
+# scoped_threadpool-0.1.9
+
+  crates.scoped_threadpool."0.1.9" = deps: { features?(features_.scoped_threadpool."0.1.9" deps {}) }: buildRustCrate {
+    crateName = "scoped_threadpool";
+    version = "0.1.9";
+    authors = [ "Marvin Löbel <loebel.marvin@gmail.com>" ];
+    sha256 = "1arqj2skcfr46s1lcyvnlmfr5456kg5nhn8k90xyfjnxkp5yga2v";
+    features = mkFeatures (features."scoped_threadpool"."0.1.9" or {});
+  };
+  features_.scoped_threadpool."0.1.9" = deps: f: updateFeatures f ({
+    scoped_threadpool."0.1.9".default = (f.scoped_threadpool."0.1.9".default or true);
+  }) [];
+
+
+# end
+# serde-1.0.80
+
+  crates.serde."1.0.80" = deps: { features?(features_.serde."1.0.80" deps {}) }: buildRustCrate {
+    crateName = "serde";
+    version = "1.0.80";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "0vyciw2qhrws4hz87pfnsjdfzzdw2sclxqxq394g3a219a2rdcxz";
+    build = "build.rs";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."serde"."1.0.80" or {});
+  };
+  features_.serde."1.0.80" = deps: f: updateFeatures f (rec {
+    serde = fold recursiveUpdate {} [
+      { "1.0.80".default = (f.serde."1.0.80".default or true); }
+      { "1.0.80".serde_derive =
+        (f.serde."1.0.80".serde_derive or false) ||
+        (f.serde."1.0.80".derive or false) ||
+        (serde."1.0.80"."derive" or false); }
+      { "1.0.80".std =
+        (f.serde."1.0.80".std or false) ||
+        (f.serde."1.0.80".default or false) ||
+        (serde."1.0.80"."default" or false); }
+      { "1.0.80".unstable =
+        (f.serde."1.0.80".unstable or false) ||
+        (f.serde."1.0.80".alloc or false) ||
+        (serde."1.0.80"."alloc" or false); }
+    ];
+  }) [];
+
+
+# end
+# serde-1.0.84
+
+  crates.serde."1.0.84" = deps: { features?(features_.serde."1.0.84" deps {}) }: buildRustCrate {
+    crateName = "serde";
+    version = "1.0.84";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1x40cvvkbkz592jflwbfbxhim3wxdqp9dy0qxjw13ra7q57b29gy";
+    build = "build.rs";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."serde"."1.0.84" or {});
+  };
+  features_.serde."1.0.84" = deps: f: updateFeatures f (rec {
+    serde = fold recursiveUpdate {} [
+      { "1.0.84".default = (f.serde."1.0.84".default or true); }
+      { "1.0.84".serde_derive =
+        (f.serde."1.0.84".serde_derive or false) ||
+        (f.serde."1.0.84".derive or false) ||
+        (serde."1.0.84"."derive" or false); }
+      { "1.0.84".std =
+        (f.serde."1.0.84".std or false) ||
+        (f.serde."1.0.84".default or false) ||
+        (serde."1.0.84"."default" or false); }
+      { "1.0.84".unstable =
+        (f.serde."1.0.84".unstable or false) ||
+        (f.serde."1.0.84".alloc or false) ||
+        (serde."1.0.84"."alloc" or false); }
+    ];
+  }) [];
+
+
+# end
+# serde-1.0.89
+
+  crates.serde."1.0.89" = deps: { features?(features_.serde."1.0.89" deps {}) }: buildRustCrate {
+    crateName = "serde";
+    version = "1.0.89";
+    description = "A generic serialization/deserialization framework";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "14pidc6skkm92vhp431wi1aam5vv5g6rmsimik38wzb0qy72c71g";
+    build = "build.rs";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."serde"."1.0.89" or {});
+  };
+  features_.serde."1.0.89" = deps: f: updateFeatures f (rec {
+    serde = fold recursiveUpdate {} [
+      { "1.0.89"."serde_derive" =
+        (f.serde."1.0.89"."serde_derive" or false) ||
+        (f.serde."1.0.89".derive or false) ||
+        (serde."1.0.89"."derive" or false); }
+      { "1.0.89"."std" =
+        (f.serde."1.0.89"."std" or false) ||
+        (f.serde."1.0.89".default or false) ||
+        (serde."1.0.89"."default" or false); }
+      { "1.0.89"."unstable" =
+        (f.serde."1.0.89"."unstable" or false) ||
+        (f.serde."1.0.89".alloc or false) ||
+        (serde."1.0.89"."alloc" or false); }
+      { "1.0.89".default = (f.serde."1.0.89".default or true); }
+    ];
+  }) [];
+
+
+# end
+# serde_derive-1.0.80
+
+  crates.serde_derive."1.0.80" = deps: { features?(features_.serde_derive."1.0.80" deps {}) }: buildRustCrate {
+    crateName = "serde_derive";
+    version = "1.0.80";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1akvzhbnnqhd92lfj7vp43scs1vdml7x27c82l5yh0kz7xf7jaky";
+    procMacro = true;
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."serde_derive"."1.0.80"."proc_macro2"}" deps)
+      (crates."quote"."${deps."serde_derive"."1.0.80"."quote"}" deps)
+      (crates."syn"."${deps."serde_derive"."1.0.80"."syn"}" deps)
+    ]);
+    features = mkFeatures (features."serde_derive"."1.0.80" or {});
+  };
+  features_.serde_derive."1.0.80" = deps: f: updateFeatures f ({
+    proc_macro2."${deps.serde_derive."1.0.80".proc_macro2}".default = true;
+    quote."${deps.serde_derive."1.0.80".quote}".default = true;
+    serde_derive."1.0.80".default = (f.serde_derive."1.0.80".default or true);
+    syn = fold recursiveUpdate {} [
+      { "${deps.serde_derive."1.0.80".syn}"."visit" = true; }
+      { "${deps.serde_derive."1.0.80".syn}".default = true; }
+    ];
+  }) [
+    (features_.proc_macro2."${deps."serde_derive"."1.0.80"."proc_macro2"}" deps)
+    (features_.quote."${deps."serde_derive"."1.0.80"."quote"}" deps)
+    (features_.syn."${deps."serde_derive"."1.0.80"."syn"}" deps)
+  ];
+
+
+# end
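+
+# serde_derive above sets procMacro = true, which tells buildRustCrate to
+# compile the crate as a procedural macro (a host-platform library loaded by
+# rustc at build time) rather than an ordinary rlib.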
+# serde_derive-1.0.89
+
+  crates.serde_derive."1.0.89" = deps: { features?(features_.serde_derive."1.0.89" deps {}) }: buildRustCrate {
+    crateName = "serde_derive";
+    version = "1.0.89";
+    description = "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "0wxbxq9sccrd939pfnrgfzykkwl9gag2yf7vxhg2c2p9kx36d3wm";
+    procMacro = true;
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."serde_derive"."1.0.89"."proc_macro2"}" deps)
+      (crates."quote"."${deps."serde_derive"."1.0.89"."quote"}" deps)
+      (crates."syn"."${deps."serde_derive"."1.0.89"."syn"}" deps)
+    ]);
+    features = mkFeatures (features."serde_derive"."1.0.89" or {});
+  };
+  features_.serde_derive."1.0.89" = deps: f: updateFeatures f ({
+    proc_macro2."${deps.serde_derive."1.0.89".proc_macro2}".default = true;
+    quote."${deps.serde_derive."1.0.89".quote}".default = true;
+    serde_derive."1.0.89".default = (f.serde_derive."1.0.89".default or true);
+    syn = fold recursiveUpdate {} [
+      { "${deps.serde_derive."1.0.89".syn}"."visit" = true; }
+      { "${deps.serde_derive."1.0.89".syn}".default = true; }
+    ];
+  }) [
+    (features_.proc_macro2."${deps."serde_derive"."1.0.89"."proc_macro2"}" deps)
+    (features_.quote."${deps."serde_derive"."1.0.89"."quote"}" deps)
+    (features_.syn."${deps."serde_derive"."1.0.89"."syn"}" deps)
+  ];
+
+
+# end
+# serde_json-1.0.32
+
+  crates.serde_json."1.0.32" = deps: { features?(features_.serde_json."1.0.32" deps {}) }: buildRustCrate {
+    crateName = "serde_json";
+    version = "1.0.32";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1dqkvizi02j1bs5c21kw20idf4aa5399g29ndwl6vkmmrqkr1gr0";
+    dependencies = mapFeatures features ([
+      (crates."itoa"."${deps."serde_json"."1.0.32"."itoa"}" deps)
+      (crates."ryu"."${deps."serde_json"."1.0.32"."ryu"}" deps)
+      (crates."serde"."${deps."serde_json"."1.0.32"."serde"}" deps)
+    ]);
+    features = mkFeatures (features."serde_json"."1.0.32" or {});
+  };
+  features_.serde_json."1.0.32" = deps: f: updateFeatures f (rec {
+    itoa."${deps.serde_json."1.0.32".itoa}".default = true;
+    ryu."${deps.serde_json."1.0.32".ryu}".default = true;
+    serde."${deps.serde_json."1.0.32".serde}".default = true;
+    serde_json = fold recursiveUpdate {} [
+      { "1.0.32".default = (f.serde_json."1.0.32".default or true); }
+      { "1.0.32".indexmap =
+        (f.serde_json."1.0.32".indexmap or false) ||
+        (f.serde_json."1.0.32".preserve_order or false) ||
+        (serde_json."1.0.32"."preserve_order" or false); }
+    ];
+  }) [
+    (features_.itoa."${deps."serde_json"."1.0.32"."itoa"}" deps)
+    (features_.ryu."${deps."serde_json"."1.0.32"."ryu"}" deps)
+    (features_.serde."${deps."serde_json"."1.0.32"."serde"}" deps)
+  ];
+
+
+# end
+# serde_json-1.0.39
+
+  crates.serde_json."1.0.39" = deps: { features?(features_.serde_json."1.0.39" deps {}) }: buildRustCrate {
+    crateName = "serde_json";
+    version = "1.0.39";
+    description = "A JSON serialization file format";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "07ydv06hn8x0yl0rc94l2wl9r2xz1fqd97n1s6j3bgdc6gw406a8";
+    dependencies = mapFeatures features ([
+      (crates."itoa"."${deps."serde_json"."1.0.39"."itoa"}" deps)
+      (crates."ryu"."${deps."serde_json"."1.0.39"."ryu"}" deps)
+      (crates."serde"."${deps."serde_json"."1.0.39"."serde"}" deps)
+    ]);
+    features = mkFeatures (features."serde_json"."1.0.39" or {});
+  };
+  features_.serde_json."1.0.39" = deps: f: updateFeatures f (rec {
+    itoa."${deps.serde_json."1.0.39".itoa}".default = true;
+    ryu."${deps.serde_json."1.0.39".ryu}".default = true;
+    serde."${deps.serde_json."1.0.39".serde}".default = true;
+    serde_json = fold recursiveUpdate {} [
+      { "1.0.39"."indexmap" =
+        (f.serde_json."1.0.39"."indexmap" or false) ||
+        (f.serde_json."1.0.39".preserve_order or false) ||
+        (serde_json."1.0.39"."preserve_order" or false); }
+      { "1.0.39".default = (f.serde_json."1.0.39".default or true); }
+    ];
+  }) [
+    (features_.itoa."${deps."serde_json"."1.0.39"."itoa"}" deps)
+    (features_.ryu."${deps."serde_json"."1.0.39"."ryu"}" deps)
+    (features_.serde."${deps."serde_json"."1.0.39"."serde"}" deps)
+  ];
+
+
+# end
+# smallvec-0.6.9
+
+  crates.smallvec."0.6.9" = deps: { features?(features_.smallvec."0.6.9" deps {}) }: buildRustCrate {
+    crateName = "smallvec";
+    version = "0.6.9";
+    description = "'Small vector' optimization: store up to a small number of items on the stack";
+    authors = [ "Simon Sapin <simon.sapin@exyr.org>" ];
+    sha256 = "0p96l51a2pq5y0vn48nhbm6qslbc6k8h28cxm0pmzkqmj7xynz6w";
+    libPath = "lib.rs";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."smallvec"."0.6.9" or {});
+  };
+  features_.smallvec."0.6.9" = deps: f: updateFeatures f (rec {
+    smallvec = fold recursiveUpdate {} [
+      { "0.6.9"."std" =
+        (f.smallvec."0.6.9"."std" or false) ||
+        (f.smallvec."0.6.9".default or false) ||
+        (smallvec."0.6.9"."default" or false); }
+      { "0.6.9".default = (f.smallvec."0.6.9".default or true); }
+    ];
+  }) [];
+
+
+# end
+# strsim-0.7.0
+
+  crates.strsim."0.7.0" = deps: { features?(features_.strsim."0.7.0" deps {}) }: buildRustCrate {
+    crateName = "strsim";
+    version = "0.7.0";
+    authors = [ "Danny Guo <dannyguo91@gmail.com>" ];
+    sha256 = "0fy0k5f2705z73mb3x9459bpcvrx4ky8jpr4zikcbiwan4bnm0iv";
+  };
+  features_.strsim."0.7.0" = deps: f: updateFeatures f ({
+    strsim."0.7.0".default = (f.strsim."0.7.0".default or true);
+  }) [];
+
+
+# end
+# syn-0.15.13
+
+  crates.syn."0.15.13" = deps: { features?(features_.syn."0.15.13" deps {}) }: buildRustCrate {
+    crateName = "syn";
+    version = "0.15.13";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1zvnppl08f2njpkl3m10h221sdl4vsm7v6vyq63dxk16nn37b1bh";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."syn"."0.15.13"."proc_macro2"}" deps)
+      (crates."unicode_xid"."${deps."syn"."0.15.13"."unicode_xid"}" deps)
+    ]
+      ++ (if features.syn."0.15.13".quote or false then [ (crates.quote."${deps."syn"."0.15.13".quote}" deps) ] else []));
+    features = mkFeatures (features."syn"."0.15.13" or {});
+  };
+  features_.syn."0.15.13" = deps: f: updateFeatures f (rec {
+    proc_macro2 = fold recursiveUpdate {} [
+      { "${deps.syn."0.15.13".proc_macro2}"."proc-macro" =
+        (f.proc_macro2."${deps.syn."0.15.13".proc_macro2}"."proc-macro" or false) ||
+        (syn."0.15.13"."proc-macro" or false) ||
+        (f."syn"."0.15.13"."proc-macro" or false); }
+      { "${deps.syn."0.15.13".proc_macro2}".default = (f.proc_macro2."${deps.syn."0.15.13".proc_macro2}".default or false); }
+    ];
+    quote = fold recursiveUpdate {} [
+      { "${deps.syn."0.15.13".quote}"."proc-macro" =
+        (f.quote."${deps.syn."0.15.13".quote}"."proc-macro" or false) ||
+        (syn."0.15.13"."proc-macro" or false) ||
+        (f."syn"."0.15.13"."proc-macro" or false); }
+      { "${deps.syn."0.15.13".quote}".default = (f.quote."${deps.syn."0.15.13".quote}".default or false); }
+    ];
+    syn = fold recursiveUpdate {} [
+      { "0.15.13".clone-impls =
+        (f.syn."0.15.13".clone-impls or false) ||
+        (f.syn."0.15.13".default or false) ||
+        (syn."0.15.13"."default" or false); }
+      { "0.15.13".default = (f.syn."0.15.13".default or true); }
+      { "0.15.13".derive =
+        (f.syn."0.15.13".derive or false) ||
+        (f.syn."0.15.13".default or false) ||
+        (syn."0.15.13"."default" or false); }
+      { "0.15.13".parsing =
+        (f.syn."0.15.13".parsing or false) ||
+        (f.syn."0.15.13".default or false) ||
+        (syn."0.15.13"."default" or false); }
+      { "0.15.13".printing =
+        (f.syn."0.15.13".printing or false) ||
+        (f.syn."0.15.13".default or false) ||
+        (syn."0.15.13"."default" or false); }
+      { "0.15.13".proc-macro =
+        (f.syn."0.15.13".proc-macro or false) ||
+        (f.syn."0.15.13".default or false) ||
+        (syn."0.15.13"."default" or false); }
+      { "0.15.13".quote =
+        (f.syn."0.15.13".quote or false) ||
+        (f.syn."0.15.13".printing or false) ||
+        (syn."0.15.13"."printing" or false); }
+    ];
+    unicode_xid."${deps.syn."0.15.13".unicode_xid}".default = true;
+  }) [
+    (features_.proc_macro2."${deps."syn"."0.15.13"."proc_macro2"}" deps)
+    (features_.quote."${deps."syn"."0.15.13"."quote"}" deps)
+    (features_.unicode_xid."${deps."syn"."0.15.13"."unicode_xid"}" deps)
+  ];
+
+
+# end
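+
+# In the syn block above, feature resolution crosses crate boundaries:
+# turning on syn's "proc-macro" feature also turns on the "proc-macro"
+# feature of its proc_macro2 and quote dependencies, while their default
+# features stay off (the `.default or false` fallback, which matches a
+# dependency declared with default-features = false; that reading is an
+# inference from the generated code).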
+# syn-0.15.29
+
+  crates.syn."0.15.29" = deps: { features?(features_.syn."0.15.29" deps {}) }: buildRustCrate {
+    crateName = "syn";
+    version = "0.15.29";
+    description = "Parser for Rust source code";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "0wrd6awgc6f1iwfn2v9fvwyd2yddgxdjv9s106kvwg1ljbw3fajw";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."syn"."0.15.29"."proc_macro2"}" deps)
+      (crates."unicode_xid"."${deps."syn"."0.15.29"."unicode_xid"}" deps)
+    ]
+      ++ (if features.syn."0.15.29".quote or false then [ (crates.quote."${deps."syn"."0.15.29".quote}" deps) ] else []));
+    features = mkFeatures (features."syn"."0.15.29" or {});
+  };
+  features_.syn."0.15.29" = deps: f: updateFeatures f (rec {
+    proc_macro2 = fold recursiveUpdate {} [
+      { "${deps.syn."0.15.29".proc_macro2}"."proc-macro" =
+        (f.proc_macro2."${deps.syn."0.15.29".proc_macro2}"."proc-macro" or false) ||
+        (syn."0.15.29"."proc-macro" or false) ||
+        (f."syn"."0.15.29"."proc-macro" or false); }
+      { "${deps.syn."0.15.29".proc_macro2}".default = (f.proc_macro2."${deps.syn."0.15.29".proc_macro2}".default or false); }
+    ];
+    quote = fold recursiveUpdate {} [
+      { "${deps.syn."0.15.29".quote}"."proc-macro" =
+        (f.quote."${deps.syn."0.15.29".quote}"."proc-macro" or false) ||
+        (syn."0.15.29"."proc-macro" or false) ||
+        (f."syn"."0.15.29"."proc-macro" or false); }
+      { "${deps.syn."0.15.29".quote}".default = (f.quote."${deps.syn."0.15.29".quote}".default or false); }
+    ];
+    syn = fold recursiveUpdate {} [
+      { "0.15.29"."clone-impls" =
+        (f.syn."0.15.29"."clone-impls" or false) ||
+        (f.syn."0.15.29".default or false) ||
+        (syn."0.15.29"."default" or false); }
+      { "0.15.29"."derive" =
+        (f.syn."0.15.29"."derive" or false) ||
+        (f.syn."0.15.29".default or false) ||
+        (syn."0.15.29"."default" or false); }
+      { "0.15.29"."parsing" =
+        (f.syn."0.15.29"."parsing" or false) ||
+        (f.syn."0.15.29".default or false) ||
+        (syn."0.15.29"."default" or false); }
+      { "0.15.29"."printing" =
+        (f.syn."0.15.29"."printing" or false) ||
+        (f.syn."0.15.29".default or false) ||
+        (syn."0.15.29"."default" or false); }
+      { "0.15.29"."proc-macro" =
+        (f.syn."0.15.29"."proc-macro" or false) ||
+        (f.syn."0.15.29".default or false) ||
+        (syn."0.15.29"."default" or false); }
+      { "0.15.29"."quote" =
+        (f.syn."0.15.29"."quote" or false) ||
+        (f.syn."0.15.29".printing or false) ||
+        (syn."0.15.29"."printing" or false); }
+      { "0.15.29".default = (f.syn."0.15.29".default or true); }
+    ];
+    unicode_xid."${deps.syn."0.15.29".unicode_xid}".default = true;
+  }) [
+    (features_.proc_macro2."${deps."syn"."0.15.29"."proc_macro2"}" deps)
+    (features_.quote."${deps."syn"."0.15.29"."quote"}" deps)
+    (features_.unicode_xid."${deps."syn"."0.15.29"."unicode_xid"}" deps)
+  ];
+
+
+# end
+# synstructure-0.10.0
+
+  crates.synstructure."0.10.0" = deps: { features?(features_.synstructure."0.10.0" deps {}) }: buildRustCrate {
+    crateName = "synstructure";
+    version = "0.10.0";
+    authors = [ "Nika Layzell <nika@thelayzells.com>" ];
+    sha256 = "1alb4hsbm5qf4jy7nmdkqrh3jagqk1xj88w0pmz67f16dvgpf0qf";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."synstructure"."0.10.0"."proc_macro2"}" deps)
+      (crates."quote"."${deps."synstructure"."0.10.0"."quote"}" deps)
+      (crates."syn"."${deps."synstructure"."0.10.0"."syn"}" deps)
+      (crates."unicode_xid"."${deps."synstructure"."0.10.0"."unicode_xid"}" deps)
+    ]);
+    features = mkFeatures (features."synstructure"."0.10.0" or {});
+  };
+  features_.synstructure."0.10.0" = deps: f: updateFeatures f ({
+    proc_macro2."${deps.synstructure."0.10.0".proc_macro2}".default = true;
+    quote."${deps.synstructure."0.10.0".quote}".default = true;
+    syn = fold recursiveUpdate {} [
+      { "${deps.synstructure."0.10.0".syn}"."extra-traits" = true; }
+      { "${deps.synstructure."0.10.0".syn}"."visit" = true; }
+      { "${deps.synstructure."0.10.0".syn}".default = true; }
+    ];
+    synstructure."0.10.0".default = (f.synstructure."0.10.0".default or true);
+    unicode_xid."${deps.synstructure."0.10.0".unicode_xid}".default = true;
+  }) [
+    (features_.proc_macro2."${deps."synstructure"."0.10.0"."proc_macro2"}" deps)
+    (features_.quote."${deps."synstructure"."0.10.0"."quote"}" deps)
+    (features_.syn."${deps."synstructure"."0.10.0"."syn"}" deps)
+    (features_.unicode_xid."${deps."synstructure"."0.10.0"."unicode_xid"}" deps)
+  ];
+
+
+# end
+# synstructure-0.10.1
+
+  crates.synstructure."0.10.1" = deps: { features?(features_.synstructure."0.10.1" deps {}) }: buildRustCrate {
+    crateName = "synstructure";
+    version = "0.10.1";
+    description = "Helper methods and macros for custom derives";
+    authors = [ "Nika Layzell <nika@thelayzells.com>" ];
+    sha256 = "0mx2vwd0d0f7hanz15nkp0ikkfjsx9rfkph7pynxyfbj45ank4g3";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."synstructure"."0.10.1"."proc_macro2"}" deps)
+      (crates."quote"."${deps."synstructure"."0.10.1"."quote"}" deps)
+      (crates."syn"."${deps."synstructure"."0.10.1"."syn"}" deps)
+      (crates."unicode_xid"."${deps."synstructure"."0.10.1"."unicode_xid"}" deps)
+    ]);
+    features = mkFeatures (features."synstructure"."0.10.1" or {});
+  };
+  features_.synstructure."0.10.1" = deps: f: updateFeatures f ({
+    proc_macro2."${deps.synstructure."0.10.1".proc_macro2}".default = true;
+    quote."${deps.synstructure."0.10.1".quote}".default = true;
+    syn = fold recursiveUpdate {} [
+      { "${deps.synstructure."0.10.1".syn}"."extra-traits" = true; }
+      { "${deps.synstructure."0.10.1".syn}"."visit" = true; }
+      { "${deps.synstructure."0.10.1".syn}".default = true; }
+    ];
+    synstructure."0.10.1".default = (f.synstructure."0.10.1".default or true);
+    unicode_xid."${deps.synstructure."0.10.1".unicode_xid}".default = true;
+  }) [
+    (features_.proc_macro2."${deps."synstructure"."0.10.1"."proc_macro2"}" deps)
+    (features_.quote."${deps."synstructure"."0.10.1"."quote"}" deps)
+    (features_.syn."${deps."synstructure"."0.10.1"."syn"}" deps)
+    (features_.unicode_xid."${deps."synstructure"."0.10.1"."unicode_xid"}" deps)
+  ];
+
+
+# end
+# tempdir-0.3.7
+
+  crates.tempdir."0.3.7" = deps: { features?(features_.tempdir."0.3.7" deps {}) }: buildRustCrate {
+    crateName = "tempdir";
+    version = "0.3.7";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "0y53sxybyljrr7lh0x0ysrsa7p7cljmwv9v80acy3rc6n97g67vy";
+    dependencies = mapFeatures features ([
+      (crates."rand"."${deps."tempdir"."0.3.7"."rand"}" deps)
+      (crates."remove_dir_all"."${deps."tempdir"."0.3.7"."remove_dir_all"}" deps)
+    ]);
+  };
+  features_.tempdir."0.3.7" = deps: f: updateFeatures f ({
+    rand."${deps.tempdir."0.3.7".rand}".default = true;
+    remove_dir_all."${deps.tempdir."0.3.7".remove_dir_all}".default = true;
+    tempdir."0.3.7".default = (f.tempdir."0.3.7".default or true);
+  }) [
+    (features_.rand."${deps."tempdir"."0.3.7"."rand"}" deps)
+    (features_.remove_dir_all."${deps."tempdir"."0.3.7"."remove_dir_all"}" deps)
+  ];
+
+
+# end
+# termcolor-1.0.4
+
+  crates.termcolor."1.0.4" = deps: { features?(features_.termcolor."1.0.4" deps {}) }: buildRustCrate {
+    crateName = "termcolor";
+    version = "1.0.4";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "0xydrjc0bxg08llcbcmkka29szdrfklk4vh6l6mdd67ajifqw1mv";
+    dependencies = (if kernel == "windows" then mapFeatures features ([
+      (crates."wincolor"."${deps."termcolor"."1.0.4"."wincolor"}" deps)
+    ]) else []);
+  };
+  features_.termcolor."1.0.4" = deps: f: updateFeatures f ({
+    termcolor."1.0.4".default = (f.termcolor."1.0.4".default or true);
+    wincolor."${deps.termcolor."1.0.4".wincolor}".default = true;
+  }) [
+    (features_.wincolor."${deps."termcolor"."1.0.4"."wincolor"}" deps)
+  ];
+
+
+# end
+# termion-1.5.1
+
+  crates.termion."1.5.1" = deps: { features?(features_.termion."1.5.1" deps {}) }: buildRustCrate {
+    crateName = "termion";
+    version = "1.5.1";
+    authors = [ "ticki <Ticki@users.noreply.github.com>" "gycos <alexandre.bury@gmail.com>" "IGI-111 <igi-111@protonmail.com>" ];
+    sha256 = "02gq4vd8iws1f3gjrgrgpajsk2bk43nds5acbbb4s8dvrdvr8nf1";
+    dependencies = (if !(kernel == "redox") then mapFeatures features ([
+      (crates."libc"."${deps."termion"."1.5.1"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "redox" then mapFeatures features ([
+      (crates."redox_syscall"."${deps."termion"."1.5.1"."redox_syscall"}" deps)
+      (crates."redox_termios"."${deps."termion"."1.5.1"."redox_termios"}" deps)
+    ]) else []);
+  };
+  features_.termion."1.5.1" = deps: f: updateFeatures f ({
+    libc."${deps.termion."1.5.1".libc}".default = true;
+    redox_syscall."${deps.termion."1.5.1".redox_syscall}".default = true;
+    redox_termios."${deps.termion."1.5.1".redox_termios}".default = true;
+    termion."1.5.1".default = (f.termion."1.5.1".default or true);
+  }) [
+    (features_.libc."${deps."termion"."1.5.1"."libc"}" deps)
+    (features_.redox_syscall."${deps."termion"."1.5.1"."redox_syscall"}" deps)
+    (features_.redox_termios."${deps."termion"."1.5.1"."redox_termios"}" deps)
+  ];
+
+
+# end
+# textwrap-0.10.0
+
+  crates.textwrap."0.10.0" = deps: { features?(features_.textwrap."0.10.0" deps {}) }: buildRustCrate {
+    crateName = "textwrap";
+    version = "0.10.0";
+    authors = [ "Martin Geisler <martin@geisler.net>" ];
+    sha256 = "1s8d5cna12smhgj0x2y1xphklyk2an1yzbadnj89p1vy5vnjpsas";
+    dependencies = mapFeatures features ([
+      (crates."unicode_width"."${deps."textwrap"."0.10.0"."unicode_width"}" deps)
+    ]);
+  };
+  features_.textwrap."0.10.0" = deps: f: updateFeatures f ({
+    textwrap."0.10.0".default = (f.textwrap."0.10.0".default or true);
+    unicode_width."${deps.textwrap."0.10.0".unicode_width}".default = true;
+  }) [
+    (features_.unicode_width."${deps."textwrap"."0.10.0"."unicode_width"}" deps)
+  ];
+
+
+# end
+# thread_local-0.3.6
+
+  crates.thread_local."0.3.6" = deps: { features?(features_.thread_local."0.3.6" deps {}) }: buildRustCrate {
+    crateName = "thread_local";
+    version = "0.3.6";
+    authors = [ "Amanieu d'Antras <amanieu@gmail.com>" ];
+    sha256 = "02rksdwjmz2pw9bmgbb4c0bgkbq5z6nvg510sq1s6y2j1gam0c7i";
+    dependencies = mapFeatures features ([
+      (crates."lazy_static"."${deps."thread_local"."0.3.6"."lazy_static"}" deps)
+    ]);
+  };
+  features_.thread_local."0.3.6" = deps: f: updateFeatures f ({
+    lazy_static."${deps.thread_local."0.3.6".lazy_static}".default = true;
+    thread_local."0.3.6".default = (f.thread_local."0.3.6".default or true);
+  }) [
+    (features_.lazy_static."${deps."thread_local"."0.3.6"."lazy_static"}" deps)
+  ];
+
+
+# end
+# toml-0.4.10
+
+  crates.toml."0.4.10" = deps: { features?(features_.toml."0.4.10" deps {}) }: buildRustCrate {
+    crateName = "toml";
+    version = "0.4.10";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0fs4kxl86w3kmgwcgcv23nk79zagayz1spg281r83w0ywf88d6f1";
+    dependencies = mapFeatures features ([
+      (crates."serde"."${deps."toml"."0.4.10"."serde"}" deps)
+    ]);
+  };
+  features_.toml."0.4.10" = deps: f: updateFeatures f ({
+    serde."${deps.toml."0.4.10".serde}".default = true;
+    toml."0.4.10".default = (f.toml."0.4.10".default or true);
+  }) [
+    (features_.serde."${deps."toml"."0.4.10"."serde"}" deps)
+  ];
+
+
+# end
+# toml-0.4.8
+
+  crates.toml."0.4.8" = deps: { features?(features_.toml."0.4.8" deps {}) }: buildRustCrate {
+    crateName = "toml";
+    version = "0.4.8";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1xm3chgsvi3qqi7vj8sb5xvnbfpkqyl4fiwh7y2cl6r4brwlmif1";
+    dependencies = mapFeatures features ([
+      (crates."serde"."${deps."toml"."0.4.8"."serde"}" deps)
+    ]);
+  };
+  features_.toml."0.4.8" = deps: f: updateFeatures f ({
+    serde."${deps.toml."0.4.8".serde}".default = true;
+    toml."0.4.8".default = (f.toml."0.4.8".default or true);
+  }) [
+    (features_.serde."${deps."toml"."0.4.8"."serde"}" deps)
+  ];
+
+
+# end
+# toml-0.5.0
+
+  crates.toml."0.5.0" = deps: { features?(features_.toml."0.5.0" deps {}) }: buildRustCrate {
+    crateName = "toml";
+    version = "0.5.0";
+    description = "A native Rust encoder and decoder of TOML-formatted files and streams. Provides\nimplementations of the standard Serialize/Deserialize traits for TOML data to\nfacilitate deserializing and serializing Rust structures.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0l2lqzbn5g9l376k01isq1nhb14inkr4c50qbjbdzh5qysz7dyk5";
+    dependencies = mapFeatures features ([
+      (crates."serde"."${deps."toml"."0.5.0"."serde"}" deps)
+    ]);
+    features = mkFeatures (features."toml"."0.5.0" or {});
+  };
+  features_.toml."0.5.0" = deps: f: updateFeatures f (rec {
+    serde."${deps.toml."0.5.0".serde}".default = true;
+    toml = fold recursiveUpdate {} [
+      { "0.5.0"."linked-hash-map" =
+        (f.toml."0.5.0"."linked-hash-map" or false) ||
+        (f.toml."0.5.0".preserve_order or false) ||
+        (toml."0.5.0"."preserve_order" or false); }
+      { "0.5.0".default = (f.toml."0.5.0".default or true); }
+    ];
+  }) [
+    (features_.serde."${deps."toml"."0.5.0"."serde"}" deps)
+  ];
+
+
+# end
+# toml2nix-0.1.1
+
+  crates.toml2nix."0.1.1" = deps: { features?(features_.toml2nix."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "toml2nix";
+    version = "0.1.1";
+    authors = [ "Pierre-Étienne Meunier <pierre-etienne.meunier@inria.fr>" ];
+    sha256 = "167qyylp0s76h7r0n99as3jwry5mrn5q1wxh2sdwh51d5qnnw6b2";
+    dependencies = mapFeatures features ([
+      (crates."toml"."${deps."toml2nix"."0.1.1"."toml"}" deps)
+    ]);
+  };
+  features_.toml2nix."0.1.1" = deps: f: updateFeatures f ({
+    toml."${deps.toml2nix."0.1.1".toml}".default = true;
+    toml2nix."0.1.1".default = (f.toml2nix."0.1.1".default or true);
+  }) [
+    (features_.toml."${deps."toml2nix"."0.1.1"."toml"}" deps)
+  ];
+
+
+# end
+# ucd-util-0.1.1
+
+  crates.ucd_util."0.1.1" = deps: { features?(features_.ucd_util."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "ucd-util";
+    version = "0.1.1";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "02a8h3siipx52b832xc8m8rwasj6nx9jpiwfldw8hp6k205hgkn0";
+  };
+  features_.ucd_util."0.1.1" = deps: f: updateFeatures f ({
+    ucd_util."0.1.1".default = (f.ucd_util."0.1.1".default or true);
+  }) [];
+
+
+# end
+# ucd-util-0.1.3
+
+  crates.ucd_util."0.1.3" = deps: { features?(features_.ucd_util."0.1.3" deps {}) }: buildRustCrate {
+    crateName = "ucd-util";
+    version = "0.1.3";
+    description = "A small utility library for working with the Unicode character database.\n";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "1n1qi3jywq5syq90z9qd8qzbn58pcjgv1sx4sdmipm4jf9zanz15";
+  };
+  features_.ucd_util."0.1.3" = deps: f: updateFeatures f ({
+    ucd_util."0.1.3".default = (f.ucd_util."0.1.3".default or true);
+  }) [];
+
+
+# end
+# unicode-bidi-0.3.4
+
+  crates.unicode_bidi."0.3.4" = deps: { features?(features_.unicode_bidi."0.3.4" deps {}) }: buildRustCrate {
+    crateName = "unicode-bidi";
+    version = "0.3.4";
+    authors = [ "The Servo Project Developers" ];
+    sha256 = "0lcd6jasrf8p9p0q20qyf10c6xhvw40m2c4rr105hbk6zy26nj1q";
+    libName = "unicode_bidi";
+    dependencies = mapFeatures features ([
+      (crates."matches"."${deps."unicode_bidi"."0.3.4"."matches"}" deps)
+    ]);
+    features = mkFeatures (features."unicode_bidi"."0.3.4" or {});
+  };
+  features_.unicode_bidi."0.3.4" = deps: f: updateFeatures f (rec {
+    matches."${deps.unicode_bidi."0.3.4".matches}".default = true;
+    unicode_bidi = fold recursiveUpdate {} [
+      { "0.3.4".default = (f.unicode_bidi."0.3.4".default or true); }
+      { "0.3.4".flame =
+        (f.unicode_bidi."0.3.4".flame or false) ||
+        (f.unicode_bidi."0.3.4".flame_it or false) ||
+        (unicode_bidi."0.3.4"."flame_it" or false); }
+      { "0.3.4".flamer =
+        (f.unicode_bidi."0.3.4".flamer or false) ||
+        (f.unicode_bidi."0.3.4".flame_it or false) ||
+        (unicode_bidi."0.3.4"."flame_it" or false); }
+      { "0.3.4".serde =
+        (f.unicode_bidi."0.3.4".serde or false) ||
+        (f.unicode_bidi."0.3.4".with_serde or false) ||
+        (unicode_bidi."0.3.4"."with_serde" or false); }
+    ];
+  }) [
+    (features_.matches."${deps."unicode_bidi"."0.3.4"."matches"}" deps)
+  ];
+
+
+# end
+# unicode-normalization-0.1.7
+
+  crates.unicode_normalization."0.1.7" = deps: { features?(features_.unicode_normalization."0.1.7" deps {}) }: buildRustCrate {
+    crateName = "unicode-normalization";
+    version = "0.1.7";
+    authors = [ "kwantam <kwantam@gmail.com>" ];
+    sha256 = "1da2hv800pd0wilmn4idwpgv5p510hjxizjcfv6xzb40xcsjd8gs";
+  };
+  features_.unicode_normalization."0.1.7" = deps: f: updateFeatures f ({
+    unicode_normalization."0.1.7".default = (f.unicode_normalization."0.1.7".default or true);
+  }) [];
+
+
+# end
+# unicode-normalization-0.1.8
+
+  crates.unicode_normalization."0.1.8" = deps: { features?(features_.unicode_normalization."0.1.8" deps {}) }: buildRustCrate {
+    crateName = "unicode-normalization";
+    version = "0.1.8";
+    description = "This crate provides functions for normalization of\nUnicode strings, including Canonical and Compatible\nDecomposition and Recomposition, as described in\nUnicode Standard Annex #15.\n";
+    authors = [ "kwantam <kwantam@gmail.com>" ];
+    sha256 = "1pb26i2xd5zz0icabyqahikpca0iwj2jd4145pczc4bb7p641dsz";
+    dependencies = mapFeatures features ([
+      (crates."smallvec"."${deps."unicode_normalization"."0.1.8"."smallvec"}" deps)
+    ]);
+  };
+  features_.unicode_normalization."0.1.8" = deps: f: updateFeatures f ({
+    smallvec."${deps.unicode_normalization."0.1.8".smallvec}".default = true;
+    unicode_normalization."0.1.8".default = (f.unicode_normalization."0.1.8".default or true);
+  }) [
+    (features_.smallvec."${deps."unicode_normalization"."0.1.8"."smallvec"}" deps)
+  ];
+
+
+# end
+# unicode-width-0.1.5
+
+  crates.unicode_width."0.1.5" = deps: { features?(features_.unicode_width."0.1.5" deps {}) }: buildRustCrate {
+    crateName = "unicode-width";
+    version = "0.1.5";
+    authors = [ "kwantam <kwantam@gmail.com>" ];
+    sha256 = "0886lc2aymwgy0lhavwn6s48ik3c61ykzzd3za6prgnw51j7bi4w";
+    features = mkFeatures (features."unicode_width"."0.1.5" or {});
+  };
+  features_.unicode_width."0.1.5" = deps: f: updateFeatures f ({
+    unicode_width."0.1.5".default = (f.unicode_width."0.1.5".default or true);
+  }) [];
+
+
+# end
+# unicode-xid-0.1.0
+
+  crates.unicode_xid."0.1.0" = deps: { features?(features_.unicode_xid."0.1.0" deps {}) }: buildRustCrate {
+    crateName = "unicode-xid";
+    version = "0.1.0";
+    authors = [ "erick.tryzelaar <erick.tryzelaar@gmail.com>" "kwantam <kwantam@gmail.com>" ];
+    sha256 = "05wdmwlfzxhq3nhsxn6wx4q8dhxzzfb9szsz6wiw092m1rjj01zj";
+    features = mkFeatures (features."unicode_xid"."0.1.0" or {});
+  };
+  features_.unicode_xid."0.1.0" = deps: f: updateFeatures f ({
+    unicode_xid."0.1.0".default = (f.unicode_xid."0.1.0".default or true);
+  }) [];
+
+
+# end
+# url-1.7.2
+
+  crates.url."1.7.2" = deps: { features?(features_.url."1.7.2" deps {}) }: buildRustCrate {
+    crateName = "url";
+    version = "1.7.2";
+    authors = [ "The rust-url developers" ];
+    sha256 = "0qzrjzd9r1niv7037x4cgnv98fs1vj0k18lpxx890ipc47x5gc09";
+    dependencies = mapFeatures features ([
+      (crates."idna"."${deps."url"."1.7.2"."idna"}" deps)
+      (crates."matches"."${deps."url"."1.7.2"."matches"}" deps)
+      (crates."percent_encoding"."${deps."url"."1.7.2"."percent_encoding"}" deps)
+    ]);
+    features = mkFeatures (features."url"."1.7.2" or {});
+  };
+  features_.url."1.7.2" = deps: f: updateFeatures f (rec {
+    idna."${deps.url."1.7.2".idna}".default = true;
+    matches."${deps.url."1.7.2".matches}".default = true;
+    percent_encoding."${deps.url."1.7.2".percent_encoding}".default = true;
+    url = fold recursiveUpdate {} [
+      { "1.7.2".default = (f.url."1.7.2".default or true); }
+      { "1.7.2".encoding =
+        (f.url."1.7.2".encoding or false) ||
+        (f.url."1.7.2".query_encoding or false) ||
+        (url."1.7.2"."query_encoding" or false); }
+      { "1.7.2".heapsize =
+        (f.url."1.7.2".heapsize or false) ||
+        (f.url."1.7.2".heap_size or false) ||
+        (url."1.7.2"."heap_size" or false); }
+    ];
+  }) [
+    (features_.idna."${deps."url"."1.7.2"."idna"}" deps)
+    (features_.matches."${deps."url"."1.7.2"."matches"}" deps)
+    (features_.percent_encoding."${deps."url"."1.7.2"."percent_encoding"}" deps)
+  ];
+
+
+# end
+# utf8-ranges-1.0.1
+
+  crates.utf8_ranges."1.0.1" = deps: { features?(features_.utf8_ranges."1.0.1" deps {}) }: buildRustCrate {
+    crateName = "utf8-ranges";
+    version = "1.0.1";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "1s56ihd2c8ba6191078wivvv59247szaiszrh8x2rxqfsxlfrnpp";
+  };
+  features_.utf8_ranges."1.0.1" = deps: f: updateFeatures f ({
+    utf8_ranges."1.0.1".default = (f.utf8_ranges."1.0.1".default or true);
+  }) [];
+
+
+# end
+# utf8-ranges-1.0.2
+
+  crates.utf8_ranges."1.0.2" = deps: { features?(features_.utf8_ranges."1.0.2" deps {}) }: buildRustCrate {
+    crateName = "utf8-ranges";
+    version = "1.0.2";
+    description = "Convert ranges of Unicode codepoints to UTF-8 byte ranges.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "1my02laqsgnd8ib4dvjgd4rilprqjad6pb9jj9vi67csi5qs2281";
+  };
+  features_.utf8_ranges."1.0.2" = deps: f: updateFeatures f ({
+    utf8_ranges."1.0.2".default = (f.utf8_ranges."1.0.2".default or true);
+  }) [];
+
+
+# end
+# vec_map-0.8.1
+
+  crates.vec_map."0.8.1" = deps: { features?(features_.vec_map."0.8.1" deps {}) }: buildRustCrate {
+    crateName = "vec_map";
+    version = "0.8.1";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" "Jorge Aparicio <japaricious@gmail.com>" "Alexis Beingessner <a.beingessner@gmail.com>" "Brian Anderson <>" "tbu- <>" "Manish Goregaokar <>" "Aaron Turon <aturon@mozilla.com>" "Adolfo Ochagavía <>" "Niko Matsakis <>" "Steven Fackler <>" "Chase Southwood <csouth3@illinois.edu>" "Eduard Burtescu <>" "Florian Wilkens <>" "Félix Raimundo <>" "Tibor Benke <>" "Markus Siemens <markus@m-siemens.de>" "Josh Branchaud <jbranchaud@gmail.com>" "Huon Wilson <dbau.pp@gmail.com>" "Corey Farwell <coref@rwell.org>" "Aaron Liblong <>" "Nick Cameron <nrc@ncameron.org>" "Patrick Walton <pcwalton@mimiga.net>" "Felix S Klock II <>" "Andrew Paseltiner <apaseltiner@gmail.com>" "Sean McArthur <sean.monstar@gmail.com>" "Vadim Petrochenkov <>" ];
+    sha256 = "1jj2nrg8h3l53d43rwkpkikq5a5x15ms4rf1rw92hp5lrqhi8mpi";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."vec_map"."0.8.1" or {});
+  };
+  features_.vec_map."0.8.1" = deps: f: updateFeatures f (rec {
+    vec_map = fold recursiveUpdate {} [
+      { "0.8.1".default = (f.vec_map."0.8.1".default or true); }
+      { "0.8.1".serde =
+        (f.vec_map."0.8.1".serde or false) ||
+        (f.vec_map."0.8.1".eders or false) ||
+        (vec_map."0.8.1"."eders" or false); }
+    ];
+  }) [];
+
+
+# end
+# version_check-0.1.5
+
+  crates.version_check."0.1.5" = deps: { features?(features_.version_check."0.1.5" deps {}) }: buildRustCrate {
+    crateName = "version_check";
+    version = "0.1.5";
+    authors = [ "Sergio Benitez <sb@sergio.bz>" ];
+    sha256 = "1yrx9xblmwbafw2firxyqbj8f771kkzfd24n3q7xgwiqyhi0y8qd";
+  };
+  features_.version_check."0.1.5" = deps: f: updateFeatures f ({
+    version_check."0.1.5".default = (f.version_check."0.1.5".default or true);
+  }) [];
+
+
+# end
+# winapi-0.3.6
+
+  crates.winapi."0.3.6" = deps: { features?(features_.winapi."0.3.6" deps {}) }: buildRustCrate {
+    crateName = "winapi";
+    version = "0.3.6";
+    authors = [ "Peter Atashian <retep998@gmail.com>" ];
+    sha256 = "1d9jfp4cjd82sr1q4dgdlrkvm33zhhav9d7ihr0nivqbncr059m4";
+    build = "build.rs";
+    dependencies = (if kernel == "i686-pc-windows-gnu" then mapFeatures features ([
+      (crates."winapi_i686_pc_windows_gnu"."${deps."winapi"."0.3.6"."winapi_i686_pc_windows_gnu"}" deps)
+    ]) else [])
+      ++ (if kernel == "x86_64-pc-windows-gnu" then mapFeatures features ([
+      (crates."winapi_x86_64_pc_windows_gnu"."${deps."winapi"."0.3.6"."winapi_x86_64_pc_windows_gnu"}" deps)
+    ]) else []);
+    features = mkFeatures (features."winapi"."0.3.6" or {});
+  };
+  features_.winapi."0.3.6" = deps: f: updateFeatures f ({
+    winapi."0.3.6".default = (f.winapi."0.3.6".default or true);
+    winapi_i686_pc_windows_gnu."${deps.winapi."0.3.6".winapi_i686_pc_windows_gnu}".default = true;
+    winapi_x86_64_pc_windows_gnu."${deps.winapi."0.3.6".winapi_x86_64_pc_windows_gnu}".default = true;
+  }) [
+    (features_.winapi_i686_pc_windows_gnu."${deps."winapi"."0.3.6"."winapi_i686_pc_windows_gnu"}" deps)
+    (features_.winapi_x86_64_pc_windows_gnu."${deps."winapi"."0.3.6"."winapi_x86_64_pc_windows_gnu"}" deps)
+  ];
+
+
+# end
+# winapi-i686-pc-windows-gnu-0.4.0
+
+  crates.winapi_i686_pc_windows_gnu."0.4.0" = deps: { features?(features_.winapi_i686_pc_windows_gnu."0.4.0" deps {}) }: buildRustCrate {
+    crateName = "winapi-i686-pc-windows-gnu";
+    version = "0.4.0";
+    authors = [ "Peter Atashian <retep998@gmail.com>" ];
+    sha256 = "05ihkij18r4gamjpxj4gra24514can762imjzlmak5wlzidplzrp";
+    build = "build.rs";
+  };
+  features_.winapi_i686_pc_windows_gnu."0.4.0" = deps: f: updateFeatures f ({
+    winapi_i686_pc_windows_gnu."0.4.0".default = (f.winapi_i686_pc_windows_gnu."0.4.0".default or true);
+  }) [];
+
+
+# end
+# winapi-util-0.1.1
+
+  crates.winapi_util."0.1.1" = deps: { features?(features_.winapi_util."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "winapi-util";
+    version = "0.1.1";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "10madanla73aagbklx6y73r2g2vwq9w8a0qcghbbbpn9vfr6a95f";
+    dependencies = (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."winapi_util"."0.1.1"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.winapi_util."0.1.1" = deps: f: updateFeatures f ({
+    winapi = fold recursiveUpdate {} [
+      { "${deps.winapi_util."0.1.1".winapi}"."consoleapi" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}"."errhandlingapi" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}"."fileapi" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}"."minwindef" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}"."processenv" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}"."std" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}"."winbase" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}"."wincon" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}"."winerror" = true; }
+      { "${deps.winapi_util."0.1.1".winapi}".default = true; }
+    ];
+    winapi_util."0.1.1".default = (f.winapi_util."0.1.1".default or true);
+  }) [
+    (features_.winapi."${deps."winapi_util"."0.1.1"."winapi"}" deps)
+  ];
+
+
+# end
+# winapi-util-0.1.2
+
+  crates.winapi_util."0.1.2" = deps: { features?(features_.winapi_util."0.1.2" deps {}) }: buildRustCrate {
+    crateName = "winapi-util";
+    version = "0.1.2";
+    description = "A dumping ground for high level safe wrappers over winapi.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "07jj7rg7nndd7bqhjin1xphbv8kb5clvhzpqpxkvm3wl84r3mj1h";
+    dependencies = (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."winapi_util"."0.1.2"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.winapi_util."0.1.2" = deps: f: updateFeatures f ({
+    winapi = fold recursiveUpdate {} [
+      { "${deps.winapi_util."0.1.2".winapi}"."consoleapi" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."errhandlingapi" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."fileapi" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."minwindef" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."processenv" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."std" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."winbase" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."wincon" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."winerror" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}"."winnt" = true; }
+      { "${deps.winapi_util."0.1.2".winapi}".default = true; }
+    ];
+    winapi_util."0.1.2".default = (f.winapi_util."0.1.2".default or true);
+  }) [
+    (features_.winapi."${deps."winapi_util"."0.1.2"."winapi"}" deps)
+  ];
+
+
+# end
+# winapi-x86_64-pc-windows-gnu-0.4.0
+
+  crates.winapi_x86_64_pc_windows_gnu."0.4.0" = deps: { features?(features_.winapi_x86_64_pc_windows_gnu."0.4.0" deps {}) }: buildRustCrate {
+    crateName = "winapi-x86_64-pc-windows-gnu";
+    version = "0.4.0";
+    authors = [ "Peter Atashian <retep998@gmail.com>" ];
+    sha256 = "0n1ylmlsb8yg1v583i4xy0qmqg42275flvbc51hdqjjfjcl9vlbj";
+    build = "build.rs";
+  };
+  features_.winapi_x86_64_pc_windows_gnu."0.4.0" = deps: f: updateFeatures f ({
+    winapi_x86_64_pc_windows_gnu."0.4.0".default = (f.winapi_x86_64_pc_windows_gnu."0.4.0".default or true);
+  }) [];
+
+
+# end
+# wincolor-1.0.1
+
+  crates.wincolor."1.0.1" = deps: { features?(features_.wincolor."1.0.1" deps {}) }: buildRustCrate {
+    crateName = "wincolor";
+    version = "1.0.1";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "0gr7v4krmjba7yq16071rfacz42qbapas7mxk5nphjwb042a8gvz";
+    dependencies = mapFeatures features ([
+      (crates."winapi"."${deps."wincolor"."1.0.1"."winapi"}" deps)
+      (crates."winapi_util"."${deps."wincolor"."1.0.1"."winapi_util"}" deps)
+    ]);
+  };
+  features_.wincolor."1.0.1" = deps: f: updateFeatures f ({
+    winapi = fold recursiveUpdate {} [
+      { "${deps.wincolor."1.0.1".winapi}"."minwindef" = true; }
+      { "${deps.wincolor."1.0.1".winapi}"."wincon" = true; }
+      { "${deps.wincolor."1.0.1".winapi}".default = true; }
+    ];
+    winapi_util."${deps.wincolor."1.0.1".winapi_util}".default = true;
+    wincolor."1.0.1".default = (f.wincolor."1.0.1".default or true);
+  }) [
+    (features_.winapi."${deps."wincolor"."1.0.1"."winapi"}" deps)
+    (features_.winapi_util."${deps."wincolor"."1.0.1"."winapi_util"}" deps)
+  ];
+
+
+# end
+# aho-corasick-0.7.3
+
+  crates.aho_corasick."0.7.3" = deps: { features?(features_.aho_corasick."0.7.3" deps {}) }: buildRustCrate {
+    crateName = "aho-corasick";
+    version = "0.7.3";
+    description = "Fast multiple substring searching.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "0dn42fbdms4brigqphxrvzbjd1s4knyjlzky30kgvpnrcl4sqqdv";
+    libName = "aho_corasick";
+    dependencies = mapFeatures features ([
+      (crates."memchr"."${deps."aho_corasick"."0.7.3"."memchr"}" deps)
+    ]);
+    features = mkFeatures (features."aho_corasick"."0.7.3" or {});
+  };
+  features_.aho_corasick."0.7.3" = deps: f: updateFeatures f (rec {
+    aho_corasick = fold recursiveUpdate {} [
+      { "0.7.3"."std" =
+        (f.aho_corasick."0.7.3"."std" or false) ||
+        (f.aho_corasick."0.7.3".default or false) ||
+        (aho_corasick."0.7.3"."default" or false); }
+      { "0.7.3".default = (f.aho_corasick."0.7.3".default or true); }
+    ];
+    memchr = fold recursiveUpdate {} [
+      { "${deps.aho_corasick."0.7.3".memchr}"."use_std" =
+        (f.memchr."${deps.aho_corasick."0.7.3".memchr}"."use_std" or false) ||
+        (aho_corasick."0.7.3"."std" or false) ||
+        (f."aho_corasick"."0.7.3"."std" or false); }
+      { "${deps.aho_corasick."0.7.3".memchr}".default = (f.memchr."${deps.aho_corasick."0.7.3".memchr}".default or false); }
+    ];
+  }) [
+    (features_.memchr."${deps."aho_corasick"."0.7.3"."memchr"}" deps)
+  ];
+
+
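+  # The boolean equations above encode Cargo's feature implications: a feature
+  # is on if requested directly or if some feature that implies it is on (here
+  # `default` implies `std`, and `std` switches on memchr's `use_std`). Note
+  # also `default = (f.memchr."<dep>".default or false)`: memchr's default
+  # features stay off unless asked for, matching `default-features = false` in
+  # the manifest. The same resolution idea as a self-contained sketch with
+  # hypothetical names:
+  #
+  #   let requested = { default = true; };
+  #   in rec {
+  #     default        = requested.default or true;
+  #     std            = (requested.std or false) || default;
+  #     memchr_use_std = std;
+  #   }
+  #   # => { default = true; std = true; memchr_use_std = true; }
+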
+# end
+# backtrace-0.3.15
+
+  crates.backtrace."0.3.15" = deps: { features?(features_.backtrace."0.3.15" deps {}) }: buildRustCrate {
+    crateName = "backtrace";
+    version = "0.3.15";
+    description = "A library to acquire a stack trace (backtrace) at runtime in a Rust program.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" "The Rust Project Developers" ];
+    sha256 = "0qgbc07aq9kfixv29s60xx666lmdpgmf27a78fwjlhnfzhqvkn0p";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."backtrace"."0.3.15"."cfg_if"}" deps)
+      (crates."rustc_demangle"."${deps."backtrace"."0.3.15"."rustc_demangle"}" deps)
+    ])
+      ++ (if (kernel == "linux" || kernel == "darwin") && !(kernel == "fuchsia") && !(kernel == "emscripten") && !(kernel == "darwin") && !(kernel == "ios") then mapFeatures features ([
+    ]
+      ++ (if features.backtrace."0.3.15".backtrace-sys or false then [ (crates.backtrace_sys."${deps."backtrace"."0.3.15".backtrace_sys}" deps) ] else [])) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") || abi == "sgx" then mapFeatures features ([
+      (crates."libc"."${deps."backtrace"."0.3.15"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."backtrace"."0.3.15"."winapi"}" deps)
+    ]) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."autocfg"."${deps."backtrace"."0.3.15"."autocfg"}" deps)
+    ]);
+    features = mkFeatures (features."backtrace"."0.3.15" or {});
+  };
+  features_.backtrace."0.3.15" = deps: f: updateFeatures f (rec {
+    autocfg."${deps.backtrace."0.3.15".autocfg}".default = true;
+    backtrace = fold recursiveUpdate {} [
+      { "0.3.15"."addr2line" =
+        (f.backtrace."0.3.15"."addr2line" or false) ||
+        (f.backtrace."0.3.15".gimli-symbolize or false) ||
+        (backtrace."0.3.15"."gimli-symbolize" or false); }
+      { "0.3.15"."backtrace-sys" =
+        (f.backtrace."0.3.15"."backtrace-sys" or false) ||
+        (f.backtrace."0.3.15".libbacktrace or false) ||
+        (backtrace."0.3.15"."libbacktrace" or false); }
+      { "0.3.15"."coresymbolication" =
+        (f.backtrace."0.3.15"."coresymbolication" or false) ||
+        (f.backtrace."0.3.15".default or false) ||
+        (backtrace."0.3.15"."default" or false); }
+      { "0.3.15"."dbghelp" =
+        (f.backtrace."0.3.15"."dbghelp" or false) ||
+        (f.backtrace."0.3.15".default or false) ||
+        (backtrace."0.3.15"."default" or false); }
+      { "0.3.15"."dladdr" =
+        (f.backtrace."0.3.15"."dladdr" or false) ||
+        (f.backtrace."0.3.15".default or false) ||
+        (backtrace."0.3.15"."default" or false); }
+      { "0.3.15"."findshlibs" =
+        (f.backtrace."0.3.15"."findshlibs" or false) ||
+        (f.backtrace."0.3.15".gimli-symbolize or false) ||
+        (backtrace."0.3.15"."gimli-symbolize" or false); }
+      { "0.3.15"."gimli" =
+        (f.backtrace."0.3.15"."gimli" or false) ||
+        (f.backtrace."0.3.15".gimli-symbolize or false) ||
+        (backtrace."0.3.15"."gimli-symbolize" or false); }
+      { "0.3.15"."libbacktrace" =
+        (f.backtrace."0.3.15"."libbacktrace" or false) ||
+        (f.backtrace."0.3.15".default or false) ||
+        (backtrace."0.3.15"."default" or false); }
+      { "0.3.15"."libunwind" =
+        (f.backtrace."0.3.15"."libunwind" or false) ||
+        (f.backtrace."0.3.15".default or false) ||
+        (backtrace."0.3.15"."default" or false); }
+      { "0.3.15"."memmap" =
+        (f.backtrace."0.3.15"."memmap" or false) ||
+        (f.backtrace."0.3.15".gimli-symbolize or false) ||
+        (backtrace."0.3.15"."gimli-symbolize" or false); }
+      { "0.3.15"."object" =
+        (f.backtrace."0.3.15"."object" or false) ||
+        (f.backtrace."0.3.15".gimli-symbolize or false) ||
+        (backtrace."0.3.15"."gimli-symbolize" or false); }
+      { "0.3.15"."rustc-serialize" =
+        (f.backtrace."0.3.15"."rustc-serialize" or false) ||
+        (f.backtrace."0.3.15".serialize-rustc or false) ||
+        (backtrace."0.3.15"."serialize-rustc" or false); }
+      { "0.3.15"."serde" =
+        (f.backtrace."0.3.15"."serde" or false) ||
+        (f.backtrace."0.3.15".serialize-serde or false) ||
+        (backtrace."0.3.15"."serialize-serde" or false); }
+      { "0.3.15"."serde_derive" =
+        (f.backtrace."0.3.15"."serde_derive" or false) ||
+        (f.backtrace."0.3.15".serialize-serde or false) ||
+        (backtrace."0.3.15"."serialize-serde" or false); }
+      { "0.3.15"."std" =
+        (f.backtrace."0.3.15"."std" or false) ||
+        (f.backtrace."0.3.15".default or false) ||
+        (backtrace."0.3.15"."default" or false) ||
+        (f.backtrace."0.3.15".libbacktrace or false) ||
+        (backtrace."0.3.15"."libbacktrace" or false); }
+      { "0.3.15".default = (f.backtrace."0.3.15".default or true); }
+    ];
+    backtrace_sys."${deps.backtrace."0.3.15".backtrace_sys}".default = true;
+    cfg_if."${deps.backtrace."0.3.15".cfg_if}".default = true;
+    libc."${deps.backtrace."0.3.15".libc}".default = (f.libc."${deps.backtrace."0.3.15".libc}".default or false);
+    rustc_demangle."${deps.backtrace."0.3.15".rustc_demangle}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.backtrace."0.3.15".winapi}"."dbghelp" = true; }
+      { "${deps.backtrace."0.3.15".winapi}"."minwindef" = true; }
+      { "${deps.backtrace."0.3.15".winapi}"."processthreadsapi" = true; }
+      { "${deps.backtrace."0.3.15".winapi}"."winnt" = true; }
+      { "${deps.backtrace."0.3.15".winapi}".default = true; }
+    ];
+  }) [
+    (features_.cfg_if."${deps."backtrace"."0.3.15"."cfg_if"}" deps)
+    (features_.rustc_demangle."${deps."backtrace"."0.3.15"."rustc_demangle"}" deps)
+    (features_.autocfg."${deps."backtrace"."0.3.15"."autocfg"}" deps)
+    (features_.backtrace_sys."${deps."backtrace"."0.3.15"."backtrace_sys"}" deps)
+    (features_.libc."${deps."backtrace"."0.3.15"."libc"}" deps)
+    (features_.winapi."${deps."backtrace"."0.3.15"."winapi"}" deps)
+  ];
+
+
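+  # Dependency lists are gated on the target platform: the generator translates
+  # Cargo `cfg(...)` expressions mechanically into conditionals over `kernel`,
+  # `abi`, and `cpu`. That is why the apparently redundant test above,
+  # `(kernel == "linux" || kernel == "darwin") && ... && !(kernel == "darwin")`,
+  # shows up: it is the literal image of
+  # `cfg(all(unix, not(target_os = "macos"), ...))` with `unix` expanded to
+  # linux-or-darwin. The general shape, as a sketch:
+  #
+  #   dependencies = mapFeatures features ([ /* unconditional */ ])
+  #     ++ (if kernel == "windows"
+  #         then mapFeatures features ([ /* windows-only */ ])
+  #         else []);
+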
+# end
+# bstr-0.1.2
+
+  crates.bstr."0.1.2" = deps: { features?(features_.bstr."0.1.2" deps {}) }: buildRustCrate {
+    crateName = "bstr";
+    version = "0.1.2";
+    description = "A string type that is not required to be valid UTF-8.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "1m30sssr8qghgf5fg17vvlrcr5mbbnv8fixzzfvzk3nan4bxyckf";
+    dependencies = mapFeatures features ([
+      (crates."memchr"."${deps."bstr"."0.1.2"."memchr"}" deps)
+    ]);
+    features = mkFeatures (features."bstr"."0.1.2" or {});
+  };
+  features_.bstr."0.1.2" = deps: f: updateFeatures f (rec {
+    bstr = fold recursiveUpdate {} [
+      { "0.1.2"."lazy_static" =
+        (f.bstr."0.1.2"."lazy_static" or false) ||
+        (f.bstr."0.1.2".unicode or false) ||
+        (bstr."0.1.2"."unicode" or false); }
+      { "0.1.2"."regex-automata" =
+        (f.bstr."0.1.2"."regex-automata" or false) ||
+        (f.bstr."0.1.2".unicode or false) ||
+        (bstr."0.1.2"."unicode" or false); }
+      { "0.1.2"."serde" =
+        (f.bstr."0.1.2"."serde" or false) ||
+        (f.bstr."0.1.2".serde1-nostd or false) ||
+        (bstr."0.1.2"."serde1-nostd" or false); }
+      { "0.1.2"."serde1-nostd" =
+        (f.bstr."0.1.2"."serde1-nostd" or false) ||
+        (f.bstr."0.1.2".serde1 or false) ||
+        (bstr."0.1.2"."serde1" or false); }
+      { "0.1.2"."std" =
+        (f.bstr."0.1.2"."std" or false) ||
+        (f.bstr."0.1.2".default or false) ||
+        (bstr."0.1.2"."default" or false) ||
+        (f.bstr."0.1.2".serde1 or false) ||
+        (bstr."0.1.2"."serde1" or false); }
+      { "0.1.2"."unicode" =
+        (f.bstr."0.1.2"."unicode" or false) ||
+        (f.bstr."0.1.2".default or false) ||
+        (bstr."0.1.2"."default" or false); }
+      { "0.1.2".default = (f.bstr."0.1.2".default or true); }
+    ];
+    memchr = fold recursiveUpdate {} [
+      { "${deps.bstr."0.1.2".memchr}"."use_std" =
+        (f.memchr."${deps.bstr."0.1.2".memchr}"."use_std" or false) ||
+        (bstr."0.1.2"."std" or false) ||
+        (f."bstr"."0.1.2"."std" or false); }
+      { "${deps.bstr."0.1.2".memchr}".default = (f.memchr."${deps.bstr."0.1.2".memchr}".default or false); }
+    ];
+  }) [
+    (features_.memchr."${deps."bstr"."0.1.2"."memchr"}" deps)
+  ];
+
+
+# end
+# build_const-0.2.1
+
+  crates.build_const."0.2.1" = deps: { features?(features_.build_const."0.2.1" deps {}) }: buildRustCrate {
+    crateName = "build_const";
+    version = "0.2.1";
+    description = "library for creating importable constants from build.rs or a script";
+    authors = [ "Garrett Berg <vitiral@gmail.com>" ];
+    sha256 = "15249xzi3qlm72p4glxgavwyq70fx2sp4df6ii0sdlrixrrp77pl";
+    features = mkFeatures (features."build_const"."0.2.1" or {});
+  };
+  features_.build_const."0.2.1" = deps: f: updateFeatures f (rec {
+    build_const = fold recursiveUpdate {} [
+      { "0.2.1"."std" =
+        (f.build_const."0.2.1"."std" or false) ||
+        (f.build_const."0.2.1".default or false) ||
+        (build_const."0.2.1"."default" or false); }
+      { "0.2.1".default = (f.build_const."0.2.1".default or true); }
+    ];
+  }) [];
+
+
+# end
+# byteorder-1.3.1
+
+  crates.byteorder."1.3.1" = deps: { features?(features_.byteorder."1.3.1" deps {}) }: buildRustCrate {
+    crateName = "byteorder";
+    version = "1.3.1";
+    description = "Library for reading/writing numbers in big-endian and little-endian.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "1dd46l7fvmxfq90kh6ip1ghsxzzcdybac8f0mh2jivsdv9vy8k4w";
+    build = "build.rs";
+    features = mkFeatures (features."byteorder"."1.3.1" or {});
+  };
+  features_.byteorder."1.3.1" = deps: f: updateFeatures f (rec {
+    byteorder = fold recursiveUpdate {} [
+      { "1.3.1"."std" =
+        (f.byteorder."1.3.1"."std" or false) ||
+        (f.byteorder."1.3.1".default or false) ||
+        (byteorder."1.3.1"."default" or false); }
+      { "1.3.1".default = (f.byteorder."1.3.1".default or true); }
+    ];
+  }) [];
+
+
+# end
+# bytes-0.4.12
+
+  crates.bytes."0.4.12" = deps: { features?(features_.bytes."0.4.12" deps {}) }: buildRustCrate {
+    crateName = "bytes";
+    version = "0.4.12";
+    description = "Types and traits for working with bytes";
+    authors = [ "Carl Lerche <me@carllerche.com>" ];
+    sha256 = "0cw577vll9qp0h3l1sy24anr5mcnd5j26q9q7nw4f0mddssvfphf";
+    dependencies = mapFeatures features ([
+      (crates."byteorder"."${deps."bytes"."0.4.12"."byteorder"}" deps)
+      (crates."iovec"."${deps."bytes"."0.4.12"."iovec"}" deps)
+    ]);
+    features = mkFeatures (features."bytes"."0.4.12" or {});
+  };
+  features_.bytes."0.4.12" = deps: f: updateFeatures f (rec {
+    byteorder = fold recursiveUpdate {} [
+      { "${deps.bytes."0.4.12".byteorder}"."i128" =
+        (f.byteorder."${deps.bytes."0.4.12".byteorder}"."i128" or false) ||
+        (bytes."0.4.12"."i128" or false) ||
+        (f."bytes"."0.4.12"."i128" or false); }
+      { "${deps.bytes."0.4.12".byteorder}".default = true; }
+    ];
+    bytes."0.4.12".default = (f.bytes."0.4.12".default or true);
+    iovec."${deps.bytes."0.4.12".iovec}".default = true;
+  }) [
+    (features_.byteorder."${deps."bytes"."0.4.12"."byteorder"}" deps)
+    (features_.iovec."${deps."bytes"."0.4.12"."iovec"}" deps)
+  ];
+
+
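+  # A crate can forward one of its own features to a dependency under the same
+  # name, as `i128` is forwarded to byteorder above. Reduced to a sketch, the
+  # forwarding clause reads:
+  #
+  #   { "${deps.bytes."0.4.12".byteorder}"."i128" =
+  #       (f.byteorder."${deps.bytes."0.4.12".byteorder}"."i128" or false) ||
+  #       (f.bytes."0.4.12"."i128" or false); }
+  #   # byteorder's i128 ends up on whenever bytes' i128 is on
+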
+# end
+# bytesize-1.0.0
+
+  crates.bytesize."1.0.0" = deps: { features?(features_.bytesize."1.0.0" deps {}) }: buildRustCrate {
+    crateName = "bytesize";
+    version = "1.0.0";
+    description = "an utility for human-readable bytes representations";
+    authors = [ "Hyunsik Choi <hyunsik.choi@gmail.com>" ];
+    sha256 = "04j5hibh1sskjbifrm5d10vmd1fycfgm10cdfa9hpyir7lbkhbg9";
+    dependencies = mapFeatures features ([]);
+  };
+  features_.bytesize."1.0.0" = deps: f: updateFeatures f ({
+    bytesize."1.0.0".default = (f.bytesize."1.0.0".default or true);
+  }) [];
+
+
+# end
+# cargo-0.35.0
+
+  crates.cargo."0.35.0" = deps: { features?(features_.cargo."0.35.0" deps {}) }: buildRustCrate {
+    crateName = "cargo";
+    version = "0.35.0";
+    description = "Cargo, a package manager for Rust.\n";
+    authors = [ "Yehuda Katz <wycats@gmail.com>" "Carl Lerche <me@carllerche.com>" "Alex Crichton <alex@alexcrichton.com>" ];
+    edition = "2018";
+    sha256 = "17nqb1cr70igaaahk1lr859jaa57p05i1q4smqhqpq1jswwpdsnn";
+    libPath = "src/cargo/lib.rs";
+    crateBin =
+      [{  name = "cargo"; }];
+    dependencies = mapFeatures features ([
+      (crates."atty"."${deps."cargo"."0.35.0"."atty"}" deps)
+      (crates."byteorder"."${deps."cargo"."0.35.0"."byteorder"}" deps)
+      (crates."bytesize"."${deps."cargo"."0.35.0"."bytesize"}" deps)
+      (crates."clap"."${deps."cargo"."0.35.0"."clap"}" deps)
+      (crates."crates_io"."${deps."cargo"."0.35.0"."crates_io"}" deps)
+      (crates."crossbeam_utils"."${deps."cargo"."0.35.0"."crossbeam_utils"}" deps)
+      (crates."crypto_hash"."${deps."cargo"."0.35.0"."crypto_hash"}" deps)
+      (crates."curl"."${deps."cargo"."0.35.0"."curl"}" deps)
+      (crates."curl_sys"."${deps."cargo"."0.35.0"."curl_sys"}" deps)
+      (crates."env_logger"."${deps."cargo"."0.35.0"."env_logger"}" deps)
+      (crates."failure"."${deps."cargo"."0.35.0"."failure"}" deps)
+      (crates."filetime"."${deps."cargo"."0.35.0"."filetime"}" deps)
+      (crates."flate2"."${deps."cargo"."0.35.0"."flate2"}" deps)
+      (crates."fs2"."${deps."cargo"."0.35.0"."fs2"}" deps)
+      (crates."git2"."${deps."cargo"."0.35.0"."git2"}" deps)
+      (crates."git2_curl"."${deps."cargo"."0.35.0"."git2_curl"}" deps)
+      (crates."glob"."${deps."cargo"."0.35.0"."glob"}" deps)
+      (crates."hex"."${deps."cargo"."0.35.0"."hex"}" deps)
+      (crates."home"."${deps."cargo"."0.35.0"."home"}" deps)
+      (crates."ignore"."${deps."cargo"."0.35.0"."ignore"}" deps)
+      (crates."im_rc"."${deps."cargo"."0.35.0"."im_rc"}" deps)
+      (crates."jobserver"."${deps."cargo"."0.35.0"."jobserver"}" deps)
+      (crates."lazy_static"."${deps."cargo"."0.35.0"."lazy_static"}" deps)
+      (crates."lazycell"."${deps."cargo"."0.35.0"."lazycell"}" deps)
+      (crates."libc"."${deps."cargo"."0.35.0"."libc"}" deps)
+      (crates."libgit2_sys"."${deps."cargo"."0.35.0"."libgit2_sys"}" deps)
+      (crates."log"."${deps."cargo"."0.35.0"."log"}" deps)
+      (crates."num_cpus"."${deps."cargo"."0.35.0"."num_cpus"}" deps)
+      (crates."opener"."${deps."cargo"."0.35.0"."opener"}" deps)
+      (crates."rustc_workspace_hack"."${deps."cargo"."0.35.0"."rustc_workspace_hack"}" deps)
+      (crates."rustfix"."${deps."cargo"."0.35.0"."rustfix"}" deps)
+      (crates."same_file"."${deps."cargo"."0.35.0"."same_file"}" deps)
+      (crates."semver"."${deps."cargo"."0.35.0"."semver"}" deps)
+      (crates."serde"."${deps."cargo"."0.35.0"."serde"}" deps)
+      (crates."serde_ignored"."${deps."cargo"."0.35.0"."serde_ignored"}" deps)
+      (crates."serde_json"."${deps."cargo"."0.35.0"."serde_json"}" deps)
+      (crates."shell_escape"."${deps."cargo"."0.35.0"."shell_escape"}" deps)
+      (crates."tar"."${deps."cargo"."0.35.0"."tar"}" deps)
+      (crates."tempfile"."${deps."cargo"."0.35.0"."tempfile"}" deps)
+      (crates."termcolor"."${deps."cargo"."0.35.0"."termcolor"}" deps)
+      (crates."toml"."${deps."cargo"."0.35.0"."toml"}" deps)
+      (crates."unicode_width"."${deps."cargo"."0.35.0"."unicode_width"}" deps)
+      (crates."url"."${deps."cargo"."0.35.0"."url"}" deps)
+      (crates."url_serde"."${deps."cargo"."0.35.0"."url_serde"}" deps)
+    ])
+      ++ (if kernel == "darwin" then mapFeatures features ([
+      (crates."core_foundation"."${deps."cargo"."0.35.0"."core_foundation"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."fwdansi"."${deps."cargo"."0.35.0"."fwdansi"}" deps)
+      (crates."miow"."${deps."cargo"."0.35.0"."miow"}" deps)
+      (crates."winapi"."${deps."cargo"."0.35.0"."winapi"}" deps)
+    ]) else []);
+    features = mkFeatures (features."cargo"."0.35.0" or {});
+  };
+  features_.cargo."0.35.0" = deps: f: updateFeatures f (rec {
+    atty."${deps.cargo."0.35.0".atty}".default = true;
+    byteorder."${deps.cargo."0.35.0".byteorder}".default = true;
+    bytesize."${deps.cargo."0.35.0".bytesize}".default = true;
+    cargo = fold recursiveUpdate {} [
+      { "0.35.0"."pretty_env_logger" =
+        (f.cargo."0.35.0"."pretty_env_logger" or false) ||
+        (f.cargo."0.35.0".pretty-env-logger or false) ||
+        (cargo."0.35.0"."pretty-env-logger" or false); }
+      { "0.35.0".default = (f.cargo."0.35.0".default or true); }
+    ];
+    clap."${deps.cargo."0.35.0".clap}".default = true;
+    core_foundation = fold recursiveUpdate {} [
+      { "${deps.cargo."0.35.0".core_foundation}"."mac_os_10_7_support" = true; }
+      { "${deps.cargo."0.35.0".core_foundation}".default = true; }
+    ];
+    crates_io."${deps.cargo."0.35.0".crates_io}".default = true;
+    crossbeam_utils."${deps.cargo."0.35.0".crossbeam_utils}".default = true;
+    crypto_hash."${deps.cargo."0.35.0".crypto_hash}".default = true;
+    curl = fold recursiveUpdate {} [
+      { "${deps.cargo."0.35.0".curl}"."http2" = true; }
+      { "${deps.cargo."0.35.0".curl}".default = true; }
+    ];
+    curl_sys."${deps.cargo."0.35.0".curl_sys}".default = true;
+    env_logger."${deps.cargo."0.35.0".env_logger}".default = true;
+    failure."${deps.cargo."0.35.0".failure}".default = true;
+    filetime."${deps.cargo."0.35.0".filetime}".default = true;
+    flate2 = fold recursiveUpdate {} [
+      { "${deps.cargo."0.35.0".flate2}"."zlib" = true; }
+      { "${deps.cargo."0.35.0".flate2}".default = true; }
+    ];
+    fs2."${deps.cargo."0.35.0".fs2}".default = true;
+    fwdansi."${deps.cargo."0.35.0".fwdansi}".default = true;
+    git2."${deps.cargo."0.35.0".git2}".default = true;
+    git2_curl."${deps.cargo."0.35.0".git2_curl}".default = true;
+    glob."${deps.cargo."0.35.0".glob}".default = true;
+    hex."${deps.cargo."0.35.0".hex}".default = true;
+    home."${deps.cargo."0.35.0".home}".default = true;
+    ignore."${deps.cargo."0.35.0".ignore}".default = true;
+    im_rc."${deps.cargo."0.35.0".im_rc}".default = true;
+    jobserver."${deps.cargo."0.35.0".jobserver}".default = true;
+    lazy_static."${deps.cargo."0.35.0".lazy_static}".default = true;
+    lazycell."${deps.cargo."0.35.0".lazycell}".default = true;
+    libc."${deps.cargo."0.35.0".libc}".default = true;
+    libgit2_sys."${deps.cargo."0.35.0".libgit2_sys}".default = true;
+    log."${deps.cargo."0.35.0".log}".default = true;
+    miow."${deps.cargo."0.35.0".miow}".default = true;
+    num_cpus."${deps.cargo."0.35.0".num_cpus}".default = true;
+    opener."${deps.cargo."0.35.0".opener}".default = true;
+    rustc_workspace_hack."${deps.cargo."0.35.0".rustc_workspace_hack}".default = true;
+    rustfix."${deps.cargo."0.35.0".rustfix}".default = true;
+    same_file."${deps.cargo."0.35.0".same_file}".default = true;
+    semver = fold recursiveUpdate {} [
+      { "${deps.cargo."0.35.0".semver}"."serde" = true; }
+      { "${deps.cargo."0.35.0".semver}".default = true; }
+    ];
+    serde = fold recursiveUpdate {} [
+      { "${deps.cargo."0.35.0".serde}"."derive" = true; }
+      { "${deps.cargo."0.35.0".serde}".default = true; }
+    ];
+    serde_ignored."${deps.cargo."0.35.0".serde_ignored}".default = true;
+    serde_json = fold recursiveUpdate {} [
+      { "${deps.cargo."0.35.0".serde_json}"."raw_value" = true; }
+      { "${deps.cargo."0.35.0".serde_json}".default = true; }
+    ];
+    shell_escape."${deps.cargo."0.35.0".shell_escape}".default = true;
+    tar."${deps.cargo."0.35.0".tar}".default = (f.tar."${deps.cargo."0.35.0".tar}".default or false);
+    tempfile."${deps.cargo."0.35.0".tempfile}".default = true;
+    termcolor."${deps.cargo."0.35.0".termcolor}".default = true;
+    toml."${deps.cargo."0.35.0".toml}".default = true;
+    unicode_width."${deps.cargo."0.35.0".unicode_width}".default = true;
+    url."${deps.cargo."0.35.0".url}".default = true;
+    url_serde."${deps.cargo."0.35.0".url_serde}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.cargo."0.35.0".winapi}"."basetsd" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."handleapi" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."jobapi" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."jobapi2" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."memoryapi" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."minwindef" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."ntdef" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."ntstatus" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."processenv" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."processthreadsapi" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."psapi" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."synchapi" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."winbase" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."wincon" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."winerror" = true; }
+      { "${deps.cargo."0.35.0".winapi}"."winnt" = true; }
+      { "${deps.cargo."0.35.0".winapi}".default = true; }
+    ];
+  }) [
+    (features_.atty."${deps."cargo"."0.35.0"."atty"}" deps)
+    (features_.byteorder."${deps."cargo"."0.35.0"."byteorder"}" deps)
+    (features_.bytesize."${deps."cargo"."0.35.0"."bytesize"}" deps)
+    (features_.clap."${deps."cargo"."0.35.0"."clap"}" deps)
+    (features_.crates_io."${deps."cargo"."0.35.0"."crates_io"}" deps)
+    (features_.crossbeam_utils."${deps."cargo"."0.35.0"."crossbeam_utils"}" deps)
+    (features_.crypto_hash."${deps."cargo"."0.35.0"."crypto_hash"}" deps)
+    (features_.curl."${deps."cargo"."0.35.0"."curl"}" deps)
+    (features_.curl_sys."${deps."cargo"."0.35.0"."curl_sys"}" deps)
+    (features_.env_logger."${deps."cargo"."0.35.0"."env_logger"}" deps)
+    (features_.failure."${deps."cargo"."0.35.0"."failure"}" deps)
+    (features_.filetime."${deps."cargo"."0.35.0"."filetime"}" deps)
+    (features_.flate2."${deps."cargo"."0.35.0"."flate2"}" deps)
+    (features_.fs2."${deps."cargo"."0.35.0"."fs2"}" deps)
+    (features_.git2."${deps."cargo"."0.35.0"."git2"}" deps)
+    (features_.git2_curl."${deps."cargo"."0.35.0"."git2_curl"}" deps)
+    (features_.glob."${deps."cargo"."0.35.0"."glob"}" deps)
+    (features_.hex."${deps."cargo"."0.35.0"."hex"}" deps)
+    (features_.home."${deps."cargo"."0.35.0"."home"}" deps)
+    (features_.ignore."${deps."cargo"."0.35.0"."ignore"}" deps)
+    (features_.im_rc."${deps."cargo"."0.35.0"."im_rc"}" deps)
+    (features_.jobserver."${deps."cargo"."0.35.0"."jobserver"}" deps)
+    (features_.lazy_static."${deps."cargo"."0.35.0"."lazy_static"}" deps)
+    (features_.lazycell."${deps."cargo"."0.35.0"."lazycell"}" deps)
+    (features_.libc."${deps."cargo"."0.35.0"."libc"}" deps)
+    (features_.libgit2_sys."${deps."cargo"."0.35.0"."libgit2_sys"}" deps)
+    (features_.log."${deps."cargo"."0.35.0"."log"}" deps)
+    (features_.num_cpus."${deps."cargo"."0.35.0"."num_cpus"}" deps)
+    (features_.opener."${deps."cargo"."0.35.0"."opener"}" deps)
+    (features_.rustc_workspace_hack."${deps."cargo"."0.35.0"."rustc_workspace_hack"}" deps)
+    (features_.rustfix."${deps."cargo"."0.35.0"."rustfix"}" deps)
+    (features_.same_file."${deps."cargo"."0.35.0"."same_file"}" deps)
+    (features_.semver."${deps."cargo"."0.35.0"."semver"}" deps)
+    (features_.serde."${deps."cargo"."0.35.0"."serde"}" deps)
+    (features_.serde_ignored."${deps."cargo"."0.35.0"."serde_ignored"}" deps)
+    (features_.serde_json."${deps."cargo"."0.35.0"."serde_json"}" deps)
+    (features_.shell_escape."${deps."cargo"."0.35.0"."shell_escape"}" deps)
+    (features_.tar."${deps."cargo"."0.35.0"."tar"}" deps)
+    (features_.tempfile."${deps."cargo"."0.35.0"."tempfile"}" deps)
+    (features_.termcolor."${deps."cargo"."0.35.0"."termcolor"}" deps)
+    (features_.toml."${deps."cargo"."0.35.0"."toml"}" deps)
+    (features_.unicode_width."${deps."cargo"."0.35.0"."unicode_width"}" deps)
+    (features_.url."${deps."cargo"."0.35.0"."url"}" deps)
+    (features_.url_serde."${deps."cargo"."0.35.0"."url_serde"}" deps)
+    (features_.core_foundation."${deps."cargo"."0.35.0"."core_foundation"}" deps)
+    (features_.fwdansi."${deps."cargo"."0.35.0"."fwdansi"}" deps)
+    (features_.miow."${deps."cargo"."0.35.0"."miow"}" deps)
+    (features_.winapi."${deps."cargo"."0.35.0"."winapi"}" deps)
+  ];
+
+
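+  # The trailing list handed to `updateFeatures` mirrors the dependency list:
+  # each `features_.<dep>` call recurses into that dependency's own resolver,
+  # so evaluating cargo's entry resolves feature sets for its whole closure.
+  # Platform-conditional deps (core_foundation, fwdansi, miow, winapi) are
+  # still listed unconditionally here, since feature resolution is computed
+  # over all potential dependencies. Driving it, with a hypothetical `deps`:
+  #
+  #   let features = features_.cargo."0.35.0" deps { };
+  #   in crates.cargo."0.35.0" deps { inherit features; }
+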
+# end
+# cc-1.0.35
+
+  crates.cc."1.0.35" = deps: { features?(features_.cc."1.0.35" deps {}) }: buildRustCrate {
+    crateName = "cc";
+    version = "1.0.35";
+    description = "A build-time dependency for Cargo build scripts to assist in invoking the native\nC compiler to compile native C code into a static archive to be linked into Rust\ncode.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0dq3d2hgc5r14lk97ajj4mw30fibznjzrl9w745fqhwnq51jp7dj";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."cc"."1.0.35" or {});
+  };
+  features_.cc."1.0.35" = deps: f: updateFeatures f (rec {
+    cc = fold recursiveUpdate {} [
+      { "1.0.35"."rayon" =
+        (f.cc."1.0.35"."rayon" or false) ||
+        (f.cc."1.0.35".parallel or false) ||
+        (cc."1.0.35"."parallel" or false); }
+      { "1.0.35".default = (f.cc."1.0.35".default or true); }
+    ];
+  }) [];
+
+
+# end
+# clap-2.33.0
+
+  crates.clap."2.33.0" = deps: { features?(features_.clap."2.33.0" deps {}) }: buildRustCrate {
+    crateName = "clap";
+    version = "2.33.0";
+    description = "A simple to use, efficient, and full-featured Command Line Argument Parser\n";
+    authors = [ "Kevin K. <kbknapp@gmail.com>" ];
+    sha256 = "054n9ngh6pkknpmd4acgdsp40iw6f5jzq8a4h2b76gnbvk6p5xjh";
+    dependencies = mapFeatures features ([
+      (crates."bitflags"."${deps."clap"."2.33.0"."bitflags"}" deps)
+      (crates."textwrap"."${deps."clap"."2.33.0"."textwrap"}" deps)
+      (crates."unicode_width"."${deps."clap"."2.33.0"."unicode_width"}" deps)
+    ]
+      ++ (if features.clap."2.33.0".atty or false then [ (crates.atty."${deps."clap"."2.33.0".atty}" deps) ] else [])
+      ++ (if features.clap."2.33.0".strsim or false then [ (crates.strsim."${deps."clap"."2.33.0".strsim}" deps) ] else [])
+      ++ (if features.clap."2.33.0".vec_map or false then [ (crates.vec_map."${deps."clap"."2.33.0".vec_map}" deps) ] else []))
+      ++ (if !(kernel == "windows") then mapFeatures features ([
+    ]
+      ++ (if features.clap."2.33.0".ansi_term or false then [ (crates.ansi_term."${deps."clap"."2.33.0".ansi_term}" deps) ] else [])) else []);
+    features = mkFeatures (features."clap"."2.33.0" or {});
+  };
+  features_.clap."2.33.0" = deps: f: updateFeatures f (rec {
+    ansi_term."${deps.clap."2.33.0".ansi_term}".default = true;
+    atty."${deps.clap."2.33.0".atty}".default = true;
+    bitflags."${deps.clap."2.33.0".bitflags}".default = true;
+    clap = fold recursiveUpdate {} [
+      { "2.33.0"."ansi_term" =
+        (f.clap."2.33.0"."ansi_term" or false) ||
+        (f.clap."2.33.0".color or false) ||
+        (clap."2.33.0"."color" or false); }
+      { "2.33.0"."atty" =
+        (f.clap."2.33.0"."atty" or false) ||
+        (f.clap."2.33.0".color or false) ||
+        (clap."2.33.0"."color" or false); }
+      { "2.33.0"."clippy" =
+        (f.clap."2.33.0"."clippy" or false) ||
+        (f.clap."2.33.0".lints or false) ||
+        (clap."2.33.0"."lints" or false); }
+      { "2.33.0"."color" =
+        (f.clap."2.33.0"."color" or false) ||
+        (f.clap."2.33.0".default or false) ||
+        (clap."2.33.0"."default" or false); }
+      { "2.33.0"."strsim" =
+        (f.clap."2.33.0"."strsim" or false) ||
+        (f.clap."2.33.0".suggestions or false) ||
+        (clap."2.33.0"."suggestions" or false); }
+      { "2.33.0"."suggestions" =
+        (f.clap."2.33.0"."suggestions" or false) ||
+        (f.clap."2.33.0".default or false) ||
+        (clap."2.33.0"."default" or false); }
+      { "2.33.0"."term_size" =
+        (f.clap."2.33.0"."term_size" or false) ||
+        (f.clap."2.33.0".wrap_help or false) ||
+        (clap."2.33.0"."wrap_help" or false); }
+      { "2.33.0"."vec_map" =
+        (f.clap."2.33.0"."vec_map" or false) ||
+        (f.clap."2.33.0".default or false) ||
+        (clap."2.33.0"."default" or false); }
+      { "2.33.0"."yaml" =
+        (f.clap."2.33.0"."yaml" or false) ||
+        (f.clap."2.33.0".doc or false) ||
+        (clap."2.33.0"."doc" or false); }
+      { "2.33.0"."yaml-rust" =
+        (f.clap."2.33.0"."yaml-rust" or false) ||
+        (f.clap."2.33.0".yaml or false) ||
+        (clap."2.33.0"."yaml" or false); }
+      { "2.33.0".default = (f.clap."2.33.0".default or true); }
+    ];
+    strsim."${deps.clap."2.33.0".strsim}".default = true;
+    textwrap = fold recursiveUpdate {} [
+      { "${deps.clap."2.33.0".textwrap}"."term_size" =
+        (f.textwrap."${deps.clap."2.33.0".textwrap}"."term_size" or false) ||
+        (clap."2.33.0"."wrap_help" or false) ||
+        (f."clap"."2.33.0"."wrap_help" or false); }
+      { "${deps.clap."2.33.0".textwrap}".default = true; }
+    ];
+    unicode_width."${deps.clap."2.33.0".unicode_width}".default = true;
+    vec_map."${deps.clap."2.33.0".vec_map}".default = true;
+  }) [
+    (features_.atty."${deps."clap"."2.33.0"."atty"}" deps)
+    (features_.bitflags."${deps."clap"."2.33.0"."bitflags"}" deps)
+    (features_.strsim."${deps."clap"."2.33.0"."strsim"}" deps)
+    (features_.textwrap."${deps."clap"."2.33.0"."textwrap"}" deps)
+    (features_.unicode_width."${deps."clap"."2.33.0"."unicode_width"}" deps)
+    (features_.vec_map."${deps."clap"."2.33.0"."vec_map"}" deps)
+    (features_.ansi_term."${deps."clap"."2.33.0"."ansi_term"}" deps)
+  ];
+
+
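+  # Besides platform gates, dependencies can be gated on the crate's own
+  # resolved features, as with atty, strsim, vec_map, and ansi_term above:
+  # the dependency is added only when the matching feature came out true.
+  # The idiom, reduced to a sketch (`baseDeps` is a hypothetical stand-in
+  # for the unconditional part of the list):
+  #
+  #   dependencies = baseDeps
+  #     ++ (if features.clap."2.33.0".atty or false
+  #         then [ (crates.atty."${deps."clap"."2.33.0".atty}" deps) ]
+  #         else []);
+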
+# end
+# commoncrypto-0.2.0
+
+  crates.commoncrypto."0.2.0" = deps: { features?(features_.commoncrypto."0.2.0" deps {}) }: buildRustCrate {
+    crateName = "commoncrypto";
+    version = "0.2.0";
+    description = "Idiomatic Rust wrappers for Mac OS X's CommonCrypto library";
+    authors = [ "Mark Lee" ];
+    sha256 = "1ywgmv5ai4f6yskr3wv3j1wzfsdm9km8j8lm4x4j5ccln5362xdf";
+    dependencies = mapFeatures features ([
+      (crates."commoncrypto_sys"."${deps."commoncrypto"."0.2.0"."commoncrypto_sys"}" deps)
+    ]);
+    features = mkFeatures (features."commoncrypto"."0.2.0" or {});
+  };
+  features_.commoncrypto."0.2.0" = deps: f: updateFeatures f (rec {
+    commoncrypto = fold recursiveUpdate {} [
+      { "0.2.0"."clippy" =
+        (f.commoncrypto."0.2.0"."clippy" or false) ||
+        (f.commoncrypto."0.2.0".lint or false) ||
+        (commoncrypto."0.2.0"."lint" or false); }
+      { "0.2.0".default = (f.commoncrypto."0.2.0".default or true); }
+    ];
+    commoncrypto_sys."${deps.commoncrypto."0.2.0".commoncrypto_sys}".default = true;
+  }) [
+    (features_.commoncrypto_sys."${deps."commoncrypto"."0.2.0"."commoncrypto_sys"}" deps)
+  ];
+
+
+# end
+# commoncrypto-sys-0.2.0
+
+  crates.commoncrypto_sys."0.2.0" = deps: { features?(features_.commoncrypto_sys."0.2.0" deps {}) }: buildRustCrate {
+    crateName = "commoncrypto-sys";
+    version = "0.2.0";
+    description = "FFI bindings to Mac OS X's CommonCrypto library";
+    authors = [ "Mark Lee" ];
+    sha256 = "001i2g7xbfi48r2xjgxwrgjjjf00x9c24vfrs3g6p2q2djhwww4i";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."commoncrypto_sys"."0.2.0"."libc"}" deps)
+    ]);
+    features = mkFeatures (features."commoncrypto_sys"."0.2.0" or {});
+  };
+  features_.commoncrypto_sys."0.2.0" = deps: f: updateFeatures f (rec {
+    commoncrypto_sys = fold recursiveUpdate {} [
+      { "0.2.0"."clippy" =
+        (f.commoncrypto_sys."0.2.0"."clippy" or false) ||
+        (f.commoncrypto_sys."0.2.0".lint or false) ||
+        (commoncrypto_sys."0.2.0"."lint" or false); }
+      { "0.2.0".default = (f.commoncrypto_sys."0.2.0".default or true); }
+    ];
+    libc."${deps.commoncrypto_sys."0.2.0".libc}".default = true;
+  }) [
+    (features_.libc."${deps."commoncrypto_sys"."0.2.0"."libc"}" deps)
+  ];
+
+
+# end
+# core-foundation-0.6.4
+
+  crates.core_foundation."0.6.4" = deps: { features?(features_.core_foundation."0.6.4" deps {}) }: buildRustCrate {
+    crateName = "core-foundation";
+    version = "0.6.4";
+    description = "Bindings to Core Foundation for macOS";
+    authors = [ "The Servo Project Developers" ];
+    sha256 = "1kabsqxh01m6l2b1gz8wgn0d1k6fyczww9kaks0sbmsz5g78ngzx";
+    dependencies = mapFeatures features ([
+      (crates."core_foundation_sys"."${deps."core_foundation"."0.6.4"."core_foundation_sys"}" deps)
+      (crates."libc"."${deps."core_foundation"."0.6.4"."libc"}" deps)
+    ]);
+    features = mkFeatures (features."core_foundation"."0.6.4" or {});
+  };
+  features_.core_foundation."0.6.4" = deps: f: updateFeatures f (rec {
+    core_foundation = fold recursiveUpdate {} [
+      { "0.6.4"."chrono" =
+        (f.core_foundation."0.6.4"."chrono" or false) ||
+        (f.core_foundation."0.6.4".with-chrono or false) ||
+        (core_foundation."0.6.4"."with-chrono" or false); }
+      { "0.6.4"."uuid" =
+        (f.core_foundation."0.6.4"."uuid" or false) ||
+        (f.core_foundation."0.6.4".with-uuid or false) ||
+        (core_foundation."0.6.4"."with-uuid" or false); }
+      { "0.6.4".default = (f.core_foundation."0.6.4".default or true); }
+    ];
+    core_foundation_sys = fold recursiveUpdate {} [
+      { "${deps.core_foundation."0.6.4".core_foundation_sys}"."mac_os_10_7_support" =
+        (f.core_foundation_sys."${deps.core_foundation."0.6.4".core_foundation_sys}"."mac_os_10_7_support" or false) ||
+        (core_foundation."0.6.4"."mac_os_10_7_support" or false) ||
+        (f."core_foundation"."0.6.4"."mac_os_10_7_support" or false); }
+      { "${deps.core_foundation."0.6.4".core_foundation_sys}"."mac_os_10_8_features" =
+        (f.core_foundation_sys."${deps.core_foundation."0.6.4".core_foundation_sys}"."mac_os_10_8_features" or false) ||
+        (core_foundation."0.6.4"."mac_os_10_8_features" or false) ||
+        (f."core_foundation"."0.6.4"."mac_os_10_8_features" or false); }
+      { "${deps.core_foundation."0.6.4".core_foundation_sys}".default = true; }
+    ];
+    libc."${deps.core_foundation."0.6.4".libc}".default = true;
+  }) [
+    (features_.core_foundation_sys."${deps."core_foundation"."0.6.4"."core_foundation_sys"}" deps)
+    (features_.libc."${deps."core_foundation"."0.6.4"."libc"}" deps)
+  ];
+
+
+# end
+# core-foundation-sys-0.6.2
+
+  crates.core_foundation_sys."0.6.2" = deps: { features?(features_.core_foundation_sys."0.6.2" deps {}) }: buildRustCrate {
+    crateName = "core-foundation-sys";
+    version = "0.6.2";
+    description = "Bindings to Core Foundation for OS X";
+    authors = [ "The Servo Project Developers" ];
+    sha256 = "1n2v6wlqkmqwhl7k6y50irx51p37xb0fcm3njbman82gnyq8di2c";
+    build = "build.rs";
+    features = mkFeatures (features."core_foundation_sys"."0.6.2" or {});
+  };
+  features_.core_foundation_sys."0.6.2" = deps: f: updateFeatures f ({
+    core_foundation_sys."0.6.2".default = (f.core_foundation_sys."0.6.2".default or true);
+  }) [];
+
+
+# end
+# crates-io-0.23.0
+
+  crates.crates_io."0.23.0" = deps: { features?(features_.crates_io."0.23.0" deps {}) }: buildRustCrate {
+    crateName = "crates-io";
+    version = "0.23.0";
+    description = "Helpers for interacting with crates.io\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    edition = "2018";
+    sha256 = "0yf7zhlqnyci12rl9x6xrwlcp8slf8ldfn3d72ad6j2hyp2cb59y";
+    libPath = "lib.rs";
+    libName = "crates_io";
+    dependencies = mapFeatures features ([
+      (crates."curl"."${deps."crates_io"."0.23.0"."curl"}" deps)
+      (crates."failure"."${deps."crates_io"."0.23.0"."failure"}" deps)
+      (crates."http"."${deps."crates_io"."0.23.0"."http"}" deps)
+      (crates."serde"."${deps."crates_io"."0.23.0"."serde"}" deps)
+      (crates."serde_derive"."${deps."crates_io"."0.23.0"."serde_derive"}" deps)
+      (crates."serde_json"."${deps."crates_io"."0.23.0"."serde_json"}" deps)
+      (crates."url"."${deps."crates_io"."0.23.0"."url"}" deps)
+    ]);
+  };
+  features_.crates_io."0.23.0" = deps: f: updateFeatures f ({
+    crates_io."0.23.0".default = (f.crates_io."0.23.0".default or true);
+    curl."${deps.crates_io."0.23.0".curl}".default = true;
+    failure."${deps.crates_io."0.23.0".failure}".default = true;
+    http."${deps.crates_io."0.23.0".http}".default = true;
+    serde = fold recursiveUpdate {} [
+      { "${deps.crates_io."0.23.0".serde}"."derive" = true; }
+      { "${deps.crates_io."0.23.0".serde}".default = true; }
+    ];
+    serde_derive."${deps.crates_io."0.23.0".serde_derive}".default = true;
+    serde_json."${deps.crates_io."0.23.0".serde_json}".default = true;
+    url."${deps.crates_io."0.23.0".url}".default = true;
+  }) [
+    (features_.curl."${deps."crates_io"."0.23.0"."curl"}" deps)
+    (features_.failure."${deps."crates_io"."0.23.0"."failure"}" deps)
+    (features_.http."${deps."crates_io"."0.23.0"."http"}" deps)
+    (features_.serde."${deps."crates_io"."0.23.0"."serde"}" deps)
+    (features_.serde_derive."${deps."crates_io"."0.23.0"."serde_derive"}" deps)
+    (features_.serde_json."${deps."crates_io"."0.23.0"."serde_json"}" deps)
+    (features_.url."${deps."crates_io"."0.23.0"."url"}" deps)
+  ];
+
+
+# end
+# crc-1.8.1
+
+  crates.crc."1.8.1" = deps: { features?(features_.crc."1.8.1" deps {}) }: buildRustCrate {
+    crateName = "crc";
+    version = "1.8.1";
+    description = "Rust implementation of CRC(16, 32, 64) with support of various standards";
+    authors = [ "Rui Hu <code@mrhooray.com>" ];
+    sha256 = "00m9jjqrddp3bqyanvyxv0hf6s56bx1wy51vcdcxg4n2jdhg109s";
+
+    buildDependencies = mapFeatures features ([
+      (crates."build_const"."${deps."crc"."1.8.1"."build_const"}" deps)
+    ]);
+    features = mkFeatures (features."crc"."1.8.1" or {});
+  };
+  features_.crc."1.8.1" = deps: f: updateFeatures f (rec {
+    build_const."${deps.crc."1.8.1".build_const}".default = true;
+    crc = fold recursiveUpdate {} [
+      { "1.8.1"."std" =
+        (f.crc."1.8.1"."std" or false) ||
+        (f.crc."1.8.1".default or false) ||
+        (crc."1.8.1"."default" or false); }
+      { "1.8.1".default = (f.crc."1.8.1".default or true); }
+    ];
+  }) [
+    (features_.build_const."${deps."crc"."1.8.1"."build_const"}" deps)
+  ];
+
+
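+  # `buildDependencies` corresponds to Cargo's `[build-dependencies]`:
+  # build_const is compiled for use by crc's `build.rs` at build time and is
+  # not linked into the resulting library. Schematically:
+  #
+  #   dependencies      = [ /* linked into the crate    */ ];
+  #   buildDependencies = [ /* visible to build.rs only */ ];
+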
+# end
+# crc32fast-1.2.0
+
+  crates.crc32fast."1.2.0" = deps: { features?(features_.crc32fast."1.2.0" deps {}) }: buildRustCrate {
+    crateName = "crc32fast";
+    version = "1.2.0";
+    description = "Fast, SIMD-accelerated CRC32 (IEEE) checksum computation";
+    authors = [ "Sam Rijs <srijs@airpost.net>" "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1mx88ndqln6vzg7hjhjp8b7g0qggpqggsjrlsdqrfsrbpdzffcn8";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."crc32fast"."1.2.0"."cfg_if"}" deps)
+    ]);
+    features = mkFeatures (features."crc32fast"."1.2.0" or {});
+  };
+  features_.crc32fast."1.2.0" = deps: f: updateFeatures f (rec {
+    cfg_if."${deps.crc32fast."1.2.0".cfg_if}".default = true;
+    crc32fast = fold recursiveUpdate {} [
+      { "1.2.0"."std" =
+        (f.crc32fast."1.2.0"."std" or false) ||
+        (f.crc32fast."1.2.0".default or false) ||
+        (crc32fast."1.2.0"."default" or false); }
+      { "1.2.0".default = (f.crc32fast."1.2.0".default or true); }
+    ];
+  }) [
+    (features_.cfg_if."${deps."crc32fast"."1.2.0"."cfg_if"}" deps)
+  ];
+
+
+# end
+# crossbeam-channel-0.3.8
+
+  crates.crossbeam_channel."0.3.8" = deps: { features?(features_.crossbeam_channel."0.3.8" deps {}) }: buildRustCrate {
+    crateName = "crossbeam-channel";
+    version = "0.3.8";
+    description = "Multi-producer multi-consumer channels for message passing";
+    authors = [ "The Crossbeam Project Developers" ];
+    sha256 = "0apm8why2qsgr8ykh9x677kc9ml7qp71mvirfkdzdn4c1jyqyyzm";
+    dependencies = mapFeatures features ([
+      (crates."crossbeam_utils"."${deps."crossbeam_channel"."0.3.8"."crossbeam_utils"}" deps)
+      (crates."smallvec"."${deps."crossbeam_channel"."0.3.8"."smallvec"}" deps)
+    ]);
+  };
+  features_.crossbeam_channel."0.3.8" = deps: f: updateFeatures f ({
+    crossbeam_channel."0.3.8".default = (f.crossbeam_channel."0.3.8".default or true);
+    crossbeam_utils."${deps.crossbeam_channel."0.3.8".crossbeam_utils}".default = true;
+    smallvec."${deps.crossbeam_channel."0.3.8".smallvec}".default = true;
+  }) [
+    (features_.crossbeam_utils."${deps."crossbeam_channel"."0.3.8"."crossbeam_utils"}" deps)
+    (features_.smallvec."${deps."crossbeam_channel"."0.3.8"."smallvec"}" deps)
+  ];
+
+
+# end
+# crossbeam-utils-0.6.5
+
+  crates.crossbeam_utils."0.6.5" = deps: { features?(features_.crossbeam_utils."0.6.5" deps {}) }: buildRustCrate {
+    crateName = "crossbeam-utils";
+    version = "0.6.5";
+    description = "Utilities for concurrent programming";
+    authors = [ "The Crossbeam Project Developers" ];
+    sha256 = "1z7wgcl9d22r2x6769r5945rnwf3jqfrrmb16q7kzk292r1d4rdg";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."crossbeam_utils"."0.6.5"."cfg_if"}" deps)
+    ]
+      ++ (if features.crossbeam_utils."0.6.5".lazy_static or false then [ (crates.lazy_static."${deps."crossbeam_utils"."0.6.5".lazy_static}" deps) ] else []));
+    features = mkFeatures (features."crossbeam_utils"."0.6.5" or {});
+  };
+  features_.crossbeam_utils."0.6.5" = deps: f: updateFeatures f (rec {
+    cfg_if."${deps.crossbeam_utils."0.6.5".cfg_if}".default = true;
+    crossbeam_utils = fold recursiveUpdate {} [
+      { "0.6.5"."lazy_static" =
+        (f.crossbeam_utils."0.6.5"."lazy_static" or false) ||
+        (f.crossbeam_utils."0.6.5".std or false) ||
+        (crossbeam_utils."0.6.5"."std" or false); }
+      { "0.6.5"."std" =
+        (f.crossbeam_utils."0.6.5"."std" or false) ||
+        (f.crossbeam_utils."0.6.5".default or false) ||
+        (crossbeam_utils."0.6.5"."default" or false); }
+      { "0.6.5".default = (f.crossbeam_utils."0.6.5".default or true); }
+    ];
+    lazy_static."${deps.crossbeam_utils."0.6.5".lazy_static}".default = true;
+  }) [
+    (features_.cfg_if."${deps."crossbeam_utils"."0.6.5"."cfg_if"}" deps)
+    (features_.lazy_static."${deps."crossbeam_utils"."0.6.5"."lazy_static"}" deps)
+  ];
+
+
+# end
+# crypto-hash-0.3.3
+
+  crates.crypto_hash."0.3.3" = deps: { features?(features_.crypto_hash."0.3.3" deps {}) }: buildRustCrate {
+    crateName = "crypto-hash";
+    version = "0.3.3";
+    description = "A wrapper for OS-level cryptographic hash functions";
+    authors = [ "Mark Lee" ];
+    sha256 = "0ybl3q06snf0p0w5c743yipf1gyhim2z0yqczgdhclfmzgj4gxqy";
+    dependencies = mapFeatures features ([
+      (crates."hex"."${deps."crypto_hash"."0.3.3"."hex"}" deps)
+    ])
+      ++ (if kernel == "darwin" || kernel == "ios" then mapFeatures features ([
+      (crates."commoncrypto"."${deps."crypto_hash"."0.3.3"."commoncrypto"}" deps)
+    ]) else [])
+      ++ (if !(kernel == "windows" || kernel == "darwin" || kernel == "ios") then mapFeatures features ([
+      (crates."openssl"."${deps."crypto_hash"."0.3.3"."openssl"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."crypto_hash"."0.3.3"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.crypto_hash."0.3.3" = deps: f: updateFeatures f ({
+    commoncrypto."${deps.crypto_hash."0.3.3".commoncrypto}".default = true;
+    crypto_hash."0.3.3".default = (f.crypto_hash."0.3.3".default or true);
+    hex."${deps.crypto_hash."0.3.3".hex}".default = true;
+    openssl."${deps.crypto_hash."0.3.3".openssl}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.crypto_hash."0.3.3".winapi}"."minwindef" = true; }
+      { "${deps.crypto_hash."0.3.3".winapi}"."wincrypt" = true; }
+      { "${deps.crypto_hash."0.3.3".winapi}".default = true; }
+    ];
+  }) [
+    (features_.hex."${deps."crypto_hash"."0.3.3"."hex"}" deps)
+    (features_.commoncrypto."${deps."crypto_hash"."0.3.3"."commoncrypto"}" deps)
+    (features_.openssl."${deps."crypto_hash"."0.3.3"."openssl"}" deps)
+    (features_.winapi."${deps."crypto_hash"."0.3.3"."winapi"}" deps)
+  ];
+
+
+# end
+# curl-0.4.21
+
+  crates.curl."0.4.21" = deps: { features?(features_.curl."0.4.21" deps {}) }: buildRustCrate {
+    crateName = "curl";
+    version = "0.4.21";
+    description = "Rust bindings to libcurl for making HTTP requests";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1n13h0scc4s77ryf3w19n3myh4k1ls4bfxrx6y6ffvayjfnh13qy";
+    dependencies = mapFeatures features ([
+      (crates."curl_sys"."${deps."curl"."0.4.21"."curl_sys"}" deps)
+      (crates."libc"."${deps."curl"."0.4.21"."libc"}" deps)
+      (crates."socket2"."${deps."curl"."0.4.21"."socket2"}" deps)
+    ])
+      ++ (if (kernel == "linux" || kernel == "darwin") && !(kernel == "darwin") then mapFeatures features ([
+    ]
+      ++ (if features.curl."0.4.21".openssl-probe or false then [ (crates.openssl_probe."${deps."curl"."0.4.21".openssl_probe}" deps) ] else [])
+      ++ (if features.curl."0.4.21".openssl-sys or false then [ (crates.openssl_sys."${deps."curl"."0.4.21".openssl_sys}" deps) ] else [])) else [])
+      ++ (if abi == "msvc" then mapFeatures features ([
+      (crates."kernel32_sys"."${deps."curl"."0.4.21"."kernel32_sys"}" deps)
+      (crates."schannel"."${deps."curl"."0.4.21"."schannel"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."curl"."0.4.21"."winapi"}" deps)
+    ]) else []);
+    features = mkFeatures (features."curl"."0.4.21" or {});
+  };
+  features_.curl."0.4.21" = deps: f: updateFeatures f (rec {
+    curl = fold recursiveUpdate {} [
+      { "0.4.21"."openssl-probe" =
+        (f.curl."0.4.21"."openssl-probe" or false) ||
+        (f.curl."0.4.21".ssl or false) ||
+        (curl."0.4.21"."ssl" or false); }
+      { "0.4.21"."openssl-sys" =
+        (f.curl."0.4.21"."openssl-sys" or false) ||
+        (f.curl."0.4.21".ssl or false) ||
+        (curl."0.4.21"."ssl" or false); }
+      { "0.4.21"."ssl" =
+        (f.curl."0.4.21"."ssl" or false) ||
+        (f.curl."0.4.21".default or false) ||
+        (curl."0.4.21"."default" or false); }
+      { "0.4.21".default = (f.curl."0.4.21".default or true); }
+    ];
+    curl_sys = fold recursiveUpdate {} [
+      { "${deps.curl."0.4.21".curl_sys}"."force-system-lib-on-osx" =
+        (f.curl_sys."${deps.curl."0.4.21".curl_sys}"."force-system-lib-on-osx" or false) ||
+        (curl."0.4.21"."force-system-lib-on-osx" or false) ||
+        (f."curl"."0.4.21"."force-system-lib-on-osx" or false); }
+      { "${deps.curl."0.4.21".curl_sys}"."http2" =
+        (f.curl_sys."${deps.curl."0.4.21".curl_sys}"."http2" or false) ||
+        (curl."0.4.21"."http2" or false) ||
+        (f."curl"."0.4.21"."http2" or false); }
+      { "${deps.curl."0.4.21".curl_sys}"."ssl" =
+        (f.curl_sys."${deps.curl."0.4.21".curl_sys}"."ssl" or false) ||
+        (curl."0.4.21"."ssl" or false) ||
+        (f."curl"."0.4.21"."ssl" or false); }
+      { "${deps.curl."0.4.21".curl_sys}"."static-curl" =
+        (f.curl_sys."${deps.curl."0.4.21".curl_sys}"."static-curl" or false) ||
+        (curl."0.4.21"."static-curl" or false) ||
+        (f."curl"."0.4.21"."static-curl" or false); }
+      { "${deps.curl."0.4.21".curl_sys}"."static-ssl" =
+        (f.curl_sys."${deps.curl."0.4.21".curl_sys}"."static-ssl" or false) ||
+        (curl."0.4.21"."static-ssl" or false) ||
+        (f."curl"."0.4.21"."static-ssl" or false); }
+      { "${deps.curl."0.4.21".curl_sys}".default = (f.curl_sys."${deps.curl."0.4.21".curl_sys}".default or false); }
+    ];
+    kernel32_sys."${deps.curl."0.4.21".kernel32_sys}".default = true;
+    libc."${deps.curl."0.4.21".libc}".default = true;
+    openssl_probe."${deps.curl."0.4.21".openssl_probe}".default = true;
+    openssl_sys."${deps.curl."0.4.21".openssl_sys}".default = true;
+    schannel."${deps.curl."0.4.21".schannel}".default = true;
+    socket2."${deps.curl."0.4.21".socket2}".default = true;
+    winapi."${deps.curl."0.4.21".winapi}".default = true;
+  }) [
+    (features_.curl_sys."${deps."curl"."0.4.21"."curl_sys"}" deps)
+    (features_.libc."${deps."curl"."0.4.21"."libc"}" deps)
+    (features_.socket2."${deps."curl"."0.4.21"."socket2"}" deps)
+    (features_.openssl_probe."${deps."curl"."0.4.21"."openssl_probe"}" deps)
+    (features_.openssl_sys."${deps."curl"."0.4.21"."openssl_sys"}" deps)
+    (features_.kernel32_sys."${deps."curl"."0.4.21"."kernel32_sys"}" deps)
+    (features_.schannel."${deps."curl"."0.4.21"."schannel"}" deps)
+    (features_.winapi."${deps."curl"."0.4.21"."winapi"}" deps)
+  ];
+
+
+# end
+# curl-sys-0.4.18
+
+  crates.curl_sys."0.4.18" = deps: { features?(features_.curl_sys."0.4.18" deps {}) }: buildRustCrate {
+    crateName = "curl-sys";
+    version = "0.4.18";
+    description = "Native bindings to the libcurl library";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1y9qglyirlxhp62gh5vlzpq67jw7cyccvsajvmj30dv1sn7cn3vk";
+    libPath = "lib.rs";
+    libName = "curl_sys";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."curl_sys"."0.4.18"."libc"}" deps)
+      (crates."libz_sys"."${deps."curl_sys"."0.4.18"."libz_sys"}" deps)
+    ]
+      ++ (if features.curl_sys."0.4.18".libnghttp2-sys or false then [ (crates.libnghttp2_sys."${deps."curl_sys"."0.4.18".libnghttp2_sys}" deps) ] else []))
+      ++ (if (kernel == "linux" || kernel == "darwin") && !(kernel == "darwin") then mapFeatures features ([
+    ]
+      ++ (if features.curl_sys."0.4.18".openssl-sys or false then [ (crates.openssl_sys."${deps."curl_sys"."0.4.18".openssl_sys}" deps) ] else [])) else [])
+      ++ (if abi == "msvc" then mapFeatures features ([
+]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."curl_sys"."0.4.18"."winapi"}" deps)
+    ]) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."curl_sys"."0.4.18"."cc"}" deps)
+      (crates."pkg_config"."${deps."curl_sys"."0.4.18"."pkg_config"}" deps)
+    ]);
+    features = mkFeatures (features."curl_sys"."0.4.18" or {});
+  };
+  features_.curl_sys."0.4.18" = deps: f: updateFeatures f (rec {
+    cc."${deps.curl_sys."0.4.18".cc}".default = true;
+    curl_sys = fold recursiveUpdate {} [
+      { "0.4.18"."libnghttp2-sys" =
+        (f.curl_sys."0.4.18"."libnghttp2-sys" or false) ||
+        (f.curl_sys."0.4.18".http2 or false) ||
+        (curl_sys."0.4.18"."http2" or false); }
+      { "0.4.18"."openssl-sys" =
+        (f.curl_sys."0.4.18"."openssl-sys" or false) ||
+        (f.curl_sys."0.4.18".ssl or false) ||
+        (curl_sys."0.4.18"."ssl" or false); }
+      { "0.4.18"."ssl" =
+        (f.curl_sys."0.4.18"."ssl" or false) ||
+        (f.curl_sys."0.4.18".default or false) ||
+        (curl_sys."0.4.18"."default" or false); }
+      { "0.4.18".default = (f.curl_sys."0.4.18".default or true); }
+    ];
+    libc."${deps.curl_sys."0.4.18".libc}".default = true;
+    libnghttp2_sys."${deps.curl_sys."0.4.18".libnghttp2_sys}".default = true;
+    libz_sys."${deps.curl_sys."0.4.18".libz_sys}".default = true;
+    openssl_sys."${deps.curl_sys."0.4.18".openssl_sys}".default = true;
+    pkg_config."${deps.curl_sys."0.4.18".pkg_config}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.curl_sys."0.4.18".winapi}"."winsock2" = true; }
+      { "${deps.curl_sys."0.4.18".winapi}"."ws2def" = true; }
+      { "${deps.curl_sys."0.4.18".winapi}".default = true; }
+    ];
+  }) [
+    (features_.libc."${deps."curl_sys"."0.4.18"."libc"}" deps)
+    (features_.libnghttp2_sys."${deps."curl_sys"."0.4.18"."libnghttp2_sys"}" deps)
+    (features_.libz_sys."${deps."curl_sys"."0.4.18"."libz_sys"}" deps)
+    (features_.cc."${deps."curl_sys"."0.4.18"."cc"}" deps)
+    (features_.pkg_config."${deps."curl_sys"."0.4.18"."pkg_config"}" deps)
+    (features_.openssl_sys."${deps."curl_sys"."0.4.18"."openssl_sys"}" deps)
+    (features_.winapi."${deps."curl_sys"."0.4.18"."winapi"}" deps)
+  ];
+
+
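+  # `fold recursiveUpdate {}` (presumably bound from nixpkgs `lib` in this
+  # file's preamble) deep-merges the per-feature fragments, so repeated keys
+  # under one crate version accumulate instead of overwriting. A minimal
+  # standalone example (the version string is illustrative):
+  #
+  #   fold recursiveUpdate { } [
+  #     { winapi."0.3.7".winsock2 = true; }
+  #     { winapi."0.3.7".ws2def   = true; }
+  #   ]
+  #   # => { winapi = { "0.3.7" = { winsock2 = true; ws2def = true; }; }; }
+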
+# end
+# docopt-1.1.0
+
+  crates.docopt."1.1.0" = deps: { features?(features_.docopt."1.1.0" deps {}) }: buildRustCrate {
+    crateName = "docopt";
+    version = "1.1.0";
+    description = "Command line argument parsing.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    edition = "2018";
+    sha256 = "1xjvfw8398qcxwhdmak1bw2j6zn125ch24dmrmghv50vnlbb997x";
+    crateBin =
+      [{  name = "docopt-wordlist";  path = "src/wordlist.rs"; }];
+    dependencies = mapFeatures features ([
+      (crates."lazy_static"."${deps."docopt"."1.1.0"."lazy_static"}" deps)
+      (crates."regex"."${deps."docopt"."1.1.0"."regex"}" deps)
+      (crates."serde"."${deps."docopt"."1.1.0"."serde"}" deps)
+      (crates."strsim"."${deps."docopt"."1.1.0"."strsim"}" deps)
+    ]);
+  };
+  features_.docopt."1.1.0" = deps: f: updateFeatures f ({
+    docopt."1.1.0".default = (f.docopt."1.1.0".default or true);
+    lazy_static."${deps.docopt."1.1.0".lazy_static}".default = true;
+    regex."${deps.docopt."1.1.0".regex}".default = true;
+    serde = fold recursiveUpdate {} [
+      { "${deps.docopt."1.1.0".serde}"."derive" = true; }
+      { "${deps.docopt."1.1.0".serde}".default = true; }
+    ];
+    strsim."${deps.docopt."1.1.0".strsim}".default = true;
+  }) [
+    (features_.lazy_static."${deps."docopt"."1.1.0"."lazy_static"}" deps)
+    (features_.regex."${deps."docopt"."1.1.0"."regex"}" deps)
+    (features_.serde."${deps."docopt"."1.1.0"."serde"}" deps)
+    (features_.strsim."${deps."docopt"."1.1.0"."strsim"}" deps)
+  ];
+
+
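+  # `crateBin` mirrors Cargo `[[bin]]` targets: each entry names an executable
+  # to build, with `path` pointing at its source file. When `path` is omitted
+  # (as in the cargo entry earlier), the conventional location is presumably
+  # assumed. Sketch:
+  #
+  #   crateBin = [ { name = "docopt-wordlist"; path = "src/wordlist.rs"; } ];
+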
+# end
+# either-1.5.2
+
+  crates.either."1.5.2" = deps: { features?(features_.either."1.5.2" deps {}) }: buildRustCrate {
+    crateName = "either";
+    version = "1.5.2";
+    description = "The enum `Either` with variants `Left` and `Right` is a general purpose sum type with two cases.\n";
+    authors = [ "bluss" ];
+    sha256 = "1zqq1057c51f53ga4p9l4dd8ax6md27h1xjrjp2plkvml5iymks5";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."either"."1.5.2" or {});
+  };
+  features_.either."1.5.2" = deps: f: updateFeatures f (rec {
+    either = fold recursiveUpdate {} [
+      { "1.5.2"."use_std" =
+        (f.either."1.5.2"."use_std" or false) ||
+        (f.either."1.5.2".default or false) ||
+        (either."1.5.2"."default" or false); }
+      { "1.5.2".default = (f.either."1.5.2".default or true); }
+    ];
+  }) [];
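+
+  # How the `or`-chain above resolves: `f.either."1.5.2"."use_std" or false`
+  # falls back to false when the flag is absent, so "use_std" ends up enabled
+  # whenever it is set directly or implied by "default". A repl-pasteable
+  # sketch of just that fallback logic, using a hypothetical flag set `f`:
+  #
+  #   let f = { either."1.5.2".default = true; };
+  #   in (f.either."1.5.2"."use_std" or false)
+  #      || (f.either."1.5.2".default or false)    # => true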
+
+
+# end
+# filetime-0.2.4
+
+  crates.filetime."0.2.4" = deps: { features?(features_.filetime."0.2.4" deps {}) }: buildRustCrate {
+    crateName = "filetime";
+    version = "0.2.4";
+    description = "Platform-agnostic accessors of timestamps in File metadata\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1lsc0qjihr8y56rlzdcldzr0nbljm8qqi691msgwhy6wrkawwx5d";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."filetime"."0.2.4"."cfg_if"}" deps)
+    ])
+      ++ (if kernel == "redox" then mapFeatures features ([
+      (crates."redox_syscall"."${deps."filetime"."0.2.4"."redox_syscall"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."filetime"."0.2.4"."libc"}" deps)
+    ]) else []);
+  };
+  features_.filetime."0.2.4" = deps: f: updateFeatures f ({
+    cfg_if."${deps.filetime."0.2.4".cfg_if}".default = true;
+    filetime."0.2.4".default = (f.filetime."0.2.4".default or true);
+    libc."${deps.filetime."0.2.4".libc}".default = true;
+    redox_syscall."${deps.filetime."0.2.4".redox_syscall}".default = true;
+  }) [
+    (features_.cfg_if."${deps."filetime"."0.2.4"."cfg_if"}" deps)
+    (features_.redox_syscall."${deps."filetime"."0.2.4"."redox_syscall"}" deps)
+    (features_.libc."${deps."filetime"."0.2.4"."libc"}" deps)
+  ];
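+
+  # The dependency list above shows the per-target pattern used throughout
+  # this file: a common set plus extras guarded by `kernel` (and, elsewhere,
+  # `cpu`/`abi`), which are presumably bound earlier in this generated file
+  # from the Nix target platform. Schematically:
+  #
+  #   dependencies = common
+  #     ++ (if kernel == "redox" then [ redox_syscall ] else [])
+  #     ++ (if kernel == "linux" || kernel == "darwin" then [ libc ] else []);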
+
+
+# end
+# flate2-1.0.7
+
+  crates.flate2."1.0.7" = deps: { features?(features_.flate2."1.0.7" deps {}) }: buildRustCrate {
+    crateName = "flate2";
+    version = "1.0.7";
+    description = "Bindings to miniz.c for DEFLATE compression and decompression exposed as\nReader/Writer streams. Contains bindings for zlib, deflate, and gzip-based\nstreams.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "012vi948sap09hm1nmd228fqn7i5kp6wfb9zlz0m8ysq5if1s9mc";
+    dependencies = mapFeatures features ([
+      (crates."crc32fast"."${deps."flate2"."1.0.7"."crc32fast"}" deps)
+      (crates."libc"."${deps."flate2"."1.0.7"."libc"}" deps)
+    ]
+      ++ (if features.flate2."1.0.7".libz-sys or false then [ (crates.libz_sys."${deps."flate2"."1.0.7".libz_sys}" deps) ] else [])
+      ++ (if features.flate2."1.0.7".miniz-sys or false then [ (crates.miniz_sys."${deps."flate2"."1.0.7".miniz_sys}" deps) ] else [])
+      ++ (if features.flate2."1.0.7".miniz_oxide_c_api or false then [ (crates.miniz_oxide_c_api."${deps."flate2"."1.0.7".miniz_oxide_c_api}" deps) ] else []))
+      ++ (if cpu == "wasm32" && !(kernel == "emscripten") then mapFeatures features ([
+      (crates."miniz_oxide_c_api"."${deps."flate2"."1.0.7"."miniz_oxide_c_api"}" deps)
+    ]) else []);
+    features = mkFeatures (features."flate2"."1.0.7" or {});
+  };
+  features_.flate2."1.0.7" = deps: f: updateFeatures f (rec {
+    crc32fast."${deps.flate2."1.0.7".crc32fast}".default = true;
+    flate2 = fold recursiveUpdate {} [
+      { "1.0.7"."futures" =
+        (f.flate2."1.0.7"."futures" or false) ||
+        (f.flate2."1.0.7".tokio or false) ||
+        (flate2."1.0.7"."tokio" or false); }
+      { "1.0.7"."libz-sys" =
+        (f.flate2."1.0.7"."libz-sys" or false) ||
+        (f.flate2."1.0.7".zlib or false) ||
+        (flate2."1.0.7"."zlib" or false); }
+      { "1.0.7"."miniz-sys" =
+        (f.flate2."1.0.7"."miniz-sys" or false) ||
+        (f.flate2."1.0.7".default or false) ||
+        (flate2."1.0.7"."default" or false); }
+      { "1.0.7"."miniz_oxide_c_api" =
+        (f.flate2."1.0.7"."miniz_oxide_c_api" or false) ||
+        (f.flate2."1.0.7".rust_backend or false) ||
+        (flate2."1.0.7"."rust_backend" or false); }
+      { "1.0.7"."tokio-io" =
+        (f.flate2."1.0.7"."tokio-io" or false) ||
+        (f.flate2."1.0.7".tokio or false) ||
+        (flate2."1.0.7"."tokio" or false); }
+      { "1.0.7".default = (f.flate2."1.0.7".default or true); }
+    ];
+    libc."${deps.flate2."1.0.7".libc}".default = true;
+    libz_sys."${deps.flate2."1.0.7".libz_sys}".default = true;
+    miniz_oxide_c_api = fold recursiveUpdate {} [
+      { "${deps.flate2."1.0.7".miniz_oxide_c_api}"."no_c_export" =
+        (f.miniz_oxide_c_api."${deps.flate2."1.0.7".miniz_oxide_c_api}"."no_c_export" or false) ||
+        true ||
+        true; }
+      { "${deps.flate2."1.0.7".miniz_oxide_c_api}".default = true; }
+    ];
+    miniz_sys."${deps.flate2."1.0.7".miniz_sys}".default = true;
+  }) [
+    (features_.crc32fast."${deps."flate2"."1.0.7"."crc32fast"}" deps)
+    (features_.libc."${deps."flate2"."1.0.7"."libc"}" deps)
+    (features_.libz_sys."${deps."flate2"."1.0.7"."libz_sys"}" deps)
+    (features_.miniz_sys."${deps."flate2"."1.0.7"."miniz_sys"}" deps)
+    (features_.miniz_oxide_c_api."${deps."flate2"."1.0.7"."miniz_oxide_c_api"}" deps)
+  ];
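+
+  # flate2 adds Cargo-style optional dependencies on top of that: crates such
+  # as libz-sys and miniz-sys are appended to `dependencies` only when the
+  # matching flag in the resolved feature set is true, so the dependency
+  # graph itself is a function of the requested features. Schematically:
+  #
+  #   ++ (if features.flate2."1.0.7".libz-sys or false
+  #       then [ (crates.libz_sys."${deps."flate2"."1.0.7".libz_sys}" deps) ]
+  #       else [])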
+
+
+# end
+# fnv-1.0.6
+
+  crates.fnv."1.0.6" = deps: { features?(features_.fnv."1.0.6" deps {}) }: buildRustCrate {
+    crateName = "fnv";
+    version = "1.0.6";
+    description = "Fowler–Noll–Vo hash function";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "128mlh23y3gg6ag5h8iiqlcbl59smisdzraqy88ldrf75kbw27ip";
+    libPath = "lib.rs";
+  };
+  features_.fnv."1.0.6" = deps: f: updateFeatures f ({
+    fnv."1.0.6".default = (f.fnv."1.0.6".default or true);
+  }) [];
+
+
+# end
+# foreign-types-0.3.2
+
+  crates.foreign_types."0.3.2" = deps: { features?(features_.foreign_types."0.3.2" deps {}) }: buildRustCrate {
+    crateName = "foreign-types";
+    version = "0.3.2";
+    description = "A framework for Rust wrappers over C APIs";
+    authors = [ "Steven Fackler <sfackler@gmail.com>" ];
+    sha256 = "105n8sp2djb1s5lzrw04p7ss3dchr5qa3canmynx396nh3vwm2p8";
+    dependencies = mapFeatures features ([
+      (crates."foreign_types_shared"."${deps."foreign_types"."0.3.2"."foreign_types_shared"}" deps)
+    ]);
+  };
+  features_.foreign_types."0.3.2" = deps: f: updateFeatures f ({
+    foreign_types."0.3.2".default = (f.foreign_types."0.3.2".default or true);
+    foreign_types_shared."${deps.foreign_types."0.3.2".foreign_types_shared}".default = true;
+  }) [
+    (features_.foreign_types_shared."${deps."foreign_types"."0.3.2"."foreign_types_shared"}" deps)
+  ];
+
+
+# end
+# foreign-types-shared-0.1.1
+
+  crates.foreign_types_shared."0.1.1" = deps: { features?(features_.foreign_types_shared."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "foreign-types-shared";
+    version = "0.1.1";
+    description = "An internal crate used by foreign-types";
+    authors = [ "Steven Fackler <sfackler@gmail.com>" ];
+    sha256 = "0b6cnvqbflws8dxywk4589vgbz80049lz4x1g9dfy4s1ppd3g4z5";
+  };
+  features_.foreign_types_shared."0.1.1" = deps: f: updateFeatures f ({
+    foreign_types_shared."0.1.1".default = (f.foreign_types_shared."0.1.1".default or true);
+  }) [];
+
+
+# end
+# fs2-0.4.3
+
+  crates.fs2."0.4.3" = deps: { features?(features_.fs2."0.4.3" deps {}) }: buildRustCrate {
+    crateName = "fs2";
+    version = "0.4.3";
+    description = "Cross-platform file locks and file duplication.";
+    authors = [ "Dan Burkert <dan@danburkert.com>" ];
+    sha256 = "1crj36rhhpk3qby9yj7r77w7sld0mzab2yicmphbdkfymbmp3ldp";
+    dependencies = (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."fs2"."0.4.3"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."fs2"."0.4.3"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.fs2."0.4.3" = deps: f: updateFeatures f ({
+    fs2."0.4.3".default = (f.fs2."0.4.3".default or true);
+    libc."${deps.fs2."0.4.3".libc}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.fs2."0.4.3".winapi}"."fileapi" = true; }
+      { "${deps.fs2."0.4.3".winapi}"."handleapi" = true; }
+      { "${deps.fs2."0.4.3".winapi}"."processthreadsapi" = true; }
+      { "${deps.fs2."0.4.3".winapi}"."std" = true; }
+      { "${deps.fs2."0.4.3".winapi}"."winbase" = true; }
+      { "${deps.fs2."0.4.3".winapi}"."winerror" = true; }
+      { "${deps.fs2."0.4.3".winapi}".default = true; }
+    ];
+  }) [
+    (features_.libc."${deps."fs2"."0.4.3"."libc"}" deps)
+    (features_.winapi."${deps."fs2"."0.4.3"."winapi"}" deps)
+  ];
+
+
+# end
+# fwdansi-1.0.1
+
+  crates.fwdansi."1.0.1" = deps: { features?(features_.fwdansi."1.0.1" deps {}) }: buildRustCrate {
+    crateName = "fwdansi";
+    version = "1.0.1";
+    description = "Forwards a byte string with ANSI escape code to a termcolor terminal";
+    authors = [ "kennytm <kennytm@gmail.com>" ];
+    sha256 = "00mzclq1wx55p6x5xx4yhpj70vsrivk2w1wbzq8bnf6xnl2km0xn";
+    dependencies = mapFeatures features ([
+      (crates."memchr"."${deps."fwdansi"."1.0.1"."memchr"}" deps)
+      (crates."termcolor"."${deps."fwdansi"."1.0.1"."termcolor"}" deps)
+    ]);
+  };
+  features_.fwdansi."1.0.1" = deps: f: updateFeatures f ({
+    fwdansi."1.0.1".default = (f.fwdansi."1.0.1".default or true);
+    memchr."${deps.fwdansi."1.0.1".memchr}".default = true;
+    termcolor."${deps.fwdansi."1.0.1".termcolor}".default = true;
+  }) [
+    (features_.memchr."${deps."fwdansi"."1.0.1"."memchr"}" deps)
+    (features_.termcolor."${deps."fwdansi"."1.0.1"."termcolor"}" deps)
+  ];
+
+
+# end
+# git2-0.8.0
+
+  crates.git2."0.8.0" = deps: { features?(features_.git2."0.8.0" deps {}) }: buildRustCrate {
+    crateName = "git2";
+    version = "0.8.0";
+    description = "Bindings to libgit2 for interoperating with git repositories. This library is\nboth threadsafe and memory safe and allows both reading and writing git\nrepositories.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0nkzglhq7lrdzv23jakygv6h5kks2mdr7xh73chnr7bqdc36mi43";
+    dependencies = mapFeatures features ([
+      (crates."bitflags"."${deps."git2"."0.8.0"."bitflags"}" deps)
+      (crates."libc"."${deps."git2"."0.8.0"."libc"}" deps)
+      (crates."libgit2_sys"."${deps."git2"."0.8.0"."libgit2_sys"}" deps)
+      (crates."log"."${deps."git2"."0.8.0"."log"}" deps)
+      (crates."url"."${deps."git2"."0.8.0"."url"}" deps)
+    ])
+      ++ (if (kernel == "linux" || kernel == "darwin") && !(kernel == "darwin") then mapFeatures features ([
+    ]
+      ++ (if features.git2."0.8.0".openssl-probe or false then [ (crates.openssl_probe."${deps."git2"."0.8.0".openssl_probe}" deps) ] else [])
+      ++ (if features.git2."0.8.0".openssl-sys or false then [ (crates.openssl_sys."${deps."git2"."0.8.0".openssl_sys}" deps) ] else [])) else []);
+    features = mkFeatures (features."git2"."0.8.0" or {});
+  };
+  features_.git2."0.8.0" = deps: f: updateFeatures f (rec {
+    bitflags."${deps.git2."0.8.0".bitflags}".default = true;
+    git2 = fold recursiveUpdate {} [
+      { "0.8.0"."curl" =
+        (f.git2."0.8.0"."curl" or false) ||
+        (f.git2."0.8.0".default or false) ||
+        (git2."0.8.0"."default" or false); }
+      { "0.8.0"."https" =
+        (f.git2."0.8.0"."https" or false) ||
+        (f.git2."0.8.0".default or false) ||
+        (git2."0.8.0"."default" or false); }
+      { "0.8.0"."openssl-probe" =
+        (f.git2."0.8.0"."openssl-probe" or false) ||
+        (f.git2."0.8.0".https or false) ||
+        (git2."0.8.0"."https" or false); }
+      { "0.8.0"."openssl-sys" =
+        (f.git2."0.8.0"."openssl-sys" or false) ||
+        (f.git2."0.8.0".https or false) ||
+        (git2."0.8.0"."https" or false); }
+      { "0.8.0"."ssh" =
+        (f.git2."0.8.0"."ssh" or false) ||
+        (f.git2."0.8.0".default or false) ||
+        (git2."0.8.0"."default" or false); }
+      { "0.8.0"."ssh_key_from_memory" =
+        (f.git2."0.8.0"."ssh_key_from_memory" or false) ||
+        (f.git2."0.8.0".default or false) ||
+        (git2."0.8.0"."default" or false); }
+      { "0.8.0".default = (f.git2."0.8.0".default or true); }
+    ];
+    libc."${deps.git2."0.8.0".libc}".default = true;
+    libgit2_sys = fold recursiveUpdate {} [
+      { "${deps.git2."0.8.0".libgit2_sys}"."curl" =
+        (f.libgit2_sys."${deps.git2."0.8.0".libgit2_sys}"."curl" or false) ||
+        (git2."0.8.0"."curl" or false) ||
+        (f."git2"."0.8.0"."curl" or false); }
+      { "${deps.git2."0.8.0".libgit2_sys}"."https" =
+        (f.libgit2_sys."${deps.git2."0.8.0".libgit2_sys}"."https" or false) ||
+        (git2."0.8.0"."https" or false) ||
+        (f."git2"."0.8.0"."https" or false); }
+      { "${deps.git2."0.8.0".libgit2_sys}"."ssh" =
+        (f.libgit2_sys."${deps.git2."0.8.0".libgit2_sys}"."ssh" or false) ||
+        (git2."0.8.0"."ssh" or false) ||
+        (f."git2"."0.8.0"."ssh" or false); }
+      { "${deps.git2."0.8.0".libgit2_sys}"."ssh_key_from_memory" =
+        (f.libgit2_sys."${deps.git2."0.8.0".libgit2_sys}"."ssh_key_from_memory" or false) ||
+        (git2."0.8.0"."ssh_key_from_memory" or false) ||
+        (f."git2"."0.8.0"."ssh_key_from_memory" or false); }
+      { "${deps.git2."0.8.0".libgit2_sys}".default = true; }
+    ];
+    log."${deps.git2."0.8.0".log}".default = true;
+    openssl_probe."${deps.git2."0.8.0".openssl_probe}".default = true;
+    openssl_sys."${deps.git2."0.8.0".openssl_sys}".default = true;
+    url."${deps.git2."0.8.0".url}".default = true;
+  }) [
+    (features_.bitflags."${deps."git2"."0.8.0"."bitflags"}" deps)
+    (features_.libc."${deps."git2"."0.8.0"."libc"}" deps)
+    (features_.libgit2_sys."${deps."git2"."0.8.0"."libgit2_sys"}" deps)
+    (features_.log."${deps."git2"."0.8.0"."log"}" deps)
+    (features_.url."${deps."git2"."0.8.0"."url"}" deps)
+    (features_.openssl_probe."${deps."git2"."0.8.0"."openssl_probe"}" deps)
+    (features_.openssl_sys."${deps."git2"."0.8.0"."openssl_sys"}" deps)
+  ];
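+
+  # Two notes on the git2 block above. The guard
+  # `(kernel == "linux" || kernel == "darwin") && !(kernel == "darwin")`
+  # reduces to `kernel == "linux"`; it is presumably the generator's literal
+  # rendering of a cfg such as `all(unix, not(target_os = "macos"))`, with
+  # unix approximated as linux-or-darwin. And features forward across crate
+  # boundaries: turning on git2's "ssh" also sets
+  # libgit2_sys."${deps.git2."0.8.0".libgit2_sys}"."ssh", keyed by the exact
+  # libgit2-sys version this git2 resolves against.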
+
+
+# end
+# git2-curl-0.9.0
+
+  crates.git2_curl."0.9.0" = deps: { features?(features_.git2_curl."0.9.0" deps {}) }: buildRustCrate {
+    crateName = "git2-curl";
+    version = "0.9.0";
+    description = "Backend for an HTTP transport in libgit2 powered by libcurl.\n\nIntended to be used with the git2 crate.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0m7bjx7pbrd7hiwwbazgigv9anici9jfwgzhp3q47smbwszdv2hh";
+    dependencies = mapFeatures features ([
+      (crates."curl"."${deps."git2_curl"."0.9.0"."curl"}" deps)
+      (crates."git2"."${deps."git2_curl"."0.9.0"."git2"}" deps)
+      (crates."log"."${deps."git2_curl"."0.9.0"."log"}" deps)
+      (crates."url"."${deps."git2_curl"."0.9.0"."url"}" deps)
+    ]);
+  };
+  features_.git2_curl."0.9.0" = deps: f: updateFeatures f ({
+    curl."${deps.git2_curl."0.9.0".curl}".default = true;
+    git2."${deps.git2_curl."0.9.0".git2}".default = (f.git2."${deps.git2_curl."0.9.0".git2}".default or false);
+    git2_curl."0.9.0".default = (f.git2_curl."0.9.0".default or true);
+    log."${deps.git2_curl."0.9.0".log}".default = true;
+    url."${deps.git2_curl."0.9.0".url}".default = true;
+  }) [
+    (features_.curl."${deps."git2_curl"."0.9.0"."curl"}" deps)
+    (features_.git2."${deps."git2_curl"."0.9.0"."git2"}" deps)
+    (features_.log."${deps."git2_curl"."0.9.0"."log"}" deps)
+    (features_.url."${deps."git2_curl"."0.9.0"."url"}" deps)
+  ];
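+
+  # Note that git2_curl pins git2's default to `or false` instead of forcing
+  # it true: the dependency is taken with default features disabled
+  # (presumably mirroring a Cargo `default-features = false` entry), while
+  # still honouring any defaults the caller has already switched on.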
+
+
+# end
+# glob-0.2.11
+
+  crates.glob."0.2.11" = deps: { features?(features_.glob."0.2.11" deps {}) }: buildRustCrate {
+    crateName = "glob";
+    version = "0.2.11";
+    description = "Support for matching file paths against Unix shell style patterns.\n";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "104389jjxs8r2f5cc9p0axhjmndgln60ih5x4f00ccgg9d3zarlf";
+  };
+  features_.glob."0.2.11" = deps: f: updateFeatures f ({
+    glob."0.2.11".default = (f.glob."0.2.11".default or true);
+  }) [];
+
+
+# end
+# globset-0.4.3
+
+  crates.globset."0.4.3" = deps: { features?(features_.globset."0.4.3" deps {}) }: buildRustCrate {
+    crateName = "globset";
+    version = "0.4.3";
+    description = "Cross platform single glob and glob set matching. Glob set matching is the\nprocess of matching one or more glob patterns against a single candidate path\nsimultaneously, and returning all of the globs that matched.\n";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "0vj99vw55mp7w44p1157f9c44q5lms6sn0mllhavwrwdn3iyfrij";
+    dependencies = mapFeatures features ([
+      (crates."aho_corasick"."${deps."globset"."0.4.3"."aho_corasick"}" deps)
+      (crates."bstr"."${deps."globset"."0.4.3"."bstr"}" deps)
+      (crates."fnv"."${deps."globset"."0.4.3"."fnv"}" deps)
+      (crates."log"."${deps."globset"."0.4.3"."log"}" deps)
+      (crates."regex"."${deps."globset"."0.4.3"."regex"}" deps)
+    ]);
+    features = mkFeatures (features."globset"."0.4.3" or {});
+  };
+  features_.globset."0.4.3" = deps: f: updateFeatures f ({
+    aho_corasick."${deps.globset."0.4.3".aho_corasick}".default = true;
+    bstr = fold recursiveUpdate {} [
+      { "${deps.globset."0.4.3".bstr}"."std" = true; }
+      { "${deps.globset."0.4.3".bstr}".default = (f.bstr."${deps.globset."0.4.3".bstr}".default or false); }
+    ];
+    fnv."${deps.globset."0.4.3".fnv}".default = true;
+    globset."0.4.3".default = (f.globset."0.4.3".default or true);
+    log."${deps.globset."0.4.3".log}".default = true;
+    regex."${deps.globset."0.4.3".regex}".default = true;
+  }) [
+    (features_.aho_corasick."${deps."globset"."0.4.3"."aho_corasick"}" deps)
+    (features_.bstr."${deps."globset"."0.4.3"."bstr"}" deps)
+    (features_.fnv."${deps."globset"."0.4.3"."fnv"}" deps)
+    (features_.log."${deps."globset"."0.4.3"."log"}" deps)
+    (features_.regex."${deps."globset"."0.4.3"."regex"}" deps)
+  ];
+
+
+# end
+# hashbrown-0.1.8
+
+  crates.hashbrown."0.1.8" = deps: { features?(features_.hashbrown."0.1.8" deps {}) }: buildRustCrate {
+    crateName = "hashbrown";
+    version = "0.1.8";
+    description = "A Rust port of Google's SwissTable hash map";
+    authors = [ "Amanieu d'Antras <amanieu@gmail.com>" ];
+    sha256 = "047fk80pg59cdn5lz4h2a514fmgmya896dvy3dqqviia52a27fzh";
+    dependencies = mapFeatures features ([
+      (crates."byteorder"."${deps."hashbrown"."0.1.8"."byteorder"}" deps)
+      (crates."scopeguard"."${deps."hashbrown"."0.1.8"."scopeguard"}" deps)
+    ]);
+    features = mkFeatures (features."hashbrown"."0.1.8" or {});
+  };
+  features_.hashbrown."0.1.8" = deps: f: updateFeatures f ({
+    byteorder."${deps.hashbrown."0.1.8".byteorder}".default = (f.byteorder."${deps.hashbrown."0.1.8".byteorder}".default or false);
+    hashbrown."0.1.8".default = (f.hashbrown."0.1.8".default or true);
+    scopeguard."${deps.hashbrown."0.1.8".scopeguard}".default = (f.scopeguard."${deps.hashbrown."0.1.8".scopeguard}".default or false);
+  }) [
+    (features_.byteorder."${deps."hashbrown"."0.1.8"."byteorder"}" deps)
+    (features_.scopeguard."${deps."hashbrown"."0.1.8"."scopeguard"}" deps)
+  ];
+
+
+# end
+# hex-0.3.2
+
+  crates.hex."0.3.2" = deps: { features?(features_.hex."0.3.2" deps {}) }: buildRustCrate {
+    crateName = "hex";
+    version = "0.3.2";
+    description = "Encoding and decoding data into/from hexadecimal representation.";
+    authors = [ "KokaKiwi <kokakiwi@kokakiwi.net>" ];
+    sha256 = "0hs0xfb4x67y4ss9mmbjmibkwakbn3xf23i21m409bw2zqk9b6kz";
+    features = mkFeatures (features."hex"."0.3.2" or {});
+  };
+  features_.hex."0.3.2" = deps: f: updateFeatures f ({
+    hex."0.3.2".default = (f.hex."0.3.2".default or true);
+  }) [];
+
+
+# end
+# home-0.3.4
+
+  crates.home."0.3.4" = deps: { features?(features_.home."0.3.4" deps {}) }: buildRustCrate {
+    crateName = "home";
+    version = "0.3.4";
+    description = "Shared definitions of home directories";
+    authors = [ "Brian Anderson <andersrb@gmail.com>" ];
+    sha256 = "19fbzvv74wqxqpdlz6ri1p270i8hp17h8njjj68k98sgrabkcr0n";
+    dependencies = (if kernel == "windows" then mapFeatures features ([
+      (crates."scopeguard"."${deps."home"."0.3.4"."scopeguard"}" deps)
+      (crates."winapi"."${deps."home"."0.3.4"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.home."0.3.4" = deps: f: updateFeatures f ({
+    home."0.3.4".default = (f.home."0.3.4".default or true);
+    scopeguard."${deps.home."0.3.4".scopeguard}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.home."0.3.4".winapi}"."errhandlingapi" = true; }
+      { "${deps.home."0.3.4".winapi}"."handleapi" = true; }
+      { "${deps.home."0.3.4".winapi}"."processthreadsapi" = true; }
+      { "${deps.home."0.3.4".winapi}"."std" = true; }
+      { "${deps.home."0.3.4".winapi}"."userenv" = true; }
+      { "${deps.home."0.3.4".winapi}"."winerror" = true; }
+      { "${deps.home."0.3.4".winapi}"."winnt" = true; }
+      { "${deps.home."0.3.4".winapi}".default = true; }
+    ];
+  }) [
+    (features_.scopeguard."${deps."home"."0.3.4"."scopeguard"}" deps)
+    (features_.winapi."${deps."home"."0.3.4"."winapi"}" deps)
+  ];
+
+
+# end
+# http-0.1.17
+
+  crates.http."0.1.17" = deps: { features?(features_.http."0.1.17" deps {}) }: buildRustCrate {
+    crateName = "http";
+    version = "0.1.17";
+    description = "A set of types for representing HTTP requests and responses.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" "Carl Lerche <me@carllerche.com>" "Sean McArthur <sean@seanmonstar.com>" ];
+    sha256 = "0q71wgggg1h5kjyg1gb4m70g3ian9qwrkx2b9wwvfyafrkmjpg9c";
+    dependencies = mapFeatures features ([
+      (crates."bytes"."${deps."http"."0.1.17"."bytes"}" deps)
+      (crates."fnv"."${deps."http"."0.1.17"."fnv"}" deps)
+      (crates."itoa"."${deps."http"."0.1.17"."itoa"}" deps)
+    ]);
+  };
+  features_.http."0.1.17" = deps: f: updateFeatures f ({
+    bytes."${deps.http."0.1.17".bytes}".default = true;
+    fnv."${deps.http."0.1.17".fnv}".default = true;
+    http."0.1.17".default = (f.http."0.1.17".default or true);
+    itoa."${deps.http."0.1.17".itoa}".default = true;
+  }) [
+    (features_.bytes."${deps."http"."0.1.17"."bytes"}" deps)
+    (features_.fnv."${deps."http"."0.1.17"."fnv"}" deps)
+    (features_.itoa."${deps."http"."0.1.17"."itoa"}" deps)
+  ];
+
+
+# end
+# ignore-0.4.7
+
+  crates.ignore."0.4.7" = deps: { features?(features_.ignore."0.4.7" deps {}) }: buildRustCrate {
+    crateName = "ignore";
+    version = "0.4.7";
+    description = "A fast library for efficiently matching ignore files such as `.gitignore`\nagainst file paths.\n";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "10ky0pnkzk6spa416sxvhcpc1nxq56n6mxkmhzy3ws57x9v75nkj";
+    dependencies = mapFeatures features ([
+      (crates."crossbeam_channel"."${deps."ignore"."0.4.7"."crossbeam_channel"}" deps)
+      (crates."globset"."${deps."ignore"."0.4.7"."globset"}" deps)
+      (crates."lazy_static"."${deps."ignore"."0.4.7"."lazy_static"}" deps)
+      (crates."log"."${deps."ignore"."0.4.7"."log"}" deps)
+      (crates."memchr"."${deps."ignore"."0.4.7"."memchr"}" deps)
+      (crates."regex"."${deps."ignore"."0.4.7"."regex"}" deps)
+      (crates."same_file"."${deps."ignore"."0.4.7"."same_file"}" deps)
+      (crates."thread_local"."${deps."ignore"."0.4.7"."thread_local"}" deps)
+      (crates."walkdir"."${deps."ignore"."0.4.7"."walkdir"}" deps)
+    ])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi_util"."${deps."ignore"."0.4.7"."winapi_util"}" deps)
+    ]) else []);
+    features = mkFeatures (features."ignore"."0.4.7" or {});
+  };
+  features_.ignore."0.4.7" = deps: f: updateFeatures f (rec {
+    crossbeam_channel."${deps.ignore."0.4.7".crossbeam_channel}".default = true;
+    globset = fold recursiveUpdate {} [
+      { "${deps.ignore."0.4.7".globset}"."simd-accel" =
+        (f.globset."${deps.ignore."0.4.7".globset}"."simd-accel" or false) ||
+        (ignore."0.4.7"."simd-accel" or false) ||
+        (f."ignore"."0.4.7"."simd-accel" or false); }
+      { "${deps.ignore."0.4.7".globset}".default = true; }
+    ];
+    ignore."0.4.7".default = (f.ignore."0.4.7".default or true);
+    lazy_static."${deps.ignore."0.4.7".lazy_static}".default = true;
+    log."${deps.ignore."0.4.7".log}".default = true;
+    memchr."${deps.ignore."0.4.7".memchr}".default = true;
+    regex."${deps.ignore."0.4.7".regex}".default = true;
+    same_file."${deps.ignore."0.4.7".same_file}".default = true;
+    thread_local."${deps.ignore."0.4.7".thread_local}".default = true;
+    walkdir."${deps.ignore."0.4.7".walkdir}".default = true;
+    winapi_util."${deps.ignore."0.4.7".winapi_util}".default = true;
+  }) [
+    (features_.crossbeam_channel."${deps."ignore"."0.4.7"."crossbeam_channel"}" deps)
+    (features_.globset."${deps."ignore"."0.4.7"."globset"}" deps)
+    (features_.lazy_static."${deps."ignore"."0.4.7"."lazy_static"}" deps)
+    (features_.log."${deps."ignore"."0.4.7"."log"}" deps)
+    (features_.memchr."${deps."ignore"."0.4.7"."memchr"}" deps)
+    (features_.regex."${deps."ignore"."0.4.7"."regex"}" deps)
+    (features_.same_file."${deps."ignore"."0.4.7"."same_file"}" deps)
+    (features_.thread_local."${deps."ignore"."0.4.7"."thread_local"}" deps)
+    (features_.walkdir."${deps."ignore"."0.4.7"."walkdir"}" deps)
+    (features_.winapi_util."${deps."ignore"."0.4.7"."winapi_util"}" deps)
+  ];
+
+
+# end
+# im-rc-12.3.4
+
+  crates.im_rc."12.3.4" = deps: { features?(features_.im_rc."12.3.4" deps {}) }: buildRustCrate {
+    crateName = "im-rc";
+    version = "12.3.4";
+    description = "Immutable collection datatypes (the fast but not thread safe version)";
+    authors = [ "Bodil Stokke <bodil@bodil.org>" ];
+    edition = "2018";
+    sha256 = "0l53vjm7ycccb0lxj1zpgvlik5rpngnf9gggvgb3jbdv2jxjkdhz";
+    libPath = "./src/lib.rs";
+    build = "./build.rs";
+    dependencies = mapFeatures features ([
+      (crates."sized_chunks"."${deps."im_rc"."12.3.4"."sized_chunks"}" deps)
+      (crates."typenum"."${deps."im_rc"."12.3.4"."typenum"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."rustc_version"."${deps."im_rc"."12.3.4"."rustc_version"}" deps)
+    ]);
+  };
+  features_.im_rc."12.3.4" = deps: f: updateFeatures f ({
+    im_rc."12.3.4".default = (f.im_rc."12.3.4".default or true);
+    rustc_version."${deps.im_rc."12.3.4".rustc_version}".default = true;
+    sized_chunks."${deps.im_rc."12.3.4".sized_chunks}".default = true;
+    typenum."${deps.im_rc."12.3.4".typenum}".default = true;
+  }) [
+    (features_.sized_chunks."${deps."im_rc"."12.3.4"."sized_chunks"}" deps)
+    (features_.typenum."${deps."im_rc"."12.3.4"."typenum"}" deps)
+    (features_.rustc_version."${deps."im_rc"."12.3.4"."rustc_version"}" deps)
+  ];
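+
+  # `buildDependencies` above (rustc_version, for im-rc's build.rs) apparently
+  # corresponds to Cargo's [build-dependencies]: crates needed by the build
+  # script rather than by the library itself, yet still threaded through the
+  # same features_ fixpoint so their feature flags resolve consistently.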
+
+
+# end
+# iovec-0.1.2
+
+  crates.iovec."0.1.2" = deps: { features?(features_.iovec."0.1.2" deps {}) }: buildRustCrate {
+    crateName = "iovec";
+    version = "0.1.2";
+    description = "Portable buffer type for scatter/gather I/O operations\n";
+    authors = [ "Carl Lerche <me@carllerche.com>" ];
+    sha256 = "0vjymmb7wj4v4kza5jjn48fcdb85j3k37y7msjl3ifz0p9yiyp2r";
+    dependencies = (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."iovec"."0.1.2"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."iovec"."0.1.2"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.iovec."0.1.2" = deps: f: updateFeatures f ({
+    iovec."0.1.2".default = (f.iovec."0.1.2".default or true);
+    libc."${deps.iovec."0.1.2".libc}".default = true;
+    winapi."${deps.iovec."0.1.2".winapi}".default = true;
+  }) [
+    (features_.libc."${deps."iovec"."0.1.2"."libc"}" deps)
+    (features_.winapi."${deps."iovec"."0.1.2"."winapi"}" deps)
+  ];
+
+
+# end
+# itertools-0.7.11
+
+  crates.itertools."0.7.11" = deps: { features?(features_.itertools."0.7.11" deps {}) }: buildRustCrate {
+    crateName = "itertools";
+    version = "0.7.11";
+    description = "Extra iterator adaptors, iterator methods, free functions, and macros.";
+    authors = [ "bluss" ];
+    sha256 = "0gavmkvn2c3cwfwk5zl5p7saiqn4ww227am5ykn6pgfm7c6ppz56";
+    dependencies = mapFeatures features ([
+      (crates."either"."${deps."itertools"."0.7.11"."either"}" deps)
+    ]);
+    features = mkFeatures (features."itertools"."0.7.11" or {});
+  };
+  features_.itertools."0.7.11" = deps: f: updateFeatures f (rec {
+    either."${deps.itertools."0.7.11".either}".default = (f.either."${deps.itertools."0.7.11".either}".default or false);
+    itertools = fold recursiveUpdate {} [
+      { "0.7.11"."use_std" =
+        (f.itertools."0.7.11"."use_std" or false) ||
+        (f.itertools."0.7.11".default or false) ||
+        (itertools."0.7.11"."default" or false); }
+      { "0.7.11".default = (f.itertools."0.7.11".default or true); }
+    ];
+  }) [
+    (features_.either."${deps."itertools"."0.7.11"."either"}" deps)
+  ];
+
+
+# end
+# jobserver-0.1.13
+
+  crates.jobserver."0.1.13" = deps: { features?(features_.jobserver."0.1.13" deps {}) }: buildRustCrate {
+    crateName = "jobserver";
+    version = "0.1.13";
+    description = "An implementation of the GNU make jobserver for Rust\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "01h08h0k9i7cvlnlw53jf398d03k5kxrs7m30xl7h9s5dlw0vi9i";
+    dependencies = mapFeatures features ([
+      (crates."log"."${deps."jobserver"."0.1.13"."log"}" deps)
+    ])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."jobserver"."0.1.13"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."rand"."${deps."jobserver"."0.1.13"."rand"}" deps)
+    ]) else []);
+  };
+  features_.jobserver."0.1.13" = deps: f: updateFeatures f ({
+    jobserver."0.1.13".default = (f.jobserver."0.1.13".default or true);
+    libc."${deps.jobserver."0.1.13".libc}".default = true;
+    log."${deps.jobserver."0.1.13".log}".default = true;
+    rand."${deps.jobserver."0.1.13".rand}".default = true;
+  }) [
+    (features_.log."${deps."jobserver"."0.1.13"."log"}" deps)
+    (features_.libc."${deps."jobserver"."0.1.13"."libc"}" deps)
+    (features_.rand."${deps."jobserver"."0.1.13"."rand"}" deps)
+  ];
+
+
+# end
+# kernel32-sys-0.2.2
+
+  crates.kernel32_sys."0.2.2" = deps: { features?(features_.kernel32_sys."0.2.2" deps {}) }: buildRustCrate {
+    crateName = "kernel32-sys";
+    version = "0.2.2";
+    description = "Contains function definitions for the Windows API library kernel32. See winapi for types and constants.";
+    authors = [ "Peter Atashian <retep998@gmail.com>" ];
+    sha256 = "1lrw1hbinyvr6cp28g60z97w32w8vsk6pahk64pmrv2fmby8srfj";
+    libName = "kernel32";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."winapi"."${deps."kernel32_sys"."0.2.2"."winapi"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."winapi_build"."${deps."kernel32_sys"."0.2.2"."winapi_build"}" deps)
+    ]);
+  };
+  features_.kernel32_sys."0.2.2" = deps: f: updateFeatures f ({
+    kernel32_sys."0.2.2".default = (f.kernel32_sys."0.2.2".default or true);
+    winapi."${deps.kernel32_sys."0.2.2".winapi}".default = true;
+    winapi_build."${deps.kernel32_sys."0.2.2".winapi_build}".default = true;
+  }) [
+    (features_.winapi."${deps."kernel32_sys"."0.2.2"."winapi"}" deps)
+    (features_.winapi_build."${deps."kernel32_sys"."0.2.2"."winapi_build"}" deps)
+  ];
+
+
+# end
+# lazycell-1.2.1
+
+  crates.lazycell."1.2.1" = deps: { features?(features_.lazycell."1.2.1" deps {}) }: buildRustCrate {
+    crateName = "lazycell";
+    version = "1.2.1";
+    description = "A library providing a lazily filled Cell struct";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" "Nikita Pekin <contact@nikitapek.in>" ];
+    sha256 = "1m4h2q9rgxrgc7xjnws1x81lrb68jll8w3pykx1a9bhr29q2mcwm";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."lazycell"."1.2.1" or {});
+  };
+  features_.lazycell."1.2.1" = deps: f: updateFeatures f (rec {
+    lazycell = fold recursiveUpdate {} [
+      { "1.2.1"."clippy" =
+        (f.lazycell."1.2.1"."clippy" or false) ||
+        (f.lazycell."1.2.1".nightly-testing or false) ||
+        (lazycell."1.2.1"."nightly-testing" or false); }
+      { "1.2.1"."nightly" =
+        (f.lazycell."1.2.1"."nightly" or false) ||
+        (f.lazycell."1.2.1".nightly-testing or false) ||
+        (lazycell."1.2.1"."nightly-testing" or false); }
+      { "1.2.1".default = (f.lazycell."1.2.1".default or true); }
+    ];
+  }) [];
+
+
+# end
+# libc-0.2.51
+
+  crates.libc."0.2.51" = deps: { features?(features_.libc."0.2.51" deps {}) }: buildRustCrate {
+    crateName = "libc";
+    version = "0.2.51";
+    description = "Raw FFI bindings to platform libraries like libc.\n";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1lzavxj1ymm7vghs6nmzq9shprdlqby73py9k30gwvv0dwy365cv";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+]);
+    features = mkFeatures (features."libc"."0.2.51" or {});
+  };
+  features_.libc."0.2.51" = deps: f: updateFeatures f (rec {
+    libc = fold recursiveUpdate {} [
+      { "0.2.51"."align" =
+        (f.libc."0.2.51"."align" or false) ||
+        (f.libc."0.2.51".rustc-dep-of-std or false) ||
+        (libc."0.2.51"."rustc-dep-of-std" or false); }
+      { "0.2.51"."rustc-std-workspace-core" =
+        (f.libc."0.2.51"."rustc-std-workspace-core" or false) ||
+        (f.libc."0.2.51".rustc-dep-of-std or false) ||
+        (libc."0.2.51"."rustc-dep-of-std" or false); }
+      { "0.2.51"."use_std" =
+        (f.libc."0.2.51"."use_std" or false) ||
+        (f.libc."0.2.51".default or false) ||
+        (libc."0.2.51"."default" or false); }
+      { "0.2.51".default = (f.libc."0.2.51".default or true); }
+    ];
+  }) [];
+
+
+# end
+# libgit2-sys-0.7.11
+
+  crates.libgit2_sys."0.7.11" = deps: { features?(features_.libgit2_sys."0.7.11" deps {}) }: buildRustCrate {
+    crateName = "libgit2-sys";
+    version = "0.7.11";
+    description = "Native bindings to the libgit2 library";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "12wyfl7xl7lpz65s17j5rf9xfkn461792f67jqsz0ign3daaac9h";
+    libPath = "lib.rs";
+    libName = "libgit2_sys";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."libgit2_sys"."0.7.11"."libc"}" deps)
+      (crates."libz_sys"."${deps."libgit2_sys"."0.7.11"."libz_sys"}" deps)
+    ]
+      ++ (if features.libgit2_sys."0.7.11".curl-sys or false then [ (crates.curl_sys."${deps."libgit2_sys"."0.7.11".curl_sys}" deps) ] else [])
+      ++ (if features.libgit2_sys."0.7.11".libssh2-sys or false then [ (crates.libssh2_sys."${deps."libgit2_sys"."0.7.11".libssh2_sys}" deps) ] else []))
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+    ]
+      ++ (if features.libgit2_sys."0.7.11".openssl-sys or false then [ (crates.openssl_sys."${deps."libgit2_sys"."0.7.11".openssl_sys}" deps) ] else [])) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."libgit2_sys"."0.7.11"."cc"}" deps)
+      (crates."pkg_config"."${deps."libgit2_sys"."0.7.11"."pkg_config"}" deps)
+    ]);
+    features = mkFeatures (features."libgit2_sys"."0.7.11" or {});
+  };
+  features_.libgit2_sys."0.7.11" = deps: f: updateFeatures f (rec {
+    cc."${deps.libgit2_sys."0.7.11".cc}".default = true;
+    curl_sys."${deps.libgit2_sys."0.7.11".curl_sys}".default = true;
+    libc."${deps.libgit2_sys."0.7.11".libc}".default = true;
+    libgit2_sys = fold recursiveUpdate {} [
+      { "0.7.11"."curl-sys" =
+        (f.libgit2_sys."0.7.11"."curl-sys" or false) ||
+        (f.libgit2_sys."0.7.11".curl or false) ||
+        (libgit2_sys."0.7.11"."curl" or false); }
+      { "0.7.11"."libssh2-sys" =
+        (f.libgit2_sys."0.7.11"."libssh2-sys" or false) ||
+        (f.libgit2_sys."0.7.11".ssh or false) ||
+        (libgit2_sys."0.7.11"."ssh" or false); }
+      { "0.7.11"."openssl-sys" =
+        (f.libgit2_sys."0.7.11"."openssl-sys" or false) ||
+        (f.libgit2_sys."0.7.11".https or false) ||
+        (libgit2_sys."0.7.11"."https" or false); }
+      { "0.7.11".default = (f.libgit2_sys."0.7.11".default or true); }
+    ];
+    libssh2_sys."${deps.libgit2_sys."0.7.11".libssh2_sys}".default = true;
+    libz_sys."${deps.libgit2_sys."0.7.11".libz_sys}".default = true;
+    openssl_sys."${deps.libgit2_sys."0.7.11".openssl_sys}".default = true;
+    pkg_config."${deps.libgit2_sys."0.7.11".pkg_config}".default = true;
+  }) [
+    (features_.curl_sys."${deps."libgit2_sys"."0.7.11"."curl_sys"}" deps)
+    (features_.libc."${deps."libgit2_sys"."0.7.11"."libc"}" deps)
+    (features_.libssh2_sys."${deps."libgit2_sys"."0.7.11"."libssh2_sys"}" deps)
+    (features_.libz_sys."${deps."libgit2_sys"."0.7.11"."libz_sys"}" deps)
+    (features_.cc."${deps."libgit2_sys"."0.7.11"."cc"}" deps)
+    (features_.pkg_config."${deps."libgit2_sys"."0.7.11"."pkg_config"}" deps)
+    (features_.openssl_sys."${deps."libgit2_sys"."0.7.11"."openssl_sys"}" deps)
+  ];
+
+
+# end
+# libnghttp2-sys-0.1.1
+
+  crates.libnghttp2_sys."0.1.1" = deps: { features?(features_.libnghttp2_sys."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "libnghttp2-sys";
+    version = "0.1.1";
+    description = "FFI bindings for libnghttp2 (nghttp2)\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "08z41i7d8pm0jzv6p77kp22hh0a4psdy109n6nxr8x2k1ibjxk8w";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."libnghttp2_sys"."0.1.1"."libc"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."libnghttp2_sys"."0.1.1"."cc"}" deps)
+    ]);
+  };
+  features_.libnghttp2_sys."0.1.1" = deps: f: updateFeatures f ({
+    cc."${deps.libnghttp2_sys."0.1.1".cc}".default = true;
+    libc."${deps.libnghttp2_sys."0.1.1".libc}".default = true;
+    libnghttp2_sys."0.1.1".default = (f.libnghttp2_sys."0.1.1".default or true);
+  }) [
+    (features_.libc."${deps."libnghttp2_sys"."0.1.1"."libc"}" deps)
+    (features_.cc."${deps."libnghttp2_sys"."0.1.1"."cc"}" deps)
+  ];
+
+
+# end
+# libssh2-sys-0.2.11
+
+  crates.libssh2_sys."0.2.11" = deps: { features?(features_.libssh2_sys."0.2.11" deps {}) }: buildRustCrate {
+    crateName = "libssh2-sys";
+    version = "0.2.11";
+    description = "Native bindings to the libssh2 library";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1mjily9qjjjf31pzvlxyqnp1midjc77s6sd303j46d14igna7nhi";
+    libPath = "lib.rs";
+    libName = "libssh2_sys";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."libssh2_sys"."0.2.11"."libc"}" deps)
+      (crates."libz_sys"."${deps."libssh2_sys"."0.2.11"."libz_sys"}" deps)
+    ])
+      ++ (if abi == "msvc" then mapFeatures features ([
+]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."openssl_sys"."${deps."libssh2_sys"."0.2.11"."openssl_sys"}" deps)
+    ]) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."libssh2_sys"."0.2.11"."cc"}" deps)
+      (crates."pkg_config"."${deps."libssh2_sys"."0.2.11"."pkg_config"}" deps)
+    ]);
+  };
+  features_.libssh2_sys."0.2.11" = deps: f: updateFeatures f ({
+    cc."${deps.libssh2_sys."0.2.11".cc}".default = true;
+    libc."${deps.libssh2_sys."0.2.11".libc}".default = true;
+    libssh2_sys."0.2.11".default = (f.libssh2_sys."0.2.11".default or true);
+    libz_sys."${deps.libssh2_sys."0.2.11".libz_sys}".default = true;
+    openssl_sys."${deps.libssh2_sys."0.2.11".openssl_sys}".default = true;
+    pkg_config."${deps.libssh2_sys."0.2.11".pkg_config}".default = true;
+  }) [
+    (features_.libc."${deps."libssh2_sys"."0.2.11"."libc"}" deps)
+    (features_.libz_sys."${deps."libssh2_sys"."0.2.11"."libz_sys"}" deps)
+    (features_.cc."${deps."libssh2_sys"."0.2.11"."cc"}" deps)
+    (features_.pkg_config."${deps."libssh2_sys"."0.2.11"."pkg_config"}" deps)
+    (features_.openssl_sys."${deps."libssh2_sys"."0.2.11"."openssl_sys"}" deps)
+  ];
+
+
+# end
+# libz-sys-1.0.25
+
+  crates.libz_sys."1.0.25" = deps: { features?(features_.libz_sys."1.0.25" deps {}) }: buildRustCrate {
+    crateName = "libz-sys";
+    version = "1.0.25";
+    description = "Bindings to the system libz library (also known as zlib).\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "195jzg8mgjbvmkbpx1rzkzrqm0g2fdivk79v44c9lzl64r3f9fym";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."libz_sys"."1.0.25"."libc"}" deps)
+    ])
+      ++ (if abi == "msvc" then mapFeatures features ([
+]) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."libz_sys"."1.0.25"."cc"}" deps)
+      (crates."pkg_config"."${deps."libz_sys"."1.0.25"."pkg_config"}" deps)
+    ]);
+    features = mkFeatures (features."libz_sys"."1.0.25" or {});
+  };
+  features_.libz_sys."1.0.25" = deps: f: updateFeatures f ({
+    cc."${deps.libz_sys."1.0.25".cc}".default = true;
+    libc."${deps.libz_sys."1.0.25".libc}".default = true;
+    libz_sys."1.0.25".default = (f.libz_sys."1.0.25".default or true);
+    pkg_config."${deps.libz_sys."1.0.25".pkg_config}".default = true;
+  }) [
+    (features_.libc."${deps."libz_sys"."1.0.25"."libc"}" deps)
+    (features_.cc."${deps."libz_sys"."1.0.25"."cc"}" deps)
+    (features_.pkg_config."${deps."libz_sys"."1.0.25"."pkg_config"}" deps)
+  ];
+
+
+# end
+# lock_api-0.1.5
+
+  crates.lock_api."0.1.5" = deps: { features?(features_.lock_api."0.1.5" deps {}) }: buildRustCrate {
+    crateName = "lock_api";
+    version = "0.1.5";
+    description = "Wrappers to create fully-featured Mutex and RwLock types. Compatible with no_std.";
+    authors = [ "Amanieu d'Antras <amanieu@gmail.com>" ];
+    sha256 = "132sidr5hvjfkaqm3l95zpcpi8yk5ddd0g79zf1ad4v65sxirqqm";
+    dependencies = mapFeatures features ([
+      (crates."scopeguard"."${deps."lock_api"."0.1.5"."scopeguard"}" deps)
+    ]);
+    features = mkFeatures (features."lock_api"."0.1.5" or {});
+  };
+  features_.lock_api."0.1.5" = deps: f: updateFeatures f ({
+    lock_api."0.1.5".default = (f.lock_api."0.1.5".default or true);
+    scopeguard."${deps.lock_api."0.1.5".scopeguard}".default = (f.scopeguard."${deps.lock_api."0.1.5".scopeguard}".default or false);
+  }) [
+    (features_.scopeguard."${deps."lock_api"."0.1.5"."scopeguard"}" deps)
+  ];
+
+
+# end
+# matrixmultiply-0.1.15
+
+  crates.matrixmultiply."0.1.15" = deps: { features?(features_.matrixmultiply."0.1.15" deps {}) }: buildRustCrate {
+    crateName = "matrixmultiply";
+    version = "0.1.15";
+    description = "General matrix multiplication of f32 and f64 matrices in Rust. Supports matrices with general strides. Uses a microkernel strategy, so that the implementation is easy to parallelize and optimize. `RUSTFLAGS=\"-C target-cpu=native\"` is your friend here.";
+    authors = [ "bluss" ];
+    sha256 = "0ix1i4lnkfqnzv8f9wr34bf0mlr1sx5hr7yr70k4npxmwxscvdj5";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."rawpointer"."${deps."matrixmultiply"."0.1.15"."rawpointer"}" deps)
+    ]);
+  };
+  features_.matrixmultiply."0.1.15" = deps: f: updateFeatures f ({
+    matrixmultiply."0.1.15".default = (f.matrixmultiply."0.1.15".default or true);
+    rawpointer."${deps.matrixmultiply."0.1.15".rawpointer}".default = true;
+  }) [
+    (features_.rawpointer."${deps."matrixmultiply"."0.1.15"."rawpointer"}" deps)
+  ];
+
+
+# end
+# miniz-sys-0.1.11
+
+  crates.miniz_sys."0.1.11" = deps: { features?(features_.miniz_sys."0.1.11" deps {}) }: buildRustCrate {
+    crateName = "miniz-sys";
+    version = "0.1.11";
+    description = "Bindings to the miniz.c library.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0l2wsakqjj7kc06dwxlpz4h8wih0f9d1idrz5gb1svipvh81khsm";
+    libPath = "lib.rs";
+    libName = "miniz_sys";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."miniz_sys"."0.1.11"."libc"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."miniz_sys"."0.1.11"."cc"}" deps)
+    ]);
+  };
+  features_.miniz_sys."0.1.11" = deps: f: updateFeatures f ({
+    cc."${deps.miniz_sys."0.1.11".cc}".default = true;
+    libc."${deps.miniz_sys."0.1.11".libc}".default = true;
+    miniz_sys."0.1.11".default = (f.miniz_sys."0.1.11".default or true);
+  }) [
+    (features_.libc."${deps."miniz_sys"."0.1.11"."libc"}" deps)
+    (features_.cc."${deps."miniz_sys"."0.1.11"."cc"}" deps)
+  ];
+
+
+# end
+# miniz_oxide-0.2.1
+
+  crates.miniz_oxide."0.2.1" = deps: { features?(features_.miniz_oxide."0.2.1" deps {}) }: buildRustCrate {
+    crateName = "miniz_oxide";
+    version = "0.2.1";
+    description = "DEFLATE compression and decompression library rewritten in Rust based on miniz";
+    authors = [ "Frommi <daniil.liferenko@gmail.com>" ];
+    sha256 = "1ly14vlk0gq7czi1323l2dsy5y8dpvdwld4h9083i0y3hx9iyfdz";
+    dependencies = mapFeatures features ([
+      (crates."adler32"."${deps."miniz_oxide"."0.2.1"."adler32"}" deps)
+    ]);
+  };
+  features_.miniz_oxide."0.2.1" = deps: f: updateFeatures f ({
+    adler32."${deps.miniz_oxide."0.2.1".adler32}".default = true;
+    miniz_oxide."0.2.1".default = (f.miniz_oxide."0.2.1".default or true);
+  }) [
+    (features_.adler32."${deps."miniz_oxide"."0.2.1"."adler32"}" deps)
+  ];
+
+
+# end
+# miniz_oxide_c_api-0.2.1
+
+  crates.miniz_oxide_c_api."0.2.1" = deps: { features?(features_.miniz_oxide_c_api."0.2.1" deps {}) }: buildRustCrate {
+    crateName = "miniz_oxide_c_api";
+    version = "0.2.1";
+    description = "DEFLATE compression and decompression API designed to be Rust drop-in replacement for miniz";
+    authors = [ "Frommi <daniil.liferenko@gmail.com>" ];
+    sha256 = "1zsk334nhy2rvyhbr0815l0gp6w40al6rxxafkycaafx3m9j8cj2";
+    build = "src/build.rs";
+    dependencies = mapFeatures features ([
+      (crates."crc"."${deps."miniz_oxide_c_api"."0.2.1"."crc"}" deps)
+      (crates."libc"."${deps."miniz_oxide_c_api"."0.2.1"."libc"}" deps)
+      (crates."miniz_oxide"."${deps."miniz_oxide_c_api"."0.2.1"."miniz_oxide"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."miniz_oxide_c_api"."0.2.1"."cc"}" deps)
+    ]);
+    features = mkFeatures (features."miniz_oxide_c_api"."0.2.1" or {});
+  };
+  features_.miniz_oxide_c_api."0.2.1" = deps: f: updateFeatures f (rec {
+    cc."${deps.miniz_oxide_c_api."0.2.1".cc}".default = true;
+    crc."${deps.miniz_oxide_c_api."0.2.1".crc}".default = true;
+    libc."${deps.miniz_oxide_c_api."0.2.1".libc}".default = true;
+    miniz_oxide."${deps.miniz_oxide_c_api."0.2.1".miniz_oxide}".default = true;
+    miniz_oxide_c_api = fold recursiveUpdate {} [
+      { "0.2.1"."build_orig_miniz" =
+        (f.miniz_oxide_c_api."0.2.1"."build_orig_miniz" or false) ||
+        (f.miniz_oxide_c_api."0.2.1".benching or false) ||
+        (miniz_oxide_c_api."0.2.1"."benching" or false) ||
+        (f.miniz_oxide_c_api."0.2.1".fuzzing or false) ||
+        (miniz_oxide_c_api."0.2.1"."fuzzing" or false); }
+      { "0.2.1"."build_stub_miniz" =
+        (f.miniz_oxide_c_api."0.2.1"."build_stub_miniz" or false) ||
+        (f.miniz_oxide_c_api."0.2.1".miniz_zip or false) ||
+        (miniz_oxide_c_api."0.2.1"."miniz_zip" or false); }
+      { "0.2.1"."no_c_export" =
+        (f.miniz_oxide_c_api."0.2.1"."no_c_export" or false) ||
+        (f.miniz_oxide_c_api."0.2.1".benching or false) ||
+        (miniz_oxide_c_api."0.2.1"."benching" or false) ||
+        (f.miniz_oxide_c_api."0.2.1".fuzzing or false) ||
+        (miniz_oxide_c_api."0.2.1"."fuzzing" or false); }
+      { "0.2.1".default = (f.miniz_oxide_c_api."0.2.1".default or true); }
+    ];
+  }) [
+    (features_.crc."${deps."miniz_oxide_c_api"."0.2.1"."crc"}" deps)
+    (features_.libc."${deps."miniz_oxide_c_api"."0.2.1"."libc"}" deps)
+    (features_.miniz_oxide."${deps."miniz_oxide_c_api"."0.2.1"."miniz_oxide"}" deps)
+    (features_.cc."${deps."miniz_oxide_c_api"."0.2.1"."cc"}" deps)
+  ];
+
+
+# end
+# miow-0.3.3
+
+  crates.miow."0.3.3" = deps: { features?(features_.miow."0.3.3" deps {}) }: buildRustCrate {
+    crateName = "miow";
+    version = "0.3.3";
+    description = "A zero overhead I/O library for Windows, focusing on IOCP and Async I/O\nabstractions.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1mlk5mn00cl6bmf8qlpc6r85dxf4l45vbkbzshsr1mrkb3hn1j57";
+    dependencies = mapFeatures features ([
+      (crates."socket2"."${deps."miow"."0.3.3"."socket2"}" deps)
+      (crates."winapi"."${deps."miow"."0.3.3"."winapi"}" deps)
+    ]);
+  };
+  features_.miow."0.3.3" = deps: f: updateFeatures f ({
+    miow."0.3.3".default = (f.miow."0.3.3".default or true);
+    socket2."${deps.miow."0.3.3".socket2}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.miow."0.3.3".winapi}"."fileapi" = true; }
+      { "${deps.miow."0.3.3".winapi}"."handleapi" = true; }
+      { "${deps.miow."0.3.3".winapi}"."ioapiset" = true; }
+      { "${deps.miow."0.3.3".winapi}"."minwindef" = true; }
+      { "${deps.miow."0.3.3".winapi}"."namedpipeapi" = true; }
+      { "${deps.miow."0.3.3".winapi}"."ntdef" = true; }
+      { "${deps.miow."0.3.3".winapi}"."std" = true; }
+      { "${deps.miow."0.3.3".winapi}"."synchapi" = true; }
+      { "${deps.miow."0.3.3".winapi}"."winerror" = true; }
+      { "${deps.miow."0.3.3".winapi}"."winsock2" = true; }
+      { "${deps.miow."0.3.3".winapi}"."ws2def" = true; }
+      { "${deps.miow."0.3.3".winapi}"."ws2ipdef" = true; }
+      { "${deps.miow."0.3.3".winapi}".default = true; }
+    ];
+  }) [
+    (features_.socket2."${deps."miow"."0.3.3"."socket2"}" deps)
+    (features_.winapi."${deps."miow"."0.3.3"."winapi"}" deps)
+  ];
+
+
+# end
+# ndarray-0.12.1
+
+  crates.ndarray."0.12.1" = deps: { features?(features_.ndarray."0.12.1" deps {}) }: buildRustCrate {
+    crateName = "ndarray";
+    version = "0.12.1";
+    description = "An n-dimensional array for general elements and for numerics. Lightweight array views and slicing; views support chunking and splitting.";
+    authors = [ "bluss" "Jim Turner" ];
+    sha256 = "13708k97kdjfj6g4z1yapjln0v4m7zj0114h8snw44fj79l00346";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."itertools"."${deps."ndarray"."0.12.1"."itertools"}" deps)
+      (crates."matrixmultiply"."${deps."ndarray"."0.12.1"."matrixmultiply"}" deps)
+      (crates."num_complex"."${deps."ndarray"."0.12.1"."num_complex"}" deps)
+      (crates."num_traits"."${deps."ndarray"."0.12.1"."num_traits"}" deps)
+    ]);
+    features = mkFeatures (features."ndarray"."0.12.1" or {});
+  };
+  features_.ndarray."0.12.1" = deps: f: updateFeatures f (rec {
+    itertools."${deps.ndarray."0.12.1".itertools}".default = (f.itertools."${deps.ndarray."0.12.1".itertools}".default or false);
+    matrixmultiply."${deps.ndarray."0.12.1".matrixmultiply}".default = true;
+    ndarray = fold recursiveUpdate {} [
+      { "0.12.1"."blas" =
+        (f.ndarray."0.12.1"."blas" or false) ||
+        (f.ndarray."0.12.1".test-blas-openblas-sys or false) ||
+        (ndarray."0.12.1"."test-blas-openblas-sys" or false); }
+      { "0.12.1"."blas-src" =
+        (f.ndarray."0.12.1"."blas-src" or false) ||
+        (f.ndarray."0.12.1".blas or false) ||
+        (ndarray."0.12.1"."blas" or false); }
+      { "0.12.1"."cblas-sys" =
+        (f.ndarray."0.12.1"."cblas-sys" or false) ||
+        (f.ndarray."0.12.1".blas or false) ||
+        (ndarray."0.12.1"."blas" or false); }
+      { "0.12.1"."rustc-serialize" =
+        (f.ndarray."0.12.1"."rustc-serialize" or false) ||
+        (f.ndarray."0.12.1".docs or false) ||
+        (ndarray."0.12.1"."docs" or false); }
+      { "0.12.1"."serde" =
+        (f.ndarray."0.12.1"."serde" or false) ||
+        (f.ndarray."0.12.1".serde-1 or false) ||
+        (ndarray."0.12.1"."serde-1" or false); }
+      { "0.12.1"."serde-1" =
+        (f.ndarray."0.12.1"."serde-1" or false) ||
+        (f.ndarray."0.12.1".docs or false) ||
+        (ndarray."0.12.1"."docs" or false); }
+      { "0.12.1"."test-blas-openblas-sys" =
+        (f.ndarray."0.12.1"."test-blas-openblas-sys" or false) ||
+        (f.ndarray."0.12.1".test or false) ||
+        (ndarray."0.12.1"."test" or false); }
+      { "0.12.1".default = (f.ndarray."0.12.1".default or true); }
+    ];
+    num_complex."${deps.ndarray."0.12.1".num_complex}".default = true;
+    num_traits."${deps.ndarray."0.12.1".num_traits}".default = true;
+  }) [
+    (features_.itertools."${deps."ndarray"."0.12.1"."itertools"}" deps)
+    (features_.matrixmultiply."${deps."ndarray"."0.12.1"."matrixmultiply"}" deps)
+    (features_.num_complex."${deps."ndarray"."0.12.1"."num_complex"}" deps)
+    (features_.num_traits."${deps."ndarray"."0.12.1"."num_traits"}" deps)
+  ];
+
+
+# end
+# num-complex-0.2.1
+
+  crates.num_complex."0.2.1" = deps: { features?(features_.num_complex."0.2.1" deps {}) }: buildRustCrate {
+    crateName = "num-complex";
+    version = "0.2.1";
+    description = "Complex numbers implementation for Rust";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "12lpp62ahc80p33cpw2771l8bwl0q13rl5vq0jzkqib1l5z8q80z";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."num_traits"."${deps."num_complex"."0.2.1"."num_traits"}" deps)
+    ]);
+    features = mkFeatures (features."num_complex"."0.2.1" or {});
+  };
+  features_.num_complex."0.2.1" = deps: f: updateFeatures f (rec {
+    num_complex = fold recursiveUpdate {} [
+      { "0.2.1"."std" =
+        (f.num_complex."0.2.1"."std" or false) ||
+        (f.num_complex."0.2.1".default or false) ||
+        (num_complex."0.2.1"."default" or false); }
+      { "0.2.1".default = (f.num_complex."0.2.1".default or true); }
+    ];
+    num_traits = fold recursiveUpdate {} [
+      { "${deps.num_complex."0.2.1".num_traits}"."i128" =
+        (f.num_traits."${deps.num_complex."0.2.1".num_traits}"."i128" or false) ||
+        (num_complex."0.2.1"."i128" or false) ||
+        (f."num_complex"."0.2.1"."i128" or false); }
+      { "${deps.num_complex."0.2.1".num_traits}"."std" =
+        (f.num_traits."${deps.num_complex."0.2.1".num_traits}"."std" or false) ||
+        (num_complex."0.2.1"."std" or false) ||
+        (f."num_complex"."0.2.1"."std" or false); }
+      { "${deps.num_complex."0.2.1".num_traits}".default = (f.num_traits."${deps.num_complex."0.2.1".num_traits}".default or false); }
+    ];
+  }) [
+    (features_.num_traits."${deps."num_complex"."0.2.1"."num_traits"}" deps)
+  ];
+
+
+# end
+# num-traits-0.2.6
+
+  crates.num_traits."0.2.6" = deps: { features?(features_.num_traits."0.2.6" deps {}) }: buildRustCrate {
+    crateName = "num-traits";
+    version = "0.2.6";
+    description = "Numeric traits for generic mathematics";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1d20sil9n0wgznd1nycm3yjfj1mzyl41ambb7by1apxlyiil1azk";
+    build = "build.rs";
+    features = mkFeatures (features."num_traits"."0.2.6" or {});
+  };
+  features_.num_traits."0.2.6" = deps: f: updateFeatures f (rec {
+    num_traits = fold recursiveUpdate {} [
+      { "0.2.6"."std" =
+        (f.num_traits."0.2.6"."std" or false) ||
+        (f.num_traits."0.2.6".default or false) ||
+        (num_traits."0.2.6"."default" or false); }
+      { "0.2.6".default = (f.num_traits."0.2.6".default or true); }
+    ];
+  }) [];
+
+
+# end
+# num_cpus-1.10.0
+
+  crates.num_cpus."1.10.0" = deps: { features?(features_.num_cpus."1.10.0" deps {}) }: buildRustCrate {
+    crateName = "num_cpus";
+    version = "1.10.0";
+    description = "Get the number of CPUs on a machine.";
+    authors = [ "Sean McArthur <sean@seanmonstar.com>" ];
+    sha256 = "1411jyxy1wd8d59mv7cf6ynkvvar92czmwhb9l2c1brdkxbbiqn7";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."num_cpus"."1.10.0"."libc"}" deps)
+    ]);
+  };
+  features_.num_cpus."1.10.0" = deps: f: updateFeatures f ({
+    libc."${deps.num_cpus."1.10.0".libc}".default = true;
+    num_cpus."1.10.0".default = (f.num_cpus."1.10.0".default or true);
+  }) [
+    (features_.libc."${deps."num_cpus"."1.10.0"."libc"}" deps)
+  ];
+
+
+# end
+# once_cell-0.1.8
+
+  crates.once_cell."0.1.8" = deps: { features?(features_.once_cell."0.1.8" deps {}) }: buildRustCrate {
+    crateName = "once_cell";
+    version = "0.1.8";
+    description = "Single assignment cells and lazy static values without macros.";
+    authors = [ "Aleksey Kladov <aleksey.kladov@gmail.com>" ];
+    sha256 = "1n1da1x3cf3qbq9a925pimy6i0r12gcicwqjxc63nfb2bnzkg074";
+    dependencies = mapFeatures features ([
+    ]
+      ++ (if features.once_cell."0.1.8".parking_lot or false then [ (crates.parking_lot."${deps."once_cell"."0.1.8".parking_lot}" deps) ] else []));
+    features = mkFeatures (features."once_cell"."0.1.8" or {});
+  };
+  features_.once_cell."0.1.8" = deps: f: updateFeatures f (rec {
+    once_cell = fold recursiveUpdate {} [
+      { "0.1.8"."parking_lot" =
+        (f.once_cell."0.1.8"."parking_lot" or false) ||
+        (f.once_cell."0.1.8".default or false) ||
+        (once_cell."0.1.8"."default" or false); }
+      { "0.1.8".default = (f.once_cell."0.1.8".default or true); }
+    ];
+    parking_lot."${deps.once_cell."0.1.8".parking_lot}".default = true;
+  }) [
+    (features_.parking_lot."${deps."once_cell"."0.1.8"."parking_lot"}" deps)
+  ];
+
+
+# end
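+#
+# once_cell above shows the optional-dependency pattern: parking_lot is only
+# appended to `dependencies` when the "parking_lot" feature is requested, so
+# the `if features.once_cell."0.1.8".parking_lot or false` guard stands in
+# for an `optional = true` dependency in Cargo.toml.
+#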
+# opener-0.3.2
+
+  crates.opener."0.3.2" = deps: { features?(features_.opener."0.3.2" deps {}) }: buildRustCrate {
+    crateName = "opener";
+    version = "0.3.2";
+    description = "Open a file or link using the system default program.";
+    authors = [ "Brian Bowman <seeker14491@gmail.com>" ];
+    sha256 = "1ql2snax07n3xxn4nz9r6d95rhrri66qy5s5zl9jfsdbs193hzcm";
+    dependencies = mapFeatures features ([
+      (crates."failure"."${deps."opener"."0.3.2"."failure"}" deps)
+      (crates."failure_derive"."${deps."opener"."0.3.2"."failure_derive"}" deps)
+    ])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."opener"."0.3.2"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.opener."0.3.2" = deps: f: updateFeatures f ({
+    failure."${deps.opener."0.3.2".failure}".default = true;
+    failure_derive."${deps.opener."0.3.2".failure_derive}".default = true;
+    opener."0.3.2".default = (f.opener."0.3.2".default or true);
+    winapi = fold recursiveUpdate {} [
+      { "${deps.opener."0.3.2".winapi}"."shellapi" = true; }
+      { "${deps.opener."0.3.2".winapi}".default = true; }
+    ];
+  }) [
+    (features_.failure."${deps."opener"."0.3.2"."failure"}" deps)
+    (features_.failure_derive."${deps."opener"."0.3.2"."failure_derive"}" deps)
+    (features_.winapi."${deps."opener"."0.3.2"."winapi"}" deps)
+  ];
+
+
+# end
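+#
+# opener above demonstrates target-conditional dependencies: the
+# `kernel == "windows"` guard plays the role of a
+# `[target.'cfg(windows)'.dependencies]` section, so winapi (with its
+# "shellapi" feature) is only pulled in when building for Windows.
+#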
+# openssl-0.10.20
+
+  crates.openssl."0.10.20" = deps: { features?(features_.openssl."0.10.20" deps {}) }: buildRustCrate {
+    crateName = "openssl";
+    version = "0.10.20";
+    description = "OpenSSL bindings";
+    authors = [ "Steven Fackler <sfackler@gmail.com>" ];
+    sha256 = "1y3zkq988vx48a4j0i23mr7vm1wy5w71yws2v6hyf4vb5iw3r5s5";
+    dependencies = mapFeatures features ([
+      (crates."bitflags"."${deps."openssl"."0.10.20"."bitflags"}" deps)
+      (crates."cfg_if"."${deps."openssl"."0.10.20"."cfg_if"}" deps)
+      (crates."foreign_types"."${deps."openssl"."0.10.20"."foreign_types"}" deps)
+      (crates."lazy_static"."${deps."openssl"."0.10.20"."lazy_static"}" deps)
+      (crates."libc"."${deps."openssl"."0.10.20"."libc"}" deps)
+      (crates."openssl_sys"."${deps."openssl"."0.10.20"."openssl_sys"}" deps)
+    ]);
+    features = mkFeatures (features."openssl"."0.10.20" or {});
+  };
+  features_.openssl."0.10.20" = deps: f: updateFeatures f (rec {
+    bitflags."${deps.openssl."0.10.20".bitflags}".default = true;
+    cfg_if."${deps.openssl."0.10.20".cfg_if}".default = true;
+    foreign_types."${deps.openssl."0.10.20".foreign_types}".default = true;
+    lazy_static."${deps.openssl."0.10.20".lazy_static}".default = true;
+    libc."${deps.openssl."0.10.20".libc}".default = true;
+    openssl."0.10.20".default = (f.openssl."0.10.20".default or true);
+    openssl_sys = fold recursiveUpdate {} [
+      { "${deps.openssl."0.10.20".openssl_sys}"."vendored" =
+        (f.openssl_sys."${deps.openssl."0.10.20".openssl_sys}"."vendored" or false) ||
+        (openssl."0.10.20"."vendored" or false) ||
+        (f."openssl"."0.10.20"."vendored" or false); }
+      { "${deps.openssl."0.10.20".openssl_sys}".default = true; }
+    ];
+  }) [
+    (features_.bitflags."${deps."openssl"."0.10.20"."bitflags"}" deps)
+    (features_.cfg_if."${deps."openssl"."0.10.20"."cfg_if"}" deps)
+    (features_.foreign_types."${deps."openssl"."0.10.20"."foreign_types"}" deps)
+    (features_.lazy_static."${deps."openssl"."0.10.20"."lazy_static"}" deps)
+    (features_.libc."${deps."openssl"."0.10.20"."libc"}" deps)
+    (features_.openssl_sys."${deps."openssl"."0.10.20"."openssl_sys"}" deps)
+  ];
+
+
+# end
+# openssl-probe-0.1.2
+
+  crates.openssl_probe."0.1.2" = deps: { features?(features_.openssl_probe."0.1.2" deps {}) }: buildRustCrate {
+    crateName = "openssl-probe";
+    version = "0.1.2";
+    description = "Tool for helping to find SSL certificate locations on the system for OpenSSL\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1a89fznx26vvaxyrxdvgf6iwai5xvs6xjvpjin68fgvrslv6n15a";
+  };
+  features_.openssl_probe."0.1.2" = deps: f: updateFeatures f ({
+    openssl_probe."0.1.2".default = (f.openssl_probe."0.1.2".default or true);
+  }) [];
+
+
+# end
+# openssl-src-111.2.1+1.1.1b
+
+  crates.openssl_src."111.2.1+1.1.1b" = deps: { features?(features_.openssl_src."111.2.1+1.1.1b" deps {}) }: buildRustCrate {
+    crateName = "openssl-src";
+    version = "111.2.1+1.1.1b";
+    description = "Source of OpenSSL and logic to build it.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0gfa29r16ds88a3sjcgkc2q5dkhgnxk58gly313r05xqj4zi2pxc";
+    dependencies = mapFeatures features ([
+      (crates."cc"."${deps."openssl_src"."111.2.1+1.1.1b"."cc"}" deps)
+    ]);
+  };
+  features_.openssl_src."111.2.1+1.1.1b" = deps: f: updateFeatures f ({
+    cc."${deps.openssl_src."111.2.1+1.1.1b".cc}".default = true;
+    openssl_src."111.2.1+1.1.1b".default = (f.openssl_src."111.2.1+1.1.1b".default or true);
+  }) [
+    (features_.cc."${deps."openssl_src"."111.2.1+1.1.1b"."cc"}" deps)
+  ];
+
+
+# end
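+#
+# The "+1.1.1b" suffix in openssl-src's version is semver build metadata:
+# 111.2.1 is the crate's own version and 1.1.1b the bundled OpenSSL release.
+# The full string, metadata included, is the attribute key that `deps`
+# lookups use throughout this file, so it must stay intact.
+#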
+# openssl-sys-0.9.43
+
+  crates.openssl_sys."0.9.43" = deps: { features?(features_.openssl_sys."0.9.43" deps {}) }: buildRustCrate {
+    crateName = "openssl-sys";
+    version = "0.9.43";
+    description = "FFI bindings to OpenSSL";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" "Steven Fackler <sfackler@gmail.com>" ];
+    sha256 = "1ip0f94jakr85pxjhrkg9w9lgiiy1yga3ckm6c8xb13klsr7ky9y";
+    build = "build/main.rs";
+    dependencies = mapFeatures features ([
+      (crates."libc"."${deps."openssl_sys"."0.9.43"."libc"}" deps)
+    ])
+      ++ (if abi == "msvc" then mapFeatures features ([]) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."cc"."${deps."openssl_sys"."0.9.43"."cc"}" deps)
+      (crates."pkg_config"."${deps."openssl_sys"."0.9.43"."pkg_config"}" deps)
+      (crates."rustc_version"."${deps."openssl_sys"."0.9.43"."rustc_version"}" deps)
+    ]
+      ++ (if features.openssl_sys."0.9.43".openssl-src or false then [ (crates.openssl_src."${deps."openssl_sys"."0.9.43".openssl_src}" deps) ] else []));
+    features = mkFeatures (features."openssl_sys"."0.9.43" or {});
+  };
+  features_.openssl_sys."0.9.43" = deps: f: updateFeatures f (rec {
+    cc."${deps.openssl_sys."0.9.43".cc}".default = true;
+    libc."${deps.openssl_sys."0.9.43".libc}".default = true;
+    openssl_src."${deps.openssl_sys."0.9.43".openssl_src}".default = true;
+    openssl_sys = fold recursiveUpdate {} [
+      { "0.9.43"."openssl-src" =
+        (f.openssl_sys."0.9.43"."openssl-src" or false) ||
+        (f.openssl_sys."0.9.43".vendored or false) ||
+        (openssl_sys."0.9.43"."vendored" or false); }
+      { "0.9.43".default = (f.openssl_sys."0.9.43".default or true); }
+    ];
+    pkg_config."${deps.openssl_sys."0.9.43".pkg_config}".default = true;
+    rustc_version."${deps.openssl_sys."0.9.43".rustc_version}".default = true;
+  }) [
+    (features_.libc."${deps."openssl_sys"."0.9.43"."libc"}" deps)
+    (features_.cc."${deps."openssl_sys"."0.9.43"."cc"}" deps)
+    (features_.openssl_src."${deps."openssl_sys"."0.9.43"."openssl_src"}" deps)
+    (features_.pkg_config."${deps."openssl_sys"."0.9.43"."pkg_config"}" deps)
+    (features_.rustc_version."${deps."openssl_sys"."0.9.43"."rustc_version"}" deps)
+  ];
+
+
+# end
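+#
+# openssl-sys above separates runtime `dependencies` from
+# `buildDependencies` (cc, pkg-config, rustc_version), the latter mirroring
+# Cargo's [build-dependencies] for build/main.rs. Note the chain in its
+# features_ function: "vendored" implies "openssl-src", which in turn gates
+# the optional openssl_src build dependency that builds OpenSSL from source.
+#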
+# parking_lot-0.7.1
+
+  crates.parking_lot."0.7.1" = deps: { features?(features_.parking_lot."0.7.1" deps {}) }: buildRustCrate {
+    crateName = "parking_lot";
+    version = "0.7.1";
+    description = "More compact and efficient implementations of the standard synchronization primitives.";
+    authors = [ "Amanieu d'Antras <amanieu@gmail.com>" ];
+    sha256 = "1qpb49xd176hqqabxdb48f1hvylfbf68rpz8yfrhw0x68ys0lkq1";
+    dependencies = mapFeatures features ([
+      (crates."lock_api"."${deps."parking_lot"."0.7.1"."lock_api"}" deps)
+      (crates."parking_lot_core"."${deps."parking_lot"."0.7.1"."parking_lot_core"}" deps)
+    ]);
+    features = mkFeatures (features."parking_lot"."0.7.1" or {});
+  };
+  features_.parking_lot."0.7.1" = deps: f: updateFeatures f (rec {
+    lock_api = fold recursiveUpdate {} [
+      { "${deps.parking_lot."0.7.1".lock_api}"."nightly" =
+        (f.lock_api."${deps.parking_lot."0.7.1".lock_api}"."nightly" or false) ||
+        (parking_lot."0.7.1"."nightly" or false) ||
+        (f."parking_lot"."0.7.1"."nightly" or false); }
+      { "${deps.parking_lot."0.7.1".lock_api}"."owning_ref" =
+        (f.lock_api."${deps.parking_lot."0.7.1".lock_api}"."owning_ref" or false) ||
+        (parking_lot."0.7.1"."owning_ref" or false) ||
+        (f."parking_lot"."0.7.1"."owning_ref" or false); }
+      { "${deps.parking_lot."0.7.1".lock_api}".default = true; }
+    ];
+    parking_lot = fold recursiveUpdate {} [
+      { "0.7.1"."owning_ref" =
+        (f.parking_lot."0.7.1"."owning_ref" or false) ||
+        (f.parking_lot."0.7.1".default or false) ||
+        (parking_lot."0.7.1"."default" or false); }
+      { "0.7.1".default = (f.parking_lot."0.7.1".default or true); }
+    ];
+    parking_lot_core = fold recursiveUpdate {} [
+      { "${deps.parking_lot."0.7.1".parking_lot_core}"."deadlock_detection" =
+        (f.parking_lot_core."${deps.parking_lot."0.7.1".parking_lot_core}"."deadlock_detection" or false) ||
+        (parking_lot."0.7.1"."deadlock_detection" or false) ||
+        (f."parking_lot"."0.7.1"."deadlock_detection" or false); }
+      { "${deps.parking_lot."0.7.1".parking_lot_core}"."nightly" =
+        (f.parking_lot_core."${deps.parking_lot."0.7.1".parking_lot_core}"."nightly" or false) ||
+        (parking_lot."0.7.1"."nightly" or false) ||
+        (f."parking_lot"."0.7.1"."nightly" or false); }
+      { "${deps.parking_lot."0.7.1".parking_lot_core}".default = true; }
+    ];
+  }) [
+    (features_.lock_api."${deps."parking_lot"."0.7.1"."lock_api"}" deps)
+    (features_.parking_lot_core."${deps."parking_lot"."0.7.1"."parking_lot_core"}" deps)
+  ];
+
+
+# end
+# parking_lot_core-0.4.0
+
+  crates.parking_lot_core."0.4.0" = deps: { features?(features_.parking_lot_core."0.4.0" deps {}) }: buildRustCrate {
+    crateName = "parking_lot_core";
+    version = "0.4.0";
+    description = "An advanced API for creating custom synchronization primitives.";
+    authors = [ "Amanieu d'Antras <amanieu@gmail.com>" ];
+    sha256 = "1mzk5i240ddvhwnz65hhjk4cq61z235g1n8bd7al4mg6vx437c16";
+    dependencies = mapFeatures features ([
+      (crates."rand"."${deps."parking_lot_core"."0.4.0"."rand"}" deps)
+      (crates."smallvec"."${deps."parking_lot_core"."0.4.0"."smallvec"}" deps)
+    ])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."parking_lot_core"."0.4.0"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."parking_lot_core"."0.4.0"."winapi"}" deps)
+    ]) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."rustc_version"."${deps."parking_lot_core"."0.4.0"."rustc_version"}" deps)
+    ]);
+    features = mkFeatures (features."parking_lot_core"."0.4.0" or {});
+  };
+  features_.parking_lot_core."0.4.0" = deps: f: updateFeatures f (rec {
+    libc."${deps.parking_lot_core."0.4.0".libc}".default = true;
+    parking_lot_core = fold recursiveUpdate {} [
+      { "0.4.0"."backtrace" =
+        (f.parking_lot_core."0.4.0"."backtrace" or false) ||
+        (f.parking_lot_core."0.4.0".deadlock_detection or false) ||
+        (parking_lot_core."0.4.0"."deadlock_detection" or false); }
+      { "0.4.0"."petgraph" =
+        (f.parking_lot_core."0.4.0"."petgraph" or false) ||
+        (f.parking_lot_core."0.4.0".deadlock_detection or false) ||
+        (parking_lot_core."0.4.0"."deadlock_detection" or false); }
+      { "0.4.0"."thread-id" =
+        (f.parking_lot_core."0.4.0"."thread-id" or false) ||
+        (f.parking_lot_core."0.4.0".deadlock_detection or false) ||
+        (parking_lot_core."0.4.0"."deadlock_detection" or false); }
+      { "0.4.0".default = (f.parking_lot_core."0.4.0".default or true); }
+    ];
+    rand."${deps.parking_lot_core."0.4.0".rand}".default = true;
+    rustc_version."${deps.parking_lot_core."0.4.0".rustc_version}".default = true;
+    smallvec."${deps.parking_lot_core."0.4.0".smallvec}".default = true;
+    winapi = fold recursiveUpdate {} [
+      { "${deps.parking_lot_core."0.4.0".winapi}"."errhandlingapi" = true; }
+      { "${deps.parking_lot_core."0.4.0".winapi}"."handleapi" = true; }
+      { "${deps.parking_lot_core."0.4.0".winapi}"."minwindef" = true; }
+      { "${deps.parking_lot_core."0.4.0".winapi}"."ntstatus" = true; }
+      { "${deps.parking_lot_core."0.4.0".winapi}"."winbase" = true; }
+      { "${deps.parking_lot_core."0.4.0".winapi}"."winerror" = true; }
+      { "${deps.parking_lot_core."0.4.0".winapi}"."winnt" = true; }
+      { "${deps.parking_lot_core."0.4.0".winapi}".default = true; }
+    ];
+  }) [
+    (features_.rand."${deps."parking_lot_core"."0.4.0"."rand"}" deps)
+    (features_.smallvec."${deps."parking_lot_core"."0.4.0"."smallvec"}" deps)
+    (features_.rustc_version."${deps."parking_lot_core"."0.4.0"."rustc_version"}" deps)
+    (features_.libc."${deps."parking_lot_core"."0.4.0"."libc"}" deps)
+    (features_.winapi."${deps."parking_lot_core"."0.4.0"."winapi"}" deps)
+  ];
+
+
+# end
+# pkg-config-0.3.14
+
+  crates.pkg_config."0.3.14" = deps: { features?(features_.pkg_config."0.3.14" deps {}) }: buildRustCrate {
+    crateName = "pkg-config";
+    version = "0.3.14";
+    description = "A library to run the pkg-config system tool at build time in order to be used in\nCargo build scripts.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0207fsarrm412j0dh87lfcas72n8mxar7q3mgflsbsrqnb140sv6";
+  };
+  features_.pkg_config."0.3.14" = deps: f: updateFeatures f ({
+    pkg_config."0.3.14".default = (f.pkg_config."0.3.14".default or true);
+  }) [];
+
+
+# end
+# quote-0.6.12
+
+  crates.quote."0.6.12" = deps: { features?(features_.quote."0.6.12" deps {}) }: buildRustCrate {
+    crateName = "quote";
+    version = "0.6.12";
+    description = "Quasi-quoting macro quote!(...)";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1ckd2d2sy0hrwrqcr47dn0n3hyh7ygpc026l8xaycccyg27mihv9";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."quote"."0.6.12"."proc_macro2"}" deps)
+    ]);
+    features = mkFeatures (features."quote"."0.6.12" or {});
+  };
+  features_.quote."0.6.12" = deps: f: updateFeatures f (rec {
+    proc_macro2 = fold recursiveUpdate {} [
+      { "${deps.quote."0.6.12".proc_macro2}"."proc-macro" =
+        (f.proc_macro2."${deps.quote."0.6.12".proc_macro2}"."proc-macro" or false) ||
+        (quote."0.6.12"."proc-macro" or false) ||
+        (f."quote"."0.6.12"."proc-macro" or false); }
+      { "${deps.quote."0.6.12".proc_macro2}".default = (f.proc_macro2."${deps.quote."0.6.12".proc_macro2}".default or false); }
+    ];
+    quote = fold recursiveUpdate {} [
+      { "0.6.12"."proc-macro" =
+        (f.quote."0.6.12"."proc-macro" or false) ||
+        (f.quote."0.6.12".default or false) ||
+        (quote."0.6.12"."default" or false); }
+      { "0.6.12".default = (f.quote."0.6.12".default or true); }
+    ];
+  }) [
+    (features_.proc_macro2."${deps."quote"."0.6.12"."proc_macro2"}" deps)
+  ];
+
+
+# end
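+#
+# In quote's features_ function above, proc_macro2's default flag is set to
+# `(f.proc_macro2.<dep>.default or false)` instead of `true`: this is how a
+# `default-features = false` dependency is encoded, leaving proc-macro2's
+# default features off unless another dependent turns them back on.
+#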
+# rand-0.6.5
+
+  crates.rand."0.6.5" = deps: { features?(features_.rand."0.6.5" deps {}) }: buildRustCrate {
+    crateName = "rand";
+    version = "0.6.5";
+    description = "Random number generators and other randomness functionality.\n";
+    authors = [ "The Rand Project Developers" "The Rust Project Developers" ];
+    sha256 = "0zbck48159aj8zrwzf80sd9xxh96w4f4968nshwjpysjvflimvgb";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."rand_chacha"."${deps."rand"."0.6.5"."rand_chacha"}" deps)
+      (crates."rand_core"."${deps."rand"."0.6.5"."rand_core"}" deps)
+      (crates."rand_hc"."${deps."rand"."0.6.5"."rand_hc"}" deps)
+      (crates."rand_isaac"."${deps."rand"."0.6.5"."rand_isaac"}" deps)
+      (crates."rand_jitter"."${deps."rand"."0.6.5"."rand_jitter"}" deps)
+      (crates."rand_pcg"."${deps."rand"."0.6.5"."rand_pcg"}" deps)
+      (crates."rand_xorshift"."${deps."rand"."0.6.5"."rand_xorshift"}" deps)
+    ]
+      ++ (if features.rand."0.6.5".rand_os or false then [ (crates.rand_os."${deps."rand"."0.6.5".rand_os}" deps) ] else []))
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."rand"."0.6.5"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."rand"."0.6.5"."winapi"}" deps)
+    ]) else []);
+
+    buildDependencies = mapFeatures features ([
+      (crates."autocfg"."${deps."rand"."0.6.5"."autocfg"}" deps)
+    ]);
+    features = mkFeatures (features."rand"."0.6.5" or {});
+  };
+  features_.rand."0.6.5" = deps: f: updateFeatures f (rec {
+    autocfg."${deps.rand."0.6.5".autocfg}".default = true;
+    libc."${deps.rand."0.6.5".libc}".default = (f.libc."${deps.rand."0.6.5".libc}".default or false);
+    rand = fold recursiveUpdate {} [
+      { "0.6.5"."alloc" =
+        (f.rand."0.6.5"."alloc" or false) ||
+        (f.rand."0.6.5".std or false) ||
+        (rand."0.6.5"."std" or false); }
+      { "0.6.5"."packed_simd" =
+        (f.rand."0.6.5"."packed_simd" or false) ||
+        (f.rand."0.6.5".simd_support or false) ||
+        (rand."0.6.5"."simd_support" or false); }
+      { "0.6.5"."rand_os" =
+        (f.rand."0.6.5"."rand_os" or false) ||
+        (f.rand."0.6.5".std or false) ||
+        (rand."0.6.5"."std" or false); }
+      { "0.6.5"."simd_support" =
+        (f.rand."0.6.5"."simd_support" or false) ||
+        (f.rand."0.6.5".nightly or false) ||
+        (rand."0.6.5"."nightly" or false); }
+      { "0.6.5"."std" =
+        (f.rand."0.6.5"."std" or false) ||
+        (f.rand."0.6.5".default or false) ||
+        (rand."0.6.5"."default" or false); }
+      { "0.6.5".default = (f.rand."0.6.5".default or true); }
+    ];
+    rand_chacha."${deps.rand."0.6.5".rand_chacha}".default = true;
+    rand_core = fold recursiveUpdate {} [
+      { "${deps.rand."0.6.5".rand_core}"."alloc" =
+        (f.rand_core."${deps.rand."0.6.5".rand_core}"."alloc" or false) ||
+        (rand."0.6.5"."alloc" or false) ||
+        (f."rand"."0.6.5"."alloc" or false); }
+      { "${deps.rand."0.6.5".rand_core}"."serde1" =
+        (f.rand_core."${deps.rand."0.6.5".rand_core}"."serde1" or false) ||
+        (rand."0.6.5"."serde1" or false) ||
+        (f."rand"."0.6.5"."serde1" or false); }
+      { "${deps.rand."0.6.5".rand_core}"."std" =
+        (f.rand_core."${deps.rand."0.6.5".rand_core}"."std" or false) ||
+        (rand."0.6.5"."std" or false) ||
+        (f."rand"."0.6.5"."std" or false); }
+      { "${deps.rand."0.6.5".rand_core}".default = true; }
+    ];
+    rand_hc."${deps.rand."0.6.5".rand_hc}".default = true;
+    rand_isaac = fold recursiveUpdate {} [
+      { "${deps.rand."0.6.5".rand_isaac}"."serde1" =
+        (f.rand_isaac."${deps.rand."0.6.5".rand_isaac}"."serde1" or false) ||
+        (rand."0.6.5"."serde1" or false) ||
+        (f."rand"."0.6.5"."serde1" or false); }
+      { "${deps.rand."0.6.5".rand_isaac}".default = true; }
+    ];
+    rand_jitter = fold recursiveUpdate {} [
+      { "${deps.rand."0.6.5".rand_jitter}"."std" =
+        (f.rand_jitter."${deps.rand."0.6.5".rand_jitter}"."std" or false) ||
+        (rand."0.6.5"."std" or false) ||
+        (f."rand"."0.6.5"."std" or false); }
+      { "${deps.rand."0.6.5".rand_jitter}".default = true; }
+    ];
+    rand_os = fold recursiveUpdate {} [
+      { "${deps.rand."0.6.5".rand_os}"."stdweb" =
+        (f.rand_os."${deps.rand."0.6.5".rand_os}"."stdweb" or false) ||
+        (rand."0.6.5"."stdweb" or false) ||
+        (f."rand"."0.6.5"."stdweb" or false); }
+      { "${deps.rand."0.6.5".rand_os}"."wasm-bindgen" =
+        (f.rand_os."${deps.rand."0.6.5".rand_os}"."wasm-bindgen" or false) ||
+        (rand."0.6.5"."wasm-bindgen" or false) ||
+        (f."rand"."0.6.5"."wasm-bindgen" or false); }
+      { "${deps.rand."0.6.5".rand_os}".default = true; }
+    ];
+    rand_pcg."${deps.rand."0.6.5".rand_pcg}".default = true;
+    rand_xorshift = fold recursiveUpdate {} [
+      { "${deps.rand."0.6.5".rand_xorshift}"."serde1" =
+        (f.rand_xorshift."${deps.rand."0.6.5".rand_xorshift}"."serde1" or false) ||
+        (rand."0.6.5"."serde1" or false) ||
+        (f."rand"."0.6.5"."serde1" or false); }
+      { "${deps.rand."0.6.5".rand_xorshift}".default = true; }
+    ];
+    winapi = fold recursiveUpdate {} [
+      { "${deps.rand."0.6.5".winapi}"."minwindef" = true; }
+      { "${deps.rand."0.6.5".winapi}"."ntsecapi" = true; }
+      { "${deps.rand."0.6.5".winapi}"."profileapi" = true; }
+      { "${deps.rand."0.6.5".winapi}"."winnt" = true; }
+      { "${deps.rand."0.6.5".winapi}".default = true; }
+    ];
+  }) [
+    (features_.rand_chacha."${deps."rand"."0.6.5"."rand_chacha"}" deps)
+    (features_.rand_core."${deps."rand"."0.6.5"."rand_core"}" deps)
+    (features_.rand_hc."${deps."rand"."0.6.5"."rand_hc"}" deps)
+    (features_.rand_isaac."${deps."rand"."0.6.5"."rand_isaac"}" deps)
+    (features_.rand_jitter."${deps."rand"."0.6.5"."rand_jitter"}" deps)
+    (features_.rand_os."${deps."rand"."0.6.5"."rand_os"}" deps)
+    (features_.rand_pcg."${deps."rand"."0.6.5"."rand_pcg"}" deps)
+    (features_.rand_xorshift."${deps."rand"."0.6.5"."rand_xorshift"}" deps)
+    (features_.autocfg."${deps."rand"."0.6.5"."autocfg"}" deps)
+    (features_.libc."${deps."rand"."0.6.5"."libc"}" deps)
+    (features_.winapi."${deps."rand"."0.6.5"."winapi"}" deps)
+  ];
+
+
+# end
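+#
+# rand above combines every pattern in this file: an optional rand_os
+# dependency, unix/windows target guards, and an autocfg build dependency.
+# The list following the attrset passed to `updateFeatures` names the
+# features_ functions of rand's dependencies; updateFeatures applies each of
+# them, so feature resolution recurses through the whole dependency graph.
+#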
+# rand_chacha-0.1.1
+
+  crates.rand_chacha."0.1.1" = deps: { features?(features_.rand_chacha."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "rand_chacha";
+    version = "0.1.1";
+    description = "ChaCha random number generator\n";
+    authors = [ "The Rand Project Developers" "The Rust Project Developers" ];
+    sha256 = "0xnxm4mjd7wjnh18zxc1yickw58axbycp35ciraplqdfwn1gffwi";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rand_chacha"."0.1.1"."rand_core"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."autocfg"."${deps."rand_chacha"."0.1.1"."autocfg"}" deps)
+    ]);
+  };
+  features_.rand_chacha."0.1.1" = deps: f: updateFeatures f ({
+    autocfg."${deps.rand_chacha."0.1.1".autocfg}".default = true;
+    rand_chacha."0.1.1".default = (f.rand_chacha."0.1.1".default or true);
+    rand_core."${deps.rand_chacha."0.1.1".rand_core}".default = (f.rand_core."${deps.rand_chacha."0.1.1".rand_core}".default or false);
+  }) [
+    (features_.rand_core."${deps."rand_chacha"."0.1.1"."rand_core"}" deps)
+    (features_.autocfg."${deps."rand_chacha"."0.1.1"."autocfg"}" deps)
+  ];
+
+
+# end
+# rand_hc-0.1.0
+
+  crates.rand_hc."0.1.0" = deps: { features?(features_.rand_hc."0.1.0" deps {}) }: buildRustCrate {
+    crateName = "rand_hc";
+    version = "0.1.0";
+    description = "HC128 random number generator\n";
+    authors = [ "The Rand Project Developers" ];
+    sha256 = "05agb75j87yp7y1zk8yf7bpm66hc0673r3dlypn0kazynr6fdgkz";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rand_hc"."0.1.0"."rand_core"}" deps)
+    ]);
+  };
+  features_.rand_hc."0.1.0" = deps: f: updateFeatures f ({
+    rand_core."${deps.rand_hc."0.1.0".rand_core}".default = (f.rand_core."${deps.rand_hc."0.1.0".rand_core}".default or false);
+    rand_hc."0.1.0".default = (f.rand_hc."0.1.0".default or true);
+  }) [
+    (features_.rand_core."${deps."rand_hc"."0.1.0"."rand_core"}" deps)
+  ];
+
+
+# end
+# rand_isaac-0.1.1
+
+  crates.rand_isaac."0.1.1" = deps: { features?(features_.rand_isaac."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "rand_isaac";
+    version = "0.1.1";
+    description = "ISAAC random number generator\n";
+    authors = [ "The Rand Project Developers" "The Rust Project Developers" ];
+    sha256 = "10hhdh5b5sa03s6b63y9bafm956jwilx41s71jbrzl63ccx8lxdq";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rand_isaac"."0.1.1"."rand_core"}" deps)
+    ]);
+    features = mkFeatures (features."rand_isaac"."0.1.1" or {});
+  };
+  features_.rand_isaac."0.1.1" = deps: f: updateFeatures f (rec {
+    rand_core = fold recursiveUpdate {} [
+      { "${deps.rand_isaac."0.1.1".rand_core}"."serde1" =
+        (f.rand_core."${deps.rand_isaac."0.1.1".rand_core}"."serde1" or false) ||
+        (rand_isaac."0.1.1"."serde1" or false) ||
+        (f."rand_isaac"."0.1.1"."serde1" or false); }
+      { "${deps.rand_isaac."0.1.1".rand_core}".default = (f.rand_core."${deps.rand_isaac."0.1.1".rand_core}".default or false); }
+    ];
+    rand_isaac = fold recursiveUpdate {} [
+      { "0.1.1"."serde" =
+        (f.rand_isaac."0.1.1"."serde" or false) ||
+        (f.rand_isaac."0.1.1".serde1 or false) ||
+        (rand_isaac."0.1.1"."serde1" or false); }
+      { "0.1.1"."serde_derive" =
+        (f.rand_isaac."0.1.1"."serde_derive" or false) ||
+        (f.rand_isaac."0.1.1".serde1 or false) ||
+        (rand_isaac."0.1.1"."serde1" or false); }
+      { "0.1.1".default = (f.rand_isaac."0.1.1".default or true); }
+    ];
+  }) [
+    (features_.rand_core."${deps."rand_isaac"."0.1.1"."rand_core"}" deps)
+  ];
+
+
+# end
+# rand_jitter-0.1.3
+
+  crates.rand_jitter."0.1.3" = deps: { features?(features_.rand_jitter."0.1.3" deps {}) }: buildRustCrate {
+    crateName = "rand_jitter";
+    version = "0.1.3";
+    description = "Random number generator based on timing jitter";
+    authors = [ "The Rand Project Developers" ];
+    sha256 = "1cb4q73rmh1inlx3liy6rabapcqh6p6c1plsd2lxw6dmi67d1qc3";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rand_jitter"."0.1.3"."rand_core"}" deps)
+    ])
+      ++ (if kernel == "darwin" || kernel == "ios" then mapFeatures features ([
+      (crates."libc"."${deps."rand_jitter"."0.1.3"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."rand_jitter"."0.1.3"."winapi"}" deps)
+    ]) else []);
+    features = mkFeatures (features."rand_jitter"."0.1.3" or {});
+  };
+  features_.rand_jitter."0.1.3" = deps: f: updateFeatures f (rec {
+    libc."${deps.rand_jitter."0.1.3".libc}".default = true;
+    rand_core = fold recursiveUpdate {} [
+      { "${deps.rand_jitter."0.1.3".rand_core}"."std" =
+        (f.rand_core."${deps.rand_jitter."0.1.3".rand_core}"."std" or false) ||
+        (rand_jitter."0.1.3"."std" or false) ||
+        (f."rand_jitter"."0.1.3"."std" or false); }
+      { "${deps.rand_jitter."0.1.3".rand_core}".default = true; }
+    ];
+    rand_jitter."0.1.3".default = (f.rand_jitter."0.1.3".default or true);
+    winapi = fold recursiveUpdate {} [
+      { "${deps.rand_jitter."0.1.3".winapi}"."profileapi" = true; }
+      { "${deps.rand_jitter."0.1.3".winapi}".default = true; }
+    ];
+  }) [
+    (features_.rand_core."${deps."rand_jitter"."0.1.3"."rand_core"}" deps)
+    (features_.libc."${deps."rand_jitter"."0.1.3"."libc"}" deps)
+    (features_.winapi."${deps."rand_jitter"."0.1.3"."winapi"}" deps)
+  ];
+
+
+# end
+# rand_pcg-0.1.2
+
+  crates.rand_pcg."0.1.2" = deps: { features?(features_.rand_pcg."0.1.2" deps {}) }: buildRustCrate {
+    crateName = "rand_pcg";
+    version = "0.1.2";
+    description = "Selected PCG random number generators\n";
+    authors = [ "The Rand Project Developers" ];
+    sha256 = "04qgi2ai2z42li5h4aawvxbpnlqyjfnipz9d6k73mdnl6p1xq938";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rand_pcg"."0.1.2"."rand_core"}" deps)
+    ]);
+
+    buildDependencies = mapFeatures features ([
+      (crates."autocfg"."${deps."rand_pcg"."0.1.2"."autocfg"}" deps)
+    ]);
+    features = mkFeatures (features."rand_pcg"."0.1.2" or {});
+  };
+  features_.rand_pcg."0.1.2" = deps: f: updateFeatures f (rec {
+    autocfg."${deps.rand_pcg."0.1.2".autocfg}".default = true;
+    rand_core."${deps.rand_pcg."0.1.2".rand_core}".default = true;
+    rand_pcg = fold recursiveUpdate {} [
+      { "0.1.2"."serde" =
+        (f.rand_pcg."0.1.2"."serde" or false) ||
+        (f.rand_pcg."0.1.2".serde1 or false) ||
+        (rand_pcg."0.1.2"."serde1" or false); }
+      { "0.1.2"."serde_derive" =
+        (f.rand_pcg."0.1.2"."serde_derive" or false) ||
+        (f.rand_pcg."0.1.2".serde1 or false) ||
+        (rand_pcg."0.1.2"."serde1" or false); }
+      { "0.1.2".default = (f.rand_pcg."0.1.2".default or true); }
+    ];
+  }) [
+    (features_.rand_core."${deps."rand_pcg"."0.1.2"."rand_core"}" deps)
+    (features_.autocfg."${deps."rand_pcg"."0.1.2"."autocfg"}" deps)
+  ];
+
+
+# end
+# rand_xorshift-0.1.1
+
+  crates.rand_xorshift."0.1.1" = deps: { features?(features_.rand_xorshift."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "rand_xorshift";
+    version = "0.1.1";
+    description = "Xorshift random number generator\n";
+    authors = [ "The Rand Project Developers" "The Rust Project Developers" ];
+    sha256 = "0v365c4h4lzxwz5k5kp9m0661s0sss7ylv74if0xb4svis9sswnn";
+    dependencies = mapFeatures features ([
+      (crates."rand_core"."${deps."rand_xorshift"."0.1.1"."rand_core"}" deps)
+    ]);
+    features = mkFeatures (features."rand_xorshift"."0.1.1" or {});
+  };
+  features_.rand_xorshift."0.1.1" = deps: f: updateFeatures f (rec {
+    rand_core."${deps.rand_xorshift."0.1.1".rand_core}".default = (f.rand_core."${deps.rand_xorshift."0.1.1".rand_core}".default or false);
+    rand_xorshift = fold recursiveUpdate {} [
+      { "0.1.1"."serde" =
+        (f.rand_xorshift."0.1.1"."serde" or false) ||
+        (f.rand_xorshift."0.1.1".serde1 or false) ||
+        (rand_xorshift."0.1.1"."serde1" or false); }
+      { "0.1.1"."serde_derive" =
+        (f.rand_xorshift."0.1.1"."serde_derive" or false) ||
+        (f.rand_xorshift."0.1.1".serde1 or false) ||
+        (rand_xorshift."0.1.1"."serde1" or false); }
+      { "0.1.1".default = (f.rand_xorshift."0.1.1".default or true); }
+    ];
+  }) [
+    (features_.rand_core."${deps."rand_xorshift"."0.1.1"."rand_core"}" deps)
+  ];
+
+
+# end
+# rawpointer-0.1.0
+
+  crates.rawpointer."0.1.0" = deps: { features?(features_.rawpointer."0.1.0" deps {}) }: buildRustCrate {
+    crateName = "rawpointer";
+    version = "0.1.0";
+    description = "Extra methods for raw pointers.\n\nFor example `.post_inc()` and `.pre_dec()` (c.f. `ptr++` and `--ptr`) and\n`ptrdistance`.\n";
+    authors = [ "bluss" ];
+    sha256 = "0hblv2cv310ixf5f1jw4nk9w5pb95wh4dwqyjv07g2xrshbw6j04";
+  };
+  features_.rawpointer."0.1.0" = deps: f: updateFeatures f ({
+    rawpointer."0.1.0".default = (f.rawpointer."0.1.0".default or true);
+  }) [];
+
+
+# end
+# redox_syscall-0.1.54
+
+  crates.redox_syscall."0.1.54" = deps: { features?(features_.redox_syscall."0.1.54" deps {}) }: buildRustCrate {
+    crateName = "redox_syscall";
+    version = "0.1.54";
+    description = "A Rust library to access raw Redox system calls";
+    authors = [ "Jeremy Soller <jackpot51@gmail.com>" ];
+    sha256 = "1ndcp7brnvii87ndcd34fk846498r07iznphkslcy0shic9cp4rr";
+    libName = "syscall";
+  };
+  features_.redox_syscall."0.1.54" = deps: f: updateFeatures f ({
+    redox_syscall."0.1.54".default = (f.redox_syscall."0.1.54".default or true);
+  }) [];
+
+
+# end
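+#
+# redox_syscall above sets `libName = "syscall"` because the package name
+# differs from the library target name; buildRustCrate needs the latter so
+# the produced library matches what dependents link against.
+#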
+# regex-1.1.6
+
+  crates.regex."1.1.6" = deps: { features?(features_.regex."1.1.6" deps {}) }: buildRustCrate {
+    crateName = "regex";
+    version = "1.1.6";
+    description = "An implementation of regular expressions for Rust. This implementation uses\nfinite automata and guarantees linear time matching on all inputs.\n";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1yynvabg03m5f65qxcw70qckkfjwi9xyfpjdp6yq7pk0xf0ydc0b";
+    dependencies = mapFeatures features ([
+      (crates."aho_corasick"."${deps."regex"."1.1.6"."aho_corasick"}" deps)
+      (crates."memchr"."${deps."regex"."1.1.6"."memchr"}" deps)
+      (crates."regex_syntax"."${deps."regex"."1.1.6"."regex_syntax"}" deps)
+      (crates."thread_local"."${deps."regex"."1.1.6"."thread_local"}" deps)
+      (crates."utf8_ranges"."${deps."regex"."1.1.6"."utf8_ranges"}" deps)
+    ]);
+    features = mkFeatures (features."regex"."1.1.6" or {});
+  };
+  features_.regex."1.1.6" = deps: f: updateFeatures f (rec {
+    aho_corasick."${deps.regex."1.1.6".aho_corasick}".default = true;
+    memchr."${deps.regex."1.1.6".memchr}".default = true;
+    regex = fold recursiveUpdate {} [
+      { "1.1.6"."pattern" =
+        (f.regex."1.1.6"."pattern" or false) ||
+        (f.regex."1.1.6".unstable or false) ||
+        (regex."1.1.6"."unstable" or false); }
+      { "1.1.6"."use_std" =
+        (f.regex."1.1.6"."use_std" or false) ||
+        (f.regex."1.1.6".default or false) ||
+        (regex."1.1.6"."default" or false); }
+      { "1.1.6".default = (f.regex."1.1.6".default or true); }
+    ];
+    regex_syntax."${deps.regex."1.1.6".regex_syntax}".default = true;
+    thread_local."${deps.regex."1.1.6".thread_local}".default = true;
+    utf8_ranges."${deps.regex."1.1.6".utf8_ranges}".default = true;
+  }) [
+    (features_.aho_corasick."${deps."regex"."1.1.6"."aho_corasick"}" deps)
+    (features_.memchr."${deps."regex"."1.1.6"."memchr"}" deps)
+    (features_.regex_syntax."${deps."regex"."1.1.6"."regex_syntax"}" deps)
+    (features_.thread_local."${deps."regex"."1.1.6"."thread_local"}" deps)
+    (features_.utf8_ranges."${deps."regex"."1.1.6"."utf8_ranges"}" deps)
+  ];
+
+
+# end
+# regex-syntax-0.6.6
+
+  crates.regex_syntax."0.6.6" = deps: { features?(features_.regex_syntax."0.6.6" deps {}) }: buildRustCrate {
+    crateName = "regex-syntax";
+    version = "0.6.6";
+    description = "A regular expression parser.";
+    authors = [ "The Rust Project Developers" ];
+    sha256 = "1cjrdc3affa3rjfaxkp91xnf9k0fsqn9z4xqc280vv39nvrl8p8b";
+    dependencies = mapFeatures features ([
+      (crates."ucd_util"."${deps."regex_syntax"."0.6.6"."ucd_util"}" deps)
+    ]);
+  };
+  features_.regex_syntax."0.6.6" = deps: f: updateFeatures f ({
+    regex_syntax."0.6.6".default = (f.regex_syntax."0.6.6".default or true);
+    ucd_util."${deps.regex_syntax."0.6.6".ucd_util}".default = true;
+  }) [
+    (features_.ucd_util."${deps."regex_syntax"."0.6.6"."ucd_util"}" deps)
+  ];
+
+
+# end
+# rustc-demangle-0.1.14
+
+  crates.rustc_demangle."0.1.14" = deps: { features?(features_.rustc_demangle."0.1.14" deps {}) }: buildRustCrate {
+    crateName = "rustc-demangle";
+    version = "0.1.14";
+    description = "Rust compiler symbol demangling.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "07vl0ms3a27fpry9kh9piv08w7d51i5m7bgphk7pw4jygwzdy31f";
+    dependencies = mapFeatures features ([]);
+    features = mkFeatures (features."rustc_demangle"."0.1.14" or {});
+  };
+  features_.rustc_demangle."0.1.14" = deps: f: updateFeatures f (rec {
+    rustc_demangle = fold recursiveUpdate {} [
+      { "0.1.14"."compiler_builtins" =
+        (f.rustc_demangle."0.1.14"."compiler_builtins" or false) ||
+        (f.rustc_demangle."0.1.14".rustc-dep-of-std or false) ||
+        (rustc_demangle."0.1.14"."rustc-dep-of-std" or false); }
+      { "0.1.14"."core" =
+        (f.rustc_demangle."0.1.14"."core" or false) ||
+        (f.rustc_demangle."0.1.14".rustc-dep-of-std or false) ||
+        (rustc_demangle."0.1.14"."rustc-dep-of-std" or false); }
+      { "0.1.14".default = (f.rustc_demangle."0.1.14".default or true); }
+    ];
+  }) [];
+
+
+# end
+# rustc-workspace-hack-1.0.0
+
+  crates.rustc_workspace_hack."1.0.0" = deps: { features?(features_.rustc_workspace_hack."1.0.0" deps {}) }: buildRustCrate {
+    crateName = "rustc-workspace-hack";
+    version = "1.0.0";
+    description = "Hack for the compiler's own build system\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "0arpdp472j4lrwxbmf4z21d8kh95rbbphnzccf605pqq2rvczv3p";
+  };
+  features_.rustc_workspace_hack."1.0.0" = deps: f: updateFeatures f ({
+    rustc_workspace_hack."1.0.0".default = (f.rustc_workspace_hack."1.0.0".default or true);
+  }) [];
+
+
+# end
+# rustc_version-0.2.3
+
+  crates.rustc_version."0.2.3" = deps: { features?(features_.rustc_version."0.2.3" deps {}) }: buildRustCrate {
+    crateName = "rustc_version";
+    version = "0.2.3";
+    description = "A library for querying the version of a installed rustc compiler";
+    authors = [ "Marvin Löbel <loebel.marvin@gmail.com>" ];
+    sha256 = "0rgwzbgs3i9fqjm1p4ra3n7frafmpwl29c8lw85kv1rxn7n2zaa7";
+    dependencies = mapFeatures features ([
+      (crates."semver"."${deps."rustc_version"."0.2.3"."semver"}" deps)
+    ]);
+  };
+  features_.rustc_version."0.2.3" = deps: f: updateFeatures f ({
+    rustc_version."0.2.3".default = (f.rustc_version."0.2.3".default or true);
+    semver."${deps.rustc_version."0.2.3".semver}".default = true;
+  }) [
+    (features_.semver."${deps."rustc_version"."0.2.3"."semver"}" deps)
+  ];
+
+
+# end
+# rustfix-0.4.5
+
+  crates.rustfix."0.4.5" = deps: { features?(features_.rustfix."0.4.5" deps {}) }: buildRustCrate {
+    crateName = "rustfix";
+    version = "0.4.5";
+    description = "Automatically apply the suggestions made by rustc";
+    authors = [ "Pascal Hertleif <killercup@gmail.com>" "Oliver Schneider <oli-obk@users.noreply.github.com>" ];
+    sha256 = "16nz3wbxspl6awwy3k3ym8yqiyq1jad82m2cf8mrz5h3arfp208l";
+    dependencies = mapFeatures features ([
+      (crates."failure"."${deps."rustfix"."0.4.5"."failure"}" deps)
+      (crates."log"."${deps."rustfix"."0.4.5"."log"}" deps)
+      (crates."serde"."${deps."rustfix"."0.4.5"."serde"}" deps)
+      (crates."serde_derive"."${deps."rustfix"."0.4.5"."serde_derive"}" deps)
+      (crates."serde_json"."${deps."rustfix"."0.4.5"."serde_json"}" deps)
+    ]);
+  };
+  features_.rustfix."0.4.5" = deps: f: updateFeatures f ({
+    failure."${deps.rustfix."0.4.5".failure}".default = true;
+    log."${deps.rustfix."0.4.5".log}".default = true;
+    rustfix."0.4.5".default = (f.rustfix."0.4.5".default or true);
+    serde."${deps.rustfix."0.4.5".serde}".default = true;
+    serde_derive."${deps.rustfix."0.4.5".serde_derive}".default = true;
+    serde_json."${deps.rustfix."0.4.5".serde_json}".default = true;
+  }) [
+    (features_.failure."${deps."rustfix"."0.4.5"."failure"}" deps)
+    (features_.log."${deps."rustfix"."0.4.5"."log"}" deps)
+    (features_.serde."${deps."rustfix"."0.4.5"."serde"}" deps)
+    (features_.serde_derive."${deps."rustfix"."0.4.5"."serde_derive"}" deps)
+    (features_.serde_json."${deps."rustfix"."0.4.5"."serde_json"}" deps)
+  ];
+
+
+# end
+# same-file-1.0.4
+
+  crates.same_file."1.0.4" = deps: { features?(features_.same_file."1.0.4" deps {}) }: buildRustCrate {
+    crateName = "same-file";
+    version = "1.0.4";
+    description = "A simple crate for determining whether two file paths point to the same file.\n";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "1zs244ssl381cqlnh2g42g3i60qip4z72i26z44d6kas3y3gy77q";
+    dependencies = (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi_util"."${deps."same_file"."1.0.4"."winapi_util"}" deps)
+    ]) else []);
+  };
+  features_.same_file."1.0.4" = deps: f: updateFeatures f ({
+    same_file."1.0.4".default = (f.same_file."1.0.4".default or true);
+    winapi_util."${deps.same_file."1.0.4".winapi_util}".default = true;
+  }) [
+    (features_.winapi_util."${deps."same_file"."1.0.4"."winapi_util"}" deps)
+  ];
+
+
+# end
+# schannel-0.1.15
+
+  crates.schannel."0.1.15" = deps: { features?(features_.schannel."0.1.15" deps {}) }: buildRustCrate {
+    crateName = "schannel";
+    version = "0.1.15";
+    description = "Schannel bindings for rust, allowing SSL/TLS (e.g. https) without openssl";
+    authors = [ "Steven Fackler <sfackler@gmail.com>" "Steffen Butzer <steffen.butzer@outlook.com>" ];
+    sha256 = "1x9i0z9y8n5cg23ppyglgqdlz6rwcv2a489m5qpfk6l2ib8a1jdv";
+    dependencies = mapFeatures features ([
+      (crates."lazy_static"."${deps."schannel"."0.1.15"."lazy_static"}" deps)
+      (crates."winapi"."${deps."schannel"."0.1.15"."winapi"}" deps)
+    ]);
+  };
+  features_.schannel."0.1.15" = deps: f: updateFeatures f ({
+    lazy_static."${deps.schannel."0.1.15".lazy_static}".default = true;
+    schannel."0.1.15".default = (f.schannel."0.1.15".default or true);
+    winapi = fold recursiveUpdate {} [
+      { "${deps.schannel."0.1.15".winapi}"."lmcons" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."minschannel" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."schannel" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."securitybaseapi" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."sspi" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."sysinfoapi" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."timezoneapi" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."winbase" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."wincrypt" = true; }
+      { "${deps.schannel."0.1.15".winapi}"."winerror" = true; }
+      { "${deps.schannel."0.1.15".winapi}".default = true; }
+    ];
+  }) [
+    (features_.lazy_static."${deps."schannel"."0.1.15"."lazy_static"}" deps)
+    (features_.winapi."${deps."schannel"."0.1.15"."winapi"}" deps)
+  ];
+
+
+# end
+# scopeguard-0.3.3
+
+  crates.scopeguard."0.3.3" = deps: { features?(features_.scopeguard."0.3.3" deps {}) }: buildRustCrate {
+    crateName = "scopeguard";
+    version = "0.3.3";
+    description = "A RAII scope guard that will run a given closure when it goes out of scope,\neven if the code between panics (assuming unwinding panic).\n\nDefines the macros `defer!` and `defer_on_unwind!`; the latter only runs\nif the scope is extited through unwinding on panic.\n";
+    authors = [ "bluss" ];
+    sha256 = "0i1l013csrqzfz6c68pr5pi01hg5v5yahq8fsdmaxy6p8ygsjf3r";
+    features = mkFeatures (features."scopeguard"."0.3.3" or {});
+  };
+  features_.scopeguard."0.3.3" = deps: f: updateFeatures f (rec {
+    scopeguard = fold recursiveUpdate {} [
+      { "0.3.3"."use_std" =
+        (f.scopeguard."0.3.3"."use_std" or false) ||
+        (f.scopeguard."0.3.3".default or false) ||
+        (scopeguard."0.3.3"."default" or false); }
+      { "0.3.3".default = (f.scopeguard."0.3.3".default or true); }
+    ];
+  }) [];
+
+
+# end
+# semver-0.9.0
+
+  crates.semver."0.9.0" = deps: { features?(features_.semver."0.9.0" deps {}) }: buildRustCrate {
+    crateName = "semver";
+    version = "0.9.0";
+    description = "Semantic version parsing and comparison.\n";
+    authors = [ "Steve Klabnik <steve@steveklabnik.com>" "The Rust Project Developers" ];
+    sha256 = "0azak2lb2wc36s3x15az886kck7rpnksrw14lalm157rg9sc9z63";
+    dependencies = mapFeatures features ([
+      (crates."semver_parser"."${deps."semver"."0.9.0"."semver_parser"}" deps)
+    ]
+      ++ (if features.semver."0.9.0".serde or false then [ (crates.serde."${deps."semver"."0.9.0".serde}" deps) ] else []));
+    features = mkFeatures (features."semver"."0.9.0" or {});
+  };
+  features_.semver."0.9.0" = deps: f: updateFeatures f (rec {
+    semver = fold recursiveUpdate {} [
+      { "0.9.0"."serde" =
+        (f.semver."0.9.0"."serde" or false) ||
+        (f.semver."0.9.0".ci or false) ||
+        (semver."0.9.0"."ci" or false); }
+      { "0.9.0".default = (f.semver."0.9.0".default or true); }
+    ];
+    semver_parser."${deps.semver."0.9.0".semver_parser}".default = true;
+    serde."${deps.semver."0.9.0".serde}".default = true;
+  }) [
+    (features_.semver_parser."${deps."semver"."0.9.0"."semver_parser"}" deps)
+    (features_.serde."${deps."semver"."0.9.0"."serde"}" deps)
+  ];
+
+
+# end
+# semver-parser-0.7.0
+
+  crates.semver_parser."0.7.0" = deps: { features?(features_.semver_parser."0.7.0" deps {}) }: buildRustCrate {
+    crateName = "semver-parser";
+    version = "0.7.0";
+    description = "Parsing of the semver spec.\n";
+    authors = [ "Steve Klabnik <steve@steveklabnik.com>" ];
+    sha256 = "1da66c8413yakx0y15k8c055yna5lyb6fr0fw9318kdwkrk5k12h";
+  };
+  features_.semver_parser."0.7.0" = deps: f: updateFeatures f ({
+    semver_parser."0.7.0".default = (f.semver_parser."0.7.0".default or true);
+  }) [];
+
+
+# end
+# serde-1.0.90
+
+  crates.serde."1.0.90" = deps: { features?(features_.serde."1.0.90" deps {}) }: buildRustCrate {
+    crateName = "serde";
+    version = "1.0.90";
+    description = "A generic serialization/deserialization framework";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "10b6n74m1dvb667vrn1db47ncb4h0mkqbg1dsamqjvv5vl5b5j56";
+    build = "build.rs";
+    dependencies = mapFeatures features ([
+    ]
+      ++ (if features.serde."1.0.90".serde_derive or false then [ (crates.serde_derive."${deps."serde"."1.0.90".serde_derive}" deps) ] else []));
+    features = mkFeatures (features."serde"."1.0.90" or {});
+  };
+  features_.serde."1.0.90" = deps: f: updateFeatures f (rec {
+    serde = fold recursiveUpdate {} [
+      { "1.0.90"."serde_derive" =
+        (f.serde."1.0.90"."serde_derive" or false) ||
+        (f.serde."1.0.90".derive or false) ||
+        (serde."1.0.90"."derive" or false); }
+      { "1.0.90"."std" =
+        (f.serde."1.0.90"."std" or false) ||
+        (f.serde."1.0.90".default or false) ||
+        (serde."1.0.90"."default" or false); }
+      { "1.0.90"."unstable" =
+        (f.serde."1.0.90"."unstable" or false) ||
+        (f.serde."1.0.90".alloc or false) ||
+        (serde."1.0.90"."alloc" or false); }
+      { "1.0.90".default = (f.serde."1.0.90".default or true); }
+    ];
+    serde_derive."${deps.serde."1.0.90".serde_derive}".default = true;
+  }) [
+    (features_.serde_derive."${deps."serde"."1.0.90"."serde_derive"}" deps)
+  ];
+
+
+# end
+# serde_derive-1.0.90
+
+  crates.serde_derive."1.0.90" = deps: { features?(features_.serde_derive."1.0.90" deps {}) }: buildRustCrate {
+    crateName = "serde_derive";
+    version = "1.0.90";
+    description = "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]";
+    authors = [ "Erick Tryzelaar <erick.tryzelaar@gmail.com>" "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1m4xgyl8jj3mxj0wszminzc1qf2gbkj9dpl17vi95nwl6m7i157y";
+    procMacro = true;
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."serde_derive"."1.0.90"."proc_macro2"}" deps)
+      (crates."quote"."${deps."serde_derive"."1.0.90"."quote"}" deps)
+      (crates."syn"."${deps."serde_derive"."1.0.90"."syn"}" deps)
+    ]);
+    features = mkFeatures (features."serde_derive"."1.0.90" or {});
+  };
+  features_.serde_derive."1.0.90" = deps: f: updateFeatures f ({
+    proc_macro2."${deps.serde_derive."1.0.90".proc_macro2}".default = true;
+    quote."${deps.serde_derive."1.0.90".quote}".default = true;
+    serde_derive."1.0.90".default = (f.serde_derive."1.0.90".default or true);
+    syn = fold recursiveUpdate {} [
+      { "${deps.serde_derive."1.0.90".syn}"."visit" = true; }
+      { "${deps.serde_derive."1.0.90".syn}".default = true; }
+    ];
+  }) [
+    (features_.proc_macro2."${deps."serde_derive"."1.0.90"."proc_macro2"}" deps)
+    (features_.quote."${deps."serde_derive"."1.0.90"."quote"}" deps)
+    (features_.syn."${deps."serde_derive"."1.0.90"."syn"}" deps)
+  ];
+
+
+# end
+# serde_ignored-0.0.4
+
+  crates.serde_ignored."0.0.4" = deps: { features?(features_.serde_ignored."0.0.4" deps {}) }: buildRustCrate {
+    crateName = "serde_ignored";
+    version = "0.0.4";
+    description = "Find out about keys that are ignored when deserializing data";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1ljsywm58p1s645rg2l9mchc5xa6mzxjpm8ag8nc2b74yp09h4jh";
+    dependencies = mapFeatures features ([
+      (crates."serde"."${deps."serde_ignored"."0.0.4"."serde"}" deps)
+    ]);
+  };
+  features_.serde_ignored."0.0.4" = deps: f: updateFeatures f ({
+    serde."${deps.serde_ignored."0.0.4".serde}".default = true;
+    serde_ignored."0.0.4".default = (f.serde_ignored."0.0.4".default or true);
+  }) [
+    (features_.serde."${deps."serde_ignored"."0.0.4"."serde"}" deps)
+  ];
+
+
+# end
+# shell-escape-0.1.4
+
+  crates.shell_escape."0.1.4" = deps: { features?(features_.shell_escape."0.1.4" deps {}) }: buildRustCrate {
+    crateName = "shell-escape";
+    version = "0.1.4";
+    description = "Escape characters that may have a special meaning in a shell";
+    authors = [ "Steven Fackler <sfackler@gmail.com>" ];
+    sha256 = "02ik28la039b8anx0sx8mbdp2yx66m64mjrjyy6x0dgpbmfxmc24";
+  };
+  features_.shell_escape."0.1.4" = deps: f: updateFeatures f ({
+    shell_escape."0.1.4".default = (f.shell_escape."0.1.4".default or true);
+  }) [];
+
+
+# end
+# sized-chunks-0.1.3
+
+  crates.sized_chunks."0.1.3" = deps: { features?(features_.sized_chunks."0.1.3" deps {}) }: buildRustCrate {
+    crateName = "sized-chunks";
+    version = "0.1.3";
+    description = "Efficient sized chunk datatypes";
+    authors = [ "Bodil Stokke <bodil@bodil.org>" ];
+    edition = "2018";
+    sha256 = "0qp5yvy2kqpk5qhiq3ybwynv740j3wv97ar1kjx96hmmbman142i";
+    dependencies = mapFeatures features ([
+      (crates."typenum"."${deps."sized_chunks"."0.1.3"."typenum"}" deps)
+    ]);
+  };
+  features_.sized_chunks."0.1.3" = deps: f: updateFeatures f ({
+    sized_chunks."0.1.3".default = (f.sized_chunks."0.1.3".default or true);
+    typenum."${deps.sized_chunks."0.1.3".typenum}".default = true;
+  }) [
+    (features_.typenum."${deps."sized_chunks"."0.1.3"."typenum"}" deps)
+  ];
+
+
+# end
+# socket2-0.3.8
+
+  crates.socket2."0.3.8" = deps: { features?(features_.socket2."0.3.8" deps {}) }: buildRustCrate {
+    crateName = "socket2";
+    version = "0.3.8";
+    description = "Utilities for handling networking sockets with a maximal amount of configuration\npossible intended.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1a71m20jxmf9kqqinksphc7wj1j7q672q29cpza7p9siyzyfx598";
+    dependencies = (if (kernel == "linux" || kernel == "darwin") || kernel == "redox" then mapFeatures features ([
+      (crates."cfg_if"."${deps."socket2"."0.3.8"."cfg_if"}" deps)
+      (crates."libc"."${deps."socket2"."0.3.8"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "redox" then mapFeatures features ([
+      (crates."redox_syscall"."${deps."socket2"."0.3.8"."redox_syscall"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."socket2"."0.3.8"."winapi"}" deps)
+    ]) else []);
+    features = mkFeatures (features."socket2"."0.3.8" or {});
+  };
+  features_.socket2."0.3.8" = deps: f: updateFeatures f ({
+    cfg_if."${deps.socket2."0.3.8".cfg_if}".default = true;
+    libc."${deps.socket2."0.3.8".libc}".default = true;
+    redox_syscall."${deps.socket2."0.3.8".redox_syscall}".default = true;
+    socket2."0.3.8".default = (f.socket2."0.3.8".default or true);
+    winapi = fold recursiveUpdate {} [
+      { "${deps.socket2."0.3.8".winapi}"."handleapi" = true; }
+      { "${deps.socket2."0.3.8".winapi}"."minwindef" = true; }
+      { "${deps.socket2."0.3.8".winapi}"."ws2def" = true; }
+      { "${deps.socket2."0.3.8".winapi}"."ws2ipdef" = true; }
+      { "${deps.socket2."0.3.8".winapi}"."ws2tcpip" = true; }
+      { "${deps.socket2."0.3.8".winapi}".default = true; }
+    ];
+  }) [
+    (features_.cfg_if."${deps."socket2"."0.3.8"."cfg_if"}" deps)
+    (features_.libc."${deps."socket2"."0.3.8"."libc"}" deps)
+    (features_.redox_syscall."${deps."socket2"."0.3.8"."redox_syscall"}" deps)
+    (features_.winapi."${deps."socket2"."0.3.8"."winapi"}" deps)
+  ];
+
+
+# end
+# strsim-0.8.0
+
+  crates.strsim."0.8.0" = deps: { features?(features_.strsim."0.8.0" deps {}) }: buildRustCrate {
+    crateName = "strsim";
+    version = "0.8.0";
+    description = "Implementations of string similarity metrics.\nIncludes Hamming, Levenshtein, OSA, Damerau-Levenshtein, Jaro, and Jaro-Winkler.\n";
+    authors = [ "Danny Guo <dannyguo91@gmail.com>" ];
+    sha256 = "0d3jsdz22wgjyxdakqnvdgmwjdvkximz50d9zfk4qlalw635qcvy";
+  };
+  features_.strsim."0.8.0" = deps: f: updateFeatures f ({
+    strsim."0.8.0".default = (f.strsim."0.8.0".default or true);
+  }) [];
+
+
+# end
+# strsim-0.9.1
+
+  crates.strsim."0.9.1" = deps: { features?(features_.strsim."0.9.1" deps {}) }: buildRustCrate {
+    crateName = "strsim";
+    version = "0.9.1";
+    description = "Implementations of string similarity metrics.\nIncludes Hamming, Levenshtein, OSA, Damerau-Levenshtein, Jaro, and Jaro-Winkler.\n";
+    authors = [ "Danny Guo <dannyguo91@gmail.com>" ];
+    sha256 = "0lpi3lrq6z5slay72ir1sg1ki0bvr3qia82lzx937gpc999i6bah";
+    dependencies = mapFeatures features ([
+      (crates."hashbrown"."${deps."strsim"."0.9.1"."hashbrown"}" deps)
+      (crates."ndarray"."${deps."strsim"."0.9.1"."ndarray"}" deps)
+    ]);
+  };
+  features_.strsim."0.9.1" = deps: f: updateFeatures f ({
+    hashbrown."${deps.strsim."0.9.1".hashbrown}".default = true;
+    ndarray."${deps.strsim."0.9.1".ndarray}".default = true;
+    strsim."0.9.1".default = (f.strsim."0.9.1".default or true);
+  }) [
+    (features_.hashbrown."${deps."strsim"."0.9.1"."hashbrown"}" deps)
+    (features_.ndarray."${deps."strsim"."0.9.1"."ndarray"}" deps)
+  ];
+
+
+# end
+# syn-0.15.32
+
+  crates.syn."0.15.32" = deps: { features?(features_.syn."0.15.32" deps {}) }: buildRustCrate {
+    crateName = "syn";
+    version = "0.15.32";
+    description = "Parser for Rust source code";
+    authors = [ "David Tolnay <dtolnay@gmail.com>" ];
+    sha256 = "1xq1mbns8zyg2ls5927wqi7i7hn2y933czbqqds648gcdqccsqb0";
+    dependencies = mapFeatures features ([
+      (crates."proc_macro2"."${deps."syn"."0.15.32"."proc_macro2"}" deps)
+      (crates."unicode_xid"."${deps."syn"."0.15.32"."unicode_xid"}" deps)
+    ]
+      ++ (if features.syn."0.15.32".quote or false then [ (crates.quote."${deps."syn"."0.15.32".quote}" deps) ] else []));
+    features = mkFeatures (features."syn"."0.15.32" or {});
+  };
+  features_.syn."0.15.32" = deps: f: updateFeatures f (rec {
+    proc_macro2 = fold recursiveUpdate {} [
+      { "${deps.syn."0.15.32".proc_macro2}"."proc-macro" =
+        (f.proc_macro2."${deps.syn."0.15.32".proc_macro2}"."proc-macro" or false) ||
+        (syn."0.15.32"."proc-macro" or false) ||
+        (f."syn"."0.15.32"."proc-macro" or false); }
+      { "${deps.syn."0.15.32".proc_macro2}".default = (f.proc_macro2."${deps.syn."0.15.32".proc_macro2}".default or false); }
+    ];
+    quote = fold recursiveUpdate {} [
+      { "${deps.syn."0.15.32".quote}"."proc-macro" =
+        (f.quote."${deps.syn."0.15.32".quote}"."proc-macro" or false) ||
+        (syn."0.15.32"."proc-macro" or false) ||
+        (f."syn"."0.15.32"."proc-macro" or false); }
+      { "${deps.syn."0.15.32".quote}".default = (f.quote."${deps.syn."0.15.32".quote}".default or false); }
+    ];
+    syn = fold recursiveUpdate {} [
+      { "0.15.32"."clone-impls" =
+        (f.syn."0.15.32"."clone-impls" or false) ||
+        (f.syn."0.15.32".default or false) ||
+        (syn."0.15.32"."default" or false); }
+      { "0.15.32"."derive" =
+        (f.syn."0.15.32"."derive" or false) ||
+        (f.syn."0.15.32".default or false) ||
+        (syn."0.15.32"."default" or false); }
+      { "0.15.32"."parsing" =
+        (f.syn."0.15.32"."parsing" or false) ||
+        (f.syn."0.15.32".default or false) ||
+        (syn."0.15.32"."default" or false); }
+      { "0.15.32"."printing" =
+        (f.syn."0.15.32"."printing" or false) ||
+        (f.syn."0.15.32".default or false) ||
+        (syn."0.15.32"."default" or false); }
+      { "0.15.32"."proc-macro" =
+        (f.syn."0.15.32"."proc-macro" or false) ||
+        (f.syn."0.15.32".default or false) ||
+        (syn."0.15.32"."default" or false); }
+      { "0.15.32"."quote" =
+        (f.syn."0.15.32"."quote" or false) ||
+        (f.syn."0.15.32".printing or false) ||
+        (syn."0.15.32"."printing" or false); }
+      { "0.15.32".default = (f.syn."0.15.32".default or true); }
+    ];
+    unicode_xid."${deps.syn."0.15.32".unicode_xid}".default = true;
+  }) [
+    (features_.proc_macro2."${deps."syn"."0.15.32"."proc_macro2"}" deps)
+    (features_.quote."${deps."syn"."0.15.32"."quote"}" deps)
+    (features_.unicode_xid."${deps."syn"."0.15.32"."unicode_xid"}" deps)
+  ];
+
+
+# end
+# tar-0.4.22
+
+  crates.tar."0.4.22" = deps: { features?(features_.tar."0.4.22" deps {}) }: buildRustCrate {
+    crateName = "tar";
+    version = "0.4.22";
+    description = "A Rust implementation of a TAR file reader and writer. This library does not\ncurrently handle compression, but it is abstract over all I/O readers and\nwriters. Additionally, great lengths are taken to ensure that the entire\ncontents are never required to be entirely resident in memory all at once.\n";
+    authors = [ "Alex Crichton <alex@alexcrichton.com>" ];
+    sha256 = "1y2b5av1ckf7v7rw1p59fjddn2jwzv0xr69lgdd4l41g43c3zq9j";
+    dependencies = mapFeatures features ([
+      (crates."filetime"."${deps."tar"."0.4.22"."filetime"}" deps)
+    ])
+      ++ (if kernel == "redox" then mapFeatures features ([
+      (crates."redox_syscall"."${deps."tar"."0.4.22"."redox_syscall"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."tar"."0.4.22"."libc"}" deps)
+    ]) else []);
+  };
+  features_.tar."0.4.22" = deps: f: updateFeatures f (rec {
+    filetime."${deps.tar."0.4.22".filetime}".default = true;
+    libc."${deps.tar."0.4.22".libc}".default = true;
+    redox_syscall."${deps.tar."0.4.22".redox_syscall}".default = true;
+    tar = fold recursiveUpdate {} [
+      { "0.4.22"."xattr" =
+        (f.tar."0.4.22"."xattr" or false) ||
+        (f.tar."0.4.22".default or false) ||
+        (tar."0.4.22"."default" or false); }
+      { "0.4.22".default = (f.tar."0.4.22".default or true); }
+    ];
+  }) [
+    (features_.filetime."${deps."tar"."0.4.22"."filetime"}" deps)
+    (features_.redox_syscall."${deps."tar"."0.4.22"."redox_syscall"}" deps)
+    (features_.libc."${deps."tar"."0.4.22"."libc"}" deps)
+  ];
+
+
+# end
+# tempfile-3.0.7
+
+  crates.tempfile."3.0.7" = deps: { features?(features_.tempfile."3.0.7" deps {}) }: buildRustCrate {
+    crateName = "tempfile";
+    version = "3.0.7";
+    description = "A library for managing temporary files and directories.\n";
+    authors = [ "Steven Allen <steven@stebalien.com>" "The Rust Project Developers" "Ashley Mannix <ashleymannix@live.com.au>" "Jason White <jasonaw0@gmail.com>" ];
+    sha256 = "19h7ch8fvisxrrmabcnhlfj6b8vg34zaw8491x141p0n0727niaf";
+    dependencies = mapFeatures features ([
+      (crates."cfg_if"."${deps."tempfile"."3.0.7"."cfg_if"}" deps)
+      (crates."rand"."${deps."tempfile"."3.0.7"."rand"}" deps)
+      (crates."remove_dir_all"."${deps."tempfile"."3.0.7"."remove_dir_all"}" deps)
+    ])
+      ++ (if kernel == "redox" then mapFeatures features ([
+      (crates."redox_syscall"."${deps."tempfile"."3.0.7"."redox_syscall"}" deps)
+    ]) else [])
+      ++ (if (kernel == "linux" || kernel == "darwin") then mapFeatures features ([
+      (crates."libc"."${deps."tempfile"."3.0.7"."libc"}" deps)
+    ]) else [])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."tempfile"."3.0.7"."winapi"}" deps)
+    ]) else []);
+  };
+  features_.tempfile."3.0.7" = deps: f: updateFeatures f ({
+    cfg_if."${deps.tempfile."3.0.7".cfg_if}".default = true;
+    libc."${deps.tempfile."3.0.7".libc}".default = true;
+    rand."${deps.tempfile."3.0.7".rand}".default = true;
+    redox_syscall."${deps.tempfile."3.0.7".redox_syscall}".default = true;
+    remove_dir_all."${deps.tempfile."3.0.7".remove_dir_all}".default = true;
+    tempfile."3.0.7".default = (f.tempfile."3.0.7".default or true);
+    winapi = fold recursiveUpdate {} [
+      { "${deps.tempfile."3.0.7".winapi}"."fileapi" = true; }
+      { "${deps.tempfile."3.0.7".winapi}"."handleapi" = true; }
+      { "${deps.tempfile."3.0.7".winapi}"."winbase" = true; }
+      { "${deps.tempfile."3.0.7".winapi}".default = true; }
+    ];
+  }) [
+    (features_.cfg_if."${deps."tempfile"."3.0.7"."cfg_if"}" deps)
+    (features_.rand."${deps."tempfile"."3.0.7"."rand"}" deps)
+    (features_.remove_dir_all."${deps."tempfile"."3.0.7"."remove_dir_all"}" deps)
+    (features_.redox_syscall."${deps."tempfile"."3.0.7"."redox_syscall"}" deps)
+    (features_.libc."${deps."tempfile"."3.0.7"."libc"}" deps)
+    (features_.winapi."${deps."tempfile"."3.0.7"."winapi"}" deps)
+  ];
+
+
+# end
+# textwrap-0.11.0
+
+  crates.textwrap."0.11.0" = deps: { features?(features_.textwrap."0.11.0" deps {}) }: buildRustCrate {
+    crateName = "textwrap";
+    version = "0.11.0";
+    description = "Textwrap is a small library for word wrapping, indenting, and\ndedenting strings.\n\nYou can use it to format strings (such as help and error messages) for\ndisplay in commandline applications. It is designed to be efficient\nand handle Unicode characters correctly.\n";
+    authors = [ "Martin Geisler <martin@geisler.net>" ];
+    sha256 = "0s25qh49n7kjayrdj4q3v0jk0jc6vy88rdw0bvgfxqlscpqpxi7d";
+    dependencies = mapFeatures features ([
+      (crates."unicode_width"."${deps."textwrap"."0.11.0"."unicode_width"}" deps)
+    ]);
+  };
+  features_.textwrap."0.11.0" = deps: f: updateFeatures f ({
+    textwrap."0.11.0".default = (f.textwrap."0.11.0".default or true);
+    unicode_width."${deps.textwrap."0.11.0".unicode_width}".default = true;
+  }) [
+    (features_.unicode_width."${deps."textwrap"."0.11.0"."unicode_width"}" deps)
+  ];
+
+
+# end
+# typenum-1.10.0
+
+  crates.typenum."1.10.0" = deps: { features?(features_.typenum."1.10.0" deps {}) }: buildRustCrate {
+    crateName = "typenum";
+    version = "1.10.0";
+    description = "Typenum is a Rust library for type-level numbers evaluated at compile time. It currently supports bits, unsigned integers, and signed integers. It also provides a type-level array of type-level numbers, but its implementation is incomplete.";
+    authors = [ "Paho Lurie-Gregg <paho@paholg.com>" "Andre Bogus <bogusandre@gmail.com>" ];
+    sha256 = "1v2cgg0mlzkg5prs7swysckgk2ay6bpda8m83c2sn3z77dcsx3bc";
+    build = "build/main.rs";
+    features = mkFeatures (features."typenum"."1.10.0" or {});
+  };
+  features_.typenum."1.10.0" = deps: f: updateFeatures f ({
+    typenum."1.10.0".default = (f.typenum."1.10.0".default or true);
+  }) [];
+
+
+# end
+# url_serde-0.2.0
+
+  crates.url_serde."0.2.0" = deps: { features?(features_.url_serde."0.2.0" deps {}) }: buildRustCrate {
+    crateName = "url_serde";
+    version = "0.2.0";
+    description = "Serde support for URL types";
+    authors = [ "The rust-url developers" ];
+    sha256 = "07ry87rw0pi1da6b53f7s3f52wx3ihxbcgjd4ldspfv5xh6wipsg";
+    dependencies = mapFeatures features ([
+      (crates."serde"."${deps."url_serde"."0.2.0"."serde"}" deps)
+      (crates."url"."${deps."url_serde"."0.2.0"."url"}" deps)
+    ]);
+  };
+  features_.url_serde."0.2.0" = deps: f: updateFeatures f ({
+    serde."${deps.url_serde."0.2.0".serde}".default = true;
+    url."${deps.url_serde."0.2.0".url}".default = true;
+    url_serde."0.2.0".default = (f.url_serde."0.2.0".default or true);
+  }) [
+    (features_.serde."${deps."url_serde"."0.2.0"."serde"}" deps)
+    (features_.url."${deps."url_serde"."0.2.0"."url"}" deps)
+  ];
+
+
+# end
+# vcpkg-0.2.6
+
+  crates.vcpkg."0.2.6" = deps: { features?(features_.vcpkg."0.2.6" deps {}) }: buildRustCrate {
+    crateName = "vcpkg";
+    version = "0.2.6";
+    description = "A library to find native dependencies in a vcpkg tree at build\ntime in order to be used in Cargo build scripts.\n";
+    authors = [ "Jim McGrath <jimmc2@gmail.com>" ];
+    sha256 = "1ig6jqpzzl1z9vk4qywgpfr4hfbd8ny8frqsgm3r449wkc4n1i5x";
+  };
+  features_.vcpkg."0.2.6" = deps: f: updateFeatures f ({
+    vcpkg."0.2.6".default = (f.vcpkg."0.2.6".default or true);
+  }) [];
+
+
+# end
+# walkdir-2.2.7
+
+  crates.walkdir."2.2.7" = deps: { features?(features_.walkdir."2.2.7" deps {}) }: buildRustCrate {
+    crateName = "walkdir";
+    version = "2.2.7";
+    description = "Recursively walk a directory.";
+    authors = [ "Andrew Gallant <jamslam@gmail.com>" ];
+    sha256 = "0wq3v28916kkla29yyi0g0xfc16apwx24py68049kriz3gjlig03";
+    dependencies = mapFeatures features ([
+      (crates."same_file"."${deps."walkdir"."2.2.7"."same_file"}" deps)
+    ])
+      ++ (if kernel == "windows" then mapFeatures features ([
+      (crates."winapi"."${deps."walkdir"."2.2.7"."winapi"}" deps)
+      (crates."winapi_util"."${deps."walkdir"."2.2.7"."winapi_util"}" deps)
+    ]) else []);
+  };
+  features_.walkdir."2.2.7" = deps: f: updateFeatures f ({
+    same_file."${deps.walkdir."2.2.7".same_file}".default = true;
+    walkdir."2.2.7".default = (f.walkdir."2.2.7".default or true);
+    winapi = fold recursiveUpdate {} [
+      { "${deps.walkdir."2.2.7".winapi}"."std" = true; }
+      { "${deps.walkdir."2.2.7".winapi}"."winnt" = true; }
+      { "${deps.walkdir."2.2.7".winapi}".default = true; }
+    ];
+    winapi_util."${deps.walkdir."2.2.7".winapi_util}".default = true;
+  }) [
+    (features_.same_file."${deps."walkdir"."2.2.7"."same_file"}" deps)
+    (features_.winapi."${deps."walkdir"."2.2.7"."winapi"}" deps)
+    (features_.winapi_util."${deps."walkdir"."2.2.7"."winapi_util"}" deps)
+  ];
+
+
+# end
+# winapi-0.2.8
+
+  crates.winapi."0.2.8" = deps: { features?(features_.winapi."0.2.8" deps {}) }: buildRustCrate {
+    crateName = "winapi";
+    version = "0.2.8";
+    description = "Types and constants for WinAPI bindings. See README for list of crates providing function bindings.";
+    authors = [ "Peter Atashian <retep998@gmail.com>" ];
+    sha256 = "0a45b58ywf12vb7gvj6h3j264nydynmzyqz8d8rqxsj6icqv82as";
+  };
+  features_.winapi."0.2.8" = deps: f: updateFeatures f ({
+    winapi."0.2.8".default = (f.winapi."0.2.8".default or true);
+  }) [];
+
+
+# end
+# winapi-0.3.7
+
+  crates.winapi."0.3.7" = deps: { features?(features_.winapi."0.3.7" deps {}) }: buildRustCrate {
+    crateName = "winapi";
+    version = "0.3.7";
+    description = "Raw FFI bindings for all of Windows API.";
+    authors = [ "Peter Atashian <retep998@gmail.com>" ];
+    sha256 = "1k51gfkp0zqw7nj07y443mscs46icmdhld442s2073niap0kkdr8";
+    build = "build.rs";
+    dependencies = (if kernel == "i686-pc-windows-gnu" then mapFeatures features ([
+      (crates."winapi_i686_pc_windows_gnu"."${deps."winapi"."0.3.7"."winapi_i686_pc_windows_gnu"}" deps)
+    ]) else [])
+      ++ (if kernel == "x86_64-pc-windows-gnu" then mapFeatures features ([
+      (crates."winapi_x86_64_pc_windows_gnu"."${deps."winapi"."0.3.7"."winapi_x86_64_pc_windows_gnu"}" deps)
+    ]) else []);
+    features = mkFeatures (features."winapi"."0.3.7" or {});
+  };
+  features_.winapi."0.3.7" = deps: f: updateFeatures f (rec {
+    winapi = fold recursiveUpdate {} [
+      { "0.3.7"."impl-debug" =
+        (f.winapi."0.3.7"."impl-debug" or false) ||
+        (f.winapi."0.3.7".debug or false) ||
+        (winapi."0.3.7"."debug" or false); }
+      { "0.3.7".default = (f.winapi."0.3.7".default or true); }
+    ];
+    winapi_i686_pc_windows_gnu."${deps.winapi."0.3.7".winapi_i686_pc_windows_gnu}".default = true;
+    winapi_x86_64_pc_windows_gnu."${deps.winapi."0.3.7".winapi_x86_64_pc_windows_gnu}".default = true;
+  }) [
+    (features_.winapi_i686_pc_windows_gnu."${deps."winapi"."0.3.7"."winapi_i686_pc_windows_gnu"}" deps)
+    (features_.winapi_x86_64_pc_windows_gnu."${deps."winapi"."0.3.7"."winapi_x86_64_pc_windows_gnu"}" deps)
+  ];
+
+
+# end
+# winapi-build-0.1.1
+
+  crates.winapi_build."0.1.1" = deps: { features?(features_.winapi_build."0.1.1" deps {}) }: buildRustCrate {
+    crateName = "winapi-build";
+    version = "0.1.1";
+    description = "Common code for build.rs in WinAPI -sys crates.";
+    authors = [ "Peter Atashian <retep998@gmail.com>" ];
+    sha256 = "1lxlpi87rkhxcwp2ykf1ldw3p108hwm24nywf3jfrvmff4rjhqga";
+    libName = "build";
+  };
+  features_.winapi_build."0.1.1" = deps: f: updateFeatures f ({
+    winapi_build."0.1.1".default = (f.winapi_build."0.1.1".default or true);
+  }) [];
+
+
+# end
+# adler32-1.0.3
+
+  crates.adler32."1.0.3" = deps: { features?(features_.adler32."1.0.3" deps {}) }: buildRustCrate {
+    crateName = "adler32";
+    version = "1.0.3";
+    description = "Minimal Adler32 implementation for Rust.";
+    authors = [ "Remi Rampin <remirampin@gmail.com>" ];
+    sha256 = "1z3mvjgw02mbqk98kizzibrca01d5wfkpazsrp3vkkv3i56pn6fb";
+  };
+  features_.adler32."1.0.3" = deps: f: updateFeatures f ({
+    adler32."1.0.3".default = (f.adler32."1.0.3".default or true);
+  }) [];
+
+
+# end
+}
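Each `crates.<name>."<version>"` entry above is a function over a resolver-produced `deps` mapping, and the matching `features_` function computes the transitive default-feature set. A minimal sketch of that wiring, assuming the full generated file also contains `unicode_width` at the pinned version (the pin itself is hypothetical):

    let
      # Hypothetical resolver output: crate -> version -> dependency -> resolved version.
      deps = {
        textwrap."0.11.0".unicode_width = "0.1.5";
      };
    in
      # Builds textwrap 0.11.0 against that pin; features default to the
      # fixpoint computed by features_.textwrap."0.11.0".
      crates.textwrap."0.11.0" deps { }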
diff --git a/nixpkgs/pkgs/build-support/rust/default-crate-overrides.nix b/nixpkgs/pkgs/build-support/rust/default-crate-overrides.nix
new file mode 100644
index 000000000000..61cec2a6abab
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/default-crate-overrides.nix
@@ -0,0 +1,163 @@
+{ lib, stdenv, pkg-config, curl, darwin, libiconv, libgit2, libssh2,
+  openssl, sqlite, zlib, dbus, dbus-glib, gdk-pixbuf, cairo, python3,
+  libsodium, postgresql, gmp, foundationdb, capnproto, nettle, clang,
+  llvmPackages, ... }:
+
+let
+  inherit (darwin.apple_sdk.frameworks) CoreFoundation Security;
+in
+{
+  cairo-rs = attrs: {
+    buildInputs = [ cairo ];
+  };
+
+  capnp-rpc = attrs: {
+    nativeBuildInputs = [ capnproto ];
+  };
+
+  cargo = attrs: {
+    buildInputs = [ openssl zlib curl ]
+      ++ lib.optionals stdenv.isDarwin [ CoreFoundation Security libiconv ];
+  };
+
+  libz-sys = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ zlib ];
+    extraLinkFlags = ["-L${zlib.out}/lib"];
+  };
+
+  curl-sys = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ zlib curl ];
+    propagatedBuildInputs = [ curl zlib ];
+    extraLinkFlags = ["-L${zlib.out}/lib"];
+  };
+
+  dbus = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ dbus ];
+  };
+
+  foundationdb-sys = attrs: {
+    buildInputs = [ foundationdb ];
+    # needed for 0.4+ release, when the FFI bindings are auto-generated
+    #
+    # patchPhase = ''
+    #   substituteInPlace ./foundationdb-sys/build.rs \
+    #     --replace /usr/local/include ${foundationdb.dev}/include
+    # '';
+  };
+
+  foundationdb = attrs: {
+    buildInputs = [ foundationdb ];
+  };
+
+  gobject-sys = attrs: {
+    buildInputs = [ dbus-glib ];
+  };
+
+  gio-sys = attrs: {
+    buildInputs = [ dbus-glib ];
+  };
+
+  gdk-pixbuf-sys = attrs: {
+    buildInputs = [ dbus-glib ];
+  };
+
+  gdk-pixbuf = attrs: {
+    buildInputs = [ gdk-pixbuf ];
+  };
+
+  libgit2-sys = attrs: {
+    LIBGIT2_SYS_USE_PKG_CONFIG = true;
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ openssl zlib libgit2 ];
+  };
+
+  libsqlite3-sys = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ sqlite ];
+  };
+
+  libssh2-sys = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ openssl zlib libssh2 ];
+  };
+
+  libdbus-sys = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ dbus ];
+  };
+
+  nettle-sys = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ nettle clang ];
+    LIBCLANG_PATH = "${llvmPackages.libclang.lib}/lib";
+  };
+
+  openssl = attrs: {
+    buildInputs = [ openssl ];
+  };
+
+  openssl-sys = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ openssl ];
+  };
+
+  pq-sys = attr: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ postgresql ];
+  };
+
+  rink = attrs: {
+    buildInputs = [ gmp ];
+    crateBin = [ {  name = "rink"; path = "src/bin/rink.rs"; } ];
+  };
+
+  security-framework-sys = attr: {
+    propagatedBuildInputs = [ Security ];
+  };
+
+  sequoia-openpgp = attrs: {
+    buildInputs = [ gmp ];
+  };
+
+  sequoia-openpgp-ffi = attrs: {
+    buildInputs = [ gmp ];
+  };
+
+  sequoia-ipc = attrs: {
+    buildInputs = [ gmp ];
+  };
+
+  sequoia-guide = attrs: {
+    buildInputs = [ gmp ];
+  };
+
+  sequoia-store = attrs: {
+    nativeBuildInputs = [ capnproto ];
+    buildInputs = [ sqlite gmp ];
+  };
+
+  sequoia-sq = attrs: {
+    buildInputs = [ sqlite gmp ];
+  };
+
+  sequoia-tool = attrs: {
+    nativeBuildInputs = [ capnproto ];
+    buildInputs = [ sqlite gmp ];
+  };
+
+  serde_derive = attrs: {
+    buildInputs = lib.optional stdenv.isDarwin Security;
+  };
+
+  thrussh-libsodium = attrs: {
+    nativeBuildInputs = [ pkg-config ];
+    buildInputs = [ libsodium ];
+  };
+
+  xcb = attrs: {
+    buildInputs = [ python3 ];
+  };
+}
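These overrides are meant to be extended, not replaced, so the stock `*-sys` entries keep working. A sketch against a carnix-generated expression, following the nixpkgs manual's pattern (`hello.nix` and the `hello` crate are hypothetical):

    with import <nixpkgs> { };
    ((import ./hello.nix).hello { }).override {
      crateOverrides = defaultCrateOverrides // {
        # Adds a native dependency on top of the defaults above.
        hello = attrs: { buildInputs = [ openssl ]; };
      };
    }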
diff --git a/nixpkgs/pkgs/build-support/rust/default.nix b/nixpkgs/pkgs/build-support/rust/default.nix
new file mode 100644
index 000000000000..a3b280050efe
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/default.nix
@@ -0,0 +1,161 @@
+{ stdenv
+, lib
+, buildPackages
+, cacert
+, cargoBuildHook
+, cargoCheckHook
+, cargoInstallHook
+, cargoSetupHook
+, fetchCargoTarball
+, importCargoLock
+, runCommandNoCC
+, rustPlatform
+, callPackage
+, remarshal
+, git
+, rust
+, rustc
+, libiconv
+, windows
+}:
+
+let
+  buildRustPackage =
+    { name ? "${args.pname}-${args.version}"
+
+      # SRI hash
+    , cargoHash ? ""
+
+      # Legacy hash
+    , cargoSha256 ? ""
+
+      # Name for the vendored dependencies tarball
+    , cargoDepsName ? name
+
+    , src ? null
+    , srcs ? null
+    , unpackPhase ? null
+    , cargoPatches ? []
+    , patches ? []
+    , sourceRoot ? null
+    , logLevel ? ""
+    , buildInputs ? []
+    , nativeBuildInputs ? []
+    , cargoUpdateHook ? ""
+    , cargoDepsHook ? ""
+    , buildType ? "release"
+    , meta ? {}
+    , cargoLock ? null
+    , cargoVendorDir ? null
+    , checkType ? buildType
+    , depsExtraArgs ? {}
+
+    # Toggles whether a custom sysroot is created when the target is a .json file.
+    , __internal_dontAddSysroot ? false
+
+    # Needed to `pushd`/`popd` into a subdir of a tarball if this subdir
+    # contains a Cargo.toml, but isn't part of a workspace (which is e.g. the
+    # case for `rustfmt`/etc. from the `rust-sources` repo).
+    # Otherwise, everything from the tarball would've been built/tested.
+    , buildAndTestSubdir ? null
+    , ... } @ args:
+
+    assert cargoVendorDir == null && cargoLock == null -> cargoSha256 == "" && cargoHash == ""
+      -> throw "cargoSha256, cargoHash, cargoVendorDir, or cargoLock must be set";
+    assert buildType == "release" || buildType == "debug";
+
+    let
+
+      cargoDeps =
+        if cargoVendorDir == null
+        then if cargoLock != null then importCargoLock cargoLock
+        else fetchCargoTarball ({
+          inherit src srcs sourceRoot unpackPhase cargoUpdateHook;
+          name = cargoDepsName;
+          hash = cargoHash;
+          patches = cargoPatches;
+          sha256 = cargoSha256;
+        } // depsExtraArgs)
+        else null;
+
+      # If we have a cargoSha256 fixed-output derivation, validate it at build time
+      # against the src fixed-output derivation to check consistency.
+      validateCargoDeps = !(cargoHash == "" && cargoSha256 == "");
+
+      target = rust.toRustTargetSpec stdenv.hostPlatform;
+      targetIsJSON = lib.hasSuffix ".json" target;
+      useSysroot = targetIsJSON && !__internal_dontAddSysroot;
+
+      # see https://github.com/rust-lang/cargo/blob/964a16a28e234a3d397b2a7031d4ab4a428b1391/src/cargo/core/compiler/compile_kind.rs#L151-L168
+      # the "${}" is needed to transform the path into a /nix/store path before baseNameOf
+      shortTarget = if targetIsJSON then
+          (lib.removeSuffix ".json" (builtins.baseNameOf "${target}"))
+        else target;
+
+      sysroot = (callPackage ./sysroot {}) {
+        inherit target shortTarget;
+        RUSTFLAGS = args.RUSTFLAGS or "";
+        originalCargoToml = src + /Cargo.toml; # profile info is later extracted
+      };
+
+    in
+
+    # Tests don't currently work for `no_std`, and all custom sysroots are currently built without `std`.
+    # See https://os.phil-opp.com/testing/ for more information.
+    assert useSysroot -> !(args.doCheck or true);
+
+    stdenv.mkDerivation ((removeAttrs args [ "depsExtraArgs" "cargoLock" ]) // lib.optionalAttrs useSysroot {
+      RUSTFLAGS = "--sysroot ${sysroot} " + (args.RUSTFLAGS or "");
+    } // {
+      inherit buildAndTestSubdir cargoDeps;
+
+      cargoBuildType = buildType;
+
+      cargoCheckType = checkType;
+
+      patchRegistryDeps = ./patch-registry-deps;
+
+      nativeBuildInputs = nativeBuildInputs ++ [
+        cacert
+        git
+        cargoBuildHook
+        cargoCheckHook
+        cargoInstallHook
+        cargoSetupHook
+        rustc
+      ];
+
+      buildInputs = buildInputs
+        ++ lib.optionals stdenv.hostPlatform.isDarwin [ libiconv ]
+        ++ lib.optionals stdenv.hostPlatform.isMinGW [ windows.pthreads ];
+
+      patches = cargoPatches ++ patches;
+
+      PKG_CONFIG_ALLOW_CROSS =
+        if stdenv.buildPlatform != stdenv.hostPlatform then 1 else 0;
+
+      postUnpack = ''
+        eval "$cargoDepsHook"
+
+        export RUST_LOG=${logLevel}
+      '' + (args.postUnpack or "");
+
+      configurePhase = args.configurePhase or ''
+        runHook preConfigure
+        runHook postConfigure
+      '';
+
+      doCheck = args.doCheck or true;
+
+      strictDeps = true;
+
+      passthru = { inherit cargoDeps; } // (args.passthru or {});
+
+      meta = {
+        # default to Rust's platforms
+        platforms = rustc.meta.platforms;
+      } // meta;
+    }) // {
+      overrideRustAttrs = f: buildRustPackage (args // (f args));
+    };
+in buildRustPackage
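A minimal caller sketch for the `buildRustPackage` defined above, assuming a local tree that ships both Cargo.toml and Cargo.lock; the name and hash are placeholders:

    rustPlatform.buildRustPackage rec {
      pname = "hello-rs";   # hypothetical crate
      version = "0.1.0";
      src = ./.;
      # Placeholder; build once, then copy the hash from the mismatch error.
      cargoHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
    }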
diff --git a/nixpkgs/pkgs/build-support/rust/fetchCargoTarball.nix b/nixpkgs/pkgs/build-support/rust/fetchCargoTarball.nix
new file mode 100644
index 000000000000..3b36554e7075
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/fetchCargoTarball.nix
@@ -0,0 +1,86 @@
+{ lib, stdenv, cacert, git, cargo, python3 }:
+let cargo-vendor-normalise = stdenv.mkDerivation {
+  name = "cargo-vendor-normalise";
+  src = ./cargo-vendor-normalise.py;
+  nativeBuildInputs = [ python3.pkgs.wrapPython ];
+  dontUnpack = true;
+  installPhase = "install -D $src $out/bin/cargo-vendor-normalise";
+  pythonPath = [ python3.pkgs.toml ];
+  postFixup = "wrapPythonPrograms";
+  doInstallCheck = true;
+  installCheckPhase = ''
+    # check that ./fetchcargo-default-config.toml is a fix point
+    reference=${./fetchcargo-default-config.toml}
+    < $reference $out/bin/cargo-vendor-normalise > test;
+    cmp test $reference
+  '';
+  preferLocalBuild = true;
+};
+in
+{ name ? "cargo-deps"
+, src ? null
+, srcs ? []
+, patches ? []
+, sourceRoot ? ""
+, hash ? ""
+, sha256 ? ""
+, cargoUpdateHook ? ""
+, ...
+} @ args:
+
+let hash_ =
+  if hash != "" then { outputHashAlgo = null; outputHash = hash; }
+  else if sha256 != "" then { outputHashAlgo = "sha256"; outputHash = sha256; }
+  else throw "fetchCargoTarball requires a hash for ${name}";
+in stdenv.mkDerivation ({
+  name = "${name}-vendor.tar.gz";
+  nativeBuildInputs = [ cacert git cargo-vendor-normalise cargo ];
+
+  phases = "unpackPhase patchPhase buildPhase installPhase";
+
+  buildPhase = ''
+    # Ensure deterministic Cargo vendor builds
+    export SOURCE_DATE_EPOCH=1
+
+    if [[ ! -f Cargo.lock ]]; then
+        echo
+        echo "ERROR: The Cargo.lock file doesn't exist"
+        echo
+        echo "Cargo.lock is needed to make sure that cargoHash/cargoSha256 doesn't change"
+        echo "when the registry is updated."
+        echo
+
+        exit 1
+    fi
+
+    # Keep the original around for copyLockfile
+    cp Cargo.lock Cargo.lock.orig
+
+    export CARGO_HOME=$(mktemp -d cargo-home.XXX)
+    CARGO_CONFIG=$(mktemp cargo-config.XXXX)
+
+    ${cargoUpdateHook}
+
+    cargo vendor $name | cargo-vendor-normalise > $CARGO_CONFIG
+
+    # Add the Cargo.lock to allow hash invalidation
+    cp Cargo.lock.orig $name/Cargo.lock
+
+    # Packages with git dependencies generate non-default cargo configs, so
+    # always install it rather than trying to write a standard default template.
+    install -D $CARGO_CONFIG $name/.cargo/config;
+  '';
+
+  # Build a reproducible tar, per instructions at https://reproducible-builds.org/docs/archives/
+  installPhase = ''
+    tar --owner=0 --group=0 --numeric-owner --format=gnu \
+        --sort=name --mtime="@$SOURCE_DATE_EPOCH" \
+        -czf $out $name
+  '';
+
+  inherit (hash_) outputHashAlgo outputHash;
+
+  impureEnvVars = lib.fetchers.proxyImpureEnvVars;
+} // (builtins.removeAttrs args [
+  "name" "sha256" "cargoUpdateHook"
+]))
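The fetcher is also exposed directly as `rustPlatform.fetchCargoTarball`, which can be handy for inspecting the vendor tarball on its own; a sketch with a placeholder name and hash:

    rustPlatform.fetchCargoTarball {
      name = "hello-rs";  # hypothetical
      src = ./.;
      hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";  # placeholder
    }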
diff --git a/nixpkgs/pkgs/build-support/rust/fetchcargo-default-config.toml b/nixpkgs/pkgs/build-support/rust/fetchcargo-default-config.toml
new file mode 100755
index 000000000000..dd8ebbc32d31
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/fetchcargo-default-config.toml
@@ -0,0 +1,7 @@
+[source."crates-io"]
+"replace-with" = "vendored-sources"
+
+[source."vendored-sources"]
+"directory" = "@vendor@"
+
+
diff --git a/nixpkgs/pkgs/build-support/rust/fetchcrate.nix b/nixpkgs/pkgs/build-support/rust/fetchcrate.nix
new file mode 100644
index 000000000000..4e6c38b032ce
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/fetchcrate.nix
@@ -0,0 +1,38 @@
+{ lib, fetchurl, unzip }:
+
+{ crateName ? args.pname
+, pname ? null
+, version
+, sha256
+, ... } @ args:
+
+assert pname == null || pname == crateName;
+
+lib.overrideDerivation (fetchurl ({
+
+  name = "${crateName}-${version}.tar.gz";
+  url = "https://crates.io/api/v1/crates/${crateName}/${version}/download";
+  recursiveHash = true;
+
+  downloadToTemp = true;
+
+  postFetch =
+    ''
+      export PATH=${unzip}/bin:$PATH
+
+      unpackDir="$TMPDIR/unpack"
+      mkdir "$unpackDir"
+      cd "$unpackDir"
+
+      renamed="$TMPDIR/${crateName}-${version}.tar.gz"
+      mv "$downloadedFile" "$renamed"
+      unpackFile "$renamed"
+      fn=$(cd "$unpackDir" && echo *)
+      if [ -f "$unpackDir/$fn" ]; then
+        mkdir $out
+      fi
+      mv "$unpackDir/$fn" "$out"
+    '';
+} // removeAttrs args [ "crateName" "pname" "version" ]))
+# Hackety-hack: we actually need unzip hooks, too
+(x: { nativeBuildInputs = x.nativeBuildInputs ++ [ unzip ]; })
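A direct-call sketch; note the hash is recursive over the unpacked tree, not over the .tar.gz itself (the hash here is a placeholder):

    fetchCrate {
      crateName = "syn";
      version = "0.15.32";
      sha256 = "0000000000000000000000000000000000000000000000000000";  # placeholder
    }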
diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-build-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-build-hook.sh
new file mode 100644
index 000000000000..c10120c5aa19
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-build-hook.sh
@@ -0,0 +1,41 @@
+declare -a cargoBuildFlags
+
+cargoBuildHook() {
+    echo "Executing cargoBuildHook"
+
+    runHook preBuild
+
+    if [ ! -z "${buildAndTestSubdir-}" ]; then
+        pushd "${buildAndTestSubdir}"
+    fi
+
+    if [ "${cargoBuildType}" != "debug" ]; then
+        cargoBuildProfileFlag="--${cargoBuildType}"
+    fi
+
+    (
+    set -x
+    env \
+      "CC_@rustBuildPlatform@=@ccForBuild@" \
+      "CXX_@rustBuildPlatform@=@cxxForBuild@" \
+      "CC_@rustTargetPlatform@=@ccForHost@" \
+      "CXX_@rustTargetPlatform@=@cxxForHost@" \
+      cargo build -j $NIX_BUILD_CORES \
+        --target @rustTargetPlatformSpec@ \
+        --frozen \
+        ${cargoBuildProfileFlag} \
+        ${cargoBuildFlags}
+    )
+
+    if [ ! -z "${buildAndTestSubdir-}" ]; then
+        popd
+    fi
+
+    runHook postBuild
+
+    echo "Finished cargoBuildHook"
+}
+
+if [ -z "${dontCargoBuild-}" ] && [ -z "${buildPhase-}" ]; then
+  buildPhase=cargoBuildHook
+fi
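From the derivation side the hook is steered by attributes; a fragment sketch (the `cli` feature is hypothetical):

    # Inside a buildRustPackage call:
    {
      buildType = "debug";                      # becomes $cargoBuildType; drops the profile flag
      cargoBuildFlags = [ "--features" "cli" ]; # appended verbatim to the cargo invocation
    }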
diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-check-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-check-hook.sh
new file mode 100644
index 000000000000..f0339afb38fa
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-check-hook.sh
@@ -0,0 +1,46 @@
+declare -a checkFlags
+declare -a cargoTestFlags
+
+cargoCheckHook() {
+    echo "Executing cargoCheckHook"
+
+    runHook preCheck
+
+    if [[ -n "${buildAndTestSubdir-}" ]]; then
+        pushd "${buildAndTestSubdir}"
+    fi
+
+    if [[ -z ${dontUseCargoParallelTests-} ]]; then
+        threads=$NIX_BUILD_CORES
+    else
+        threads=1
+    fi
+
+    if [ "${cargoBuildType}" != "debug" ]; then
+        cargoBuildProfileFlag="--${cargoBuildType}"
+    fi
+
+    argstr="${cargoBuildProfileFlag} --target @rustTargetPlatformSpec@ --frozen ${cargoTestFlags}";
+
+    (
+        set -x
+        cargo test \
+              -j $NIX_BUILD_CORES \
+              ${argstr} -- \
+              --test-threads=${threads} \
+              ${checkFlags} \
+              ${checkFlagsArray+"${checkFlagsArray[@]}"}
+    )
+
+    if [[ -n "${buildAndTestSubdir-}" ]]; then
+        popd
+    fi
+
+    echo "Finished cargoCheckHook"
+
+    runHook postCheck
+}
+
+if [ -z "${dontCargoCheck-}" ] && [ -z "${checkPhase-}" ]; then
+  checkPhase=cargoCheckHook
+fi
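The corresponding knobs for this hook; a fragment sketch (the skipped test name is hypothetical):

    # Inside a buildRustPackage call:
    {
      checkType = "debug";                    # becomes $cargoCheckType; test the debug profile
      checkFlags = [ "--skip" "net_tests" ];  # forwarded to the libtest binary after `--`
      dontUseCargoParallelTests = true;       # forces --test-threads=1
    }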
diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-install-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-install-hook.sh
new file mode 100644
index 000000000000..69ce72669366
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-install-hook.sh
@@ -0,0 +1,49 @@
+cargoInstallPostBuildHook() {
+    echo "Executing cargoInstallPostBuildHook"
+
+    releaseDir=target/@shortTarget@/$cargoBuildType
+    tmpDir="${releaseDir}-tmp";
+
+    mkdir -p $tmpDir
+    cp -r ${releaseDir}/* $tmpDir/
+    bins=$(find $tmpDir \
+      -maxdepth 1 \
+      -type f \
+      -executable ! \( -regex ".*\.\(so.[0-9.]+\|so\|a\|dylib\)" \))
+
+    echo "Finished cargoInstallPostBuildHook"
+}
+
+cargoInstallHook() {
+    echo "Executing cargoInstallHook"
+
+    runHook preInstall
+
+    # Rename the output dir to an architecture-independent one.
+
+    releaseDir=target/@shortTarget@/$cargoBuildType
+    tmpDir="${releaseDir}-tmp";
+
+    mapfile -t targets < <(find "$NIX_BUILD_TOP" -type d | grep "${tmpDir}$")
+    for target in "${targets[@]}"; do
+      rm -rf "$target/../../${cargoBuildType}"
+      ln -srf "$target" "$target/../../"
+    done
+    mkdir -p $out/bin $out/lib
+
+    xargs -r cp -t $out/bin <<< $bins
+    find $tmpDir \
+      -maxdepth 1 \
+      -regex ".*\.\(so.[0-9.]+\|so\|a\|dylib\)" \
+      -print0 | xargs -r -0 cp -t $out/lib
+    rmdir --ignore-fail-on-non-empty $out/lib $out/bin
+    runHook postInstall
+
+    echo "Finished cargoInstallHook"
+}
+
+
+if [ -z "${dontCargoInstall-}" ] && [ -z "${installPhase-}" ]; then
+  installPhase=cargoInstallHook
+  postBuildHooks+=(cargoInstallPostBuildHook)
+fi
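Opting out for crates that need a bespoke install while keeping the build hook; a fragment sketch (the binary name is hypothetical):

    # Inside a buildRustPackage call:
    {
      dontCargoInstall = true;
      installPhase = ''
        runHook preInstall
        # cargo places artifacts under target/<shortTarget>/<buildType>.
        install -Dm755 target/*/release/hello -t $out/bin
        runHook postInstall
      '';
    }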
diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-setup-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-setup-hook.sh
new file mode 100644
index 000000000000..842e66b5170e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-setup-hook.sh
@@ -0,0 +1,86 @@
+cargoSetupPostUnpackHook() {
+    echo "Executing cargoSetupPostUnpackHook"
+
+    # Some cargo builds include build hooks that modify their own vendor
+    # dependencies. This copies the vendor directory into the build tree and makes
+    # it writable. If we're using a tarball, the unpackFile hook already handles
+    # this for us automatically.
+    if [ -z "${cargoVendorDir-}" ]; then
+        unpackFile "$cargoDeps"
+        export cargoDepsCopy=$(stripHash $cargoDeps)
+    else
+      cargoDepsCopy="$sourceRoot/${cargoRoot:+$cargoRoot/}${cargoVendorDir}"
+    fi
+
+    if [ ! -d .cargo ]; then
+        mkdir .cargo
+    fi
+
+    config="$(pwd)/$cargoDepsCopy/.cargo/config";
+    if [[ ! -e $config ]]; then
+      config=@defaultConfig@
+    fi;
+
+    tmp_config=$(mktemp)
+    substitute $config $tmp_config \
+      --subst-var-by vendor "$(pwd)/$cargoDepsCopy"
+    cat ${tmp_config} >> .cargo/config
+
+    cat >> .cargo/config <<'EOF'
+    @rustTarget@
+EOF
+
+    echo "Finished cargoSetupPostUnpackHook"
+}
+
+# After unpacking and applying patches, check that the Cargo.lock matches our
+# src package. Note that we do this after the patchPhase, because the
+# patchPhase may create the Cargo.lock if upstream has not shipped one.
+cargoSetupPostPatchHook() {
+    echo "Executing cargoSetupPostPatchHook"
+
+    cargoDepsLockfile="$NIX_BUILD_TOP/$cargoDepsCopy/Cargo.lock"
+    srcLockfile="$NIX_BUILD_TOP/$sourceRoot/${cargoRoot:+$cargoRoot/}Cargo.lock"
+
+    echo "Validating consistency between $srcLockfile and $cargoDepsLockfile"
+    if ! @diff@ $srcLockfile $cargoDepsLockfile; then
+
+      # If the diff failed, first double-check that the file exists, so we can
+      # give a friendlier error msg.
+      if ! [ -e $srcLockfile ]; then
+        echo "ERROR: Missing Cargo.lock from src. Expected to find it at: $srcLockfile"
+        echo "Hint: You can use the cargoPatches attribute to add a Cargo.lock manually to the build."
+        exit 1
+      fi
+
+      if ! [ -e $cargoDepsLockfile ]; then
+        echo "ERROR: Missing lockfile from cargo vendor. Expected to find it at: $cargoDepsLockfile"
+        exit 1
+      fi
+
+      echo
+      echo "ERROR: cargoSha256 is out of date"
+      echo
+      echo "Cargo.lock is not the same in $cargoDepsCopy"
+      echo
+      echo "To fix the issue:"
+      echo '1. Use "0000000000000000000000000000000000000000000000000000" as the cargoSha256 value'
+      echo "2. Build the derivation and wait for it to fail with a hash mismatch"
+      echo "3. Copy the 'got: sha256:' value back into the cargoSha256 field"
+      echo
+
+      exit 1
+    fi
+
+    unset cargoDepsCopy
+
+    echo "Finished cargoSetupPostPatchHook"
+}
+
+if [ -z "${dontCargoSetupPostUnpack-}" ]; then
+  postUnpackHooks+=(cargoSetupPostUnpackHook)
+fi
+
+if [ -z "${cargoVendorDir-}" ]; then
+  postPatchHooks+=(cargoSetupPostPatchHook)
+fi
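When upstream ships no Cargo.lock, the patch route mentioned in the comment above looks like this; a fragment sketch (the patch file is hypothetical):

    # Inside a buildRustPackage call:
    {
      # Applied both to the vendor tarball and to src, so the two
      # lockfile copies compared by this hook stay identical.
      cargoPatches = [ ./add-Cargo.lock.patch ];
    }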
diff --git a/nixpkgs/pkgs/build-support/rust/hooks/default.nix b/nixpkgs/pkgs/build-support/rust/hooks/default.nix
new file mode 100644
index 000000000000..d86c9ebaed80
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/hooks/default.nix
@@ -0,0 +1,95 @@
+{ buildPackages
+, callPackage
+, cargo
+, diffutils
+, lib
+, makeSetupHook
+, maturin
+, rust
+, rustc
+, stdenv
+, target ? rust.toRustTargetSpec stdenv.hostPlatform
+}:
+
+let
+  targetIsJSON = lib.hasSuffix ".json" target;
+
+  # see https://github.com/rust-lang/cargo/blob/964a16a28e234a3d397b2a7031d4ab4a428b1391/src/cargo/core/compiler/compile_kind.rs#L151-L168
+  # the "${}" is needed to transform the path into a /nix/store path before baseNameOf
+  shortTarget = if targetIsJSON then
+      (lib.removeSuffix ".json" (builtins.baseNameOf "${target}"))
+    else target;
+  ccForBuild = "${buildPackages.stdenv.cc}/bin/${buildPackages.stdenv.cc.targetPrefix}cc";
+  cxxForBuild = "${buildPackages.stdenv.cc}/bin/${buildPackages.stdenv.cc.targetPrefix}c++";
+  ccForHost = "${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc";
+  cxxForHost = "${stdenv.cc}/bin/${stdenv.cc.targetPrefix}c++";
+  rustBuildPlatform = rust.toRustTarget stdenv.buildPlatform;
+  rustTargetPlatform = rust.toRustTarget stdenv.hostPlatform;
+  rustTargetPlatformSpec = rust.toRustTargetSpec stdenv.hostPlatform;
+in {
+  cargoBuildHook = callPackage ({ }:
+    makeSetupHook {
+      name = "cargo-build-hook.sh";
+      deps = [ cargo ];
+      substitutions = {
+        inherit ccForBuild ccForHost cxxForBuild cxxForHost
+          rustBuildPlatform rustTargetPlatform rustTargetPlatformSpec;
+      };
+    } ./cargo-build-hook.sh) {};
+
+  cargoCheckHook = callPackage ({ }:
+    makeSetupHook {
+      name = "cargo-check-hook.sh";
+      deps = [ cargo ];
+      substitutions = {
+        inherit rustTargetPlatformSpec;
+      };
+    } ./cargo-check-hook.sh) {};
+
+  cargoInstallHook = callPackage ({ }:
+    makeSetupHook {
+      name = "cargo-install-hook.sh";
+      deps = [ ];
+      substitutions = {
+        inherit shortTarget;
+      };
+    } ./cargo-install-hook.sh) {};
+
+  cargoSetupHook = callPackage ({ }:
+    makeSetupHook {
+      name = "cargo-setup-hook.sh";
+      deps = [ ];
+      substitutions = {
+        defaultConfig = ../fetchcargo-default-config.toml;
+
+        # Specify the stdenv's `diff` by abspath to ensure that the user's build
+        # inputs do not cause us to find the wrong `diff`.
+        # The `.nativeDrv` stanza works like nativeBuildInputs and ensures cross-compiling has the right version available.
+        diff = "${diffutils.nativeDrv or diffutils}/bin/diff";
+
+        # Target platform
+        rustTarget = ''
+          [target."${rust.toRustTarget stdenv.buildPlatform}"]
+          "linker" = "${ccForBuild}"
+          ${lib.optionalString (stdenv.buildPlatform.config != stdenv.hostPlatform.config) ''
+            [target."${shortTarget}"]
+            "linker" = "${ccForHost}"
+            ${# https://github.com/rust-lang/rust/issues/46651#issuecomment-433611633
+            lib.optionalString (stdenv.hostPlatform.isMusl && stdenv.hostPlatform.isAarch64) ''
+              "rustflags" = [ "-C", "target-feature=+crt-static", "-C", "link-arg=-lgcc" ]
+            ''}
+          ''}
+        '';
+      };
+    } ./cargo-setup-hook.sh) {};
+
+  maturinBuildHook = callPackage ({ }:
+    makeSetupHook {
+      name = "maturin-build-hook.sh";
+      deps = [ cargo maturin rustc ];
+      substitutions = {
+        inherit ccForBuild ccForHost cxxForBuild cxxForHost
+          rustBuildPlatform rustTargetPlatform rustTargetPlatformSpec;
+      };
+    } ./maturin-build-hook.sh) {};
+}
diff --git a/nixpkgs/pkgs/build-support/rust/hooks/maturin-build-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/maturin-build-hook.sh
new file mode 100644
index 000000000000..7e2599d92240
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/hooks/maturin-build-hook.sh
@@ -0,0 +1,39 @@
+maturinBuildHook() {
+    echo "Executing maturinBuildHook"
+
+    runHook preBuild
+
+    if [ ! -z "${buildAndTestSubdir-}" ]; then
+        pushd "${buildAndTestSubdir}"
+    fi
+
+    (
+    set -x
+    env \
+      "CC_@rustBuildPlatform@=@ccForBuild@" \
+      "CXX_@rustBuildPlatform@=@cxxForBuild@" \
+      "CC_@rustTargetPlatform@=@ccForHost@" \
+      "CXX_@rustTargetPlatform@=@cxxForHost@" \
+      maturin build \
+        --cargo-extra-args="-j $NIX_BUILD_CORES --frozen" \
+        --target @rustTargetPlatformSpec@ \
+        --manylinux off \
+        --strip \
+        --release \
+        ${maturinBuildFlags-}
+    )
+
+    runHook postBuild
+
+    if [ ! -z "${buildAndTestSubdir-}" ]; then
+        popd
+    fi
+
+    # Move the wheel to dist/ so that regular Python tooling can find it.
+    mkdir -p dist
+    mv target/wheels/*.whl dist/
+
+    echo "Finished maturinBuildHook"
+}
+
+buildPhase=maturinBuildHook
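A consumer sketch for a mixed Rust/Python project, mirroring how this hook is combined with cargoSetupHook (names and hash are placeholders):

    python3Packages.buildPythonPackage rec {
      pname = "hello-py";  # hypothetical pyproject package
      version = "0.1.0";
      format = "pyproject";
      src = ./.;

      cargoDeps = rustPlatform.fetchCargoTarball {
        inherit src;
        name = "${pname}-${version}";
        hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";  # placeholder
      };

      nativeBuildInputs = with rustPlatform; [ cargoSetupHook maturinBuildHook ];
    }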
diff --git a/nixpkgs/pkgs/build-support/rust/import-cargo-lock.nix b/nixpkgs/pkgs/build-support/rust/import-cargo-lock.nix
new file mode 100644
index 000000000000..244572f79e80
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/import-cargo-lock.nix
@@ -0,0 +1,167 @@
+{ fetchgit, fetchurl, lib, runCommand, cargo, jq }:
+
+{
+  # Cargo lock file
+  lockFile
+
+  # Hashes for git dependencies.
+, outputHashes ? {}
+}:
+
+let
+  # Parse a git source into different components.
+  parseGit = src:
+    let
+      parts = builtins.match ''git\+([^?]+)(\?rev=(.*))?#(.*)?'' src;
+      rev = builtins.elemAt parts 2;
+    in
+      if parts == null then null
+      else {
+        url = builtins.elemAt parts 0;
+        sha = builtins.elemAt parts 3;
+      } // lib.optionalAttrs (rev != null) { inherit rev; };
+
+  packages = (builtins.fromTOML (builtins.readFile lockFile)).package;
+
+  # There is no source attribute for the source package itself. But
+  # since we do not want to vendor the source package anyway, we can
+  # safely skip it.
+  depPackages = (builtins.filter (p: p ? "source") packages);
+
+  # Create dependent crates from packages.
+  #
+  # Force evaluation of the git SHA -> hash mapping, so that an error is
+  # thrown if there are stale hashes. We cannot rely on gitShaOutputHash
+  # being evaluated otherwise, since there could be no git dependencies.
+  depCrates = builtins.deepSeq (gitShaOutputHash) (builtins.map mkCrate depPackages);
+
+  # Map package name + version to git commit SHA for packages with a git source.
+  namesGitShas = builtins.listToAttrs (
+    builtins.map nameGitSha (builtins.filter (pkg: lib.hasPrefix "git+" pkg.source) depPackages)
+  );
+
+  nameGitSha = pkg: let gitParts = parseGit pkg.source; in {
+    name = "${pkg.name}-${pkg.version}";
+    value = gitParts.sha;
+  };
+
+  # Convert the attrset provided through the `outputHashes` argument to
+  # a mapping from git commit SHA -> output hash.
+  #
+  # There may be multiple different packages with different names
+  # originating from the same git repository (typically a Cargo
+  # workspace). By using the git commit SHA as a universal identifier,
+  # the user does not have to specify the output hash for every package
+  # individually.
+  gitShaOutputHash = lib.mapAttrs' (nameVer: hash:
+    let
+      unusedHash = throw "A hash was specified for ${nameVer}, but there is no corresponding git dependency.";
+      rev = namesGitShas.${nameVer} or unusedHash; in {
+      name = rev;
+      value = hash;
+    }) outputHashes;
+
+  # We can't use the existing fetchCrate function, since it uses a
+  # recursive hash of the unpacked crate.
+  fetchCrate = pkg: fetchurl {
+    name = "crate-${pkg.name}-${pkg.version}.tar.gz";
+    url = "https://crates.io/api/v1/crates/${pkg.name}/${pkg.version}/download";
+    sha256 = pkg.checksum;
+  };
+
+  # Fetch and unpack a crate.
+  mkCrate = pkg:
+    let
+      gitParts = parseGit pkg.source;
+    in
+      if pkg.source == "registry+https://github.com/rust-lang/crates.io-index" then
+      let
+        crateTarball = fetchCrate pkg;
+      in runCommand "${pkg.name}-${pkg.version}" {} ''
+        mkdir $out
+        tar xf "${crateTarball}" -C $out --strip-components=1
+
+        # Cargo is happy with largely empty metadata.
+        printf '{"files":{},"package":"${pkg.checksum}"}' > "$out/.cargo-checksum.json"
+      ''
+      else if gitParts != null then
+      let
+        missingHash = throw ''
+          No hash was found while vendoring the git dependency ${pkg.name}-${pkg.version}. You can add
+          a hash through the `outputHashes` argument of `importCargoLock`:
+
+          outputHashes = {
+            "${pkg.name}-${pkg.version}" = "<hash>";
+          };
+
+          If you use `buildRustPackage`, you can add this attribute to the `cargoLock`
+          attribute set.
+        '';
+        sha256 = gitShaOutputHash.${gitParts.sha} or missingHash;
+        tree = fetchgit {
+          inherit sha256;
+          inherit (gitParts) url;
+          rev = gitParts.sha; # The commit SHA is always available.
+        };
+      in runCommand "${pkg.name}-${pkg.version}" {} ''
+        tree=${tree}
+        if grep --quiet '\[workspace\]' "$tree/Cargo.toml"; then
+          # If the target package is in a workspace, find the crate path
+          # using `cargo metadata`.
+          crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $tree/Cargo.toml | \
+            ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path')
+
+          if [[ -n $crateCargoTOML ]]; then
+            tree=$(dirname $crateCargoTOML)
+          else
+            >&2 echo "Cannot find path for crate '${pkg.name}-${pkg.version}' in the Cargo workspace in: $tree"
+            exit 1
+          fi
+        fi
+
+        cp -prvd "$tree/" $out
+        chmod u+w $out
+
+        # Cargo is happy with empty metadata.
+        printf '{"files":{},"package":null}' > "$out/.cargo-checksum.json"
+
+        # Set up configuration for the vendor directory.
+        cat > $out/.cargo-config <<EOF
+        [source."${gitParts.url}"]
+        git = "${gitParts.url}"
+        ${lib.optionalString (gitParts ? rev) "rev = \"${gitParts.rev}\""}
+        replace-with = "vendored-sources"
+        EOF
+      ''
+      else throw "Cannot handle crate source: ${pkg.source}";
+
+  vendorDir = runCommand "cargo-vendor-dir" {} ''
+    mkdir -p $out/.cargo
+
+    ln -s ${lockFile} $out/Cargo.lock
+
+    cat > $out/.cargo/config <<EOF
+    [source.crates-io]
+    replace-with = "vendored-sources"
+
+    [source.vendored-sources]
+    directory = "cargo-vendor-dir"
+    EOF
+
+    declare -A keysSeen
+
+    for crate in ${toString depCrates}; do
+      # Link the crate directory, removing the output path hash from the destination.
+      ln -s "$crate" $out/$(basename "$crate" | cut -c 34-)
+
+      if [ -e "$crate/.cargo-config" ]; then
+        key=$(sed 's/\[source\."\(.*\)"\]/\1/; t; d' < "$crate/.cargo-config")
+        if [[ -z ''${keysSeen[$key]} ]]; then
+          keysSeen[$key]=1
+          cat "$crate/.cargo-config" >> $out/.cargo/config
+        fi
+      fi
+    done
+  '';
+in
+  vendorDir
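A direct-call sketch; `buildRustPackage` reaches the same code through its `cargoLock` argument. The git hash is a placeholder, keyed by "name-version" as the error message above describes:

    importCargoLock {
      lockFile = ./Cargo.lock;
      outputHashes = {
        "rand-0.8.3" = "0000000000000000000000000000000000000000000000000000";  # placeholder
      };
    }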
diff --git a/nixpkgs/pkgs/build-support/rust/patch-registry-deps/pkg-config b/nixpkgs/pkgs/build-support/rust/patch-registry-deps/pkg-config
new file mode 100644
index 000000000000..fbb094304587
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/patch-registry-deps/pkg-config
@@ -0,0 +1,8 @@
+for dir in pkg-config-*; do
+    [ -d "$dir" ] || continue
+
+    echo "Patching pkg-config registry dep"
+
+    substituteInPlace "$dir/src/lib.rs" \
+        --replace '"/usr"' '"'"$NIX_STORE"'/"'
+done
diff --git a/nixpkgs/pkgs/build-support/rust/sysroot/Cargo.lock b/nixpkgs/pkgs/build-support/rust/sysroot/Cargo.lock
new file mode 100644
index 000000000000..61fcef61744e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/sysroot/Cargo.lock
@@ -0,0 +1,29 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "alloc"
+version = "0.0.0"
+dependencies = [
+ "compiler_builtins",
+ "core",
+]
+
+[[package]]
+name = "compiler_builtins"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7cd0782e0a7da7598164153173e5a5d4d9b1da094473c98dce0ff91406112369"
+dependencies = [
+ "rustc-std-workspace-core",
+]
+
+[[package]]
+name = "core"
+version = "0.0.0"
+
+[[package]]
+name = "rustc-std-workspace-core"
+version = "1.99.0"
+dependencies = [
+ "core",
+]
diff --git a/nixpkgs/pkgs/build-support/rust/sysroot/cargo.py b/nixpkgs/pkgs/build-support/rust/sysroot/cargo.py
new file mode 100644
index 000000000000..09f6fba6d1c8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/sysroot/cargo.py
@@ -0,0 +1,45 @@
+import os
+import toml
+
+rust_src = os.environ['RUSTC_SRC']
+orig_cargo = os.environ['ORIG_CARGO'] if 'ORIG_CARGO' in os.environ else None
+
+base = {
+  'package': {
+    'name': 'alloc',
+    'version': '0.0.0',
+    'authors': ['The Rust Project Developers'],
+    'edition': '2018',
+  },
+  'dependencies': {
+    'compiler_builtins': {
+      'version': '0.1.0',
+      'features': ['rustc-dep-of-std', 'mem'],
+    },
+    'core': {
+      'path': os.path.join(rust_src, 'libcore'),
+    },
+  },
+  'lib': {
+    'name': 'alloc',
+    'path': os.path.join(rust_src, 'liballoc/lib.rs'),
+  },
+  'patch': {
+    'crates-io': {
+      'rustc-std-workspace-core': {
+        'path': os.path.join(rust_src, 'tools/rustc-std-workspace-core'),
+      },
+    },
+  },
+}
+
+if orig_cargo is not None:
+  with open(orig_cargo, 'r') as f:
+    src = toml.loads(f.read())
+    if 'profile' in src:
+      base['profile'] = src['profile']
+
+out = toml.dumps(base)
+
+with open('Cargo.toml', 'x') as f:
+  f.write(out)
diff --git a/nixpkgs/pkgs/build-support/rust/sysroot/default.nix b/nixpkgs/pkgs/build-support/rust/sysroot/default.nix
new file mode 100644
index 000000000000..4db7cf0dc392
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/sysroot/default.nix
@@ -0,0 +1,41 @@
+{ stdenv, rust, rustPlatform, buildPackages }:
+
+{ shortTarget, originalCargoToml, target, RUSTFLAGS }:
+
+let
+  cargoSrc = stdenv.mkDerivation {
+    name = "cargo-src";
+    preferLocalBuild = true;
+    phases = [ "installPhase" ];
+    installPhase = ''
+      RUSTC_SRC=${rustPlatform.rustcSrc.override { minimalContent = false; }} ORIG_CARGO=${originalCargoToml} \
+        ${buildPackages.python3.withPackages (ps: with ps; [ toml ])}/bin/python3 ${./cargo.py}
+      mkdir -p $out
+      cp Cargo.toml $out/Cargo.toml
+      cp ${./Cargo.lock} $out/Cargo.lock
+    '';
+  };
+in rustPlatform.buildRustPackage {
+  inherit target RUSTFLAGS;
+
+  name = "custom-sysroot";
+  src =  cargoSrc;
+
+  RUSTC_BOOTSTRAP = 1;
+  __internal_dontAddSysroot = true;
+  cargoSha256 = "0y6dqfhsgk00y3fv5bnjzk0s7i30nwqc1rp0xlrk83hkh80x81mw";
+
+  doCheck = false;
+
+  installPhase = ''
+    export LIBS_DIR=$out/lib/rustlib/${shortTarget}/lib
+    mkdir -p $LIBS_DIR
+    for f in target/${shortTarget}/release/deps/*.{rlib,rmeta}; do
+      cp $f $LIBS_DIR
+    done
+
+    export RUST_SYSROOT=$(rustc --print=sysroot)
+    host=${rust.toRustTarget stdenv.buildPlatform}
+    cp -r $RUST_SYSROOT/lib/rustlib/$host $out
+  '';
+}
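For reference, this mirrors the instantiation in rust/default.nix when the target is a JSON spec; a sketch with a hypothetical custom target:

    (callPackage ./sysroot { }) {
      target = ./x86_64-custom.json;  # hypothetical target spec file
      shortTarget = "x86_64-custom";
      RUSTFLAGS = "";
      originalCargoToml = ./Cargo.toml;
    }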
diff --git a/nixpkgs/pkgs/build-support/rust/sysroot/update-lockfile.sh b/nixpkgs/pkgs/build-support/rust/sysroot/update-lockfile.sh
new file mode 100755
index 000000000000..83d29832384f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/sysroot/update-lockfile.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p python3 python3.pkgs.toml cargo
+
+set -e
+
+HERE=$(dirname "${BASH_SOURCE[0]}")
+NIXPKGS_ROOT="$HERE/../../../.."
+
+# https://unix.stackexchange.com/a/84980/390173
+tempdir=$(mktemp -d 2>/dev/null || mktemp -d -t 'update-lockfile')
+
+cd "$tempdir"
+nix-build -E "with import (/. + \"${NIXPKGS_ROOT}\") {}; pkgs.rustPlatform.rustcSrc.override { minimalContent = false; }"
+RUSTC_SRC="$(pwd)/result" python3 "$HERE/cargo.py"
+RUSTC_BOOTSTRAP=1 cargo build || echo "Build failure is expected. All that's needed is the lockfile."
+
+cp Cargo.lock "$HERE"
+
+rm -rf "$tempdir"
+
+
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.lock
new file mode 100644
index 000000000000..fd1b5e42ad30
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.lock
@@ -0,0 +1,83 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "basic"
+version = "0.1.0"
+dependencies = [
+ "rand",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "getrandom"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
+
+[[package]]
+name = "rand"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.toml
new file mode 100644
index 000000000000..f555bb0de62e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "basic"
+version = "0.1.0"
+authors = ["Daniël de Kok <me@danieldk.eu>"]
+edition = "2018"
+
+[dependencies]
+rand = "0.8"
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/default.nix
new file mode 100644
index 000000000000..d595b58109ad
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/default.nix
@@ -0,0 +1,18 @@
+{ rustPlatform }:
+
+rustPlatform.buildRustPackage {
+  pname = "basic";
+  version = "0.1.0";
+
+  src = ./.;
+
+  cargoLock = {
+    lockFile = ./Cargo.lock;
+  };
+
+  doInstallCheck = true;
+
+  installCheckPhase = ''
+    $out/bin/basic
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/src/main.rs
new file mode 100644
index 000000000000..50b4ed799e43
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/src/main.rs
@@ -0,0 +1,9 @@
+use rand::Rng;
+
+fn main() {
+    let mut rng = rand::thread_rng();
+
+    // Always draw zero :).
+    let roll: u8 = rng.gen_range(0..1);
+    assert_eq!(roll, 0);
+}
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/default.nix
new file mode 100644
index 000000000000..2dd525a8ac3f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/default.nix
@@ -0,0 +1,8 @@
+{ callPackage }:
+
+{
+  basic = callPackage ./basic { };
+  gitDependency = callPackage ./git-dependency { };
+  gitDependencyNoRev = callPackage ./git-dependency-no-rev { };
+  maturin = callPackage ./maturin { };
+}
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/Cargo.lock
new file mode 100644
index 000000000000..54b9c7c5739d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/Cargo.lock
@@ -0,0 +1,79 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "getrandom"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "git-dependency-no-rev"
+version = "0.1.0"
+dependencies = [
+ "rand",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
+
+[[package]]
+name = "rand"
+version = "0.8.3"
+source = "git+https://github.com/rust-random/rand.git#f0e01ee0a7257753cc51b291f62666f4765923ef"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.0"
+source = "git+https://github.com/rust-random/rand.git#f0e01ee0a7257753cc51b291f62666f4765923ef"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.2"
+source = "git+https://github.com/rust-random/rand.git#f0e01ee0a7257753cc51b291f62666f4765923ef"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.3.0"
+source = "git+https://github.com/rust-random/rand.git#f0e01ee0a7257753cc51b291f62666f4765923ef"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/Cargo.toml
new file mode 100644
index 000000000000..770dfb86f523
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "git-dependency-no-rev"
+version = "0.1.0"
+authors = ["Daniël de Kok <me@danieldk.eu>"]
+edition = "2018"
+
+[dependencies]
+rand = { git = "https://github.com/rust-random/rand.git" }
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/default.nix
new file mode 100644
index 000000000000..fc36edc40772
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/default.nix
@@ -0,0 +1,21 @@
+{ rustPlatform }:
+
+rustPlatform.buildRustPackage {
+  pname = "git-dependency-no-rev";
+  version = "0.1.0";
+
+  src = ./.;
+
+  cargoLock = {
+    lockFile = ./Cargo.lock;
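+    # Git dependencies carry no checksum in Cargo.lock, so each checkout
+    # needs an explicit fixed-output hash, keyed by "<name>-<version>".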
+    outputHashes = {
+      "rand-0.8.3" = "0ya2hia3cn31qa8894s3av2s8j5bjwb6yq92k0jsnlx7jid0jwqa";
+    };
+  };
+
+  doInstallCheck = true;
+
+  installCheckPhase = ''
+    $out/bin/git-dependency-no-rev
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/src/main.rs
new file mode 100644
index 000000000000..50b4ed799e43
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-no-rev/src/main.rs
@@ -0,0 +1,9 @@
+use rand::Rng;
+
+fn main() {
+    let mut rng = rand::thread_rng();
+
+    // Always draw zero :).
+    let roll: u8 = rng.gen_range(0..1);
+    assert_eq!(roll, 0);
+}
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.lock
new file mode 100644
index 000000000000..50600ef4caa5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.lock
@@ -0,0 +1,79 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "getrandom"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "git-dependency"
+version = "0.1.0"
+dependencies = [
+ "rand",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.94"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
+
+[[package]]
+name = "rand"
+version = "0.8.3"
+source = "git+https://github.com/rust-random/rand.git?rev=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.0"
+source = "git+https://github.com/rust-random/rand.git?rev=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.1"
+source = "git+https://github.com/rust-random/rand.git?rev=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.3.0"
+source = "git+https://github.com/rust-random/rand.git?rev=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.toml
new file mode 100644
index 000000000000..11ee8b1763e6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "git-dependency"
+version = "0.1.0"
+authors = ["Daniël de Kok <me@danieldk.eu>"]
+edition = "2018"
+
+[dependencies]
+rand = { git = "https://github.com/rust-random/rand.git", rev = "0.8.3" }
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/default.nix
new file mode 100644
index 000000000000..17276c5f5c3c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/default.nix
@@ -0,0 +1,21 @@
+{ rustPlatform }:
+
+rustPlatform.buildRustPackage {
+  pname = "git-dependency";
+  version = "0.1.0";
+
+  src = ./.;
+
+  cargoLock = {
+    lockFile = ./Cargo.lock;
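+    # As in the no-rev test: the git checkout needs a fixed-output hash,
+    # keyed by "<name>-<version>", even when a rev is pinned.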
+    outputHashes = {
+      "rand-0.8.3" = "0l3p174bpwia61vcvxz5mw65a13ri3wy94z04xrnyy5lzciykz4f";
+    };
+  };
+
+  doInstallCheck = true;
+
+  installCheckPhase = ''
+    $out/bin/git-dependency
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/src/main.rs
new file mode 100644
index 000000000000..50b4ed799e43
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/src/main.rs
@@ -0,0 +1,9 @@
+use rand::Rng;
+
+fn main() {
+    let mut rng = rand::thread_rng();
+
+    // Always draw zero :).
+    let roll: u8 = rng.gen_range(0..1);
+    assert_eq!(roll, 0);
+}
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/maturin/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/maturin/Cargo.lock
new file mode 100644
index 000000000000..5e698d4ff735
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/maturin/Cargo.lock
@@ -0,0 +1,682 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "ahash"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e"
+
+[[package]]
+name = "assert_approx_eq"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c07dab4369547dbe5114677b33fbbf724971019f3818172d59a97a61c774ffd"
+
+[[package]]
+name = "autocfg"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
+
+[[package]]
+name = "bitflags"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
+
+[[package]]
+name = "byteorder"
+version = "1.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae44d1a3d5a19df61dd0c8beb138458ac2a53a7ac09eba97d55592540004306b"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "const_fn"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28b9d6de7f49e22cf97ad17fc4036ece69300032f45f78f30b4a4482cdc3f4a6"
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d"
+dependencies = [
+ "cfg-if",
+ "const_fn",
+ "crossbeam-utils",
+ "lazy_static",
+ "memoffset",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "lazy_static",
+]
+
+[[package]]
+name = "ctor"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "either"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+
+[[package]]
+name = "getrandom"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "ghost"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a5bcf1bbeab73aa4cf2fde60a846858dc036163c7c33bec309f8d17de785479"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "glob"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
+
+[[package]]
+name = "hashbrown"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
+dependencies = [
+ "ahash",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "indoc"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8"
+dependencies = [
+ "indoc-impl",
+ "proc-macro-hack",
+]
+
+[[package]]
+name = "indoc-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0"
+dependencies = [
+ "proc-macro-hack",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unindent",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "inventory"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f0f7efb804ec95e33db9ad49e4252f049e37e8b0a4652e3cd61f7999f2eff7f"
+dependencies = [
+ "ctor",
+ "ghost",
+ "inventory-impl",
+]
+
+[[package]]
+name = "inventory-impl"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75c094e94816723ab936484666968f5b58060492e880f3c8d00489a1e244fa51"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "itoa"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.86"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7282d924be3275cec7f6756ff4121987bc6481325397dde6ba3e7802b1a8b1c"
+
+[[package]]
+name = "lock_api"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312"
+dependencies = [
+ "scopeguard",
+]
+
+[[package]]
+name = "memoffset"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e9a41747ae4633fce5adffb4d2e81ffc5e89593cb19917f8fb2cc5ff76507bf"
+dependencies = [
+ "autocfg",
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-complex"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "747d632c0c558b87dbabbe6a82f3b4ae03720d0646ac5b7b4dae89394be5f2c5"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
+dependencies = [
+ "autocfg",
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "parking_lot"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb"
+dependencies = [
+ "instant",
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018"
+dependencies = [
+ "cfg-if",
+ "instant",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "winapi",
+]
+
+[[package]]
+name = "paste"
+version = "0.1.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880"
+dependencies = [
+ "paste-impl",
+ "proc-macro-hack",
+]
+
+[[package]]
+name = "paste-impl"
+version = "0.1.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6"
+dependencies = [
+ "proc-macro-hack",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
+
+[[package]]
+name = "proc-macro-hack"
+version = "0.5.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "proptest"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12e6c80c1139113c28ee4670dc50cc42915228b51f56a9e407f0ec60f966646f"
+dependencies = [
+ "bitflags",
+ "byteorder",
+ "lazy_static",
+ "num-traits",
+ "quick-error",
+ "rand",
+ "rand_chacha",
+ "rand_xorshift",
+ "regex-syntax",
+]
+
+[[package]]
+name = "pyo3"
+version = "0.13.2"
+dependencies = [
+ "assert_approx_eq",
+ "cfg-if",
+ "ctor",
+ "hashbrown",
+ "indoc",
+ "inventory",
+ "libc",
+ "num-bigint",
+ "num-complex",
+ "parking_lot",
+ "paste",
+ "proptest",
+ "pyo3",
+ "pyo3-macros",
+ "rustversion",
+ "serde",
+ "serde_json",
+ "trybuild",
+ "unindent",
+]
+
+[[package]]
+name = "pyo3-macros"
+version = "0.13.2"
+dependencies = [
+ "pyo3-macros-backend",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "pyo3-macros-backend"
+version = "0.13.2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
+dependencies = [
+ "getrandom",
+ "libc",
+ "rand_chacha",
+ "rand_core",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "rand_xorshift"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77d416b86801d23dde1aa643023b775c3a462efc0ed96443add11546cdf1dca8"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "rayon"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674"
+dependencies = [
+ "autocfg",
+ "crossbeam-deque",
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "lazy_static",
+ "num_cpus",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
+
+[[package]]
+name = "rustapi-module"
+version = "0.1.0"
+dependencies = [
+ "pyo3",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb5d2a036dc6d2d8fd16fde3498b04306e29bd193bf306a57427019b823d5acd"
+
+[[package]]
+name = "ryu"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "serde"
+version = "1.0.123"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92d5161132722baa40d802cc70b15262b98258453e85e5d1d365c757c73869ae"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.123"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9391c295d64fc0abb2c556bad848f33cb8296276b1ad2677d1ae1ace4f258f31"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.62"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea1c6153794552ea7cf7cf63b1231a25de00ec90db326ba6264440fa08e31486"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e"
+
+[[package]]
+name = "syn"
+version = "1.0.60"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "toml"
+version = "0.5.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "trybuild"
+version = "1.0.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99471a206425fba51842a9186315f32d91c56eadc21ea4c21f847b59cf778f8b"
+dependencies = [
+ "glob",
+ "lazy_static",
+ "serde",
+ "serde_json",
+ "termcolor",
+ "toml",
+]
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
+
+[[package]]
+name = "unindent"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7"
+
+[[package]]
+name = "wasi"
+version = "0.9.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "word-count"
+version = "0.1.0"
+dependencies = [
+ "pyo3",
+ "rayon",
+]
diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/maturin/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/maturin/default.nix
new file mode 100644
index 000000000000..af0de596b387
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/maturin/default.nix
@@ -0,0 +1,43 @@
+{ lib
+, fetchFromGitHub
+, python3
+, rustPlatform
+}:
+
+python3.pkgs.buildPythonPackage rec {
+  pname = "word-count";
+  version = "0.13.2";
+
+  format = "pyproject";
+
+  src = fetchFromGitHub {
+    owner = "PyO3";
+    repo = "pyo3";
+    rev = "v${version}";
+    hash = "sha256-NOMrrfo8WjlPhtGxWUOPJS/UDDdbLQRCXR++Zd6JmIA=";
+  };
+
+  cargoDeps = rustPlatform.importCargoLock {
+    lockFile = ./Cargo.lock;
+  };
+
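+  # The pyo3 repository does not ship a Cargo.lock, so the pinned lock
+  # file used by importCargoLock above is also copied into the source tree.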
+  postPatch = ''
+    cp ${./Cargo.lock} Cargo.lock
+  '';
+
+  buildAndTestSubdir = "examples/word-count";
+
+  nativeBuildInputs = with rustPlatform; [
+    cargoSetupHook
+    maturinBuildHook
+  ];
+
+  pythonImportsCheck = [ "word_count" ];
+
+  meta = with lib; {
+    description = "PyO3 word count example";
+    homepage = "https://github.com/PyO3/pyo3";
+    license = licenses.asl20;
+    maintainers = [ maintainers.danieldk ];
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh b/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh
new file mode 100644
index 000000000000..6a40073fb234
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh
@@ -0,0 +1,37 @@
+# Ensure that we are always linking against “libblas.so.3” and
+# “liblapack.so.3”.
+
+auditBlas() {
+    local dir="$prefix"
+    [ -e "$dir" ] || return 0
+
+    local i
+    while IFS= read -r -d $'\0' i; do
+        if ! isELF "$i"; then continue; fi
+
+        if $OBJDUMP -p "$i" | grep 'NEEDED' | awk '{ print $2; }' | grep -q '\(libmkl_rt.so\|libopenblas.so.0\)'; then
+            echo "$i refers to a specific implementation of BLAS or LAPACK."
+            echo "This prevents users from switching BLAS/LAPACK implementations."
+            echo "Add \`blas' or \`lapack' to buildInputs instead of \`mkl' or \`openblas'."
+            exit 1
+        fi
+
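+        # Walk the RPATH: only @blas@/lib and @lapack@/lib may provide
+        # libblas.so.3/liblapack.so.3.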
+        (IFS=:
+         # The substitution must stay unquoted so IFS=: splits the RPATH.
+         for dir in $(patchelf --print-rpath "$i"); do
+             if [ -f "$dir/libblas.so.3" ] || [ -f "$dir/libblas.so" ]; then
+                 if [ "$dir" != "@blas@/lib" ]; then
+                     echo "$dir is not allowed to contain a library named libblas.so.3"
+                     exit 1
+                 fi
+             fi
+             if [ -f "$dir/liblapack.so.3" ] || [ -f "$dir/liblapack.so" ]; then
+                 if [ "$dir" != "@lapack@/lib" ]; then
+                     echo "$dir is not allowed to contain a library named liblapack.so.3"
+                     exit 1
+                 fi
+             fi
+         done)
+    done < <(find "$dir" -type f -print0)
+}
+
+fixupOutputHooks+=(auditBlas)
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh b/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh
new file mode 100644
index 000000000000..c9dd32d1dd22
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh
@@ -0,0 +1,41 @@
+# Check whether RPATHs or wrapper scripts contain references to
+# $TMPDIR. This is a serious security bug because it allows any user
+# to inject files into search paths of other users' processes.
+#
+# It might be better to have Nix scan build output for any occurrence
+# of $TMPDIR (which would also be good for reproducibility), but at
+# the moment that would produce too many spurious errors (e.g. debug
+# info or assertion messages that refer to $TMPDIR).
+
+fixupOutputHooks+=('if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi')
+
+auditTmpdir() {
+    local dir="$1"
+    [ -e "$dir" ] || return 0
+
+    header "checking for references to $TMPDIR/ in $dir..."
+
+    local i
+    while IFS= read -r -d $'\0' i; do
+        if [[ "$i" =~ .build-id ]]; then continue; fi
+
+        if isELF "$i"; then
+            if { printf :; patchelf --print-rpath "$i"; } | grep -q -F ":$TMPDIR/"; then
+                echo "RPATH of binary $i contains a forbidden reference to $TMPDIR/"
+                exit 1
+            fi
+        fi
+
+        if isScript "$i"; then
+            if [ -e "$(dirname "$i")/.$(basename "$i")-wrapped" ]; then
+                if grep -q -F "$TMPDIR/" "$i"; then
+                    echo "wrapper script $i contains a forbidden reference to $TMPDIR/"
+                    exit 1
+                fi
+            fi
+        fi
+
+    done < <(find "$dir" -type f -print0)
+
+    stopNest
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh b/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh
new file mode 100644
index 000000000000..511371931de8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh
@@ -0,0 +1,265 @@
+#!/usr/bin/env bash
+
+declare -a autoPatchelfLibs
+declare -Ag autoPatchelfFailedDeps
+
+gatherLibraries() {
+    autoPatchelfLibs+=("$1/lib")
+}
+
+# wrapper around patchelf to raise proper error messages
+# containing the tried file name and command
+runPatchelf() {
+  patchelf "$@" || { echo "Command failed: patchelf $*" >&2; exit 1; }
+}
+
+# shellcheck disable=SC2154
+# (targetOffset is referenced but not assigned.)
+addEnvHooks "$targetOffset" gatherLibraries
+
+isExecutable() {
+    # For dynamically linked ELF files it would be enough to check just for the
+    # INTERP section. However, we won't catch statically linked executables as
+    # they only have an ELF type of EXEC but no INTERP.
+    #
+    # So what we do here is just check whether *either* the ELF type is EXEC
+    # *or* there is an INTERP section. This also catches position-independent
+    # executables, as they typically have an INTERP section but their ELF type
+    # is DYN.
+    isExeResult="$(LANG=C $READELF -h -l "$1" 2> /dev/null \
+        | grep '^ *Type: *EXEC\>\|^ *INTERP\>')"
+    # not using grep -q, because it can cause Broken pipe
+    [ -n "$isExeResult" ]
+}
+
+# We cache dependencies so that we don't need to search through all of them on
+# every consecutive call to findDependency.
+declare -Ag autoPatchelfCachedDepsAssoc
+declare -ag autoPatchelfCachedDeps
+
+
+addToDepCache() {
+    if [[ ${autoPatchelfCachedDepsAssoc[$1]+f} ]]; then return; fi
+
+    # store deps in an assoc. array for efficient lookups
+    # otherwise findDependency would have quadratic complexity
+    autoPatchelfCachedDepsAssoc["$1"]=""
+
+    # also store deps in normal array to maintain their order
+    autoPatchelfCachedDeps+=("$1")
+}
+
+declare -gi depCacheInitialised=0
+declare -gi doneRecursiveSearch=0
+declare -g foundDependency
+
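+# Print the resolved paths ("=> /path") of a binary's direct dependencies.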
+getDepsFromSo() {
+    ldd "$1" 2> /dev/null | sed -n -e 's/[^=]*=> *\(.\+\) \+([^)]*)$/\1/p'
+}
+
+populateCacheWithRecursiveDeps() {
+    local so found foundso
+    for so in "${autoPatchelfCachedDeps[@]}"; do
+        for found in $(getDepsFromSo "$so"); do
+            local base="${found##*/}"
+            local soname="${base%.so*}"
+            for foundso in "${found%/*}/$soname".so*; do
+                addToDepCache "$foundso"
+            done
+        done
+    done
+}
+
+getSoArch() {
+    objdump -f "$1" | sed -ne 's/^architecture: *\([^,]\+\).*/\1/p'
+}
+
+# NOTE: If you want to use this function outside of the autoPatchelf function,
+# keep in mind that the dependency cache is only valid inside the subshell
+# spawned by the autoPatchelf function, so invoking this directly will possibly
+# rebuild the dependency cache. See the autoPatchelf function below for more
+# information.
+findDependency() {
+    local filename="$1"
+    local arch="$2"
+    local lib dep
+
+    if [ $depCacheInitialised -eq 0 ]; then
+        for lib in "${autoPatchelfLibs[@]}"; do
+            for so in "$lib/"*.so*; do addToDepCache "$so"; done
+        done
+        depCacheInitialised=1
+    fi
+
+    for dep in "${autoPatchelfCachedDeps[@]}"; do
+        if [ "$filename" = "${dep##*/}" ]; then
+            if [ "$(getSoArch "$dep")" = "$arch" ]; then
+                foundDependency="$dep"
+                return 0
+            fi
+        fi
+    done
+
+    # Populate the dependency cache with recursive dependencies *only* if we
+    # didn't find the right dependency so far and afterwards run findDependency
+    # again, but this time with $doneRecursiveSearch set to 1 so that it won't
+    # recurse again (and thus infinitely).
+    if [ $doneRecursiveSearch -eq 0 ]; then
+        populateCacheWithRecursiveDeps
+        doneRecursiveSearch=1
+        findDependency "$filename" "$arch" || return 1
+        return 0
+    fi
+    return 1
+}
+
+autoPatchelfFile() {
+    local dep rpath="" toPatch="$1"
+
+    local interpreter
+    interpreter="$(< "$NIX_CC/nix-support/dynamic-linker")"
+    if isExecutable "$toPatch"; then
+        runPatchelf --set-interpreter "$interpreter" "$toPatch"
+        # shellcheck disable=SC2154
+        # (runtimeDependencies is referenced but not assigned.)
+        if [ -n "$runtimeDependencies" ]; then
+            for dep in $runtimeDependencies; do
+                rpath="$rpath${rpath:+:}$dep/lib"
+            done
+        fi
+    fi
+
+    echo "searching for dependencies of $toPatch" >&2
+
+    # We're going to find all dependencies based on ldd output, so we need to
+    # clear the RPATH first.
+    runPatchelf --remove-rpath "$toPatch"
+
+    # If the file is not a dynamic executable, ldd/sed will fail,
+    # in which case we return, since there is nothing left to do.
+    local missing
+    missing="$(
+        ldd "$toPatch" 2> /dev/null | \
+            sed -n -e 's/^[\t ]*\([^ ]\+\) => not found.*/\1/p'
+    )" || return 0
+
+    # This ensures that we get the output of all missing dependencies instead
+    # of failing at the first one, because it's more useful when working on a
+    # new package where you don't yet know its dependencies.
+
+    for dep in $missing; do
+        echo -n "  $dep -> " >&2
+        if findDependency "$dep" "$(getSoArch "$toPatch")"; then
+            rpath="$rpath${rpath:+:}${foundDependency%/*}"
+            echo "found: $foundDependency" >&2
+        else
+            echo "not found!" >&2
+            autoPatchelfFailedDeps["$dep"]="$toPatch"
+        fi
+    done
+
+    if [ -n "$rpath" ]; then
+        echo "setting RPATH to: $rpath" >&2
+        runPatchelf --set-rpath "$rpath" "$toPatch"
+    fi
+}
+
+# Can be used to manually add additional directories with shared object files
+# to be included for the next autoPatchelf invocation.
+addAutoPatchelfSearchPath() {
+    local -a findOpts=()
+
+    # XXX: Somewhat similar to the one in the autoPatchelf function, maybe make
+    #      it DRY someday...
+    while [ $# -gt 0 ]; do
+        case "$1" in
+            --) shift; break;;
+            --no-recurse) shift; findOpts+=("-maxdepth" 1);;
+            --*)
+                echo "addAutoPatchelfSearchPath: ERROR: Invalid command line" \
+                     "argument: $1" >&2
+                return 1;;
+            *) break;;
+        esac
+    done
+
+    while IFS= read -r -d '' file; do
+        addToDepCache "$file"
+    done < <(find "$@" "${findOpts[@]}" \! -type d \
+        \( -name '*.so' -o -name '*.so.*' \) -print0)
+}
+
+autoPatchelf() {
+    local norecurse=
+
+    while [ $# -gt 0 ]; do
+        case "$1" in
+            --) shift; break;;
+            --no-recurse) shift; norecurse=1;;
+            --*)
+                echo "autoPatchelf: ERROR: Invalid command line" \
+                     "argument: $1" >&2
+                return 1;;
+            *) break;;
+        esac
+    done
+
+    if [ $# -eq 0 ]; then
+        echo "autoPatchelf: No paths to patch specified." >&2
+        return 1
+    fi
+
+    echo "automatically fixing dependencies for ELF files" >&2
+
+    # Add all shared objects of the current output path to the start of
+    # autoPatchelfCachedDeps so that it's chosen first in findDependency.
+    addAutoPatchelfSearchPath ${norecurse:+--no-recurse} -- "$@"
+
+    while IFS= read -r -d $'\0' file; do
+      isELF "$file" || continue
+      segmentHeaders="$(LANG=C $READELF -l "$file")"
+      # Skip if the ELF file doesn't have segment headers (eg. object files).
+      # not using grep -q, because it can cause Broken pipe
+      [ -n "$(echo "$segmentHeaders" | grep '^Program Headers:')" ] || continue
+      if isExecutable "$file"; then
+          # Skip if the executable is statically linked.
+          [ -n "$(echo "$segmentHeaders" | grep "^ *INTERP\\>")" ] || continue
+      fi
+      # Skip the file if patchelf is unable to parse it.
+      # Some programs contain binary blobs for testing,
+      # which are identified as ELF but fail to be parsed by patchelf.
+      patchelf "$file" || continue
+      autoPatchelfFile "$file"
+    done < <(find "$@" ${norecurse:+-maxdepth 1} -type f -print0)
+
+    # fail if any dependencies were not found and
+    # autoPatchelfIgnoreMissingDeps is not set
+    local depsMissing=0
+    for failedDep in "${!autoPatchelfFailedDeps[@]}"; do
+      echo "autoPatchelfHook could not satisfy dependency $failedDep wanted by ${autoPatchelfFailedDeps[$failedDep]}"
+      depsMissing=1
+    done
+    # shellcheck disable=SC2154
+    # (autoPatchelfIgnoreMissingDeps is referenced but not assigned.)
+    if [[ $depsMissing == 1 && -z "$autoPatchelfIgnoreMissingDeps" ]]; then
+      echo "Add the missing dependencies to the build inputs or set autoPatchelfIgnoreMissingDeps=true"
+      exit 1
+    fi
+}
+
+# XXX: This should ultimately use fixupOutputHooks but we currently don't have
+# a way to enforce the order. If we have $runtimeDependencies set, the setup
+# hook of patchelf is going to ruin everything and strip out those additional
+# RPATHs.
+#
+# So what we do here is basically run in postFixup and emulate the same
+# behaviour as fixupOutputHooks because the setup hook for patchelf is run in
+# fixupOutput and the postFixup hook runs later.
+postFixupHooks+=('
+    if [ -z "${dontAutoPatchelf-}" ]; then
+        autoPatchelf -- $(for output in $outputs; do
+            [ -e "${!output}" ] || continue
+            echo "${!output}"
+        done)
+    fi
+')
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh b/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh
new file mode 100644
index 000000000000..c08cab158688
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh
@@ -0,0 +1,7 @@
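+# Adds an autoreconfPhase before configurePhase; autoreconfFlags (see the
+# default below) can be overridden per derivation.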
+preConfigurePhases+=" autoreconfPhase"
+
+autoreconfPhase() {
+    runHook preAutoreconf
+    autoreconf ${autoreconfFlags:---install --force --verbose}
+    runHook postAutoreconf
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh
new file mode 100644
index 000000000000..6bef786ac3ac
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh
@@ -0,0 +1,9 @@
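+# On failure, print the `cntr attach` command for this build's sandbox and
+# sleep forever so the sandbox stays around for inspection.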
+breakpointHook() {
+    local red='\033[0;31m'
+    local no_color='\033[0m'
+
+    echo -e "${red}build failed in ${curPhase} with exit code ${exitCode}${no_color}"
+    printf "To attach install cntr and run the following command as root:\n\n"
+    sh -c "echo '   cntr attach -t command cntr-${out}'; while true; do sleep 99999999; done"
+}
+failureHooks+=(breakpointHook)
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh b/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh
new file mode 100644
index 000000000000..f5af76e8168f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh
@@ -0,0 +1,33 @@
+fixupOutputHooks+=('if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi')
+
+compressManPages() {
+    local dir="$1"
+
+    if [ -L "$dir"/share ] || [ -L "$dir"/share/man ] || [ ! -d "$dir/share/man" ]
+        then return
+    fi
+    echo "gzipping man pages under $dir/share/man/"
+
+    # Compress all uncompressed manpages.  Don't follow symlinks, etc.
+    find "$dir"/share/man/ -type f -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 \
+        | while IFS= read -r -d $'\0' f
+    do
+        if gzip -c -n "$f" > "$f".gz; then
+            rm "$f"
+        else
+            rm "$f".gz
+        fi
+    done
+
+    # Point symlinks to compressed manpages.
+    find "$dir"/share/man/ -type l -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 \
+        | sort -z \
+        | while IFS= read -r -d $'\0' f
+    do
+        local target
+        target="$(readlink -f "$f")"
+        if [ -f "$target".gz ]; then
+            ln -sf "$target".gz "$f".gz && rm "$f"
+        fi
+    done
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh b/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh
new file mode 100644
index 000000000000..f96a10f33d5c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh
@@ -0,0 +1,42 @@
+# shellcheck shell=bash
+
+# Setup hook that installs specified desktop items.
+#
+# Example usage in a derivation:
+#
+#   { …, makeDesktopItem, copyDesktopItems, … }:
+#
+#   let desktopItem = makeDesktopItem { … }; in
+#   stdenv.mkDerivation {
+#     …
+#     nativeBuildInputs = [ copyDesktopItems ];
+#
+#     desktopItems =  [ desktopItem ];
+#     …
+#   }
+#
+# For each `desktopItems` argument, this hook copies the file itself when
+# the argument is a path to a regular file, or else all '*.desktop' files
+# found under the argument's 'share/applications' folder.
+
+postInstallHooks+=(copyDesktopItems)
+
+copyDesktopItems() {
+    if [ "${dontCopyDesktopItems-}" = 1 ]; then return; fi
+
+    if [ -z "$desktopItems" ]; then
+        return
+    fi
+
+    for desktopItem in $desktopItems; do
+        if [[ -f "$desktopItem" ]]; then
+            echo "Copying '$f' into '$out/share/applications'"
+            install -D -m 444 -t "$out"/share/applications "$f"
+        else
+            for f in "$desktopItem"/share/applications/*.desktop; do
+                echo "Copying '$f' into '$out/share/applications'"
+                install -D -m 444 -t "$out"/share/applications "$f"
+            done
+        fi
+    done
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/die.sh b/nixpkgs/pkgs/build-support/setup-hooks/die.sh
new file mode 100644
index 000000000000..0db41e030f4c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/die.sh
@@ -0,0 +1,21 @@
+# Exit with backtrace and error message
+#
+# Usage: die "Error message"
+die() {
+    # Let us be a little sloppy with errors, because otherwise the final
+    # invocation of `caller` below will cause the script to exit.
+    set +e
+
+    # Print our error message
+    printf "\nBuilder called die: %b\n" "$*"
+    printf "Backtrace:\n"
+
+    # Print a backtrace.
+    local frame=0
+    while caller $frame; do
+        ((frame++));
+    done
+    printf "\n"
+
+    exit 1
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh b/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh
new file mode 100644
index 000000000000..2b48fea4ff0b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh
@@ -0,0 +1,20 @@
+postPhases+=" cleanupBuildDir"
+
+# Force GCC to build with coverage instrumentation.  Also disable
+# optimisation, since it may confuse things.
+export NIX_CFLAGS_COMPILE="${NIX_CFLAGS_COMPILE:-} -O0 --coverage"
+
+# Get rid of everything that isn't a gcno file or a C source file.
+# Also strip the `.tmp_' prefix from gcno files.  (The Linux kernel
+# creates these.)
+cleanupBuildDir() {
+    if ! [ -e $out/.build ]; then return; fi
+
+    find $out/.build/ -type f -a ! \
+        \( -name "*.c" -o -name "*.cc" -o -name "*.cpp" -o -name "*.h" -o -name "*.hh" -o -name "*.y" -o -name "*.l" -o -name "*.gcno" \) \
+        | xargs rm -f --
+
+    for i in $(find $out/.build/ -name ".tmp_*.gcno"); do
+        mv "$i" "$(echo $i | sed s/.tmp_//)"
+    done
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh b/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh
new file mode 100644
index 000000000000..f446a6f27fd9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh
@@ -0,0 +1,22 @@
+addXMLCatalogs () {
+    local d i
+    # ‘xml/dtd’ and ‘xml/xsl’ are deprecated. Catalogs should be
+    # installed underneath ‘share/xml’.
+    for d in $1/share/xml $1/xml/dtd $1/xml/xsl; do
+        if [ -d $d ]; then
+            for i in $(find $d -name catalog.xml); do
+                XML_CATALOG_FILES+=" $i"
+            done
+        fi
+    done
+}
+
+if [ -z "${libxmlHookDone-}" ]; then
+    libxmlHookDone=1
+
+    # Set up XML_CATALOG_FILES.  An empty initial value prevents
+    # xmllint and xsltproc from looking in /etc/xml/catalog.
+    export XML_CATALOG_FILES=''
+    if [ -z "$XML_CATALOG_FILES" ]; then XML_CATALOG_FILES=" "; fi
+    addEnvHooks "$hostOffset" addXMLCatalogs
+fi
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh b/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh
new file mode 100644
index 000000000000..55e196e654df
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh
@@ -0,0 +1,40 @@
+# On macOS, binaries refer to dynamic library dependencies using
+# either relative paths (e.g. "libicudata.dylib", searched relative to
+# $DYLD_LIBRARY_PATH) or absolute paths
+# (e.g. "/nix/store/.../lib/libicudata.dylib").  In Nix, the latter is
+# preferred since it allows programs to just work.  When linking
+# against a library (e.g. "-licudata"), the linker uses the install
+# name embedded in the dylib (which can be shown using "otool -D").
+# Most packages create dylibs with absolute install names, but some do
+# not.  This setup hook fixes dylibs by setting their install names to
+# their absolute path (using "install_name_tool -id").  It also
+# rewrites references in other dylibs to absolute paths.
+
+fixupOutputHooks+=('fixDarwinDylibNamesIn $prefix')
+
+fixDarwinDylibNames() {
+    local flags=()
+    local old_id
+
+    for fn in "$@"; do
+        flags+=(-change "$(basename "$fn")" "$fn")
+    done
+
+    for fn in "$@"; do
+        if [ -L "$fn" ]; then continue; fi
+        echo "$fn: fixing dylib"
+        int_out=$(@targetPrefix@install_name_tool -id "$fn" "${flags[@]}" "$fn" 2>&1)
+        result=$?
+        if [ "$result" -ne 0 ] &&
+            ! grep "shared library stub file and can't be changed" <<< "$int_out"
+        then
+            echo "$int_out" >&2
+            exit "$result"
+        fi
+    done
+}
+
+fixDarwinDylibNamesIn() {
+    local dir="$1"
+    fixDarwinDylibNames $(find "$dir" -name "*.dylib")
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh b/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh
new file mode 100644
index 000000000000..559b543fadfc
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh
@@ -0,0 +1,11 @@
+unpackPhase="unpackGog"
+
+unpackGog() {
+    runHook preUnpackGog
+
+    innoextract --silent --extract --exclude-temp "${src}"
+
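+    # Normalize extracted paths to lowercase (GOG installers tend to mix cases).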
+    find . -depth -print -execdir rename -f 'y/A-Z/a-z/' '{}' \;
+
+    runHook postUnpackGog
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh b/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh
new file mode 100644
index 000000000000..194b408b1050
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh
@@ -0,0 +1,230 @@
+# shellcheck shell=bash
+# Setup hook for the `installShellFiles` package.
+#
+# Example usage in a derivation:
+#
+#   { …, installShellFiles, … }:
+#   stdenv.mkDerivation {
+#     …
+#     nativeBuildInputs = [ installShellFiles ];
+#     postInstall = ''
+#       installManPage share/doc/foobar.1
+#       installShellCompletion share/completions/foobar.{bash,fish,zsh}
+#     '';
+#     …
+#   }
+#
+# See comments on each function for more details.
+
+# installManPage <path> [...<path>]
+#
+# Each argument is checked for its man section suffix and installed into the appropriate
+# share/man/man<n>/ directory. The function returns an error if any paths don't have the man
+# section suffix (with optional .gz compression).
+installManPage() {
+    local path
+    for path in "$@"; do
+        if (( "${NIX_DEBUG:-0}" >= 1 )); then
+            echo "installManPage: installing $path"
+        fi
+        if test -z "$path"; then
+            echo "installManPage: error: path cannot be empty" >&2
+            return 1
+        fi
+        local basename
+        basename=$(stripHash "$path") # use stripHash in case it's a nix store path
+        local trimmed=${basename%.gz} # don't get fooled by compressed manpages
+        local suffix=${trimmed##*.}
+        if test -z "$suffix" -o "$suffix" = "$trimmed"; then
+            echo "installManPage: error: path missing manpage section suffix: $path" >&2
+            return 1
+        fi
+        local outRoot
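+        # Section 3 (library) pages go to the devman output, if any;
+        # everything else goes to the man output.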
+        if test "$suffix" = 3; then
+            outRoot=${!outputDevman:?}
+        else
+            outRoot=${!outputMan:?}
+        fi
+        install -Dm644 -T "$path" "${outRoot}/share/man/man$suffix/$basename" || return
+    done
+}
+
+# installShellCompletion [--cmd <name>] ([--bash|--fish|--zsh] [--name <name>] <path>)...
+#
+# Each path is installed into the appropriate directory for shell completions for the given shell.
+# If one of `--bash`, `--fish`, or `--zsh` is given the path is assumed to belong to that shell.
+# Otherwise the file extension will be examined to pick a shell. If the shell is unknown a warning
+# will be logged and the command will return a non-zero status code after processing any remaining
+# paths. Any of the shell flags will affect all subsequent paths (unless another shell flag is
+# given).
+#
+# If the shell completion needs to be renamed before installing the optional `--name <name>` flag
+# may be given. Any name provided with this flag only applies to the next path.
+#
+# If all shell completions need to be renamed before installing the optional `--cmd <name>` flag
+# may be given. This will synthesize a name for each file, unless overridden with an explicit
+# `--name` flag. For example, `--cmd foobar` will synthesize the name `_foobar` for zsh and
+# `foobar.bash` for bash.
+#
+# For zsh completions, if the `--name` flag is not given, the path will be automatically renamed
+# such that `foobar.zsh` becomes `_foobar`.
+#
+# A path may be a named fd, such as produced by the bash construct `<(cmd)`. When using a named fd,
+# the shell type flag must be provided, and either the `--name` or `--cmd` flag must be provided.
+# This might look something like:
+#
+#   installShellCompletion --zsh --name _foobar <($out/bin/foobar --zsh-completion)
+#
+# This command accepts multiple shell flags in conjunction with multiple paths if you wish to
+# install them all in one command:
+#
+#   installShellCompletion share/completions/foobar.{bash,fish} --zsh share/completions/_foobar
+#
+# However it may be easier to read if each shell is split into its own invocation, especially when
+# renaming is involved:
+#
+#   installShellCompletion --bash --name foobar.bash share/completions.bash
+#   installShellCompletion --fish --name foobar.fish share/completions.fish
+#   installShellCompletion --zsh --name _foobar share/completions.zsh
+#
+# Or to use shell newline escaping to split a single invocation across multiple lines:
+#
+#   installShellCompletion --cmd foobar \
+#     --bash <($out/bin/foobar --bash-completion) \
+#     --fish <($out/bin/foobar --fish-completion) \
+#     --zsh <($out/bin/foobar --zsh-completion)
+#
+# If any argument is `--` the remaining arguments will be treated as paths.
+installShellCompletion() {
+    local shell='' name='' cmdname='' retval=0 parseArgs=1 arg
+    while { arg=$1; shift; }; do
+        # Parse arguments
+        if (( parseArgs )); then
+            case "$arg" in
+            --bash|--fish|--zsh)
+                shell=${arg#--}
+                continue;;
+            --name)
+                name=$1
+                shift || {
+                    echo 'installShellCompletion: error: --name flag expected an argument' >&2
+                    return 1
+                }
+                continue;;
+            --name=*)
+                # treat `--name=foo` the same as `--name foo`
+                name=${arg#--name=}
+                continue;;
+            --cmd)
+                cmdname=$1
+                shift || {
+                    echo 'installShellCompletion: error: --cmd flag expected an argument' >&2
+                    return 1
+                }
+                continue;;
+            --cmd=*)
+                # treat `--cmd=foo` the same as `--cmd foo`
+                cmdname=${arg#--cmd=}
+                continue;;
+            --?*)
+                echo "installShellCompletion: warning: unknown flag ${arg%%=*}" >&2
+                retval=2
+                continue;;
+            --)
+                # treat remaining args as paths
+                parseArgs=0
+                continue;;
+            esac
+        fi
+        if (( "${NIX_DEBUG:-0}" >= 1 )); then
+            echo "installShellCompletion: installing $arg${name:+ as $name}"
+        fi
+        # if we get here, this is a path or named pipe
+        # Identify shell and output name
+        local curShell=$shell
+        local outName=''
+        if [[ -z "$arg" ]]; then
+            echo "installShellCompletion: error: empty path is not allowed" >&2
+            return 1
+        elif [[ -p "$arg" ]]; then
+            # this is a named fd or fifo
+            if [[ -z "$curShell" ]]; then
+                echo "installShellCompletion: error: named pipe requires one of --bash, --fish, or --zsh" >&2
+                return 1
+            elif [[ -z "$name" && -z "$cmdname" ]]; then
+                echo "installShellCompletion: error: named pipe requires one of --cmd or --name" >&2
+                return 1
+            fi
+        else
+            # this is a path
+            local argbase
+            argbase=$(stripHash "$arg")
+            if [[ -z "$curShell" ]]; then
+                # auto-detect the shell
+                case "$argbase" in
+                ?*.bash) curShell=bash;;
+                ?*.fish) curShell=fish;;
+                ?*.zsh) curShell=zsh;;
+                *)
+                    if [[ "$argbase" = _* && "$argbase" != *.* ]]; then
+                        # probably zsh
+                        echo "installShellCompletion: warning: assuming path \`$arg' is zsh; please specify with --zsh" >&2
+                        curShell=zsh
+                    else
+                        echo "installShellCompletion: warning: unknown shell for path: $arg" >&2
+                        retval=2
+                        continue
+                    fi;;
+                esac
+            fi
+            outName=$argbase
+        fi
+        # Identify output path
+        if [[ -n "$name" ]]; then
+            outName=$name
+        elif [[ -n "$cmdname" ]]; then
+            case "$curShell" in
+            bash|fish) outName=$cmdname.$curShell;;
+            zsh) outName=_$cmdname;;
+            *)
+                # Our list of shells is out of sync with the flags we accept or extensions we detect.
+                echo 'installShellCompletion: internal error' >&2
+                return 1;;
+            esac
+        fi
+        local sharePath
+        case "$curShell" in
+        bash) sharePath=bash-completion/completions;;
+        fish) sharePath=fish/vendor_completions.d;;
+        zsh)
+            sharePath=zsh/site-functions
+            # only apply automatic renaming if we didn't have a manual rename
+            if [[ -z "$name" && -z "$cmdname" ]]; then
+                # convert a name like `foo.zsh` into `_foo`
+                outName=${outName%.zsh}
+                outName=_${outName#_}
+            fi;;
+        *)
+            # Our list of shells is out of sync with the flags we accept or extensions we detect.
+            echo 'installShellCompletion: internal error' >&2
+            return 1;;
+        esac
+        # Install file
+        local outDir="${!outputBin:?}/share/$sharePath"
+        local outPath="$outDir/$outName"
+        if [[ -p "$arg" ]]; then
+            # install handles named pipes on NixOS but not on macOS
+            mkdir -p "$outDir" \
+            && cat "$arg" > "$outPath"
+        else
+            install -Dm644 -T "$arg" "$outPath"
+        fi || return
+        # Clear the per-path flags
+        name=
+    done
+    if [[ -n "$name" ]]; then
+        echo 'installShellCompletion: error: --name flag given with no path' >&2
+        return 1
+    fi
+    return $retval
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh b/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh
new file mode 100644
index 000000000000..754900bfc337
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh
@@ -0,0 +1,6 @@
+prePhases+=" moveBuildDir"
+
+moveBuildDir() {
+    mkdir -p $out/.build
+    cd $out/.build
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh
new file mode 100644
index 000000000000..b53e184b0956
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh
@@ -0,0 +1,5 @@
+ld-is-cc-hook() {
+    LD=$CC
+}
+
+preConfigureHooks+=(ld-is-cc-hook)
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh b/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh
new file mode 100644
index 000000000000..9108b4c50355
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh
@@ -0,0 +1,25 @@
+postPhases+=" coverageReportPhase"
+
+coverageReportPhase() {
+    lcov --directory . --capture --output-file app.info
+    set -o noglob
+    lcov --remove app.info ${lcovFilter:-"/nix/store/*"} > app2.info
+    set +o noglob
+    mv app2.info app.info
+
+    mkdir -p $out/coverage
+    genhtml app.info $lcovExtraTraceFiles -o $out/coverage > log
+
+    # Grab the overall coverage percentage so that Hydra can plot it over time.
+    mkdir -p $out/nix-support
+    lineCoverage="$(sed 's/.*lines\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
+    functionCoverage="$(sed 's/.*functions\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
+    if [ -z "$lineCoverage" -o -z "$functionCoverage" ]; then
+        echo "failed to get coverage statistics"
+        exit 1
+    fi
+    echo "lineCoverage $lineCoverage %" >> $out/nix-support/hydra-metrics
+    echo "functionCoverage $functionCoverage %" >> $out/nix-support/hydra-metrics
+
+    echo "report coverage $out/coverage" >> $out/nix-support/hydra-build-products
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh b/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh
new file mode 100644
index 000000000000..0608d3ca81c4
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh
@@ -0,0 +1,28 @@
+fixupOutputHooks+=(_makeSymlinksRelative)
+
+# For every symlink in $output that refers to another file in $output,
+# ensure that the symlink is relative. This removes references to the output
+# hash from the resulting store paths and thus the NAR files.
+_makeSymlinksRelative() {
+    local symlinkTarget
+
+    if [ -n "${dontRewriteSymlinks-}" ]; then
+        return 0
+    fi
+
+    while IFS= read -r -d $'\0' f; do
+        symlinkTarget=$(readlink "$f")
+        if [[ "$symlinkTarget"/ != "$prefix"/* ]]; then
+            # skip this symlink as it doesn't point to $prefix
+            continue
+        fi
+
+        if [ ! -e "$symlinkTarget" ]; then
+            echo "the symlink $f is broken, it points to $symlinkTarget (which is missing)"
+        fi
+
+        echo "rewriting symlink $f to be relative to $prefix"
+        ln -snrf "$symlinkTarget" "$f"
+
+    done < <(find $prefix -type l -print0)
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh b/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh
new file mode 100644
index 000000000000..8b7012677cd5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh
@@ -0,0 +1,146 @@
+# Assert that FILE exists and is executable
+#
+# assertExecutable FILE
+assertExecutable() {
+    local file="$1"
+    [[ -f "$file" && -x "$file" ]] || \
+        die "Cannot wrap '$file' because it is not an executable file"
+}
+
+# construct an executable file that wraps the actual executable
+# makeWrapper EXECUTABLE OUT_PATH ARGS
+
+# ARGS:
+# --argv0       NAME    : set name of executed process to NAME
+#                         (otherwise it’s called …-wrapped)
+# --set         VAR VAL : add VAR with value VAL to the executable’s
+#                         environment
+# --set-default VAR VAL : like --set, but only adds VAR if not already set in
+#                         the environment
+# --unset       VAR     : remove VAR from the environment
+# --run         COMMAND : run command before the executable
+# --add-flags   FLAGS   : add FLAGS to invocation of executable
+
+# --prefix          ENV SEP VAL   : suffix/prefix ENV with VAL, separated by SEP
+# --suffix
+# --suffix-each     ENV SEP VALS  : like --suffix, but VALS is a list
+# --prefix-contents ENV SEP FILES : like --suffix-each, but contents of FILES
+#                                   are read first and used as VALS
+# --suffix-contents
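+#
+# Illustrative example (the package variables and flags here are
+# hypothetical, not part of this hook):
+#   makeWrapper "$somepkg/bin/foo" "$out/bin/foo" \
+#     --set LANG C \
+#     --prefix PATH : "$coreutils/bin" \
+#     --add-flags "--config /etc/foo.conf"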
+makeWrapper() {
+    local original="$1"
+    local wrapper="$2"
+    local params varName value command separator n fileNames
+    local argv0 flagsBefore flags
+
+    assertExecutable "$original"
+
+    mkdir -p "$(dirname "$wrapper")"
+
+    echo "#! @shell@ -e" > "$wrapper"
+
+    params=("$@")
+    for ((n = 2; n < ${#params[*]}; n += 1)); do
+        p="${params[$n]}"
+
+        if [[ "$p" == "--set" ]]; then
+            varName="${params[$((n + 1))]}"
+            value="${params[$((n + 2))]}"
+            n=$((n + 2))
+            echo "export $varName=${value@Q}" >> "$wrapper"
+        elif [[ "$p" == "--set-default" ]]; then
+            varName="${params[$((n + 1))]}"
+            value="${params[$((n + 2))]}"
+            n=$((n + 2))
+            echo "export $varName=\${$varName-${value@Q}}" >> "$wrapper"
+        elif [[ "$p" == "--unset" ]]; then
+            varName="${params[$((n + 1))]}"
+            n=$((n + 1))
+            echo "unset $varName" >> "$wrapper"
+        elif [[ "$p" == "--run" ]]; then
+            command="${params[$((n + 1))]}"
+            n=$((n + 1))
+            echo "$command" >> "$wrapper"
+        elif [[ ("$p" == "--suffix") || ("$p" == "--prefix") ]]; then
+            varName="${params[$((n + 1))]}"
+            separator="${params[$((n + 2))]}"
+            value="${params[$((n + 3))]}"
+            n=$((n + 3))
+            if test -n "$value"; then
+                if test "$p" = "--suffix"; then
+                    echo "export $varName=\$$varName\${$varName:+${separator@Q}}${value@Q}" >> "$wrapper"
+                else
+                    echo "export $varName=${value@Q}\${$varName:+${separator@Q}}\$$varName" >> "$wrapper"
+                fi
+            fi
+        elif [[ "$p" == "--suffix-each" ]]; then
+            varName="${params[$((n + 1))]}"
+            separator="${params[$((n + 2))]}"
+            values="${params[$((n + 3))]}"
+            n=$((n + 3))
+            for value in $values; do
+                echo "export $varName=\$$varName\${$varName:+$separator}${value@Q}" >> "$wrapper"
+            done
+        elif [[ ("$p" == "--suffix-contents") || ("$p" == "--prefix-contents") ]]; then
+            varName="${params[$((n + 1))]}"
+            separator="${params[$((n + 2))]}"
+            fileNames="${params[$((n + 3))]}"
+            n=$((n + 3))
+            for fileName in $fileNames; do
+                contents="$(cat "$fileName")"
+                if test "$p" = "--suffix-contents"; then
+                    echo "export $varName=\$$varName\${$varName:+$separator}${contents@Q}" >> "$wrapper"
+                else
+                    echo "export $varName=${contents@Q}\${$varName:+$separator}\$$varName" >> "$wrapper"
+                fi
+            done
+        elif [[ "$p" == "--add-flags" ]]; then
+            flags="${params[$((n + 1))]}"
+            n=$((n + 1))
+            flagsBefore="$flagsBefore $flags"
+        elif [[ "$p" == "--argv0" ]]; then
+            argv0="${params[$((n + 1))]}"
+            n=$((n + 1))
+        else
+            die "makeWrapper doesn't understand the arg $p"
+        fi
+    done
+
+    echo exec ${argv0:+-a \"$argv0\"} \""$original"\" \
+         "$flagsBefore" '"$@"' >> "$wrapper"
+
+    chmod +x "$wrapper"
+}
+
+addSuffix() {
+    suffix="$1"
+    shift
+    for name in "$@"; do
+        echo "$name$suffix"
+    done
+}
+
+filterExisting() {
+    for fn in "$@"; do
+        if test -e "$fn"; then
+            echo "$fn"
+        fi
+    done
+}
+
+# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...>
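+#
+# Illustrative example (paths hypothetical):
+#   wrapProgram "$out/bin/foo" --prefix PATH : "$jq/bin"
+# This moves foo to .foo-wrapped and installs a wrapper in its place that
+# execs the original with the given environment changes.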
+wrapProgram() {
+    local prog="$1"
+    local hidden
+
+    assertExecutable "$prog"
+
+    hidden="$(dirname "$prog")/.$(basename "$prog")"-wrapped
+    while [ -e "$hidden" ]; do
+      hidden="${hidden}_"
+    done
+    mv "$prog" "$hidden"
+    # Silence warning about unexpanded $0:
+    # shellcheck disable=SC2016
+    makeWrapper "$hidden" "$prog" --argv0 '$0' "${@:2}"
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh
new file mode 100644
index 000000000000..ef31dcdce274
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh
@@ -0,0 +1,23 @@
+# This setup hook moves $out/{man,doc,info} to $out/share. (Splitting
+# share/man and share/doc into the man and doc outputs is handled later
+# by the multiple-outputs setup hook.)
+
+preFixupHooks+=(_moveToShare)
+
+_moveToShare() {
+    forceShare=${forceShare:=man doc info}
+    if [ -z "$forceShare" -o -z "$out" ]; then return; fi
+
+    for d in $forceShare; do
+        if [ -d "$out/$d" ]; then
+            if [ -d "$out/share/$d" ]; then
+                echo "both $d/ and share/$d/ exist!"
+            else
+                echo "moving $out/$d to $out/share/$d"
+                mkdir -p $out/share
+                mv $out/$d $out/share/
+            fi
+        fi
+    done
+}
+
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh
new file mode 100644
index 000000000000..9517af797323
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh
@@ -0,0 +1,22 @@
+# This setup hook, for each output, moves everything in $output/lib64
+# to $output/lib, and replaces $output/lib64 with a symlink to
+# $output/lib. The rationale is that lib64 directories are unnecessary
+# in Nix (since 32-bit and 64-bit builds of a package are in different
+# store paths anyway).
+# If the move would overwrite anything, it should fail on rmdir.
+
+fixupOutputHooks+=(_moveLib64)
+
+_moveLib64() {
+    if [ "${dontMoveLib64-}" = 1 ]; then return; fi
+    if [ ! -e "$prefix/lib64" -o -L "$prefix/lib64" ]; then return; fi
+    echo "moving $prefix/lib64/* to $prefix/lib"
+    mkdir -p $prefix/lib
+    shopt -s dotglob
+    for i in $prefix/lib64/*; do
+        mv --no-clobber "$i" $prefix/lib
+    done
+    shopt -u dotglob
+    rmdir $prefix/lib64
+    ln -s lib $prefix/lib64
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh
new file mode 100644
index 000000000000..1c0c4dc9f2d9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh
@@ -0,0 +1,19 @@
+# This setup hook, for each output, moves everything in $output/sbin
+# to $output/bin, and replaces $output/sbin with a symlink to
+# $output/bin.
+
+fixupOutputHooks+=(_moveSbin)
+
+_moveSbin() {
+    if [ "${dontMoveSbin-}" = 1 ]; then return; fi
+    if [ ! -e "$prefix/sbin" -o -L "$prefix/sbin" ]; then return; fi
+    echo "moving $prefix/sbin/* to $prefix/bin"
+    mkdir -p $prefix/bin
+    shopt -s dotglob
+    for i in $prefix/sbin/*; do
+        mv "$i" $prefix/bin
+    done
+    shopt -u dotglob
+    rmdir $prefix/sbin
+    ln -s bin $prefix/sbin
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh
new file mode 100755
index 000000000000..5963d87c7515
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+# This setup hook, for each output, moves everything in
+# $output/lib/systemd/user to $output/share/systemd/user, and replaces
+# $output/lib/systemd/user with a symlink to
+# $output/share/systemd/user.
+
+fixupOutputHooks+=(_moveSystemdUserUnits)
+
+_moveSystemdUserUnits() {
+    if [ "${dontMoveSystemdUserUnits:-0}" = 1 ]; then return; fi
+    if [ ! -e "${prefix:?}/lib/systemd/user" ]; then return; fi
+    local source="$prefix/lib/systemd/user"
+    local target="$prefix/share/systemd/user"
+    echo "moving $source/* to $target"
+    mkdir -p "$target"
+    (
+      shopt -s dotglob
+      for i in "$source"/*; do
+          mv "$i" "$target"
+      done
+    )
+    rmdir "$source"
+    ln -s "$target" "$source"
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh b/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh
new file mode 100644
index 000000000000..bfa47e3b20e1
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh
@@ -0,0 +1,199 @@
+# The base package for automatic multiple-output splitting. Used in stdenv as well.
+preConfigureHooks+=(_multioutConfig)
+preFixupHooks+=(_multioutDocs)
+preFixupHooks+=(_multioutDevs)
+postFixupHooks+=(_multioutPropagateDev)
+
+# Assign the first of the remaining arguments that names a nonempty variable to the variable named $1
+_assignFirst() {
+    local varName="$1"
+    local REMOVE=REMOVE # slightly hacky - we allow REMOVE (i.e. not a variable name)
+    shift
+    while (( $# )); do
+        if [ -n "${!1-}" ]; then eval "${varName}"="$1"; return; fi
+        shift
+    done
+    echo "Error: _assignFirst found no valid variant!"
+    return 1 # none found
+}
+
+# Same as _assignFirst, but only if "$1" = ""
+_overrideFirst() {
+    if [ -z "${!1-}" ]; then
+        _assignFirst "$@"
+    fi
+}
+
+
+# Setup chains of sane default values with easy overridability.
+# The variables are global to be usable anywhere during the build.
+# Typical usage in package is defining outputBin = "dev";
+
+_overrideFirst outputDev "dev" "out"
+_overrideFirst outputBin "bin" "out"
+
+_overrideFirst outputInclude "$outputDev"
+
+# so-libs are often among the main things to keep, and so go to $out
+_overrideFirst outputLib "lib" "out"
+
+_overrideFirst outputDoc "doc" "out"
+_overrideFirst outputDevdoc "devdoc" REMOVE # documentation for developers
+# man and info pages are small and often useful to distribute with binaries
+_overrideFirst outputMan "man" "$outputBin"
+_overrideFirst outputDevman "devman" "devdoc" "$outputMan"
+_overrideFirst outputInfo "info" "$outputBin"
+
+
+# Add standard flags to put files into the desired outputs.
+_multioutConfig() {
+    if [ "$outputs" = "out" ] || [ -z "${setOutputFlags-1}" ]; then return; fi;
+
+    # try to detect share/doc/${shareDocName}
+    # Note: sadly, $configureScript detection comes later in configurePhase,
+    #   and reordering would cause more trouble than worth.
+    if [ -z "$shareDocName" ]; then
+        local confScript="$configureScript"
+        if [ -z "$confScript" ] && [ -x ./configure ]; then
+            confScript=./configure
+        fi
+        if [ -f "$confScript" ]; then
+            local shareDocName="$(sed -n "s/^PACKAGE_TARNAME='\(.*\)'$/\1/p" < "$confScript")"
+        fi
+        # PACKAGE_TARNAME sometimes contains garbage.
+        if [ -z "$shareDocName" ] || echo "$shareDocName" | grep -q '[^a-zA-Z0-9_-]'; then
+            shareDocName="$(echo "$name" | sed 's/-[^a-zA-Z].*//')"
+        fi
+    fi
+
+    configureFlags="\
+        --bindir=${!outputBin}/bin --sbindir=${!outputBin}/sbin \
+        --includedir=${!outputInclude}/include --oldincludedir=${!outputInclude}/include \
+        --mandir=${!outputMan}/share/man --infodir=${!outputInfo}/share/info \
+        --docdir=${!outputDoc}/share/doc/${shareDocName} \
+        --libdir=${!outputLib}/lib --libexecdir=${!outputLib}/libexec \
+        --localedir=${!outputLib}/share/locale \
+        $configureFlags"
+
+    installFlags="\
+        pkgconfigdir=${!outputDev}/lib/pkgconfig \
+        m4datadir=${!outputDev}/share/aclocal aclocaldir=${!outputDev}/share/aclocal \
+        $installFlags"
+}
+
+
+# Add rpath prefixes to library paths, and avoid stdenv doing it for $out.
+_addRpathPrefix "${!outputLib}"
+NIX_NO_SELF_RPATH=1
+
+
+# Move subpaths that match pattern $1 from under any output/ to the $2 output/
+# Beware: only globbing patterns are accepted, e.g.: * ? {foo,bar}
+# A special target "REMOVE" is allowed: moveToOutput foo REMOVE
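+# Illustrative examples (the subpaths are hypothetical):
+#   moveToOutput bin/foo-config "${!outputDev}"
+#   moveToOutput share/gtk-doc REMOVE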
+moveToOutput() {
+    local patt="$1"
+    local dstOut="$2"
+    local output
+    for output in $outputs; do
+        if [ "${!output}" = "$dstOut" ]; then continue; fi
+        local srcPath
+        for srcPath in "${!output}"/$patt; do
+            # apply to existing files/dirs, *including* broken symlinks
+            if [ ! -e "$srcPath" ] && [ ! -L "$srcPath" ]; then continue; fi
+
+            if [ "$dstOut" = REMOVE ]; then
+                echo "Removing $srcPath"
+                rm -r "$srcPath"
+            else
+                local dstPath="$dstOut${srcPath#${!output}}"
+                echo "Moving $srcPath to $dstPath"
+
+                if [ -d "$dstPath" ] && [ -d "$srcPath" ]
+                then # attempt directory merge
+                    # check the case of trying to move an empty directory
+                    rmdir "$srcPath" --ignore-fail-on-non-empty
+                    if [ -d "$srcPath" ]; then
+                      mv -t "$dstPath" "$srcPath"/*
+                      rmdir "$srcPath"
+                    fi
+                else # usual move
+                    mkdir -p "$(readlink -m "$dstPath/..")"
+                    mv "$srcPath" "$dstPath"
+                fi
+            fi
+
+            # remove empty directories, printing iff at least one gets removed
+            local srcParent="$(readlink -m "$srcPath/..")"
+            if rmdir "$srcParent"; then
+                echo "Removing empty $srcParent/ and (possibly) its parents"
+                rmdir -p --ignore-fail-on-non-empty "$(readlink -m "$srcParent/..")" \
+                    2> /dev/null || true # doesn't ignore failure for some reason
+            fi
+        done
+    done
+}
+
+# Move documentation to the desired outputs.
+_multioutDocs() {
+    local REMOVE=REMOVE # slightly hacky - we expand ${!outputFoo}
+
+    moveToOutput share/info "${!outputInfo}"
+    moveToOutput share/doc "${!outputDoc}"
+    moveToOutput share/gtk-doc "${!outputDevdoc}"
+    moveToOutput share/devhelp/books "${!outputDevdoc}"
+
+    # the default outputMan is in $bin
+    moveToOutput share/man "${!outputMan}"
+    moveToOutput share/man/man3 "${!outputDevman}"
+}
+
+# Move development-only stuff to the desired outputs.
+_multioutDevs() {
+    if [ "$outputs" = "out" ] || [ -z "${moveToDev-1}" ]; then return; fi;
+    moveToOutput include "${!outputInclude}"
+    # these files are sometimes provided even without using the corresponding tool
+    moveToOutput lib/pkgconfig "${!outputDev}"
+    moveToOutput share/pkgconfig "${!outputDev}"
+    moveToOutput lib/cmake "${!outputDev}"
+    moveToOutput share/aclocal "${!outputDev}"
+    # don't move *.la, as libtool needs them in the directory of the library
+
+    for f in "${!outputDev}"/{lib,share}/pkgconfig/*.pc; do
+        echo "Patching '$f' includedir to output ${!outputInclude}"
+        sed -i "/^includedir=/s,=\${prefix},=${!outputInclude}," "$f"
+    done
+}
+
+# Make the "dev" propagate other outputs needed for development.
+_multioutPropagateDev() {
+    if [ "$outputs" = "out" ]; then return; fi;
+
+    local outputFirst
+    for outputFirst in $outputs; do
+        break
+    done
+    local propagaterOutput="$outputDev"
+    if [ -z "$propagaterOutput" ]; then
+        propagaterOutput="$outputFirst"
+    fi
+
+    # Default value: propagate binaries, includes and libraries
+    if [ -z "${propagatedBuildOutputs+1}" ]; then
+        local po_dirty="$outputBin $outputInclude $outputLib"
+        set +o pipefail
+        propagatedBuildOutputs=`echo "$po_dirty" \
+            | tr -s ' ' '\n' | grep -v -F "$propagaterOutput" \
+            | sort -u | tr '\n' ' ' `
+        set -o pipefail
+    fi
+
+    # The variable was explicitly set to empty or we resolved it so
+    if [ -z "$propagatedBuildOutputs" ]; then
+        return
+    fi
+
+    mkdir -p "${!propagaterOutput}"/nix-support
+    for output in $propagatedBuildOutputs; do
+        echo -n " ${!output}" >> "${!propagaterOutput}"/nix-support/propagated-build-inputs
+    done
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh b/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh
new file mode 100644
index 000000000000..04ebcd2cc64e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh
@@ -0,0 +1,119 @@
+# This setup hook causes the fixup phase to rewrite all script
+# interpreter file names (`#!  /path') to paths found in $PATH.  E.g.,
+# /bin/sh will be rewritten to /nix/store/<hash>-some-bash/bin/sh.
+# /usr/bin/env gets special treatment so that ".../bin/env python" is
+# rewritten to /nix/store/<hash>/bin/python.  Interpreters that are
+# already in the store are left untouched.
+# A script file must be marked as executable, otherwise it will not be
+# considered.
+
+fixupOutputHooks+=(patchShebangsAuto)
+
+# Run patch shebangs on a directory or file.
+# Can take multiple paths as arguments.
+# patchShebangs [--build | --host] PATH...
+
+# Flags:
+# --build : Lookup commands available at build-time
+# --host  : Lookup commands available at runtime
+
+# Example use cases,
+# $ patchShebangs --host /nix/store/...-hello-1.0/bin
+# $ patchShebangs --build configure
+
+patchShebangs() {
+    local pathName
+
+    if [[ "$1" == "--host" ]]; then
+        pathName=HOST_PATH
+        shift
+    elif [[ "$1" == "--build" ]]; then
+        pathName=PATH
+        shift
+    fi
+
+    echo "patching script interpreter paths in $@"
+    local f
+    local oldPath
+    local newPath
+    local arg0
+    local args
+    local oldInterpreterLine
+    local newInterpreterLine
+
+    if [[ $# -eq 0 ]]; then
+        echo "No arguments supplied to patchShebangs" >&2
+        return 0
+    fi
+
+    while IFS= read -r -d $'\0' f; do
+        isScript "$f" || continue
+
+        read -r oldInterpreterLine < "$f"
+        read -r oldPath arg0 args <<< "${oldInterpreterLine:2}"
+
+        if [[ -z "$pathName" ]]; then
+            if [[ -n $strictDeps && $f == "$NIX_STORE"* ]]; then
+                pathName=HOST_PATH
+            else
+                pathName=PATH
+            fi
+        fi
+
+        if [[ "$oldPath" == *"/bin/env" ]]; then
+            # Check for unsupported 'env' functionality:
+            # - options: something starting with a '-'
+            # - environment variables: foo=bar
+            if [[ $arg0 == "-"* || $arg0 == *"="* ]]; then
+                echo "$f: unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle shebang patching yourself)" >&2
+                exit 1
+            fi
+
+            newPath="$(PATH="${!pathName}" command -v "$arg0" || true)"
+        else
+            if [[ -z $oldPath ]]; then
+                # If no interpreter is specified, Linux will use /bin/sh. Set
+                # oldPath="/bin/sh" so that we get /nix/store/.../sh.
+                oldPath="/bin/sh"
+            fi
+
+            newPath="$(PATH="${!pathName}" command -v "$(basename "$oldPath")" || true)"
+
+            args="$arg0 $args"
+        fi
+
+        # Strip trailing whitespace introduced when no arguments are present
+        newInterpreterLine="$newPath $args"
+        newInterpreterLine=${newInterpreterLine%${newInterpreterLine##*[![:space:]]}}
+
+        if [[ -n "$oldPath" && "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ]]; then
+            if [[ -n "$newPath" && "$newPath" != "$oldPath" ]]; then
+                echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\""
+                # escape the escape chars so that sed doesn't interpret them
+                escapedInterpreterLine=${newInterpreterLine//\\/\\\\}
+
+                # Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281
+                timestamp=$(stat --printf "%y" "$f")
+                sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"
+                touch --date "$timestamp" "$f"
+            fi
+        fi
+    done < <(find "$@" -type f -perm -0100 -print0)
+
+    stopNest
+}
+
+patchShebangsAuto () {
+    if [[ -z "${dontPatchShebangs-}" && -e "$prefix" ]]; then
+
+        # The dev output will end up being run on the build platform. An
+        # example case of this is sdl2-config. Otherwise, we can just
+        # use the runtime path (--host).
+        if [[ "$output" != out && "$output" = "$outputDev" ]]; then
+            patchShebangs --build "$prefix"
+        else
+            patchShebangs --host "$prefix"
+        fi
+    fi
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh b/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh
new file mode 100644
index 000000000000..0ec56549645c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh
@@ -0,0 +1,22 @@
+# Clear dependency_libs in libtool files for shared libraries.
+
+# Shared libraries already encode their dependencies with locations.  .la
+# files do not always encode those locations, and sometimes encode the
+# locations in the wrong Nix output. .la files are not needed for shared
+# libraries, but without dependency_libs they do not hurt either.
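+#
+# For example (store path hypothetical), a line such as
+#   dependency_libs='-L/nix/store/...-zlib/lib -lz'
+# is replaced with
+#   dependency_libs='' #pruned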
+
+fixupOutputHooks+=(_pruneLibtoolFiles)
+
+_pruneLibtoolFiles() {
+    if [ "${dontPruneLibtoolFiles-}" ] || [ ! -e "$prefix" ]; then
+        return
+    fi
+
+    # Libtool uses "dlname" and "library_names" fields for shared libraries and
+    # the "old_library" field for static libraries.  We are processing only
+    # those .la files that do not describe static libraries.
+    find "$prefix" -type f -name '*.la' \
+         -exec grep -q '^# Generated by .*libtool' {} \; \
+         -exec grep -q "^old_library=''" {} \; \
+         -exec sed -i {} -e "/^dependency_libs='[^']/ c dependency_libs='' #pruned" \;
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh b/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh
new file mode 100644
index 000000000000..5b01c213fe4a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh
@@ -0,0 +1,9 @@
+# Use the last part of the out path as hash input for the build.
+# This should ensure that it is deterministic across rebuilds of the same
+# derivation and does not easily collide with other builds.
+# We also truncate the hash so that it cannot cause reference cycles.
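+# e.g. with a hypothetical out=/nix/store/9y1xgzabcd...-hello-2.10, the
+# appended flag becomes -frandom-seed=9y1xgzabcd.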
+export NIX_CFLAGS_COMPILE+=" -frandom-seed=$(
+    outbase="${out##*/}"
+    randomseed="${outbase:0:10}"
+    echo $randomseed
+)"
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/role.bash b/nixpkgs/pkgs/build-support/setup-hooks/role.bash
new file mode 100644
index 000000000000..cf69e732e7c3
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/role.bash
@@ -0,0 +1,71 @@
+# Since the same derivation can be depended on in multiple ways, we need to
+# accumulate *each* role (i.e. host and target platforms relative to the
+# depending derivation) in which the derivation is used.
+#
+# The role is intended to be used as part of other variable names like
+#  - $NIX_SOMETHING${role_post}
+
+function getRole() {
+    case $1 in
+        -1)
+            role_post='_FOR_BUILD'
+            ;;
+        0)
+            role_post=''
+            ;;
+        1)
+            role_post='_FOR_TARGET'
+            ;;
+        *)
+            echo "@name@: used as improper sort of dependency" >2
+            return 1
+            ;;
+    esac
+}
+
+# `hostOffset` describes how the host platform of the package is slid relative
+# to the depending package. `targetOffset` likewise describes the target
+# platform of the package. Both are brought into scope of the setup hook
+# defined for the dependency whose setup hook is being processed, relative to
+# the package being built.
+
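+# For example, a dependency in depsBuildHost (nativeBuildInputs) is seen
+# with hostOffset=-1, so getHostRole sets role_post='_FOR_BUILD' and the
+# hook appends to variables like NIX_CFLAGS_COMPILE_FOR_BUILD.
+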
+function getHostRole()   {
+    getRole "$hostOffset"
+}
+function getTargetRole() {
+    getRole "$targetOffset"
+}
+
+# `depHostOffset` describes how the host platforms of the dependencies are
+# slid relative to the depending package. `depTargetOffset` likewise describes
+# the target platforms of the dependencies. Both are brought into scope of the
+# environment hook defined for the dependency being applied, relative to the
+# package being built.
+
+function getHostRoleEnvHook()   {
+    getRole "$depHostOffset"
+}
+function getTargetRoleEnvHook() {
+    getRole "$depTargetOffset"
+}
+
+# This variant is intended specifically for code-producing tool wrapper scripts.
+# `NIX_@wrapperName@_TARGET_*_@suffixSalt@` tracks this (needs to be an exported
+# env var so can't use fancier data structures).
+function getTargetRoleWrapper() {
+    case $targetOffset in
+        -1)
+            export NIX_@wrapperName@_TARGET_BUILD_@suffixSalt@=1
+            ;;
+        0)
+            export NIX_@wrapperName@_TARGET_HOST_@suffixSalt@=1
+            ;;
+        1)
+            export NIX_@wrapperName@_TARGET_TARGET_@suffixSalt@=1
+            ;;
+        *)
+            echo "@name@: used as improper sort of dependency" >2
+            return 1
+            ;;
+    esac
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh b/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh
new file mode 100644
index 000000000000..19dbb10d18e7
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh
@@ -0,0 +1,37 @@
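+# This setup hook splits debug info out of ELF binaries into the "debug"
+# output (falling back to $out), using the .build-id/<xx>/<rest>.debug
+# layout that debuggers such as gdb search by default.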
+export NIX_SET_BUILD_ID=1
+export NIX_LDFLAGS+=" --compress-debug-sections=zlib"
+export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections"
+dontStrip=1
+
+fixupOutputHooks+=(_separateDebugInfo)
+
+_separateDebugInfo() {
+    [ -e "$prefix" ] || return 0
+
+    local dst="${debug:-$out}"
+    if [ "$prefix" = "$dst" ]; then return 0; fi
+
+    dst="$dst/lib/debug/.build-id"
+
+    # Find executables and dynamic libraries.
+    local i magic
+    while IFS= read -r -d $'\0' i; do
+        if ! isELF "$i"; then continue; fi
+
+        # Extract the Build ID. FIXME: there's probably a cleaner way.
+        local id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')"
+        if [ "${#id}" != 40 ]; then
+            echo "could not find build ID of $i, skipping" >&2
+            continue
+        fi
+
+        # Extract the debug info.
+        header "separating debug info from $i (build ID $id)"
+        mkdir -p "$dst/${id:0:2}"
+        $OBJCOPY --only-keep-debug "$i" "$dst/${id:0:2}/${id:2}.debug"
+        $STRIP --strip-debug "$i"
+
+        # Also create a symlink <original-name>.debug.
+        ln -sfn ".build-id/${id:0:2}/${id:2}.debug" "$dst/../$(basename "$i")"
+    done < <(find "$prefix" -type f -print0)
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh b/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh
new file mode 100644
index 000000000000..445fa56d61de
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh
@@ -0,0 +1,13 @@
+# This setup hook adds every JAR in the share/java subdirectories of
+# the build inputs to $CLASSPATH.
+
+export CLASSPATH
+
+addPkgToClassPath () {
+    local jar
+    for jar in $1/share/java/*.jar; do
+        export CLASSPATH=${CLASSPATH-}${CLASSPATH:+:}${jar}
+    done
+}
+
+addEnvHooks "$targetOffset" addPkgToClassPath
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh b/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh
new file mode 100644
index 000000000000..ae34ffec4854
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh
@@ -0,0 +1,34 @@
+updateSourceDateEpoch() {
+    local path="$1"
+
+    # Get the last modification time of all regular files, sort them,
+    # and get the most recent. Maybe we should use
+    # https://github.com/0-wiz-0/findnewest here.
+    local -a res=($(find "$path" -type f -not -newer "$NIX_BUILD_TOP/.." -printf '%T@ %p\0' \
+                    | sort -n --zero-terminated | tail -n1 --zero-terminated | head -c -1))
+    local time="${res[0]//\.[0-9]*/}" # remove the fraction part
+    local newestFile="${res[1]}"
+
+    # Update $SOURCE_DATE_EPOCH if the most recent file we found is newer.
+    if [ "${time:-0}" -gt "$SOURCE_DATE_EPOCH" ]; then
+        echo "setting SOURCE_DATE_EPOCH to timestamp $time of file $newestFile"
+        export SOURCE_DATE_EPOCH="$time"
+
+        # Warn if the new timestamp is too close to the present. This
+        # may indicate that the hook was applied to a file generated
+        # during the build, or that an unpacker didn't restore
+        # timestamps properly.
+        local now="$(date +%s)"
+        if [ "$time" -gt $((now - 60)) ]; then
+            echo "warning: file $newestFile may be generated; SOURCE_DATE_EPOCH may be non-deterministic"
+        fi
+    fi
+}
+
+postUnpackHooks+=(_updateSourceDateEpochFromSourceRoot)
+
+_updateSourceDateEpochFromSourceRoot() {
+    if [ -n "$sourceRoot" ]; then
+        updateSourceDateEpoch "$sourceRoot"
+    fi
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh b/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh
new file mode 100644
index 000000000000..96bf48cf123a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh
@@ -0,0 +1,5 @@
+setupDebugInfoDirs () {
+    addToSearchPath NIX_DEBUG_INFO_DIRS $1/lib/debug
+}
+
+addEnvHooks "$targetOffset" setupDebugInfoDirs
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh b/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh
new file mode 100644
index 000000000000..4bf7c0ff1af4
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh
@@ -0,0 +1,88 @@
+# This setup hook modifies a Perl script so that any "-I" flags in its shebang
+# line are rewritten into a "use lib ..." statement on the next line. This gets
+# around a limitation in Darwin, which will not properly handle a script whose
+# shebang line exceeds 511 characters.
+#
+# Each occurrence of "-I /path/to/lib1" or "-I/path/to/lib2" is removed from
+# the shebang line, along with the single space that preceded it. These library
+# paths are placed into a new line of the form
+#
+#     use lib "/path/to/lib1", "/path/to/lib2";
+#
+# immediately following the shebang line. If a library appeared in the original
+# list more than once, only its first occurrence will appear in the output
+# list. In other words, the libraries are deduplicated, but the ordering of the
+# first appearance of each one is preserved.
+#
+# Any flags other than "-I" in the shebang line are left as-is, and the
+# interpreter is also left alone (although the script will abort if the
+# interpreter does not seem to be either "perl" or else "env" with "perl" as
+# its argument). Each line after the shebang line is left unchanged. Each file
+# is modified in place.
+#
+# Usage:
+#     shortenPerlShebang SCRIPT...
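+#
+# For example (store paths hypothetical), the shebang
+#     #!/nix/store/...-perl/bin/perl -w -I/nix/store/...-Foo/lib/perl5
+# becomes
+#     #!/nix/store/...-perl/bin/perl -w
+#     use lib "/nix/store/...-Foo/lib/perl5";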
+
+shortenPerlShebang() {
+    while [ $# -gt 0 ]; do
+        _shortenPerlShebang "$1"
+        shift
+    done
+}
+
+_shortenPerlShebang() {
+    local program="$1"
+
+    echo "shortenPerlShebang: rewriting shebang line in $program"
+
+    if ! isScript "$program"; then
+        die "shortenPerlShebang: refusing to modify $program because it is not a script"
+    fi
+
+    local temp="$(mktemp)"
+
+    gawk '
+        (NR == 1) {
+            if (!($0 ~ /\/(perl|env +perl)\>/)) {
+                print "shortenPerlShebang: script does not seem to be a Perl script" > "/dev/stderr"
+                exit 1
+            }
+            idx = 0
+            while (match($0, / -I ?([^ ]+)/, pieces)) {
+                matches[idx] = pieces[1]
+                idx++
+                $0 = gensub(/ -I ?[^ ]+/, "", 1, $0)
+            }
+            print $0
+            if (idx > 0) {
+                prefix = "use lib "
+                for (idx in matches) {
+                    path = matches[idx]
+                    if (!(path in seen)) {
+                        printf "%s\"%s\"", prefix, path
+                        seen[path] = 1
+                        prefix = ", "
+                    }
+                }
+                print ";"
+            }
+        }
+        (NR > 1 ) {
+            print
+        }
+    ' "$program" > "$temp" || die
+    # Preserve the mode of the original file
+    cp --preserve=mode --attributes-only "$program" "$temp"
+    mv "$temp" "$program"
+
+    # Measure the new shebang line length and make sure it's okay. We subtract
+    # one to account for the trailing newline that "head" included in its
+    # output.
+    local new_length=$(( $(head -n 1 "$program" | wc -c) - 1 ))
+
+    # Darwin is okay when the shebang line contains 511 characters, but not
+    # when it contains 512 characters.
+    if [ $new_length -ge 512 ]; then
+        die "shortenPerlShebang: shebang line is $new_length characters--still too long for Darwin!"
+    fi
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/strip.sh b/nixpkgs/pkgs/build-support/setup-hooks/strip.sh
new file mode 100644
index 000000000000..c31a50eba57b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/strip.sh
@@ -0,0 +1,57 @@
+# This setup hook strips libraries and executables in the fixup phase.
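+#
+# Packages can tune it via variables, e.g. (illustrative values):
+#   stripDebugList="bin libexec"   # directories stripped with -S
+#   dontStripTarget=1              # skip $TARGET_STRIP entirely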
+
+fixupOutputHooks+=(_doStrip)
+
+_doStrip() {
+    # We don't bother to strip build platform code because it shouldn't make it
+    # to $out anyway; if it does, that's a bigger problem that a lack of
+    # stripping will help catch.
+    local -ra flags=(dontStripHost dontStripTarget)
+    local -ra stripCmds=(STRIP TARGET_STRIP)
+
+    # Optimization
+    if [[ "${STRIP-}" == "${TARGET_STRIP-}" ]]; then
+        dontStripTarget+=1
+    fi
+
+    local i
+    for i in ${!stripCmds[@]}; do
+        local -n flag="${flags[$i]}"
+        local -n stripCmd="${stripCmds[$i]}"
+
+        # `dontStrip` disables them all
+        if [[ "${dontStrip-}" || "${flag-}" ]] || ! type -f "${stripCmd-}" 2>/dev/null
+        then continue; fi
+
+        stripDebugList=${stripDebugList:-lib lib32 lib64 libexec bin sbin}
+        if [ -n "$stripDebugList" ]; then
+            stripDirs "$stripCmd" "$stripDebugList" "${stripDebugFlags:--S}"
+        fi
+
+        stripAllList=${stripAllList:-}
+        if [ -n "$stripAllList" ]; then
+            stripDirs "$stripCmd" "$stripAllList" "${stripAllFlags:--s}"
+        fi
+    done
+}
+
+stripDirs() {
+    local cmd="$1"
+    local dirs="$2"
+    local stripFlags="$3"
+    local dirsNew=
+
+    local d
+    for d in ${dirs}; do
+        if [ -d "$prefix/$d" ]; then
+            dirsNew="${dirsNew} $prefix/$d "
+        fi
+    done
+    dirs=${dirsNew}
+
+    if [ -n "${dirs}" ]; then
+        header "stripping (with command $cmd and flags $stripFlags) in$dirs"
+        find $dirs -type f -exec $cmd $commonStripFlags $stripFlags '{}' \; 2>/dev/null
+        stopNest
+    fi
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh b/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh
new file mode 100644
index 000000000000..ebd3afa05d94
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh
@@ -0,0 +1,12 @@
+preConfigurePhases+=" updateAutotoolsGnuConfigScriptsPhase"
+
+updateAutotoolsGnuConfigScriptsPhase() {
+    if [ -n "${dontUpdateAutotoolsGnuConfigScripts-}" ]; then return; fi
+
+    for script in config.sub config.guess; do
+        for f in $(find . -type f -name "$script"); do
+            echo "Updating Autotools / GNU config script to a newer upstream version: $f"
+            cp -f "@gnu_config@/$script" "$f"
+        done
+    done
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh b/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh
new file mode 100644
index 000000000000..53335d7a9a7a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh
@@ -0,0 +1 @@
+export NIX_CFLAGS_COMPILE+=" -D_GLIBCXX_USE_CXX11_ABI=0"
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh b/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh
new file mode 100644
index 000000000000..ada1b56760d6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh
@@ -0,0 +1,18 @@
+# This setup hook validates each pkgconfig file in each output.
+
+fixupOutputHooks+=(_validatePkgConfig)
+
+_validatePkgConfig() {
+    local bail=0
+    for pc in $(find "$prefix" -name '*.pc'); do
+        # Do not fail immediately. It's nice to see all errors when
+        # there are multiple pkgconfig files.
+        if ! pkg-config --validate "$pc"; then
+            bail=1
+        fi
+    done
+
+    if [ $bail -eq 1 ]; then
+        exit 1
+    fi
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh b/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh
new file mode 100644
index 000000000000..6130f32bef86
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh
@@ -0,0 +1,45 @@
+
+fixupOutputHooks+=(_linkDLLs)
+
+# For every *.{exe,dll} in $output/bin/ we try to find all (potential)
+# transitive dependencies and symlink those DLLs into $output/bin
+# so they are found on invocation.
+# (DLLs are first searched in the directory of the running exe file.)
+# The links are relative, so relocating whole /nix/store won't break them.
+_linkDLLs() {
+(
+    if [ ! -d "$prefix/bin" ]; then exit; fi
+    cd "$prefix/bin"
+
+    # Compose path list where DLLs should be located:
+    #   prefix $PATH by currently-built outputs
+    local DLLPATH=""
+    local outName
+    for outName in $outputs; do
+        addToSearchPath DLLPATH "${!outName}/bin"
+    done
+    DLLPATH="$DLLPATH:$PATH"
+
+    echo DLLPATH="'$DLLPATH'"
+
+    linkCount=0
+    # Iterate over any DLL that we depend on.
+    local dll
+    for dll in $($OBJDUMP -p *.{exe,dll} | sed -n 's/.*DLL Name: \(.*\)/\1/p' | sort -u); do
+        if [ -e "./$dll" ]; then continue; fi
+        # Locate the DLL - it should be an *executable* file on $DLLPATH.
+        local dllPath="$(PATH="$DLLPATH" type -P "$dll")"
+        if [ -z "$dllPath" ]; then continue; fi
+        # That DLL might have its own (transitive) dependencies,
+        # so add also all DLLs from its directory to be sure.
+        local dllPath2
+        for dllPath2 in "$dllPath" "$(dirname $(readlink "$dllPath" || echo "$dllPath"))"/*.dll; do
+            if [ -e ./"$(basename "$dllPath2")" ]; then continue; fi
+            CYGWIN+=\ winsymlinks:nativestrict ln -sr "$dllPath2" .
+            linkCount=$(($linkCount+1))
+        done
+    done
+    echo "Created $linkCount DLL link(s) in $prefix/bin"
+)
+}
+
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix
new file mode 100644
index 000000000000..d0ea088bf71e
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix
@@ -0,0 +1,177 @@
+{ stdenv
+, lib
+, makeSetupHook
+, makeWrapper
+, gobject-introspection
+, isGraphical ? true
+, gtk3
+, librsvg
+, dconf
+, callPackage
+, wrapGAppsHook
+, writeTextFile
+}:
+
+makeSetupHook {
+  deps = lib.optionals (!stdenv.isDarwin) [
+    # It is highly probable that a program will use GSettings,
+    # at minimum through GTK file chooser dialogue.
+    # Let’s add a GIO module for “dconf” GSettings backend
+    # to avoid falling back to “memory” backend. This is
+    # required for GSettings-based settings to be persisted.
+    # Unfortunately, it also requires the user to have dconf
+    # D-Bus service enabled globally (e.g. through a NixOS module).
+    dconf.lib
+  ] ++ lib.optionals isGraphical [
+    # TODO: remove this, packages should depend on GTK explicitly.
+    gtk3
+
+    # librsvg provides a module for gdk-pixbuf to allow rendering
+    # SVG icons. Most icon themes are SVG-based and so are some
+    # graphics in GTK (e.g. cross for closing window in window title bar)
+    # so it is pretty much required for applications using GTK.
+    librsvg
+  ] ++ [
+
+    # We use the wrapProgram function.
+    makeWrapper
+  ];
+  substitutions = {
+    passthru.tests = let
+      sample-project = ./tests/sample-project;
+
+      testLib = callPackage ./tests/lib.nix { };
+      inherit (testLib) expectSomeLineContainingYInFileXToMentionZ;
+    in rec {
+      # Simple derivation containing a program and a daemon.
+      basic = stdenv.mkDerivation {
+        name = "basic";
+
+        src = sample-project;
+
+        nativeBuildInputs = [ wrapGAppsHook ];
+
+        installFlags = [ "bin-foo" "libexec-bar" ];
+      };
+
+      # The wrapper for executable files should add path to dconf GIO module.
+      basic-contains-dconf = let
+        tested = basic;
+      in testLib.runTest "basic-contains-dconf" (
+        testLib.skip stdenv.isDarwin ''
+          ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GIO_EXTRA_MODULES=" "${dconf.lib}/lib/gio/modules"}
+          ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GIO_EXTRA_MODULES=" "${dconf.lib}/lib/gio/modules"}
+        ''
+      );
+
+      # Simple derivation containing a gobject-introspection typelib.
+      typelib-Mahjong = stdenv.mkDerivation {
+        name = "typelib-Mahjong";
+
+        src = sample-project;
+
+        installFlags = [ "typelib-Mahjong" ];
+      };
+
+      # Simple derivation using a typelib.
+      typelib-user = stdenv.mkDerivation {
+        name = "typelib-user";
+
+        src = sample-project;
+
+        nativeBuildInputs = [
+          gobject-introspection
+          wrapGAppsHook
+        ];
+
+        buildInputs = [
+          typelib-Mahjong
+        ];
+
+        installFlags = [ "bin-foo" "libexec-bar" ];
+      };
+
+      # Testing cooperation with gobject-introspection setup hook,
+      # which should populate GI_TYPELIB_PATH variable with paths
+      # to typelibs among the derivation’s dependencies.
+      # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
+      typelib-user-has-gi-typelib-path = let
+        tested = typelib-user;
+      in testLib.runTest "typelib-user-has-gi-typelib-path" ''
+        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH=" "${typelib-Mahjong}/lib/girepository-1.0"}
+        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH=" "${typelib-Mahjong}/lib/girepository-1.0"}
+      '';
+
+      # Simple derivation containing a gobject-introspection typelib in lib output.
+      typelib-Bechamel = stdenv.mkDerivation {
+        name = "typelib-Bechamel";
+
+        outputs = [ "out" "lib" ];
+
+        src = sample-project;
+
+        makeFlags = [
+          "LIBDIR=${placeholder "lib"}/lib"
+        ];
+
+        installFlags = [ "typelib-Bechamel" ];
+      };
+
+      # Simple derivation using a typelib from non-default output.
+      typelib-multiout-user = stdenv.mkDerivation {
+        name = "typelib-multiout-user";
+
+        src = sample-project;
+
+        nativeBuildInputs = [
+          gobject-introspection
+          wrapGAppsHook
+        ];
+
+        buildInputs = [
+          typelib-Bechamel
+        ];
+
+        installFlags = [ "bin-foo" "libexec-bar" ];
+      };
+
+      # Testing cooperation with gobject-introspection setup hook,
+      # which should populate GI_TYPELIB_PATH variable with paths
+      # to typelibs among the derivation’s dependencies,
+      # even when they are not in default output.
+      # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
+      typelib-multiout-user-has-gi-typelib-path = let
+        tested = typelib-multiout-user;
+      in testLib.runTest "typelib-multiout-user-has-gi-typelib-path" ''
+        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH=" "${typelib-Bechamel.lib}/lib/girepository-1.0"}
+        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH=" "${typelib-Bechamel.lib}/lib/girepository-1.0"}
+      '';
+
+      # Simple derivation that contains a typelib as well as a program using it.
+      typelib-self-user = stdenv.mkDerivation {
+        name = "typelib-self-user";
+
+        src = sample-project;
+
+        nativeBuildInputs = [
+          gobject-introspection
+          wrapGAppsHook
+        ];
+
+        installFlags = [ "typelib-Cow" "bin-foo" "libexec-bar" ];
+      };
+
+      # Testing cooperation with gobject-introspection setup hook,
+      # which should add the path to derivation’s own typelibs
+      # to GI_TYPELIB_PATH variable.
+      # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
+      # https://github.com/NixOS/nixpkgs/issues/85515
+      typelib-self-user-has-gi-typelib-path = let
+        tested = typelib-self-user;
+      in testLib.runTest "typelib-self-user-has-gi-typelib-path" ''
+        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH=" "${typelib-self-user}/lib/girepository-1.0"}
+        ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH=" "${typelib-self-user}/lib/girepository-1.0"}
+      '';
+    };
+  };
+} ./wrap-gapps-hook.sh
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix
new file mode 100644
index 000000000000..1757bdbbe250
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix
@@ -0,0 +1,30 @@
+{ runCommand
+}:
+
+rec {
+  runTest = name: body: runCommand name { } ''
+    set -o errexit
+    ${body}
+    touch $out
+  '';
+
+  skip = cond: text:
+    if cond then ''
+      echo "Skipping test $name" > /dev/stderr
+    '' else text;
+
+  fail = text: ''
+    echo "FAIL: $name: ${text}" > /dev/stderr
+    exit 1
+  '';
+
+  expectSomeLineContainingYInFileXToMentionZ = file: filter: expected: ''
+    if ! cat "${file}" | grep "${filter}"; then
+        ${fail "The file “${file}” should include a line containing “${filter}”."}
+    fi
+
+    if ! cat "${file}" | grep "${filter}" | grep ${expected}; then
+        ${fail "The file “${file}” should include a line containing “${filter}” that also contains “${expected}”."}
+    fi
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile
new file mode 100644
index 000000000000..5d234db11a0b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile
@@ -0,0 +1,30 @@
+PREFIX = $(out)
+BINDIR = $(PREFIX)/bin
+LIBEXECDIR = $(PREFIX)/libexec
+LIBDIR = $(PREFIX)/lib
+TYPELIBDIR = $(LIBDIR)/girepository-1.0
+
+all:
+	echo "Compiling…"
+install:
+	echo "Installing…"
+
+bin:
+	mkdir -p $(BINDIR)
+# Adds `bin-${foo}` targets, that install `${foo}` executable to `$(BINDIR)`.
+bin-%: bin
+	touch $(BINDIR)/$(@:bin-%=%)
+	chmod +x $(BINDIR)/$(@:bin-%=%)
+
+libexec:
+	mkdir -p $(LIBEXECDIR)
+# Adds `libexec-${foo}` targets, that install `${foo}` executable to `$(LIBEXECDIR)`.
+libexec-%: libexec
+	touch $(LIBEXECDIR)/$(@:libexec-%=%)
+	chmod +x $(LIBEXECDIR)/$(@:libexec-%=%)
+
+typelib:
+	mkdir -p $(TYPELIBDIR)
+# Adds `typelib-${foo}` targets, that install `${foo}-1.0.typelib` file to `$(TYPELIBDIR)`.
+typelib-%: typelib
+	touch $(TYPELIBDIR)/$(@:typelib-%=%)-1.0.typelib
diff --git a/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh
new file mode 100644
index 000000000000..1a46e075dbe7
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh
@@ -0,0 +1,93 @@
+# shellcheck shell=bash
+gappsWrapperArgs=()
+
+find_gio_modules() {
+    if [ -d "$1/lib/gio/modules" ] && [ -n "$(ls -A "$1/lib/gio/modules")" ] ; then
+        gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$1/lib/gio/modules")
+    fi
+}
+
+addEnvHooks "${targetOffset:?}" find_gio_modules
+
+gappsWrapperArgsHook() {
+    if [ -n "$GDK_PIXBUF_MODULE_FILE" ]; then
+        gappsWrapperArgs+=(--set GDK_PIXBUF_MODULE_FILE "$GDK_PIXBUF_MODULE_FILE")
+    fi
+
+    if [ -n "$XDG_ICON_DIRS" ]; then
+        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$XDG_ICON_DIRS")
+    fi
+
+    if [ -n "$GSETTINGS_SCHEMAS_PATH" ]; then
+        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$GSETTINGS_SCHEMAS_PATH")
+    fi
+
+    # Check for prefix as well
+    if [ -d "${prefix:?}/share" ]; then
+        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$prefix/share")
+    fi
+
+    if [ -d "$prefix/lib/gio/modules" ] && [ -n "$(ls -A "$prefix/lib/gio/modules")" ]; then
+        gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$prefix/lib/gio/modules")
+    fi
+
+    for v in ${wrapPrefixVariables:-} GST_PLUGIN_SYSTEM_PATH_1_0 GI_TYPELIB_PATH GRL_PLUGIN_PATH; do
+        if [ -n "${!v}" ]; then
+            gappsWrapperArgs+=(--prefix "$v" : "${!v}")
+        fi
+    done
+}
+
+preFixupPhases+=" gappsWrapperArgsHook"
+
+wrapGApp() {
+    local program="$1"
+    shift 1
+    wrapProgram "$program" "${gappsWrapperArgs[@]}" "$@"
+}
+
+# Note: $gappsWrapperArgs still gets defined even if ${dontWrapGApps-} is set.
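+# A package can set dontWrapGApps and instead apply the collected
+# arguments itself, e.g. (illustrative, binary path hypothetical):
+#   dontWrapGApps = true;
+#   postFixup = ''wrapGApp "$out/libexec/my-daemon"'';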
+wrapGAppsHook() {
+    # guard against running multiple times (e.g. due to propagation)
+    [ -z "$wrapGAppsHookHasRun" ] || return 0
+    wrapGAppsHookHasRun=1
+
+    if [[ -z "${dontWrapGApps:-}" ]]; then
+        targetDirsThatExist=()
+        targetDirsRealPath=()
+
+        # wrap binaries
+        targetDirs=("${prefix}/bin" "${prefix}/libexec")
+        for targetDir in "${targetDirs[@]}"; do
+            if [[ -d "${targetDir}" ]]; then
+                targetDirsThatExist+=("${targetDir}")
+                targetDirsRealPath+=("$(realpath "${targetDir}")/")
+                find "${targetDir}" -type f -executable -print0 |
+                    while IFS= read -r -d '' file; do
+                        echo "Wrapping program '${file}'"
+                        wrapGApp "${file}"
+                    done
+            fi
+        done
+
+        # wrap links to binaries that point outside targetDirs
+        # Note: links to binaries within targetDirs do not need
+        #       to be wrapped as the binaries have already been wrapped
+        if [[ ${#targetDirsThatExist[@]} -ne 0 ]]; then
+            find "${targetDirsThatExist[@]}" -type l -xtype f -executable -print0 |
+                while IFS= read -r -d '' linkPath; do
+                    linkPathReal=$(realpath "${linkPath}")
+                    for targetPath in "${targetDirsRealPath[@]}"; do
+                        if [[ "$linkPathReal" == "$targetPath"* ]]; then
+                            echo "Not wrapping link: '$linkPath' (already wrapped)"
+                            continue 2
+                        fi
+                    done
+                    echo "Wrapping link: '$linkPath'"
+                    wrapGApp "${linkPath}"
+                done
+        fi
+    fi
+}
+
+fixupOutputHooks+=(wrapGAppsHook)
diff --git a/nixpkgs/pkgs/build-support/setup-systemd-units.nix b/nixpkgs/pkgs/build-support/setup-systemd-units.nix
new file mode 100644
index 000000000000..4c7ee86669f5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/setup-systemd-units.nix
@@ -0,0 +1,83 @@
+# Build a script to install and start a set of systemd units on any
+# systemd-based system.
+#
+# Creates a symlink at /etc/systemd-static/${namespace} for slightly
+# improved atomicity.
+{ writeScriptBin
+, bash
+, coreutils
+, systemd
+, runCommand
+, lib
+}:
+  { units     # : AttrSet String (Either Path { path : Path, wanted-by : [ String ] })
+              # ^ A set whose names are unit names and values are
+              # either paths to the corresponding unit files or a set
+              # containing the path and the list of units this unit
+              # should be wanted-by (none by default).
+              #
+              # The names should include the unit suffix
+              # (e.g. ".service")
+  , namespace # : String
+              # The namespace for the unit files, to allow for
+              # multiple independent unit sets managed by
+              # `setupSystemdUnits`.
+  }:
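+    # A hypothetical invocation sketch; the namespace, unit names and
+    # paths below are placeholders:
+    #
+    #   setupSystemdUnits {
+    #     namespace = "my-app";
+    #     units = {
+    #       "my-app.service" = ./my-app.service;
+    #       "my-app.timer" = {
+    #         path = ./my-app.timer;
+    #         wanted-by = [ "timers.target" ];
+    #       };
+    #     };
+    #   }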
+    let static = runCommand "systemd-static" {}
+          ''
+            mkdir -p $out
+            ${lib.concatStringsSep "\n" (lib.mapAttrsToList (nm: file:
+                "ln -sv ${file.path or file} $out/${nm}"
+             ) units)}
+          '';
+        add-unit-snippet = name: file:
+          ''
+            oldUnit=$(readlink -f "$unitDir/${name}" || echo "$unitDir/${name}")
+            if [ -f "$oldUnit" -a "$oldUnit" != "${file.path or file}" ]; then
+              unitsToStop+=("${name}")
+            fi
+            ln -sf "/etc/systemd-static/${namespace}/${name}" \
+              "$unitDir/.${name}.tmp"
+            mv -T "$unitDir/.${name}.tmp" "$unitDir/${name}"
+            ${lib.concatStringsSep "\n" (map (unit:
+                ''
+                  mkdir -p "$unitDir/${unit}.wants"
+                  ln -sf "../${name}" \
+                    "$unitDir/${unit}.wants/.${name}.tmp"
+                  mv -T "$unitDir/${unit}.wants/.${name}.tmp" \
+                    "$unitDir/${unit}.wants/${name}"
+                ''
+              ) file.wanted-by or [])}
+            unitsToStart+=("${name}")
+          '';
+    in
+      writeScriptBin "setup-systemd-units"
+        ''
+          #!${bash}/bin/bash -e
+          export PATH=${coreutils}/bin:${systemd}/bin
+
+          unitDir=/etc/systemd/system
+          if [ ! -w "$unitDir" ]; then
+            unitDir=/nix/var/nix/profiles/default/lib/systemd/system
+            mkdir -p "$unitDir"
+          fi
+          declare -a unitsToStop unitsToStart
+
+          oldStatic=$(readlink -f /etc/systemd-static/${namespace} || true)
+          if [ "$oldStatic" != "${static}" ]; then
+            ${lib.concatStringsSep "\n"
+                (lib.mapAttrsToList add-unit-snippet units)}
+            if [ ''${#unitsToStop[@]} -ne 0 ]; then
+              echo "Stopping unit(s) ''${unitsToStop[@]}" >&2
+              systemctl stop "''${unitsToStop[@]}"
+            fi
+            mkdir -p /etc/systemd-static
+            ln -sfT ${static} /etc/systemd-static/.${namespace}.tmp
+            mv -T /etc/systemd-static/.${namespace}.tmp /etc/systemd-static/${namespace}
+            systemctl daemon-reload
+            echo "Starting unit(s) ''${unitsToStart[@]}" >&2
+            systemctl start "''${unitsToStart[@]}"
+          else
+            echo "Units unchanged, doing nothing" >&2
+          fi
+        ''
diff --git a/nixpkgs/pkgs/build-support/singularity-tools/default.nix b/nixpkgs/pkgs/build-support/singularity-tools/default.nix
new file mode 100644
index 000000000000..318f5b430fef
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/singularity-tools/default.nix
@@ -0,0 +1,108 @@
+{ runCommand
+, lib
+, stdenv
+, storeDir ? builtins.storeDir
+, writeScript
+, singularity
+, writeReferencesToFile
+, bash
+, vmTools
+, gawk
+, util-linux
+, runtimeShell
+, e2fsprogs }:
+
+rec {
+  shellScript = name: text:
+    writeScript name ''
+      #!${runtimeShell}
+      set -e
+      ${text}
+    '';
+
+  mkLayer = {
+    name,
+    contents ? [],
+  }:
+    runCommand "singularity-layer-${name}" {
+      inherit contents;
+    } ''
+      mkdir $out
+      for f in $contents ; do
+        cp -ra $f $out/
+      done
+    '';
+
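+  # A hypothetical usage sketch; the name, contents, disk size and run
+  # script below are placeholders:
+  #
+  #   singularity-tools.buildImage {
+  #     name = "hello";
+  #     contents = [ pkgs.hello ];
+  #     diskSize = 2048;
+  #     runScript = "#!${stdenv.shell}\nexec hello";
+  #   }
+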
+  buildImage = {
+    name,
+    contents ? [],
+    diskSize ? 1024,
+    runScript ? "#!${stdenv.shell}\nexec /bin/sh",
+    runAsRoot ? null
+  }:
+    let layer = mkLayer {
+          inherit name;
+          contents = contents ++ [ bash runScriptFile ];
+          };
+        runAsRootFile = shellScript "run-as-root.sh" runAsRoot;
+        runScriptFile = shellScript "run-script.sh" runScript;
+        result = vmTools.runInLinuxVM (
+          runCommand "singularity-image-${name}.img" {
+            buildInputs = [ singularity e2fsprogs util-linux gawk ];
+            layerClosure = writeReferencesToFile layer;
+            preVM = vmTools.createEmptyImage {
+              size = diskSize;
+              fullName = "singularity-run-disk";
+            };
+          }
+          ''
+            rm -rf $out
+            mkdir disk
+            mkfs -t ext3 -b 4096 /dev/${vmTools.hd}
+            mount /dev/${vmTools.hd} disk
+            mkdir -p disk/img
+            cd disk/img
+            mkdir proc sys dev
+
+            # Run root script
+            ${lib.optionalString (runAsRoot != null) ''
+              mkdir -p ./${storeDir}
+              mount --rbind ${storeDir} ./${storeDir}
+              unshare -imnpuf --mount-proc chroot ./ ${runAsRootFile}
+              umount -R ./${storeDir}
+            ''}
+
+            # Build /bin and copy across closure
+            mkdir -p bin nix/store
+            for f in $(cat $layerClosure) ; do
+              cp -ar $f ./$f
+            done
+
+            for c in ${toString contents} ; do
+              for f in $c/bin/* ; do
+                if [ ! -e bin/$(basename $f) ] ; then
+                  ln -s $f bin/
+                fi
+              done
+            done
+
+            # Create runScript and link shell
+            if [ ! -e bin/sh ]; then
+              ln -s ${runtimeShell} bin/sh
+            fi
+            mkdir -p .singularity.d
+            ln -s ${runScriptFile} .singularity.d/runscript
+
+            # Fill out .singularity.d
+            mkdir -p .singularity.d/env
+            touch .singularity.d/env/94-appsbase.sh
+
+            cd ..
+            mkdir -p /var/singularity/mnt/{container,final,overlay,session,source}
+            echo "root:x:0:0:System administrator:/root:/bin/sh" > /etc/passwd
+            echo > /etc/resolv.conf
+            TMPDIR=$(pwd -P) singularity build $out ./img
+          '');
+
+    in result;
+}
diff --git a/nixpkgs/pkgs/build-support/skaware/build-skaware-package.nix b/nixpkgs/pkgs/build-support/skaware/build-skaware-package.nix
new file mode 100644
index 000000000000..b27b65f48a59
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/skaware/build-skaware-package.nix
@@ -0,0 +1,105 @@
+{ lib, stdenv, cleanPackaging, fetchurl }:
+{
+  # : string
+  pname
+  # : string
+, version
+  # : string
+, sha256
+  # : string
+, description
+  # : list Platform
+, platforms ? lib.platforms.all
+  # : list string
+, outputs ? [ "bin" "lib" "dev" "doc" "out" ]
+  # TODO(Profpatsch): automatically infer most of these
+  # : list string
+, configureFlags
+  # mostly for moving and deleting files from the build directory
+  # : lines
+, postInstall
+  # : list Maintainer
+, maintainers ? []
+
+
+}:
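+# A hypothetical invocation sketch (the version, hash, description and
+# flags below are placeholders; lib.fakeSha256 stands in for the real
+# hash):
+#
+#   buildSkawarePackage {
+#     pname = "skalibs";
+#     version = "0.0.0";
+#     sha256 = lib.fakeSha256;
+#     description = "A set of general-purpose C programming libraries";
+#     configureFlags = [ ];
+#     postInstall = "";
+#   }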
+
+let
+
+  # File globs that can always be deleted
+  commonNoiseFiles = [
+    ".gitignore"
+    "Makefile"
+    "INSTALL"
+    "configure"
+    "patch-for-solaris"
+    "src/**/*"
+    "tools/**/*"
+    "package/**/*"
+    "config.mak"
+  ];
+
+  # File globs that should be moved to $doc
+  commonMetaFiles = [
+    "COPYING"
+    "AUTHORS"
+    "NEWS"
+    "CHANGELOG"
+    "README"
+    "README.*"
+  ];
+
+in stdenv.mkDerivation {
+  inherit pname version;
+
+  src = fetchurl {
+    url = "https://skarnet.org/software/${pname}/${pname}-${version}.tar.gz";
+    inherit sha256;
+  };
+
+  inherit outputs;
+
+  dontDisableStatic = true;
+  enableParallelBuilding = true;
+
+  configureFlags = configureFlags ++ [
+    "--enable-absolute-paths"
+    # We assume every nix-based cross target has urandom.
+    # This might not hold for e.g. BSD.
+    "--with-sysdep-devurandom=yes"
+    (if stdenv.isDarwin
+      then "--disable-shared"
+      else "--enable-shared")
+  ]
+    # On darwin, the target triplet from -dumpmachine includes version number,
+    # but skarnet.org software uses the triplet to test binary compatibility.
+    # Explicitly setting target ensures code can be compiled against a skalibs
+    # binary built on a different version of darwin.
+    # http://www.skarnet.org/cgi-bin/archive.cgi?1:mss:623:heiodchokfjdkonfhdph
+    ++ (lib.optional stdenv.isDarwin
+         "--build=${stdenv.hostPlatform.system}");
+
+  # TODO(Profpatsch): ensure that there is always a $doc output!
+  postInstall = ''
+    echo "Cleaning & moving common files"
+    ${cleanPackaging.commonFileActions {
+       noiseFiles = commonNoiseFiles;
+       docFiles = commonMetaFiles;
+     }} $doc/share/doc/${pname}
+
+    ${postInstall}
+  '';
+
+  postFixup = ''
+    ${cleanPackaging.checkForRemainingFiles}
+  '';
+
+  meta = {
+    homepage = "https://skarnet.org/software/${pname}/";
+    inherit description platforms;
+    license = lib.licenses.isc;
+    maintainers = with lib.maintainers;
+      [ pmahoney Profpatsch qyliss ] ++ maintainers;
+  };
+
+}
diff --git a/nixpkgs/pkgs/build-support/skaware/clean-packaging.nix b/nixpkgs/pkgs/build-support/skaware/clean-packaging.nix
new file mode 100644
index 000000000000..d51cbec8aeb2
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/skaware/clean-packaging.nix
@@ -0,0 +1,53 @@
+# A set of utilities that ensure the cwd of a build
+# is completely clean after the build, meaning all
+# files were either discarded or moved to outputs.
+# This guarantees nothing is forgotten and that new
+# files are handled correctly on update.
+{ lib, stdenv, file, writeScript }:
+
+let
+  globWith = lib.concatMapStringsSep "\n";
+  rmNoise = noiseGlobs: globWith (f:
+    "rm -rf ${f}") noiseGlobs;
+  mvDoc = docGlobs: globWith
+    (f: ''mv ${f} "$DOCDIR" 2>/dev/null || true'')
+    docGlobs;
+
+  # Shell script that implements common move & remove actions
+  # $1 is the doc directory (will be created).
+  # Best used in conjunction with checkForRemainingFiles
+  commonFileActions =
+    { # list of fileglobs that are removed from the source dir
+      noiseFiles
+      # files that are moved to the doc directory ($1)
+      # TODO(Profpatsch): allow to set target dir with
+      # { glob = …; to = "html" } (relative to docdir)
+    , docFiles }:
+    writeScript "common-file-actions.sh" ''
+      #!${stdenv.shell}
+      set -e
+      DOCDIR="''${1?commonFileActions: DOCDIR as argv[1] required}"
+      shopt -s globstar extglob nullglob
+      mkdir -p "$DOCDIR"
+      ${mvDoc docFiles}
+      ${rmNoise noiseFiles}
+    '';
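+
+  # A hypothetical postInstall sketch using commonFileActions; the glob
+  # lists and package name below are placeholders:
+  #
+  #   postInstall = ''
+  #     ${cleanPackaging.commonFileActions {
+  #        noiseFiles = [ "Makefile" "src/**/*" ];
+  #        docFiles = [ "README" "COPYING" ];
+  #      }} $doc/share/doc/mypkg
+  #   '';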
+
+  # Shell script to check whether the build directory is empty.
+  # If there are still files remaining, exit 1 with a helpful
+  # listing of all remaining files and their types.
+  checkForRemainingFiles = writeScript "check-for-remaining-files.sh" ''
+    #!${stdenv.shell}
+    echo "Checking for remaining source files"
+    rem=$(find -mindepth 1 -xtype f -print0 \
+           | tee $TMP/remaining-files)
+    if [[ "$rem" != "" ]]; then
+      echo "ERROR: These files should be either moved or deleted:"
+      cat $TMP/remaining-files | xargs -0 ${file}/bin/file
+      exit 1
+    fi
+  '';
+
+in {
+  inherit commonFileActions checkForRemainingFiles;
+}
diff --git a/nixpkgs/pkgs/build-support/snap/default.nix b/nixpkgs/pkgs/build-support/snap/default.nix
new file mode 100644
index 000000000000..ba5271868911
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/snap/default.nix
@@ -0,0 +1,4 @@
+{ callPackage }:
+{
+  makeSnap = callPackage ./make-snap.nix { };
+}
diff --git a/nixpkgs/pkgs/build-support/snap/make-snap.nix b/nixpkgs/pkgs/build-support/snap/make-snap.nix
new file mode 100644
index 000000000000..cef7500bcbaf
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/snap/make-snap.nix
@@ -0,0 +1,84 @@
+{
+  runCommand, squashfsTools, closureInfo, lib, jq, writeText
+}:
+
+{
+  # The meta parameter is the contents of the `snap.yaml`, NOT the
+  # `snapcraft.yaml`.
+  #
+  # - `snap.yaml` is what is inside of the final Snap,
+  # - `snapcraft.yaml` is used by `snapcraft` to build snaps
+  #
+  # Since we skip the `snapcraft` tool, we skip the `snapcraft.yaml`
+  # file. For more information:
+  #
+  #   https://docs.snapcraft.io/snap-format
+  #
+  # Note: unsquashfs'ing an existing snap from the store can be helpful
+  # for determining what you're missing.
+  #
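+  # A hypothetical `meta` sketch; all field values below are placeholders
+  # and the full schema is described in the snap-format documentation:
+  #
+  #   {
+  #     name = "hello";
+  #     summary = "GNU Hello";
+  #     description = "GNU Hello built by Nix";
+  #     architectures = [ "amd64" ];
+  #     confinement = "strict";
+  #     apps.hello.command = "hello";
+  #   }
+  #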
+  meta
+}: let
+    snap_yaml = let
+      # Validate the snap's meta contains a name.
+      # Also: automatically set the `base` parameter and the layout for
+      # the `/nix` bind.
+      validate = { name, ... } @ args:
+        args // {
+          # Combine the provided arguments with the required options.
+
+          # base: built from https://github.com/NixOS/snapd-nix-base
+          # and published as The NixOS Foundation on the Snapcraft store.
+          base = "nix-base";
+          layout = (args.layout or {}) // {
+            # Bind mount the Snap's root nix directory to `/nix` in the
+            # execution environment's filesystem namespace.
+            "/nix".bind = "$SNAP/nix";
+          };
+        };
+    in writeText "snap.yaml"
+      (builtins.toJSON (validate meta));
+
+  # These are specifically required by snapd, so don't change them
+  # unless you've verified snapcraft / snapd can handle them. Best bet
+  # is to just mirror this list against how snapcraft creates images.
+  # from: https://github.com/snapcore/snapcraft/blob/b88e378148134383ffecf3658e3a940b67c9bcc9/snapcraft/internal/lifecycle/_packer.py#L96-L98
+  mksquashfs_args = [
+    "-noappend" "-comp" "xz" "-no-xattrs" "-no-fragments"
+
+    # Note: We want -all-root every time, since all the files are
+    # owned by root anyway. This is true for Nix, but not true for
+    # other builds.
+    # from: https://github.com/snapcore/snapcraft/blob/b88e378148134383ffecf3658e3a940b67c9bcc9/snapcraft/internal/lifecycle/_packer.py#L100
+    "-all-root"
+  ];
+
+in runCommand "squashfs.img" {
+  nativeBuildInputs = [ squashfsTools jq ];
+
+  closureInfo = closureInfo {
+    rootPaths = [ snap_yaml ];
+  };
+} ''
+  root=$PWD/root
+  mkdir $root
+
+  (
+    # Put the snap.yaml in to `/meta/snap.yaml`, setting the version
+    # to the hash part of the store path
+    mkdir $root/meta
+    version=$(echo $out | cut -d/ -f4 | cut -d- -f1)
+    cat ${snap_yaml} | jq  ". + { version: \"$version\" }" \
+      > $root/meta/snap.yaml
+  )
+
+  (
+    # Copy the store closure in to the root
+    mkdir -p $root/nix/store
+    cat $closureInfo/store-paths | xargs -I{} cp -r {} $root/nix/store/
+  )
+
+  # Generate the squashfs image.
+  mksquashfs $root $out \
+    ${lib.concatStringsSep " " mksquashfs_args}
+''
diff --git a/nixpkgs/pkgs/build-support/source-from-head-fun.nix b/nixpkgs/pkgs/build-support/source-from-head-fun.nix
new file mode 100644
index 000000000000..938df1efd18a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/source-from-head-fun.nix
@@ -0,0 +1,16 @@
+/*
+   Purpose: maintain bleeding-edge HEAD sources.
+
+   You run
+     app --update
+     app --publish
+   to create source snapshots.
+
+   The documentation is available at https://github.com/MarcWeber/nix-repository-manager/raw/master/README
+
+*/
+{ config }:
+  localTarName: publishedSrcSnapshot:
+  if config.sourceFromHead.useLocalRepos or false then
+    "${config.sourceFromHead.managedRepoDir or "/set/sourceFromHead.managedRepoDir/please"}/dist/${localTarName}"
+  else publishedSrcSnapshot
diff --git a/nixpkgs/pkgs/build-support/src-only/default.nix b/nixpkgs/pkgs/build-support/src-only/default.nix
new file mode 100644
index 000000000000..c721fdf40c69
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/src-only/default.nix
@@ -0,0 +1,31 @@
+{ stdenv }@orig:
+# srcOnly is a utility builder that only fetches and unpacks the given `src`,
+# optionally patching it in the process with the `patches` and
+# `buildInputs` attributes.
+#
+# It can be invoked directly, or be used to wrap an existing derivation. Eg:
+#
+# > srcOnly pkgs.hello
+#
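+# or invoked directly with an attribute set (a hypothetical sketch; the
+# name, src and patch below are placeholders):
+#
+# > srcOnly {
+# >   name = "hello-src";
+# >   src = pkgs.hello.src;
+# >   patches = [ ./my.patch ];
+# > }
+#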
+{ name
+, src
+, stdenv ? orig.stdenv
+, patches ? []
+, # deprecated, use nativeBuildInputs instead
+  buildInputs ? []
+, # used to pass extra unpackers
+  nativeBuildInputs ? []
+, # needed when passing an existing derivation
+  ...
+}:
+stdenv.mkDerivation {
+  inherit
+    buildInputs
+    name
+    nativeBuildInputs
+    patches
+    src
+    ;
+  installPhase = "cp -r . $out";
+  phases = ["unpackPhase" "patchPhase" "installPhase"];
+}
diff --git a/nixpkgs/pkgs/build-support/substitute-files/substitute-all-files.nix b/nixpkgs/pkgs/build-support/substitute-files/substitute-all-files.nix
new file mode 100644
index 000000000000..682e976dcfe5
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/substitute-files/substitute-all-files.nix
@@ -0,0 +1,26 @@
+{ lib, stdenv }:
+
+args:
+
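+# A hypothetical invocation sketch; the paths and variable name below
+# are placeholders. Every file matched by the globs in `files` (relative
+# to `src`) is copied to $out with @myVar@-style placeholders replaced
+# by the corresponding derivation attributes:
+#
+#   substituteAllFiles {
+#     src = ./templates;
+#     files = [ "etc" "bin/hello.sh" ];
+#     myVar = "some value";   # replaces @myVar@ in the files
+#   }
+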
+stdenv.mkDerivation ({
+  name = if args ? name then args.name else baseNameOf (toString args.src);
+  builder = builtins.toFile "builder.sh" ''
+    source $stdenv/setup
+    set -o pipefail
+
+    eval "$preInstall"
+
+    args=
+
+    pushd "$src"
+    echo -ne "${lib.concatStringsSep "\\0" args.files}" | xargs -0 -n1 -I {} -- find {} -type f -print0 | while read -d "" line; do
+      mkdir -p "$out/$(dirname "$line")"
+      substituteAll "$line" "$out/$line"
+    done
+    popd
+
+    eval "$postInstall"
+  '';
+  preferLocalBuild = true;
+  allowSubstitutes = false;
+} // args)
diff --git a/nixpkgs/pkgs/build-support/substitute/substitute-all.nix b/nixpkgs/pkgs/build-support/substitute/substitute-all.nix
new file mode 100644
index 000000000000..57b160bbe901
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/substitute/substitute-all.nix
@@ -0,0 +1,12 @@
+{ stdenvNoCC }:
+
+args:
+
+# see substituteAll in the nixpkgs documentation for usage and constraints
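+#
+# A hypothetical sketch; the file and attribute below are placeholders.
+# Every @greeting@ placeholder in the source file is replaced with the
+# value of the `greeting` attribute:
+#
+#   substituteAll {
+#     src = ./greeting.txt.in;
+#     greeting = "Hello, world";
+#   }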
+stdenvNoCC.mkDerivation ({
+  name = if args ? name then args.name else baseNameOf (toString args.src);
+  builder = ./substitute-all.sh;
+  inherit (args) src;
+  preferLocalBuild = true;
+  allowSubstitutes = false;
+} // args)
diff --git a/nixpkgs/pkgs/build-support/substitute/substitute-all.sh b/nixpkgs/pkgs/build-support/substitute/substitute-all.sh
new file mode 100644
index 000000000000..ec220481fcc0
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/substitute/substitute-all.sh
@@ -0,0 +1,19 @@
+source $stdenv/setup
+
+eval "$preInstall"
+
+args=
+
+target=$out
+if test -n "$dir"; then
+    target=$out/$dir/$name
+    mkdir -p $out/$dir
+fi
+
+substituteAll $src $target
+
+if test -n "$isExecutable"; then
+    chmod +x $target
+fi
+
+eval "$postInstall"
diff --git a/nixpkgs/pkgs/build-support/templaterpm/default.nix b/nixpkgs/pkgs/build-support/templaterpm/default.nix
new file mode 100644
index 000000000000..efe70efe6c44
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/templaterpm/default.nix
@@ -0,0 +1,25 @@
+{lib, stdenv, makeWrapper, python, toposort, rpm}:
+
+stdenv.mkDerivation {
+  pname = "nix-template-rpm";
+  version = "0.1";
+
+  nativeBuildInputs = [ makeWrapper ];
+  buildInputs = [ python toposort rpm ];
+
+  phases = [ "installPhase" "fixupPhase" ];
+
+  installPhase = ''
+    mkdir -p $out/bin
+    cp ${./nix-template-rpm.py} $out/bin/nix-template-rpm
+    wrapProgram $out/bin/nix-template-rpm \
+      --set PYTHONPATH "${rpm}/lib/${python.libPrefix}/site-packages":"${toposort}/lib/${python.libPrefix}/site-packages"
+    '';
+
+  meta = with lib; {
+    description = "Create templates of nix expressions from RPM .spec files";
+    maintainers = with maintainers; [ tstrobel ];
+    platforms = platforms.unix;
+    hydraPlatforms = [];
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/templaterpm/nix-template-rpm.py b/nixpkgs/pkgs/build-support/templaterpm/nix-template-rpm.py
new file mode 100755
index 000000000000..db8c0f2064c2
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/templaterpm/nix-template-rpm.py
@@ -0,0 +1,518 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import subprocess
+import argparse
+import re
+import shutil
+import rpm
+import urlparse
+import traceback
+import toposort
+
+
+
+
+
+class SPECTemplate(object):
+  def __init__(self, specFilename, outputDir, inputDir=None, buildRootInclude=None, translateTable=None, repositoryDir=None, allPackagesDir=None, maintainer="MAINTAINER"):
+    rpm.addMacro("buildroot","$out")
+    rpm.addMacro("_libdir","lib")
+    rpm.addMacro("_libexecdir","libexec")
+    rpm.addMacro("_sbindir","sbin")
+    rpm.addMacro("_sysconfdir","etc")
+    rpm.addMacro("_topdir","SPACER_DIR_FOR_REMOVAL")
+    rpm.addMacro("_sourcedir","SOURCE_DIR_SPACER")
+
+    self.packageGroups = [ "ocaml", "python" ]
+
+    ts = rpm.TransactionSet()
+
+    self.specFilename = specFilename
+    self.spec = ts.parseSpec(specFilename)
+
+    self.inputDir = inputDir
+    self.buildRootInclude = buildRootInclude
+    self.repositoryDir = repositoryDir
+    self.allPackagesDir = allPackagesDir
+    self.maintainer = maintainer
+
+    self.translateTable = translateTable
+
+    self.facts = self.getFacts()
+    self.key = self.getSelfKey()
+
+    tmpDir = os.path.join(outputDir, self.rewriteName(self.spec.sourceHeader['name']))
+    if self.translateTable is not None:
+      self.relOutputDir = self.translateTable.path(self.key,tmpDir)
+    else:
+      self.relOutputDir = tmpDir
+
+    self.final_output_dir = os.path.normpath( self.relOutputDir )
+
+    if self.repositoryDir is not None:
+      self.potential_repository_dir = os.path.normpath( os.path.join(self.repositoryDir,self.relOutputDir) )
+
+
+
+  def rewriteCommands(self, string):
+    string = string.replace('SPACER_DIR_FOR_REMOVAL/','')
+    string = string.replace('SPACER_DIR_FOR_REMOVAL','')
+    sourceRef = '${./' if self.buildRootInclude is None else '${buildRoot}/usr/share/buildroot/SOURCES/'
+    sourceRefClose = '}' if self.buildRootInclude is None else ''
+    # rewrite SOURCE_DIR_SPACER/ tokens into source references, token by token
+    rewriteToken = lambda x: x.replace('SOURCE_DIR_SPACER/', sourceRef) + sourceRefClose if x.startswith('SOURCE_DIR_SPACER/') else x
+    string = '\n'.join(' '.join(map(rewriteToken, line.split(' '))) for line in string.split('\n'))
+    string = string.replace('\n','\n    ')
+    string = string.rstrip()
+    return string
+
+
+  def rewriteName(self, string):
+    parts = string.split('-')
+    parts = filter(lambda x: not x == "devel", parts)
+    parts = filter(lambda x: not x == "doc", parts)
+    if len(parts) > 1 and parts[0] in self.packageGroups:
+      return parts[0] + '-' + ''.join(parts[1:2] + map(lambda x: x.capitalize(), parts[2:]))
+    else:
+      return ''.join(parts[:1] + map(lambda x: x.capitalize(), parts[1:]))
+
+
+  def rewriteInputs(self,target,inputs):
+    camelcase = lambda l: l[:1] + map(lambda x: x.capitalize(), l[1:])
+    filterDevel = lambda l: filter(lambda x: not x == "devel", l)
+    filterDoc = lambda l: filter(lambda x: not x == "doc", l)
+    rewrite = lambda l: ''.join(camelcase(filterDoc(filterDevel(l))))
+
+    def filterPackageGroup(target):
+      if target is None:
+        return [ rewrite(x.split('-')) for x in inputs if (not x.split('-')[0] in self.packageGroups) or (len(x.split('-')) == 1) ]
+      elif target in self.packageGroups:
+        return [ target + '_' + rewrite(x.split('-')[1:]) for x in inputs if (x.split('-')[0] == target) and (len(x.split('-')) > 1)]
+      else:
+        raise Exception("Unknown target")
+
+    if target is None:
+      packages = filterPackageGroup(None)
+      packages.sort()
+    elif target in self.packageGroups:
+      packages = filterPackageGroup(target)
+      packages.sort()
+    elif target == "ALL":
+      packages = []
+      for t in [None] + self.packageGroups:
+        tmp = filterPackageGroup(t)
+        tmp.sort()
+        packages += tmp
+    else:
+      raise Exception("Unknown target")
+
+    return packages
+
+
+  def getBuildInputs(self,target=None):
+    inputs = self.rewriteInputs(target,self.spec.sourceHeader['requires'])
+    if self.translateTable is not None:
+      return map(lambda x: self.translateTable.name(x), inputs)
+    else:
+      return inputs
+
+  def getSelfKey(self):
+    name = self.spec.sourceHeader['name']
+    if len(name.split('-')) > 1 and name.split('-')[0] in self.packageGroups:
+      key = self.rewriteInputs(name.split('-')[0], [self.spec.sourceHeader['name']])[0]
+    else:
+      key = self.rewriteInputs(None, [self.spec.sourceHeader['name']])[0]
+    return key
+
+  def getSelf(self):
+    if self.translateTable is not None:
+      return self.translateTable.name(self.key)
+    else:
+      return self.key
+
+
+
+
+  def copyPatches(self, input_dir, output_dir):
+    patches = [source for (source, _, flag) in self.spec.sources if flag==2]
+    for filename in patches:
+      shutil.copyfile(os.path.join(input_dir, filename), os.path.join(output_dir, filename))
+
+
+  def copySources(self, input_dir, output_dir):
+    filenames = [source for (source, _, flag) in self.spec.sources if flag==1 if not urlparse.urlparse(source).scheme in ["http", "https"] ]
+    for filename in filenames:
+      shutil.copyfile(os.path.join(input_dir, filename), os.path.join(output_dir, filename))
+
+
+  def getFacts(self):
+    facts = {}
+    facts["name"] = self.rewriteName(self.spec.sourceHeader['name'])
+    facts["version"] = self.spec.sourceHeader['version']
+
+    facts["url"] = []
+    facts["sha256"] = []
+    sources = [source for (source, _, flag) in self.spec.sources if flag==1 if urlparse.urlparse(source).scheme in ["http", "https"] ]
+    for url in sources:
+      p = subprocess.Popen(['nix-prefetch-url', url], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+      output, err = p.communicate()
+      sha256 = output[:-1] #remove new line
+      facts["url"].append(url)
+      facts["sha256"].append(sha256)
+
+    patches = [source for (source, _, flag) in self.spec.sources if flag==2]
+    if self.buildRootInclude is None:
+      facts["patches"] = map(lambda x: './'+x, patches)
+    else:
+      facts["patches"] = map(lambda x: '"${buildRoot}/usr/share/buildroot/SOURCES/'+x+'"', reversed(patches))
+
+    return facts
+
+
+  @property
+  def name(self):
+    out = '  name = "' + self.facts["name"] + '-' + self.facts["version"] + '";\n'
+    out += '  version = "' + self.facts['version'] + '";\n'
+    return out
+
+
+  @property
+  def src(self):
+    sources = [source for (source, _, flag) in self.spec.sources if flag==1 if urlparse.urlparse(source).scheme in ["http", "https"] ]
+    out = ''
+    for (url,sha256) in zip(self.facts['url'],self.facts['sha256']):
+      out += '  src = fetchurl {\n'
+      out += '    url = "' + url + '";\n'
+      out += '    sha256 = "' + sha256 + '";\n'
+      out += '  };\n'
+    return out
+
+
+  @property
+  def patch(self):
+    out = '  patches = [ ' + ' '.join(self.facts['patches']) + ' ];\n'
+    return out
+
+
+  @property
+  def buildInputs(self):
+    out = '  buildInputs = [ '
+    out += ' '.join(self.getBuildInputs("ALL"))
+    out += ' ];\n'
+    return out
+
+
+  @property
+  def configure(self):
+    out = '  configurePhase = \'\'\n    ' + self.rewriteCommands(self.spec.prep) + '\n    \'\';\n'
+    return out
+
+
+  @property
+  def build(self):
+    out = '  buildPhase = \'\'\n    ' + self.rewriteCommands(self.spec.build) + '\n    \'\';\n'
+    return out
+
+
+  @property
+  def install(self):
+    out = '  installPhase = \'\'\n    ' + self.rewriteCommands(self.spec.install) + '\n    \'\';\n'
+    return out
+
+  @property
+  def ocamlExtra(self):
+    if "ocaml" in self.getBuildInputs("ALL"):
+      return '  createFindlibDestdir = true;\n'
+    else:
+      return ''
+
+
+  @property
+  def meta(self):
+    out = '  meta = with lib; {\n'
+    out += '    homepage = ' + self.spec.sourceHeader['url'] + ';\n'
+    out += '    description = "' + self.spec.sourceHeader['summary'] + '";\n'
+    out += '    license = lib.licenses.' + self.spec.sourceHeader['license'] + ';\n'
+    out += '    platforms = [ "i686-linux" "x86_64-linux" ];\n'
+    out += '    maintainers = with lib.maintainers; [ ' + self.maintainer + ' ];\n'
+    out += '  };\n'
+    out += '}\n'
+    return out
+
+
+  def __str__(self):
+    head = '{lib, stdenv, fetchurl, ' + ', '.join(self.getBuildInputs("ALL")) + '}:\n\n'
+    head += 'stdenv.mkDerivation {\n'
+    body = [ self.name, self.src, self.patch, self.buildInputs, self.configure, self.build, self.ocamlExtra, self.install, self.meta ]
+    return head + '\n'.join(body)
+
+
+  def getTemplate(self):
+    head = '{lib, stdenv, buildRoot, fetchurl, ' + ', '.join(self.getBuildInputs("ALL")) + '}:\n\n'
+    head += 'let\n'
+    head += '  buildRootInput = (import "${buildRoot}/usr/share/buildroot/buildRootInput.nix") { fetchurl=fetchurl; buildRoot=buildRoot; };\n'
+    head += 'in\n\n'
+    head += 'stdenv.mkDerivation {\n'
+    head += '  inherit (buildRootInput.'+self.rewriteName(self.spec.sourceHeader['name'])+') name version src;\n'
+    head += '  patches = buildRootInput.'+self.rewriteName(self.spec.sourceHeader['name'])+'.patches ++ [];\n\n'
+    body = [ self.buildInputs, self.configure, self.build, self.ocamlExtra, self.install, self.meta ]
+    return head + '\n'.join(body)
+
+
+  def getInclude(self):
+    head = self.rewriteName(self.spec.sourceHeader['name']) + ' = {\n'
+    body = [ self.name, self.src, self.patch ]
+    return head + '\n'.join(body) + '};\n'
+
+
+  def __cmp__(self,other):
+    if self.getSelf() in other.getBuildInputs("ALL"):
+      return 1
+    else:
+      return -1
+
+
+  def callPackage(self):
+    callPackage = '  ' + self.getSelf() + ' = callPackage ' + os.path.relpath(self.final_output_dir, self.allPackagesDir) + ' {'
+    newline = False
+    for target in self.packageGroups:
+      tmp = self.getBuildInputs(target)
+      if len(tmp) > 0:
+        newline = True
+        callPackage += '\n    ' + 'inherit (' + target + 'Packages) ' + ' '.join(tmp) + ';'
+    if newline:
+      callPackage += '\n  };'
+    else:
+      callPackage += ' };'
+    return callPackage
+
+
+
+  def generateCombined(self):
+    if not os.path.exists(self.final_output_dir):
+      os.makedirs(self.final_output_dir)
+
+    if self.inputDir is not None:
+      self.copySources(self.inputDir, self.final_output_dir)
+      self.copyPatches(self.inputDir, self.final_output_dir)
+
+    nixfile = open(os.path.join(self.final_output_dir,'default.nix'), 'w')
+    nixfile.write(str(self))
+    nixfile.close()
+
+    shutil.copyfile(self.specFilename, os.path.join(self.final_output_dir, os.path.basename(self.specFilename)))
+
+
+
+  def generateSplit(self):
+    if not os.path.exists(self.final_output_dir):
+      os.makedirs(self.final_output_dir)
+
+    nixfile = open(os.path.join(self.final_output_dir,'default.nix'), 'w')
+    nixfile.write(self.getTemplate())
+    nixfile.close()
+
+    return self.getInclude()
+
+
+
+
+
+
+class NixTemplate(object):
+  def __init__(self, nixfile):
+    self.nixfile = nixfile
+    self.original = { "name":None, "version":None, "url":None, "sha256":None, "patches":None }
+    self.update = { "name":None, "version":None, "url":None, "sha256":None, "patches":None }
+    self.matchedLines = {}
+
+    if os.path.isfile(nixfile):
+      with file(nixfile, 'r') as infile:
+        for (n,line) in enumerate(infile):
+          name = re.match(r'^\s*name\s*=\s*"(.*?)"\s*;\s*$', line)
+          version = re.match(r'^\s*version\s*=\s*"(.*?)"\s*;\s*$', line)
+          url = re.match(r'^\s*url\s*=\s*"?(.*?)"?\s*;\s*$', line)
+          sha256 = re.match(r'^\s*sha256\s*=\s*"(.*?)"\s*;\s*$', line)
+          patches = re.match(r'^\s*patches\s*=\s*(\[.*?\])\s*;\s*$', line)
+          if name is not None and self.original["name"] is None:
+              self.original["name"] = name.group(1)
+              self.matchedLines[n] = "name"
+          if version is not None and self.original["version"] is None:
+              self.original["version"] = version.group(1)
+              self.matchedLines[n] = "version"
+          if url is not None and self.original["url"] is None:
+              self.original["url"] = url.group(1)
+              self.matchedLines[n] = "url"
+          if sha256 is not None and self.original["sha256"] is None:
+              self.original["sha256"] = sha256.group(1)
+              self.matchedLines[n] = "sha256"
+          if patches is not None and self.original["patches"] is None:
+              self.original["patches"] = patches.group(1)
+              self.matchedLines[n] = "patches"
+
+
+  def generateUpdated(self, nixOut):
+    nixTemplateFile = open(os.path.normpath(self.nixfile),'r')
+    nixOutFile = open(os.path.normpath(nixOut),'w')
+    for (n,line) in enumerate(nixTemplateFile):
+      if self.matchedLines.has_key(n) and self.update[self.matchedLines[n]] is not None:
+        nixOutFile.write(line.replace(self.original[self.matchedLines[n]], self.update[self.matchedLines[n]], 1))
+      else:
+        nixOutFile.write(line)
+    nixTemplateFile.close()
+    nixOutFile.close()
+
+
+  def loadUpdate(self,orig):
+    if orig.has_key("name") and orig.has_key("version"):
+      self.update["name"] = orig["name"] + '-' + orig["version"]
+      self.update["version"] = orig["version"]
+    if orig.has_key("url") and orig.has_key("sha256") and len(orig["url"])>0:
+      self.update["url"] = orig["url"][0]
+      self.update["sha256"] = orig["sha256"][0]
+      for url in orig["url"][1:-1]:
+        sys.stderr.write("WARNING: URL has been dropped: %s\n" % url)
+    if orig.has_key("patches"):
+      self.update["patches"] = '[ ' + ' '.join(orig['patches']) + ' ]'
+
+
+class TranslationTable(object):
+  def __init__(self):
+    self.tablePath = {}
+    self.tableName = {}
+
+  def update(self, key, path, name=None):
+    self.tablePath[key] = path
+    if name is not None:
+      self.tableName[key] = name
+
+  def readTable(self, tableFile):
+    with file(tableFile, 'r') as infile:
+      for line in infile:
+        match = re.match(r'^(.+?)\s+(.+?)\s+(.+?)\s*$', line)
+        if match is not None:
+          if not self.tablePath.has_key(match.group(1)):
+            self.tablePath[match.group(1)] = match.group(2)
+          if not self.tableName.has_key(match.group(1)):
+            self.tableName[match.group(1)] = match.group(3)
+        else:
+          match = re.match(r'^(.+?)\s+(.+?)\s*$', line)
+          if match is not None and not self.tablePath.has_key(match.group(1)):
+            self.tablePath[match.group(1)] = match.group(2)
+
+  def writeTable(self, tableFile):
+    outFile = open(os.path.normpath(tableFile),'w')
+    keys = self.tablePath.keys()
+    keys.sort()
+    for k in keys:
+      if self.tableName.has_key(k):
+        outFile.write( k + " " + self.tablePath[k] + " " + self.tableName[k] + "\n" )
+      else:
+        outFile.write( k + " " + self.tablePath[k] + "\n" )
+    outFile.close()
+
+  def name(self, key):
+   if self.tableName.has_key(key):
+     return self.tableName[key]
+   else:
+     return key
+
+  def path(self, key, orig):
+   if self.tablePath.has_key(key):
+     return self.tablePath[key]
+   else:
+     return orig
+
+
+
+
+
+if __name__ == "__main__":
+    #Parse command line options
+    parser = argparse.ArgumentParser(description="Generate .nix templates from RPM spec files")
+    parser.add_argument("specs", metavar="SPEC", nargs="+", help="spec file")
+    parser.add_argument("-o", "--output", metavar="OUT_DIR", required=True, help="output directory")
+    parser.add_argument("-b", "--buildRoot", metavar="BUILDROOT_DIR", default=None, help="buildroot output directory")
+    parser.add_argument("-i", "--inputSources", metavar="IN_DIR", default=None, help="sources input directory")
+    parser.add_argument("-m", "--maintainer", metavar="MAINTAINER", default="__NIX_MAINTAINER__", help="package maintainer")
+    parser.add_argument("-r", "--repository", metavar="REP_DIR", default=None, help="nix repository to compare output against")
+    parser.add_argument("-t", "--translate", metavar="TRANSLATE_TABLE", default=None, help="path of translation table for name and path")
+    parser.add_argument("-u", "--translateOut", metavar="TRANSLATE_OUT", default=None, help="output path for updated translation table")
+    parser.add_argument("-a", "--allPackages", metavar="ALL_PACKAGES", default=None, help="top level dir to call packages from")
+    args = parser.parse_args()
+
+    allPackagesDir = os.path.normpath( os.path.dirname(args.allPackages) )
+    if not os.path.exists(allPackagesDir):
+      os.makedirs(allPackagesDir)
+
+    buildRootContent = {}
+    nameMap = {}
+
+    newTable = TranslationTable()
+    if args.translate is not None:
+      table = TranslationTable()
+      table.readTable(args.translate)
+      newTable.readTable(args.translate)
+    else:
+      table = None
+
+    for specPath in args.specs:
+      try:
+        sys.stderr.write("INFO: generate nix file from: %s\n" % specPath)
+
+        spec = SPECTemplate(specPath, args.output, args.inputSources, args.buildRoot, table, args.repository, allPackagesDir, args.maintainer)
+        if args.repository is not None:
+          if os.path.exists(os.path.join(spec.potential_repository_dir,'default.nix')):
+            nixTemplate = NixTemplate(os.path.join(spec.potential_repository_dir,'default.nix'))
+            nixTemplate.loadUpdate(spec.facts)
+            if not os.path.exists(spec.final_output_dir):
+              os.makedirs(spec.final_output_dir)
+            nixTemplate.generateUpdated(os.path.join(spec.final_output_dir,'default.nix'))
+          else:
+            sys.stderr.write("WARNING: Repository does not contain template: %s\n" % os.path.join(spec.potential_repository_dir,'default.nix'))
+            if args.buildRoot is None:
+              spec.generateCombined()
+            else:
+              buildRootContent[spec.key] = spec.generateSplit()
+        else:
+          if args.buildRoot is None:
+            spec.generateCombined()
+          else:
+            buildRootContent[spec.key] = spec.generateSplit()
+
+        newTable.update(spec.key,spec.relOutputDir,spec.getSelf())
+        nameMap[spec.getSelf()] = spec
+
+      except Exception, e:
+        sys.stderr.write("ERROR: %s failed with:\n%s\n%s\n" % (specPath,e.message,traceback.format_exc()))
+
+    if args.translateOut is not None:
+      if not os.path.exists(os.path.dirname(os.path.normpath(args.translateOut))):
+        os.makedirs(os.path.dirname(os.path.normpath(args.translateOut)))
+      newTable.writeTable(args.translateOut)
+
+    graph = {}
+    for k, v in nameMap.items():
+      graph[k] = set(v.getBuildInputs("ALL"))
+
+    sortedSpecs = toposort.toposort_flatten(graph)
+    sortedSpecs = filter( lambda x: x in nameMap.keys(), sortedSpecs)
+
+    allPackagesFile = open(os.path.normpath( args.allPackages ), 'w')
+    allPackagesFile.write( '\n\n'.join(map(lambda x: x.callPackage(), map(lambda x: nameMap[x], sortedSpecs))) )
+    allPackagesFile.close()
+
+    if args.buildRoot is not None:
+      buildRootFilename = os.path.normpath( args.buildRoot )
+      if not os.path.exists(os.path.dirname(buildRootFilename)):
+        os.makedirs(os.path.dirname(buildRootFilename))
+      buildRootFile = open(buildRootFilename, 'w')
+      buildRootFile.write( "{ fetchurl, buildRoot }: {\n\n" )
+      keys = buildRootContent.keys()
+      keys.sort()
+      for k in keys:
+        buildRootFile.write( buildRootContent[k] + '\n' )
+      buildRootFile.write( "}\n" )
+      buildRootFile.close()
+
+
diff --git a/nixpkgs/pkgs/build-support/trivial-builders.nix b/nixpkgs/pkgs/build-support/trivial-builders.nix
new file mode 100644
index 000000000000..219f808403cc
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/trivial-builders.nix
@@ -0,0 +1,622 @@
+{ lib, stdenv, stdenvNoCC, lndir, runtimeShell }:
+
+rec {
+
+  /* Run the shell command `buildCommand' to produce a store path named
+  * `name'.  The attributes in `env' are added to the environment
+  * prior to running the command. By default `runCommand` runs in a
+  * stdenv with no compiler environment. `runCommandCC` uses the default
+  * stdenv, `pkgs.stdenv`.
+  *
+  * Examples:
+  * runCommand "name" {envVariable = true;} ''echo hello > $out''
+  * runCommandNoCC "name" {envVariable = true;} ''echo hello > $out'' # equivalent to prior
+  * runCommandCC "name" {} ''gcc -o myfile myfile.c; cp myfile $out'';
+  *
+  * The `*Local` variants force a derivation to be built locally;
+  * it is not substituted.
+  *
+  * This is intended for very cheap commands (<1s execution time).
+  * It saves on the network round trip and can speed up a build.
+  *
+  * It is the same as adding the special fields
+  * `preferLocalBuild = true;`
+  * `allowSubstitutes = false;`
+  * to a derivation’s attributes.
+  */
+  runCommand = runCommandNoCC;
+  runCommandLocal = runCommandNoCCLocal;
+
+  runCommandNoCC = name: env: runCommandWith {
+    stdenv = stdenvNoCC;
+    runLocal = false;
+    inherit name;
+    derivationArgs = env;
+  };
+  runCommandNoCCLocal = name: env: runCommandWith {
+    stdenv = stdenvNoCC;
+    runLocal = true;
+    inherit name;
+    derivationArgs = env;
+  };
+
+  runCommandCC = name: env: runCommandWith {
+    stdenv = stdenv;
+    runLocal = false;
+    inherit name;
+    derivationArgs = env;
+  };
+  # `runCommandCCLocal` left out on purpose.
+  # We shouldn’t force the user to have a cc in scope.
+
+  /* Generalized version of the `runCommand`-variants
+   * which does customized behavior via a single
+   * attribute set passed as the first argument
+   * instead of having a lot of variants like
+   * `runCommand*`. Additionally it allows changing
+   * the used `stdenv` freely and has a more explicit
+   * approach to changing the arguments passed to
+   * `stdenv.mkDerivation`.
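+   *
+   * Example (a hypothetical sketch; the name and environment variable
+   * are placeholders):
+   *
+   * runCommandWith {
+   *   stdenv = stdenvNoCC;
+   *   runLocal = true;
+   *   name = "my-file";
+   *   derivationArgs = { greeting = "hello"; };
+   * } ''echo "$greeting" > $out''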
+   */
+  runCommandWith =
+    let
+      # prevent infinite recursion for the default stdenv value
+      defaultStdenv = stdenv;
+    in
+    { stdenv ? defaultStdenv
+    # which stdenv to use, defaults to a stdenv with a C compiler, pkgs.stdenv
+    , runLocal ? false
+    # whether to build this derivation locally instead of substituting
+    , derivationArgs ? {}
+    # extra arguments to pass to stdenv.mkDerivation
+    , name
+    # name of the resulting derivation
+    }: buildCommand:
+    stdenv.mkDerivation ({
+      name = lib.strings.sanitizeDerivationName name;
+      inherit buildCommand;
+      passAsFile = [ "buildCommand" ]
+        ++ (derivationArgs.passAsFile or []);
+    }
+    // (lib.optionalAttrs runLocal {
+          preferLocalBuild = true;
+          allowSubstitutes = false;
+       })
+    // builtins.removeAttrs derivationArgs [ "passAsFile" ]);
+
+
+  /* Writes a text file to the nix store.
+   * The contents of text is added to the file in the store.
+   *
+   * Examples:
+   * # Writes my-file to /nix/store/<store path>
+   * writeTextFile {
+   *   name = "my-file";
+   *   text = ''
+   *     Contents of File
+   *   '';
+   * }
+   * # See also the `writeText` helper function below.
+   *
+   * # Writes executable my-file to /nix/store/<store path>/bin/my-file
+   * writeTextFile {
+   *   name = "my-file";
+   *   text = ''
+   *     Contents of File
+   *   '';
+   *   executable = true;
+   *   destination = "/bin/my-file";
+   * }
+   */
+  writeTextFile =
+    { name # the name of the derivation
+    , text
+    , executable ? false # run chmod +x ?
+    , destination ? ""   # relative path appended to $out eg "/bin/foo"
+    , checkPhase ? ""    # syntax checks, e.g. for scripts
+    }:
+    runCommand name
+      { inherit text executable;
+        passAsFile = [ "text" ];
+        # Pointless to do this on a remote machine.
+        preferLocalBuild = true;
+        allowSubstitutes = false;
+      }
+      ''
+        n=$out${destination}
+        mkdir -p "$(dirname "$n")"
+
+        if [ -e "$textPath" ]; then
+          mv "$textPath" "$n"
+        else
+          echo -n "$text" > "$n"
+        fi
+
+        ${checkPhase}
+
+        (test -n "$executable" && chmod +x "$n") || true
+      '';
+
+  /*
+   * Writes a text file to nix store with no optional parameters available.
+   *
+   * Example:
+   * # Writes contents of file to /nix/store/<store path>
+   * writeText "my-file"
+   *   ''
+   *   Contents of File
+   *   '';
+   *
+  */
+  writeText = name: text: writeTextFile {inherit name text;};
+
+  /*
+   * Writes a text file to nix store in a specific directory with no
+   * optional parameters available.
+   *
+   * Example:
+   * # Writes contents of file to /nix/store/<store path>/share/my-file
+   * writeTextDir "share/my-file"
+   *   ''
+   *   Contents of File
+   *   '';
+   *
+  */
+  writeTextDir = path: text: writeTextFile {
+    inherit text;
+    name = builtins.baseNameOf path;
+    destination = "/${path}";
+  };
+
+  /*
+   * Writes a text file to /nix/store/<store path> and marks the file as
+   * executable.
+   *
+   * If passed as a build input, will be used as a setup hook. This makes setup
+   * hooks more efficient to create: you don't need a derivation that copies
+   * them to $out/nix-support/setup-hook; instead you can use the file as is.
+   *
+   * Example:
+   * # Writes my-file to /nix/store/<store path> and makes executable
+   * writeScript "my-file"
+   *   ''
+   *   Contents of File
+   *   '';
+   *
+  */
+  writeScript = name: text: writeTextFile {inherit name text; executable = true;};
+
+  /*
+   * Writes a text file to /nix/store/<store path>/bin/<name> and
+   * marks the file as executable.
+   *
+   * Example:
+   * # Writes my-file to /nix/store/<store path>/bin/my-file and makes executable.
+   * writeScriptBin "my-file"
+   *   ''
+   *   Contents of File
+   *   '';
+   *
+  */
+  writeScriptBin = name: text: writeTextFile {inherit name text; executable = true; destination = "/bin/${name}";};
+
+  /*
+   * Similar to writeScript. Writes a shell script and checks its syntax.
+   * Automatically prepends the interpreter line to the contents passed.
+   *
+   * Example:
+   * # Writes my-file to /nix/store/<store path> and makes executable.
+   * writeShellScript "my-file"
+   *   ''
+   *   Contents of File
+   *   '';
+   *
+  */
+  writeShellScript = name: text:
+    writeTextFile {
+      inherit name;
+      executable = true;
+      text = ''
+        #!${runtimeShell}
+        ${text}
+        '';
+      checkPhase = ''
+        ${stdenv.shell} -n $out
+      '';
+    };
+
+  /*
+   * Similar to writeShellScript and writeScriptBin.
+   * Writes an executable shell script to /nix/store/<store path>/bin/<name> and checks its syntax.
+   * Automatically prepends the interpreter line to the contents passed.
+   *
+   * Example:
+   * # Writes my-file to /nix/store/<store path>/bin/my-file and makes executable.
+   * writeShellScriptBin "my-file"
+   *   ''
+   *   Contents of File
+   *   '';
+   *
+  */
+  writeShellScriptBin = name : text :
+    writeTextFile {
+      inherit name;
+      executable = true;
+      destination = "/bin/${name}";
+      text = ''
+        #!${runtimeShell}
+        ${text}
+        '';
+      checkPhase = ''
+        ${stdenv.shell} -n $out/bin/${name}
+      '';
+    };
+
+  # Create a C binary
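+  #
+  # A hypothetical sketch (the program name and source are placeholders):
+  #   writeCBin "hi" ''
+  #     #include <stdio.h>
+  #     int main(void) { printf("hi\n"); return 0; }
+  #   ''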
+  writeCBin = name: code:
+    runCommandCC name
+    {
+      inherit name code;
+      executable = true;
+      passAsFile = ["code"];
+      # Pointless to do this on a remote machine.
+      preferLocalBuild = true;
+      allowSubstitutes = false;
+    }
+    ''
+    n=$out/bin/$name
+    mkdir -p "$(dirname "$n")"
+    mv "$codePath" code.c
+    $CC -x c code.c -o "$n"
+    '';
+
+  /*
+   * Create a forest of symlinks to the files in `paths'.
+   *
+   * This creates a single derivation that replicates the directory structure
+   * of all the input paths.
+   *
+   * BEWARE: it may not "work right" when the passed paths contain symlinks to directories.
+   *
+   * Examples:
+   * # adds symlinks of hello to current build.
+   * symlinkJoin { name = "myhello"; paths = [ pkgs.hello ]; }
+   *
+   * # adds symlinks of hello and stack to current build and prints "links added"
+   * symlinkJoin { name = "myexample"; paths = [ pkgs.hello pkgs.stack ]; postBuild = "echo links added"; }
+   *
+   * This creates a derivation with a directory structure like the following:
+   *
+   * /nix/store/sglsr5g079a5235hy29da3mq3hv8sjmm-myexample
+   * |-- bin
+   * |   |-- hello -> /nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10/bin/hello
+   * |   `-- stack -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/bin/stack
+   * `-- share
+   *     |-- bash-completion
+   *     |   `-- completions
+   *     |       `-- stack -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/share/bash-completion/completions/stack
+   *     |-- fish
+   *     |   `-- vendor_completions.d
+   *     |       `-- stack.fish -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/share/fish/vendor_completions.d/stack.fish
+   * ...
+   *
+   * symlinkJoin and linkFarm are similar functions, but they output
+   * derivations with different structure.
+   *
+   * symlinkJoin is used to create a derivation with a familiar directory
+   * structure (top-level bin/, share/, etc), but with all actual files being symlinks to
+   * the files in the input derivations.
+   *
+   * symlinkJoin is used many places in nixpkgs to create a single derivation
+   * that appears to contain binaries, libraries, documentation, etc from
+   * multiple input derivations.
+   *
+   * linkFarm is instead used to create a simple derivation with symlinks to
+   * other derivations.  A derivation created with linkFarm is often used in CI
+   * as an easy way to build multiple derivations at once.
+   */
+  symlinkJoin =
+    args_@{ name
+         , paths
+         , preferLocalBuild ? true
+         , allowSubstitutes ? false
+         , postBuild ? ""
+         , ...
+         }:
+    let
+      args = removeAttrs args_ [ "name" "postBuild" ]
+        // {
+          inherit preferLocalBuild allowSubstitutes;
+          passAsFile = [ "paths" ];
+        }; # pass the defaults
+    in runCommand name args
+      ''
+        mkdir -p $out
+        for i in $(cat $pathsPath); do
+          ${lndir}/bin/lndir -silent $i $out
+        done
+        ${postBuild}
+      '';
+
+  /*
+   * Quickly create a set of symlinks to derivations.
+   *
+   * This creates a simple derivation with symlinks to all inputs.
+   *
+   * entries is a list of attribute sets like
+   * { name = "name" ; path = "/nix/store/..."; }
+   *
+   * Example:
+   *
+   * # Symlinks hello and stack paths in store to current $out/hello-test and
+   * # $out/foobar.
+   * linkFarm "myexample" [ { name = "hello-test"; path = pkgs.hello; } { name = "foobar"; path = pkgs.stack; } ]
+   *
+   * This creates a derivation with a directory structure like the following:
+   *
+   * /nix/store/qc5728m4sa344mbks99r3q05mymwm4rw-myexample
+   * |-- foobar -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1
+   * `-- hello-test -> /nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10
+   *
+   * See the note on symlinkJoin for the difference between linkFarm and symlinkJoin.
+   */
+  linkFarm = name: entries: runCommand name { preferLocalBuild = true; allowSubstitutes = false; }
+    ''mkdir -p $out
+      cd $out
+      ${lib.concatMapStrings (x: ''
+          mkdir -p "$(dirname ${lib.escapeShellArg x.name})"
+          ln -s ${lib.escapeShellArg x.path} ${lib.escapeShellArg x.name}
+      '') entries}
+    '';
+
+  /*
+   * Easily create a linkFarm from a set of derivations.
+   *
+   * This calls linkFarm with a list of entries created from the list of input
+   * derivations.  It turns each input derivation into an attribute set
+   * like { name = drv.name ; path = drv }, and passes this to linkFarm.
+   *
+   * Example:
+   *
+   * # Symlinks the hello, gcc, and ghc derivations in $out
+   * linkFarmFromDrvs "myexample" [ pkgs.hello pkgs.gcc pkgs.ghc ]
+   *
+   * This creates a derivation with a directory structure like the following:
+   *
+   * /nix/store/m3s6wkjy9c3wy830201bqsb91nk2yj8c-myexample
+   * |-- gcc-wrapper-9.2.0 -> /nix/store/fqhjxf9ii4w4gqcsx59fyw2vvj91486a-gcc-wrapper-9.2.0
+   * |-- ghc-8.6.5 -> /nix/store/gnf3s07bglhbbk4y6m76sbh42siym0s6-ghc-8.6.5
+   * `-- hello-2.10 -> /nix/store/k0ll91c4npk4lg8lqhx00glg2m735g74-hello-2.10
+   */
+  linkFarmFromDrvs = name: drvs:
+    let mkEntryFromDrv = drv: { name = drv.name; path = drv; };
+    in linkFarm name (map mkEntryFromDrv drvs);
+
+
+  /*
+   * Make a package that just contains a setup hook with the given contents.
+   * This setup hook will be invoked by any package that includes this package
+   * as a buildInput. Optionally takes a list of substitutions that should be
+   * applied to the resulting script.
+   *
+   * Examples:
+   * # setup hook that depends on the hello package and runs ./myscript.sh
+   * myhellohook = makeSetupHook { deps = [ hello ]; } ./myscript.sh;
+   *
+   * # writes a setup hook where the @bash@ placeholder in myscript.sh is
+   * # substituted with the bash interpreter path.
+   * myhellohookSub = makeSetupHook {
+   *                 deps = [ hello ];
+   *                 substitutions = { bash = "${pkgs.bash}/bin/bash"; };
+   *               } ./myscript.sh;
+   */
+  makeSetupHook = { name ? "hook", deps ? [], substitutions ? {} }: script:
+    runCommand name substitutions
+      (''
+        mkdir -p $out/nix-support
+        cp ${script} $out/nix-support/setup-hook
+      '' + lib.optionalString (deps != []) ''
+        printWords ${toString deps} > $out/nix-support/propagated-build-inputs
+      '' + lib.optionalString (substitutions != {}) ''
+        substituteAll ${script} $out/nix-support/setup-hook
+      '');
+
+
+  # Write the references (i.e. the runtime dependencies in the Nix store) of `path' to a file.
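+  # A hypothetical sketch (hello is a placeholder input); the resulting
+  # file lists every store path in the closure of the script, one per
+  # line:
+  #
+  #   writeReferencesToFile (writeShellScript "greet" "${hello}/bin/hello")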
+
+  writeReferencesToFile = path: runCommand "runtime-deps"
+    {
+      exportReferencesGraph = ["graph" path];
+    }
+    ''
+      touch $out
+      while read path; do
+        echo $path >> $out
+        read dummy
+        read nrRefs
+        for ((i = 0; i < nrRefs; i++)); do read ref; done
+      done < graph
+    '';
+
+  /*
+    Write the set of direct references of `path`, that is, its immediate dependencies, to a file.
+
+    This produces the equivalent of `nix-store -q --references`.
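+
+    A hypothetical sketch (hello is a placeholder input):
+
+      writeDirectReferencesToFile hello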
+   */
+  writeDirectReferencesToFile = path: runCommand "runtime-references"
+    {
+      exportReferencesGraph = ["graph" path];
+      inherit path;
+    }
+    ''
+      touch ./references
+      while read p; do
+        read dummy
+        read nrRefs
+        if [[ $p == $path ]]; then
+          for ((i = 0; i < nrRefs; i++)); do
+            read ref;
+            echo $ref >>./references
+          done
+        else
+          for ((i = 0; i < nrRefs; i++)); do
+            read ref;
+          done
+        fi
+      done < graph
+      sort ./references >$out
+    '';
+
+
+  /* Print an error message if the file with the specified name and
+   * hash doesn't exist in the Nix store. This function should only
+   * be used by non-redistributable software with an unfree license
+   * that we need to require the user to download manually. It produces
+   * packages that cannot be built automatically.
+   *
+   * Examples:
+   *
+   * requireFile {
+   *   name = "my-file";
+   *   url = "http://example.com/download/";
+   *   sha256 = "ffffffffffffffffffffffffffffffffffffffffffffffffffff";
+   * }
+   */
+  requireFile = { name ? null
+                , sha256 ? null
+                , sha1 ? null
+                , url ? null
+                , message ? null
+                , hashMode ? "flat"
+                } :
+    assert (message != null) || (url != null);
+    assert (sha256 != null) || (sha1 != null);
+    assert (name != null) || (url != null);
+    let msg =
+      if message != null then message
+      else ''
+        Unfortunately, we cannot download file ${name_} automatically.
+        Please go to ${url} to download it yourself, and add it to the Nix store
+        using either
+          nix-store --add-fixed ${hashAlgo} ${name_}
+        or
+          nix-prefetch-url --type ${hashAlgo} file:///path/to/${name_}
+      '';
+      hashAlgo = if sha256 != null then "sha256" else "sha1";
+      hash = if sha256 != null then sha256 else sha1;
+      name_ = if name == null then baseNameOf (toString url) else name;
+    in
+    stdenvNoCC.mkDerivation {
+      name = name_;
+      outputHashMode = hashMode;
+      outputHashAlgo = hashAlgo;
+      outputHash = hash;
+      preferLocalBuild = true;
+      allowSubstitutes = false;
+      builder = writeScript "restrict-message" ''
+        source ${stdenvNoCC}/setup
+        cat <<_EOF_
+
+        ***
+        ${msg}
+        ***
+
+        _EOF_
+        exit 1
+      '';
+    };
+
+
+  # Copy a path to the Nix store.
+  # Nix automatically copies files to the store before stringifying paths.
+  # If you need the store path of a file, ${copyPathToStore <path>} can be
+  # shortened to ${<path>}.
+  copyPathToStore = builtins.filterSource (p: t: true);
+
+
+  # Copy a list of paths to the Nix store.
+  copyPathsToStore = builtins.map copyPathToStore;
+
+  /* Applies a list of patches to a source directory.
+   *
+   * Examples:
+   *
+   * # Patching nixpkgs:
+   * applyPatches {
+   *   src = pkgs.path;
+   *   patches = [
+   *     (pkgs.fetchpatch {
+   *       url = "https://github.com/NixOS/nixpkgs/commit/1f770d20550a413e508e081ddc08464e9d08ba3d.patch";
+   *       sha256 = "1nlzx171y3r3jbk0qhvnl711kmdk57jlq4na8f8bs8wz2pbffymr";
+   *     })
+   *   ];
+   * }
+   */
+  applyPatches =
+    { src
+    , name ? (if builtins.typeOf src == "path"
+              then builtins.baseNameOf src
+              else
+                if builtins.isAttrs src && builtins.hasAttr "name" src
+                then src.name
+                else throw "applyPatches: please supply a `name` argument because a default name can only be computed when the `src` is a path or is an attribute set with a `name` attribute."
+             ) + "-patched"
+    , patches   ? []
+    , postPatch ? ""
+    }: stdenvNoCC.mkDerivation {
+      inherit name src patches postPatch;
+      preferLocalBuild = true;
+      allowSubstitutes = false;
+      phases = "unpackPhase patchPhase installPhase";
+      installPhase = "cp -R ./ $out";
+    };
+
+  /* An immutable file in the store with a length of 0 bytes. */
+  emptyFile = runCommand "empty-file" {
+    outputHashAlgo = "sha256";
+    outputHashMode = "recursive";
+    outputHash = "0ip26j2h11n1kgkz36rl4akv694yz65hr72q4kv4b3lxcbi65b3p";
+    preferLocalBuild = true;
+  } "touch $out";
+
+  /* An immutable empty directory in the store. */
+  emptyDirectory = runCommand "empty-directory" {
+    outputHashAlgo = "sha256";
+    outputHashMode = "recursive";
+    outputHash = "0sjjj9z1dhilhpc8pq4154czrb79z9cm044jvn75kxcjv6v5l2m5";
+    preferLocalBuild = true;
+  } "mkdir $out";
+
+  /* Checks that the command output contains the specified version
+   *
+   * Although simplistic, this test assures that the main program
+   * can run. While there's no substitute for a real test case,
+   * it does catch dynamic linking errors and such. It also provides
+   * some protection against accidentally building the wrong version,
+   * for example when using an 'old' hash in a fixed-output derivation.
+   *
+   * Examples:
+   *
+   * passthru.tests.version = testVersion { package = hello; };
+   *
+   * passthru.tests.version = testVersion {
+   *   package = seaweedfs;
+   *   command = "weed version";
+   * };
+   *
+   * passthru.tests.version = testVersion {
+   *   package = key;
+   *   command = "KeY --help";
+   *   # Wrong '2.5' version in the code. Drop on next version.
+   *   version = "2.5";
+   * };
+   */
+  testVersion =
+    { package,
+      command ? "${package.meta.mainProgram or package.pname or package.name} --version",
+      version ? package.version,
+    }: runCommand "test-version" { nativeBuildInputs = [ package ]; meta.timeout = 60; } ''
+      ${command} | grep -Fw ${version}
+      touch $out
+    '';
+}
diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test.nix b/nixpkgs/pkgs/build-support/trivial-builders/test.nix
new file mode 100644
index 000000000000..204fb54fca3d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/trivial-builders/test.nix
@@ -0,0 +1,53 @@
+{ lib, nixosTest, pkgs, writeText, hello, figlet, stdenvNoCC }:
+
+# -------------------------------------------------------------------------- #
+#
+#                         trivial-builders test
+#
+# -------------------------------------------------------------------------- #
+#
+#  This file can be run independently (quick):
+#
+#      $ pkgs/build-support/trivial-builders/test.sh
+#
+#  or in the build sandbox with a ~20s VM overhead
+#
+#      $ nix-build -A tests.trivial-builders
+#
+# -------------------------------------------------------------------------- #
+
+let
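+  # Evaluate the sample set in the given file and join the resulting
+  # store paths into a single space-separated string.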
+  invokeSamples = file:
+    lib.concatStringsSep " " (
+      lib.attrValues (import file { inherit pkgs; })
+    );
+in
+nixosTest {
+  name = "nixpkgs-trivial-builders";
+  nodes.machine = { ... }: {
+    virtualisation.writableStore = true;
+
+    # The test runs without network access, so we disable substituters and pre-provision our dependencies
+    nix.binaryCaches = lib.mkForce [];
+    environment.etc."pre-built-paths".source = writeText "pre-built-paths" (
+      builtins.toJSON [hello figlet stdenvNoCC]
+    );
+    environment.variables = {
+      SAMPLE = invokeSamples ./test/sample.nix;
+      REFERENCES = invokeSamples ./test/invoke-writeReferencesToFile.nix;
+      DIRECT_REFS = invokeSamples ./test/invoke-writeDirectReferencesToFile.nix;
+    };
+  };
+  testScript = ''
+    machine.succeed("""
+      ${./test.sh} 2>/dev/console
+    """)
+  '';
+  meta = {
+    license = lib.licenses.mit; # nixpkgs license
+    maintainers = with lib.maintainers; [
+      roberth
+    ];
+    description = "Run the Nixpkgs trivial builders tests";
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test.sh b/nixpkgs/pkgs/build-support/trivial-builders/test.sh
new file mode 100755
index 000000000000..b7c4726a9be0
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/trivial-builders/test.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+# -------------------------------------------------------------------------- #
+#
+#                         trivial-builders test
+#
+# -------------------------------------------------------------------------- #
+#
+#  This file can be run independently (quick):
+#
+#      $ pkgs/build-support/trivial-builders/test.sh
+#
+#  or in the build sandbox with a ~20s VM overhead
+#
+#      $ nix-build -A tests.trivial-builders
+#
+# -------------------------------------------------------------------------- #
+
+# strict bash
+set -euo pipefail
+
+# debug
+# set -x
+# PS4='+(${BASH_SOURCE}:${LINENO}): ${FUNCNAME[0]:+${FUNCNAME[0]}(): }'
+
+cd "$(dirname ${BASH_SOURCE[0]})"  # nixpkgs root
+
+if [[ -z ${SAMPLE:-} ]]; then
+  sample=( $(nix-build test/sample.nix) )
+  directRefs=( $(nix-build test/invoke-writeDirectReferencesToFile.nix) )
+  references=( $(nix-build test/invoke-writeReferencesToFile.nix) )
+else
+  # Injected by Nix (to avoid evaluating in a derivation)
+  # turn them into arrays
+  sample=($SAMPLE)
+  directRefs=($DIRECT_REFS)
+  references=($REFERENCES)
+fi
+
+echo >&2 Testing direct references...
+for i in "${!sample[@]}"; do
+  echo >&2 Checking '#'$i ${sample[$i]} ${directRefs[$i]}
+  diff -U3 \
+    <(sort <${directRefs[$i]}) \
+    <(nix-store -q --references ${sample[$i]} | sort)
+done
+
+echo >&2 Testing closure...
+for i in "${!sample[@]}"; do
+  echo >&2 Checking '#'$i ${sample[$i]} ${references[$i]}
+  diff -U3 \
+    <(sort <${references[$i]}) \
+    <(nix-store -q --requisites ${sample[$i]} | sort)
+done
+
+echo 'OK!'
diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeDirectReferencesToFile.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeDirectReferencesToFile.nix
new file mode 100644
index 000000000000..ead3f7a2f571
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeDirectReferencesToFile.nix
@@ -0,0 +1,4 @@
+{ pkgs ? import ../../../.. { config = {}; overlays = []; } }:
+pkgs.lib.mapAttrs
+  (k: v: pkgs.writeDirectReferencesToFile v)
+  (import ./sample.nix { inherit pkgs; })
diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeReferencesToFile.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeReferencesToFile.nix
new file mode 100644
index 000000000000..99c6c2f7dcc4
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeReferencesToFile.nix
@@ -0,0 +1,4 @@
+{ pkgs ? import ../../../.. { config = {}; overlays = []; } }:
+pkgs.lib.mapAttrs
+  (k: v: pkgs.writeReferencesToFile v)
+  (import ./sample.nix { inherit pkgs; })
diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/sample.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/sample.nix
new file mode 100644
index 000000000000..27aee6b73dbe
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/trivial-builders/test/sample.nix
@@ -0,0 +1,19 @@
+{ pkgs ? import ../../../.. { config = {}; overlays = []; } }:
+let
+  inherit (pkgs)
+    figlet
+    hello
+    writeText
+    ;
+in
+{
+  hello = hello;
+  figlet = figlet;
+  norefs = writeText "hi" "hello";
+  helloRef = writeText "hi" "hello ${hello}";
+  helloFigletRef = writeText "hi" "hello ${hello} ${figlet}";
+  inherit (pkgs)
+    emptyFile
+    emptyDirectory
+  ;
+}
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/attrset-to-dir.nix b/nixpkgs/pkgs/build-support/upstream-updater/attrset-to-dir.nix
new file mode 100644
index 000000000000..24f7b735c2e2
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/attrset-to-dir.nix
@@ -0,0 +1,20 @@
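+# Render the attribute set `a.theAttrSet' as a directory: for each
+# attribute, $out/attributes/<name> becomes a symlink to a file holding
+# that attribute's value.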
+a:
+a.stdenv.mkDerivation {
+  buildCommand = ''
+    mkdir -p "$out/attributes"
+    
+  '' + (a.lib.concatStrings (map
+    (n: ''
+      ln -s "${a.writeTextFile {name=n; text=builtins.getAttr n a.theAttrSet;}}" $out/attributes/${n};
+    '')
+    (builtins.attrNames a.theAttrSet)
+  ));
+
+  name = "attribute-set";
+  meta = {
+    description = "Contents of an attribute set";
+    maintainers = [
+      a.lib.maintainers.raskin
+    ];
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/create-src-info-git.sh b/nixpkgs/pkgs/build-support/upstream-updater/create-src-info-git.sh
new file mode 100755
index 000000000000..6687a1f8f411
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/create-src-info-git.sh
@@ -0,0 +1,24 @@
+#! /bin/sh
+
+[ -z "$1" ] && {
+  echo "Use $0 expression-basename repo-url branch-name package-base-name"
+  echo "Like:"
+  echo "$0 default http://git.example.com/repo origin/master hello"
+  exit 1;
+} >&2
+
+own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
+
+cp "$own_dir/../builder-defs/template-bdp-uud.nix" "$1.nix" 
+sed -e "s@src-for-default.nix@src-for-$1.nix@g; 
+    s@fetchUrlFromSrcInfo@fetchGitFromSrcInfo@g" -i "$1.nix"
+echo '{}' > "src-for-$1.nix"
+cat << EOF > src-info-for-$1.nix
+{
+  repoUrl = "$2";
+  rev = "$3";
+  baseName = "$4";
+  method = "fetchgit";
+}
+EOF
+
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/create-src-info.sh b/nixpkgs/pkgs/build-support/upstream-updater/create-src-info.sh
new file mode 100755
index 000000000000..fbbbe33a9e7b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/create-src-info.sh
@@ -0,0 +1,20 @@
+#! /bin/sh
+
+[ -z "$1" ] && {
+  echo "Use $0 expression-basename download-page package-base-name"
+  echo "Like:"
+  echo "$0 default http://example.com/downloads hello"
+  exit 1;
+} >&2
+
+own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
+
+cp "$own_dir/../builder-defs/template-auto-callable.nix" "$1.nix" 
+sed -e "s@src-for-default.nix@src-for-$1.nix@g" -i "$1.nix"
+echo '{}' > "src-for-$1.nix"
+cat << EOF > src-info-for-$1.nix
+{
+  downloadPage = "$2";
+  baseName = "$3";
+}
+EOF
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/design.txt b/nixpkgs/pkgs/build-support/upstream-updater/design.txt
new file mode 100644
index 000000000000..128be89d2075
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/design.txt
@@ -0,0 +1,29 @@
+Next to file.nix we generate src-for-file.nix.
+src-for-file.nix should evaluate to a flat attribute set with
+string values; it is meant to be imported by the main expression.
+Ideally, it exports url, hash and version.
+
+src-for-file.nix generation is directed by 
+src-info-for-file.nix.
+
+Attributes:
+
+src-info-for-file.nix:
+
+downloadPage
+rev (for repos)
+baseName (default = unnamed-package)
+sourceRegexp (default = '.*[.]tar[.].*')
+choiceCommand (default = 'head -1')
+versionExtractorSedScript (default = 's/.*-([0-9.]+)[.].*/\1/')
+versionReferenceCreator (default = 's/-([0-9.]+)[.]/-${version}./')
+mirrorSedScript (default = none)
+
+src-for-file.nix:
+
+advertisedUrl (compared against the freshly found URL to decide whether an update is present)
+url
+hash
+version
+name
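+
+Example of a generated src-for-default.nix (a sketch; the concrete
+values are hypothetical):
+
+rec {
+  version="1.0";
+  name="hello-1.0";
+  hash="0000000000000000000000000000000000000000000000000000";
+  url="http://example.com/downloads/hello-1.0.tar.gz";
+  advertisedUrl="http://example.com/downloads/hello-1.0.tar.gz";
+}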
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/snippets.sh b/nixpkgs/pkgs/build-support/upstream-updater/snippets.sh
new file mode 100644
index 000000000000..2c06696c928f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/snippets.sh
@@ -0,0 +1,14 @@
+# sed scripts
+
+#http://sourceforge.net/projects/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz/download
+#http://downloads.sourceforge.net/webdruid/files/webdruid/0.6.0-alpha5/webdruid-0.6.0-alpha5.tar.gz
+skipRedirectSF='s@sourceforge.net/projects@downloads.sourceforge.net/project@; s@/files@@; s@/download$@@;'
+extractReleaseSF='s@.*/([^/]+)/[^/]+@\1@'
+extractVersionSF='s@.*/[^/0-9]*([0-9].*)[.](tar|tgz|tbz2|zip).*@\1@'
+apacheMirror='s@http://www.apache.org/dist/@mirror://apache/@'
+skipRedirectApache='s@/dyn/closer.cgi[?]path=@/dist@'
+
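+# Emit a sed script that replaces every occurrence of the concrete
+# $version with the literal string ${version}.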
+replaceAllVersionOccurences() {
+	echo s/"$version"/\${version}/g
+}
+dashDelimitedVersion='s/.*-([0-9.]+)-.*/\1/'
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/test-case.nix b/nixpkgs/pkgs/build-support/upstream-updater/test-case.nix
new file mode 100644
index 000000000000..498f15c28a78
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/test-case.nix
@@ -0,0 +1,13 @@
+{
+  a=1;
+  b="text";
+  c=''
+  text
+  '';
+  d=''
+    Multi-line text with special characters - 
+    like \ (backslash) and ''${} (dollar + 
+    curly braces) and $ (dollar) and ' (quote)
+    and " (double quote).
+  '';
+}
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/update-upstream-data.sh b/nixpkgs/pkgs/build-support/upstream-updater/update-upstream-data.sh
new file mode 100755
index 000000000000..38c0d13a723d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/update-upstream-data.sh
@@ -0,0 +1,182 @@
+#! /bin/sh
+
+set -x
+
+own_dir="$(cd "$(dirname "$0")"; sh -c pwd)"
+
+source "$own_dir/snippets.sh"
+
+[ -z "$1" ] && {
+  echo "Specify main expression filename."
+  exit 1;
+}
+
+main_file="$1"
+main_dir="$(cd "$(dirname "$main_file")" ; sh -c pwd)"
+file_name="$(basename "$main_file")"
+defs_file="$main_dir"/src-info-for-"$file_name"
+src_file="$main_dir"/src-for-"$file_name"
+# Overwriting the old file in place is acceptable: a [vcs] revert is always possible
+new_src_file="$main_dir"/src-for-"$file_name"
+
+forcedUrl="$2"
+
+defs_dir="$("$own_dir"/attrset-to-dir.sh "$defs_file")"
+src_defs_dir="$("$own_dir"/attrset-to-dir.sh "$src_file")"
+
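+# getAttr <name> <default>: read attribute <name> from the unpacked
+# definitions directory, falling back to <default> when the file is
+# absent.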
+getAttr () {
+    file="$defs_dir"/"$1"
+    data="$( ( [ -f "$file" ] && cat "$file" ) || echo "$2" )"
+    echo "attribute $1 obtained as: [[$data]]" >&2
+    echo "$data"
+}
+
+method="$(getAttr method fetchurl)"
+baseName="$(getAttr baseName 'unnamed-package')"
+commonPrefetchVars=" version name hash"
+
+prefetchClause=""
+[ fetchSFdirs = "$method" ] && {
+    if [ -z "$forcedUrl" ]; then 
+	freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
+          eval "egrep '$(getAttr sourceRegexp '[-][0-9.]+/$')'" | 
+	  eval "egrep -v '$(getAttr blacklistRegexp '^$')'" |
+	  eval "$(getAttr choiceCommand 'head -n 1')" |
+	  eval "$(getAttr versionToFileCommand "sed -re 's@/([^/]*-[0-9.]+)/@/\1/\1$(getAttr fileSuffix .tar.gz)@'")" 
+	)"
+
+	if ! egrep ':' <<< "$freshUrl" ; then 
+		freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
+	fi
+
+	echo "Found download link: $freshUrl" >&2
+    else
+        freshUrl="$forcedUrl"
+    fi
+
+    freshUrl="$(echo "$freshUrl" | sed -re "$skipRedirectSF")"
+    echo "Sourceforge-corrected URL: $freshUrl" >&2
+    
+    version="$(echo "$freshUrl" | 
+      sed -re "$(getAttr versionExtractorSedScript "$extractVersionSF")")"
+    baseName="$(getAttr baseName "$(echo "$freshUrl" | sed -re 's@.*/project/([^/]+)/.*@\1@')")"
+    url="$freshUrl"
+    name="$baseName-$version"
+    advertisedUrl="$freshUrl"
+
+    if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
+        echo "Source link not changed" >&2
+        exit
+    fi
+    hash=$(nix-prefetch-url "$freshUrl")
+
+    prefetchVars="url advertisedUrl";
+}
+[ fetchSF = "$method" ] && {
+    if [ -z "$forcedUrl" ]; then 
+	freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
+          eval "egrep \"$(getAttr sourceRegexp '.*[.]tar[.].*|.*[.]tgz$|.*[.]tbz2$')\"" | 
+	  eval "egrep -v \"$(getAttr blacklistRegexp '^$')\"" |
+          eval "$(getAttr choiceCommand 'head -1')")"
+
+	if ! egrep ':' <<< "$freshUrl" ; then 
+		freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
+	fi
+
+	echo "Found download link: $freshUrl" >&2
+    else
+        freshUrl="$forcedUrl"
+    fi
+
+    freshUrl="$(echo "$freshUrl" | sed -re "$skipRedirectSF")"
+    echo "Sourceforge-corrected URL: $freshUrl" >&2
+    
+    version="$(echo "$freshUrl" | 
+      sed -re "$(getAttr versionExtractorSedScript "$extractVersionSF")")"
+    baseName="$(getAttr baseName "$(echo "$freshUrl" | sed -re 's@.*/project/([^/]+)/.*@\1@')")"
+    url="$freshUrl"
+    name="$baseName-$version"
+    advertisedUrl="$freshUrl"
+
+    if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
+        echo "Source link not changed" >&2
+        exit
+    fi
+    hash=$(nix-prefetch-url "$freshUrl")
+
+    prefetchVars="url advertisedUrl";
+}
+[ fetchurl = "$method" ] && {
+    if [ -z "$forcedUrl" ] ; then
+        freshUrl="$("$own_dir"/urls-from-page.sh "$(getAttr downloadPage)" |
+          eval "egrep \"$(getAttr sourceRegexp  '.*[.]tar[.].*|.*[.]tgz$|.*[.]tbz2$')\"" | 
+	  eval "egrep -v \"$(getAttr blacklistRegexp '^$')\"" |
+          eval "$(getAttr choiceCommand 'head -1')")"
+    
+        if ! egrep ':' <<< "$freshUrl" ; then 
+    	    freshUrl="$(dirname "$(getAttr downloadPage).")/$freshUrl"
+        fi
+    
+        echo "Found download link: $freshUrl" >&2
+    else
+        freshUrl="$forcedUrl"
+    fi
+    
+    version="$(echo "$freshUrl" | 
+      eval "sed -re \"$(getAttr versionExtractorSedScript \
+        's/.*-([0-9.]+)[.].*/\1/')\"")"
+    
+    mirrorUrl="$(echo "$freshUrl" | 
+      eval "sed -r -e \"$(getAttr versionReferenceCreator \
+        's/-'"${version}"'[.]/-\${version}./')\"" |
+      eval "sed -r -e \"$(getAttr mirrorSedScript)\"")"
+    url="$mirrorUrl"
+    
+    name="$baseName-$version"
+    
+    advertisedUrl="$freshUrl"
+    url="$mirrorUrl"
+    
+    if [ x"$freshUrl" = x"$(cat "$src_defs_dir"/advertisedUrl)" ]; then
+        echo "Source link not changed" >&2
+        exit
+    fi
+    hash=$(nix-prefetch-url "$freshUrl")
+
+    prefetchVars="url advertisedUrl";
+}
+[ "fetchgit" = "$method" ] && {
+    repoUrl="$(getAttr repoUrl)"
+    export NIX_PREFETCH_GIT_CHECKOUT_HOOK="
+        cat .git/HEAD
+    "
+    export NIX_HASH_ALGO=sha256
+    rev="$(getAttr rev '')";
+    rev_and_hash="$("$own_dir"/../fetchgit/nix-prefetch-git "$repoUrl" "$rev" | tee /dev/stderr | tail -2)"
+
+    rev="$(echo "$rev_and_hash" | head -1)"
+    url="$repoUrl";
+    hash="$(echo "$rev_and_hash" | tail -1)"
+    version="$rev"
+    name="$baseName-$version"
+
+    prefetchVars="rev url";
+}
+
+prefetchAssignments="";
+for i in $commonPrefetchVars $prefetchVars; do
+  prefetchAssignments="$prefetchAssignments $i=\"$(eval echo \"\$$i\")\";$(echo -e '\n  ')"
+done;
+
+extraAssignments=""
+for i in $(getAttr extraVars ''); do
+  eval "$(getAttr "eval_$i" 'i=""')"
+  extraAssignments="$extraAssignments $i=\"$(eval echo \"\$$i\")\";$(echo -e '\n  ')"
+done
+
+cat << EOF > "$new_src_file"
+rec {
+  $prefetchAssignments
+  $extraAssignments
+}
+EOF
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/update-walker-service-specific.sh b/nixpkgs/pkgs/build-support/upstream-updater/update-walker-service-specific.sh
new file mode 100644
index 000000000000..fe439c5d11e3
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/update-walker-service-specific.sh
@@ -0,0 +1,20 @@
+SF_redirect () {
+  redirect
+  process 'http://[a-z]+[.]dl[.]sourceforge[.]net/' 'mirror://sourceforge/'
+  process '[?].*' ''
+}
+
+SF_version_dir () {
+  version_link 'http://sourceforge.net/.+/'"$1"'[0-9.]+/$'
+}
+
+SF_version_tarball () {
+  version_link "${1:-[.]tar[.]}.*/download\$"
+}
+
+GH_latest () {
+  prefetch_command_rel ../fetchgit/nix-prefetch-git
+  revision "$("$(dirname "$0")/urls-from-page.sh" "$CURRENT_URL/commits" | grep /commit/ | head -n 1 | xargs basename )"
+  version '.*' "git-$(date +%Y-%m-%d)"
+  NEED_TO_CHOOSE_URL=
+}
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/update-walker.sh b/nixpkgs/pkgs/build-support/upstream-updater/update-walker.sh
new file mode 100755
index 000000000000..e60499b60f27
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/update-walker.sh
@@ -0,0 +1,320 @@
+#! /bin/sh
+
+own_dir="$(cd "$(dirname "$0")"; pwd)"
+
+URL_WAS_SET=
+DL_URL_RE=
+CURRENT_URL=
+CURRENT_REV=
+PREFETCH_COMMAND=
+NEED_TO_CHOOSE_URL=1
+
+url () {
+  URL_WAS_SET=1
+  CURRENT_URL="$1"
+}
+
+dl_url_re () {
+  DL_URL_RE="$1"
+}
+
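+# Split a version string into space-separated fields so a numeric sort
+# can order it; pre-release markers (rc, pre, gamma, beta, alpha) become
+# negative ranks that sort below the corresponding release.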
+version_unpack () {
+  sed -re '
+    s/[.]/ /g; 
+    s@/@ / @g
+    s/-(rc|pre)/ -1 \1 /g; 
+    s/-(gamma)/ -2 \1 /g; 
+    s/-(beta)/ -3 \1 /g; 
+    s/-(alpha)/ -4 \1 /g;
+    s/[-]/ - /g; 
+    '
+}
+
+version_repack () {
+  sed -re '
+    s/ - /-/g;
+    s/ -[0-9]+ ([a-z]+) /-\1/g;
+    s@ / @/@g
+    s/ /./g; 
+    '
+}
+
+version_sort () {
+  version_unpack | 
+    sort -t ' ' -n $(for i in $(seq 30); do echo " -k${i}n" ; done) | tac |
+    version_repack
+}
+
+position_choice () {
+  head -n "${1:-1}" | tail -n "${2:-1}"
+}
+
+matching_links () {
+  "$own_dir"/urls-from-page.sh "$CURRENT_URL" | grep -E "$1"
+}
+
+link () {
+  CURRENT_URL="$(matching_links "$1" | position_choice "$2" "$3")"
+  unset NEED_TO_CHOOSE_URL
+  echo "Linked by: $*"
+  echo "URL: $CURRENT_URL" >&2
+}
+
+version_link () {
+  CURRENT_URL="$(matching_links "$1" | version_sort | position_choice "$2" "$3")"
+  unset NEED_TO_CHOOSE_URL
+  echo "Linked version by: $*"
+  echo "URL: $CURRENT_URL" >&2
+}
+
+redirect () {
+  CURRENT_URL="$(curl -I -L --max-redirs "${1:-99}" "$CURRENT_URL" | 
+    grep -E '^Location: ' | position_choice "${2:-999999}" "$3" |
+    sed -e 's/^Location: //; s/\r//')"
+  echo "Redirected: $*"
+  echo "URL: $CURRENT_URL" >&2
+}
+
+replace () {
+  sed -re "s	$1	$2	g"
+}
+
+process () {
+  CURRENT_URL="$(echo "$CURRENT_URL" | replace "$1" "$2")"
+  echo "Processed: $*"
+  echo "URL: $CURRENT_URL" >&2
+}
+
+version () {
+  CURRENT_VERSION="$(echo "$CURRENT_URL" | replace "$1" "$2")"
+  echo "Version: $CURRENT_VERSION" >&2
+}
+
+ensure_version () {
+  echo "Ensuring version. CURRENT_VERSION: $CURRENT_VERSION" >&2
+  [ -z "$CURRENT_VERSION" ] && version '.*-([0-9.]+)[-._].*' '\1'
+}
+
+ensure_target () {
+  echo "Ensuring target. CURRENT_TARGET: $CURRENT_TARGET" >&2
+  [ -z "$CURRENT_TARGET" ] && target "$(basename "$CONFIG_NAME" .upstream).nix"
+}
+
+ensure_name () {
+  echo "Ensuring name. CURRENT_NAME: $CURRENT_NAME" >&2
+  [ -z "$CURRENT_NAME" ] && name "$(basename "$CONFIG_DIR")"
+  echo "Resulting name: $CURRENT_NAME"
+}
+
+ensure_attribute_name () {
+  echo "Ensuring attribute name. CURRENT_ATTRIBUTE_NAME: $CURRENT_ATTRIBUTE_NAME" >&2
+  ensure_name
+  [ -z "$CURRENT_ATTRIBUTE_NAME" ] && attribute_name "$CURRENT_NAME"
+  echo "Resulting attribute name: $CURRENT_ATTRIBUTE_NAME"
+}
+
+ensure_url () {
+  echo "Ensuring starting URL. CURRENT_URL: $CURRENT_URL" >&2
+  ensure_attribute_name
+  [ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta downloadPage)"
+  [ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta downloadpage)"
+  [ -z "$CURRENT_URL" ] && CURRENT_URL="$(retrieve_meta homepage)"
+  echo "Resulting URL: $CURRENT_URL"
+}
+
+ensure_choice () {
+  echo "Ensuring that choice is made." >&2
+  echo "NEED_TO_CHOOSE_URL: [$NEED_TO_CHOOSE_URL]." >&2
+  echo "CURRENT_URL: $CURRENT_URL" >&2
+  [ -z "$URL_WAS_SET" ] && [ -z "$CURRENT_URL" ] && ensure_url
+  [ -n "$NEED_TO_CHOOSE_URL" ] && {
+    version_link "${DL_URL_RE:-[.]tar[.]([^./])+\$}"
+    unset NEED_TO_CHOOSE_URL
+  }
+  [ -z "$CURRENT_URL" ] && {
+    echo "Error: empty CURRENT_URL"
+    echo "Error: empty CURRENT_URL" >&2
+    exit 1
+  }
+}
+
+revision () {
+  CURRENT_REV="$1"
+  echo "CURRENT_REV: $CURRENT_REV"
+}
+
+prefetch_command () {
+  PREFETCH_COMMAND="$1"
+}
+
+prefetch_command_rel () {
+  PREFETCH_COMMAND="$(dirname "$0")/$1"
+}
+
+ensure_hash () {
+  echo "Ensuring hash. CURRENT_HASH: $CURRENT_HASH" >&2
+  [ -z "$CURRENT_HASH" ] && hash
+}
+
+hash () {
+  CURRENT_HASH="$(${PREFETCH_COMMAND:-nix-prefetch-url} "$CURRENT_URL" $CURRENT_REV)"
+  echo "CURRENT_HASH: $CURRENT_HASH" >&2
+}
+
+name () {
+  CURRENT_NAME="$1"
+  echo "CURRENT_NAME: $CURRENT_NAME" >&2
+}
+
+attribute_name () {
+  CURRENT_ATTRIBUTE_NAME="$1"
+  echo "CURRENT_ATTRIBUTE_NAME: $CURRENT_ATTRIBUTE_NAME" >&2
+}
+
+retrieve_meta () {
+  nix-instantiate --eval-only '<nixpkgs>' -A "$CURRENT_ATTRIBUTE_NAME".meta."$1" | xargs
+}
+
+retrieve_version () {
+  PACKAGED_VERSION="$(retrieve_meta version)"
+}
+
+ensure_dl_url_re () {
+  echo "Ensuring DL_URL_RE. DL_URL_RE: $DL_URL_RE" >&2
+  [ -z "$DL_URL_RE" ] && dl_url_re "$(retrieve_meta downloadURLRegexp)"
+  echo "DL_URL_RE: $DL_URL_RE" >&2
+}
+
+directory_of () {
+  cd "$(dirname "$1")"; pwd
+}
+
+full_path () {
+  echo "$(directory_of "$1")/$(basename "$1")"
+}
+
+target () {
+  CURRENT_TARGET="$1"
+  { [ "$CURRENT_TARGET" = "${CURRENT_TARGET#/}" ] && CURRENT_TARGET="$CONFIG_DIR/$CURRENT_TARGET"; }
+  echo "Target set to: $CURRENT_TARGET"
+}
+
+marker () {
+  BEGIN_EXPRESSION="$1"
+}
+
+update_found () {
+  echo "Compare: $CURRENT_VERSION vs $PACKAGED_VERSION"
+  [ "$CURRENT_VERSION" != "$PACKAGED_VERSION" ]
+}
+
+do_write_expression () {
+  echo "${1}rec {"
+  echo "${1}  baseName=\"$CURRENT_NAME\";"
+  echo "${1}  version=\"$CURRENT_VERSION\";"
+  echo "${1}  name=\"\${baseName}-\${version}\";"
+  echo "${1}  hash=\"$CURRENT_HASH\";"
+  echo "${1}  url=\"$CURRENT_URL\";"
+  [ -n "$CURRENT_REV" ] && echo "${1}  rev=\"$CURRENT_REV\";"
+  echo "${1}  sha256=\"$CURRENT_HASH\";"
+  echo "$2"
+}
+
+line_position () {
+  file="$1"
+  regexp="$2"
+  count="${3:-1}"
+  grep -E "$regexp" -m "$count" -B 999999 "$file" | wc -l
+}
+
+replace_once () {
+  file="$1"
+  regexp="$2"
+  replacement="$3"
+  instance="${4:-1}"
+
+  echo "Replacing once:"
+  echo "file: [[$file]]"
+  echo "regexp: [[$regexp]]"
+  echo "replacement: [[$replacement]]"
+  echo "instance: [[$instance]]"
+
+  position="$(line_position "$file" "$regexp" "$instance")"
+  sed -re "${position}s	$regexp	$replacement	" -i "$file"
+}
+
+set_var_value () {
+  var="${1}"
+  value="${2}"
+  instance="${3:-1}"
+  file="${4:-$CURRENT_TARGET}"
+  no_quotes="${5:-0}"
+
+  quote='"'
+  let "$no_quotes" && quote=""
+
+  replace_once "$file" "${var} *= *.*" "${var} = ${quote}${value}${quote};" "$instance"
+}
+
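+# Regenerate the target file: keep everything up to the marker line,
+# replace the generated block with freshly written assignments (the
+# closing brace is re-emitted), and keep everything after that brace.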
+do_regenerate () {
+  BEFORE="$(cat "$1" | grep -F "$BEGIN_EXPRESSION" -B 999999;)"
+  AFTER_EXPANDED="$(cat "$1" | grep -F "$BEGIN_EXPRESSION" -A 999999 | grep -E '^ *[}] *; *$' -A 999999;)"
+  AFTER="$(echo "$AFTER_EXPANDED" | tail -n +2)"
+  CLOSE_BRACE="$(echo "$AFTER_EXPANDED" | head -n 1)"
+  SPACING="$(echo "$CLOSE_BRACE" | sed -re 's/[^ ].*//')"
+
+  echo "$BEFORE"
+  do_write_expression "$SPACING" "$CLOSE_BRACE"
+  echo "$AFTER"
+}
+
+do_overwrite () {
+  ensure_hash
+  do_regenerate "$1" > "$1.new.tmp"
+  mv "$1.new.tmp" "$1"
+}
+
+do_overwrite_just_version () {
+  ensure_hash
+  set_var_value version $CURRENT_VERSION
+  set_var_value sha256 $CURRENT_HASH
+}
+
+minimize_overwrite() {
+  do_overwrite(){
+    do_overwrite_just_version
+  }
+}
+
+process_config () {
+  CONFIG_DIR="$(directory_of "$1")"
+  CONFIG_NAME="$(basename "$1")"
+  BEGIN_EXPRESSION='# Generated upstream information';
+  if [ -f  "$CONFIG_DIR/$CONFIG_NAME" ] &&
+      [ "${CONFIG_NAME}" = "${CONFIG_NAME%.nix}" ]; then
+    source "$CONFIG_DIR/$CONFIG_NAME"
+  else
+    CONFIG_NAME="${CONFIG_NAME%.nix}"
+    ensure_attribute_name
+    [ -n "$(retrieve_meta updateWalker)" ] ||
+        [ -n "$FORCE_UPDATE_WALKER" ] || {
+      echo "Error: package not marked as safe for update-walker" >&2
+      echo "Set FORCE_UPDATE_WALKER=1 to override" >&2
+      exit 1;
+    }
+    [ -z "$(retrieve_meta fullRegenerate)" ] && eval "
+      minimize_overwrite
+    "
+  fi
+  ensure_attribute_name
+  retrieve_version
+  ensure_dl_url_re
+  ensure_choice
+  ensure_version
+  ensure_target
+  update_found && do_overwrite "$CURRENT_TARGET"
+}
+
+source "$own_dir/update-walker-service-specific.sh"
+
+process_config "$1"
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/update-walker.txt b/nixpkgs/pkgs/build-support/upstream-updater/update-walker.txt
new file mode 100644
index 000000000000..ae47e5590294
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/update-walker.txt
@@ -0,0 +1,118 @@
+update-walker is an imperative semiautomated update helper.
+
+It runs the X.upstream file to find the freshest version of the package in
+the specified upstream source and updates the corresponding X.nix file.
+
+
+
+The simplest available commands:
+
+url: set the upstream source list URL equal to $1; the default is
+meta.downloadPage with meta.homepage fallback
+
+dl_url_re: set the regular expression used to select download links to $1; the
+default is meta.downloadURLRegexp or '[.]tar[.]([^./])+\$' if it is not set
+
+target: specify target expression; default is to replace .upstream extension
+with .nix extension
+
+name: specify the derivation name; default is the basename of the dirname
+of the .upstream file
+
+attribute_name: specify the attribute name to evaluate for getting the current
+version from meta.version; default is to use the derivation name
+
+minimize_overwrite: configure the update so that only the version= and
+sha256= assignments are replaced; the default is to regenerate a full
+upstream description block with url, name, version, hash etc.
+
+
+
+A lot of packages can be updated in a pseudo-declarative style using only
+the commands from the previous paragraph.
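+
+For example, a minimal hello.upstream file might contain just (a sketch;
+the URL and regexp are hypothetical):
+
+  url http://example.com/downloads/
+  dl_url_re 'hello-[0-9.]+[.]tar[.]gz$'
+  minimize_overwrite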
+
+Some packages do not need any non-default settings; in these cases just setting
+meta.updateWalker to true is enough, and you can run update-walker directly on
+the .nix file afterwards. In this case minimize_overwrite is implied unless
+meta.fullRegenerate is set.
+
+
+
+For packages that require more fine-grained control than the described options
+allow, you need to take into account the default control flow of the tool.
+
+First, the definitions from update-walker script and additional definitions
+from update-walker-service-specific.sh are loaded. Then the config is executed
+as a shell script. Some of the commands it can use do remember whether they
+have been used. Afterwards the following steps happen: 
+
+attribute_name is set to name unless it has been already set
+
+meta.version is read from the NixPkgs package called attribute_name
+
+download URL regexp is set to default unless it has been already set in the
+updater script
+
+the download page URL gets set to its default value unless it has been set
+previously
+
+if the action of getting the download page and choosing the freshest link by
+version has not yet been taken, it happens
+
+if the version has not yet been extracted from the URL, it gets extracted
+
+target nix expression to update gets set to the default value unless it has
+been set explicitly
+
+if the URL version is fresher than the packaged version, the new file gets
+downloaded and its hash is calculated
+
+do_overwrite function is called; the default calculates a big upstream data
+block and puts it after the '# Generated upstream information' marker (the
+marker can be changed by the command marker)
+
+
+
+If the update needs some special logic, it is put into the updater script and
+the corresponding steps are skipped because the needed action has already been
+performed. 
+
+For example: 
+
+minimize_overwrite is exactly the same as
+
+do_overwrite() { do_overwrite_just_version; }
+
+redefinition. You can do a more complex do_overwrite redefinition, if needed.
+It can probably use ensure_hash to download the source and calculate the hash
+and set_var_value.
+
+set_var_value alters the $3-th assignment to the variable named $1 in the
+expression, setting it to the value $2; $3 defaults to 1. It can modify the
+file $4 instead of the current target, and it puts the value without quotes
+if $5 is 1.
+
+
+
+Typical steps include:
+
+ensure_choice: download current URL and find the freshest version link on the
+page, it is now the new URL
+
+ensure_hash: download current URL and calculate the source package hash
+
+ensure_version: extract version from the URL
+
+SF_redirect: replace the current URL with a SourceForge.net mirror:// URL
+
+SF_version_dir: assume SourceForge.net layout and choose the freshest
+version-named subdirectory in the file catalog; you can optionally specify $1
+as a directory name regexp (digits and periods will be required after it)
+
+SF_version_tarball: assume SourceForge.net layout and choose the freshest
+tarball download link
+
+version: apply replacement of $1 with $2 (extended regexp format) to extract
+the version from URL
+
+version_link: choose the freshest versioned link, $1 is the regexp of
+acceptable links
diff --git a/nixpkgs/pkgs/build-support/upstream-updater/urls-from-page.sh b/nixpkgs/pkgs/build-support/upstream-updater/urls-from-page.sh
new file mode 100755
index 000000000000..db39286ccb02
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/upstream-updater/urls-from-page.sh
@@ -0,0 +1,14 @@
+#! /bin/sh
+
+url="$1"
+protocol="${url%%:*}"
+path="${url#$protocol://}"
+server="${path%%/*}"
+basepath="${path%/*}"
+relpath="${path#$server}"
+ 
+echo "URL: $url" >&2
+
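+# Extract every href attribute from the page and absolutize it:
+# server-relative links get "$protocol://$server" prepended, and links
+# without a protocol are resolved against the page's base path.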
+curl -A 'text/html; text/xhtml; text/xml; */*' -L -k "$url" | sed -re 's/^/-/;s/[^a-zA-Z][hH][rR][eE][fF]=("([^"]*)"|'\''([^'\'']*)'\''|([^"'\'' <>&]+)[ <>&])/\n+\2\3\4\n-/g' | \
+  sed -e '/^-/d; s/^[+]//; /^#/d;'"s/^\\//$protocol:\\/\\/$server\\//g" | \
+  sed -re 's`^[^:]*$`'"$protocol://$basepath/&\`"
diff --git a/nixpkgs/pkgs/build-support/vm/deb/deb-closure.pl b/nixpkgs/pkgs/build-support/vm/deb/deb-closure.pl
new file mode 100644
index 000000000000..fe23025df1d8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/vm/deb/deb-closure.pl
@@ -0,0 +1,180 @@
+use strict;
+use Dpkg::Control;
+use Dpkg::Deps;
+use File::Basename;
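+
+# Usage (sketch):
+#   deb-closure.pl <Packages file> <url prefix> <top-level packages...>
+# Prints to stdout a Nix expression containing fetchurl calls for the
+# dependency closure of the given top-level packages.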
+
+my $packagesFile = shift @ARGV;
+my $urlPrefix = shift @ARGV;
+my @toplevelPkgs = @ARGV;
+
+
+my %packages;
+
+
+# Parse the Packages file.
+open PACKAGES, "<$packagesFile" or die;
+
+while (1) {
+    my $cdata = Dpkg::Control->new(type => CTRL_INFO_PKG);
+    last if not $cdata->parse(\*PACKAGES, $packagesFile);
+    die unless defined $cdata->{Package};
+    #print STDERR $cdata->{Package}, "\n";
+    $packages{$cdata->{Package}} = $cdata;
+}
+
+close PACKAGES;
+
+
+# Flatten a Dpkg::Deps dependency value into a list of package names.
+sub getDeps {
+    my $deps = shift;
+    #print "$deps\n";
+    if ($deps->isa('Dpkg::Deps::AND')) {
+        my @res = ();
+        foreach my $dep ($deps->get_deps()) {
+            push @res, getDeps($dep);
+        }
+        return @res;
+    } elsif ($deps->isa('Dpkg::Deps::OR')) {
+        # Arbitrarily pick the first alternative.
+        return getDeps(($deps->get_deps())[0]);
+    } elsif ($deps->isa('Dpkg::Deps::Simple')) {
+        return ($deps->{package});
+    } else {
+        die "unknown dep type";
+    }
+}
+
+
+# Process the "Provides" and "Replaces" fields to be able to resolve
+# virtual dependencies.
+my %provides;
+
+foreach my $cdata (sort {$a->{Package} cmp $b->{Package}} (values %packages)) {
+    if (defined $cdata->{Provides}) {
+        my @provides = getDeps(Dpkg::Deps::deps_parse($cdata->{Provides}));
+        foreach my $name (@provides) {
+            #die "conflicting provide: $name\n" if defined $provides{$name};
+            #warn "provide by $cdata->{Package} conflicts with package with the same name: $name\n";
+            next if defined $packages{$name};
+            $provides{$name} = $cdata->{Package};
+        }
+    }
+    # Treat "Replaces" like "Provides".
+    if (defined $cdata->{Replaces}) {
+        my @replaces = getDeps(Dpkg::Deps::deps_parse($cdata->{Replaces}));
+        foreach my $name (@replaces) {
+            next if defined $packages{$name};
+            $provides{$name} = $cdata->{Package};
+        }
+    }
+}
+
+
+# Determine the closure of a package.
+my %donePkgs;
+my %depsUsed;
+my @order = ();
+
+sub closePackage {
+    my $pkgName = shift;
+    print STDERR ">>> $pkgName\n";
+    my $cdata = $packages{$pkgName};
+
+    if (!defined $cdata) {
+        die "unknown (virtual) package $pkgName"
+            unless defined $provides{$pkgName};
+        print STDERR "virtual $pkgName: using $provides{$pkgName}\n";
+        $pkgName = $provides{$pkgName};
+        $cdata = $packages{$pkgName};
+    }
+
+    die "unknown package $pkgName" unless defined $cdata;
+    return if defined $donePkgs{$pkgName};
+    $donePkgs{$pkgName} = 1;
+
+    if (defined $cdata->{Provides}) {
+        foreach my $name (getDeps(Dpkg::Deps::deps_parse($cdata->{Provides}))) {
+            $provides{$name} = $cdata->{Package};
+        }
+    }
+
+    my @depNames = ();
+
+    if (defined $cdata->{Depends}) {
+        print STDERR "    $pkgName: $cdata->{Depends}\n";
+        my $deps = Dpkg::Deps::deps_parse($cdata->{Depends});
+        die unless defined $deps;
+        push @depNames, getDeps($deps);
+    }
+
+    if (defined $cdata->{'Pre-Depends'}) {
+        print STDERR "    $pkgName: $cdata->{'Pre-Depends'}\n";
+        my $deps = Dpkg::Deps::deps_parse($cdata->{'Pre-Depends'});
+        die unless defined $deps;
+        push @depNames, getDeps($deps);
+    }
+
+    foreach my $depName (@depNames) {
+        closePackage($depName);
+    }
+
+    push @order, $pkgName;
+    $depsUsed{$pkgName} = \@depNames;
+}
+
+foreach my $pkgName (@toplevelPkgs) {
+    closePackage $pkgName;
+}
+
+
+# Generate the output Nix expression.
+print "# This is a generated file.  Do not modify!\n";
+print "# Following are the Debian packages constituting the closure of: @toplevelPkgs\n\n";
+print "{fetchurl}:\n\n";
+print "[\n\n";
+
+# Output the packages in strongly connected components.
+my %done;
+my %forward;
+my $newComponent = 1;
+foreach my $pkgName (@order) {
+    $done{$pkgName} = 1;
+    my $cdata = $packages{$pkgName};
+    my @deps = @{$depsUsed{$pkgName}};
+    foreach my $dep (@deps) {
+        $dep = $provides{$dep} if defined $provides{$dep};
+        $forward{$dep} = 1 unless defined $done{$dep};
+    }
+    delete $forward{$pkgName};
+
+    print "  [\n\n" if $newComponent;
+    $newComponent = 0;
+
+    my $origName = basename $cdata->{Filename};
+    my $cleanedName = $origName;
+    $cleanedName =~ s/~//g;
+
+    print "    (fetchurl {\n";
+    print "      url = $urlPrefix/$cdata->{Filename};\n";
+    print "      sha256 = \"$cdata->{SHA256}\";\n";
+    print "      name = \"$cleanedName\";\n" if $cleanedName ne $origName;
+    print "    })\n";
+    print "\n";
+
+    if (keys %forward == 0) {
+        print "  ]\n\n";
+        $newComponent = 1;
+    }
+}
+
+print "]\n";
+
+if ($newComponent != 1) {
+    print STDERR "argh: ", keys %forward, "\n";
+    exit 1;
+}
diff --git a/nixpkgs/pkgs/build-support/vm/default.nix b/nixpkgs/pkgs/build-support/vm/default.nix
new file mode 100644
index 000000000000..cfc19c03cfdd
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/vm/default.nix
@@ -0,0 +1,1159 @@
+{ lib
+, pkgs
+, kernel ? pkgs.linux
+, img ? pkgs.stdenv.hostPlatform.linux-kernel.target
+, storeDir ? builtins.storeDir
+, rootModules ?
+    [ "virtio_pci" "virtio_mmio" "virtio_blk" "virtio_balloon" "virtio_rng" "ext4" "unix" "9p" "9pnet_virtio" "crc32c_generic" ]
+      ++ pkgs.lib.optional (pkgs.stdenv.isi686 || pkgs.stdenv.isx86_64) "rtc_cmos"
+}:
+
+with pkgs;
+with import ../../../nixos/lib/qemu-flags.nix { inherit pkgs; };
+
+rec {
+
+  qemu = pkgs.qemu_kvm;
+
+  modulesClosure = makeModulesClosure {
+    inherit kernel rootModules;
+    firmware = kernel;
+  };
+
+
+  hd = "vda"; # either "sda" or "vda"
+
+  initrdUtils = runCommand "initrd-utils"
+    { buildInputs = [ nukeReferences ];
+      allowedReferences = [ "out" modulesClosure ]; # prevent accidents like glibc being included in the initrd
+    }
+    ''
+      mkdir -p $out/bin
+      mkdir -p $out/lib
+
+      # Copy what we need from Glibc.
+      cp -p ${pkgs.stdenv.glibc.out}/lib/ld-linux*.so.? $out/lib
+      cp -p ${pkgs.stdenv.glibc.out}/lib/libc.so.* $out/lib
+      cp -p ${pkgs.stdenv.glibc.out}/lib/libm.so.* $out/lib
+      cp -p ${pkgs.stdenv.glibc.out}/lib/libresolv.so.* $out/lib
+
+      # Copy BusyBox.
+      cp -pd ${pkgs.busybox}/bin/* $out/bin
+
+      # Run patchelf to make the programs refer to the copied libraries.
+      for i in $out/bin/* $out/lib/*; do if ! test -L $i; then nuke-refs $i; fi; done
+
+      for i in $out/bin/*; do
+          if [ -f "$i" -a ! -L "$i" ]; then
+              echo "patching $i..."
+              patchelf --set-interpreter $out/lib/ld-linux*.so.? --set-rpath $out/lib $i || true
+          fi
+      done
+    ''; # */
+
+
+  stage1Init = writeScript "vm-run-stage1" ''
+    #! ${initrdUtils}/bin/ash -e
+
+    export PATH=${initrdUtils}/bin
+
+    mkdir /etc
+    echo -n > /etc/fstab
+
+    mount -t proc none /proc
+    mount -t sysfs none /sys
+
+    echo 2 > /proc/sys/vm/panic_on_oom
+
+    for o in $(cat /proc/cmdline); do
+      case $o in
+        mountDisk=1)
+          mountDisk=1
+          ;;
+        command=*)
+          set -- $(IFS==; echo $o)
+          command=$2
+          ;;
+        out=*)
+          set -- $(IFS==; echo $o)
+          export out=$2
+          ;;
+      esac
+    done
+
+    echo "loading kernel modules..."
+    for i in $(cat ${modulesClosure}/insmod-list); do
+      insmod $i || echo "warning: unable to load $i"
+    done
+
+    mount -t devtmpfs devtmpfs /dev
+
+    ifconfig lo up
+
+    mkdir /fs
+
+    if test -z "$mountDisk"; then
+      mount -t tmpfs none /fs
+    else
+      mount /dev/${hd} /fs
+    fi
+
+    mkdir -p /fs/dev
+    mount -o bind /dev /fs/dev
+
+    mkdir -p /fs/dev/shm /fs/dev/pts
+    mount -t tmpfs -o "mode=1777" none /fs/dev/shm
+    mount -t devpts none /fs/dev/pts
+
+    echo "mounting Nix store..."
+    mkdir -p /fs${storeDir}
+    mount -t 9p store /fs${storeDir} -o trans=virtio,version=9p2000.L,cache=loose
+
+    mkdir -p /fs/tmp /fs/run /fs/var
+    mount -t tmpfs -o "mode=1777" none /fs/tmp
+    mount -t tmpfs -o "mode=755" none /fs/run
+    ln -sfn /run /fs/var/run
+
+    echo "mounting host's temporary directory..."
+    mkdir -p /fs/tmp/xchg
+    mount -t 9p xchg /fs/tmp/xchg -o trans=virtio,version=9p2000.L
+
+    mkdir -p /fs/proc
+    mount -t proc none /fs/proc
+
+    mkdir -p /fs/sys
+    mount -t sysfs none /fs/sys
+
+    mkdir -p /fs/etc
+    ln -sf /proc/mounts /fs/etc/mtab
+    echo "127.0.0.1 localhost" > /fs/etc/hosts
+    # Ensures tools requiring /etc/passwd will work (e.g. nix)
+    if [ ! -e /fs/etc/passwd ]; then
+      echo "root:x:0:0:System administrator:/root:/bin/sh" > /fs/etc/passwd
+    fi
+
+    echo "starting stage 2 ($command)"
+    exec switch_root /fs $command $out
+  '';
+
+
+  initrd = makeInitrd {
+    contents = [
+      { object = stage1Init;
+        symlink = "/init";
+      }
+    ];
+  };
+
+
+  stage2Init = writeScript "vm-run-stage2" ''
+    #! ${bash}/bin/sh
+    source /tmp/xchg/saved-env
+
+    # Set the system time from the hardware clock.  Works around an
+    # apparent KVM > 1.5.2 bug.
+    ${pkgs.util-linux}/bin/hwclock -s
+
+    export NIX_STORE=${storeDir}
+    export NIX_BUILD_TOP=/tmp
+    export TMPDIR=/tmp
+    export PATH=/empty
+    out="$1"
+    cd "$NIX_BUILD_TOP"
+
+    if ! test -e /bin/sh; then
+      ${coreutils}/bin/mkdir -p /bin
+      ${coreutils}/bin/ln -s ${bash}/bin/sh /bin/sh
+    fi
+
+    # Set up automatic kernel module loading.
+    export MODULE_DIR=${kernel}/lib/modules/
+    ${coreutils}/bin/cat <<EOF > /run/modprobe
+    #! ${bash}/bin/sh
+    export MODULE_DIR=$MODULE_DIR
+    exec ${kmod}/bin/modprobe "\$@"
+    EOF
+    ${coreutils}/bin/chmod 755 /run/modprobe
+    echo /run/modprobe > /proc/sys/kernel/modprobe
+
+    # For debugging: if this is the second time this image is run,
+    # then don't start the build again, but instead drop the user into
+    # an interactive shell.
+    if test -n "$origBuilder" -a ! -e /.debug; then
+      exec < /dev/null
+      ${coreutils}/bin/touch /.debug
+      $origBuilder $origArgs
+      echo $? > /tmp/xchg/in-vm-exit
+
+      ${busybox}/bin/mount -o remount,ro dummy /
+
+      ${busybox}/bin/poweroff -f
+    else
+      export PATH=/bin:/usr/bin:${coreutils}/bin
+      echo "Starting interactive shell..."
+      echo "(To run the original builder: \$origBuilder \$origArgs)"
+      exec ${busybox}/bin/setsid ${bashInteractive}/bin/bash < /dev/${qemuSerialDevice} &> /dev/${qemuSerialDevice}
+    fi
+  '';
+
+
+  qemuCommandLinux = ''
+    ${qemuBinary qemu} \
+      -nographic -no-reboot \
+      -device virtio-rng-pci \
+      -virtfs local,path=${storeDir},security_model=none,mount_tag=store \
+      -virtfs local,path=$TMPDIR/xchg,security_model=none,mount_tag=xchg \
+      ''${diskImage:+-drive file=$diskImage,if=virtio,cache=unsafe,werror=report} \
+      -kernel ${kernel}/${img} \
+      -initrd ${initrd}/initrd \
+      -append "console=${qemuSerialDevice} panic=1 command=${stage2Init} out=$out mountDisk=$mountDisk loglevel=4" \
+      $QEMU_OPTS
+  '';
+
+
+  vmRunCommand = qemuCommand: writeText "vm-run" ''
+    export > saved-env
+
+    PATH=${coreutils}/bin
+    mkdir xchg
+    mv saved-env xchg/
+
+    eval "$preVM"
+
+    if [ "$enableParallelBuilding" = 1 ]; then
+      if [ ''${NIX_BUILD_CORES:-0} = 0 ]; then
+        QEMU_OPTS+=" -smp cpus=$(nproc)"
+      else
+        QEMU_OPTS+=" -smp cpus=$NIX_BUILD_CORES"
+      fi
+    fi
+
+    # Write the command to start the VM to a file so that the user can
+    # debug inside the VM if the build fails (when Nix is called with
+    # the -K option to preserve the temporary build directory).
+    cat > ./run-vm <<EOF
+    #! ${bash}/bin/sh
+    ''${diskImage:+diskImage=$diskImage}
+    TMPDIR=$TMPDIR
+    cd $TMPDIR
+    ${qemuCommand}
+    EOF
+
+    mkdir -p -m 0700 $out
+
+    chmod +x ./run-vm
+    source ./run-vm
+
+    if ! test -e xchg/in-vm-exit; then
+      echo "Virtual machine didn't produce an exit code."
+      exit 1
+    fi
+
+    exitCode="$(cat xchg/in-vm-exit)"
+    if [ "$exitCode" != "0" ]; then
+      exit "$exitCode"
+    fi
+
+    eval "$postVM"
+  '';
+
+
+  createEmptyImage = {size, fullName}: ''
+    mkdir $out
+    diskImage=$out/disk-image.qcow2
+    ${qemu}/bin/qemu-img create -f qcow2 $diskImage "${toString size}M"
+
+    mkdir $out/nix-support
+    echo "${fullName}" > $out/nix-support/full-name
+  '';
+
+
+  defaultCreateRootFS = ''
+    mkdir /mnt
+    ${e2fsprogs}/bin/mkfs.ext4 /dev/${hd}
+    ${util-linux}/bin/mount -t ext4 /dev/${hd} /mnt
+
+    if test -e /mnt/.debug; then
+      exec ${bash}/bin/sh
+    fi
+    touch /mnt/.debug
+
+    mkdir /mnt/proc /mnt/dev /mnt/sys
+  '';
+
+
+  /* Run a derivation in a Linux virtual machine (using Qemu/KVM).  By
+     default, there is no disk image; the root filesystem is a tmpfs,
+     and the nix store is shared with the host (via the 9P protocol).
+     Thus, any pure Nix derivation should run unmodified, e.g. the
+     call
+
+       runInLinuxVM patchelf
+
+     will build the derivation `patchelf' inside a VM.  The attribute
+     `preVM' can optionally contain a shell command to be evaluated
+     *before* the VM is started (i.e., on the host).  The attribute
+     `memSize' specifies the memory size of the VM in megabytes,
+     defaulting to 512.  The attribute `diskImage' can optionally
+     specify a file system image to be attached to /dev/vda.  (Note
+     that currently we expect the image to contain a filesystem, not a
+     full disk image with a partition table etc.)
+
+     If the build fails and Nix is run with the `-K' option, a script
+     `run-vm' will be left behind in the temporary build directory
+     that allows you to boot into the VM and debug it interactively. */
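+
+  /* For example (a sketch), extra attributes such as `memSize' can be
+     supplied on the derivation being wrapped:
+
+       runInLinuxVM (patchelf.overrideAttrs (_: { memSize = 1024; }))
+  */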
+
+  runInLinuxVM = drv: lib.overrideDerivation drv ({ memSize ? 512, QEMU_OPTS ? "", args, builder, ... }: {
+    requiredSystemFeatures = [ "kvm" ];
+    builder = "${bash}/bin/sh";
+    args = ["-e" (vmRunCommand qemuCommandLinux)];
+    origArgs = args;
+    origBuilder = builder;
+    QEMU_OPTS = "${QEMU_OPTS} -m ${toString memSize}";
+    passAsFile = []; # HACK fix - see https://github.com/NixOS/nixpkgs/issues/16742
+  });
+
+
+  extractFs = {file, fs ? null} :
+    with pkgs; runInLinuxVM (
+    stdenv.mkDerivation {
+      name = "extract-file";
+      buildInputs = [ util-linux ];
+      buildCommand = ''
+        ln -s ${kernel}/lib /lib
+        ${kmod}/bin/modprobe loop
+        ${kmod}/bin/modprobe ext4
+        ${kmod}/bin/modprobe hfs
+        ${kmod}/bin/modprobe hfsplus
+        ${kmod}/bin/modprobe squashfs
+        ${kmod}/bin/modprobe iso9660
+        ${kmod}/bin/modprobe ufs
+        ${kmod}/bin/modprobe cramfs
+
+        mkdir -p $out
+        mkdir -p tmp
+        mount -o loop,ro,ufstype=44bsd ${lib.optionalString (fs != null) "-t ${fs} "}${file} tmp ||
+          mount -o loop,ro ${lib.optionalString (fs != null) "-t ${fs} "}${file} tmp
+        cp -Rv tmp/* $out/ || exit 0
+      '';
+    });
+
+
+  extractMTDfs = {file, fs ? null} :
+    with pkgs; runInLinuxVM (
+    stdenv.mkDerivation {
+      name = "extract-file-mtd";
+      buildInputs = [ util-linux mtdutils ];
+      buildCommand = ''
+        ln -s ${kernel}/lib /lib
+        ${kmod}/bin/modprobe mtd
+        ${kmod}/bin/modprobe mtdram total_size=131072
+        ${kmod}/bin/modprobe mtdchar
+        ${kmod}/bin/modprobe mtdblock
+        ${kmod}/bin/modprobe jffs2
+        ${kmod}/bin/modprobe zlib
+
+        mkdir -p $out
+        mkdir -p tmp
+
+        dd if=${file} of=/dev/mtd0
+        mount ${lib.optionalString (fs != null) "-t ${fs} "}/dev/mtdblock0 tmp
+
+        cp -R tmp/* $out/
+      '';
+    });
+
+
+  /* Like runInLinuxVM, but run the build not using the stdenv from
+     the Nix store, but using the tools provided by /bin, /usr/bin
+     etc. from the specified filesystem image, which typically is a
+     filesystem containing a non-NixOS Linux distribution. */
+
+  runInLinuxImage = drv: runInLinuxVM (lib.overrideDerivation drv (attrs: {
+    mountDisk = true;
+
+    /* Mount `image' as the root FS, but use a temporary copy-on-write
+       image since we don't want to (and can't) write to `image'. */
+    preVM = ''
+      diskImage=$(pwd)/disk-image.qcow2
+      origImage=${attrs.diskImage}
+      if test -d "$origImage"; then origImage="$origImage/disk-image.qcow2"; fi
+      ${qemu}/bin/qemu-img create -b "$origImage" -f qcow2 $diskImage
+    '';
+
+    /* Inside the VM, run the stdenv setup script normally, but at the
+       very end set $PATH and $SHELL to the `native' paths for the
+       distribution inside the VM. */
+    postHook = ''
+      PATH=/usr/bin:/bin:/usr/sbin:/sbin
+      SHELL=/bin/sh
+      eval "$origPostHook"
+    '';
+
+    origPostHook = if attrs ? postHook then attrs.postHook else "";
+
+    /* Don't run Nix-specific build steps like patchelf. */
+    fixupPhase = "true";
+  }));
+
+
+  /* Create a filesystem image of the specified size and fill it with
+     a set of RPM packages. */
+
+  fillDiskWithRPMs =
+    { size ? 4096, rpms, name, fullName, preInstall ? "", postInstall ? ""
+    , runScripts ? true, createRootFS ? defaultCreateRootFS
+    , QEMU_OPTS ? "", memSize ? 512
+    , unifiedSystemDir ? false
+    }:
+
+    runInLinuxVM (stdenv.mkDerivation {
+      inherit name preInstall postInstall rpms QEMU_OPTS memSize;
+      preVM = createEmptyImage {inherit size fullName;};
+
+      buildCommand = ''
+        ${createRootFS}
+
+        chroot=$(type -tP chroot)
+
+        # Make the Nix store available in /mnt, because that's where the RPMs live.
+        mkdir -p /mnt${storeDir}
+        ${util-linux}/bin/mount -o bind ${storeDir} /mnt${storeDir}
+
+        # Newer distributions like Fedora 18 require /lib etc. to be
+        # symlinked to /usr.
+        ${lib.optionalString unifiedSystemDir ''
+          mkdir -p /mnt/usr/bin /mnt/usr/sbin /mnt/usr/lib /mnt/usr/lib64
+          ln -s /usr/bin /mnt/bin
+          ln -s /usr/sbin /mnt/sbin
+          ln -s /usr/lib /mnt/lib
+          ln -s /usr/lib64 /mnt/lib64
+          ${util-linux}/bin/mount -t proc none /mnt/proc
+        ''}
+
+        echo "unpacking RPMs..."
+        set +o pipefail
+        for i in $rpms; do
+            echo "$i..."
+            ${rpm}/bin/rpm2cpio "$i" | chroot /mnt ${cpio}/bin/cpio -i --make-directories --unconditional
+        done
+
+        eval "$preInstall"
+
+        echo "initialising RPM DB..."
+        PATH=/usr/bin:/bin:/usr/sbin:/sbin $chroot /mnt \
+          ldconfig -v || true
+        PATH=/usr/bin:/bin:/usr/sbin:/sbin $chroot /mnt \
+          rpm --initdb
+
+        ${util-linux}/bin/mount -o bind /tmp /mnt/tmp
+
+        echo "installing RPMs..."
+        PATH=/usr/bin:/bin:/usr/sbin:/sbin $chroot /mnt \
+          rpm -iv --nosignature ${if runScripts then "" else "--noscripts"} $rpms
+
+        echo "running post-install script..."
+        eval "$postInstall"
+
+        rm /mnt/.debug
+
+        ${util-linux}/bin/umount /mnt${storeDir} /mnt/tmp ${lib.optionalString unifiedSystemDir "/mnt/proc"}
+        ${util-linux}/bin/umount /mnt
+      '';
+
+      passthru = { inherit fullName; };
+    });
+
+
+  /* Generate a script that can be used to run an interactive session
+     in the given image. */
+
+  makeImageTestScript = image: writeScript "image-test" ''
+    #! ${bash}/bin/sh
+    if test -z "$1"; then
+      echo "Syntax: $0 <copy-on-write-temp-file>"
+      exit 1
+    fi
+    diskImage="$1"
+    if ! test -e "$diskImage"; then
+      ${qemu}/bin/qemu-img create -b ${image}/disk-image.qcow2 -f qcow2 "$diskImage"
+    fi
+    export TMPDIR=$(mktemp -d)
+    export out=/dummy
+    export origBuilder=
+    export origArgs=
+    mkdir $TMPDIR/xchg
+    export > $TMPDIR/xchg/saved-env
+    mountDisk=1
+    ${qemuCommandLinux}
+  '';
+
+
+  /* Build RPM packages from the tarball `src' in the Linux
+     distribution installed in the filesystem `diskImage'.  The
+     tarball must contain an RPM specfile. */
+
+  buildRPM = attrs: runInLinuxImage (stdenv.mkDerivation ({
+    phases = "prepareImagePhase sysInfoPhase buildPhase installPhase";
+
+    outDir = "rpms/${attrs.diskImage.name}";
+
+    prepareImagePhase = ''
+      if test -n "$extraRPMs"; then
+        for rpmdir in $extraRPMs ; do
+          rpm -iv $(ls $rpmdir/rpms/*/*.rpm | grep -v 'src\.rpm' | sort | head -1)
+        done
+      fi
+    '';
+
+    sysInfoPhase = ''
+      echo "System/kernel: $(uname -a)"
+      if test -e /etc/fedora-release; then echo "Fedora release: $(cat /etc/fedora-release)"; fi
+      if test -e /etc/SuSE-release; then echo "SUSE release: $(cat /etc/SuSE-release)"; fi
+      header "installed RPM packages"
+      rpm -qa --qf "%{Name}-%{Version}-%{Release} (%{Arch}; %{Distribution}; %{Vendor})\n"
+      stopNest
+    '';
+
+    buildPhase = ''
+      eval "$preBuild"
+
+      # Hacky: RPM looks for <basename>.spec inside the tarball, so
+      # strip off the hash.
+      srcName="$(stripHash "$src")"
+      cp "$src" "$srcName" # `ln' doesn't work always work: RPM requires that the file is owned by root
+
+      export HOME=/tmp/home
+      mkdir $HOME
+
+      rpmout=/tmp/rpmout
+      mkdir $rpmout $rpmout/SPECS $rpmout/BUILD $rpmout/RPMS $rpmout/SRPMS
+
+      echo "%_topdir $rpmout" >> $HOME/.rpmmacros
+
+      if [ `uname -m` = i686 ]; then extra="--target i686-linux"; fi
+      rpmbuild -vv $extra -ta "$srcName"
+
+      eval "$postBuild"
+    '';
+
+    installPhase = ''
+      eval "$preInstall"
+
+      mkdir -p $out/$outDir
+      find $rpmout -name "*.rpm" -exec cp {} $out/$outDir \;
+
+      for i in $out/$outDir/*.rpm; do
+        header "Generated RPM/SRPM: $i"
+        rpm -qip $i
+        stopNest
+      done
+
+      eval "$postInstall"
+    ''; # */
+  } // attrs));
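+
+  /* Example (a sketch; `./my-tool.tar.gz' is a hypothetical tarball that
+     contains an RPM specfile):
+
+       buildRPM {
+         name = "my-tool-rpm";
+         src = ./my-tool.tar.gz;
+         diskImage = diskImages.fedora27x86_64;
+       }
+  */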
+
+
+  /* Create a filesystem image of the specified size and fill it with
+     a set of Debian packages.  `debs' must be a list of lists of
+     .deb files, namely, the Debian packages grouped together into
+     strongly connected components.  See deb/deb-closure.nix. */
+
+  fillDiskWithDebs =
+    { size ? 4096, debs, name, fullName, postInstall ? null, createRootFS ? defaultCreateRootFS
+    , QEMU_OPTS ? "", memSize ? 512 }:
+
+    runInLinuxVM (stdenv.mkDerivation {
+      inherit name postInstall QEMU_OPTS memSize;
+
+      debs = (lib.intersperse "|" debs);
+
+      preVM = createEmptyImage {inherit size fullName;};
+
+      buildCommand = ''
+        ${createRootFS}
+
+        PATH=$PATH:${lib.makeBinPath [ dpkg glibc xz ]}
+
+        # Unpack the .debs.  We do this to prevent pre-install scripts
+        # (which have lots of circular dependencies) from barfing.
+        echo "unpacking Debs..."
+
+        for deb in $debs; do
+          if test "$deb" != "|"; then
+            echo "$deb..."
+            dpkg-deb --extract "$deb" /mnt
+          fi
+        done
+
+        # Make the Nix store available in /mnt, because that's where the .debs live.
+        mkdir -p /mnt/inst${storeDir}
+        ${util-linux}/bin/mount -o bind ${storeDir} /mnt/inst${storeDir}
+        ${util-linux}/bin/mount -o bind /proc /mnt/proc
+        ${util-linux}/bin/mount -o bind /dev /mnt/dev
+
+        # Misc. files/directories assumed by various packages.
+        echo "initialising Dpkg DB..."
+        touch /mnt/etc/shells
+        touch /mnt/var/lib/dpkg/status
+        touch /mnt/var/lib/dpkg/available
+        touch /mnt/var/lib/dpkg/diversions
+
+        # Now install the .debs.  This is basically just to register
+        # them with dpkg and to make their pre/post-install scripts
+        # run.
+        echo "installing Debs..."
+
+        export DEBIAN_FRONTEND=noninteractive
+
+        oldIFS="$IFS"
+        IFS="|"
+        for component in $debs; do
+          IFS="$oldIFS"
+          echo
+          echo ">>> INSTALLING COMPONENT: $component"
+          debs=
+          for i in $component; do
+            debs="$debs /inst/$i";
+          done
+          chroot=$(type -tP chroot)
+
+          # Create a fake start-stop-daemon script, as done in debootstrap.
+          mv "/mnt/sbin/start-stop-daemon" "/mnt/sbin/start-stop-daemon.REAL"
+          echo "#!/bin/true" > "/mnt/sbin/start-stop-daemon"
+          chmod 755 "/mnt/sbin/start-stop-daemon"
+
+          PATH=/usr/bin:/bin:/usr/sbin:/sbin $chroot /mnt \
+            /usr/bin/dpkg --install --force-all $debs < /dev/null || true
+
+          # Move the real start-stop-daemon back into its place.
+          mv "/mnt/sbin/start-stop-daemon.REAL" "/mnt/sbin/start-stop-daemon"
+        done
+
+        echo "running post-install script..."
+        eval "$postInstall"
+        ln -sf dash /mnt/bin/sh
+
+        rm /mnt/.debug
+
+        ${util-linux}/bin/umount /mnt/inst${storeDir}
+        ${util-linux}/bin/umount /mnt/proc
+        ${util-linux}/bin/umount /mnt/dev
+        ${util-linux}/bin/umount /mnt
+      '';
+
+      passthru = { inherit fullName; };
+    });
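+
+  /* Example (a sketch; in practice the `debs' argument is computed by
+     `debClosureGenerator' below, via `makeImageFromDebDist'):
+
+       fillDiskWithDebs {
+         name = "deb-image";
+         fullName = "Debian image";
+         size = 2048; # megabytes
+         debs = [ [ ./base-passwd.deb ./dpkg.deb ] ]; # hypothetical SCCs
+       }
+  */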
+
+
+  /* Generate a Nix expression containing fetchurl calls for the
+     closure of a set of top-level RPM packages from the
+     `primary.xml.gz' file of a Fedora or openSUSE distribution. */
+
+  rpmClosureGenerator =
+    {name, packagesLists, urlPrefixes, packages, archs ? []}:
+    assert (builtins.length packagesLists) == (builtins.length urlPrefixes);
+    runCommand "${name}.nix" {buildInputs = [perl perlPackages.XMLSimple]; inherit archs;} ''
+      ${lib.concatImapStrings (i: pl: ''
+        gunzip < ${pl} > ./packages_${toString i}.xml
+      '') packagesLists}
+      perl -w ${rpm/rpm-closure.pl} \
+        ${lib.concatImapStrings (i: pl: "./packages_${toString i}.xml ${pl.snd} " ) (lib.zipLists packagesLists urlPrefixes)} \
+        ${toString packages} > $out
+    '';
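+
+  /* Example (a sketch, reusing the `fedora27x86_64' attrs defined below):
+
+       import (rpmClosureGenerator {
+         name = "fedora-packages";
+         packagesLists = [ rpmDistros.fedora27x86_64.packagesList ];
+         urlPrefixes = [ rpmDistros.fedora27x86_64.urlPrefix ];
+         packages = [ "bash" ];
+         archs = [ "noarch" "x86_64" ];
+       }) { inherit fetchurl; }
+  */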
+
+
+  /* Helper function that combines rpmClosureGenerator and
+     fillDiskWithRPMs to generate a disk image from a set of package
+     names. */
+
+  makeImageFromRPMDist =
+    { name, fullName, size ? 4096
+    , urlPrefix ? "", urlPrefixes ? [urlPrefix]
+    , packagesList ? "", packagesLists ? [packagesList]
+    , packages, extraPackages ? []
+    , preInstall ? "", postInstall ? "", archs ? ["noarch" "i386"]
+    , runScripts ? true, createRootFS ? defaultCreateRootFS
+    , QEMU_OPTS ? "", memSize ? 512
+    , unifiedSystemDir ? false }:
+
+    fillDiskWithRPMs {
+      inherit name fullName size preInstall postInstall runScripts createRootFS unifiedSystemDir QEMU_OPTS memSize;
+      rpms = import (rpmClosureGenerator {
+        inherit name packagesLists urlPrefixes archs;
+        packages = packages ++ extraPackages;
+      }) { inherit fetchurl; };
+    };
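+
+  /* Example (a sketch; this is exactly what `diskImageFuns' does below):
+
+       makeImageFromRPMDist (rpmDistros.fedora27x86_64 // { extraPackages = [ "strace" ]; })
+  */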
+
+
+  /* Like `rpmClosureGenerator', but now for Debian/Ubuntu releases
+     (i.e. generate a closure from a Packages.bz2 file). */
+
+  debClosureGenerator =
+    {name, packagesLists, urlPrefix, packages}:
+
+    runCommand "${name}.nix" { buildInputs = [ perl dpkg ]; } ''
+      for i in ${toString packagesLists}; do
+        echo "adding $i..."
+        case $i in
+          *.xz | *.lzma)
+            xz -d < $i >> ./Packages
+            ;;
+          *.bz2)
+            bunzip2 < $i >> ./Packages
+            ;;
+          *.gz)
+            gzip -dc < $i >> ./Packages
+            ;;
+        esac
+      done
+
+      # Work around this bug: http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=452279
+      sed -i ./Packages -e s/x86_64-linux-gnu/x86-64-linux-gnu/g
+
+      perl -w ${deb/deb-closure.pl} \
+        ./Packages ${urlPrefix} ${toString packages} > $out
+    '';
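+
+  /* Example (a sketch, using the Ubuntu attrs defined below):
+
+       import (debClosureGenerator {
+         name = "ubuntu-packages";
+         inherit (debDistros.ubuntu2004x86_64) packagesLists urlPrefix;
+         packages = [ "bash" ];
+       }) { inherit fetchurl; }
+  */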
+
+
+  /* Helper function that combines debClosureGenerator and
+     fillDiskWithDebs to generate a disk image from a set of package
+     names. */
+
+  makeImageFromDebDist =
+    { name, fullName, size ? 4096, urlPrefix
+    , packagesList ? "", packagesLists ? [packagesList]
+    , packages, extraPackages ? [], postInstall ? ""
+    , extraDebs ? [], createRootFS ? defaultCreateRootFS
+    , QEMU_OPTS ? "", memSize ? 512 }:
+
+    let
+      expr = debClosureGenerator {
+        inherit name packagesLists urlPrefix;
+        packages = packages ++ extraPackages;
+      };
+    in
+      (fillDiskWithDebs {
+        inherit name fullName size postInstall createRootFS QEMU_OPTS memSize;
+        debs = import expr {inherit fetchurl;} ++ extraDebs;
+      }) // {inherit expr;};
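+
+  /* Example (a sketch; `diskImageFuns' below wraps this in the same way):
+
+       makeImageFromDebDist (debDistros.ubuntu2004x86_64 // { extraPackages = [ "vim" ]; })
+  */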
+
+
+  /* The set of supported RPM-based distributions. */
+
+  rpmDistros = {
+
+    # Note: no i386 release for Fedora >= 26
+    fedora26x86_64 =
+      let version = "26";
+      in {
+        name = "fedora-${version}-x86_64";
+        fullName = "Fedora ${version} (x86_64)";
+        packagesList = fetchurl rec {
+          url = "mirror://fedora/linux/releases/${version}/Everything/x86_64/os/repodata/${sha256}-primary.xml.gz";
+          sha256 = "880055a50c05b20641530d09b23f64501a000b2f92fe252417c530178730a95e";
+        };
+        urlPrefix = "mirror://fedora/linux/releases/${version}/Everything/x86_64/os";
+        archs = ["noarch" "x86_64"];
+        packages = commonFedoraPackages ++ [ "cronie" "util-linux" ];
+        unifiedSystemDir = true;
+      };
+
+    fedora27x86_64 =
+      let version = "27";
+      in {
+        name = "fedora-${version}-x86_64";
+        fullName = "Fedora ${version} (x86_64)";
+        packagesList = fetchurl rec {
+          url = "mirror://fedora/linux/releases/${version}/Everything/x86_64/os/repodata/${sha256}-primary.xml.gz";
+          sha256 = "48986ce4583cd09825c6d437150314446f0f49fa1a1bd62dcfa1085295030fe9";
+        };
+        urlPrefix = "mirror://fedora/linux/releases/${version}/Everything/x86_64/os";
+        archs = ["noarch" "x86_64"];
+        packages = commonFedoraPackages ++ [ "cronie" "util-linux" ];
+        unifiedSystemDir = true;
+      };
+
+    centos6i386 =
+      let version = "6.9";
+      in rec {
+        name = "centos-${version}-i386";
+        fullName = "CentOS ${version} (i386)";
+        urlPrefix = "mirror://centos/${version}/os/i386";
+        packagesList = fetchurl rec {
+          url = "${urlPrefix}/repodata/${sha256}-primary.xml.gz";
+          sha256 = "b826a45082ef68340325c0855f3d2e5d5a4d0f77d28ba3b871791d6f14a97aeb";
+        };
+        archs = ["noarch" "i386"];
+        packages = commonCentOSPackages ++ [ "procps" ];
+      };
+
+    centos6x86_64 =
+      let version = "6.9";
+      in rec {
+        name = "centos-${version}-x86_64";
+        fullName = "CentOS ${version} (x86_64)";
+        urlPrefix = "mirror://centos/${version}/os/x86_64";
+        packagesList = fetchurl rec {
+          url = "${urlPrefix}/repodata/${sha256}-primary.xml.gz";
+          sha256 = "ed2b2d4ac98d774d4cd3e91467e1532f7e8b0275cfc91a0d214b532dcaf1e979";
+        };
+        archs = ["noarch" "x86_64"];
+        packages = commonCentOSPackages ++ [ "procps" ];
+      };
+
+    # Note: no i386 release for 7.x
+    centos7x86_64 =
+      let version = "7.4.1708";
+      in rec {
+        name = "centos-${version}-x86_64";
+        fullName = "CentOS ${version} (x86_64)";
+        urlPrefix = "mirror://centos/${version}/os/x86_64";
+        packagesList = fetchurl rec {
+          url = "${urlPrefix}/repodata/${sha256}-primary.xml.gz";
+          sha256 = "b686d3a0f337323e656d9387b9a76ce6808b26255fc3a138b1a87d3b1cb95ed5";
+        };
+        archs = ["noarch" "x86_64"];
+        packages = commonCentOSPackages ++ [ "procps-ng" ];
+      };
+  };
+
+
+  /* The set of supported Dpkg-based distributions. */
+
+  debDistros = {
+    ubuntu1404i386 = {
+      name = "ubuntu-14.04-trusty-i386";
+      fullName = "Ubuntu 14.04 Trusty (i386)";
+      packagesLists =
+        [ (fetchurl {
+            url = "mirror://ubuntu/dists/trusty/main/binary-i386/Packages.bz2";
+            sha256 = "1d5y3v3v079gdq45hc07ja0bjlmzqfwdwwlq0brwxi8m75k3iz7x";
+          })
+          (fetchurl {
+            url = "mirror://ubuntu/dists/trusty/universe/binary-i386/Packages.bz2";
+            sha256 = "03x9w92by320rfklrqhcl3qpwmnxds9c8ijl5zhcb21d6dcz5z1a";
+          })
+        ];
+      urlPrefix = "mirror://ubuntu";
+      packages = commonDebPackages ++ [ "diffutils" "libc-bin" ];
+    };
+
+    ubuntu1404x86_64 = {
+      name = "ubuntu-14.04-trusty-amd64";
+      fullName = "Ubuntu 14.04 Trusty (amd64)";
+      packagesLists =
+        [ (fetchurl {
+            url = "mirror://ubuntu/dists/trusty/main/binary-amd64/Packages.bz2";
+            sha256 = "1hhzbyqfr5i0swahwnl5gfp5l9p9hspywb1vpihr3b74p1z935bh";
+          })
+          (fetchurl {
+            url = "mirror://ubuntu/dists/trusty/universe/binary-amd64/Packages.bz2";
+            sha256 = "04560ba8s4z4v5iawknagrkn9q1nzvpn081ycmqvhh73p3p3g1jm";
+          })
+        ];
+      urlPrefix = "mirror://ubuntu";
+      packages = commonDebPackages ++ [ "diffutils" "libc-bin" ];
+    };
+
+    ubuntu1604i386 = {
+      name = "ubuntu-16.04-xenial-i386";
+      fullName = "Ubuntu 16.04 Xenial (i386)";
+      packagesLists =
+        [ (fetchurl {
+            url = "mirror://ubuntu/dists/xenial/main/binary-i386/Packages.xz";
+            sha256 = "13r75sp4slqy8w32y5dnr7pp7p3cfvavyr1g7gwnlkyrq4zx4ahy";
+          })
+          (fetchurl {
+            url = "mirror://ubuntu/dists/xenial/universe/binary-i386/Packages.xz";
+            sha256 = "14fid1rqm3sc0wlygcvn0yx5aljf51c2jpd4x0zxij4019316hsh";
+          })
+        ];
+      urlPrefix = "mirror://ubuntu";
+      packages = commonDebPackages ++ [ "diffutils" "libc-bin" ];
+    };
+
+    ubuntu1604x86_64 = {
+      name = "ubuntu-16.04-xenial-amd64";
+      fullName = "Ubuntu 16.04 Xenial (amd64)";
+      packagesLists =
+        [ (fetchurl {
+            url = "mirror://ubuntu/dists/xenial/main/binary-amd64/Packages.xz";
+            sha256 = "110qnkhjkkwm316fbig3aivm2595ydz6zskc4ld5cr8ngcrqm1bn";
+          })
+          (fetchurl {
+            url = "mirror://ubuntu/dists/xenial/universe/binary-amd64/Packages.xz";
+            sha256 = "0mm7gj491yi6q4v0n4qkbsm94s59bvqir6fk60j73w7y4la8rg68";
+          })
+        ];
+      urlPrefix = "mirror://ubuntu";
+      packages = commonDebPackages ++ [ "diffutils" "libc-bin" ];
+    };
+
+    ubuntu1804i386 = {
+      name = "ubuntu-18.04-bionic-i386";
+      fullName = "Ubuntu 18.04 Bionic (i386)";
+      packagesLists =
+        [ (fetchurl {
+            url = "mirror://ubuntu/dists/bionic/main/binary-i386/Packages.xz";
+            sha256 = "0f0v4131kwf7m7f8j3288rlqdxk1k3vqy74b7fcfd6jz9j8d840i";
+          })
+          (fetchurl {
+            url = "mirror://ubuntu/dists/bionic/universe/binary-i386/Packages.xz";
+            sha256 = "1v75c0dqr0wp0dqd4hnci92qqs4hll8frqdbpswadgxm5chn91bw";
+          })
+        ];
+      urlPrefix = "mirror://ubuntu";
+      packages = commonDebPackages ++ [ "diffutils" "libc-bin" ];
+    };
+
+    ubuntu1804x86_64 = {
+      name = "ubuntu-18.04-bionic-amd64";
+      fullName = "Ubuntu 18.04 Bionic (amd64)";
+      packagesLists =
+        [ (fetchurl {
+            url = "mirror://ubuntu/dists/bionic/main/binary-amd64/Packages.xz";
+            sha256 = "1ls81bjyvmfz6i919kszl7xks1ibrh1xqhsk6698ackndkm0wp39";
+          })
+          (fetchurl {
+            url = "mirror://ubuntu/dists/bionic/universe/binary-amd64/Packages.xz";
+            sha256 = "1832nqpn4ap95b3sj870xqayrza9in4kih9jkmjax27pq6x15v1r";
+          })
+        ];
+      urlPrefix = "mirror://ubuntu";
+      packages = commonDebPackages ++ [ "diffutils" "libc-bin" ];
+    };
+
+    ubuntu2004i386 = {
+      name = "ubuntu-20.04-focal-i386";
+      fullName = "Ubuntu 20.04 Focal (i386)";
+      packagesLists =
+        [ (fetchurl {
+            url = "mirror://ubuntu/dists/focal/main/binary-i386/Packages.xz";
+            sha256 = "sha256-7RAYURoN3RKYQAHpwBS9TIV6vCmpURpphyMJQmV4wLc=";
+          })
+          (fetchurl {
+            url = "mirror://ubuntu/dists/focal/universe/binary-i386/Packages.xz";
+            sha256 = "sha256-oA551xVE80volUPgkMyvzpQ1d+GhuZd4DAe7dXZnULM=";
+          })
+        ];
+      urlPrefix = "mirror://ubuntu";
+      packages = commonDebPackages ++ [ "diffutils" "libc-bin" ];
+    };
+
+    ubuntu2004x86_64 = {
+      name = "ubuntu-20.04-focal-amd64";
+      fullName = "Ubuntu 20.04 Focal (amd64)";
+      packagesLists =
+        [ (fetchurl {
+            url = "mirror://ubuntu/dists/focal/main/binary-amd64/Packages.xz";
+            sha256 = "sha256-d1eSH/j+7Zw5NKDJk21EG6SiOL7j6myMHfXLzUP8mGE=";
+          })
+          (fetchurl {
+            url = "mirror://ubuntu/dists/focal/universe/binary-amd64/Packages.xz";
+            sha256 = "sha256-RqdG2seJvZU3rKVNsWgLnf9RwkgVMRE1A4IZnX2WudE=";
+          })
+        ];
+      urlPrefix = "mirror://ubuntu";
+      packages = commonDebPackages ++ [ "diffutils" "libc-bin" ];
+    };
+
+    debian9i386 = {
+      name = "debian-9.13-stretch-i386";
+      fullName = "Debian 9.13 Stretch (i386)";
+      packagesList = fetchurl {
+        url = "https://snapshot.debian.org/archive/debian/20210526T143040Z/dists/stretch/main/binary-i386/Packages.xz";
+        sha256 = "sha256-fFRumd20wuVaYxzw0VPkAw5mQo8kIg+eXII15VSz9wA=";
+      };
+      urlPrefix = "mirror://debian";
+      packages = commonDebianPackages;
+    };
+
+    debian9x86_64 = {
+      name = "debian-9.13-stretch-amd64";
+      fullName = "Debian 9.13 Stretch (amd64)";
+      packagesList = fetchurl {
+        url = "https://snapshot.debian.org/archive/debian/20210526T143040Z/dists/stretch/main/binary-amd64/Packages.xz";
+        sha256 = "sha256-1p4DEVpTGlBE3PtbQ90kYw4QNHkW0F4rna/Xz+ncMhw=";
+      };
+      urlPrefix = "mirror://debian";
+      packages = commonDebianPackages;
+    };
+
+    debian10i386 = {
+      name = "debian-10.9-buster-i386";
+      fullName = "Debian 10.9 Buster (i386)";
+      packagesList = fetchurl {
+        url = "https://snapshot.debian.org/archive/debian/20210526T143040Z/dists/buster/main/binary-i386/Packages.xz";
+        sha256 = "sha256-zlkbKV+IGBCyWKD4v4LFM/EUA4TYS9fkLBPuF6MgUDo=";
+      };
+      urlPrefix = "mirror://debian";
+      packages = commonDebianPackages;
+    };
+
+    debian10x86_64 = {
+      name = "debian-10.9-buster-amd64";
+      fullName = "Debian 10.9 Buster (amd64)";
+      packagesList = fetchurl {
+        url = "https://snapshot.debian.org/archive/debian/20210526T143040Z/dists/buster/main/binary-amd64/Packages.xz";
+        sha256 = "sha256-k13toY1b3CX7GBPQ7Jm24OMqCEsgPlGK8M99x57o69o=";
+      };
+      urlPrefix = "mirror://debian";
+      packages = commonDebianPackages;
+    };
+  };
+
+
+  /* Common packages for Fedora images. */
+  commonFedoraPackages = [
+    "autoconf"
+    "automake"
+    "basesystem"
+    "bzip2"
+    "curl"
+    "diffutils"
+    "fedora-release"
+    "findutils"
+    "gawk"
+    "gcc-c++"
+    "gzip"
+    "make"
+    "patch"
+    "perl"
+    "pkgconf-pkg-config"
+    "rpm"
+    "rpm-build"
+    "tar"
+    "unzip"
+  ];
+
+  commonCentOSPackages = [
+    "autoconf"
+    "automake"
+    "basesystem"
+    "bzip2"
+    "curl"
+    "diffutils"
+    "centos-release"
+    "findutils"
+    "gawk"
+    "gcc-c++"
+    "gzip"
+    "make"
+    "patch"
+    "perl"
+    "pkgconfig"
+    "rpm"
+    "rpm-build"
+    "tar"
+    "unzip"
+  ];
+
+  commonRHELPackages = [
+    "autoconf"
+    "automake"
+    "basesystem"
+    "bzip2"
+    "curl"
+    "diffutils"
+    "findutils"
+    "gawk"
+    "gcc-c++"
+    "gzip"
+    "make"
+    "patch"
+    "perl"
+    "pkgconfig"
+    "procps-ng"
+    "rpm"
+    "rpm-build"
+    "tar"
+    "unzip"
+  ];
+
+  /* Common packages for openSUSE images. */
+  commonOpenSUSEPackages = [
+    "aaa_base"
+    "autoconf"
+    "automake"
+    "bzip2"
+    "curl"
+    "diffutils"
+    "findutils"
+    "gawk"
+    "gcc-c++"
+    "gzip"
+    "make"
+    "patch"
+    "perl"
+    "pkg-config"
+    "rpm"
+    "tar"
+    "unzip"
+    "util-linux"
+    "gnu-getopt"
+  ];
+
+
+  /* Common packages for Debian/Ubuntu images. */
+  commonDebPackages = [
+    "base-passwd"
+    "dpkg"
+    "libc6-dev"
+    "perl"
+    "bash"
+    "dash"
+    "gzip"
+    "bzip2"
+    "tar"
+    "grep"
+    "mawk"
+    "sed"
+    "findutils"
+    "g++"
+    "make"
+    "curl"
+    "patch"
+    "locales"
+    "coreutils"
+    # Needed by checkinstall:
+    "util-linux"
+    "file"
+    "dpkg-dev"
+    "pkg-config"
+    # Needed because it provides /etc/login.defs, whose absence causes
+    # the "passwd" post-installs script to fail.
+    "login"
+    "passwd"
+  ];
+
+  commonDebianPackages = commonDebPackages ++ [ "sysvinit" "diff" ];
+
+
+  /* A set of functions that build the Linux distributions specified
+     in `rpmDistros' and `debDistros'.  For instance,
+     `diskImageFuns.ubuntu2004x86_64 { }' builds an Ubuntu 20.04 disk
+     image containing the default packages specified above.  Overrides
+     of the default image parameters can be given.  In particular,
+     `extraPackages' specifies the names of additional packages from
+     the distribution that should be included in the image; `packages'
+     allows the entire set of packages to be overridden; and `size'
+     sets the size of the disk in megabytes.  E.g.,
+     `diskImageFuns.ubuntu2004x86_64 { extraPackages = ["firefox"];
+     size = 8192; }' builds an 8 GiB image containing Firefox in
+     addition to the default packages. */
+  diskImageFuns =
+    (lib.mapAttrs (name: as: as2: makeImageFromRPMDist (as // as2)) rpmDistros) //
+    (lib.mapAttrs (name: as: as2: makeImageFromDebDist (as // as2)) debDistros);
+
+
+  /* Shorthand for `diskImageFuns.<attr> { extraPackages = ... }'. */
+  diskImageExtraFuns =
+    lib.mapAttrs (name: f: extraPackages: f { inherit extraPackages; }) diskImageFuns;
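+  # E.g. `diskImageExtraFuns.ubuntu2004x86_64 ["firefox"]' is shorthand for
+  # `diskImageFuns.ubuntu2004x86_64 { extraPackages = ["firefox"]; }'.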
+
+
+  /* Default disk images generated from the `rpmDistros' and
+     `debDistros' sets. */
+  diskImages = lib.mapAttrs (name: f: f {}) diskImageFuns;
+
+}
diff --git a/nixpkgs/pkgs/build-support/vm/rpm/rpm-closure.pl b/nixpkgs/pkgs/build-support/vm/rpm/rpm-closure.pl
new file mode 100644
index 000000000000..6442cd91a957
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/vm/rpm/rpm-closure.pl
@@ -0,0 +1,184 @@
+use strict;
+use XML::Simple;
+use List::Util qw(min);
+
+my @packagesFiles = ();
+my @urlPrefixes = ();
+
+# rpm-closure.pl (<package-file> <url-prefix>)+ <toplevel-pkg>+
+
+while(-f $ARGV[0]) {
+    my $packagesFile = shift @ARGV;
+    my $urlPrefix = shift @ARGV;
+    push(@packagesFiles, $packagesFile);
+    push(@urlPrefixes, $urlPrefix);
+}
+
+
+sub rpmvercmp {
+    my ($version1, $version2) = @_;
+    my @vercmps1 = split /\./, $version1;
+    my @vercmps2 = split /\./, $version2;
+    my $l1 = scalar(@vercmps1);
+    my $l2 = scalar(@vercmps2);
+    my $l = min($l1, $l2);
+
+    for(my $i=0; $i<$l; $i++) {
+        my $v1 = $vercmps1[$i];
+        my $v2 = $vercmps2[$i];
+
+        if($v1 =~ /^[0-9]*$/ && $v2 =~ /^[0-9]*$/) {
+            if ( int($v1) > int($v2) ) {
+                return 1;
+            }
+            elsif ( int($v1) < int($v2) ) {
+                return -1;
+            }
+        } else {
+            if ( $v1 gt $v2 ) {
+                return 1;
+            }
+            elsif ( $v1 lt $v2 ) {
+                return -1;
+            }
+        }
+    }
+    if($l1 == $l2) {
+        return 0;
+    } elsif ($l1 > $l2) {
+        return 1;
+    } elsif ($l1 < $l2) {
+        return -1;
+    }
+}
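+
+# E.g. rpmvercmp("1.10", "1.9") == 1 and rpmvercmp("2.0", "2.0.1") == -1.
+# Note this is a simplified comparison: unlike RPM's own rpmvercmp it only
+# splits on dots and ignores epochs and tilde ordering.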
+
+my @toplevelPkgs = @ARGV;
+
+my @archs = split ' ', ($ENV{'archs'} or "");
+
+my %pkgs;
+for (my $i = 0; $i < scalar(@packagesFiles); $i++) {
+    my $packagesFile = $packagesFiles[$i];
+    print STDERR "parsing packages in $packagesFile...\n";
+
+    my $xml = XMLin($packagesFile, ForceArray => ['package', 'rpm:entry', 'file'], KeyAttr => []) or die;
+
+    print STDERR "$packagesFile contains $xml->{packages} packages\n";
+
+    foreach my $pkg (@{$xml->{'package'}}) {
+        if (scalar @archs > 0) {
+            my $arch = $pkg->{arch};
+            my $found = 0;
+            foreach my $a (@archs) { $found = 1 if $arch eq $a; }
+            next if !$found;
+        }
+        if (defined $pkgs{$pkg->{name}}) {
+            my $earlierPkg = $pkgs{$pkg->{name}};
+            print STDERR "WARNING: duplicate occurrence of package $pkg->{name}\n";
+            #   <version epoch="0" ver="1.28.0" rel="2.el6"/>
+            my $cmp = rpmvercmp($pkg->{'version'}->{ver}, $earlierPkg->{'version'}->{ver});
+            if ($cmp > 0 || ($cmp == 0 && rpmvercmp($pkg->{'version'}->{rel}, $earlierPkg->{'version'}->{rel})>0)) {
+                print STDERR "WARNING: replaced package $pkg->{name} (".$earlierPkg->{'version'}->{ver}." ".$earlierPkg->{'version'}->{rel}.") with newer one (".$pkg->{'version'}->{ver}." ".$pkg->{'version'}->{rel}.")\n";
+                $pkg->{urlPrefix} = $urlPrefixes[$i];
+                $pkgs{$pkg->{name}} = $pkg;
+            }
+            next;
+        }
+        $pkg->{urlPrefix} = $urlPrefixes[$i];
+        $pkgs{$pkg->{name}} = $pkg;
+    }
+}
+
+my %provides;
+PKG: foreach my $pkgName (sort(keys %pkgs)) {
+    #print STDERR "looking at $pkgName\n";
+    my $pkg = $pkgs{$pkgName};
+
+    # Skip packages that conflict with a required package.
+    my $conflicts = $pkg->{format}->{'rpm:conflicts'}->{'rpm:entry'} // [];
+    foreach my $conflict (@{$conflicts}) {
+        next if ($conflict->{flags} // "") eq "LT" || ($conflict->{flags} // "") eq "LE";
+        #print STDERR "  $pkgName conflicts with $conflict->{name}\n";
+        if (grep { $_ eq $conflict->{name} } @toplevelPkgs) {
+            print STDERR "skipping package $pkgName because it conflicts with a required package\n";
+            next PKG;
+        }
+    }
+
+    my $provides = $pkg->{format}->{'rpm:provides'}->{'rpm:entry'} or die;
+    foreach my $req (@{$provides}) {
+        #print STDERR "  $pkgName provides $req->{name}\n";
+        #die "multiple provides for $req->{name}" if defined $provides{$req->{name}};
+        $provides{$req->{name}} = $pkgName;
+    }
+
+    if (defined $pkg->{format}->{file}) {
+        foreach my $file (@{$pkg->{format}->{file}}) {
+          #print STDERR "  provides file $file\n";
+          $provides{$file} = $pkgName;
+        }
+    }
+}
+
+
+my %donePkgs;
+my @needed = ();
+
+sub closePackage {
+    my $pkgName = shift;
+
+    return if defined $donePkgs{$pkgName};
+    $donePkgs{$pkgName} = 1;
+
+    print STDERR ">>> $pkgName\n";
+
+    my $pkg = $pkgs{$pkgName} or die "package $pkgName doesn't exist";
+
+    my $requires = $pkg->{format}->{'rpm:requires'}->{'rpm:entry'} || [];
+
+    my @deps = ();
+    foreach my $req (@{$requires}) {
+        next if $req->{name} =~ /^rpmlib\(/;
+        #print STDERR "  needs $req->{name}\n";
+        my $provider = $provides{$req->{name}};
+        if (!defined $provider) {
+            print STDERR "    WARNING: no provider for $req->{name}\n";
+            next;
+        }
+        #print STDERR "    satisfied by $provider\n";
+        push @deps, $provider;
+    }
+
+    closePackage($_) foreach @deps;
+
+    push @needed, $pkgName;
+}
+
+
+foreach my $pkgName (@toplevelPkgs) {
+    closePackage $pkgName;
+}
+
+
+# Generate the output Nix expression.
+print "# This is a generated file.  Do not modify!\n";
+print "# Following are the RPM packages constituting the closure of: @toplevelPkgs\n\n";
+print "{fetchurl}:\n\n";
+print "[\n\n";
+
+foreach my $pkgName (@needed) {
+    my $pkg = $pkgs{$pkgName};
+    print "  (fetchurl {\n";
+    print "    url = $pkg->{urlPrefix}/$pkg->{location}->{href};\n";
+    if ($pkg->{checksum}->{type} eq "sha") {
+        print "    sha1 = \"$pkg->{checksum}->{content}\";\n";
+    } elsif ($pkg->{checksum}->{type} eq "sha256") {
+        print "    sha256 = \"$pkg->{checksum}->{content}\";\n";
+    } else {
+        die "unsupported hash type";
+    }
+    print "  })\n";
+    print "\n";
+}
+
+print "]\n";
diff --git a/nixpkgs/pkgs/build-support/vm/test.nix b/nixpkgs/pkgs/build-support/vm/test.nix
new file mode 100644
index 000000000000..698503032671
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/vm/test.nix
@@ -0,0 +1,39 @@
+with import ../../.. {};
+with vmTools;
+
+{
+
+
+  # Run the PatchELF derivation in a VM.
+  buildPatchelfInVM = runInLinuxVM patchelf;
+
+  buildHelloInVM = runInLinuxVM hello;
+
+  buildPanInVM = runInLinuxVM pan;
+
+
+  testRPMImage = makeImageTestScript diskImages.fedora27x86_64;
+
+
+  buildPatchelfRPM = buildRPM {
+    name = "patchelf-rpm";
+    src = patchelf.src;
+    diskImage = diskImages.fedora27x86_64;
+  };
+
+
+  testUbuntuImage = makeImageTestScript diskImages.ubuntu1404i386;
+
+
+  buildInDebian = runInLinuxImage (stdenv.mkDerivation {
+    name = "deb-compile";
+    src = patchelf.src;
+    diskImage = diskImages.ubuntu1404i386;
+    memSize = 512;
+    phases = "sysInfoPhase unpackPhase patchPhase configurePhase buildPhase checkPhase installPhase fixupPhase distPhase";
+    sysInfoPhase = ''
+      dpkg-query --list
+    '';
+  });
+
+}
diff --git a/nixpkgs/pkgs/build-support/wrapper-common/utils.bash b/nixpkgs/pkgs/build-support/wrapper-common/utils.bash
new file mode 100644
index 000000000000..f773270f7de9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/wrapper-common/utils.bash
@@ -0,0 +1,131 @@
+# Accumulate suffixes for taking in the right input parameters with the `mangle*`
+# functions below. See setup-hook for details.
+accumulateRoles() {
+    declare -ga role_suffixes=()
+    if [ "${NIX_@wrapperName@_TARGET_BUILD_@suffixSalt@:-}" ]; then
+        role_suffixes+=('_FOR_BUILD')
+    fi
+    if [ "${NIX_@wrapperName@_TARGET_HOST_@suffixSalt@:-}" ]; then
+        role_suffixes+=('')
+    fi
+    if [ "${NIX_@wrapperName@_TARGET_TARGET_@suffixSalt@:-}" ]; then
+        role_suffixes+=('_FOR_TARGET')
+    fi
+}
+
+mangleVarListGeneric() {
+    local sep="$1"
+    shift
+    local var="$1"
+    shift
+    local -a role_suffixes=("$@")
+
+    local outputVar="${var}_@suffixSalt@"
+    declare -gx ${outputVar}+=''
+    # For each role we serve, we accumulate the input parameters into our own
+    # cc-wrapper-derivation-specific environment variables.
+    for suffix in "${role_suffixes[@]}"; do
+        local inputVar="${var}${suffix}"
+        if [ -v "$inputVar" ]; then
+            export ${outputVar}+="${!outputVar:+$sep}${!inputVar}"
+        fi
+    done
+}
+
+mangleVarList() {
+    mangleVarListGeneric " " "$@"
+}
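+
+# Example (hypothetical): after accumulateRoles has set
+# role_suffixes=('_FOR_BUILD' ''), calling
+#     mangleVarList NIX_CFLAGS_COMPILE "${role_suffixes[@]}"
+# concatenates $NIX_CFLAGS_COMPILE_FOR_BUILD and $NIX_CFLAGS_COMPILE
+# (space-separated) into NIX_CFLAGS_COMPILE_@suffixSalt@.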
+
+mangleVarBool() {
+    local var="$1"
+    shift
+    local -a role_suffixes=("$@")
+
+    local outputVar="${var}_@suffixSalt@"
+    declare -gxi ${outputVar}+=0
+    for suffix in "${role_suffixes[@]}"; do
+        local inputVar="${var}${suffix}"
+        if [ -v "$inputVar" ]; then
+            # "1" in the end makes `let` return success error code when
+            # expression itself evaluates to zero.
+            # We don't use `|| true` because that would silence actual
+            # syntax errors from bad variable values.
+            let "${outputVar} |= ${!inputVar:-0}" "1"
+        fi
+    done
+}
+
+# Combine a singular value from all roles. If multiple roles are being served,
+# and the value differs in these roles then the request is impossible to
+# satisfy and we abort immediately.
+mangleVarSingle() {
+    local var="$1"
+    shift
+    local -a role_suffixes=("$@")
+
+    local outputVar="${var}_@suffixSalt@"
+    for suffix in "${role_suffixes[@]}"; do
+        local inputVar="${var}${suffix}"
+        if [ -v "$inputVar" ]; then
+            if [ -v "$outputVar" ]; then
+                if [ "${!outputVar}" != "${!inputVar}" ]; then
+                    {
+                        echo "Multiple conflicting values defined for $outputVar"
+                        echo "Existing value is ${!outputVar}"
+                        echo "Attempting to set to ${!inputVar} via $inputVar"
+                    } >&2
+
+                    exit 1
+                fi
+            else
+                declare -gx ${outputVar}="${!inputVar}"
+            fi
+        fi
+    done
+}
+
+skip () {
+    if (( "${NIX_DEBUG:-0}" >= 1 )); then
+        echo "skipping impure path $1" >&2
+    fi
+}
+
+
+# Checks whether a path is impure.  E.g., `/lib/foo.so' is impure, but
+# `/nix/store/.../lib/foo.so' isn't.
+badPath() {
+    local p=$1
+
+    # Relative paths are okay (since they're presumably relative to
+    # the temporary build directory).
+    if [ "${p:0:1}" != / ]; then return 1; fi
+
+    # Otherwise, the path should refer to the store or some temporary
+    # directory (including the build directory).
+    test \
+        "$p" != "/dev/null" -a \
+        "${p#${NIX_STORE}}"     = "$p" -a \
+        "${p#${NIX_BUILD_TOP}}" = "$p" -a \
+        "${p#/tmp}"             = "$p" -a \
+        "${p#${TMP:-/tmp}}"     = "$p" -a \
+        "${p#${TMPDIR:-/tmp}}"  = "$p" -a \
+        "${p#${TEMP:-/tmp}}"    = "$p" -a \
+        "${p#${TEMPDIR:-/tmp}}" = "$p"
+}
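+
+# E.g. `badPath /usr/lib/libfoo.so' returns 0 (impure), while
+# `badPath "$NIX_STORE/...-foo/lib/libfoo.so"' and `badPath ./foo.o'
+# return 1.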
+
+expandResponseParams() {
+    declare -ga params=("$@")
+    local arg
+    for arg in "$@"; do
+        if [[ "$arg" == @* ]]; then
+            # phase separation makes this look useless
+            # shellcheck disable=SC2157
+            if [ -x "@expandResponseParams@" ]; then
+                # params is used by caller
+                #shellcheck disable=SC2034
+                readarray -d '' params < <("@expandResponseParams@" "$@")
+                return 0
+            fi
+        fi
+    done
+}
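+
+# Example (a sketch): `expandResponseParams -c @args.rsp' leaves the global
+# `params' array holding the fully expanded argument list, as emitted
+# (NUL-separated) by the external @expandResponseParams@ helper; when no
+# argument starts with `@', `params' is simply a copy of "$@".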
diff --git a/nixpkgs/pkgs/build-support/writers/default.nix b/nixpkgs/pkgs/build-support/writers/default.nix
new file mode 100644
index 000000000000..47919c251af1
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/writers/default.nix
@@ -0,0 +1,321 @@
+{ pkgs, lib, gawk, gnused, gixy }:
+
+with lib;
+rec {
+  # Base implementation for non-compiled executables.
+  # Takes an interpreter, for example `${pkgs.bash}/bin/bash`
+  #
+  # Examples:
+  #   writeBash = makeScriptWriter { interpreter = "${pkgs.bash}/bin/bash"; }
+  #   makeScriptWriter { interpreter = "${pkgs.dash}/bin/dash"; } "hello" "echo hello world"
+  makeScriptWriter = { interpreter, check ? "" }: nameOrPath: content:
+    assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null);
+    assert lib.or (types.path.check content) (types.str.check content);
+    let
+      name = last (builtins.split "/" nameOrPath);
+    in
+
+    pkgs.runCommandLocal name (if (types.str.check content) then {
+      inherit content interpreter;
+      passAsFile = [ "content" ];
+    } else {
+      inherit interpreter;
+      contentPath = content;
+    }) ''
+      # On darwin a script cannot be used as an interpreter in a shebang, but
+      # there doesn't seem to be a limit to the size of the shebang line, and
+      # multiple arguments to the interpreter are allowed.
+      if [[ -n "${toString pkgs.stdenvNoCC.isDarwin}" ]] && isScript $interpreter
+      then
+        wrapperInterpreterLine=$(head -1 "$interpreter" | tail -c+3)
+        # Get the first word from the line (note: xargs echo removes leading spaces)
+        wrapperInterpreter=$(echo "$wrapperInterpreterLine" | xargs echo | cut -d " " -f1)
+
+        if isScript $wrapperInterpreter
+        then
+          echo "error: passed interpreter ($interpreter) is a script which has another script ($wrapperInterpreter) as an interpreter, which is not supported."
+          exit 1
+        fi
+
+        # This should work as long as wrapperInterpreter is a shell, which is
+        # the case for programs wrapped with makeWrapper, like
+        # python3.withPackages etc.
+        interpreterLine="$wrapperInterpreterLine $interpreter"
+      else
+        interpreterLine=$interpreter
+      fi
+
+      echo "#! $interpreterLine" > $out
+      cat "$contentPath" >> $out
+      ${optionalString (check != "") ''
+        ${check} $out
+      ''}
+      chmod +x $out
+      ${optionalString (types.path.check nameOrPath) ''
+        mv $out tmp
+        mkdir -p $out/$(dirname "${nameOrPath}")
+        mv tmp $out/${nameOrPath}
+      ''}
+    '';
+
+  # Base implementation for compiled executables.
+  # Takes a compile script: a shell snippet that reads the source from
+  # $contentPath and writes the resulting binary to $out.
+  #
+  # Examples:
+  #   writeSimpleC = makeBinWriter { compileScript = "gcc -o $out -x c $contentPath"; }
+  makeBinWriter = { compileScript, strip ? true }: nameOrPath: content:
+    assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null);
+    assert lib.or (types.path.check content) (types.str.check content);
+    let
+      name = last (builtins.split "/" nameOrPath);
+    in
+    pkgs.runCommand name (if (types.str.check content) then {
+      inherit content;
+      passAsFile = [ "content" ];
+    } else {
+      contentPath = content;
+    }) ''
+      ${compileScript}
+      ${lib.optionalString strip
+         "${pkgs.binutils-unwrapped}/bin/strip --strip-unneeded $out"}
+      ${optionalString (types.path.check nameOrPath) ''
+        mv $out tmp
+        mkdir -p $out/$(dirname "${nameOrPath}")
+        mv tmp $out/${nameOrPath}
+      ''}
+    '';
+
+  # Like writeScript but the first line is a shebang to bash
+  #
+  # Example:
+  #   writeBash "example" ''
+  #     echo hello world
+  #   ''
+  writeBash = makeScriptWriter {
+    interpreter = "${pkgs.bash}/bin/bash";
+  };
+
+  # Like writeScriptBin but the first line is a shebang to bash
+  writeBashBin = name:
+    writeBash "/bin/${name}";
+
+  # writeC writes an executable C program called `name`, linked against the given `libraries`.
+  #
+  #  Examples:
+  #    writeC "hello-world-ncurses" { libraries = [ pkgs.ncurses ]; } ''
+  #      #include <ncurses.h>
+  #      int main() {
+  #        initscr();
+  #        printw("Hello World !!!");
+  #        refresh(); endwin();
+  #        return 0;
+  #      }
+  #    ''
+  writeC = name: {
+    libraries ? [],
+    strip ? true
+  }:
+    makeBinWriter {
+      compileScript = ''
+        PATH=${makeBinPath [
+          pkgs.binutils-unwrapped
+          pkgs.coreutils
+          pkgs.findutils
+          pkgs.gcc
+          pkgs.pkg-config
+        ]}
+        export PKG_CONFIG_PATH=${concatMapStringsSep ":" (pkg: "${pkg}/lib/pkgconfig") libraries}
+        gcc \
+            ${optionalString (libraries != [])
+              "$(pkg-config --cflags --libs ${
+                concatMapStringsSep " " (pkg: "$(find ${escapeShellArg pkg}/lib/pkgconfig -name \\*.pc)") libraries
+              })"
+            } \
+            -O \
+            -o "$out" \
+            -Wall \
+            -x c \
+            "$contentPath"
+      '';
+      inherit strip;
+    } name;
+
+  # writeCBin takes the same arguments as writeC but outputs a directory (like writeScriptBin)
+  writeCBin = name:
+    writeC "/bin/${name}";
+
+  # Like writeScript but the first line is a shebang to dash
+  #
+  # Example:
+  #   writeDash "example" ''
+  #     echo hello world
+  #   ''
+  writeDash = makeScriptWriter {
+    interpreter = "${pkgs.dash}/bin/dash";
+  };
+
+  # Like writeScriptBin but the first line is a shebang to dash
+  writeDashBin = name:
+    writeDash "/bin/${name}";
+
+  # writeHaskell takes a name, an attrset with libraries and ghc (both optional)
+  # and some haskell source code and returns an executable.
+  #
+  # Example:
+  #   writeHaskell "missiles" { libraries = [ pkgs.haskellPackages.acme-missiles ]; } ''
+  #     import Acme.Missiles
+  #
+  #     main = launchMissiles
+  #   '';
+  writeHaskell = name: {
+    libraries ? [],
+    ghc ? pkgs.ghc,
+    ghcArgs ? [],
+    strip ? true
+  }:
+    makeBinWriter {
+      compileScript = ''
+        cp $contentPath tmp.hs
+        ${ghc.withPackages (_: libraries )}/bin/ghc ${lib.escapeShellArgs ghcArgs} tmp.hs
+        mv tmp $out
+      '';
+      inherit strip;
+    } name;
+
+  # writeHaskellBin takes the same arguments as writeHaskell but outputs a directory (like writeScriptBin)
+  writeHaskellBin = name:
+    writeHaskell "/bin/${name}";
+
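+  # writeRust takes a name, an attrset with rustc, rustcArgs and strip (all
+  # optional) and some Rust source code and returns an executable.
+  #
+  # Example:
+  #   writeRust "hello-rust" {} ''
+  #     fn main() {
+  #       println!("hello world");
+  #     }
+  #   ''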
+  writeRust = name: {
+      rustc ? pkgs.rustc,
+      rustcArgs ? [],
+      strip ? true
+  }:
+    makeBinWriter {
+      compileScript = ''
+        cp "$contentPath" tmp.rs
+        PATH=${makeBinPath [pkgs.gcc]} ${lib.getBin rustc}/bin/rustc ${lib.escapeShellArgs rustcArgs} -o "$out" tmp.rs
+      '';
+      inherit strip;
+    } name;
+
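+  # writeRustBin takes the same arguments as writeRust but outputs a directory (like writeScriptBin)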
+  writeRustBin = name:
+    writeRust "/bin/${name}";
+
+  # writeJS takes a name, an attributeset with libraries, and some JavaScript
+  # source code, and returns an executable
+  #
+  # Example:
+  #   writeJS "example" { libraries = [ pkgs.nodePackages.uglify-js ]; } ''
+  #     var UglifyJS = require("uglify-js");
+  #     var code = "function add(first, second) { return first + second; }";
+  #     var result = UglifyJS.minify(code);
+  #     console.log(result.code);
+  #   ''
+  writeJS = name: { libraries ? [] }: content:
+  let
+    node-env = pkgs.buildEnv {
+      name = "node";
+      paths = libraries;
+      pathsToLink = [
+        "/lib/node_modules"
+      ];
+    };
+  in writeDash name ''
+    export NODE_PATH=${node-env}/lib/node_modules
+    exec ${pkgs.nodejs}/bin/node ${pkgs.writeText "js" content}
+  '';
+
+  # writeJSBin takes the same arguments as writeJS but outputs a directory (like writeScriptBin)
+  writeJSBin = name:
+    writeJS "/bin/${name}";
+
+  awkFormatNginx = builtins.toFile "awkFormat-nginx.awk" ''
+    {sub(/^[ \t]+/,"");idx=0}
+    /\{/{ctx++;idx=1}
+    /\}/{ctx--}
+    {id="";for(i=idx;i<ctx;i++)id=sprintf("%s%s", id, "\t");printf "%s%s\n", id, $0}
+  '';
+
+  writeNginxConfig = name: text: pkgs.runCommandLocal name {
+    inherit text;
+    passAsFile = [ "text" ];
+    nativeBuildInputs = [ gawk gnused gixy ];
+  } /* sh */ ''
+    # nginx-config-formatter has an error - https://github.com/1connect/nginx-config-formatter/issues/16
+    awk -f ${awkFormatNginx} "$textPath" | sed '/^\s*$/d' > $out
+    gixy $out
+  '';
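+
+  # Example (a sketch; the build fails if gixy flags the config):
+  #   writeNginxConfig "example.conf" ''
+  #     server {
+  #       listen 80;
+  #       return 301 https://$host$request_uri;
+  #     }
+  #   ''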
+
+  # writePerl takes a name, an attributeset with libraries, and some Perl
+  # source code, and returns an executable
+  #
+  # Example:
+  #   writePerl "example" { libraries = [ pkgs.perlPackages.boolean ]; } ''
+  #     use boolean;
+  #     print "Howdy!\n" if true;
+  #   ''
+  writePerl = name: { libraries ? [] }:
+    makeScriptWriter {
+      interpreter = "${pkgs.perl.withPackages (p: libraries)}/bin/perl";
+    } name;
+
+  # writePerlBin takes the same arguments as writePerl but outputs a directory (like writeScriptBin)
+  writePerlBin = name:
+    writePerl "/bin/${name}";
+
+  # makePythonWriter takes python and compatible pythonPackages and produces a python script writer,
+  # which validates the script with flake8 at build time. If any libraries are specified,
+  # python.withPackages is used as the interpreter, otherwise the "bare" python is used.
+  makePythonWriter = python: pythonPackages: name: { libraries ? [], flakeIgnore ? [] }:
+  let
+    ignoreAttribute = optionalString (flakeIgnore != []) "--ignore ${concatMapStringsSep "," escapeShellArg flakeIgnore}";
+  in
+  makeScriptWriter {
+    interpreter =
+      if libraries == []
+      then "${python}/bin/python"
+      else "${python.withPackages (ps: libraries)}/bin/python"
+    ;
+    check = writeDash "python2check.sh" ''
+      exec ${pythonPackages.flake8}/bin/flake8 --show-source ${ignoreAttribute} "$1"
+    '';
+  } name;
+
+  # writePython2 takes a name, an attributeset with libraries, and some
+  # python2 source code, and returns an executable
+  #
+  # Example:
+  # writePython2 "test_python2" { libraries = [ pkgs.python2Packages.enum ]; } ''
+  #   from enum import Enum
+  #
+  #   class Test(Enum):
+  #       a = "success"
+  #
+  #   print Test.a
+  # ''
+  writePython2 = makePythonWriter pkgs.python2 pkgs.python2Packages;
+
+  # writePython2Bin takes the same arguments as writePython2 but outputs a directory (like writeScriptBin)
+  writePython2Bin = name:
+    writePython2 "/bin/${name}";
+
+  # writePython3 takes a name, an attributeset with libraries, and some
+  # python3 source code, and returns an executable
+  #
+  # Example:
+  # writePython3 "test_python3" { libraries = [ pkgs.python3Packages.pyyaml ]; } ''
+  #   import yaml
+  #
+  #   y = yaml.load("""
+  #     - test: success
+  #   """)
+  #   print(y[0]['test'])
+  # ''
+  writePython3 = makePythonWriter pkgs.python3 pkgs.python3Packages;
+
+  # writePython3Bin takes the same arguments as writePython3 but outputs a directory (like writeScriptBin)
+  writePython3Bin = name:
+    writePython3 "/bin/${name}";
+}
diff --git a/nixpkgs/pkgs/build-support/writers/test.nix b/nixpkgs/pkgs/build-support/writers/test.nix
new file mode 100644
index 000000000000..d0824b17bd1b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/writers/test.nix
@@ -0,0 +1,206 @@
+{ glib
+, haskellPackages
+, lib
+, nodePackages
+, perlPackages
+, python2Packages
+, python3Packages
+, runCommand
+, writers
+, writeText
+}:
+with writers;
+let
+
+  bin = {
+    bash = writeBashBin "test-writers-bash-bin" ''
+     if [[ "test" == "test" ]]; then echo "success"; fi
+    '';
+
+    c = writeCBin "test-writers-c" { libraries = [ ]; } ''
+      #include <stdio.h>
+      int main() {
+        printf("success\n");
+        return 0;
+      }
+    '';
+
+    dash = writeDashBin "test-writers-dash-bin" ''
+     test '~' = '~' && echo 'success'
+    '';
+
+    rust = writeRustBin "test-writers-rust-bin" {} ''
+      fn main(){
+        println!("success")
+      }
+    '';
+
+    haskell = writeHaskellBin "test-writers-haskell-bin" { libraries = [ haskellPackages.acme-default ]; } ''
+      import Data.Default
+
+      int :: Int
+      int = def
+
+      main :: IO ()
+      main = case int of
+        18871 -> putStrLn $ id "success"
+        _ -> print "fail"
+    '';
+
+    js = writeJSBin "test-writers-js-bin" { libraries = [ nodePackages.semver ]; } ''
+      var semver = require('semver');
+
+      if (semver.valid('1.2.3')) {
+        console.log('success')
+      } else {
+        console.log('fail')
+      }
+    '';
+
+    perl = writePerlBin "test-writers-perl-bin" { libraries = [ perlPackages.boolean ]; } ''
+      use boolean;
+      print "success\n" if true;
+    '';
+
+    python2 = writePython2Bin "test-writers-python2-bin" { libraries = [ python2Packages.enum ]; } ''
+      from enum import Enum
+
+
+      class Test(Enum):
+          a = "success"
+
+
+      print Test.a
+    '';
+
+    python3 = writePython3Bin "test-writers-python3-bin" { libraries = [ python3Packages.pyyaml ]; } ''
+      import yaml
+
+      y = yaml.load("""
+        - test: success
+      """)
+      print(y[0]['test'])
+    '';
+  };
+
+  simple = {
+    bash = writeBash "test-writers-bash" ''
+     if [[ "test" == "test" ]]; then echo "success"; fi
+    '';
+
+    c = writeC "test-writers-c" { libraries = [ glib.dev ]; } ''
+      #include <gio/gio.h>
+      #include <stdio.h>
+      int main() {
+        GApplication *application = g_application_new ("hello.world", G_APPLICATION_FLAGS_NONE);
+        g_application_register (application, NULL, NULL);
+        GNotification *notification = g_notification_new ("Hello world!");
+        g_notification_set_body (notification, "This is an example notification.");
+        GIcon *icon = g_themed_icon_new ("dialog-information");
+        g_notification_set_icon (notification, icon);
+        g_object_unref (icon);
+        g_object_unref (notification);
+        g_object_unref (application);
+        printf("success\n");
+        return 0;
+      }
+    '';
+
+    dash = writeDash "test-writers-dash" ''
+     test '~' = '~' && echo 'success'
+    '';
+
+    haskell = writeHaskell "test-writers-haskell" { libraries = [ haskellPackages.acme-default ]; } ''
+      import Data.Default
+
+      int :: Int
+      int = def
+
+      main :: IO ()
+      main = case int of
+        18871 -> putStrLn $ id "success"
+        _ -> print "fail"
+    '';
+
+    js = writeJS "test-writers-js" { libraries = [ nodePackages.semver ]; } ''
+      var semver = require('semver');
+
+      if (semver.valid('1.2.3')) {
+        console.log('success')
+      } else {
+        console.log('fail')
+      }
+    '';
+
+    perl = writePerl "test-writers-perl" { libraries = [ perlPackages.boolean ]; } ''
+      use boolean;
+      print "success\n" if true;
+    '';
+
+    python2 = writePython2 "test-writers-python2" { libraries = [ python2Packages.enum ]; } ''
+      from enum import Enum
+
+
+      class Test(Enum):
+          a = "success"
+
+
+      print Test.a
+    '';
+
+    python3 = writePython3 "test-writers-python3" { libraries = [ python3Packages.pyyaml ]; } ''
+      import yaml
+
+      y = yaml.load("""
+        - test: success
+      """)
+      print(y[0]['test'])
+    '';
+
+    python2NoLibs = writePython2 "test-writers-python2-no-libs" {} ''
+      print("success")
+    '';
+
+    python3NoLibs = writePython3 "test-writers-python3-no-libs" {} ''
+      print("success")
+    '';
+  };
+
+
+  path = {
+    bash = writeBash "test-writers-bash-path" (writeText "test" ''
+      if [[ "test" == "test" ]]; then echo "success"; fi
+    '');
+    haskell = writeHaskell "test-writers-haskell-path" { libraries = [ haskellPackages.acme-default ]; } (writeText "test" ''
+      import Data.Default
+
+      int :: Int
+      int = def
+
+      main :: IO ()
+      main = case int of
+        18871 -> putStrLn $ id "success"
+        _ -> print "fail"
+    '');
+  };
+
+  writeTest = expectedValue: name: test:
+    writeDash "run-${name}" ''
+      if test "$(${test})" != "${expectedValue}"; then
+        echo 'test ${test} failed'
+        exit 1
+      fi
+    '';
+
+in runCommand "test-writers" {
+  passthru = { inherit writeTest bin simple; };
+  meta.platforms = lib.platforms.all;
+} ''
+  ${lib.concatMapStringsSep "\n" (test: writeTest "success" test.name "${test}/bin/${test.name}") (lib.attrValues bin)}
+  ${lib.concatMapStringsSep "\n" (test: writeTest "success" test.name test) (lib.attrValues simple)}
+  ${lib.concatMapStringsSep "\n" (test: writeTest "success" test.name test) (lib.attrValues path)}
+
+  echo 'nix-writers successfully tested' >&2
+  touch $out
+''
+