Diffstat (limited to 'nixpkgs/maintainers/scripts')
-rw-r--r-- nixpkgs/maintainers/scripts/README.md 62
-rw-r--r-- nixpkgs/maintainers/scripts/all-tarballs.nix 16
-rw-r--r-- nixpkgs/maintainers/scripts/bootstrap-files/README.md 85
-rwxr-xr-x nixpkgs/maintainers/scripts/bootstrap-files/refresh-tarballs.bash 282
-rw-r--r-- nixpkgs/maintainers/scripts/build.nix 55
l--------- nixpkgs/maintainers/scripts/check-by-name.sh 1
-rw-r--r-- nixpkgs/maintainers/scripts/check-hydra-by-maintainer.nix 71
-rwxr-xr-x nixpkgs/maintainers/scripts/check-maintainer-github-handles.sh 66
-rw-r--r-- nixpkgs/maintainers/scripts/check-maintainers-sorted.nix 57
-rwxr-xr-x nixpkgs/maintainers/scripts/convert-to-import-cargo-lock.sh 4
-rw-r--r-- nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/.gitignore 1
-rw-r--r-- nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/Cargo.lock 106
-rw-r--r-- nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/Cargo.toml 12
-rw-r--r-- nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/default.nix 16
-rw-r--r-- nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/shell.nix 5
-rw-r--r-- nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/src/main.rs 241
-rwxr-xr-x nixpkgs/maintainers/scripts/copy-tarballs.pl 252
-rwxr-xr-x nixpkgs/maintainers/scripts/db-to-md.sh 88
-rwxr-xr-x nixpkgs/maintainers/scripts/debian-patches.sh 34
-rwxr-xr-x nixpkgs/maintainers/scripts/dep-licenses.sh 57
-rwxr-xr-x nixpkgs/maintainers/scripts/doc/escape-code-markup.py 97
-rwxr-xr-x nixpkgs/maintainers/scripts/doc/list-systemd-manpages.zsh 33
-rwxr-xr-x nixpkgs/maintainers/scripts/doc/replace-xrefs-by-empty-links.py 32
-rw-r--r-- nixpkgs/maintainers/scripts/doc/unknown-code-language.lua 12
-rw-r--r-- nixpkgs/maintainers/scripts/eval-release.nix 25
-rwxr-xr-x nixpkgs/maintainers/scripts/eval-release.sh 11
-rwxr-xr-x nixpkgs/maintainers/scripts/feature-freeze-teams.pl 98
-rwxr-xr-x nixpkgs/maintainers/scripts/fetch-kde-qt.sh 196
-rw-r--r-- nixpkgs/maintainers/scripts/find-tarballs.nix 50
-rwxr-xr-x nixpkgs/maintainers/scripts/fix-maintainers.pl 58
-rwxr-xr-x nixpkgs/maintainers/scripts/get-maintainer.sh 73
-rw-r--r-- nixpkgs/maintainers/scripts/haskell/dependencies.nix 10
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/hydra-report.hs 819
-rw-r--r-- nixpkgs/maintainers/scripts/haskell/maintained-broken-pkgs.nix 22
-rw-r--r-- nixpkgs/maintainers/scripts/haskell/maintainer-handles.nix 21
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/mark-broken.sh 66
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/merge-and-open-pr.sh 129
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/regenerate-hackage-packages.sh 120
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh 25
-rw-r--r-- nixpkgs/maintainers/scripts/haskell/test-configurations.nix 158
-rw-r--r-- nixpkgs/maintainers/scripts/haskell/transitive-broken-packages.nix 16
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/update-cabal2nix-unstable.sh 17
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/update-hackage.sh 35
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/update-stackage.sh 87
-rwxr-xr-x nixpkgs/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh 43
-rwxr-xr-x nixpkgs/maintainers/scripts/hydra-eval-failures.py 112
-rwxr-xr-x nixpkgs/maintainers/scripts/hydra_eval_check 13
-rw-r--r-- nixpkgs/maintainers/scripts/luarocks-config.lua 8
-rw-r--r-- nixpkgs/maintainers/scripts/luarocks-packages.csv 124
-rwxr-xr-x nixpkgs/maintainers/scripts/mdize-module.sh 83
-rwxr-xr-x nixpkgs/maintainers/scripts/nix-call-package 5
-rwxr-xr-x nixpkgs/maintainers/scripts/nix-diff.sh 277
-rw-r--r-- nixpkgs/maintainers/scripts/nix-generate-from-cpan.nix 26
-rwxr-xr-x nixpkgs/maintainers/scripts/nix-generate-from-cpan.pl 473
-rw-r--r-- nixpkgs/maintainers/scripts/nixpkgs-lint.nix 24
-rwxr-xr-x nixpkgs/maintainers/scripts/nixpkgs-lint.pl 173
-rwxr-xr-x nixpkgs/maintainers/scripts/patchelf-hints.sh 84
-rw-r--r-- nixpkgs/maintainers/scripts/pluginupdate.py 815
-rwxr-xr-x nixpkgs/maintainers/scripts/rebuild-amount.sh 133
-rwxr-xr-x nixpkgs/maintainers/scripts/remove-old-aliases.py 213
-rwxr-xr-x nixpkgs/maintainers/scripts/sha-to-sri.py 228
-rwxr-xr-x nixpkgs/maintainers/scripts/update-channel-branches.sh 112
-rw-r--r-- nixpkgs/maintainers/scripts/update-dotnet-lockfiles.nix 72
-rwxr-xr-x nixpkgs/maintainers/scripts/update-octave-packages 468
-rw-r--r-- nixpkgs/maintainers/scripts/update-octave-shell.nix 12
-rwxr-xr-x nixpkgs/maintainers/scripts/update-python-libraries 3
-rwxr-xr-x nixpkgs/maintainers/scripts/update-redirected-urls.sh 12
-rwxr-xr-x nixpkgs/maintainers/scripts/update-ruby-packages 16
-rwxr-xr-x nixpkgs/maintainers/scripts/update.nix 227
-rw-r--r-- nixpkgs/maintainers/scripts/update.py 247
-rw-r--r-- nixpkgs/maintainers/scripts/vanity-manual-equalities.txt 7
-rwxr-xr-x nixpkgs/maintainers/scripts/vanity.sh 122
72 files changed, 7853 insertions, 0 deletions
diff --git a/nixpkgs/maintainers/scripts/README.md b/nixpkgs/maintainers/scripts/README.md
new file mode 100644
index 000000000000..f8fc7aff955d
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/README.md
@@ -0,0 +1,62 @@
+# Maintainer scripts
+
+This folder contains various executable scripts for nixpkgs maintainers,
+and supporting data or Nix files as needed.
+These scripts generally aren't a stable interface and may change or be removed.
+
+What follows is a (very incomplete) overview of available scripts.
+
+
+## Metadata
+
+### `check-by-name.sh`
+
+An alias for `pkgs/test/nixpkgs-check-by-name/scripts/run-local.sh`, see [documentation](../../pkgs/test/nixpkgs-check-by-name/scripts/README.md).
+
+### `get-maintainer.sh`
+
+`get-maintainer.sh [selector] value` returns a JSON object describing
+a given nixpkgs maintainer, equivalent to `lib.maintainers.${x} // { handle = x; }`.
+
+This allows looking up a maintainer's attrset (including GitHub and Matrix
+handles, email address etc.) based on any of their handles, more correctly and
+robustly than text search through `maintainer-list.nix`.
+
+```
+❯ ./get-maintainer.sh nicoo
+{
+  "email": "nicoo@debian.org",
+  "github": "nbraud",
+  "githubId": 1155801,
+  "keys": [
+    {
+      "fingerprint": "E44E 9EA5 4B8E 256A FB73 49D3 EC9D 3708 72BC 7A8C"
+    }
+  ],
+  "name": "nicoo",
+  "handle": "nicoo"
+}
+
+❯ ./get-maintainer.sh name 'Silvan Mosberger'
+{
+  "email": "contact@infinisil.com",
+  "github": "infinisil",
+  "githubId": 20525370,
+  "keys": [
+    {
+      "fingerprint": "6C2B 55D4 4E04 8266 6B7D  DA1A 422E 9EDA E015 7170"
+    }
+  ],
+  "matrix": "@infinisil:matrix.org",
+  "name": "Silvan Mosberger",
+  "handle": "infinisil"
+}
+```
+
+The maintainer is designated by a `selector` which must be one of:
+- `handle` (default): the maintainer's attribute name in `lib.maintainers`;
+- `email`, `name`, `github`, `githubId`, `matrix`:
+  attributes of the maintainer's object, matched exactly;
+  see [`maintainer-list.nix`] for the fields' definition.
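+
+For example, to look up a maintainer by their GitHub handle instead
+(an illustrative invocation, reusing the data shown above):
+
+```
+❯ ./get-maintainer.sh github nbraud
+```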
+
+[`maintainer-list.nix`]: ../maintainer-list.nix
diff --git a/nixpkgs/maintainers/scripts/all-tarballs.nix b/nixpkgs/maintainers/scripts/all-tarballs.nix
new file mode 100644
index 000000000000..83236e6fa91e
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/all-tarballs.nix
@@ -0,0 +1,16 @@
+/* Helper expression for copy-tarballs. This returns (nearly) all
+   tarballs used by the free packages in Nixpkgs.
+
+   Typical usage:
+
+   $ copy-tarballs.pl --expr 'import <nixpkgs/maintainers/scripts/all-tarballs.nix>'
+*/
+
+import ../../pkgs/top-level/release.nix
+  { # Don't apply ‘hydraJob’ to jobs, because then we can't get to the
+    # dependency graph.
+    scrubJobs = false;
+    # No need to evaluate on i686.
+    supportedSystems = [ "x86_64-linux" ];
+    bootstrapConfigs = [];
+  }
diff --git a/nixpkgs/maintainers/scripts/bootstrap-files/README.md b/nixpkgs/maintainers/scripts/bootstrap-files/README.md
new file mode 100644
index 000000000000..ae385cbd6ce8
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/bootstrap-files/README.md
@@ -0,0 +1,85 @@
+# Bootstrap files
+
+Currently `nixpkgs` builds most of its packages using bootstrap seed
+binaries (without relying on external inputs):
+
+- `bootstrap-tools`: an archive with the compiler toolchain and other
+  helper tools, enough to build the rest of `nixpkgs`.
+- initial binaries needed to unpack `bootstrap-tools.*`. On `linux`
+  it's just `busybox`, on `darwin` it's `sh`, `bzip2`, `mkdir` and
+  `cpio`. These binaries can be executed directly from the store.
+
+These are called "bootstrap files".
+
+Bootstrap files should always be fetched from hydra and uploaded to
+`tarballs.nixos.org` to guarantee that all the binaries were built from
+the code committed to the `nixpkgs` repository.
+
+The uploads to `tarballs.nixos.org` are done by `@lovesegfault` today.
+
+This document describes the procedure of updating bootstrap files in
+`nixpkgs`.
+
+## How to request the bootstrap seed update
+
+To get the tarballs updated, let's use `i686-unknown-linux-gnu` as an
+example target:
+
+1. Create a local update:
+
+   ```
+   $ maintainers/scripts/bootstrap-files/refresh-tarballs.bash --commit --targets=i686-unknown-linux-gnu
+   ```
+
+2. Test the update locally, e.g. by building the local `hello`
+   derivation against the result:
+
+   ```
+   $ nix-build -A hello --argstr system i686-linux
+   ```
+
+   To validate cross-targets the `binfmt` `NixOS` helper can be useful.
+   For `riscv64-unknown-linux-gnu` the `/etc/nixos/configuration.nix`
+   entry would be `boot.binfmt.emulatedSystems = [ "riscv64-linux" ]`.
+
+3. Propose the commit as a PR to update the bootstrap tarballs, tag people
+   who can help you test the updated architecture, and once reviewed tag
+   `@lovesegfault` to upload the tarballs.
+
+## Bootstrap files job definitions
+
+There are two types of bootstrap files:
+
+- natively built `stdenvBootstrapTools.build` hydra jobs in
+  [`nixpkgs:trunk`](https://hydra.nixos.org/jobset/nixpkgs/trunk#tabs-jobs)
+  jobset. An incomplete list of examples:
+
+  * `aarch64-unknown-linux-musl.nix`
+  * `i686-unknown-linux-gnu.nix`
+
+  These are Tier 1 hydra platforms.
+
+- cross-built by `bootstrapTools.build` hydra jobs in
+  [`nixpkgs:cross-trunk`](https://hydra.nixos.org/jobset/nixpkgs/cross-trunk#tabs-jobs)
+  jobset. An incomplete list of examples:
+
+  * `mips64el-unknown-linux-gnuabi64.nix`
+  * `mips64el-unknown-linux-gnuabin32.nix`
+  * `mipsel-unknown-linux-gnu.nix`
+  * `powerpc64le-unknown-linux-gnu.nix`
+  * `riscv64-unknown-linux-gnu.nix`
+
+  These are usually Tier 2 and lower targets.
+
+The `.build` job contains an `/on-server/` subdirectory with the binaries
+to be uploaded to `tarballs.nixos.org`.
+The files are uploaded to `tarballs.nixos.org` by people with write
+access to the `S3` store.
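+
+For reference, the job resolution done by `refresh-tarballs.bash` can be
+reproduced by hand against hydra's JSON API (an illustrative query for
+the `i686-unknown-linux-gnu` job mentioned above):
+
+```
+$ curl -s -L -H 'Content-Type: application/json' \
+    'https://hydra.nixos.org/job/nixpkgs/trunk/stdenvBootstrapTools.i686-unknown-linux-gnu.build/latest' \
+    | jq '.id'
+```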
+
+## TODOs
+
+- The `pkgs/stdenv/darwin` file layout is slightly different from
+  `pkgs/stdenv/linux`. Once the `linux` seed update becomes routine we
+  can bring `darwin` in sync, if feasible.
+- The `darwin` definition of the `.build` `on-server/` directory layout
+  differs and should be updated.
+
diff --git a/nixpkgs/maintainers/scripts/bootstrap-files/refresh-tarballs.bash b/nixpkgs/maintainers/scripts/bootstrap-files/refresh-tarballs.bash
new file mode 100755
index 000000000000..21c43ade27f1
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/bootstrap-files/refresh-tarballs.bash
@@ -0,0 +1,282 @@
+#!/usr/bin/env nix-shell
+#! nix-shell --pure
+#! nix-shell -i bash
+#! nix-shell -p curl cacert
+#! nix-shell -p git
+#! nix-shell -p nix
+#! nix-shell -p jq
+
+# How the refresher works:
+#
+# For a given list of <targets>:
+# 1. fetch the latest successful '.build' job
+# 2. fetch the oldest evaluation that contained that '.build' and extract the nixpkgs commit
+# 3. fetch all the '.build' artifacts from the '$out/on-server/' directory
+# 4. calculate hashes and craft the commit message with the details on
+#    how to upload the result to 'tarballs.nixos.org'
+
+usage() {
+    cat >&2 <<EOF
+Usage:
+    $0 [ --commit ] [ --all-targets | --targets=<target>[,<target>,...] ]
+
+    The tool must be run from the root directory of the 'nixpkgs' repository.
+
+Synopsis:
+    The 'refresh-tarballs.bash' script fetches the latest bootstrapFiles
+    built by hydra, registers them in 'nixpkgs' and provides commands to
+    upload seed files to 'tarballs.nixos.org'.
+
+    This is usually done in the following cases:
+
+    1. Single target fix: current bootstrap files for a single target
+       are problematic for some reason (target-specific bug). In this
+       case we can refresh just that target as:
+
+       \$ $0 --commit --targets=i686-unknown-linux-gnu
+
+    2. Routine refresh: all bootstrap files should be refreshed to avoid
+       debugging problems that only occur on very old binaries.
+
+       \$ $0 --commit --all-targets
+
+To get help on uploading refreshed binaries to 'tarballs.nixos.org'
+please have a look at <maintainers/scripts/bootstrap-files/README.md>.
+EOF
+    exit 1
+}
+
+# log helpers
+
+die() {
+    echo "ERROR: $*" >&2
+    exit 1
+}
+
+info() {
+    echo "INFO: $*" >&2
+}
+
+[[ ${#@} -eq 0 ]] && usage
+
+# known targets
+
+NATIVE_TARGETS=(
+    aarch64-unknown-linux-gnu
+    aarch64-unknown-linux-musl
+    i686-unknown-linux-gnu
+    x86_64-unknown-linux-gnu
+    x86_64-unknown-linux-musl
+
+    # TODO: add darwin here once a few prerequisites are satisfied:
+    #   - bootstrap-files are factored out into a separate file
+    #   - the build artifacts are factored out into an `on-server`
+    #     directory. Right now it does not match the `linux` layout.
+    #
+    #aarch64-apple-darwin
+    #x86_64-apple-darwin
+)
+
+is_native() {
+    local t target=$1
+    for t in "${NATIVE_TARGETS[@]}"; do
+        [[ $t == $target ]] && return 0
+    done
+    return 1
+}
+
+CROSS_TARGETS=(
+    armv5tel-unknown-linux-gnueabi
+    armv6l-unknown-linux-gnueabihf
+    armv6l-unknown-linux-musleabihf
+    armv7l-unknown-linux-gnueabihf
+    mips64el-unknown-linux-gnuabi64
+    mips64el-unknown-linux-gnuabin32
+    mipsel-unknown-linux-gnu
+    powerpc64le-unknown-linux-gnu
+    riscv64-unknown-linux-gnu
+)
+
+is_cross() {
+    local t target=$1
+    for t in "${CROSS_TARGETS[@]}"; do
+        [[ $t == $target ]] && return 0
+    done
+    return 1
+}
+
+# collect passed options
+
+targets=()
+commit=no
+
+for arg in "$@"; do
+    case "$arg" in
+        --all-targets)
+            targets+=(
+                "${CROSS_TARGETS[@]}"
+                "${NATIVE_TARGETS[@]}"
+            )
+            ;;
+        --targets=*)
+            # Convert "--targets=a,b,c" to targets=(a b c) bash array.
+            comma_targets=${arg#--targets=}
+            targets+=(${comma_targets//,/ })
+            ;;
+        --commit)
+            commit=yes
+            ;;
+        *)
+            usage
+            ;;
+    esac
+done
+
+for target in "${targets[@]}"; do
+    # Native and cross jobsets differ a bit. We'll have to pick the
+    # one based on target name:
+    if is_native $target; then
+        jobset=nixpkgs/trunk
+        job="stdenvBootstrapTools.${target}.build"
+    elif is_cross $target; then
+        jobset=nixpkgs/cross-trunk
+        job="bootstrapTools.${target}.build"
+    else
+        die "'$target' is not present in either of 'NATIVE_TARGETS' or 'CROSS_TARGETS'. Please add one."
+    fi
+
+    # 'nixpkgs' prefix where we will write new tarball hashes
+    case "$target" in
+        *linux*) nixpkgs_prefix="pkgs/stdenv/linux" ;;
+        *darwin*) nixpkgs_prefix="pkgs/stdenv/darwin" ;;
+        *) die "don't know where to put '$target'" ;;
+    esac
+
+    # We enforce the s3 prefix for all targets here. This differs slightly
+    # from manually uploaded targets, where names were chosen inconsistently.
+    s3_prefix="stdenv/$target"
+
+    # resolve 'latest' build to the build 'id', construct the link.
+    latest_build_uri="https://hydra.nixos.org/job/$jobset/$job/latest"
+    latest_build="$target.latest-build"
+    info "Fetching latest successful build from '${latest_build_uri}'"
+    curl -s -H "Content-Type: application/json" -L "$latest_build_uri" > "$latest_build"
+    [[ $? -ne 0 ]] && die "Failed to fetch latest successful build"
+    latest_build_id=$(jq '.id' < "$latest_build")
+    [[ $? -ne 0 ]] && die "Did not find 'id' in latest build"
+    build_uri="https://hydra.nixos.org/build/${latest_build_id}"
+
+    # We pick the oldest jobset evaluation and extract the 'nixpkgs' commit.
+    #
+    # We use oldest instead of latest to make the result more stable
+    # across unrelated 'nixpkgs' updates. Ideally two subsequent runs of
+    # this refresher should produce the same output (provided there are
+    # no bootstrapTools updates committed between the two runs).
+    oldest_eval_id=$(jq '.jobsetevals|min' < "$latest_build")
+    [[ $? -ne 0 ]] && die "Did not find 'jobsetevals' in latest build"
+    eval_uri="https://hydra.nixos.org/eval/${oldest_eval_id}"
+    eval_meta="$target.eval-meta"
+    info "Fetching oldest eval details from '${eval_uri}' (can take a minute)"
+    curl -s -H "Content-Type: application/json"  -L "${eval_uri}" > "$eval_meta"
+    [[ $? -ne 0 ]] && die "Failed to fetch eval metadata"
+    nixpkgs_revision=$(jq --raw-output ".jobsetevalinputs.nixpkgs.revision" < "$eval_meta")
+    [[ $? -ne 0 ]] && die "Failed to fetch revision"
+
+    # Extract the build paths out of the build metadata
+    drvpath=$(jq --raw-output '.drvpath' < "${latest_build}")
+    [[ $? -ne 0 ]] && die "Did not find 'drvpath' in latest build"
+    outpath=$(jq --raw-output '.buildoutputs.out.path' < "${latest_build}")
+    [[ $? -ne 0 ]] && die "Did not find 'buildoutputs' in latest build"
+    build_timestamp=$(jq --raw-output '.timestamp' < "${latest_build}")
+    [[ $? -ne 0 ]] && die "Did not find 'timestamp' in latest build"
+    build_time=$(TZ=UTC LANG=C date --date="@${build_timestamp}" --rfc-email)
+    [[ $? -ne 0 ]] && die "Failed to format timestamp"
+
+    info "Fetching bootstrap tools to calculate hashes from '${outpath}'"
+    nix-store --realize "$outpath"
+    [[ $? -ne 0 ]] && die "Failed to fetch '${outpath}' from hydra"
+
+    fnames=()
+
+    target_file="${nixpkgs_prefix}/bootstrap-files/${target}.nix"
+    info "Writing '${target_file}'"
+    {
+        # header
+        cat <<EOF
+# Autogenerated by maintainers/scripts/bootstrap-files/refresh-tarballs.bash as:
+# $ ./refresh-tarballs.bash --targets=${target}
+#
+# Metadata:
+# - nixpkgs revision: ${nixpkgs_revision}
+# - hydra build: ${latest_build_uri}
+# - resolved hydra build: ${build_uri}
+# - instantiated derivation: ${drvpath}
+# - output directory: ${outpath}
+# - build time: ${build_time}
+{
+EOF
+      for p in "${outpath}/on-server"/*; do
+          fname=$(basename "$p")
+          fnames+=("$fname")
+          case "$fname" in
+              bootstrap-tools.tar.xz) attr=bootstrapTools ;;
+              busybox) attr=$fname ;;
+              *) die "Don't know how to map '$fname' to attribute name. Please update me."
+          esac
+
+          executable_arg=
+          executable_nix=
+          if [[ -x "$p" ]]; then
+              executable_arg="--executable"
+              executable_nix="    executable = true;"
+          fi
+          sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p")
+          [[ $? -ne 0 ]] && die "Failed to get the hash for '$p'"
+          sri=$(nix-hash --to-sri "sha256:$sha256")
+          [[ $? -ne 0 ]] && die "Failed to convert '$sha256' hash to an SRI form"
+
+          # individual file entries
+          cat <<EOF
+  $attr = import <nix/fetchurl.nix> {
+    url = "http://tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/$fname";
+    hash = "${sri}";$(printf "\n%s" "${executable_nix}")
+  };
+EOF
+      done
+      # footer
+      cat <<EOF
+}
+EOF
+    } > "${target_file}"
+
+    target_file_commit_msg=${target}.commit_message
+    cat > "$target_file_commit_msg" <<EOF
+${nixpkgs_prefix}: update ${target} bootstrap-files
+
+sha256sum of files to be uploaded:
+
+$(
+echo "$ sha256sum ${outpath}/on-server/*"
+sha256sum ${outpath}/on-server/*
+)
+
+Suggested commands to upload files to 'tarballs.nixos.org':
+
+    $ nix-store --realize ${outpath}
+    $ aws s3 cp --recursive --acl public-read ${outpath}/on-server/ s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision}
+    $ aws s3 cp --recursive s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision} ./
+    $ sha256sum ${fnames[*]}
+    $ sha256sum ${outpath}/on-server/*
+EOF
+
+    cat "$target_file_commit_msg"
+    if [[ $commit == yes ]]; then
+        git commit "${target_file}" -F "$target_file_commit_msg"
+    else
+        info "DRY RUN: git commit ${target_file} -F $target_file_commit_msg"
+    fi
+    rm -- "$target_file_commit_msg"
+
+    # delete temp files
+    rm -- "$latest_build" "$eval_meta"
+done
diff --git a/nixpkgs/maintainers/scripts/build.nix b/nixpkgs/maintainers/scripts/build.nix
new file mode 100644
index 000000000000..ca401700b4a6
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/build.nix
@@ -0,0 +1,55 @@
+{ maintainer
+, localSystem ? { system = args.system or builtins.currentSystem; }
+, system ? localSystem.system
+, crossSystem ? localSystem
+, ...
+}@args:
+
+# based on update.nix
+# nix-build build.nix --argstr maintainer <yourname>
+
+# to build for aarch64-linux using boot.binfmt.emulatedSystems:
+# nix-build build.nix --argstr maintainer <yourname> --argstr system aarch64-linux
+
+let
+  pkgs = import ./../../default.nix (removeAttrs args [ "maintainer" ]);
+  maintainer_ = pkgs.lib.maintainers.${maintainer};
+  packagesWith = cond: return: set:
+    (pkgs.lib.flatten
+      (pkgs.lib.mapAttrsToList
+        (name: pkg:
+          let
+            result = builtins.tryEval
+              (
+                if pkgs.lib.isDerivation pkg && cond name pkg then
+                  # Skip packages whose closure fails on evaluation.
+                  # This happens for pkgs like `python27Packages.djangoql`
+                  # that have disabled Python pkgs as dependencies.
+                  builtins.seq pkg.outPath
+                    [ (return name pkg) ]
+                else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
+                then packagesWith cond return pkg
+                else [ ]
+              );
+          in
+          if result.success then result.value
+          else [ ]
+        )
+        set
+      )
+    );
+in
+packagesWith
+  (name: pkg:
+    (
+      if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta
+      then (
+        if builtins.isList pkg.meta.maintainers
+        then builtins.elem maintainer_ pkg.meta.maintainers
+        else maintainer_ == pkg.meta.maintainers
+      )
+      else false
+    )
+  )
+  (name: pkg: pkg)
+  pkgs
diff --git a/nixpkgs/maintainers/scripts/check-by-name.sh b/nixpkgs/maintainers/scripts/check-by-name.sh
new file mode 120000
index 000000000000..545dbedf0a31
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/check-by-name.sh
@@ -0,0 +1 @@
+../../pkgs/test/nixpkgs-check-by-name/scripts/run-local.sh
\ No newline at end of file
diff --git a/nixpkgs/maintainers/scripts/check-hydra-by-maintainer.nix b/nixpkgs/maintainers/scripts/check-hydra-by-maintainer.nix
new file mode 100644
index 000000000000..c40729a3974e
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/check-hydra-by-maintainer.nix
@@ -0,0 +1,71 @@
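+# Run via nix-shell, e.g. (the build output below prints the same usage):
+#   nix-shell maintainers/scripts/check-hydra-by-maintainer.nix --argstr maintainer SuperSandro2000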
+{ maintainer }:
+let
+  pkgs = import ./../../default.nix {
+    config.allowAliases = false;
+  };
+  inherit (pkgs) lib;
+  maintainer_ = pkgs.lib.maintainers.${maintainer};
+  packagesWith = cond: return: prefix: set:
+    (lib.flatten
+      (lib.mapAttrsToList
+        (name: pkg:
+          let
+            result = builtins.tryEval
+              (
+                if lib.isDerivation pkg && cond name pkg then
+                # Skip packages whose closure fails on evaluation.
+                # This happens for pkgs like `python27Packages.djangoql`
+                # that have disabled Python pkgs as dependencies.
+                  builtins.seq pkg.outPath
+                    [ (return "${prefix}${name}") ]
+                else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
+                then packagesWith cond return "${name}." pkg
+                else [ ]
+              );
+          in
+          if result.success then result.value
+          else [ ]
+        )
+        set
+      )
+    );
+
+  packages = packagesWith
+    (name: pkg:
+      (
+        if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta
+        then
+          (
+            if builtins.isList pkg.meta.maintainers
+            then builtins.elem maintainer_ pkg.meta.maintainers
+            else maintainer_ == pkg.meta.maintainers
+          )
+        else false
+      )
+    )
+    (name: name)
+    ""
+    pkgs;
+
+in
+pkgs.stdenv.mkDerivation {
+  name = "nixpkgs-update-script";
+  buildInputs = [ pkgs.hydra-check ];
+  buildCommand = ''
+    echo ""
+    echo "----------------------------------------------------------------"
+    echo ""
+    echo "nix-shell maintainers/scripts/check-hydra-by-maintainer.nix --argstr maintainer SuperSandro2000"
+    echo ""
+    echo "----------------------------------------------------------------"
+    exit 1
+  '';
+  shellHook = ''
+    unset shellHook # do not contaminate nested shells
+    echo "Please stand by"
+    echo nix-shell -p hydra-check --run "hydra-check ${builtins.concatStringsSep " " packages}"
+    nix-shell -p hydra-check --run "hydra-check ${builtins.concatStringsSep " " packages}"
+    exit $?
+  '';
+}
diff --git a/nixpkgs/maintainers/scripts/check-maintainer-github-handles.sh b/nixpkgs/maintainers/scripts/check-maintainer-github-handles.sh
new file mode 100755
index 000000000000..a5555ca9e909
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/check-maintainer-github-handles.sh
@@ -0,0 +1,66 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p jq parallel
+
+# Example how to work with the `lib.maintainers` attrset.
+# Can be used to check whether all user handles are still valid.
+
+set -o errexit -o noclobber -o nounset -o pipefail
+shopt -s failglob inherit_errexit
+
+function checkCommits {
+    local ret status tmp user
+    user="$1"
+    tmp=$(mktemp)
+    curl --silent -w "%{http_code}" \
+         "https://github.com/NixOS/nixpkgs/commits?author=$user" \
+         > "$tmp"
+    # the last line of tmp contains the http status
+    status=$(tail -n1 "$tmp")
+    ret=
+    case $status in
+        200) if <"$tmp" grep -i "no commits found" > /dev/null; then
+                 ret=1
+             else
+                 ret=0
+             fi
+             ;;
+        # because of github’s hard request limits, this can take some time
+        429) sleep 2
+             printf "."
+             checkCommits "$user"
+             ret=$?
+             ;;
+        *)   printf "BAD STATUS: %s for %s\n" "$status" "$user"
+             ret=1
+             ;;
+    esac
+    rm "$tmp"
+    return $ret
+}
+export -f checkCommits
+
+function checkUser {
+    local user="$1"
+    local status=
+    status="$(curl --silent --head "https://github.com/${user}" | grep Status)"
+    # checks whether a user handle can be found on github
+    if [[ "$status" =~ 404 ]]; then
+        printf "%s\t\t\t\t%s\n" "$status" "$user"
+    # checks whether the user handle has any nixpkgs commits
+    elif checkCommits "$user"; then
+        printf "OK!\t\t\t\t%s\n" "$user"
+    else
+        printf "No Commits!\t\t\t%s\n" "$user"
+    fi
+}
+export -f checkUser
+
+# output the maintainers set as json
+# and filter out the github username of each maintainer (if it exists)
+# then check some at the same time
+nix-instantiate -A lib.maintainers --eval --strict --json \
+    | jq -r '.[]|.github|select(.)' \
+    | parallel -j5 checkUser
+
+# To check some arbitrary users:
+# parallel -j100 checkUser ::: "eelco" "profpatsch" "Profpatsch" "a"
diff --git a/nixpkgs/maintainers/scripts/check-maintainers-sorted.nix b/nixpkgs/maintainers/scripts/check-maintainers-sorted.nix
new file mode 100644
index 000000000000..3de4e07550c4
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/check-maintainers-sorted.nix
@@ -0,0 +1,57 @@
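+# Illustrative invocation (evaluates to "all good!" when the list is sorted):
+#   nix-instantiate --eval maintainers/scripts/check-maintainers-sorted.nix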
+let
+  lib = import ../../lib;
+  inherit (lib)
+    add attrNames elemAt foldl' genList length replaceStrings sort toLower trace;
+
+  maintainers = import ../maintainer-list.nix;
+  simplify = replaceStrings [ "-" "_" ] [ "" "" ];
+  compare = a: b: simplify (toLower a) < simplify (toLower b);
+  namesSorted =
+    sort
+      (a: b: a.key < b.key)
+      (map
+        (n: let pos = builtins.unsafeGetAttrPos n maintainers;
+            in assert pos == null -> throw "maintainers entry ${n} is malformed";
+              { name = n; line = pos.line; key = toLower (simplify n); })
+        (attrNames maintainers));
+  before = { name, line, key }:
+    foldl'
+      (acc: n: if n.key < key && (acc == null || n.key > acc.key) then n else acc)
+      null
+      namesSorted;
+  errors = foldl' add 0
+    (map
+      (i: let a = elemAt namesSorted i;
+              b = elemAt namesSorted (i + 1);
+              lim = let t = before a; in if t == null then "the initial {" else t.name;
+          in if a.line >= b.line
+             then trace
+               ("maintainer ${a.name} (line ${toString a.line}) should be listed "
+                + "after ${lim}, not after ${b.name} (line ${toString b.line})")
+               1
+             else 0)
+      (genList (i: i) (length namesSorted - 1)));
+in
+assert errors == 0; "all good!"
+
+# generate edit commands to sort the list.
+# may put everything following the last current entry (the closing } and what follows) in the wrong place
+# with lib;
+# concatStringsSep
+#   "\n"
+#   (let first = foldl' (acc: n: if n.line < acc then n.line else acc) 999999999 namesSorted;
+#        commands = map
+#          (i: let e = elemAt namesSorted i;
+#                  begin = foldl'
+#                    (acc: n: if n.line < e.line && n.line > acc then n.line else acc)
+#                    1
+#                    namesSorted;
+#                  end =
+#                    foldl' (acc: n: if n.line > e.line && n.line < acc then n.line else acc)
+#                      999999999
+#                      namesSorted;
+#              in "${toString e.line},${toString (end - 1)} p")
+#          (genList (i: i) (length namesSorted));
+#    in map
+#      (c: "sed -ne '${c}' maintainers/maintainer-list.nix")
+#      ([ "1,${toString (first - 1)} p" ] ++ commands))
diff --git a/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock.sh b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock.sh
new file mode 100755
index 000000000000..b38825d4d3e0
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -I nixpkgs=. -i bash -p "import ./maintainers/scripts/convert-to-import-cargo-lock" nix-prefetch-git
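+# Illustrative usage: each argument is an attribute from the top-level
+# package set whose expression should be converted, e.g.
+#   ./maintainers/scripts/convert-to-import-cargo-lock.sh <attr> [<attr>...]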
+
+convert-to-import-cargo-lock "$@"
diff --git a/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/.gitignore b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/.gitignore
new file mode 100644
index 000000000000..ea8c4bf7f35f
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/.gitignore
@@ -0,0 +1 @@
+/target
diff --git a/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/Cargo.lock b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/Cargo.lock
new file mode 100644
index 000000000000..b69fbc59ae84
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/Cargo.lock
@@ -0,0 +1,106 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "anyhow"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800"
+
+[[package]]
+name = "basic-toml"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e819b667739967cd44d308b8c7b71305d8bb0729ac44a248aa08f33d01950b4"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "convert-to-import-cargo-lock"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "basic-toml",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
+
+[[package]]
+name = "serde"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.107"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
diff --git a/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/Cargo.toml b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/Cargo.toml
new file mode 100644
index 000000000000..41f5729f01a2
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "convert-to-import-cargo-lock"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = { version = "1.0.69" }
+basic-toml = "0.1.1"
+serde = { version = "1.0.152", features = ["derive"] }
+serde_json = "1.0.93"
diff --git a/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/default.nix b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/default.nix
new file mode 100644
index 000000000000..f4c1f553d64f
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/default.nix
@@ -0,0 +1,16 @@
+with import ../../../. { };
+
+rustPlatform.buildRustPackage {
+  name = "convert-to-import-cargo-lock";
+
+  src = lib.cleanSourceWith {
+    src = ./.;
+    filter = name: type:
+      let
+        name' = builtins.baseNameOf name;
+      in
+      name' != "default.nix" && name' != "target";
+  };
+
+  cargoLock.lockFile = ./Cargo.lock;
+}
diff --git a/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/shell.nix b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/shell.nix
new file mode 100644
index 000000000000..8e913fdcd8be
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/shell.nix
@@ -0,0 +1,5 @@
+with import ../../../. { };
+
+mkShell {
+  packages = [ rustc cargo clippy rustfmt ] ++ lib.optional stdenv.isDarwin libiconv;
+}
diff --git a/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/src/main.rs b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/src/main.rs
new file mode 100644
index 000000000000..6eb6768d14e9
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/convert-to-import-cargo-lock/src/main.rs
@@ -0,0 +1,241 @@
+#![warn(clippy::pedantic)]
+#![allow(clippy::too_many_lines)]
+
+use anyhow::anyhow;
+use serde::Deserialize;
+use std::{collections::HashMap, env, fs, path::PathBuf, process::Command};
+
+#[derive(Deserialize)]
+struct CargoLock<'a> {
+    #[serde(rename = "package", borrow)]
+    packages: Vec<Package<'a>>,
+    metadata: Option<HashMap<&'a str, &'a str>>,
+}
+
+#[derive(Deserialize)]
+struct Package<'a> {
+    name: &'a str,
+    version: &'a str,
+    source: Option<&'a str>,
+    checksum: Option<&'a str>,
+}
+
+#[derive(Deserialize)]
+struct PrefetchOutput {
+    sha256: String,
+}
+
+fn main() -> anyhow::Result<()> {
+    let mut hashes = HashMap::new();
+
+    let attr_count = env::args().len() - 1;
+
+    for (i, attr) in env::args().skip(1).enumerate() {
+        println!("converting {attr} ({}/{attr_count})", i + 1);
+
+        convert(&attr, &mut hashes)?;
+    }
+
+    Ok(())
+}
+
+fn convert(attr: &str, hashes: &mut HashMap<String, String>) -> anyhow::Result<()> {
+    let package_path = nix_eval(format!("{attr}.meta.position"))?
+        .and_then(|p| p.split_once(':').map(|(f, _)| PathBuf::from(f)));
+
+    if package_path.is_none() {
+        eprintln!("can't automatically convert {attr}: doesn't exist");
+        return Ok(());
+    }
+
+    let package_path = package_path.unwrap();
+
+    if package_path.with_file_name("Cargo.lock").exists() {
+        eprintln!("skipping {attr}: already has a vendored Cargo.lock");
+        return Ok(());
+    }
+
+    let mut src = PathBuf::from(
+        String::from_utf8(
+            Command::new("nix-build")
+                .arg("-A")
+                .arg(format!("{attr}.src"))
+                .output()?
+                .stdout,
+        )?
+        .trim(),
+    );
+
+    if !src.exists() {
+        eprintln!("can't automatically convert {attr}: src doesn't exist (bad attr?)");
+        return Ok(());
+    } else if !src.metadata()?.is_dir() {
+        eprintln!("can't automatically convert {attr}: src isn't a directory");
+        return Ok(());
+    }
+
+    if let Some(mut source_root) = nix_eval(format!("{attr}.sourceRoot"))?.map(PathBuf::from) {
+        source_root = source_root.components().skip(1).collect();
+        src.push(source_root);
+    }
+
+    let cargo_lock_path = src.join("Cargo.lock");
+
+    if !cargo_lock_path.exists() {
+        eprintln!("can't automatically convert {attr}: src doesn't contain Cargo.lock");
+        return Ok(());
+    }
+
+    let cargo_lock_content = fs::read_to_string(cargo_lock_path)?;
+
+    let cargo_lock: CargoLock = basic_toml::from_str(&cargo_lock_content)?;
+
+    let mut git_dependencies = Vec::new();
+
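+    // Packages that have a `source` but no checksum (neither inline nor in
+    // the legacy `[metadata]` table) are git dependencies; prefetch each one
+    // below and record an SRI hash for `cargoLock.outputHashes`.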
+    for package in cargo_lock.packages.iter().filter(|p| {
+        p.source.is_some()
+            && p.checksum
+                .or_else(|| {
+                    cargo_lock
+                        .metadata
+                        .as_ref()?
+                        .get(
+                            format!("checksum {} {} ({})", p.name, p.version, p.source.unwrap())
+                                .as_str(),
+                        )
+                        .copied()
+                })
+                .is_none()
+    }) {
+        let (typ, original_url) = package
+            .source
+            .unwrap()
+            .split_once('+')
+            .expect("dependency should have well-formed source url");
+
+        if hashes.contains_key(original_url) {
+            continue;
+        }
+
+        assert_eq!(
+            typ, "git",
+            "packages without checksums should be git dependencies"
+        );
+
+        let (mut url, rev) = original_url
+            .split_once('#')
+            .expect("git dependency should have commit");
+
+        // TODO: improve
+        if let Some((u, _)) = url.split_once('?') {
+            url = u;
+        }
+
+        let prefetch_output: PrefetchOutput = serde_json::from_slice(
+            &Command::new("nix-prefetch-git")
+                .args(["--url", url, "--rev", rev, "--quiet", "--fetch-submodules"])
+                .output()?
+                .stdout,
+        )?;
+
+        let output_hash = String::from_utf8(
+            Command::new("nix")
+                .args([
+                    "--extra-experimental-features",
+                    "nix-command",
+                    "hash",
+                    "to-sri",
+                    "--type",
+                    "sha256",
+                    &prefetch_output.sha256,
+                ])
+                .output()?
+                .stdout,
+        )?;
+
+        let hash = output_hash.trim().to_string();
+
+        git_dependencies.push((
+            format!("{}-{}", package.name, package.version),
+            hash.clone(),
+        ));
+
+        hashes.insert(original_url.to_string(), hash);
+    }
+
+    fs::write(
+        package_path.with_file_name("Cargo.lock"),
+        cargo_lock_content,
+    )?;
+
+    let mut package_lines: Vec<_> = fs::read_to_string(&package_path)?
+        .lines()
+        .map(String::from)
+        .collect();
+
+    let (cargo_deps_line_index, cargo_deps_line) = package_lines
+        .iter_mut()
+        .enumerate()
+        .find(|(_, l)| {
+            l.trim_start().starts_with("cargoHash") || l.trim_start().starts_with("cargoSha256")
+        })
+        .expect("package should contain cargoHash/cargoSha256");
+
+    let spaces = " ".repeat(cargo_deps_line.len() - cargo_deps_line.trim_start().len());
+
+    if git_dependencies.is_empty() {
+        *cargo_deps_line = format!("{spaces}cargoLock.lockFile = ./Cargo.lock;");
+    } else {
+        *cargo_deps_line = format!("{spaces}cargoLock = {{");
+
+        let mut index_iter = cargo_deps_line_index + 1..;
+
+        package_lines.insert(
+            index_iter.next().unwrap(),
+            format!("{spaces}  lockFile = ./Cargo.lock;"),
+        );
+
+        package_lines.insert(
+            index_iter.next().unwrap(),
+            format!("{spaces}  outputHashes = {{"),
+        );
+
+        for ((dep, hash), index) in git_dependencies.drain(..).zip(&mut index_iter) {
+            package_lines.insert(index, format!("{spaces}    {dep:?} = {hash:?};"));
+        }
+
+        package_lines.insert(index_iter.next().unwrap(), format!("{spaces}  }};"));
+        package_lines.insert(index_iter.next().unwrap(), format!("{spaces}}};"));
+    }
+
+    if package_lines.last().map(String::as_str) != Some("") {
+        package_lines.push(String::new());
+    }
+
+    fs::write(package_path, package_lines.join("\n"))?;
+
+    Ok(())
+}
+
+fn nix_eval(attr: impl AsRef<str>) -> anyhow::Result<Option<String>> {
+    let output = String::from_utf8(
+        Command::new("nix-instantiate")
+            .args(["--eval", "-A", attr.as_ref()])
+            .output()?
+            .stdout,
+    )?;
+
+    let trimmed = output.trim();
+
+    if trimmed.is_empty() || trimmed == "null" {
+        Ok(None)
+    } else {
+        Ok(Some(
+            trimmed
+                .strip_prefix('"')
+                .and_then(|p| p.strip_suffix('"'))
+                .ok_or_else(|| anyhow!("couldn't parse nix-instantiate output: {output:?}"))?
+                .to_string(),
+        ))
+    }
+}
diff --git a/nixpkgs/maintainers/scripts/copy-tarballs.pl b/nixpkgs/maintainers/scripts/copy-tarballs.pl
new file mode 100755
index 000000000000..b17cd82f4d1c
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/copy-tarballs.pl
@@ -0,0 +1,252 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.FileSlurp perlPackages.JSON perlPackages.LWPProtocolHttps nixUnstable nixUnstable.perl-bindings
+
+# This command uploads tarballs to tarballs.nixos.org, the
+# content-addressed cache used by fetchurl as a fallback for when
+# upstream tarballs disappear or change. Usage:
+#
+# 1) To upload one or more files:
+#
+#    $ copy-tarballs.pl --file /path/to/tarball.tar.gz
+#
+# 2) To upload all files obtained via calls to fetchurl in a Nix derivation:
+#
+#    $ copy-tarballs.pl --expr '(import <nixpkgs> {}).hello'
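+#
+# The script expects AWS credentials in AWS_ACCESS_KEY_ID and
+# AWS_SECRET_ACCESS_KEY; when DEBUG is set, the S3 setup is skipped and
+# candidate URLs are only printed instead of uploaded.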
+
+use strict;
+use warnings;
+use File::Basename;
+use File::Path;
+use File::Slurp;
+use JSON;
+use Net::Amazon::S3;
+use Nix::Store;
+
+isValidPath("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-foo"); # FIXME: forces Nix::Store initialisation
+
+sub usage {
+    die "Syntax: $0 [--dry-run] [--exclude REGEXP] [--expr EXPR | --file FILES...]\n";
+}
+
+my $dryRun = 0;
+my $expr;
+my @fileNames;
+my $exclude;
+
+while (@ARGV) {
+    my $flag = shift @ARGV;
+
+    if ($flag eq "--expr") {
+        $expr = shift @ARGV or die "--expr requires an argument";
+    } elsif ($flag eq "--file") {
+        @fileNames = @ARGV;
+        last;
+    } elsif ($flag eq "--dry-run") {
+        $dryRun = 1;
+    } elsif ($flag eq "--exclude") {
+        $exclude = shift @ARGV or die "--exclude requires an argument";
+    } else {
+        usage();
+    }
+}
+
+my $bucket;
+
+if (not defined $ENV{DEBUG}) {
+    # S3 setup.
+    my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "AWS_ACCESS_KEY_ID not set\n";
+    my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "AWS_SECRET_ACCESS_KEY not set\n";
+
+    my $s3 = Net::Amazon::S3->new(
+        { aws_access_key_id     => $aws_access_key_id,
+          aws_secret_access_key => $aws_secret_access_key,
+          retry                 => 1,
+          host                  => "s3-eu-west-1.amazonaws.com",
+        });
+
+    $bucket = $s3->bucket("nixpkgs-tarballs") or die;
+}
+
+my $doWrite = 0;
+my $cacheFile = ($ENV{"HOME"} or die "\$HOME is not set") . "/.cache/nix/copy-tarballs";
+my %cache;
+$cache{$_} = 1 foreach read_file($cacheFile, err_mode => 'quiet', chomp => 1);
+$doWrite = 1;
+
+END() {
+    File::Path::mkpath(dirname($cacheFile), 0, 0755);
+    write_file($cacheFile, map { "$_\n" } keys %cache) if $doWrite;
+}
+
+sub alreadyMirrored {
+    my ($algo, $hash) = @_;
+    my $key = "$algo/$hash";
+    return 1 if defined $cache{$key};
+    my $res = defined $bucket->get_key($key);
+    $cache{$key} = 1 if $res;
+    return $res;
+}
+
+sub uploadFile {
+    my ($fn, $name) = @_;
+
+    my $md5_16 = hashFile("md5", 0, $fn) or die;
+    my $sha1_16 = hashFile("sha1", 0, $fn) or die;
+    my $sha256_32 = hashFile("sha256", 1, $fn) or die;
+    my $sha256_16 = hashFile("sha256", 0, $fn) or die;
+    my $sha512_32 = hashFile("sha512", 1, $fn) or die;
+    my $sha512_16 = hashFile("sha512", 0, $fn) or die;
+
+    my $mainKey = "sha512/$sha512_16";
+
+    # Create redirects from the other hash types.
+    sub redirect {
+        my ($name, $dest) = @_;
+        #print STDERR "linking $name to $dest...\n";
+        $bucket->add_key($name, "", {
+            'x-amz-website-redirect-location' => "/" . $dest,
+            'x-amz-acl' => "public-read"
+        })
+            or die "failed to create redirect from $name to $dest\n";
+        $cache{$name} = 1;
+    }
+    redirect "md5/$md5_16", $mainKey;
+    redirect "sha1/$sha1_16", $mainKey;
+    redirect "sha256/$sha256_32", $mainKey;
+    redirect "sha256/$sha256_16", $mainKey;
+    redirect "sha512/$sha512_32", $mainKey;
+
+    # Upload the file as sha512/<hash-in-base-16>.
+    print STDERR "uploading $fn to $mainKey...\n";
+    $bucket->add_key_filename($mainKey, $fn, {
+        'x-amz-meta-original-name' => $name,
+        'x-amz-acl' => "public-read"
+    })
+        or die "failed to upload $fn to $mainKey\n";
+    $cache{$mainKey} = 1;
+}
+
+if (scalar @fileNames) {
+    my $res = 0;
+    foreach my $fn (@fileNames) {
+        eval {
+            if (alreadyMirrored("sha512", hashFile("sha512", 0, $fn))) {
+                print STDERR "$fn is already mirrored\n";
+            } else {
+                uploadFile($fn, basename $fn);
+            }
+        };
+        if ($@) {
+            warn "$@";
+            $res = 1;
+        }
+    }
+    exit $res;
+}
+
+elsif (defined $expr) {
+
+    # Evaluate find-tarballs.nix.
+    my $pid = open(JSON, "-|", "nix-instantiate", "--eval", "--json", "--strict",
+                   "<nixpkgs/maintainers/scripts/find-tarballs.nix>",
+                   "--arg", "expr", $expr);
+    my $stdout = <JSON>;
+    waitpid($pid, 0);
+    die "$0: evaluation failed\n" if $?;
+    close JSON;
+
+    my $fetches = decode_json($stdout);
+
+    print STDERR "evaluation returned ", scalar(@{$fetches}), " tarballs\n";
+
+    # Check every fetchurl call discovered by find-tarballs.nix.
+    my $mirrored = 0;
+    my $have = 0;
+    foreach my $fetch (sort { $a->{urls}->[0] cmp $b->{urls}->[0] } @{$fetches}) {
+        my $urls = $fetch->{urls};
+        my $algo = $fetch->{type};
+        my $hash = $fetch->{hash};
+        my $name = $fetch->{name};
+        my $isPatch = $fetch->{isPatch};
+
+        if ($isPatch) {
+            print STDERR "skipping $urls->[0] (support for patches is missing)\n";
+            next;
+        }
+
+        if ($hash =~ /^([a-z0-9]+)-([A-Za-z0-9+\/=]+)$/) {
+            $algo = $1;
+            $hash = `nix hash to-base16 $hash` or die;
+            chomp $hash;
+        }
+
+        next unless $algo =~ /^[a-z0-9]+$/;
+
+        # Convert non-SRI base-64 to base-16.
+        if ($hash =~ /^[A-Za-z0-9+\/=]+$/) {
+            $hash = `nix hash to-base16 --type '$algo' $hash` or die;
+            chomp $hash;
+        }
+
+        my $storePath = makeFixedOutputPath(0, $algo, $hash, $name);
+
+        for my $url (@$urls) {
+            if (defined $ENV{DEBUG}) {
+                print "$url $algo $hash\n";
+                next;
+            }
+
+            if ($url !~ /^http:/ && $url !~ /^https:/ && $url !~ /^ftp:/ && $url !~ /^mirror:/) {
+                print STDERR "skipping $url (unsupported scheme)\n";
+                next;
+            }
+
+            next if defined $exclude && $url =~ /$exclude/;
+
+            if (alreadyMirrored($algo, $hash)) {
+                $have++;
+                last;
+            }
+
+            print STDERR "mirroring $url ($storePath, $algo, $hash)...\n";
+
+            if ($dryRun) {
+                $mirrored++;
+                last;
+            }
+
+            # Substitute the output.
+            if (!isValidPath($storePath)) {
+                system("nix-store", "-r", $storePath);
+            }
+
+            # Otherwise download the file using nix-prefetch-url.
+            if (!isValidPath($storePath)) {
+                $ENV{QUIET} = 1;
+                $ENV{PRINT_PATH} = 1;
+                my $fh;
+                my $pid = open($fh, "-|", "nix-prefetch-url", "--type", $algo, $url, $hash) or die;
+                waitpid($pid, 0) or die;
+                if ($? != 0) {
+                    print STDERR "failed to fetch $url: $?\n";
+                    next;
+                }
+                <$fh>; my $storePath2 = <$fh>; chomp $storePath2;
+                if ($storePath ne $storePath2) {
+                    warn "strange: $storePath != $storePath2\n";
+                    next;
+                }
+            }
+
+            uploadFile($storePath, $url);
+            $mirrored++;
+            last;
+        }
+    }
+
+    print STDERR "mirrored $mirrored files, already have $have files\n";
+}
+
+else {
+    usage();
+}
diff --git a/nixpkgs/maintainers/scripts/db-to-md.sh b/nixpkgs/maintainers/scripts/db-to-md.sh
new file mode 100755
index 000000000000..aa2a2775b6de
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/db-to-md.sh
@@ -0,0 +1,88 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -I nixpkgs=. -i bash -p pandoc
+
+# This script is temporarily needed while we transition the manual to
+# CommonMark. It converts DocBook files into our CommonMark flavour.
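+#
+# Illustrative usage (the converted .md files are written next to the
+# input files):
+#   ./maintainers/scripts/db-to-md.sh [--debug] some-file.section.xml ...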
+
+debug=
+files=()
+
+while [ "$#" -gt 0 ]; do
+    i="$1"; shift 1
+    case "$i" in
+      --debug)
+        debug=1
+        ;;
+      *)
+        files+=("$i")
+        ;;
+    esac
+done
+
+echo "WARNING: This is an experimental script and might not preserve all formatting." > /dev/stderr
+echo "Please report any issues you discover." > /dev/stderr
+
+outExtension="md"
+if [[ $debug ]]; then
+    outExtension="json"
+fi
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+
+# NOTE: Keep in sync with Nixpkgs manual (/doc/Makefile).
+# TODO: Remove raw-attribute when we can get rid of DocBook altogether.
+pandoc_commonmark_enabled_extensions=+attributes+fenced_divs+footnotes+bracketed_spans+definition_lists+pipe_tables+raw_attribute
+targetLang="commonmark${pandoc_commonmark_enabled_extensions}+smart"
+if [[ $debug ]]; then
+    targetLang=json
+fi
+pandoc_flags=(
+    # Not needed:
+    # - diagram-generator.lua (we do not support that in NixOS manual to limit dependencies)
+    # - media extraction (was only required for diagram generator)
+    # - myst-reader/roles.lua (only relevant for MyST → DocBook)
+    # - link-manpages.lua (links should only be added to display output)
+    # - docbook-writer/rst-roles.lua (only relevant for → DocBook)
+    # - docbook-writer/labelless-link-is-xref.lua (only relevant for → DocBook)
+    "--lua-filter=$DIR/../../doc/build-aux/pandoc-filters/docbook-reader/citerefentry-to-rst-role.lua"
+    "--lua-filter=$DIR/../../doc/build-aux/pandoc-filters/myst-writer/roles.lua"
+    "--lua-filter=$DIR/doc/unknown-code-language.lua"
+    -f docbook
+    -t "$targetLang"
+    --tab-stop=2
+    --wrap=none
+)
+
+for file in "${files[@]}"; do
+    if [[ ! -f "$file" ]]; then
+        echo "db-to-md.sh: $file does not exist" > /dev/stderr
+        exit 1
+    fi
+
+    rootElement=$(xmllint --xpath 'name(//*)' "$file")
+
+    if [[ $rootElement = chapter ]]; then
+        extension=".chapter.$outExtension"
+    elif [[ $rootElement = section ]]; then
+        extension=".section.$outExtension"
+    else
+        echo "db-to-md.sh: $file contains an unsupported root element $rootElement" > /dev/stderr
+        exit 1
+    fi
+
+    outFile="${file%".section.xml"}"
+    outFile="${outFile%".chapter.xml"}"
+    outFile="${outFile%".xml"}$extension"
+    temp1=$(mktemp)
+    $DIR/doc/escape-code-markup.py "$file" "$temp1"
+    if [[ $debug ]]; then
+        echo "Converted $file to $temp1" > /dev/stderr
+    fi
+    temp2=$(mktemp)
+    $DIR/doc/replace-xrefs-by-empty-links.py "$temp1" "$temp2"
+    if [[ $debug ]]; then
+        echo "Converted $temp1 to $temp2" > /dev/stderr
+    fi
+    pandoc "$temp2" -o "$outFile" "${pandoc_flags[@]}"
+    echo "Converted $file to $outFile" > /dev/stderr
+done
diff --git a/nixpkgs/maintainers/scripts/debian-patches.sh b/nixpkgs/maintainers/scripts/debian-patches.sh
new file mode 100755
index 000000000000..1f021c224c3a
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/debian-patches.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# Download patches from the Debian project.
+# Usage: $0 debian-patches.txt debian-patches.nix
+# Example input and output files can be found in tools/graphics/plotutils.
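+#
+# A hypothetical input file: the first line selects the package directory
+# under $DEB_URL and the remaining lines name the patches, e.g.
+#   plotutils/2.6-11
+#   02_libpng-config.patch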
+
+DEB_URL=https://sources.debian.org/data/main
+declare -a deb_patches
+mapfile -t deb_patches < "$1"
+
+# First letter
+deb_prefix="${deb_patches[0]:0:1}"
+prefix="${DEB_URL}/${deb_prefix}/${deb_patches[0]}/debian/patches"
+
+if [[ -n "$2" ]]; then
+    exec 1> "$2"
+fi
+
+cat <<EOF
+# Generated by $(basename $0) from $(basename $1)
+let
+  prefix = "${prefix}";
+in
+[
+EOF
+for ((i=1;i < ${#deb_patches[@]}; ++i)); do
+    url="${prefix}/${deb_patches[$i]}"
+    sha256=$(nix-prefetch-url "$url")
+    echo "  {"
+    echo "    url = \"\${prefix}/${deb_patches[$i]}\";"
+    echo "    sha256 = \"$sha256\";"
+    echo "  }"
+done
+echo "]"
diff --git a/nixpkgs/maintainers/scripts/dep-licenses.sh b/nixpkgs/maintainers/scripts/dep-licenses.sh
new file mode 100755
index 000000000000..816dcf6d7f76
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/dep-licenses.sh
@@ -0,0 +1,57 @@
+#!/bin/sh
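+
+# Illustrative usage, assuming a nixpkgs checkout with the
+# traceDrvLicenses stdenv adapter enabled (see the error message below):
+#   NIXPKGS=/path/to/nixpkgs ./dep-licenses.sh <attr>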
+
+attr=$1
+
+: ${NIXPKGS=/etc/nixos/nixpkgs}
+
+tmp=$(mktemp --tmpdir -d nixpkgs-dep-license.XXXXXX)
+
+exitHandler() {
+    exitCode=$?
+    rm -rf "$tmp"
+    return $exitCode
+}
+
+trap "exitHandler" EXIT
+
+# fetch the trace and the drvPath of the attribute.
+nix-instantiate $NIXPKGS -A $attr --show-trace > "$tmp/drvPath" 2> "$tmp/trace" || {
+  cat 1>&2 - "$tmp/trace" <<EOF
+An error occurred while evaluating $attr.
+EOF
+  exit 1
+}
+
+# generate a sed script based on the trace output.
+sed '
+  \,@:.*:@, {
+    # \1  *.drv file
+    # \2  License terms
+    s,.*@:drv:\(.*\):\(.*\):@.*,s!\1!\1: \2!; t;,
+    s!Str(\\\"\([^,]*\)\\\",\[\])!\1!g
+    b
+  }
+  d
+' "$tmp/trace" > "$tmp/filter.sed"
+
+if test "$(wc -l < "$tmp/filter.sed")" -eq 0; then
+  echo 1>&2 "
+No derivation mentioned in the stack trace.  Either your derivation does
+not use stdenv.mkDerivation or you forgot to use the stdenv adapter named
+traceDrvLicenses.
+
+-  defaultStdenv = allStdenvs.stdenv;
++  defaultStdenv = traceDrvLicenses allStdenvs.stdenv;
+"
+  exit 1
+fi
+
+
+# remove all dependencies which are using stdenv.mkDerivation
+echo '
+d
+' >> "$tmp/filter.sed"
+
+nix-store -q --tree $(cat "$tmp/drvPath") | sed -f "$tmp/filter.sed"
+
+exit 0
diff --git a/nixpkgs/maintainers/scripts/doc/escape-code-markup.py b/nixpkgs/maintainers/scripts/doc/escape-code-markup.py
new file mode 100755
index 000000000000..015435b698e6
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/doc/escape-code-markup.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -I nixpkgs=channel:nixos-unstable -i python3 -p python3 -p python3.pkgs.lxml
+
+"""
+Pandoc will strip any markup within code elements so
+let’s escape them so that they can be handled manually.
+"""
+
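+# For example (illustrative), a child element inside a code element such as
+#
+#     <literal>run <replaceable>command</replaceable></literal>
+#
+# is turned into escaped text when the tree is serialized:
+#
+#     <literal>run &lt;replaceable&gt;command&lt;/replaceable&gt;</literal>
+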
+import lxml.etree as ET
+import re
+import sys
+
+def replace_element_by_text(el: ET.Element, text: str) -> None:
+    """
+    Author: bernulf
+    Source: https://stackoverflow.com/a/10520552/160386
+    SPDX-License-Identifier: CC-BY-SA-3.0
+    """
+    text = text + (el.tail or "")
+    parent = el.getparent()
+    if parent is not None:
+        previous = el.getprevious()
+        if previous is not None:
+            previous.tail = (previous.tail or "") + text
+        else:
+            parent.text = (parent.text or "") + text
+        parent.remove(el)
+
+DOCBOOK_NS = "http://docbook.org/ns/docbook"
+
+# List of elements that pandoc’s DocBook reader strips markup from.
+# https://github.com/jgm/pandoc/blob/master/src/Text/Pandoc/Readers/DocBook.hs
+code_elements = [
+    # CodeBlock
+    "literallayout",
+    "screen",
+    "programlisting",
+    # Code (inline)
+    "classname",
+    "code",
+    "filename",
+    "envar",
+    "literal",
+    "computeroutput",
+    "prompt",
+    "parameter",
+    "option",
+    "markup",
+    "wordasword",
+    "command",
+    "varname",
+    "function",
+    "type",
+    "symbol",
+    "constant",
+    "userinput",
+    "systemitem",
+]
+
+XMLNS_REGEX = re.compile(r'\s+xmlns(?::[^=]+)?="[^"]*"')
+ROOT_ELEMENT_REGEX = re.compile(r'^\s*<[^>]+>')
+
+def remove_xmlns(match: re.Match) -> str:
+    """
+    Removes xmlns attributes.
+
+    Expects a match containing an opening tag.
+    """
+    return XMLNS_REGEX.sub('', match.group(0))
+
+if __name__ == '__main__':
+    assert len(sys.argv) >= 3, "usage: escape-code-markup.py <input> <output>"
+
+    tree = ET.parse(sys.argv[1])
+    name_predicate = " or ".join([f"local-name()='{el}'" for el in code_elements])
+
+    for markup in tree.xpath(f"//*[({name_predicate}) and namespace-uri()='{DOCBOOK_NS}']/*"):
+        text = ET.tostring(markup, encoding=str)
+
+        # tostring adds xmlns attributes to the element we want to stringify
+        # as if it was supposed to be usable standalone.
+        # We are just converting it to CDATA so we do not care.
+        # Let’s strip the namespace declarations to keep the code clean.
+        #
+        # Note that this removes even namespaces that were potentially
+        # in the original file. Though, that should be very rare –
+        # most of the time, we will stringify empty DocBook elements
+        # like <xref> or <co> or, at worst, <link> with xlink:href attribute.
+        #
+        # Also note that the regex expects the root element to be first
+        # thing in the string. But that should be fine, the tostring method
+        # does not produce XML declaration or doctype by default.
+        text = ROOT_ELEMENT_REGEX.sub(remove_xmlns, text)
+
+        replace_element_by_text(markup, text)
+
+    tree.write(sys.argv[2])
diff --git a/nixpkgs/maintainers/scripts/doc/list-systemd-manpages.zsh b/nixpkgs/maintainers/scripts/doc/list-systemd-manpages.zsh
new file mode 100755
index 000000000000..6737a4d3fef1
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/doc/list-systemd-manpages.zsh
@@ -0,0 +1,33 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i zsh -p zsh
+set -euo pipefail
+
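+# Emits JSON fragments mapping each systemd manpage to its upstream HTML URL,
+# e.g. (illustrative):
+#   "systemd.service(5)": "https://www.freedesktop.org/software/systemd/man/systemd.service.html",
+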
+# cd into nixpkgs' root, get the store path of `systemd.man`
+cd "$(dirname "$0")/../../.."
+SYSTEMD_MAN_DIR="$(nix-build -A systemd.man)/share/man"
+
+# For each manual section
+for section in {1..8}; do
+  sec_dir="${SYSTEMD_MAN_DIR}/man${section}"
+
+  # skip section 3 (library calls)
+  ! [[ $section -eq 3 ]] || continue
+
+  # for each manpage in that section (potentially none)
+  for manpage in ${sec_dir}/*(N); do
+    # strip the directory prefix and (compressed) manpage suffix
+    page="$(basename "$manpage" ".${section}.gz")"
+
+    # if this is the manpage of a service unit
+    if [[ "$page" =~ ".*\.service" ]]; then
+     # ... and a manpage exists without the `.service` suffix
+     potential_alias="${sec_dir}/${page%\.service}.${section}.gz"
+     ! [[ -e "${potential_alias}" &&
+              # ... which points to the same file, then skip
+              "$(gunzip -c "${potential_alias}")" == ".so ${page}.${section}" ]] || continue
+    fi
+
+    # else produce a JSON fragment, with the link to the upstream manpage (as HTML)
+    echo "  \"${page}(${section})\": \"https://www.freedesktop.org/software/systemd/man/${page}.html\","
+  done
+done
diff --git a/nixpkgs/maintainers/scripts/doc/replace-xrefs-by-empty-links.py b/nixpkgs/maintainers/scripts/doc/replace-xrefs-by-empty-links.py
new file mode 100755
index 000000000000..2006ef897f7a
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/doc/replace-xrefs-by-empty-links.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -I nixpkgs=channel:nixos-unstable -i python3 -p python3 -p python3.pkgs.lxml
+
+"""
+Pandoc will try to resolve xrefs and replace them with regular links.
+Let’s replace them with links with empty labels, which MyST
+and our pandoc filters recognize as cross-references.
+"""
+
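+# For example (illustrative), <xref linkend="ch-options"/> becomes
+# <link xlink:href="#ch-options"/>: a link with an empty label.
+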
+import lxml.etree as ET
+import sys
+
+XLINK_NS = "http://www.w3.org/1999/xlink"
+
+ns = {
+    "db": "http://docbook.org/ns/docbook",
+}
+
+
+if __name__ == '__main__':
+    assert len(sys.argv) >= 3, "usage: replace-xrefs-by-empty-links.py <input> <output>"
+
+    tree = ET.parse(sys.argv[1])
+    for xref in tree.findall(".//db:xref", ns):
+        text = ET.tostring(xref, encoding=str)
+        parent = xref.getparent()
+        link = parent.makeelement('link')
+        target_name = xref.get("linkend")
+        link.set(f"{{{XLINK_NS}}}href", f"#{target_name}")
+        parent.replace(xref, link)
+
+    tree.write(sys.argv[2])
diff --git a/nixpkgs/maintainers/scripts/doc/unknown-code-language.lua b/nixpkgs/maintainers/scripts/doc/unknown-code-language.lua
new file mode 100644
index 000000000000..85d8df4690ba
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/doc/unknown-code-language.lua
@@ -0,0 +1,12 @@
+--[[
+Adds “unknown” class to CodeBlock AST nodes without any classes.
+
+This will cause Pandoc to use a fenced code block, which we prefer.
+]]
+
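+-- For example (illustrative), a DocBook <programlisting> with no language
+-- annotation would otherwise become an indented code block; with the
+-- “unknown” class Pandoc emits a fenced block:
+--
+--     ```unknown
+--     echo hello
+--     ```
+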
+function CodeBlock(elem)
+  if #elem.classes == 0 then
+    elem.classes:insert('unknown')
+    return elem
+  end
+end
diff --git a/nixpkgs/maintainers/scripts/eval-release.nix b/nixpkgs/maintainers/scripts/eval-release.nix
new file mode 100644
index 000000000000..4f0ca2465025
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/eval-release.nix
@@ -0,0 +1,25 @@
+# Evaluate `release.nix' like Hydra would.  Too bad nix-instantiate
+# can't do this itself.
+
+with import ../../lib;
+
+let
+  trace = if builtins.getEnv "VERBOSE" == "1" then builtins.trace else (x: y: y);
+
+  rel = removeAttrs (import ../../pkgs/top-level/release.nix { }) [ "tarball" "unstable" "xbursttools" ];
+
+  # Add the ‘recurseForDerivations’ attribute to ensure that
+  # nix-instantiate recurses into nested attribute sets.
+  recurse = path: attrs:
+    if (builtins.tryEval attrs).success then
+      if isDerivation attrs
+      then
+        if (builtins.tryEval attrs.drvPath).success
+        then { inherit (attrs) name drvPath; }
+        else { failed = true; }
+      else if attrs == null then {}
+      else { recurseForDerivations = true; } //
+           mapAttrs (n: v: let path' = path ++ [n]; in trace path' (recurse path' v)) attrs
+    else { };
+
+in recurse [] rel
diff --git a/nixpkgs/maintainers/scripts/eval-release.sh b/nixpkgs/maintainers/scripts/eval-release.sh
new file mode 100755
index 000000000000..b588c767b6ae
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/eval-release.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+if [[ -z "$VERBOSE" ]]; then
+  echo "You may set VERBOSE=1 to see debug output or to any other non-empty string to make this script completely silent"
+fi
+unset HOME NIXPKGS_CONFIG # Force empty config
+
+# With the default heap size (380MB), nix-instantiate fails:
+# Too many heap sections: Increase MAXHINCR or MAX_HEAP_SECTS
+export GC_INITIAL_HEAP_SIZE=${GC_INITIAL_HEAP_SIZE:-2000000000} # 2GB
+nix-instantiate --strict --eval-only --xml --show-trace "$(dirname "$0")"/eval-release.nix 2>&1 > /dev/null
diff --git a/nixpkgs/maintainers/scripts/feature-freeze-teams.pl b/nixpkgs/maintainers/scripts/feature-freeze-teams.pl
new file mode 100755
index 000000000000..1c1a5c00907d
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/feature-freeze-teams.pl
@@ -0,0 +1,98 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i perl -p perl -p perlPackages.JSON perlPackages.LWPUserAgent perlPackages.LWPProtocolHttps perlPackages.TermReadKey
+
+# This script generates a list of teams to ping for the Feature Freeze announcement on Discourse.
+# It's intended to be used by Release Managers before creating such posts.
+#
+# The script interactively reads a GitHub username and a corresponding GitHub Personal Access token.
+# This is required to access the GitHub Teams API so the token needs at least the read:org privilege.
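+#
+# Example output line (illustrative):
+#   Haskell: @NixOS/haskell @someMaintainer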
+
+## no critic (InputOutput::RequireCheckedSyscalls, InputOutput::ProhibitBacktickOperators)
+use strict;
+use warnings;
+use Carp;
+use Cwd 'abs_path';
+use File::Basename;
+use JSON qw(decode_json);
+use LWP::UserAgent;
+use Term::ReadKey qw(ReadLine ReadMode);
+
+sub github_team_members {
+    my ($team_name, $username, $token) = @_;
+    my @ret;
+
+    my $req = HTTP::Request->new('GET', "https://api.github.com/orgs/NixOS/teams/$team_name/members", [ 'Accept' => 'application/vnd.github.v3+json' ]);
+    $req->authorization_basic($username, $token);
+    my $response = LWP::UserAgent->new->request($req);
+
+    if ($response->is_success) {
+        my $content = decode_json($response->decoded_content);
+        foreach (@{$content}) {
+            push @ret, $_->{'login'};
+        }
+    } else {
+        print {*STDERR} "!! Requesting members of GitHub Team '$team_name' failed: " . $response->status_line;
+    }
+
+    return \@ret;
+}
+
+# Read GitHub credentials
+print {*STDERR} 'GitHub username: ';
+my $github_user = ReadLine(0);
+ReadMode('noecho');
+print {*STDERR} 'GitHub personal access token (no echo): ';
+my $github_token = ReadLine(0);
+ReadMode('restore');
+print {*STDERR} "\n";
+chomp $github_user;
+chomp $github_token;
+
+# Read nix output
+my $nix_version = `nix --version`;
+my $out;
+my $lib_path = abs_path(dirname(__FILE__) . '/../../lib');
+if ($nix_version =~ m/2[.]3[.]/msx) {
+    $out = `nix eval --json '(import $lib_path).teams'` || croak 'nix eval failed';
+} else {
+    $out = `nix --extra-experimental-features nix-command eval --json --impure --expr '(import $lib_path).teams'` || croak('nix eval failed');
+}
+my $data = decode_json($out);
+
+# Process teams
+print {*STDERR} "\n";
+while (my ($team_nix_key, $team_config) = each %{$data}) {
+    # Ignore teams that don't want to be or can't be pinged
+    if (not defined $team_config->{enableFeatureFreezePing} or not $team_config->{enableFeatureFreezePing}) {
+        next;
+    }
+    if (not defined $team_config->{shortName}) {
+        print {*STDERR} "!! The team with the nix key '$team_nix_key' has no shortName set - ignoring";
+        next;
+    }
+    #  Team name
+    print {*STDERR} "$team_config->{shortName}:";
+    # GitHub Teams
+    my @github_members;
+    if (defined $team_config->{githubTeams}) {
+        foreach (@{$team_config->{githubTeams}}) {
+            print {*STDERR} " \@NixOS/${_}";
+            push @github_members, @{github_team_members($_, $github_user, $github_token)};
+        }
+    }
+    my %github_members = map { $_ => 1 } @github_members;
+    # Members
+    if (defined $team_config->{members}) {
+        foreach (@{$team_config->{members}}) {
+            my %user = %{$_};
+            my $github_handle = $user{'github'};
+            # Ensure we don't ping team members twice (as team member and directly)
+            if (defined $github_members{$github_handle}) {
+                next;
+            }
+            print {*STDERR} " \@$github_handle";
+        }
+    }
+
+    print {*STDERR} "\n";
+}
diff --git a/nixpkgs/maintainers/scripts/fetch-kde-qt.sh b/nixpkgs/maintainers/scripts/fetch-kde-qt.sh
new file mode 100755
index 000000000000..c43e8ad904d7
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/fetch-kde-qt.sh
@@ -0,0 +1,196 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils findutils gnused nix wget
+
+set -efuo pipefail
+export LC_COLLATE=C # fix sort order
+
+# parse files and folders from https://download.kde.org/ and https://download.qt.io/
+# you can override this function in fetch.sh
+function PARSE_INDEX() {
+    grep -o -E -e '\s+href="[^"]+\.tar\.xz"' -e '\s+href="[-_a-zA-Z0-9]+/"' "$1" | cut -d'"' -f2 | sort -u
+}
+
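+# fetch.sh is sourced below and may redefine PARSE_INDEX, e.g. (illustrative
+# sketch) to only pick up tarballs and skip subdirectories:
+#
+#     function PARSE_INDEX() {
+#         grep -o -E '\s+href="[^"]+\.tar\.xz"' "$1" | cut -d'"' -f2 | sort -u
+#     }
+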
+if [ $# != 1 ]; then
+    echo "example use:" >&2
+    echo "cd nixpkgs/" >&2
+    echo "./maintainers/scripts/fetch-kde-qt.sh pkgs/development/libraries/qt-5/5.12" >&2
+    exit 1
+fi
+
+if ! echo "$1" | grep -q '^pkgs/'; then
+    echo "error: path argument must start with pkgs/" >&2
+    exit 1
+fi
+
+# need absolute path for the pushd-popd block
+if [ -f "$1" ]; then
+    echo "ok: using fetchfile $1"
+    fetchfilerel="$1"
+    fetchfile="$(readlink -f "$fetchfilerel")" # resolve absolute path
+    basedir="$(dirname "$fetchfile")"
+    basedirrel="$(dirname "$fetchfilerel")"
+elif [ -d "$1" ]; then
+    echo "ok: using basedir $1"
+    basedirrel="$1"
+    basedir="$(readlink -f "$basedirrel")" # resolve absolute path
+    if ! [ -d "$basedir" ]; then
+        basedir="$(dirname "$basedir")"
+    fi
+    fetchfile="$basedir/fetch.sh"
+else
+    echo "error: argument must be an existing file or directory" >&2
+    exit 1
+fi
+
+pkgname=$(basename "$basedir")
+SRCS="$basedir/srcs.nix"
+srcsrel="$basedirrel/srcs.nix"
+
+source "$fetchfile"
+
+if [ -n "${WGET_ARGS:-}" ]; then # old format
+    BASE_URL="${WGET_ARGS[0]}" # convert to new format
+    # validate
+    if ! echo "$BASE_URL" | grep -q -E '^(http|https|ftp)://'; then
+        printf 'error: from WGET_ARGS, converted invalid BASE_URL: %q\n' "$BASE_URL" >&2
+        exit 1
+    fi
+    printf 'ok: from WGET_ARGS, converted BASE_URL: %q\n' "$BASE_URL"
+elif [ -n "${BASE_URL:-}" ]; then # new format
+    :
+else
+    echo "error: fetch.sh must set either WGET_ARGS or BASE_URL" >&2
+    exit 1
+fi
+
+tmptpl=tmp.fetch-kde-qt.$pkgname.XXXXXXXXXX
+
+tmp=$(mktemp -d $tmptpl)
+pushd $tmp >/dev/null
+echo "tempdir is $tmp"
+
+wgetargs='--quiet --show-progress'
+#wgetargs='' # debug
+
+dirlist="$BASE_URL"
+filelist=""
+base_url_len=${#BASE_URL}
+
+clean_urls() {
+    # // -> /
+    sed -E 's,//+,/,g' | sed -E 's,^(http|https|ftp):/,&/,'
+}
+
+while [ -n "$dirlist" ]
+do
+    for dirurl in $dirlist
+    do
+        echo "fetching index.html from $dirurl"
+        relpath=$(echo "./${dirurl:$base_url_len}" | clean_urls)
+        mkdir -p "$relpath"
+        indexfile=$(echo "$relpath/index.html" | clean_urls)
+        wget $wgetargs -O "$indexfile" "$dirurl"
+        echo "parsing $indexfile"
+        filedirlist="$(PARSE_INDEX "$indexfile")"
+        filelist_next="$(echo "$filedirlist" | grep '\.tar\.xz$' | while read file; do echo "$dirurl/$file"; done)"
+        filelist_next="$(echo "$filelist_next" | clean_urls)"
+        [ -n "$filelist" ] && filelist+=$'\n'
+        filelist+="$filelist_next"
+        dirlist="$(echo "$filedirlist" | grep -v '\.tar\.xz$' | while read dir; do echo "$dirurl/$dir"; done || true)"
+        dirlist="$(echo "$dirlist" | clean_urls)"
+    done
+done
+
+filecount=$(echo "$filelist" | wc -l)
+
+if [ -z "$filelist" ]
+then
+    echo "error: no files parsed from $tmp/index.html"
+    exit 1
+fi
+
+echo "parsed $filecount tar.xz files:"; echo "$filelist"
+
+# most time is spent here
+echo "fetching $filecount sha256 files ..."
+urllist="$(echo "$filelist" | while read file; do echo "$file.sha256"; done)"
+# wget -r: keep directory structure
+echo "$urllist" | xargs wget $wgetargs -nH -r -c --no-parent && {
+    actual=$(find . -type f -name '*.sha256' | wc -l)
+    echo "fetching $filecount sha256 files done: got $actual files"
+} || {
+    # workaround: in rare cases, the server does not provide the sha256 files
+    # for example when the release is just a few hours old
+    # and the servers are not yet fully synced
+    actual=$(find . -type f -name '*.sha256' | wc -l)
+    echo "fetching $filecount sha256 files failed: got only $actual files"
+
+    # TODO fetch only missing tar.xz files
+    echo "fetching $filecount tar.xz files ..."
+    echo "$filelist" | xargs wget $wgetargs -nH -r -c --no-parent
+
+    echo "generating sha256 files ..."
+    find . -type f -name '*.tar.xz' | while read src; do
+        name=$(basename "$src")
+        sha256=$(sha256sum "$src" | cut -d' ' -f1)
+        echo "$sha256  $name" >"$src.sha256"
+    done
+}
+
+csv=$(mktemp $tmptpl.csv)
+echo "writing temporary file $csv ..."
+find . -type f -name '*.sha256' | while read sha256file; do
+    src="${sha256file%.*}" # remove extension
+    sha256=$(cut -d' ' -f1 < "$sha256file") # base16
+    sha256=$(nix-hash --type sha256 --to-base32 $sha256)
+    # Sanitize file name
+    filename=$(basename "$src" | tr '@' '_')
+    nameVersion="${filename%.tar.*}"
+    name=$(echo "$nameVersion" | sed -e 's,-[[:digit:]].*,,' | sed -e 's,-opensource-src$,,' | sed -e 's,-everywhere-src$,,')
+    version=$(echo "$nameVersion" | sed -e 's,^\([[:alpha:]][[:alnum:]]*-\)\+,,')
+    echo "$name,$version,$src,$filename,$sha256" >>$csv
+done
+
+files_before=$(grep -c 'src = ' "$SRCS")
+
+echo "writing output file $SRCS ..."
+cat >"$SRCS" <<EOF
+# DO NOT EDIT! This file is generated automatically.
+# Command: $0 $@
+{ fetchurl, mirror }:
+
+{
+EOF
+
+gawk -F , "{ print \$1 }" $csv | sort | uniq | while read name; do
+    versions=$(gawk -F , "/^$name,/ { print \$2 }" $csv)
+    latestVersion=$(echo "$versions" | sort -rV | head -n 1)
+    src=$(gawk -F , "/^$name,$latestVersion,/ { print \$3 }" $csv)
+    filename=$(gawk -F , "/^$name,$latestVersion,/ { print \$4 }" $csv)
+    sha256=$(gawk -F , "/^$name,$latestVersion,/ { print \$5 }" $csv)
+    url="${src:2}"
+    cat >>"$SRCS" <<EOF
+  $name = {
+    version = "$latestVersion";
+    src = fetchurl {
+      url = "\${mirror}/$url";
+      sha256 = "$sha256";
+      name = "$filename";
+    };
+  };
+EOF
+done
+
+echo "}" >>"$SRCS"
+
+files_after=$(grep -c 'src = ' "$SRCS")
+echo "files before: $files_before"
+echo "files after:  $files_after"
+
+echo "compare:"
+echo "git diff $srcsrel"
+
+popd >/dev/null
+rm -fr $tmp # $csv was created inside $tmp and is removed with it
diff --git a/nixpkgs/maintainers/scripts/find-tarballs.nix b/nixpkgs/maintainers/scripts/find-tarballs.nix
new file mode 100644
index 000000000000..c47b5168abd9
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/find-tarballs.nix
@@ -0,0 +1,50 @@
+# This expression returns a list of all fetchurl calls used by ‘expr’.
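+# Each element of the returned list has the shape (illustrative):
+#   { urls = [ ... ]; hash = "..."; isPatch = false; type = "sha256"; name = "..."; }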
+
+with import ../.. { };
+with lib;
+
+{ expr }:
+
+let
+
+  root = expr;
+
+  uniqueFiles = map (x: x.file) (genericClosure {
+    startSet = map (file: { key = with file; (if type == null then "" else type + "+") + hash; inherit file; }) files;
+    operator = const [ ];
+  });
+
+  files = map (drv: { urls = drv.urls or [ drv.url ]; hash = drv.outputHash; isPatch = (drv?postFetch && drv.postFetch != ""); type = drv.outputHashAlgo; name = drv.name; }) fetchurlDependencies;
+
+  fetchurlDependencies =
+    filter
+      (drv: drv.outputHash or "" != "" && drv.outputHashMode or "flat" == "flat"
+          && (drv ? url || drv ? urls))
+      dependencies;
+
+  dependencies = map (x: x.value) (genericClosure {
+    startSet = map keyDrv (derivationsIn' root);
+    operator = { key, value }: map keyDrv (immediateDependenciesOf value);
+  });
+
+  derivationsIn' = x:
+    if !canEval x then []
+    else if isDerivation x then optional (canEval x.drvPath) x
+    else if isList x then concatLists (map derivationsIn' x)
+    else if isAttrs x then concatLists (mapAttrsToList (n: v: addErrorContext "while finding tarballs in '${n}':" (derivationsIn' v)) x)
+    else [ ];
+
+  keyDrv = drv: if canEval drv.drvPath then { key = drv.drvPath; value = drv; } else { };
+
+  immediateDependenciesOf = drv:
+    concatLists (mapAttrsToList (n: v: derivationsIn v) (removeAttrs drv (["meta" "passthru"] ++ optionals (drv?passthru) (attrNames drv.passthru))));
+
+  derivationsIn = x:
+    if !canEval x then []
+    else if isDerivation x then optional (canEval x.drvPath) x
+    else if isList x then concatLists (map derivationsIn x)
+    else [ ];
+
+  canEval = val: (builtins.tryEval val).success;
+
+in uniqueFiles
diff --git a/nixpkgs/maintainers/scripts/fix-maintainers.pl b/nixpkgs/maintainers/scripts/fix-maintainers.pl
new file mode 100755
index 000000000000..c953cff5cc48
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/fix-maintainers.pl
@@ -0,0 +1,58 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i perl -p perl -p perlPackages.LWP -p perlPackages.LWPProtocolHttps -p perlPackages.LWPUserAgent -p perlPackages.JSON -p perlPackages.PathTiny
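+
+# Cross-checks each maintainer's githubId in ../maintainer-list.nix against the
+# GitHub API and rewrites stale `github` handles in place.
+# Usage (illustrative): GH_TOKEN=<personal access token> ./fix-maintainers.pl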
+use LWP::UserAgent;
+use JSON;
+use Path::Tiny;
+use strict;
+use warnings;
+
+my $maintainers_list_nix = "../maintainer-list.nix";
+my $maintainers_json = from_json(`nix-instantiate --json --eval --expr 'builtins.fromJSON (builtins.toJSON (import $maintainers_list_nix))'`);
+
+STDOUT->autoflush(1);
+
+my $ua = LWP::UserAgent->new();
+
+if (!defined $ENV{GH_TOKEN}) {
+    die "Set GH_TOKEN before running this script";
+}
+
+keys %$maintainers_json; # reset the internal iterator so a prior each() doesn't affect the loop
+while(my($k, $v) = each %$maintainers_json) {
+    my $current_user = %$v{'github'};
+    if (!defined $current_user) {
+        print "$k has no github handle\n";
+    }
+    my $github_id = %$v{'githubId'};
+    if (!defined $github_id) {
+        print "$k has no githubId\n";
+        next;
+    }
+    my $url = 'https://api.github.com/user/' . $github_id;
+    my $resp = $ua->get(
+        $url,
+        "Authorization" => "Token $ENV{GH_TOKEN}"
+    );
+
+    if ($resp->header("X-RateLimit-Remaining") == 0) {
+        my $ratelimit_reset = $resp->header("X-RateLimit-Reset");
+        print "Request limit exceeded, waiting until " . scalar localtime $ratelimit_reset . "\n";
+        sleep($ratelimit_reset - time() + 5);
+    }
+    if ($resp->code != 200) {
+        print "$k likely deleted their github account\n";
+        next;
+    }
+    my $resp_json = from_json($resp->content);
+    my $api_user = %$resp_json{"login"};
+    if (!defined $current_user) {
+        print "$k is known on github as $api_user.\n";
+    }
+    elsif (lc($current_user) ne lc($api_user)) {
+        print "$k is now known on github as $api_user. Editing maintainer-list.nix…\n";
+        my $file = path($maintainers_list_nix);
+        my $data = $file->slurp_utf8;
+        $data =~ s/github = "$current_user";$/github = "$api_user";/m;
+        $file->spew_utf8($data);
+    }
+}
diff --git a/nixpkgs/maintainers/scripts/get-maintainer.sh b/nixpkgs/maintainers/scripts/get-maintainer.sh
new file mode 100755
index 000000000000..3061d2ccc72f
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/get-maintainer.sh
@@ -0,0 +1,73 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p jq ncurses
+# shellcheck shell=bash
+
+# Get a nixpkgs maintainer's metadata as a JSON object
+#  see HELP_MESSAGE just below, or README.md.
+
+set -euo pipefail
+
+declare -A SELECTORS=( [handle]= [email]= [github]= [githubId]= [matrix]= [name]= )
+HELP_MESSAGE="usage: '$0' [selector] value
+examples:
+  get-maintainer.sh nicoo
+  get-maintainer.sh githubId 1155801
+
+\`selector\` defaults to 'handle', can be one of:
+  ${!SELECTORS[*]}
+"
+
+MAINTAINERS_DIR="$(dirname "$0")/.."
+
+die() {
+  tput setaf 1 # red
+  echo "'$0': $*"
+  tput setaf 0 # back to black
+  exit 1
+}
+
+listAsJSON() {
+  nix-instantiate --eval --strict --json "${MAINTAINERS_DIR}/maintainer-list.nix"
+}
+
+parseArgs() {
+  [ $# -gt 0 ] && [ $# -lt 3 ] || {
+      echo "$HELP_MESSAGE"
+      die "invalid number of arguments (must be 1 or 2)"
+  }
+
+  if [ $# -eq 1 ]; then
+    selector=handle
+  else
+    selector="$1"
+    shift
+  fi
+  [ -z "${SELECTORS[$selector]-n}" ] || {
+    echo "Valid selectors are:" "${!SELECTORS[@]}" >&2
+    die "invalid selector '$selector'"
+  }
+
+  value="$1"
+  shift
+}
+
+query() {
+  # explode { a: A, b: B, ... } into A + {handle: a}, B + {handle: b}, ...
+  local explode="to_entries[] | .value + { \"handle\": .key }"
+
+  # select matching items from the list
+  # TODO(nicoo): Support approximate matching for `name` ?
+  local select
+  case "$selector" in
+    githubId)
+      select="select(.${selector} == $value)"
+      ;;
+    *)
+      select="select(.${selector} == \"$value\")"
+  esac
+
+  echo "$explode | $select"
+}
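+
+# For a handle lookup the generated jq program is, illustratively:
+#   to_entries[] | .value + { "handle": .key } | select(.handle == "nicoo")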
+
+parseArgs "$@"
+listAsJSON | jq -e "$(query)"
diff --git a/nixpkgs/maintainers/scripts/haskell/dependencies.nix b/nixpkgs/maintainers/scripts/haskell/dependencies.nix
new file mode 100644
index 000000000000..fd8338c0029a
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/dependencies.nix
@@ -0,0 +1,10 @@
+# Nix script to calculate the Haskell dependencies of every haskellPackage. Used by ./hydra-report.hs.
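+#
+# The result has the shape (illustrative):
+#   { mtl = { deps = [ "base" "transformers" ]; broken = false; }; ... }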
+let
+  pkgs = import ../../.. {};
+  inherit (pkgs) lib;
+  getDeps = _: pkg: {
+    deps = builtins.filter (x: x != null) (map (x: x.pname or null) (pkg.propagatedBuildInputs or []));
+    broken = (pkg.meta.hydraPlatforms or [null]) == [];
+  };
+in
+  lib.mapAttrs getDeps pkgs.haskellPackages
diff --git a/nixpkgs/maintainers/scripts/haskell/hydra-report.hs b/nixpkgs/maintainers/scripts/haskell/hydra-report.hs
new file mode 100755
index 000000000000..8b4f798cc543
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/hydra-report.hs
@@ -0,0 +1,819 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -p "haskellPackages.ghcWithPackages (p: [p.aeson p.req])"
+#! nix-shell -p hydra-unstable
+#! nix-shell -i runhaskell
+
+{-
+
+The purpose of this script is
+
+1) download the state of the nixpkgs/haskell-updates job from hydra (with get-report)
+2) print a summary of the state suitable for pasting into a github comment (with ping-maintainers)
+3) print a list of broken packages suitable for pasting into configuration-hackage2nix.yaml
+
+Because step 1) is quite expensive and takes roughly ~5 minutes, the result is cached in a JSON file in XDG_CACHE.
+
+-}
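+
+-- Typical invocation (illustrative):
+--
+-- > ./hydra-report.hs get-report
+-- > ./hydra-report.hs ping-maintainers > maintainer-ping.md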
+{-# LANGUAGE BlockArguments #-}
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE NamedFieldPuns #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TupleSections #-}
+{-# LANGUAGE ViewPatterns #-}
+{-# OPTIONS_GHC -Wall #-}
+{-# LANGUAGE DataKinds #-}
+
+import Control.Monad (forM_, forM, (<=<))
+import Control.Monad.Trans (MonadIO (liftIO))
+import Data.Aeson (
+   FromJSON,
+   FromJSONKey,
+   ToJSON,
+   decodeFileStrict',
+   eitherDecodeStrict',
+   encodeFile,
+ )
+import Data.Foldable (Foldable (toList), foldl')
+import Data.List.NonEmpty (NonEmpty, nonEmpty)
+import qualified Data.List.NonEmpty as NonEmpty
+import Data.Map.Strict (Map)
+import qualified Data.Map.Strict as Map
+import Data.Maybe (fromMaybe, mapMaybe, isNothing)
+import Data.Monoid (Sum (Sum, getSum))
+import Data.Sequence (Seq)
+import qualified Data.Sequence as Seq
+import Data.Set (Set)
+import qualified Data.Set as Set
+import Data.Text (Text)
+import qualified Data.Text as Text
+import Data.Text.Encoding (encodeUtf8)
+import qualified Data.Text.IO as Text
+import Data.Time (defaultTimeLocale, formatTime, getCurrentTime)
+import Data.Time.Clock (UTCTime)
+import GHC.Generics (Generic)
+import Network.HTTP.Req (
+    GET (GET),
+    HttpResponse (HttpResponseBody),
+    NoReqBody (NoReqBody),
+    Option,
+    Req,
+    Scheme (Https),
+    bsResponse,
+    defaultHttpConfig,
+    header,
+    https,
+    jsonResponse,
+    req,
+    responseBody,
+    responseTimeout,
+    runReq,
+    (/:),
+ )
+import System.Directory (XdgDirectory (XdgCache), getXdgDirectory)
+import System.Environment (getArgs)
+import System.Process (readProcess)
+import Prelude hiding (id)
+import Data.List (sortOn)
+import Control.Concurrent.Async (concurrently)
+import Control.Exception (evaluate)
+import qualified Data.IntMap.Strict as IntMap
+import qualified Data.IntSet as IntSet
+import Data.Bifunctor (second)
+import Data.Data (Proxy)
+import Data.ByteString (ByteString)
+import qualified Data.ByteString.Char8 as ByteString
+import Distribution.Simple.Utils (safeLast, fromUTF8BS)
+
+newtype JobsetEvals = JobsetEvals
+   { evals :: Seq Eval
+   }
+   deriving stock (Generic, Show)
+   deriving anyclass (ToJSON, FromJSON)
+
+newtype Nixpkgs = Nixpkgs {revision :: Text}
+   deriving stock (Generic, Show)
+   deriving anyclass (ToJSON, FromJSON)
+
+newtype JobsetEvalInputs = JobsetEvalInputs {nixpkgs :: Nixpkgs}
+   deriving stock (Generic, Show)
+   deriving anyclass (ToJSON, FromJSON)
+
+data Eval = Eval
+   { id :: Int
+   , jobsetevalinputs :: JobsetEvalInputs
+   , builds :: Seq Int
+   }
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+-- | Hydra job name.
+--
+-- Examples:
+-- - @"haskellPackages.lens.x86_64-linux"@
+-- - @"haskell.packages.ghc925.cabal-install.aarch64-darwin"@
+-- - @"pkgsMusl.haskell.compiler.ghc90.x86_64-linux"@
+-- - @"arion.aarch64-linux"@
+newtype JobName = JobName { unJobName :: Text }
+   deriving stock (Generic, Show)
+   deriving newtype (Eq, FromJSONKey, FromJSON, Ord, ToJSON)
+
+-- | Datatype representing the result of querying the build evals of the
+-- haskell-updates Hydra jobset.
+--
+-- The URL <https://hydra.nixos.org/eval/EVAL_ID/builds> (where @EVAL_ID@ is a
+-- value like 1792418) returns a list of 'Build'.
+data Build = Build
+   { job :: JobName
+   , buildstatus :: Maybe Int
+     -- ^ Status of the build.  See 'getBuildState' for the meaning of each state.
+   , finished :: Int
+     -- ^ Whether or not the build is finished.  @0@ while unfinished, non-zero once finished.
+   , id :: Int
+   , nixname :: Text
+     -- ^ Nix name of the derivation.
+     --
+     -- Examples:
+     -- - @"lens-5.2.1"@
+     -- - @"cabal-install-3.8.0.1"@
+     -- - @"lens-static-x86_64-unknown-linux-musl-5.1.1"@
+   , system :: Text
+     -- ^ System
+     --
+     -- Examples:
+     -- - @"x86_64-linux"@
+     -- - @"aarch64-darwin"@
+   , jobsetevals :: Seq Int
+   }
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+data HydraSlownessWorkaroundFlag = HydraSlownessWorkaround | NoHydraSlownessWorkaround
+data RequestLogsFlag = RequestLogs | NoRequestLogs
+
+main :: IO ()
+main = do
+   args <- getArgs
+   case args of
+      ["get-report", "--slow"] -> getBuildReports HydraSlownessWorkaround
+      ["get-report"] -> getBuildReports NoHydraSlownessWorkaround
+      ["ping-maintainers"] -> printMaintainerPing
+      ["mark-broken-list", "--no-request-logs"] -> printMarkBrokenList NoRequestLogs
+      ["mark-broken-list"] -> printMarkBrokenList RequestLogs
+      ["eval-info"] -> printEvalInfo
+      _ -> putStrLn "Usage: get-report [--slow] | ping-maintainers | mark-broken-list [--no-request-logs] | eval-info"
+
+reportFileName :: IO FilePath
+reportFileName = getXdgDirectory XdgCache "haskell-updates-build-report.json"
+
+showT :: Show a => a -> Text
+showT = Text.pack . show
+
+getBuildReports :: HydraSlownessWorkaroundFlag -> IO ()
+getBuildReports opt = runReq defaultHttpConfig do
+   evalMay <- Seq.lookup 0 . evals <$> hydraJSONQuery mempty ["jobset", "nixpkgs", "haskell-updates", "evals"]
+   eval@Eval{id} <- maybe (liftIO $ fail "No Evaluation found") pure evalMay
+   liftIO . putStrLn $ "Fetching evaluation " <> show id <> " from Hydra. This might take a few minutes..."
+   buildReports <- getEvalBuilds opt id
+   liftIO do
+      fileName <- reportFileName
+      putStrLn $ "Finished fetching all builds from Hydra, saving report as " <> fileName
+      now <- getCurrentTime
+      encodeFile fileName (eval, now, buildReports)
+
+getEvalBuilds :: HydraSlownessWorkaroundFlag -> Int -> Req (Seq Build)
+getEvalBuilds NoHydraSlownessWorkaround id =
+  hydraJSONQuery mempty ["eval", showT id, "builds"]
+getEvalBuilds HydraSlownessWorkaround id = do
+  Eval{builds} <- hydraJSONQuery mempty [ "eval", showT id ]
+  forM builds $ \buildId -> do
+    liftIO $ putStrLn $ "Querying build " <> show buildId
+    hydraJSONQuery mempty [ "build", showT buildId ]
+
+hydraQuery :: HttpResponse a => Proxy a -> Option 'Https -> [Text] -> Req (HttpResponseBody a)
+hydraQuery responseType option query = do
+  let customHeaderOpt =
+        header
+          "User-Agent"
+          "hydra-report.hs/v1 (nixpkgs;maintainers/scripts/haskell) pls fix https://github.com/NixOS/nixos-org-configurations/issues/270"
+      customTimeoutOpt = responseTimeout 900_000_000 -- 15 minutes
+      opts = customHeaderOpt <> customTimeoutOpt <> option
+      url = foldl' (/:) (https "hydra.nixos.org") query
+  responseBody <$> req GET url NoReqBody responseType opts
+
+hydraJSONQuery :: FromJSON a => Option 'Https -> [Text] -> Req a
+hydraJSONQuery = hydraQuery jsonResponse
+
+hydraPlainQuery :: [Text] -> Req ByteString
+hydraPlainQuery = hydraQuery bsResponse mempty
+
+hydraEvalCommand :: FilePath
+hydraEvalCommand = "hydra-eval-jobs"
+
+hydraEvalParams :: [String]
+hydraEvalParams = ["-I", ".", "pkgs/top-level/release-haskell.nix"]
+
+nixExprCommand :: FilePath
+nixExprCommand = "nix-instantiate"
+
+nixExprParams :: [String]
+nixExprParams = ["--eval", "--strict", "--json"]
+
+-- | This newtype is used to parse a Hydra job output from @hydra-eval-jobs@.
+-- The only field we are interested in is @maintainers@, which is why this
+-- is just a newtype.
+--
+-- Note that there are occasionally jobs that don't have a maintainers
+-- field, which is why this has to be @Maybe Text@.
+newtype Maintainers = Maintainers { maintainers :: Maybe Text }
+  deriving stock (Generic, Show)
+  deriving anyclass (FromJSON, ToJSON)
+
+-- | This is a 'Map' from Hydra job name to maintainer email addresses.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("arion.aarch64-linux", Maintainers (Just "robert@example.com"))
+--    , ("bench.x86_64-linux", Maintainers (Just ""))
+--    , ("conduit.x86_64-linux", Maintainers (Just "snoy@man.com, web@ber.com"))
+--    , ("lens.x86_64-darwin", Maintainers (Just "ek@category.com"))
+--    ]
+-- @@
+--
+-- Note that Hydra jobs without maintainers will have an empty string for the
+-- maintainer list.
+type HydraJobs = Map JobName Maintainers
+
+-- | Map of email addresses to GitHub handles.
+-- This is built from the file @../../maintainer-list.nix@.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("robert@example.com", "rob22")
+--    , ("ek@category.com", "edkm")
+--    ]
+-- @@
+type EmailToGitHubHandles = Map Text Text
+
+-- | Map of Hydra jobs to maintainer GitHub handles.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("arion.aarch64-linux", ["rob22"])
+--    , ("conduit.x86_64-darwin", ["snoyb", "webber"])
+--    ]
+-- @@
+type MaintainerMap = Map JobName (NonEmpty Text)
+
+-- | Information about a package which lists its dependencies and whether the
+-- package is marked broken.
+data DepInfo = DepInfo {
+   deps :: Set PkgName,
+   broken :: Bool
+}
+   deriving stock (Generic, Show)
+   deriving anyclass (FromJSON, ToJSON)
+
+-- | Map from package names to their DepInfo. This is the data we get out of a
+-- nix call.
+type DependencyMap = Map PkgName DepInfo
+
+-- | Map from package names to its broken state, number of reverse dependencies (fst) and
+-- unbroken reverse dependencies (snd).
+type ReverseDependencyMap = Map PkgName (Int, Int)
+
+-- | Calculate the (unbroken) reverse dependencies of a package by transitively
+-- going through all packages if it’s a dependency of them.
+calculateReverseDependencies :: DependencyMap -> ReverseDependencyMap
+calculateReverseDependencies depMap =
+   Map.fromDistinctAscList $ zip keys (zip (rdepMap False) (rdepMap True))
+ where
+    -- This code tries to efficiently invert the dependency map and calculate
+    -- its transitive closure by internally identifying every pkg with its index
+    -- in the package list and then using memoization.
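+    --
+    -- For example (illustrative): if a depends on b and b depends on c, the
+    -- reverse-dependency counts are a:0, b:1 and c:2, because c is reached
+    -- transitively from both a and b.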
+    keys :: [PkgName]
+    keys = Map.keys depMap
+
+    pkgToIndexMap :: Map PkgName Int
+    pkgToIndexMap = Map.fromDistinctAscList (zip keys [0..])
+
+    depInfos :: [DepInfo]
+    depInfos = Map.elems depMap
+
+    depInfoToIdx :: DepInfo -> (Bool, [Int])
+    depInfoToIdx DepInfo{broken,deps} =
+       (broken, mapMaybe (`Map.lookup` pkgToIndexMap) $ Set.toList deps)
+
+    intDeps :: [(Int, (Bool, [Int]))]
+    intDeps = zip [0..] (fmap depInfoToIdx depInfos)
+
+    rdepMap onlyUnbroken = IntSet.size <$> resultList
+     where
+       resultList = go <$> [0..]
+       oneStepMap = IntMap.fromListWith IntSet.union $ (\(key,(_,deps)) -> (,IntSet.singleton key) <$> deps) <=< filter (\(_, (broken,_)) -> not (broken && onlyUnbroken)) $ intDeps
+       go pkg = IntSet.unions (oneStep:((resultList !!) <$> IntSet.toList oneStep))
+        where oneStep = IntMap.findWithDefault mempty pkg oneStepMap
+
+-- | Generate a mapping of Hydra job names to maintainer GitHub handles. Calls
+-- hydra-eval-jobs and the nix script ./maintainer-handles.nix.
+getMaintainerMap :: IO MaintainerMap
+getMaintainerMap = do
+   hydraJobs :: HydraJobs <-
+      readJSONProcess hydraEvalCommand hydraEvalParams "Failed to decode hydra-eval-jobs output: "
+   handlesMap :: EmailToGitHubHandles <-
+      readJSONProcess nixExprCommand ("maintainers/scripts/haskell/maintainer-handles.nix":nixExprParams) "Failed to decode nix output for lookup of github handles: "
+   pure $ Map.mapMaybe (splitMaintainersToGitHubHandles handlesMap) hydraJobs
+  where
+   -- Split a comma-separated string of Maintainers into a NonEmpty list of
+   -- GitHub handles.
+   splitMaintainersToGitHubHandles
+      :: EmailToGitHubHandles -> Maintainers -> Maybe (NonEmpty Text)
+   splitMaintainersToGitHubHandles handlesMap (Maintainers maint) =
+      nonEmpty .  mapMaybe (`Map.lookup` handlesMap) .  Text.splitOn ", " $ fromMaybe "" maint
+
+-- | Get a map of all dependencies of every package by calling the nix
+-- script ./dependencies.nix.
+getDependencyMap :: IO DependencyMap
+getDependencyMap =
+   readJSONProcess
+      nixExprCommand
+      ("maintainers/scripts/haskell/dependencies.nix" : nixExprParams)
+      "Failed to decode nix output for lookup of dependencies: "
+
+-- | Run a process that produces JSON on stdout and decode the JSON to a
+-- data type.
+--
+-- If the JSON-decoding fails, throw the JSON-decoding error.
+readJSONProcess
+   :: FromJSON a
+   => FilePath -- ^ Filename of executable.
+   -> [String] -- ^ Arguments
+   -> String -- ^ String to prefix to JSON-decode error.
+   -> IO a
+readJSONProcess exe args err = do
+   output <- readProcess exe args ""
+   let eitherDecodedOutput = eitherDecodeStrict' . encodeUtf8 . Text.pack $ output
+   case eitherDecodedOutput of
+     Left decodeErr -> error $ err <> decodeErr <> "\nRaw: '" <> take 1000 output <> "'"
+     Right decodedOutput -> pure decodedOutput
+
+-- BuildStates are sorted by subjective importance/concerningness
+data BuildState
+  = Failed
+  | DependencyFailed
+  | OutputLimitExceeded
+  | Unknown (Maybe Int)
+  | TimedOut
+  | Canceled
+  | HydraFailure
+  | Unfinished
+  | Success
+  deriving stock (Show, Eq, Ord)
+
+icon :: BuildState -> Text
+icon = \case
+   Failed -> "❌"
+   DependencyFailed -> "❗"
+   OutputLimitExceeded -> "⚠️"
+   Unknown x -> "unknown code " <> showT x
+   TimedOut -> "⌛🚫"
+   Canceled -> "🚫"
+   Unfinished -> "⏳"
+   HydraFailure -> "🚧"
+   Success -> "✅"
+
+platformIcon :: Platform -> Text
+platformIcon (Platform x) = case x of
+   "x86_64-linux" -> "🐧"
+   "aarch64-linux" -> "📱"
+   "x86_64-darwin" -> "🍎"
+   "aarch64-darwin" -> "🍏"
+   _ -> x
+
+platformIsOS :: OS -> Platform -> Bool
+platformIsOS os (Platform x) = case (os, x) of
+   (Linux, "x86_64-linux") -> True
+   (Linux, "aarch64-linux") -> True
+   (Darwin, "x86_64-darwin") -> True
+   (Darwin, "aarch64-darwin") -> True
+   _ -> False
+
+
+-- | A package name.  This is parsed from a 'JobName'.
+--
+-- Examples:
+--
+-- - The 'JobName' @"haskellPackages.lens.x86_64-linux"@ produces the 'PkgName'
+--   @"lens"@.
+-- - The 'JobName' @"haskell.packages.ghc925.cabal-install.aarch64-darwin"@
+--   produces the 'PkgName' @"cabal-install"@.
+-- - The 'JobName' @"pkgsMusl.haskell.compiler.ghc90.x86_64-linux"@ produces
+--   the 'PkgName' @"ghc90"@.
+-- - The 'JobName' @"arion.aarch64-linux"@ produces the 'PkgName' @"arion"@.
+--
+-- 'PkgName' is also used as a key in 'DependencyMap' and 'ReverseDependencyMap'.
+-- In this case, 'PkgName' originally comes from attribute names in @haskellPackages@
+-- in Nixpkgs.
+newtype PkgName = PkgName Text
+   deriving stock (Generic, Show)
+   deriving newtype (Eq, FromJSON, FromJSONKey, Ord, ToJSON)
+
+-- | A package set name.  This is parsed from a 'JobName'.
+--
+-- Examples:
+--
+-- - The 'JobName' @"haskellPackages.lens.x86_64-linux"@ produces the 'PkgSet'
+--   @"haskellPackages"@.
+-- - The 'JobName' @"haskell.packages.ghc925.cabal-install.aarch64-darwin"@
+--   produces the 'PkgSet' @"haskell.packages.ghc925"@.
+-- - The 'JobName' @"pkgsMusl.haskell.compiler.ghc90.x86_64-linux"@ produces
+--   the 'PkgSet' @"pkgsMusl.haskell.compiler"@.
+-- - The 'JobName' @"arion.aarch64-linux"@ produces the 'PkgSet' @""@.
+--
+-- As you can see from the last example, 'PkgSet' can be empty (@""@) for
+-- top-level jobs.
+newtype PkgSet = PkgSet Text
+   deriving stock (Generic, Show)
+   deriving newtype (Eq, FromJSON, FromJSONKey, Ord, ToJSON)
+
+data BuildResult = BuildResult {state :: BuildState, id :: Int} deriving (Show, Eq, Ord)
+newtype Platform = Platform {platform :: Text} deriving (Show, Eq, Ord)
+data SummaryEntry = SummaryEntry {
+   summaryBuilds :: Table PkgSet Platform BuildResult,
+   summaryMaintainers :: Set Text,
+   summaryReverseDeps :: Int,
+   summaryUnbrokenReverseDeps :: Int
+}
+type StatusSummary = Map PkgName SummaryEntry
+
+data OS = Linux | Darwin
+
+newtype Table row col a = Table (Map (row, col) a)
+
+singletonTable :: row -> col -> a -> Table row col a
+singletonTable row col a = Table $ Map.singleton (row, col) a
+
+unionTable :: (Ord row, Ord col) => Table row col a -> Table row col a -> Table row col a
+unionTable (Table l) (Table r) = Table $ Map.union l r
+
+filterWithKeyTable :: (row -> col -> a -> Bool) -> Table row col a -> Table row col a
+filterWithKeyTable f (Table t) = Table $ Map.filterWithKey (\(r,c) a -> f r c a) t
+
+nullTable :: Table row col a -> Bool
+nullTable (Table t) = Map.null t
+
+instance (Ord row, Ord col, Semigroup a) => Semigroup (Table row col a) where
+   Table l <> Table r = Table (Map.unionWith (<>) l r)
+instance (Ord row, Ord col, Semigroup a) => Monoid (Table row col a) where
+   mempty = Table Map.empty
+instance Functor (Table row col) where
+   fmap f (Table a) = Table (fmap f a)
+instance Foldable (Table row col) where
+   foldMap f (Table a) = foldMap f a
+
+getBuildState :: Build -> BuildState
+getBuildState Build{finished, buildstatus} = case (finished, buildstatus) of
+   (0, _) -> Unfinished
+   (_, Just 0) -> Success
+   (_, Just 1) -> Failed
+   (_, Just 2) -> DependencyFailed
+   (_, Just 3) -> HydraFailure
+   (_, Just 4) -> Canceled
+   (_, Just 7) -> TimedOut
+   (_, Just 11) -> OutputLimitExceeded
+   (_, i) -> Unknown i
+
+combineStatusSummaries :: Seq StatusSummary -> StatusSummary
+combineStatusSummaries = foldl' (Map.unionWith unionSummary) Map.empty
+  where
+   unionSummary :: SummaryEntry -> SummaryEntry -> SummaryEntry
+   unionSummary (SummaryEntry lb lm lr lu) (SummaryEntry rb rm rr ru) =
+      SummaryEntry (unionTable lb rb) (lm <> rm) (max lr rr) (max lu ru)
+
+buildToPkgNameAndSet :: Build -> (PkgName, PkgSet)
+buildToPkgNameAndSet Build{job = JobName jobName, system} = (name, set)
+  where
+   packageName :: Text
+   packageName = fromMaybe jobName (Text.stripSuffix ("." <> system) jobName)
+
+   splitted :: Maybe (NonEmpty Text)
+   splitted = nonEmpty $ Text.splitOn "." packageName
+
+   name :: PkgName
+   name = PkgName $ maybe packageName NonEmpty.last splitted
+
+   set :: PkgSet
+   set = PkgSet $ maybe "" (Text.intercalate "." . NonEmpty.init) splitted
+
+buildToStatusSummary :: MaintainerMap -> ReverseDependencyMap -> Build -> StatusSummary
+buildToStatusSummary maintainerMap reverseDependencyMap build@Build{job, id, system} =
+   Map.singleton pkgName summaryEntry
+  where
+   (pkgName, pkgSet) = buildToPkgNameAndSet build
+
+   maintainers :: Set Text
+   maintainers = maybe mempty (Set.fromList . toList) (Map.lookup job maintainerMap)
+
+   (reverseDeps, unbrokenReverseDeps) =
+      Map.findWithDefault (0,0) pkgName reverseDependencyMap
+
+   buildTable :: Table PkgSet Platform BuildResult
+   buildTable =
+      singletonTable pkgSet (Platform system) (BuildResult (getBuildState build) id)
+
+   summaryEntry = SummaryEntry buildTable maintainers reverseDeps unbrokenReverseDeps
+
+readBuildReports :: IO (Eval, UTCTime, Seq Build)
+readBuildReports = do
+   file <- reportFileName
+   fromMaybe (error $ "Could not decode " <> file) <$> decodeFileStrict' file
+
+sep :: Text
+sep = " | "
+joinTable :: [Text] -> Text
+joinTable t = sep <> Text.intercalate sep t <> sep
+
+type NumSummary = Table Platform BuildState Int
+
+printTable :: (Ord rows, Ord cols) => Text -> (rows -> Text) -> (cols -> Text) -> (entries -> Text) -> Table rows cols entries -> [Text]
+printTable name showR showC showE (Table mapping) = joinTable <$> (name : map showC cols) : replicate (length cols + sepsInName + 1) "---" : map printRow rows
+  where
+   sepsInName = Text.count "|" name
+   printRow row = showR row : map (\col -> maybe "" showE (Map.lookup (row, col) mapping)) cols
+   rows = toList $ Set.fromList (fst <$> Map.keys mapping)
+   cols = toList $ Set.fromList (snd <$> Map.keys mapping)
+
+printJob :: Int -> PkgName -> (Table PkgSet Platform BuildResult, Text) -> [Text]
+printJob evalId (PkgName name) (Table mapping, maintainers) =
+   if length sets <= 1
+      then map printSingleRow sets
+      else ["- [ ] " <> makeJobSearchLink (PkgSet "") name <> " " <> maintainers] <> map printRow sets
+  where
+   printRow :: PkgSet -> Text
+   printRow (PkgSet set) =
+      "  - " <> printState (PkgSet set) <> " " <>
+      makeJobSearchLink (PkgSet set) (if Text.null set then "toplevel" else set)
+
+   printSingleRow set =
+      "- [ ] " <> printState set <> " " <>
+      makeJobSearchLink set (makePkgName set) <> " " <> maintainers
+
+   makePkgName :: PkgSet -> Text
+   makePkgName (PkgSet set) = (if Text.null set then "" else set <> ".") <> name
+
+   printState set =
+      Text.intercalate " " $ map (\pf -> maybe "" (label pf) $ Map.lookup (set, pf) mapping) platforms
+
+   makeJobSearchLink :: PkgSet -> Text -> Text
+   makeJobSearchLink set linkLabel = makeSearchLink evalId linkLabel (makePkgName set)
+
+   sets :: [PkgSet]
+   sets = toList $ Set.fromList (fst <$> Map.keys mapping)
+
+   platforms :: [Platform]
+   platforms = toList $ Set.fromList (snd <$> Map.keys mapping)
+
+   label pf (BuildResult s i) = "[[" <> platformIcon pf <> icon s <> "]](https://hydra.nixos.org/build/" <> showT i <> ")"
+
+makeSearchLink :: Int -> Text -> Text -> Text
+makeSearchLink evalId linkLabel query = "[" <> linkLabel <> "](" <> "https://hydra.nixos.org/eval/" <> showT evalId <> "?filter=" <> query <> ")"
+
+statusToNumSummary :: StatusSummary -> NumSummary
+statusToNumSummary = fmap getSum . foldMap (fmap Sum . jobTotals)
+
+jobTotals :: SummaryEntry -> Table Platform BuildState Int
+jobTotals (summaryBuilds -> Table mapping) = getSum <$> Table (Map.foldMapWithKey (\(_, platform) (BuildResult buildstate _) -> Map.singleton (platform, buildstate) (Sum 1)) mapping)
+
+details :: Text -> [Text] -> [Text]
+details summary content = ["<details><summary>" <> summary <> " </summary>", ""] <> content <> ["</details>", ""]
+
+evalLine :: Eval -> UTCTime -> Text
+evalLine Eval{id, jobsetevalinputs = JobsetEvalInputs{nixpkgs = Nixpkgs{revision}}} fetchTime =
+   "*evaluation ["
+    <> showT id
+    <> "](https://hydra.nixos.org/eval/"
+    <> showT id
+    <> ") of nixpkgs commit ["
+    <> Text.take 7 revision
+    <> "](https://github.com/NixOS/nixpkgs/commits/"
+    <> revision
+    <> ") as of "
+    <> Text.pack (formatTime defaultTimeLocale "%Y-%m-%d %H:%M UTC" fetchTime)
+    <> "*"
+
+printBuildSummary :: Eval -> UTCTime -> StatusSummary -> [(PkgName, Int)] -> Text
+printBuildSummary eval@Eval{id} fetchTime summary topBrokenRdeps =
+   Text.unlines $
+      headline <> [""] <> tldr <> (("  * "<>) <$> (errors <> warnings)) <> [""]
+         <> totals
+         <> optionalList "#### Maintained Linux packages with build failure" (maintainedList (fails summaryLinux))
+         <> optionalList "#### Maintained Linux packages with failed dependency" (maintainedList (failedDeps summaryLinux))
+         <> optionalList "#### Maintained Linux packages with unknown error" (maintainedList (unknownErr summaryLinux))
+         <> optionalHideableList "#### Maintained Darwin packages with build failure" (maintainedList (fails summaryDarwin))
+         <> optionalHideableList "#### Maintained Darwin packages with failed dependency" (maintainedList (failedDeps summaryDarwin))
+         <> optionalHideableList "#### Maintained Darwin packages with unknown error" (maintainedList (unknownErr summaryDarwin))
+         <> optionalHideableList "#### Unmaintained packages with build failure" (unmaintainedList (fails summary))
+         <> optionalHideableList "#### Unmaintained packages with failed dependency" (unmaintainedList (failedDeps summary))
+         <> optionalHideableList "#### Unmaintained packages with unknown error" (unmaintainedList (unknownErr summary))
+         <> optionalHideableList "#### Top 50 broken packages, sorted by number of reverse dependencies" (brokenLine <$> topBrokenRdeps)
+         <> ["","*⤴️: The number of packages that depend (directly or indirectly) on this package (if any). If two numbers are shown the first (lower) number considers only packages which currently have enabled hydra jobs, i.e. are not marked broken. The second (higher) number considers all packages.*",""]
+         <> footer
+  where
+   footer = ["*Report generated with [maintainers/scripts/haskell/hydra-report.hs](https://github.com/NixOS/nixpkgs/blob/haskell-updates/maintainers/scripts/haskell/hydra-report.hs)*"]
+
+   headline =
+      [ "### [haskell-updates build report from hydra](https://hydra.nixos.org/jobset/nixpkgs/haskell-updates)"
+      , evalLine eval fetchTime
+      ]
+
+   totals :: [Text]
+   totals =
+      [ "#### Build summary"
+      , ""
+      ] <>
+      printTable
+         "Platform"
+         (\x -> makeSearchLink id (platform x <> " " <> platformIcon x) ("." <> platform x))
+         (\x -> showT x <> " " <> icon x)
+         showT
+         numSummary
+
+   brokenLine :: (PkgName, Int) -> Text
+   brokenLine (PkgName name, rdeps) =
+      "[" <> name <> "](https://packdeps.haskellers.com/reverse/" <> name <>
+      ") ⤴️ " <> Text.pack (show rdeps) <> "  "
+
+   numSummary = statusToNumSummary summary
+
+   summaryLinux :: StatusSummary
+   summaryLinux = withOS Linux summary
+
+   summaryDarwin :: StatusSummary
+   summaryDarwin = withOS Darwin summary
+
+   -- Remove all BuildResult from the Table that have Platform that isn't for
+   -- the given OS.
+   tableForOS :: OS -> Table PkgSet Platform BuildResult -> Table PkgSet Platform BuildResult
+   tableForOS os = filterWithKeyTable (\_ platform _ -> platformIsOS os platform)
+
+   -- Remove all BuildResult from the StatusSummary that have a Platform that
+   -- isn't for the given OS.  Completely remove all PkgName from StatusSummary
+   -- that end up with no BuildResults.
+   withOS
+      :: OS
+      -> StatusSummary
+      -> StatusSummary
+   withOS os =
+      Map.mapMaybe
+         (\e@SummaryEntry{summaryBuilds} ->
+            let buildsForOS = tableForOS os summaryBuilds
+            in if nullTable buildsForOS then Nothing else Just e { summaryBuilds = buildsForOS }
+         )
+
+   jobsByState :: (BuildState -> Bool) -> StatusSummary -> StatusSummary
+   jobsByState predicate = Map.filter (predicate . worstState)
+
+   worstState :: SummaryEntry -> BuildState
+   worstState = foldl' min Success . fmap state . summaryBuilds
+
+   fails :: StatusSummary -> StatusSummary
+   fails = jobsByState (== Failed)
+
+   failedDeps :: StatusSummary -> StatusSummary
+   failedDeps = jobsByState (== DependencyFailed)
+
+   unknownErr :: StatusSummary -> StatusSummary
+   unknownErr = jobsByState (\x -> x > DependencyFailed && x < TimedOut)
+
+   withMaintainer :: StatusSummary -> Map PkgName (Table PkgSet Platform BuildResult, NonEmpty Text)
+   withMaintainer =
+      Map.mapMaybe
+         (\e -> (summaryBuilds e,) <$> nonEmpty (Set.toList (summaryMaintainers e)))
+
+   withoutMaintainer :: StatusSummary -> StatusSummary
+   withoutMaintainer = Map.mapMaybe (\e -> if Set.null (summaryMaintainers e) then Just e else Nothing)
+
+   optionalList :: Text -> [Text] -> [Text]
+   optionalList heading list = if null list then mempty else [heading] <> list
+
+   optionalHideableList :: Text -> [Text] -> [Text]
+   optionalHideableList heading list = if null list then mempty else [heading] <> details (showT (length list) <> " job(s)") list
+
+   maintainedList :: StatusSummary -> [Text]
+   maintainedList = showMaintainedBuild <=< Map.toList . withMaintainer
+
+   summaryEntryGetReverseDeps :: SummaryEntry -> (Int, Int)
+   summaryEntryGetReverseDeps sumEntry =
+      ( negate $ summaryUnbrokenReverseDeps sumEntry
+      , negate $ summaryReverseDeps sumEntry
+      )
+
+   sortOnReverseDeps :: [(PkgName, SummaryEntry)] -> [(PkgName, SummaryEntry)]
+   sortOnReverseDeps = sortOn (\(_, sumEntry) -> summaryEntryGetReverseDeps sumEntry)
+
+   unmaintainedList :: StatusSummary -> [Text]
+   unmaintainedList = showBuild <=< sortOnReverseDeps . Map.toList . withoutMaintainer
+
+   showBuild :: (PkgName, SummaryEntry) -> [Text]
+   showBuild (name, entry) =
+      printJob
+         id
+         name
+         ( summaryBuilds entry
+         , Text.pack
+            ( if summaryReverseDeps entry > 0
+               then
+                  " ⤴️ " <> show (summaryUnbrokenReverseDeps entry) <>
+                  " | " <> show (summaryReverseDeps entry)
+               else ""
+            )
+         )
+
+   showMaintainedBuild
+      :: (PkgName, (Table PkgSet Platform BuildResult, NonEmpty Text)) -> [Text]
+   showMaintainedBuild (name, (table, maintainers)) =
+      printJob
+         id
+         name
+         ( table
+         , Text.intercalate " " (fmap ("@" <>) (toList maintainers))
+         )
+
+   tldr = case (errors, warnings) of
+            ([],[]) -> ["🟢 **Ready to merge** (if there are no [evaluation errors](https://hydra.nixos.org/jobset/nixpkgs/haskell-updates))"]
+            ([],_) -> ["🟡 **Potential issues** (and possibly [evaluation errors](https://hydra.nixos.org/jobset/nixpkgs/haskell-updates))"]
+            _ -> ["🔴 **Branch not mergeable**"]
+   warnings =
+      if' (Unfinished > maybe Success worstState maintainedJob) "`maintained` jobset failed." <>
+      if' (Unfinished == maybe Success worstState mergeableJob) "`mergeable` jobset is not finished." <>
+      if' (Unfinished == maybe Success worstState maintainedJob) "`maintained` jobset is not finished."
+   errors =
+      if' (isNothing mergeableJob) "No `mergeable` job found." <>
+      if' (isNothing maintainedJob) "No `maintained` job found." <>
+      if' (Unfinished > maybe Success worstState mergeableJob) "`mergeable` jobset failed." <>
+      if' (outstandingJobs (Platform "x86_64-linux") > 100) "Too many outstanding jobs on x86_64-linux." <>
+      if' (outstandingJobs (Platform "aarch64-linux") > 100) "Too many outstanding jobs on aarch64-linux."
+
+   if' p e = if p then [e] else mempty
+
+   outstandingJobs platform | Table m <- numSummary = Map.findWithDefault 0 (platform, Unfinished) m
+
+   maintainedJob = Map.lookup (PkgName "maintained") summary
+   mergeableJob = Map.lookup (PkgName "mergeable") summary
+
+printEvalInfo :: IO ()
+printEvalInfo = do
+   (eval, fetchTime, _) <- readBuildReports
+   putStrLn (Text.unpack $ evalLine eval fetchTime)
+
+printMaintainerPing :: IO ()
+printMaintainerPing = do
+   (maintainerMap, (reverseDependencyMap, topBrokenRdeps)) <- concurrently getMaintainerMap do
+      depMap <- getDependencyMap
+      rdepMap <- evaluate . calculateReverseDependencies $ depMap
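+      -- Keep only packages marked broken in depMap and list the 50 with the
+      -- highest reverse-dependency count.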
+      let tops = take 50 . sortOn (negate . snd) . fmap (second fst) . filter (\x -> maybe False broken $ Map.lookup (fst x) depMap) . Map.toList $ rdepMap
+      pure (rdepMap, tops)
+   (eval, fetchTime, buildReport) <- readBuildReports
+   let statusSummaries =
+          fmap (buildToStatusSummary maintainerMap reverseDependencyMap) buildReport
+       buildSum :: StatusSummary
+       buildSum = combineStatusSummaries statusSummaries
+       textBuildSummary = printBuildSummary eval fetchTime buildSum topBrokenRdeps
+   Text.putStrLn textBuildSummary
+
+printMarkBrokenList :: RequestLogsFlag -> IO ()
+printMarkBrokenList reqLogs = do
+   (_, fetchTime, buildReport) <- readBuildReports
+   runReq defaultHttpConfig $ forM_ buildReport \build@Build{job, id} ->
+      case (getBuildState build, Text.splitOn "." $ unJobName job) of
+         (Failed, ["haskellPackages", name, "x86_64-linux"]) -> do
+            -- We use the last probable error cause found in the build log file.
+            error_message <- fromMaybe "failure" <$>
+              case reqLogs of
+                NoRequestLogs -> pure Nothing
+                RequestLogs -> do
+                  -- Fetch build log from hydra to figure out the cause of the error.
+                  build_log <- ByteString.lines <$> hydraPlainQuery ["build", showT id, "nixlog", "1", "raw"]
+                  pure $ safeLast $ mapMaybe probableErrorCause build_log
+            liftIO $ putStrLn $ "  - " <> Text.unpack name <> " # " <> error_message <> " in job https://hydra.nixos.org/build/" <> show id <> " at " <> formatTime defaultTimeLocale "%Y-%m-%d" fetchTime
+         _ -> pure ()
+
+{- | This function receives a line from a Nix Haskell builder build log and returns a possible error cause.
+   We might need to add other causes in the future if errors happen in unusual parts of the builder.
+-}
+probableErrorCause :: ByteString -> Maybe String
+probableErrorCause "Setup: Encountered missing or private dependencies:" = Just "dependency missing"
+probableErrorCause "running tests" = Just "test failure"
+probableErrorCause build_line | ByteString.isPrefixOf "Building" build_line = Just ("failure building " <> fromUTF8BS (fst $ ByteString.breakSubstring " for" $ ByteString.drop 9 build_line))
+probableErrorCause build_line | ByteString.isSuffixOf "Phase" build_line = Just ("failure in " <> fromUTF8BS build_line)
+probableErrorCause _ = Nothing
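+
+-- GHCi-style examples of the heuristics above (illustrative only; the package
+-- name is made up, and OverloadedStrings is assumed):
+--
+-- >>> probableErrorCause "Setup: Encountered missing or private dependencies:"
+-- Just "dependency missing"
+--
+-- >>> probableErrorCause "Building library for foo-1.2.3.."
+-- Just "failure building library"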
diff --git a/nixpkgs/maintainers/scripts/haskell/maintained-broken-pkgs.nix b/nixpkgs/maintainers/scripts/haskell/maintained-broken-pkgs.nix
new file mode 100644
index 000000000000..7416bfa46ee0
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/maintained-broken-pkgs.nix
@@ -0,0 +1,22 @@
+let
+  nixpkgs = import ../../..;
+  inherit (nixpkgs {}) haskellPackages lib;
+  maintainedPkgs = lib.filterAttrs (
+    _: v: builtins.length (v.meta.maintainers or []) > 0
+  ) haskellPackages;
+  brokenPkgs = lib.filterAttrs (_: v: v.meta.broken) maintainedPkgs;
+  transitiveBrokenPkgs = lib.filterAttrs
+    (_: v: !(builtins.tryEval (v.outPath or null)).success && !v.meta.broken)
+    maintainedPkgs;
+  infoList = pkgs: lib.concatStringsSep "\n" (lib.mapAttrsToList (name: drv: "${name} ${(builtins.elemAt drv.meta.maintainers 0).github}") pkgs);
+in {
+  report = ''
+    BROKEN:
+    ${infoList brokenPkgs}
+
+    TRANSITIVE BROKEN:
+    ${infoList transitiveBrokenPkgs}
+  '';
+  transitiveErrors =
+    builtins.attrValues transitiveBrokenPkgs;
+}
diff --git a/nixpkgs/maintainers/scripts/haskell/maintainer-handles.nix b/nixpkgs/maintainers/scripts/haskell/maintainer-handles.nix
new file mode 100644
index 000000000000..d650e82f8b0c
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/maintainer-handles.nix
@@ -0,0 +1,21 @@
+# Nix script to look up maintainers' GitHub handles from their email addresses. Used by ./hydra-report.hs.
+#
+# This script produces an attr set mapping of email addresses to GitHub handles:
+#
+# ```nix
+# > import ./maintainer-handles.nix
+# { "cdep.illabout@gmail.com" = "cdepillabout"; "john@smith.com" = "johnsmith"; ... }
+# ```
+#
+# This mapping contains all maintainers in ../../maintainer-list.nix, but it
+# ignores maintainers who don't have a GitHub account or an email address.
+let
+  pkgs = import ../../.. {};
+  maintainers = import ../../maintainer-list.nix;
+  inherit (pkgs) lib;
+  mkMailGithubPair = _: maintainer:
+    if (maintainer ? email) && (maintainer ? github) then
+      { "${maintainer.email}" = maintainer.github; }
+    else
+      {};
+in lib.zipAttrsWith (_: builtins.head) (lib.mapAttrsToList mkMailGithubPair maintainers)
diff --git a/nixpkgs/maintainers/scripts/haskell/mark-broken.sh b/nixpkgs/maintainers/scripts/haskell/mark-broken.sh
new file mode 100755
index 000000000000..9aa9433b8023
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/mark-broken.sh
@@ -0,0 +1,66 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils git -I nixpkgs=.
+
+# This script uses the data pulled with
+# maintainers/scripts/haskell/hydra-report.hs get-report to produce a list of
+# failing builds that gets written to the hackage2nix config. Then
+# hackage-packages.nix is regenerated, and transitively broken packages are
+# marked as dont-distribute in the config as well.
+# This should disable builds for most failing jobs in the haskell-updates jobset.
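+#
+# Typical invocation (from the nixpkgs checkout root, after fetching a report):
+#   maintainers/scripts/haskell/hydra-report.hs get-report
+#   maintainers/scripts/haskell/mark-broken.sh --no-request-logs --do-commit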
+
+set -euo pipefail
+
+do_commit=false
+mark_broken_list_flags=""
+
+for arg in "$@"; do
+    case "$arg" in
+        --do-commit)
+            do_commit=true
+            ;;
+        --no-request-logs)
+            mark_broken_list_flags="$mark_broken_list_flags $arg"
+            ;;
+        *)
+            echo "$0: unknown flag: $arg"
+            exit 100
+            ;;
+    esac
+done
+
+broken_config="pkgs/development/haskell-modules/configuration-hackage2nix/broken.yaml"
+
+tmpfile=$(mktemp)
+trap "rm ${tmpfile}" 0
+
+echo "Remember that you need to manually run 'maintainers/scripts/haskell/hydra-report.hs get-report' sometime before running this script."
+echo "Generating a list of broken builds and displaying for manual confirmation ..."
+maintainers/scripts/haskell/hydra-report.hs mark-broken-list $mark_broken_list_flags | sort -i > "$tmpfile"
+
+$EDITOR "$tmpfile"
+
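+# Append the existing entries, skipping the two-line header that is
+# regenerated below.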
+tail -n +3 "$broken_config" >> "$tmpfile"
+
+cat > "$broken_config" << EOF
+broken-packages:
+  # These packages don't compile.
+EOF
+
+sort -iu "$tmpfile" >> "$broken_config"
+# Clear the environment here to avoid user configuration (e.g. allowing broken
+# builds) leaking into the regeneration step.
+clear="env -u HOME -u NIXPKGS_CONFIG"
+$clear maintainers/scripts/haskell/regenerate-hackage-packages.sh
+evalline=$(maintainers/scripts/haskell/hydra-report.hs eval-info)
+
+if $do_commit; then
+git add $broken_config
+git add pkgs/development/haskell-modules/configuration-hackage2nix/transitive-broken.yaml
+git add pkgs/development/haskell-modules/hackage-packages.nix
+git commit -F - << EOF
+haskellPackages: mark builds failing on hydra as broken
+
+This commit has been generated by maintainers/scripts/haskell/mark-broken.sh based on
+$evalline
+from the haskell-updates jobset on hydra under https://hydra.nixos.org/jobset/nixpkgs/haskell-updates
+EOF
+fi
diff --git a/nixpkgs/maintainers/scripts/haskell/merge-and-open-pr.sh b/nixpkgs/maintainers/scripts/haskell/merge-and-open-pr.sh
new file mode 100755
index 000000000000..62565d24d623
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/merge-and-open-pr.sh
@@ -0,0 +1,129 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p git gh -I nixpkgs=.
+#
+# Script to merge the currently open haskell-updates PR into master, bump the
+# Stackage version and Hackage versions, and open the next haskell-updates PR.
+
+set -eu -o pipefail
+
+# exit after printing first argument to this function
+function die {
+  # echo the first argument
+  echo "ERROR: $1"
+  echo "Aborting!"
+
+  exit 1
+}
+
+function help {
+  echo "Usage: $0 HASKELL_UPDATES_PR_NUM"
+  echo "Merge the currently open haskell-updates PR into master, and open the next one."
+  echo
+  echo "  -h, --help                print this help"
+  echo "  HASKELL_UPDATES_PR_NUM    number of the currently open PR on NixOS/nixpkgs"
+  echo "                            for the haskell-updates branch"
+  echo
+  echo "Example:"
+  echo "  \$ $0 137340"
+
+  exit 1
+}
+
+# Read in the current haskell-updates PR number from the command line.
+while [[ $# -gt 0 ]]; do
+  key="$1"
+
+  case $key in
+    -h|--help)
+      help
+      ;;
+    *)
+      curr_haskell_updates_pr_num="$1"
+      shift
+      ;;
+  esac
+done
+
+if [[ -z "${curr_haskell_updates_pr_num-}" ]] ; then
+  die "You must pass the current haskell-updates PR number as the first argument to this script."
+fi
+
+# Make sure you have gh authentication setup.
+if ! gh auth status 2>/dev/null ; then
+  die "You must setup the \`gh\` command.  Run \`gh auth login\`."
+fi
+
+# Make sure this is configured before we start doing anything
+push_remote="$(git config branch.haskell-updates.pushRemote)" \
+  || die 'Can'\''t determine pushRemote for haskell-updates. Please set using `git config branch.haskell-updates.pushremote <remote name>`.'
+
+# Fetch nixpkgs to get an up-to-date origin/haskell-updates branch.
+echo "Fetching origin..."
+git fetch origin >/dev/null
+
+# Make sure we are currently on a local haskell-updates branch.
+curr_branch="$(git rev-parse --abbrev-ref HEAD)"
+if [[ "$curr_branch" != "haskell-updates" ]]; then
+    die "Current branch is not called \"haskell-updates\"."
+fi
+
+# Make sure our local haskell-updates branch is on the same commit as
+# origin/haskell-updates.
+curr_branch_commit="$(git rev-parse haskell-updates)"
+origin_haskell_updates_commit="$(git rev-parse origin/haskell-updates)"
+if [[ "$curr_branch_commit" != "$origin_haskell_updates_commit" ]]; then
+    die "Current branch is not at the same commit as origin/haskell-updates"
+fi
+
+# Merge the current open haskell-updates PR.
+echo "Merging https://github.com/NixOS/nixpkgs/pull/${curr_haskell_updates_pr_num}..."
+gh pr merge --repo NixOS/nixpkgs --merge "$curr_haskell_updates_pr_num"
+
+# Update the list of Haskell package versions in NixOS on Hackage.
+echo "Updating list of Haskell package versions in NixOS on Hackage..."
+./maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh
+
+# Update stackage, Hackage hashes, and regenerate Haskell package set
+echo "Updating Stackage..."
+./maintainers/scripts/haskell/update-stackage.sh --do-commit
+echo "Updating Hackage hashes..."
+./maintainers/scripts/haskell/update-hackage.sh --do-commit
+echo "Regenerating Hackage packages..."
+# Using --fast here because, after the Hackage update, eval errors would likely break the transitive-broken check.
+./maintainers/scripts/haskell/regenerate-hackage-packages.sh --fast --do-commit
+
+# Push these new commits to the haskell-updates branch
+echo "Pushing commits just created to the remote $push_remote/haskell-updates branch..."
+git push "$push_remote" haskell-updates
+
+# Open new PR
+new_pr_body=$(cat <<EOF
+### This Merge
+
+This PR is the regular merge of the \`haskell-updates\` branch into \`master\`.
+
+This branch is being continually built and tested by hydra at https://hydra.nixos.org/jobset/nixpkgs/haskell-updates. You may be able to find an up-to-date Hydra build report at [cdepillabout/nix-haskell-updates-status](https://github.com/cdepillabout/nix-haskell-updates-status).
+
+We roughly aim to merge these \`haskell-updates\` PRs at least once every two weeks. See the @NixOS/haskell [team calendar](https://cloud.maralorn.de/apps/calendar/p/H6migHmKX7xHoTFa) for who is currently in charge of this branch.
+
+### haskellPackages Workflow Summary
+
+Our workflow is currently described in [\`pkgs/development/haskell-modules/HACKING.md\`](https://github.com/NixOS/nixpkgs/blob/haskell-updates/pkgs/development/haskell-modules/HACKING.md).
+
+The short version is this:
+* We regularly update the Stackage and Hackage pins on \`haskell-updates\` (normally at the beginning of a merge window).
+* The community fixes builds of Haskell packages on that branch.
+* We aim for at least one merge of \`haskell-updates\` into \`master\` every two weeks.
+* We only do the merge if the [\`mergeable\`](https://hydra.nixos.org/job/nixpkgs/haskell-updates/mergeable) job is succeeding on hydra.
+* If a [\`maintained\`](https://hydra.nixos.org/job/nixpkgs/haskell-updates/maintained) package is still broken at the time of merge, we will only merge if the maintainer has been pinged 7 days in advance. (If you care about a Haskell package, become a maintainer!)
+
+More information about Haskell packages in nixpkgs can be found [in the nixpkgs manual](https://nixos.org/manual/nixpkgs/unstable/#haskell).
+
+---
+
+This is the follow-up to #${curr_haskell_updates_pr_num}. Come to [#haskell:nixos.org](https://matrix.to/#/#haskell:nixos.org) if you have any questions.
+EOF
+)
+
+echo "Opening a PR for the next haskell-updates merge cycle..."
+gh pr create --repo NixOS/nixpkgs --base master --head haskell-updates --title "haskellPackages: update stackage and hackage" --body "$new_pr_body"
diff --git a/nixpkgs/maintainers/scripts/haskell/regenerate-hackage-packages.sh b/nixpkgs/maintainers/scripts/haskell/regenerate-hackage-packages.sh
new file mode 100755
index 000000000000..96a18aa8ed87
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/regenerate-hackage-packages.sh
@@ -0,0 +1,120 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils haskellPackages.cabal2nix-unstable git nix -I nixpkgs=.
+
+set -euo pipefail
+
+self=$0
+
+print_help () {
+cat <<END_HELP
+Usage: $self [options]
+
+Options:
+   --do-commit    Commit the regenerated files to git.
+   -f | --fast    Do not update the transitive-broken.yaml file.
+   -h | --help    Show this help.
+
+This script is used to regenerate nixpkgs' Haskell package set, using the
+tool hackage2nix from the nixos/cabal2nix repo. hackage2nix looks at the
+config files in pkgs/development/haskell-modules/configuration-hackage2nix
+and generates a Nix expression for the package versions specified there, using the
+Cabal files from the Hackage database (available under all-cabal-hashes) and
+its companion tool cabal2nix.
+
+Unless --fast is used, it will then use the generated Nix expression by
+running regenerate-transitive-broken-packages.sh, which updates the
+transitive-broken.yaml file. Then it re-runs hackage2nix.
+
+Related scripts are update-hackage.sh, for updating the snapshot of the
+Hackage database used by hackage2nix, and update-cabal2nix-unstable.sh,
+for updating the version of hackage2nix used to perform this task.
+
+Note that this script doesn't gcroot anything, so it may be broken by an
+unfortunately timed nix-store --gc.
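+
+Examples:
+   $self             # full regeneration
+   $self --fast      # skip regenerating transitive-broken.yaml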
+
+END_HELP
+}
+
+DO_COMMIT=0
+REGENERATE_TRANSITIVE=1
+
+options=$(getopt -o "fh" -l "help,fast,do-commit" -- "$@")
+
+eval set -- "$options"
+
+while true; do
+   case "$1" in
+      --do-commit)
+         DO_COMMIT=1
+         ;;
+      -f|--fast)
+         REGENERATE_TRANSITIVE=0
+         ;;
+      -h|--help)
+         print_help
+         exit 0
+         ;;
+      --)
+         break;;
+      *)
+         print_help
+         exit 1
+         ;;
+   esac
+   shift
+done
+
+HACKAGE2NIX="${HACKAGE2NIX:-hackage2nix}"
+
+# Prevent hackage2nix from failing because of encoding issues.
+# See: https://github.com/NixOS/nixpkgs/pull/122023
+export LC_ALL=C.UTF-8
+
+config_dir=pkgs/development/haskell-modules/configuration-hackage2nix
+
+run_hackage2nix() {
+"$HACKAGE2NIX" \
+   --hackage "$unpacked_hackage" \
+   --preferred-versions <(for n in "$unpacked_hackage"/*/preferred-versions; do cat "$n"; echo; done) \
+   --nixpkgs "$PWD" \
+   --config "$compiler_config" \
+   --config "$config_dir/main.yaml" \
+   --config "$config_dir/stackage.yaml" \
+   --config "$config_dir/broken.yaml" \
+   --config "$config_dir/transitive-broken.yaml"
+}
+
+echo "Obtaining Hackage data …"
+extraction_derivation='with import ./. {}; runCommandLocal "unpacked-cabal-hashes" { } "tar xf ${all-cabal-hashes} --strip-components=1 --one-top-level=$out"'
+unpacked_hackage="$(nix-build -E "$extraction_derivation" --no-out-link)"
+
+echo "Generating compiler configuration …"
+compiler_config="$(nix-build -A haskellPackages.cabal2nix-unstable.compilerConfig --no-out-link)"
+
+echo "Running hackage2nix to regenerate pkgs/development/haskell-modules/hackage-packages.nix …"
+run_hackage2nix
+
+if [[ "$REGENERATE_TRANSITIVE" -eq 1 ]]; then
+
+echo "Regenerating transitive-broken.yaml … (pass --fast to $self to skip this step)"
+
+maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
+
+echo "Running hackage2nix again to reflect changes in transitive-broken.yaml …"
+
+run_hackage2nix
+
+fi
+
+
+if [[ "$DO_COMMIT" -eq 1 ]]; then
+git add pkgs/development/haskell-modules/configuration-hackage2nix/transitive-broken.yaml
+git add pkgs/development/haskell-modules/hackage-packages.nix
+git commit -F - << EOF
+haskellPackages: regenerate package set based on current config
+
+This commit has been generated by maintainers/scripts/haskell/regenerate-hackage-packages.sh
+EOF
+fi
+
+echo "Regeneration of hackage-packages.nix finished."
diff --git a/nixpkgs/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh b/nixpkgs/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
new file mode 100755
index 000000000000..a317dba4d4e7
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
@@ -0,0 +1,25 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils jq nix -I nixpkgs=.
+
+set -euo pipefail
+
+TMP_TEMPLATE=transitive-broken.XXXXXXX
+readonly TMP_TEMPLATE
+
+tmpfile=$(mktemp "$TMP_TEMPLATE")
+
+trap 'rm -f "${tmpfile}"' 0
+
+config_file=pkgs/development/haskell-modules/configuration-hackage2nix/transitive-broken.yaml
+
+cat > "$tmpfile" << EOF
+# This file is automatically generated by
+# maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
+# It is supposed to list all haskellPackages that cannot evaluate because they
+# depend on a dependency marked as broken.
+dont-distribute-packages:
+EOF
+
+nix-instantiate --eval --option restrict-eval true -I . --strict --json maintainers/scripts/haskell/transitive-broken-packages.nix | jq -r . | LC_ALL=C.UTF-8 sort -i >> "$tmpfile"
+
+mv "$tmpfile" "$config_file"
diff --git a/nixpkgs/maintainers/scripts/haskell/test-configurations.nix b/nixpkgs/maintainers/scripts/haskell/test-configurations.nix
new file mode 100644
index 000000000000..8473ed4db8a2
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/test-configurations.nix
@@ -0,0 +1,158 @@
+/* Nix expression to test for regressions in the Haskell configuration overlays.
+
+   test-configurations.nix determines all attributes touched by given Haskell
+   configuration overlays (i. e. pkgs/development/haskell-modules/configuration-*.nix)
+   and builds all derivations (or at least a reasonable subset) affected by
+   these overrides.
+
+   By default, it checks `configuration-{common,nix,ghc-8.10.x}.nix`. You can
+   invoke it like this:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix --keep-going
+
+   It is possible to specify other configurations:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --arg files '[ "configuration-ghc-9.0.x.nix" "configuration-ghc-9.2.x.nix" ]' \
+       --keep-going
+
+   You can also just supply a single string:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --argstr files "configuration-arm.nix" --keep-going
+
+   You can even supply full paths which is handy, as it allows for tab-completing
+   the configurations:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --argstr files pkgs/development/haskell-modules/configuration-arm.nix \
+       --keep-going
+
+   By default, derivations that fail to evaluate are skipped, unless they are
+   “just” marked as broken. You can check for other eval errors like this:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --arg skipEvalErrors false --keep-going
+
+   You can also disable checking broken packages by passing a nixpkgs config:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --arg config '{ allowBroken = false; }' --keep-going
+
+   By default the haskell.packages.ghc*Binary sets used for bootstrapping GHC
+   are _not_ tested. You can change this using:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --arg skipBinaryGHCs false --keep-going
+
+*/
+{ files ? [
+    "configuration-common.nix"
+    "configuration-nix.nix"
+    "configuration-ghc-8.10.x.nix"
+  ]
+, nixpkgsPath ? ../../..
+, config ? { allowBroken = true; }
+, skipEvalErrors ? true
+, skipBinaryGHCs ? true
+}:
+
+let
+  pkgs = import nixpkgsPath { inherit config; };
+  inherit (pkgs) lib;
+
+  # see the usage explanation above for the input formats `files` allows
+  files' = builtins.map builtins.baseNameOf (
+    if !builtins.isList files then [ files ] else files
+  );
+
+  packageSetsWithVersionedHead = pkgs.haskell.packages // (
+    let
+      headSet = pkgs.haskell.packages.ghcHEAD;
+      # Determine the next GHC release version following GHC HEAD.
+      # GHC HEAD always has an odd, tentative version number, e.g. 9.7.
+      # GHC releases always have even numbers, i.e. GHC 9.8 is branched off from
+      # GHC HEAD 9.7. Since we use the to-be-released version number for GHC
+      # HEAD's configuration file, we need to calculate it here: e.g. a GHC
+      # HEAD version of 9.7.20230527 yields the set name "ghc98".
+      headVersion = lib.pipe headSet.ghc.version [
+        lib.versions.splitVersion
+        (lib.take 2)
+        lib.concatStrings
+        lib.strings.toInt
+        (builtins.add 1)
+        toString
+      ];
+    in
+    {
+      "ghc${headVersion}" = headSet;
+    }
+  );
+
+  setsForFile = fileName:
+    let
+      # extract the unique part of the config's file name
+      configName = builtins.head (
+        builtins.match "configuration-(.+).nix" fileName
+      );
+      # match the major and minor version of the GHC the config is intended for, if any
+      configVersion = lib.concatStrings (
+        builtins.match "ghc-([0-9]+).([0-9]+).x" configName
+      );
+      # return all package sets under haskell.packages matching the version components
+      setsForVersion = builtins.map (name: packageSetsWithVersionedHead.${name}) (
+        builtins.filter (setName:
+          lib.hasPrefix "ghc${configVersion}" setName
+          && (skipBinaryGHCs -> !(lib.hasInfix "Binary" setName))
+        ) (
+          builtins.attrNames packageSetsWithVersionedHead
+        )
+      );
+
+      defaultSets = [ pkgs.haskellPackages ];
+    in {
+      # use plain haskellPackages for the version-agnostic files
+      # TODO(@sternenseemann): also consider currently selected versioned sets
+      "common" = defaultSets;
+      "nix" = defaultSets;
+      "arm" = defaultSets;
+      "darwin" = defaultSets;
+    }.${configName} or setsForVersion;
+
+  # attribute set that has all the attributes of haskellPackages set to null
+  availableHaskellPackages = builtins.listToAttrs (
+    builtins.map (attr: lib.nameValuePair attr null) (
+      builtins.attrNames pkgs.haskellPackages
+    )
+  );
+
+  # evaluate a configuration and only return the attributes changed by it,
+  # pass availableHaskellPackages as super in case intersectAttrs is used
+  overriddenAttrs = fileName: builtins.attrNames (
+    lib.fix (self:
+      import (nixpkgsPath + "/pkgs/development/haskell-modules/${fileName}") {
+        haskellLib = pkgs.haskell.lib.compose;
+        inherit pkgs;
+      } self availableHaskellPackages
+    )
+  );
+
+  # list of derivations that are affected by overrides in the given configuration
+  # overlays. For common, nix, darwin etc. only the derivations from the default
+  # package set will be emitted.
+  packages = builtins.filter (v:
+    lib.warnIf (v.meta.broken or false) "${v.pname} is marked as broken" (
+      v != null
+      && (skipEvalErrors -> (builtins.tryEval (v.outPath or v)).success)
+    )
+  ) (
+    lib.concatMap (fileName:
+      let
+        sets = setsForFile fileName;
+        attrs = overriddenAttrs fileName;
+      in
+        lib.concatMap (set: builtins.map (attr: set.${attr}) attrs) sets
+    ) files'
+  );
+in
+
+packages
diff --git a/nixpkgs/maintainers/scripts/haskell/transitive-broken-packages.nix b/nixpkgs/maintainers/scripts/haskell/transitive-broken-packages.nix
new file mode 100644
index 000000000000..50ccb14577bc
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/transitive-broken-packages.nix
@@ -0,0 +1,16 @@
+let
+  nixpkgs = import ../../..;
+  inherit (nixpkgs {}) pkgs lib;
+  getEvaluating = x:
+    builtins.attrNames (
+      lib.filterAttrs (
+        _: v: (builtins.tryEval (v.outPath or null)).success && lib.isDerivation v && !v.meta.broken
+      ) x
+    );
+  brokenDeps = lib.subtractLists
+    (getEvaluating pkgs.haskellPackages)
+    (getEvaluating (nixpkgs { config.allowBroken = true; }).haskellPackages);
+in
+''
+  ${lib.concatMapStringsSep "\n" (x: " - ${x}") brokenDeps}
+''
diff --git a/nixpkgs/maintainers/scripts/haskell/update-cabal2nix-unstable.sh b/nixpkgs/maintainers/scripts/haskell/update-cabal2nix-unstable.sh
new file mode 100755
index 000000000000..545c5773ff48
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/update-cabal2nix-unstable.sh
@@ -0,0 +1,17 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils curl jq gnused haskellPackages.cabal2nix-unstable -I nixpkgs=.
+
+# Updates cabal2nix-unstable to the latest master of the nixos/cabal2nix repository.
+# See regenerate-hackage-packages.sh for details on the purpose of this script.
+
+set -euo pipefail
+
+# fetch current master HEAD from GitHub
+head_info="$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/NixOS/cabal2nix/branches/master)"
+# extract commit hash
+commit="$(jq -r .commit.sha <<< "$head_info")"
+# extract commit timestamp and convert to date
+date="$(date "--date=$(jq -r .commit.commit.committer.date <<< "$head_info")" +%F)"
+# generate nix expression from cabal file, replacing the version with the commit date
+echo '# This file defines cabal2nix-unstable, used by maintainers/scripts/haskell/regenerate-hackage-packages.sh.' > pkgs/development/haskell-modules/cabal2nix-unstable.nix
+cabal2nix --subpath cabal2nix "https://github.com/NixOS/cabal2nix/archive/$commit.tar.gz" | sed -e 's/version = ".*"/version = "'"unstable-$date"'"/' >> pkgs/development/haskell-modules/cabal2nix-unstable.nix
diff --git a/nixpkgs/maintainers/scripts/haskell/update-hackage.sh b/nixpkgs/maintainers/scripts/haskell/update-hackage.sh
new file mode 100755
index 000000000000..5aa644a3d0fa
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/update-hackage.sh
@@ -0,0 +1,35 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl jq git gnused -I nixpkgs=.
+
+# See regenerate-hackage-packages.sh for details on the purpose of this script.
+
+set -euo pipefail
+
+pin_file=pkgs/data/misc/hackage/pin.json
+current_commit="$(jq -r .commit $pin_file)"
+old_date="$(jq -r .msg $pin_file | sed 's/Update from Hackage at //')"
+git_info="$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/commercialhaskell/all-cabal-hashes/branches/hackage)"
+head_commit="$(echo "$git_info" | jq -r .commit.sha)"
+commit_msg="$(echo "$git_info" | jq -r .commit.commit.message)"
+new_date="$(echo "$commit_msg" | sed 's/Update from Hackage at //')"
+
+if [ "$current_commit" != "$head_commit" ]; then
+   url="https://github.com/commercialhaskell/all-cabal-hashes/archive/$head_commit.tar.gz"
+   hash="$(nix-prefetch-url "$url")"
+   jq -n \
+     --arg commit "$head_commit" \
+     --arg hash "$hash" \
+     --arg url "$url" \
+     --arg commit_msg "$commit_msg" \
+     '{commit: $commit, url: $url, sha256: $hash, msg: $commit_msg}' \
+     > $pin_file
+fi
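+
+# The resulting pin.json then looks roughly like this (illustrative values):
+#   {
+#     "commit": "<40-char sha>",
+#     "url": "https://github.com/commercialhaskell/all-cabal-hashes/archive/<sha>.tar.gz",
+#     "sha256": "<nix-prefetch-url hash>",
+#     "msg": "Update from Hackage at <timestamp>"
+#   }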
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add pkgs/data/misc/hackage/pin.json
+git commit -F - << EOF
+all-cabal-hashes: $old_date -> $new_date
+
+This commit has been generated by maintainers/scripts/haskell/update-hackage.sh
+EOF
+fi
diff --git a/nixpkgs/maintainers/scripts/haskell/update-stackage.sh b/nixpkgs/maintainers/scripts/haskell/update-stackage.sh
new file mode 100755
index 000000000000..5dc52abdd668
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/update-stackage.sh
@@ -0,0 +1,87 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl jq git gnused gnugrep -I nixpkgs=.
+# shellcheck shell=bash
+
+set -eu -o pipefail
+
+# Stackage solver to use, LTS or Nightly
+# (should be capitalized like the display name)
+SOLVER=LTS
+# Stackage solver version, if any. Uses the latest if empty.
+VERSION=21
+TMP_TEMPLATE=update-stackage.XXXXXXX
+readonly SOLVER
+readonly VERSION
+readonly TMP_TEMPLATE
+
+toLower() {
+    printf "%s" "$1" | tr '[:upper:]' '[:lower:]'
+}
+
+tmpfile=$(mktemp "$TMP_TEMPLATE")
+tmpfile_new=$(mktemp "$TMP_TEMPLATE")
+
+stackage_config="pkgs/development/haskell-modules/configuration-hackage2nix/stackage.yaml"
+
+trap 'rm "${tmpfile}" "${tmpfile_new}"' 0
+touch "$tmpfile" "$tmpfile_new" # Creating files here so that trap creates no errors.
+
+curl -L -s "https://stackage.org/$(toLower "$SOLVER")${VERSION:+-$VERSION}/cabal.config" >"$tmpfile"
+old_version=$(grep '^# Stackage' $stackage_config | sed -e 's/.\+ \([A-Za-z]\+ [0-9.-]\+\)$/\1/g')
+version="$SOLVER $(sed -rn "s/^--.*http:..(www.)?stackage.org.snapshot.$(toLower "$SOLVER")-//p" "$tmpfile")"
+
+if [[ "$old_version" == "$version" ]]; then
+   echo "No new stackage version"
+   exit 0 # Nothing to do
+fi
+
+echo "Updating Stackage from $old_version to $version."
+
+# Create a simple yaml version of the file.
+sed -r \
+    -e '/^--/d' \
+    -e 's|^constraints:||' \
+    -e 's|^ +|  - |' \
+    -e 's|,$||' \
+    -e '/^with-compiler:/d' \
+    -e '/installed$/d' \
+    -e '/^$/d' \
+    < "${tmpfile}" | sort --ignore-case >"${tmpfile_new}"
+
+cat > $stackage_config << EOF
+# Stackage $version
+# This file is auto-generated by
+# maintainers/scripts/haskell/update-stackage.sh
+default-package-overrides:
+EOF
+
+# Drop restrictions on some tools where we always want the latest version.
+sed -r \
+    -e '/ cabal2nix /d' \
+    -e '/ distribution-nixpkgs /d' \
+    -e '/ jailbreak-cabal /d' \
+    -e '/ language-nix /d' \
+    -e '/ hackage-db /d' \
+    -e '/ cabal-install /d' \
+    -e '/ lsp /d' \
+    -e '/ lsp-types /d' \
+    -e '/ lsp-test /d' \
+    -e '/ hie-bios /d' \
+    -e '/ ShellCheck /d' \
+    -e '/ Agda /d' \
+    -e '/ stack /d' \
+    < "${tmpfile_new}" >> $stackage_config
+# Explanations:
+# cabal2nix, distribution-nixpkgs, jailbreak-cabal, language-nix: These are our packages and we know what we are doing.
+# lsp, lsp-types, lsp-test, hie-bios: These are tightly coupled to hls which is not in stackage. They have no rdeps in stackage.
+# ShellCheck: latest version of command-line dev tool.
+# Agda: The Agda community is fast-moving; we strive to always include the newest versions of Agda and the Agda packages in nixpkgs.
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add $stackage_config
+git commit -F - << EOF
+haskellPackages: stackage $old_version -> $version
+
+This commit has been generated by maintainers/scripts/haskell/update-stackage.sh
+EOF
+fi
diff --git a/nixpkgs/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh b/nixpkgs/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh
new file mode 100755
index 000000000000..9130941a5366
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh
@@ -0,0 +1,43 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl gnused -I nixpkgs=.
+
+# On Hackage every package description shows a category "Distributions" which
+# lists a "NixOS" version.
+# This script uploads a CSV to Hackage which will update the displayed versions
+# based on the current versions in nixpkgs. This happens with a simple HTTP
+# request.
+
+# For authorization you just need to have any valid hackage account. This
+# script uses the `username` and `password-command` field from your
+# ~/.cabal/config file.
+
+# e.g. username: maralorn
+#      password-command: pass hackage.haskell.org (this can be any command, but not an arbitrary shell expression; like cabal, we only read the first output line and ignore the rest)
+# Those fields are specified under `upload` on the `cabal` man page.
+
+if test -z "$CABAL_DIR"; then
+  dirs=(
+    "$HOME/.cabal"
+    "${XDG_CONFIG_HOME:-$HOME/.config}/cabal"
+  )
+  missing=true
+
+  for dir in "${dirs[@]}"; do
+    if test -d "$dir"; then
+      export CABAL_DIR="$dir"
+      missing=false
+      break
+    fi
+  done
+
+  if $missing; then
+    echo "Could not find the cabal configuration directory in any of: ${dirs[@]}" >&2
+    exit 101
+  fi
+fi
+
+package_list="$(nix-build -A haskell.package-list)/nixos-hackage-packages.csv"
+username=$(grep "^username:" "$CABAL_DIR/config" | sed "s/^username: //")
+password_command=$(grep "^password-command:" "$CABAL_DIR/config" | sed "s/^password-command: //")
+curl -u "$username:$($password_command | head -n1)" --digest -H "Content-type: text/csv" -T "$package_list" https://hackage.haskell.org/distro/NixOS/packages.csv
+echo
diff --git a/nixpkgs/maintainers/scripts/hydra-eval-failures.py b/nixpkgs/maintainers/scripts/hydra-eval-failures.py
new file mode 100755
index 000000000000..b7518b128574
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/hydra-eval-failures.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i python3 -p "python3.withPackages(ps: with ps; [ requests pyquery click ])"
+
+# To use, just execute this script with --help to display help.
+
+import subprocess
+import json
+import sys
+
+import click
+import requests
+from pyquery import PyQuery as pq
+
+def map_dict(f, d):
+    # Map f over the values of d in place and return the dict.
+    for k, v in d.items():
+        d[k] = f(v)
+    return d
+
+maintainers_json = subprocess.check_output([
+    'nix-instantiate', '-A', 'lib.maintainers', '--eval', '--strict', '--json'
+])
+maintainers = json.loads(maintainers_json)
+MAINTAINERS = map_dict(lambda v: v.get('github', None), maintainers)
+
+def get_response_text(url):
+    return pq(requests.get(url).text)  # IO
+
+EVAL_FILE = {
+    'nixos': 'nixos/release.nix',
+    'nixpkgs': 'pkgs/top-level/release.nix',
+}
+
+
+def get_maintainers(attr_name):
+    try:
+        nixname = attr_name.split('.')
+        meta_json = subprocess.check_output([
+            'nix-instantiate',
+            '--eval',
+            '--strict',
+            '-A',
+            '.'.join(nixname[1:]) + '.meta',
+            EVAL_FILE[nixname[0]],
+            '--arg',
+            'nixpkgs',
+            './.',
+            '--json'])
+        meta = json.loads(meta_json)
+        return meta.get('maintainers', [])
+    except Exception:
+        return []
+
+def filter_github_users(maintainers):
+    github_only = []
+    for i in maintainers:
+        if i.get('github'):
+            github_only.append(i)
+    return github_only
+
+def print_build(table_row):
+    a = pq(table_row)('a')[1]
+    print("- [ ] [{}]({})".format(a.text, a.get('href')), flush=True)
+
+    job_maintainers = filter_github_users(get_maintainers(a.text))
+    if job_maintainers:
+        print("  - maintainers: {}".format(" ".join(map(lambda u: '@' + u.get('github'), job_maintainers))))
+    # TODO: print last three persons that touched this file
+    # TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?
+
+    sys.stdout.flush()
+
+@click.command()
+@click.option(
+    '--jobset',
+    default="nixos/release-19.09",
+    help='Hydra project like nixos/release-19.09')
+def cli(jobset):
+    """
+    Given a Hydra project, inspect latest evaluation
+    and print a summary of failed builds
+    """
+
+    url = "https://hydra.nixos.org/jobset/{}".format(jobset)
+
+    # get the last evaluation
+    click.echo(click.style(
+        'Getting latest evaluation for {}'.format(url), fg='green'))
+    d = get_response_text(url)
+    evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
+    latest_eval_url = evaluations[0].get('href')
+
+    # parse last evaluation page
+    click.echo(click.style(
+        'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
+    d = get_response_text(latest_eval_url + '?full=1')
+
+    # TODO: aborted evaluations
+    # TODO: dependency failed without propagated builds
+    print('\nFailures:')
+    for tr in d('img[alt="Failed"]').parents('tr'):
+        print_build(tr)
+
+    print('\nDependency failures:')
+    for tr in d('img[alt="Dependency failed"]').parents('tr'):
+        print_build(tr)
+
+
+
+if __name__ == "__main__":
+    try:
+        cli()
+    except Exception:
+        import pdb; pdb.post_mortem()
diff --git a/nixpkgs/maintainers/scripts/hydra_eval_check b/nixpkgs/maintainers/scripts/hydra_eval_check
new file mode 100755
index 000000000000..c8e03424f320
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/hydra_eval_check
@@ -0,0 +1,13 @@
+#! /bin/sh
+
+# give absolute path of release.nix as argument
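+# e.g. hydra_eval_check /path/to/nixpkgs/nixos/release.nix  (illustrative path)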
+hydra_eval_jobs \
+  --argstr system x86_64-linux \
+  --argstr system i686-linux \
+  --argstr system x86_64-darwin \
+  --argstr system i686-cygwin \
+  --argstr system x86_64-cygwin \
+  --argstr system i686-freebsd \
+  --arg officialRelease false \
+  --arg nixpkgs "{ outPath = builtins.storePath ./. ; rev = 1234; }" \
+  "$@"
diff --git a/nixpkgs/maintainers/scripts/luarocks-config.lua b/nixpkgs/maintainers/scripts/luarocks-config.lua
new file mode 100644
index 000000000000..f7ac9f9e4ba6
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/luarocks-config.lua
@@ -0,0 +1,8 @@
+
+-- default of luarocks listed at src/luarocks/core/cfg.lua
+-- keep this list synced with pkgs/build-support/fetchurl/mirrors.nix
+rocks_servers = {
+	"https://luarocks.org",
+	"https://raw.githubusercontent.com/rocks-moonscript-org/moonrocks-mirror/master/"
+}
+version_check_on_fail = false
diff --git a/nixpkgs/maintainers/scripts/luarocks-packages.csv b/nixpkgs/maintainers/scripts/luarocks-packages.csv
new file mode 100644
index 000000000000..939905ab81d9
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/luarocks-packages.csv
@@ -0,0 +1,124 @@
+name,src,ref,server,version,luaversion,maintainers
+alt-getopt,,,,,,arobyn
+bit32,,,,5.3.0-1,5.1,lblasc
+argparse,,,,,,
+basexx,,,,,,
+binaryheap,,,,,,vcunat
+busted,,,,,,
+cassowary,,,,,,marsam alerque
+cldr,,,,,,alerque
+compat53,,,,,,vcunat
+cosmo,,,,,,marsam
+coxpcall,,,,1.17.0-1,,
+cqueues,,,,,,vcunat
+cyan,,,,,,
+digestif,https://github.com/astoff/digestif.git,,,,5.3,
+dkjson,,,,,,
+fennel,,,,,,misterio77
+fidget.nvim,,,,,,mrcjkb
+fifo,,,,,,
+fluent,,,,,,alerque
+fzy,,,,,,mrcjkb
+gitsigns.nvim,https://github.com/lewis6991/gitsigns.nvim.git,,,,5.1,
+haskell-tools.nvim,,,,,,
+http,,,,0.3-0,,vcunat
+image.nvim,,,,,,teto
+inspect,,,,,,
+jsregexp,,,,,,
+ldbus,,,http://luarocks.org/dev,,,
+ldoc,,,,,,
+lgi,,,,,,
+linenoise,https://github.com/hoelzro/lua-linenoise.git,,,,,
+ljsyscall,,,,,5.1,lblasc
+lmathx,,,,,5.3,alexshpilkin
+lmpfrlib,,,,,5.3,alexshpilkin
+loadkit,,,,,,alerque
+lpeg,,,,,,vyp
+lpeg_patterns,,,,,,
+lpeglabel,,,,1.6.0,,
+lrexlib-gnu,,,,,,
+lrexlib-pcre,,,,,,vyp
+lrexlib-posix,,,,,,
+lua-cjson,,,,,,
+lua-cmsgpack,,,,,,
+lua-curl,,,,,,
+lua-ffi-zlib,,,,,,
+lua-lsp,,,,,,
+lua-messagepack,,,,,,
+lua-protobuf,,,,,,lockejan
+lua-resty-http,,,,,,
+lua-resty-jwt,,,,,,
+lua-resty-openidc,,,,,,
+lua-resty-openssl,,,,,,
+lua-resty-session,,,,,,
+lua-rtoml,https://github.com/lblasc/lua-rtoml,,,,,lblasc
+lua-subprocess,https://github.com/0x0ade/lua-subprocess,,,,5.1,scoder12
+lua-term,,,,,,
+lua-toml,,,,,,
+lua-zlib,,,,,,koral
+lua_cliargs,,,,,,
+luabitop,https://github.com/teto/luabitop.git,,,,,
+luacheck,,,,,,
+luacov,,,,,,
+luadbi,,,,,,
+luadbi-mysql,,,,,,
+luadbi-postgresql,,,,,,
+luadbi-sqlite3,,,,,,
+luaepnf,,,,,,
+luaevent,,,,,,
+luaexpat,,,,1.4.1-1,,arobyn flosse
+luaffi,,,http://luarocks.org/dev,,,
+luafilesystem,,,,1.8.0-1,,flosse
+lualdap,,,,,,aanderse
+lualogging,,,,,,
+luaossl,,,,,5.1,
+luaposix,,,,34.1.1-1,,vyp lblasc
+luarepl,,,,,,
+luarocks-build-rust-mlua,,,,,,mrcjkb
+luasec,,,,,,flosse
+luasnip,,,,,,
+luasocket,,,,,,
+luasql-sqlite3,,,,,,vyp
+luassert,,,,,,
+luasystem,,,,,,
+luaunbound,,,,,,
+luaunit,,,,,,lockejan
+luautf8,,,,,,pstn
+luazip,,,,,,
+lua-yajl,,,,,,pstn
+lua-iconv,,,,7.0.0,,
+luuid,,,,20120509-2,,
+luv,,,,1.44.2-1,,
+lush.nvim,https://github.com/rktjmp/lush.nvim,,,,,teto
+lyaml,,,,,,lblasc
+magick,,,,,5.1,donovanglover
+markdown,,,,,,
+mediator_lua,,,,,,
+middleclass,,,,,,
+mpack,,,,,,
+moonscript,https://github.com/leafo/moonscript.git,dev-1,,,,arobyn
+nlua,,,,,,teto
+nui.nvim,,,,,,mrcjkb
+nvim-cmp,https://github.com/hrsh7th/nvim-cmp,,,,,
+nvim-nio,,,,,,mrcjkb
+penlight,https://github.com/lunarmodules/Penlight.git,,,,,alerque
+plenary.nvim,https://github.com/nvim-lua/plenary.nvim.git,,,,5.1,
+rapidjson,https://github.com/xpol/lua-rapidjson.git,,,,,
+rocks.nvim,,,,,5.1,teto mrcjkb
+rest.nvim,,,,,5.1,teto
+rustaceanvim,,,,,,mrcjkb
+say,https://github.com/Olivine-Labs/say.git,,,,,
+serpent,,,,,,lockejan
+sqlite,,,,,,
+std._debug,https://github.com/lua-stdlib/_debug.git,,,,,
+std.normalize,,,,,,
+stdlib,,,,41.2.2,,vyp
+teal-language-server,,,http://luarocks.org/dev,,,
+telescope.nvim,,,,,5.1,
+telescope-manix,,,,,,
+tl,,,,,,mephistophiles
+toml,,,,,,mrcjkb
+toml-edit,,,,,5.1,mrcjkb
+vstruct,https://github.com/ToxicFrog/vstruct.git,,,,,
+vusted,,,,,,figsoda
+xml2lua,,,,,,teto
diff --git a/nixpkgs/maintainers/scripts/mdize-module.sh b/nixpkgs/maintainers/scripts/mdize-module.sh
new file mode 100755
index 000000000000..e2d2e5467aa9
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/mdize-module.sh
@@ -0,0 +1,83 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -I nixpkgs=. -i bash -p delta jq perl
+
+set -euo pipefail
+shopt -s inherit_errexit
+
+cat <<'EOF'
+This script attempts to automatically convert option descriptions from
+DocBook syntax to markdown. Naturally this process is incomplete and
+imperfect, so any changes generated by this script MUST be reviewed.
+
+Possible problems include: incorrectly replaced tags, badly formatted
+markdown, DocBook tags this script doesn't recognize remaining in the
+output and crashing the docs build, incorrect escaping of markdown
+metacharacters, incorrect unescaping of XML entities—and the list goes on.
+
+Always review the generated changes!
+
+Some known limitations:
+  - Does not transform literalDocBook items
+  - Replacements can occur in non-option code, such as string literals
+
+
+EOF
+
+
+
+build-options-json() {
+    nix-build --no-out-link --expr '
+        let
+            sys = import ./nixos/default.nix {
+                configuration = {};
+            };
+        in
+        [
+            sys.config.system.build.manual.optionsJSON
+        ]
+    '
+}
+
+
+
+git diff --quiet || {
+    echo "Worktree is dirty. Please stash or commit first."
+    exit 1
+}
+
+echo "Building options.json ..."
+old_options=$(build-options-json)
+
+echo "Applying replacements ..."
+perl -pi -e '
+    BEGIN {
+        undef $/;
+    }
+
+    s,<literal>([^`]*?)</literal>,`$1`,smg;
+    s,<replaceable>([^»]*?)</replaceable>,«$1»,smg;
+    s,<filename>([^`]*?)</filename>,{file}`$1`,smg;
+    s,<option>([^`]*?)</option>,{option}`$1`,smg;
+    s,<code>([^`]*?)</code>,`$1`,smg;
+    s,<command>([^`]*?)</command>,{command}`$1`,smg;
+    s,<link xlink:href="(.+?)" ?/>,<$1>,smg;
+    s,<link xlink:href="(.+?)">(.*?)</link>,[$2]($1),smg;
+    s,<package>([^`]*?)</package>,`$1`,smg;
+    s,<emphasis>([^*]*?)</emphasis>,*$1*,smg;
+    s,<citerefentry>\s*
+        <refentrytitle>\s*(.*?)\s*</refentrytitle>\s*
+        <manvolnum>\s*(.*?)\s*</manvolnum>\s*
+      </citerefentry>,{manpage}`$1($2)`,smgx;
+    s,^( +description =),\1 lib.mdDoc,smg;
+' "$@"
+
+echo "Building options.json again ..."
+new_options=$(build-options-json)
+
+
+! cmp -s {$old_options,$new_options}/share/doc/nixos/options.json && {
+    diff -U10 \
+        <(jq . <$old_options/share/doc/nixos/options.json) \
+        <(jq . <$new_options/share/doc/nixos/options.json) \
+        | delta
+}
diff --git a/nixpkgs/maintainers/scripts/nix-call-package b/nixpkgs/maintainers/scripts/nix-call-package
new file mode 100755
index 000000000000..be478fca2b75
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nix-call-package
@@ -0,0 +1,5 @@
+#! /bin/sh
+
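+# Usage sketch, as inferred from the expression below (arguments illustrative):
+#   nix-call-package ./pkg.nix                  # builds pkgs.callPackage ./pkg.nix { }
+#   nix-call-package ./pkg.nix '' 'foo = 1;'    # passes { foo = 1; } to callPackage
+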
+echo "let pkgs = import <nixpkgs$2> {}; x = pkgs.callPackage $1 { $3 }; in ${4:-x}" |
+nix-instantiate --show-trace - |
+xargs nix-store -r -K
diff --git a/nixpkgs/maintainers/scripts/nix-diff.sh b/nixpkgs/maintainers/scripts/nix-diff.sh
new file mode 100755
index 000000000000..0c65e29cf435
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nix-diff.sh
@@ -0,0 +1,277 @@
+#!/usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils gnugrep gnused
+
+################################################################################
+# nix-diff.sh                                                                  #
+################################################################################
+# This script "diffs" Nix profile generations.                                 #
+#                                                                              #
+# Example:                                                                     #
+################################################################################
+# > nix-diff.sh 90 92                                                          #
+# + gnumake-4.2.1                                                              #
+# + gnumake-4.2.1-doc                                                          #
+# - htmldoc-1.8.29                                                             #
+################################################################################
+# The example shows that as of generation 92 and since generation 90,          #
+# gnumake-4.2.1 and gnumake-4.2.1-doc have been installed, while               #
+# htmldoc-1.8.29 has been removed.                                             #
+#                                                                              #
+# The example above shows the default, minimal output mode of this script.     #
+# For more features, run `nix-diff.sh -h` for usage instructions.              #
+################################################################################
+
+usage() {
+    cat <<EOF
+usage: nix-diff.sh [-h | [-p profile | -s] [-q] [-l] [range]]
+-h:         print this message before exiting
+-q:         list the derivations installed in the parent generation
+-l:         diff every available intermediate generation between parent and
+            child
+-p profile: specify the Nix profile to use
+            * defaults to ~/.nix-profile
+-s:         use the system profile
+            * equivalent to: -p /nix/var/nix/profiles/system
+profile:    * should be something like /nix/var/nix/profiles/default, not a
+              generation link like /nix/var/nix/profiles/default-2-link
+range:      the range of generations to diff
+            * the following patterns are allowed, where A, B, and N are positive
+              integers, and G is the currently active generation:
+                A..B => diffs from generation A to generation B
+                ~N   => diffs from the Nth newest generation (older than G) to G
+                A    => diffs from generation A to G
+            * defaults to ~1
+EOF
+}
+
+usage_tip() {
+    echo 'run `nix-diff.sh -h` for usage instructions' >&2
+    exit 1
+}
+
+while getopts :hqlp:s opt; do
+    case $opt in
+        h)
+            usage
+            exit
+            ;;
+        q)
+            opt_query=1
+            ;;
+        l)
+            opt_log=1
+            ;;
+        p)
+            opt_profile=$OPTARG
+            ;;
+        s)
+            opt_profile=/nix/var/nix/profiles/system
+            ;;
+        \?)
+            echo "error: invalid option -$OPTARG" >&2
+            usage_tip
+            ;;
+    esac
+done
+shift $((OPTIND-1))
+
+if [ -n "$opt_profile" ]; then
+    if ! [ -L "$opt_profile" ]; then
+        echo "error: expecting \`$opt_profile\` to be a symbolic link" >&2
+        usage_tip
+    fi
+else
+    opt_profile=$(readlink ~/.nix-profile)
+    if (( $? != 0 )); then
+        echo 'error: unable to dereference `~/.nix-profile`' >&2
+        echo 'specify the profile manually with the `-p` flag' >&2
+        usage_tip
+    fi
+fi
+
+list_gens() {
+    nix-env -p "$opt_profile" --list-generations \
+        | sed -r 's:^\s*::' \
+        | cut -d' ' -f1
+}
+
+current_gen() {
+    nix-env -p "$opt_profile" --list-generations \
+        | grep -E '\(current\)\s*$' \
+        | sed -r 's:^\s*::' \
+        | cut -d' ' -f1
+}
+
+neg_gen() {
+    local i=0 from=$1 n=$2 tmp
+    for gen in $(list_gens | sort -rn); do
+        if ((gen < from)); then
+            tmp=$gen
+            ((i++))
+            ((i == n)) && break
+        fi
+    done
+    if ((i < n)); then
+        echo -n "error: there aren't $n generation(s) older than" >&2
+        echo " generation $from" >&2
+        return 1
+    fi
+    echo $tmp
+}
+
+match() {
+    argv=("$@")
+    for i in $(seq $(($#-1))); do
+        if grep -E "^${argv[$i]}\$" <(echo "$1") >/dev/null; then
+            echo $i
+            return
+        fi
+    done
+    echo 0
+}
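+# match prints the 1-based index of the first pattern (arguments 2..N) that
+# fully matches "$1", or 0 if none does, e.g. `match 12 '' '[0-9]+'` prints 2.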
+
+case $(match "$1" '' '[0-9]+' '[0-9]+\.\.[0-9]+' '~[0-9]+') in
+    1)
+        diffTo=$(current_gen)
+        diffFrom=$(neg_gen $diffTo 1)
+        (($? == 1)) && usage_tip
+        ;;
+    2)
+        diffFrom=$1
+        diffTo=$(current_gen)
+        ;;
+    3)
+        diffFrom=${1%%.*}
+        diffTo=${1##*.}
+        ;;
+    4)
+        diffTo=$(current_gen)
+        diffFrom=$(neg_gen $diffTo ${1#*~})
+        (($? == 1)) && usage_tip
+        ;;
+    0)
+        echo 'error: invalid invocation' >&2
+        usage_tip
+        ;;
+esac
+
+dirA="${opt_profile}-${diffFrom}-link"
+dirB="${opt_profile}-${diffTo}-link"
+
+declare -a temp_files
+temp_length() {
+    echo -n ${#temp_files[@]}
+}
+temp_make() {
+    temp_files[$(temp_length)]=$(mktemp)
+}
+temp_clean() {
+    rm -f ${temp_files[@]}
+}
+temp_name() {
+    echo -n "${temp_files[$(($(temp_length)-1))]}"
+}
+trap 'temp_clean' EXIT
+
+temp_make
+versA=$(temp_name)
+refs=$(nix-store -q --references "$dirA")
+(( $? != 0 )) && exit 1
+echo "$refs" \
+    | grep -v env-manifest.nix \
+    | sort \
+          > "$versA"
+
+print_tag() {
+    local gen=$1
+    nix-env -p "$opt_profile" --list-generations \
+        | grep -E "^\s*${gen}" \
+        | sed -r 's:^\s*::' \
+        | sed -r 's:\s*$::'
+}
+
+if [ -n "$opt_query" ]; then
+    print_tag $diffFrom
+    cat "$versA" \
+        | sed -r 's:^[^-]+-(.*)$:    \1:'
+
+    print_line=1
+fi
+
+if [ -n "$opt_log" ]; then
+    gens=$(for gen in $(list_gens); do
+               ((diffFrom < gen && gen < diffTo)) && echo $gen
+           done)
+    # Force the $diffTo generation to be included in this list, instead of using
+    # `gen <= diffTo` in the preceding loop, so that we hit an error if it
+    # does not exist.
+    gens=$(echo "$gens"
+           echo $diffTo)
+else
+    gens=$diffTo
+fi
+
+temp_make
+add=$(temp_name)
+temp_make
+rem=$(temp_name)
+temp_make
+out=$(temp_name)
+
+for gen in $gens; do
+
+    [ -n "$print_line" ] && echo
+
+    temp_make
+    versB=$(temp_name)
+
+    dirB="${opt_profile}-${gen}-link"
+    refs=$(nix-store -q --references "$dirB")
+    (( $? != 0 )) && exit 1
+    echo "$refs" \
+        | grep -v env-manifest.nix \
+        | sort \
+              > "$versB"
+
+    in=$(comm -3 -1 "$versA" "$versB")
+    sed -r 's:^[^-]*-(.*)$:\1+:' <(echo "$in") \
+        | sort -f \
+               > "$add"
+
+    un=$(comm -3 -2 "$versA" "$versB")
+    sed -r 's:^[^-]*-(.*)$:\1-:' <(echo "$un") \
+        | sort -f \
+               > "$rem"
+
+    cat "$rem" "$add" \
+        | sort -f \
+        | sed -r 's:(.*)-$:- \1:' \
+        | sed -r 's:(.*)\+$:\+ \1:' \
+        | grep -v '^$' \
+              > "$out"
+
+    if [ -n "$opt_query" -o -n "$opt_log" ]; then
+
+        lines=$(wc -l "$out" | cut -d' ' -f1)
+        tag=$(print_tag "$gen")
+        (( $? != 0 )) && exit 1
+        if [ $lines -eq 0 ]; then
+            echo "$tag   (no change)"
+        else
+            echo "$tag"
+        fi
+        cat "$out" \
+            | sed 's:^:    :'
+
+        print_line=1
+
+    else
+        echo "diffing from generation $diffFrom to $diffTo"
+        cat "$out"
+    fi
+
+    versA=$versB
+
+done
+
+exit 0
diff --git a/nixpkgs/maintainers/scripts/nix-generate-from-cpan.nix b/nixpkgs/maintainers/scripts/nix-generate-from-cpan.nix
new file mode 100644
index 000000000000..bf48a5318611
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nix-generate-from-cpan.nix
@@ -0,0 +1,26 @@
+{ stdenv, lib, makeWrapper, perl, perlPackages }:
+
+stdenv.mkDerivation {
+  name = "nix-generate-from-cpan-3";
+
+  nativeBuildInputs = [ makeWrapper ];
+
+  buildInputs = with perlPackages; [
+    perl GetoptLongDescriptive CPANPLUS Readonly LogLog4perl
+  ];
+
+  dontUnpack = true;
+
+  installPhase = ''
+    mkdir -p $out/bin
+    cp ${./nix-generate-from-cpan.pl} $out/bin/nix-generate-from-cpan
+    patchShebangs $out/bin/nix-generate-from-cpan
+    wrapProgram $out/bin/nix-generate-from-cpan --set PERL5LIB $PERL5LIB
+  '';
+
+  meta = {
+    maintainers = with lib.maintainers; [ eelco ];
+    description = "Utility to generate a Nix expression for a Perl package from CPAN";
+    platforms = lib.platforms.unix;
+  };
+}
diff --git a/nixpkgs/maintainers/scripts/nix-generate-from-cpan.pl b/nixpkgs/maintainers/scripts/nix-generate-from-cpan.pl
new file mode 100755
index 000000000000..6754f79009ec
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nix-generate-from-cpan.pl
@@ -0,0 +1,473 @@
+#!/usr/bin/env perl
+
+use utf8;
+use strict;
+use warnings;
+
+use CPAN::Meta();
+use CPANPLUS::Backend();
+use MIME::Base64;
+use Module::CoreList;
+use Getopt::Long::Descriptive qw( describe_options );
+use JSON::PP qw( encode_json );
+use Log::Log4perl qw(:easy);
+use Readonly();
+
+# Readonly hash that maps CPAN style license strings to information
+# necessary to generate a Nixpkgs style license attribute.
+Readonly::Hash my %LICENSE_MAP => (
+
+    # The Perl 5 License (Artistic 1 & GPL 1 or later).
+    perl_5 => {
+        licenses => [qw( artistic1 gpl1Plus )]
+    },
+
+    # GNU Affero General Public License, Version 3.
+    agpl_3 => {
+        licenses => [qw( agpl3Plus )],
+        amb      => 1
+    },
+
+    # Apache Software License, Version 1.1.
+    apache_1_1 => {
+        licenses => ["Apache License 1.1"],
+        in_set   => 0
+    },
+
+    # Apache License, Version 2.0.
+    apache_2_0 => {
+        licenses => [qw( asl20 )]
+    },
+
+    # Artistic License, (Version 1).
+    artistic_1 => {
+        licenses => [qw( artistic1 )]
+    },
+
+    # Artistic License, Version 2.0.
+    artistic_2 => {
+        licenses => [qw( artistic2 )]
+    },
+
+    # BSD License (three-clause).
+    bsd => {
+        licenses => [qw( bsd3 )],
+        amb      => 1
+    },
+
+    # FreeBSD License (two-clause).
+    freebsd => {
+        licenses => [qw( bsd2 )]
+    },
+
+    # GNU Free Documentation License, Version 1.2.
+    gfdl_1_2 => {
+        licenses => [qw( fdl12Plus )]
+    },
+
+    # GNU Free Documentation License, Version 1.3.
+    gfdl_1_3 => {
+        licenses => [qw( fdl13Plus )]
+    },
+
+    # GNU General Public License, Version 1.
+    gpl_1 => {
+        licenses => [qw( gpl1Plus )],
+        amb      => 1
+    },
+
+    # GNU General Public License, Version 2. Note, we will interpret
+    # "gpl" alone as GPL v2+.
+    gpl_2 => {
+        licenses => [qw( gpl2Plus )],
+        amb      => 1
+    },
+
+    # GNU General Public License, Version 3.
+    gpl_3 => {
+        licenses => [qw( gpl3Plus )],
+        amb      => 1
+    },
+
+    # GNU Lesser General Public License, Version 2.1. Note, we will
+    # interpret "gpl" alone as LGPL v2.1+.
+    lgpl_2_1 => {
+        licenses => [qw( lgpl21Plus )],
+        amb      => 1
+    },
+
+    # GNU Lesser General Public License, Version 3.0.
+    lgpl_3_0 => {
+        licenses => [qw( lgpl3Plus )],
+        amb      => 1
+    },
+
+    # MIT (aka X11) License.
+    mit => {
+        licenses => [qw( mit )]
+    },
+
+    # Mozilla Public License, Version 1.0.
+    mozilla_1_0 => {
+        licenses => [qw( mpl10 )]
+    },
+
+    # Mozilla Public License, Version 1.1.
+    mozilla_1_1 => {
+        licenses => [qw( mpl11 )]
+    },
+
+    # OpenSSL License.
+    openssl => {
+        licenses => [qw( openssl )]
+    },
+
+    # Q Public License, Version 1.0.
+    qpl_1_0 => {
+        licenses => [qw( qpl )]
+    },
+
+    # Original SSLeay License.
+    ssleay => {
+        licenses => ["Original SSLeay License"],
+        in_set   => 0
+    },
+
+    # Sun Internet Standards Source License (SISSL).
+    sun => {
+        licenses => ["Sun Industry Standards Source License v1.1"],
+        in_set   => 0
+    },
+
+    # zlib License.
+    zlib => {
+        licenses => [qw( zlib )]
+    },
+
+    # Other Open Source Initiative (OSI) approved license.
+    open_source => {
+        licenses => [qw( free )],
+        amb      => 1
+    },
+
+    # Requires special permission from copyright holder.
+    restricted => {
+        licenses => [qw( unfree )],
+        amb      => 1
+    },
+
+    # Not an OSI approved license, but not restricted. Note, we
+    # currently map this to unfreeRedistributable, which is a
+    # conservative choice.
+    unrestricted => {
+        licenses => [qw( unfreeRedistributable )],
+        amb      => 1
+    },
+
+    # License not provided in metadata.
+    unknown => {
+        licenses => [],
+        amb      => 1
+    }
+);
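+
+# For example, render_license() below maps the CPAN license "perl_5" to
+#   license = with lib.licenses; [ artistic1 gpl1Plus ];
+# while "ssleay" (in_set = 0) is emitted as the bare string from the map.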
+
+sub handle_opts {
+    my ( $opt, $usage ) = describe_options(
+        'usage: %c %o MODULE',    # %c expands to the program name
+        [ 'maintainer|m=s', 'the package maintainer' ],
+        [ 'debug|d',        'enable debug output' ],
+        [ 'help',           'print usage message and exit' ]
+    );
+
+    if ( $opt->help ) {
+        print $usage->text;
+        exit;
+    }
+
+    my $module_name = $ARGV[0];
+
+    if ( !defined $module_name ) {
+        print STDERR "Missing module name\n";
+        print STDERR $usage->text;
+        exit 1;
+    }
+
+    return ( $opt, $module_name );
+}
+
+# Takes a Perl package attribute name and returns 1 if the name cannot
+# be referred to as a bareword. This typically happens if the package
+# name is a reserved Nix keyword.
+sub is_reserved {
+    my ($pkg) = @_;
+
+    return $pkg =~ /^(?: assert    |
+                         else      |
+                         if        |
+                         import    |
+                         in        |
+                         inherit   |
+                         let       |
+                         rec       |
+                         then      |
+                         while     |
+                         with      )$/x;
+}
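+
+# For instance, is_reserved("if") is true: get_deps() below then refers to
+# the dependency as self."if", and the generated attribute name is quoted.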
+
+sub pkg_to_attr {
+    my ($module) = @_;
+    my $attr_name = $module->package_name;
+    if ( $attr_name eq "libwww-perl" ) {
+        return "LWP";
+    }
+    else {
+        $attr_name =~ s/-//g;
+        return $attr_name;
+    }
+}
+
+sub get_pkg_name {
+    my ($module) = @_;
+    return ( $module->package_name, $module->package_version =~ s/^v(\d)/$1/r );
+}
+
+sub read_meta {
+    my ($pkg_path) = @_;
+
+    my $yaml_path = "$pkg_path/META.yml";
+    my $json_path = "$pkg_path/META.json";
+    my $meta;
+
+    if ( -r $json_path ) {
+        $meta = CPAN::Meta->load_file($json_path);
+    }
+    elsif ( -r $yaml_path ) {
+        $meta = CPAN::Meta->load_file($yaml_path);
+    }
+    else {
+        WARN("package has no META.yml or META.json");
+    }
+
+    return $meta;
+}
+
+# Map a module to the attribute corresponding to its package
+# (e.g. HTML::HeadParser will be mapped to HTMLParser, because that
+# module is in the HTML-Parser package).
+sub module_to_pkg {
+    my ( $cb, $module_name ) = @_;
+    my @modules = $cb->search( type => "name", allow => [$module_name] );
+    if ( scalar @modules == 0 ) {
+
+        # Fallback.
+        $module_name =~ s/:://g;
+        return $module_name;
+    }
+    my $module    = $modules[0];
+    my $attr_name = pkg_to_attr($module);
+    DEBUG("mapped dep $module_name to $attr_name");
+    return $attr_name;
+}
+
+sub get_deps {
+    my ( $cb, $meta, $type ) = @_;
+
+    return if !defined $meta;
+
+    my $prereqs = $meta->effective_prereqs;
+    my $deps = $prereqs->requirements_for( $type, "requires" );
+    my @res;
+    foreach my $n ( $deps->required_modules ) {
+        next if $n eq "perl";
+
+        my @core = Module::CoreList->find_modules(qr/^$n$/);
+        next if (@core);
+
+        my $pkg = module_to_pkg( $cb, $n );
+
+        # If the package name is reserved then we need to refer to it
+        # through the "self" variable.
+        $pkg = "self.\"$pkg\"" if is_reserved($pkg);
+
+        push @res, $pkg;
+    }
+    return @res;
+}
+
+sub uniq {
+    return keys %{ { map { $_ => 1 } @_ } };
+}
+
+sub render_license {
+    my ($cpan_license) = @_;
+
+    return if !defined $cpan_license;
+
+    my $licenses;
+
+    # If the license is ambiguous then we'll print an extra warning.
+    # For example, "gpl_2" is ambiguous since it may refer to exactly
+    # "GPL v2" or to "GPL v2 or later".
+    my $amb = 0;
+
+    # Whether the license is available inside `lib.licenses`.
+    my $in_set = 1;
+
+    my $nix_license = $LICENSE_MAP{$cpan_license};
+    if ( !$nix_license ) {
+        WARN("Unknown license: $cpan_license");
+        $licenses = [$cpan_license];
+        $in_set   = 0;
+    }
+    else {
+        $licenses = $nix_license->{licenses};
+        $amb      = $nix_license->{amb};
+        $in_set   = $nix_license->{in_set} // 1;    # absent key means "in lib.licenses"
+    }
+
+    my $license_line;
+
+    if ( @$licenses == 0 ) {
+
+        # Avoid defining the license line.
+    }
+    elsif ($in_set) {
+        my $lic = 'lib.licenses';
+        if ( @$licenses == 1 ) {
+            $license_line = "$lic.$licenses->[0]";
+        }
+        else {
+            $license_line = "with $lic; [ " . join( ' ', @$licenses ) . " ]";
+        }
+    }
+    else {
+        if ( @$licenses == 1 ) {
+            $license_line = $licenses->[0];
+        }
+        else {
+            $license_line = '[ ' . join( ' ', @$licenses ) . ' ]';
+        }
+    }
+
+    INFO("license: $cpan_license");
+    WARN("License '$cpan_license' is ambiguous, please verify") if $amb;
+
+    return $license_line;
+}
+
+sub sha256_to_sri {
+    my ($sha256) = @_;
+    return "sha256-" . encode_base64(pack("H*", $sha256), '');
+}
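+
+# For example, for the well-known SHA-256 digest of the string "abc":
+#   sha256_to_sri("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")
+# should return "sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=".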
+
+my ( $opt, $module_name ) = handle_opts();
+
+Log::Log4perl->easy_init(
+    {
+        level => $opt->debug ? $DEBUG : $INFO,
+        layout => '%m%n'
+    }
+);
+
+my $cb = CPANPLUS::Backend->new;
+
+my @modules = $cb->search( type => "name", allow => [$module_name] );
+die "module $module_name not found\n" if scalar @modules == 0;
+die "multiple packages that match module $module_name\n" if scalar @modules > 1;
+my $module = $modules[0];
+
+my ($pkg_name, $pkg_version) = get_pkg_name $module;
+my $attr_name = pkg_to_attr $module;
+
+INFO( "attribute name: ", $attr_name );
+INFO( "module: ",         $module->module );
+INFO( "version: ",        $module->version );
+INFO( "package: ", $module->package, " (", "$pkg_name-$pkg_version", ", ", $attr_name, ")" );
+INFO( "path: ",    $module->path );
+
+my $tar_path = $module->fetch();
+my $sri_hash = sha256_to_sri($module->status->checksum_value);
+INFO( "downloaded to: ", $tar_path );
+INFO( "hash: ", $sri_hash );
+
+my $pkg_path = $module->extract();
+INFO( "unpacked to: ", $pkg_path );
+
+my $meta = read_meta($pkg_path);
+
+DEBUG( "metadata: ", encode_json( $meta->as_struct ) ) if defined $meta;
+
+my @runtime_deps = sort( uniq( get_deps( $cb, $meta, "runtime" ) ) );
+INFO("runtime deps: @runtime_deps");
+
+my @build_deps = sort( uniq(
+        get_deps( $cb, $meta, "configure" ),
+        get_deps( $cb, $meta, "build" ),
+        get_deps( $cb, $meta, "test" )
+) );
+
+# Filter out runtime dependencies since those are already handled.
+my %in_runtime_deps = map { $_ => 1 } @runtime_deps;
+@build_deps = grep { not $in_runtime_deps{$_} } @build_deps;
+
+INFO("build deps: @build_deps");
+
+my $homepage = $meta ? $meta->resources->{homepage} : undef;
+INFO("homepage: $homepage") if defined $homepage;
+
+my $description = $meta ? $meta->abstract : undef;
+if ( defined $description ) {
+    $description = uc( substr( $description, 0, 1 ) )
+      . substr( $description, 1 );    # capitalise first letter
+    $description =~ s/\.$//;          # remove period at the end
+    $description =~ s/\s*$//;
+    $description =~ s/^\s*//;
+    $description =~ s/\n+/ /g;        # Replace newlines by spaces.
+    INFO("description: $description");
+}
+
+#print(Data::Dumper::Dumper($meta->licenses) . "\n");
+my $license = $meta ? render_license( $meta->licenses ) : undef;
+
+INFO( "RSS feed: https://metacpan.org/feed/distribution/",
+    $module->package_name );
+
+my $build_fun = -e "$pkg_path/Build.PL"
+  && !-e "$pkg_path/Makefile.PL" ? "buildPerlModule" : "buildPerlPackage";
+
+print STDERR "===\n";
+
+print <<EOF;
+  ${\(is_reserved($attr_name) ? "\"$attr_name\"" : $attr_name)} = $build_fun {
+    pname = "$pkg_name";
+    version = "$pkg_version";
+    src = fetchurl {
+      url = "mirror://cpan/${\$module->path}/${\$module->package}";
+      hash = "$sri_hash";
+    };
+EOF
+print <<EOF if scalar @build_deps > 0;
+    buildInputs = [ @build_deps ];
+EOF
+print <<EOF if scalar @runtime_deps > 0;
+    propagatedBuildInputs = [ @runtime_deps ];
+EOF
+print <<EOF;
+    meta = {
+EOF
+print <<EOF if defined $homepage;
+      homepage = "$homepage";
+EOF
+print <<EOF if defined $description && $description ne "Unknown";
+      description = "$description";
+EOF
+print <<EOF if defined $license;
+      license = $license;
+EOF
+print <<EOF if $opt->maintainer;
+      maintainers = [ maintainers.${\$opt->maintainer} ];
+EOF
+print <<EOF;
+    };
+  };
+EOF
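+
+# The emitted expression looks roughly like this (hypothetical package,
+# all values illustrative):
+#
+#   FooBar = buildPerlPackage {
+#     pname = "Foo-Bar";
+#     version = "1.00";
+#     src = fetchurl {
+#       url = "mirror://cpan/authors/id/E/EX/EXAMPLE/Foo-Bar-1.00.tar.gz";
+#       hash = "sha256-...";
+#     };
+#     meta = {
+#       description = "Does something with Foo";
+#       license = with lib.licenses; [ artistic1 gpl1Plus ];
+#     };
+#   };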
diff --git a/nixpkgs/maintainers/scripts/nixpkgs-lint.nix b/nixpkgs/maintainers/scripts/nixpkgs-lint.nix
new file mode 100644
index 000000000000..873905373af0
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nixpkgs-lint.nix
@@ -0,0 +1,24 @@
+{ stdenv, lib, makeWrapper, perl, perlPackages }:
+
+stdenv.mkDerivation {
+  name = "nixpkgs-lint-1";
+
+  nativeBuildInputs = [ makeWrapper ];
+  buildInputs = [ perl perlPackages.XMLSimple ];
+
+  dontUnpack = true;
+  buildPhase = "true";
+
+  installPhase =
+    ''
+      mkdir -p $out/bin
+      cp ${./nixpkgs-lint.pl} $out/bin/nixpkgs-lint
+      wrapProgram $out/bin/nixpkgs-lint --set PERL5LIB $PERL5LIB
+    '';
+
+  meta = with lib; {
+    maintainers = [ maintainers.eelco ];
+    description = "A utility for Nixpkgs contributors to check Nixpkgs for common errors";
+    platforms = platforms.unix;
+  };
+}
diff --git a/nixpkgs/maintainers/scripts/nixpkgs-lint.pl b/nixpkgs/maintainers/scripts/nixpkgs-lint.pl
new file mode 100755
index 000000000000..43fb39413613
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nixpkgs-lint.pl
@@ -0,0 +1,173 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i perl -p perl perlPackages.XMLSimple
+
+use strict;
+use warnings;
+use List::Util qw(min);
+use XML::Simple qw(:strict);
+use Getopt::Long qw(:config gnu_getopt);
+
+# Parse the command line.
+my $path = "<nixpkgs>";
+my $filter = "*";
+my $maintainer;
+
+sub showHelp {
+    print <<EOF;
+Usage: $0 [--package=NAME] [--maintainer=REGEXP] [--file=PATH]
+
+Check Nixpkgs for common errors/problems.
+
+  -p, --package        filter packages by name (default is ‘*’)
+  -m, --maintainer     filter packages by maintainer (case-insensitive regexp)
+  -f, --file           path to Nixpkgs (default is ‘<nixpkgs>’)
+
+Examples:
+  \$ nixpkgs-lint -f /my/nixpkgs -p firefox
+  \$ nixpkgs-lint -f /my/nixpkgs -m eelco
+EOF
+    exit 0;
+}
+
+GetOptions("package|p=s" => \$filter,
+           "maintainer|m=s" => \$maintainer,
+           "file|f=s" => \$path,
+           "help" => sub { showHelp() }
+    ) or exit 1;
+
+# Evaluate Nixpkgs into an XML representation.
+my $xml = `nix-env -f '$path' --arg overlays '[]' -qa '$filter' --xml --meta --drv-path`;
+die "$0: evaluation of ‘$path’ failed\n" if $? != 0;
+
+my $info = XMLin($xml, KeyAttr => { 'item' => '+attrPath', 'meta' => 'name' }, ForceArray => 1, SuppressEmpty => '' ) or die "cannot parse XML output";
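+
+# Sketch of the parsed structure implied by the KeyAttr settings above:
+# $info->{item} is keyed by attribute path and each meta element by name,
+# e.g. $info->{item}{"firefox"}{meta}{description}{value} holds the
+# description string ("firefox" is illustrative).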
+
+# Check meta information.
+print "=== Package meta information ===\n\n";
+my $nrBadNames = 0;
+my $nrMissingMaintainers = 0;
+my $nrMissingPlatforms = 0;
+my $nrMissingDescriptions = 0;
+my $nrBadDescriptions = 0;
+my $nrMissingLicenses = 0;
+
+foreach my $attr (sort keys %{$info->{item}}) {
+    my $pkg = $info->{item}->{$attr};
+
+    my $pkgName = $pkg->{name};
+    my $pkgVersion = "";
+    if ($pkgName =~ /(.*)(-[0-9].*)$/) {
+        $pkgName = $1;
+        $pkgVersion = $2;
+    }
+
+    # Check the maintainers.
+    my @maintainers;
+    my $x = $pkg->{meta}->{maintainers};
+    if (defined $x && $x->{type} eq "strings") {
+        @maintainers = map { $_->{value} } @{$x->{string}};
+    } elsif (defined $x->{value}) {
+        @maintainers = ($x->{value});
+    }
+
+    if (defined $maintainer && scalar(grep { $_ =~ /$maintainer/i } @maintainers) == 0) {
+        delete $info->{item}->{$attr};
+        next;
+    }
+
+    if (scalar @maintainers == 0) {
+        print "$attr: Lacks a maintainer\n";
+        $nrMissingMaintainers++;
+    }
+
+    # Check the platforms.
+    if (!defined $pkg->{meta}->{platforms}) {
+        print "$attr: Lacks a platform\n";
+        $nrMissingPlatforms++;
+    }
+
+    # Package names should not be capitalised.
+    if ($pkgName =~ /^[A-Z]/) {
+        print "$attr: package name ‘$pkgName’ should not be capitalised\n";
+        $nrBadNames++;
+    }
+
+    if ($pkgVersion eq "") {
+        print "$attr: package has no version\n";
+        $nrBadNames++;
+    }
+
+    # Check the license.
+    if (!defined $pkg->{meta}->{license}) {
+        print "$attr: Lacks a license\n";
+        $nrMissingLicenses++;
+    }
+
+    # Check the description.
+    my $description = $pkg->{meta}->{description}->{value};
+    if (!$description) {
+        print "$attr: Lacks a description\n";
+        $nrMissingDescriptions++;
+    } else {
+        my $bad = 0;
+        if ($description =~ /^\s/) {
+            print "$attr: Description starts with whitespace\n";
+            $bad = 1;
+        }
+        if ($description =~ /\s$/) {
+            print "$attr: Description ends with whitespace\n";
+            $bad = 1;
+        }
+        if ($description =~ /\.$/) {
+            print "$attr: Description ends with a period\n";
+            $bad = 1;
+        }
+        if (index(lc($description), lc($attr)) != -1) {
+            print "$attr: Description contains package name\n";
+            $bad = 1;
+        }
+        $nrBadDescriptions++ if $bad;
+    }
+}
+
+print "\n";
+
+# Find packages that have the same name.
+print "=== Package name collisions ===\n\n";
+
+my %pkgsByName;
+
+foreach my $attr (sort keys %{$info->{item}}) {
+    my $pkg = $info->{item}->{$attr};
+    #print STDERR "attr = $attr, name = $pkg->{name}\n";
+    $pkgsByName{$pkg->{name}} //= [];
+    push @{$pkgsByName{$pkg->{name}}}, $pkg;
+}
+
+my $nrCollisions = 0;
+foreach my $name (sort keys %pkgsByName) {
+    my @pkgs = @{$pkgsByName{$name}};
+
+    # Filter attributes that are aliases of each other (e.g. yield the
+    # same derivation path).
+    my %drvsSeen;
+    @pkgs = grep { my $x = $drvsSeen{$_->{drvPath}}; $drvsSeen{$_->{drvPath}} = 1; !defined $x } @pkgs;
+
+    # Filter packages that have a lower priority.
+    my $highest = min (map { $_->{meta}->{priority}->{value} // 0 } @pkgs);
+    @pkgs = grep { ($_->{meta}->{priority}->{value} // 0) == $highest } @pkgs;
+
+    next if scalar @pkgs == 1;
+
+    $nrCollisions++;
+    print "The following attributes evaluate to a package named ‘$name’:\n";
+    print "  ", join(", ", map { $_->{attrPath} } @pkgs), "\n\n";
+}
+
+print "=== Bottom line ===\n";
+print "Number of packages: ", scalar(keys %{$info->{item}}), "\n";
+print "Number of bad names: $nrBadNames\n";
+print "Number of missing maintainers: $nrMissingMaintainers\n";
+print "Number of missing platforms: $nrMissingPlatforms\n";
+print "Number of missing licenses: $nrMissingLicenses\n";
+print "Number of missing descriptions: $nrMissingDescriptions\n";
+print "Number of bad descriptions: $nrBadDescriptions\n";
+print "Number of name collisions: $nrCollisions\n";
diff --git a/nixpkgs/maintainers/scripts/patchelf-hints.sh b/nixpkgs/maintainers/scripts/patchelf-hints.sh
new file mode 100755
index 000000000000..802ecab0f0a1
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/patchelf-hints.sh
@@ -0,0 +1,84 @@
+#!/usr/bin/env bash
+usage() {
+    echo "
+$0 <path to unpacked binary distribution directory>
+
+This program returns the list of libraries and where to find them, based on
+your currently installed programs.
+";
+    exit 1
+}
+
+if test $# -ne 1; then
+  usage
+fi
+
+binaryDist=$1
+
+hasBinaries=false
+for bin in $(find $binaryDist -executable -type f) :; do
+    if test $bin = ":"; then
+        $hasBinaries || \
+            echo "No patchable found in this directory."
+        break
+    fi
+    hasBinaries=true
+
+    echo ""
+    echo "$bin:"
+    hasLibraries=false
+    unset interpreter
+    unset addRPath
+    for lib in $(strings $bin | grep '^\(/\|\)lib.*\.so' | sort | uniq) :; do
+        if test $lib = ":"; then
+            $hasLibraries || \
+                echo "  This program is a script or it is statically linked."
+            break
+        fi
+        hasLibraries=true
+
+        echo "  $lib:";
+
+        libPath=$lib
+        lib=$(basename $lib)
+
+        #versionLessLib=$(echo $lib | sed 's,[.][.0-9]*$,,')
+
+        libs="$(
+            find /nix/store/*/lib* \( -type f -or -type l \) -name $lib |
+            grep -v '\(bootstrap-tools\|system-path\|user-environment\|extra-utils\)'
+        )"
+
+        echo "$libs" |
+        sed 's,^/nix/store/[a-z0-9]*-\([^/]*\)/.*/\([^/]*\)$,    \1 -> \2,' |
+        sort |
+        uniq;
+
+        names=$(
+            echo "$libs" |
+            sed 's,^/nix/store/[a-z0-9]*-\([^/]*\)-[.0-9]*/.*$,\1,' |
+            sort |
+            uniq;
+        )
+
+        if test "$names" = "glibc"; then names="glibc"; fi
+        if echo $names | grep -c "gcc" &> /dev/null; then names="stdenv.cc.cc"; fi
+
+        if test $lib != $libPath; then
+            interpreter="--interpreter \${$names}/lib/$lib"
+        elif echo $addRPath | grep -c "$names" &> /dev/null; then
+            :
+        else
+            addRPath=${addRPath+$addRPath:}"\${$names}/lib"
+        fi
+    done;
+    $hasLibraries && \
+        echo "
+  Patchelf command:
+
+    patchelf $interpreter \\
+      ${addRPath+--set-rpath $addRPath \\
+}      \$out/$bin
+
+"
+done;
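+
+# Example of the suggested command for one binary (store paths and package
+# names illustrative):
+#
+#   patchelf --interpreter ${glibc}/lib/ld-linux-x86-64.so.2 \
+#     --set-rpath ${glibc}/lib:${stdenv.cc.cc}/lib \
+#     $out/bin/foo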
diff --git a/nixpkgs/maintainers/scripts/pluginupdate.py b/nixpkgs/maintainers/scripts/pluginupdate.py
new file mode 100644
index 000000000000..056abda85bfd
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/pluginupdate.py
@@ -0,0 +1,815 @@
+# python library used to update plugins:
+# - pkgs/applications/editors/vim/plugins/update.py
+# - pkgs/applications/editors/kakoune/plugins/update.py
+# - maintainers/scripts/update-luarocks-packages
+
+# format:
+# $ nix run nixpkgs#black maintainers/scripts/pluginupdate.py
+# type-check:
+# $ nix run nixpkgs#python3.pkgs.mypy maintainers/scripts/pluginupdate.py
+# linted:
+# $ nix run nixpkgs#python3.pkgs.flake8 -- --ignore E501,E265 maintainers/scripts/pluginupdate.py
+
+import argparse
+import csv
+import functools
+import http
+import json
+import logging
+import os
+import re
+import subprocess
+import sys
+import time
+import traceback
+import urllib.error
+import urllib.parse
+import urllib.request
+import xml.etree.ElementTree as ET
+from dataclasses import asdict, dataclass
+from datetime import UTC, datetime
+from functools import wraps
+from multiprocessing.dummy import Pool
+from pathlib import Path
+from tempfile import NamedTemporaryFile
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+from urllib.parse import urljoin, urlparse
+
+import git
+
+ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
+ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
+ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "
+
+LOG_LEVELS = {
+    logging.getLevelName(level): level
+    for level in [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
+}
+
+log = logging.getLogger()
+
+
+def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
+    """Retry calling the decorated function using an exponential backoff.
+    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
+    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
+    (BSD licensed)
+    :param ExceptionToCheck: the exception on which to retry
+    :param tries: number of times to try (not retry) before giving up
+    :param delay: initial delay between retries in seconds
+    :param backoff: backoff multiplier e.g. value of 2 will double the delay
+        each retry
+    """
+
+    def deco_retry(f: Callable) -> Callable:
+        @wraps(f)
+        def f_retry(*args: Any, **kwargs: Any) -> Any:
+            mtries, mdelay = tries, delay
+            while mtries > 1:
+                try:
+                    return f(*args, **kwargs)
+                except ExceptionToCheck as e:
+                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
+                    time.sleep(mdelay)
+                    mtries -= 1
+                    mdelay *= backoff
+            return f(*args, **kwargs)
+
+        return f_retry  # true decorator
+
+    return deco_retry
+
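+# Example usage (sketch): retry a flaky fetch up to 4 times, sleeping 3 s,
+# then 6 s, then 12 s between attempts:
+#
+#   @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+#   def fetch(url: str) -> bytes:
+#       with urllib.request.urlopen(url, timeout=10) as req:
+#           return req.read()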
+
+@dataclass
+class FetchConfig:
+    proc: int
+    github_token: str
+
+
+def make_request(url: str, token=None) -> urllib.request.Request:
+    headers = {}
+    if token is not None:
+        headers["Authorization"] = f"token {token}"
+    return urllib.request.Request(url, headers=headers)
+
+
+# a dictionary of plugins and their new repositories
+Redirects = Dict["PluginDesc", "Repo"]
+
+
+class Repo:
+    def __init__(self, uri: str, branch: str) -> None:
+        self.uri = uri
+        """Url to the repo"""
+        self._branch = branch
+        # Redirect is the new Repo to use
+        self.redirect: Optional["Repo"] = None
+        self.token = "dummy_token"
+
+    @property
+    def name(self):
+        return self.uri.split("/")[-1]
+
+    @property
+    def branch(self):
+        return self._branch or "HEAD"
+
+    def __str__(self) -> str:
+        return f"{self.uri}"
+
+    def __repr__(self) -> str:
+        return f"Repo({self.name}, {self.uri})"
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def has_submodules(self) -> bool:
+        return True
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def latest_commit(self) -> Tuple[str, datetime]:
+        log.debug("Latest commit")
+        loaded = self._prefetch(None)
+        updated = datetime.strptime(loaded["date"], "%Y-%m-%dT%H:%M:%S%z")
+
+        return loaded["rev"], updated
+
+    def _prefetch(self, ref: Optional[str]):
+        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
+        if ref is not None:
+            cmd.append(ref)
+        log.debug(cmd)
+        data = subprocess.check_output(cmd)
+        loaded = json.loads(data)
+        return loaded
+
+    def prefetch(self, ref: Optional[str]) -> str:
+        print("Prefetching")
+        loaded = self._prefetch(ref)
+        return loaded["sha256"]
+
+    def as_nix(self, plugin: "Plugin") -> str:
+        return f"""fetchgit {{
+      url = "{self.uri}";
+      rev = "{plugin.commit}";
+      sha256 = "{plugin.sha256}";
+    }}"""
+
+
+class RepoGitHub(Repo):
+    def __init__(self, owner: str, repo: str, branch: str) -> None:
+        self.owner = owner
+        self.repo = repo
+        self.token = None
+        super().__init__(self.url(""), branch)
+        log.debug(
+            "Instantiating github repo owner=%s and repo=%s", self.owner, self.repo
+        )
+
+    @property
+    def name(self):
+        return self.repo
+
+    def url(self, path: str) -> str:
+        res = urljoin(f"https://github.com/{self.owner}/{self.repo}/", path)
+        return res
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def has_submodules(self) -> bool:
+        try:
+            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
+            urllib.request.urlopen(req, timeout=10).close()
+        except urllib.error.HTTPError as e:
+            if e.code == 404:
+                return False
+            else:
+                raise
+        return True
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def latest_commit(self) -> Tuple[str, datetime]:
+        commit_url = self.url(f"commits/{self.branch}.atom")
+        log.debug("Sending request to %s", commit_url)
+        commit_req = make_request(commit_url, self.token)
+        with urllib.request.urlopen(commit_req, timeout=10) as req:
+            self._check_for_redirect(commit_url, req)
+            xml = req.read()
+
+            # Filter out illegal XML characters
+            illegal_xml_regex = re.compile(b"[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F]")
+            xml = illegal_xml_regex.sub(b"", xml)
+
+            root = ET.fromstring(xml)
+            latest_entry = root.find(ATOM_ENTRY)
+            assert latest_entry is not None, f"No commits found in repository {self}"
+            commit_link = latest_entry.find(ATOM_LINK)
+            assert commit_link is not None, f"No link tag found in feed entry {xml}"
+            url = urlparse(commit_link.get("href"))
+            updated_tag = latest_entry.find(ATOM_UPDATED)
+            assert (
+                updated_tag is not None and updated_tag.text is not None
+            ), f"No updated tag found feed entry {xml}"
+            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
+            return Path(str(url.path)).name, updated
+
+    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
+        response_url = req.geturl()
+        if url != response_url:
+            new_owner, new_name = (
+                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
+            )
+
+            new_repo = RepoGitHub(owner=new_owner, repo=new_name, branch=self.branch)
+            self.redirect = new_repo
+
+    def prefetch(self, commit: str) -> str:
+        if self.has_submodules():
+            sha256 = super().prefetch(commit)
+        else:
+            sha256 = self.prefetch_github(commit)
+        return sha256
+
+    def prefetch_github(self, ref: str) -> str:
+        cmd = ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
+        log.debug("Running %s", cmd)
+        data = subprocess.check_output(cmd)
+        return data.strip().decode("utf-8")
+
+    def as_nix(self, plugin: "Plugin") -> str:
+        if plugin.has_submodules:
+            submodule_attr = "\n      fetchSubmodules = true;"
+        else:
+            submodule_attr = ""
+
+        return f"""fetchFromGitHub {{
+      owner = "{self.owner}";
+      repo = "{self.repo}";
+      rev = "{plugin.commit}";
+      sha256 = "{plugin.sha256}";{submodule_attr}
+    }}"""
+
+
+@dataclass(frozen=True)
+class PluginDesc:
+    repo: Repo
+    branch: str
+    alias: Optional[str]
+
+    @property
+    def name(self):
+        if self.alias is None:
+            return self.repo.name
+        else:
+            return self.alias
+
+    def __lt__(self, other):
+        return self.repo.name < other.repo.name
+
+    @staticmethod
+    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
+        branch = row["branch"]
+        repo = make_repo(row["repo"], branch.strip())
+        repo.token = config.github_token
+        return PluginDesc(repo, branch.strip(), row["alias"])
+
+    @staticmethod
+    def load_from_string(config: FetchConfig, line: str) -> "PluginDesc":
+        branch = "HEAD"
+        alias = None
+        uri = line
+        if " as " in uri:
+            uri, alias = uri.split(" as ")
+            alias = alias.strip()
+        if "@" in uri:
+            uri, branch = uri.split("@")
+        repo = make_repo(uri.strip(), branch.strip())
+        repo.token = config.github_token
+        return PluginDesc(repo, branch.strip(), alias)
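+
+    # Example (sketch): the line "owner/repo@develop as myalias" yields a
+    # PluginDesc for the GitHub repository owner/repo, branch "develop",
+    # aliased to "myalias".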
+
+
+@dataclass
+class Plugin:
+    name: str
+    commit: str
+    has_submodules: bool
+    sha256: str
+    date: Optional[datetime] = None
+
+    @property
+    def normalized_name(self) -> str:
+        return self.name.replace(".", "-")
+
+    @property
+    def version(self) -> str:
+        assert self.date is not None
+        return self.date.strftime("%Y-%m-%d")
+
+    def as_json(self) -> Dict[str, str]:
+        copy = self.__dict__.copy()
+        del copy["date"]
+        return copy
+
+
+def load_plugins_from_csv(
+    config: FetchConfig,
+    input_file: Path,
+) -> List[PluginDesc]:
+    log.debug("Load plugins from csv %s", input_file)
+    plugins = []
+    with open(input_file, newline="") as csvfile:
+        log.debug("Writing into %s", input_file)
+        reader = csv.DictReader(
+            csvfile,
+        )
+        for line in reader:
+            plugin = PluginDesc.load_from_csv(config, line)
+            plugins.append(plugin)
+
+    return plugins
+
+
+def run_nix_expr(expr, nixpkgs: str):
+    """
+    :param expr: nix expression to fetch the current plugins
+    :param nixpkgs: path towards a nixpkgs checkout
+    """
+    with CleanEnvironment(nixpkgs) as nix_path:
+        cmd = [
+            "nix",
+            "eval",
+            "--extra-experimental-features",
+            "nix-command",
+            "--impure",
+            "--json",
+            "--expr",
+            expr,
+            "--nix-path",
+            nix_path,
+        ]
+        log.debug("Running command: %s", " ".join(cmd))
+        out = subprocess.check_output(cmd, timeout=90)
+        data = json.loads(out)
+        return data
+
+
+class Editor:
+    """The configuration of the update script."""
+
+    def __init__(
+        self,
+        name: str,
+        root: Path,
+        get_plugins: str,
+        default_in: Optional[Path] = None,
+        default_out: Optional[Path] = None,
+        deprecated: Optional[Path] = None,
+        cache_file: Optional[str] = None,
+    ):
+        log.debug("get_plugins:", get_plugins)
+        self.name = name
+        self.root = root
+        self.get_plugins = get_plugins
+        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
+        self.default_out = default_out or root.joinpath("generated.nix")
+        self.deprecated = deprecated or root.joinpath("deprecated.json")
+        self.cache_file = cache_file or f"{name}-plugin-cache.json"
+        self.nixpkgs_repo = None
+
+    def add(self, args):
+        """CSV spec"""
+        log.debug("called the 'add' command")
+        fetch_config = FetchConfig(args.proc, args.github_token)
+        editor = self
+        for plugin_line in args.add_plugins:
+            log.debug("using plugin_line", plugin_line)
+            pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
+            log.debug("loaded as pdesc", pdesc)
+            append = [pdesc]
+            editor.rewrite_input(
+                fetch_config, args.input_file, editor.deprecated, append=append
+            )
+            plugin, _ = prefetch_plugin(
+                pdesc,
+            )
+            autocommit = not args.no_commit
+            if autocommit:
+                commit(
+                    editor.nixpkgs_repo,
+                    "{drv_name}: init at {version}".format(
+                        drv_name=editor.get_drv_name(plugin.normalized_name),
+                        version=plugin.version,
+                    ),
+                    [args.outfile, args.input_file],
+                )
+
+    # Expects arguments generated by 'update' subparser
+    def update(self, args):
+        """CSV spec"""
+        print("the update member function should be overriden in subclasses")
+
+    def get_current_plugins(self, nixpkgs) -> List[Plugin]:
+        """To fill the cache"""
+        data = run_nix_expr(self.get_plugins, nixpkgs)
+        plugins = []
+        for name, attr in data.items():
+            p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
+            plugins.append(p)
+        return plugins
+
+    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
+        """CSV spec"""
+        return load_plugins_from_csv(config, plugin_file)
+
+    def generate_nix(self, _plugins, _outfile: str):
+        """Returns nothing for now, writes directly to outfile"""
+        raise NotImplementedError()
+
+    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
+        cache: Cache = Cache(self.get_current_plugins(self.nixpkgs), self.cache_file)
+        _prefetch = functools.partial(prefetch, cache=cache)
+
+        def update() -> dict:
+            plugins = self.load_plugin_spec(config, input_file)
+
+            try:
+                pool = Pool(processes=config.proc)
+                results = pool.map(_prefetch, plugins)
+            finally:
+                cache.store()
+
+            plugins, redirects = check_results(results)
+
+            self.generate_nix(plugins, outfile)
+
+            return redirects
+
+        return update
+
+    @property
+    def attr_path(self):
+        return self.name + "Plugins"
+
+    def get_drv_name(self, name: str):
+        return self.attr_path + "." + name
+
+    def rewrite_input(self, *args, **kwargs):
+        return rewrite_input(*args, **kwargs)
+
+    def create_parser(self):
+        common = argparse.ArgumentParser(
+            add_help=False,
+            description=(
+                f"""
+                Updates nix derivations for {self.name} plugins.\n
+                By default from {self.default_in} to {self.default_out}"""
+            ),
+        )
+        common.add_argument(
+            "--nixpkgs",
+            type=str,
+            default=os.getcwd(),
+            help="Adjust log level",
+        )
+        common.add_argument(
+            "--input-names",
+            "-i",
+            dest="input_file",
+            type=Path,
+            default=self.default_in,
+            help="A list of plugins in the form owner/repo",
+        )
+        common.add_argument(
+            "--out",
+            "-o",
+            dest="outfile",
+            default=self.default_out,
+            type=Path,
+            help="Filename to save generated nix code",
+        )
+        common.add_argument(
+            "--proc",
+            "-p",
+            dest="proc",
+            type=int,
+            default=30,
+            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
+        )
+        common.add_argument(
+            "--github-token",
+            "-t",
+            type=str,
+            default=os.getenv("GITHUB_API_TOKEN"),
+            help="""Allows to set --proc to higher values.
+            Uses GITHUB_API_TOKEN environment variables as the default value.""",
+        )
+        common.add_argument(
+            "--no-commit",
+            "-n",
+            action="store_true",
+            default=False,
+            help="Whether to autocommit changes",
+        )
+        common.add_argument(
+            "--debug",
+            "-d",
+            choices=LOG_LEVELS.keys(),
+            default=logging.getLevelName(logging.WARN),
+            help="Adjust log level",
+        )
+
+        main = argparse.ArgumentParser(
+            parents=[common],
+            description=(
+                f"""
+                Updates nix derivations for {self.name} plugins.\n
+                By default from {self.default_in} to {self.default_out}"""
+            ),
+        )
+
+        subparsers = main.add_subparsers(dest="command", required=False)
+        padd = subparsers.add_parser(
+            "add",
+            parents=[],
+            description="Add new plugin",
+            add_help=False,
+        )
+        padd.set_defaults(func=self.add)
+        padd.add_argument(
+            "add_plugins",
+            default=None,
+            nargs="+",
+            help=f"Plugin to add to {self.attr_path} from Github in the form owner/repo",
+        )
+
+        pupdate = subparsers.add_parser(
+            "update",
+            description="Update all or a subset of existing plugins",
+            add_help=False,
+        )
+        pupdate.set_defaults(func=self.update)
+        return main
+
+    def run(
+        self,
+    ):
+        """
+        Convenience function
+        """
+        parser = self.create_parser()
+        args = parser.parse_args()
+        command = args.command or "update"
+        log.setLevel(LOG_LEVELS[args.debug])
+        log.info("Chose to run command: %s", command)
+        self.nixpkgs = args.nixpkgs
+
+        self.nixpkgs_repo = git.Repo(args.nixpkgs, search_parent_directories=True)
+
+        getattr(self, command)(args)
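+
+    # Typical use from an editor-specific update script (sketch, names
+    # illustrative):
+    #
+    #   editor = Editor("kakoune", Path("pkgs/.../kakoune/plugins"), GET_PLUGINS)
+    #   editor.run()  # "update" is the default command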
+
+
+class CleanEnvironment(object):
+    def __init__(self, nixpkgs):
+        self.local_pkgs = nixpkgs
+
+    def __enter__(self) -> str:
+        """
+        local_pkgs = str(Path(__file__).parent.parent.parent)
+        """
+        self.old_environ = os.environ.copy()
+        self.empty_config = NamedTemporaryFile()
+        self.empty_config.write(b"{}")
+        self.empty_config.flush()
+        return f"localpkgs={self.local_pkgs}"
+
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
+        os.environ.update(self.old_environ)
+        self.empty_config.close()
+
+
+def prefetch_plugin(
+    p: PluginDesc,
+    cache: "Optional[Cache]" = None,
+) -> Tuple[Plugin, Optional[Repo]]:
+    repo, branch, alias = p.repo, p.branch, p.alias
+    name = alias or p.repo.name
+    commit = None
+    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
+    commit, date = repo.latest_commit()
+    cached_plugin = cache[commit] if cache else None
+    if cached_plugin is not None:
+        log.debug("Cache hit !")
+        cached_plugin.name = name
+        cached_plugin.date = date
+        return cached_plugin, repo.redirect
+
+    has_submodules = repo.has_submodules()
+    log.debug(f"prefetch {name}")
+    sha256 = repo.prefetch(commit)
+
+    return (
+        Plugin(name, commit, has_submodules, sha256, date=date),
+        repo.redirect,
+    )
+
+
+def print_download_error(plugin: PluginDesc, ex: Exception):
+    print(f"{plugin}: {ex}", file=sys.stderr)
+    ex_traceback = ex.__traceback__
+    tb_lines = [
+        line.rstrip("\n")
+        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
+    ]
+    print("\n".join(tb_lines))
+
+
+def check_results(
+    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
+) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
+    """ """
+    failures: List[Tuple[PluginDesc, Exception]] = []
+    plugins = []
+    redirects: Redirects = {}
+    for pdesc, result, redirect in results:
+        if isinstance(result, Exception):
+            failures.append((pdesc, result))
+        else:
+            new_pdesc = pdesc
+            if redirect is not None:
+                redirects.update({pdesc: redirect})
+                new_pdesc = PluginDesc(redirect, pdesc.branch, pdesc.alias)
+            plugins.append((new_pdesc, result))
+
+    print(f"{len(results) - len(failures)} plugins were checked", end="")
+    if len(failures) == 0:
+        print()
+        return plugins, redirects
+    else:
+        print(f", {len(failures)} plugin(s) could not be downloaded:\n")
+
+        for plugin, exception in failures:
+            print_download_error(plugin, exception)
+
+        sys.exit(1)
+
+
+def make_repo(uri: str, branch) -> Repo:
+    """Instantiate a Repo with the correct specialization depending on server (gitub spec)"""
+    # dumb check to see if it's of the form owner/repo (=> github) or https://...
+    res = urlparse(uri)
+    if res.netloc in ["github.com", ""]:
+        res = res.path.strip("/").split("/")
+        repo = RepoGitHub(res[0], res[1], branch)
+    else:
+        repo = Repo(uri.strip(), branch)
+    return repo
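+
+# e.g. make_repo("owner/repo", "HEAD") yields a RepoGitHub, while a full URL
+# on another host such as "https://git.example.org/owner/repo" (illustrative)
+# yields a plain Repo.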
+
+
+def get_cache_path(cache_file_name: str) -> Optional[Path]:
+    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
+    if xdg_cache is None:
+        home = os.environ.get("HOME", None)
+        if home is None:
+            return None
+        xdg_cache = str(Path(home, ".cache"))
+
+    return Path(xdg_cache, cache_file_name)
+
+
+class Cache:
+    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
+        self.cache_file = get_cache_path(cache_file_name)
+
+        downloads = {}
+        for plugin in initial_plugins:
+            downloads[plugin.commit] = plugin
+        downloads.update(self.load())
+        self.downloads = downloads
+
+    def load(self) -> Dict[str, Plugin]:
+        if self.cache_file is None or not self.cache_file.exists():
+            return {}
+
+        downloads: Dict[str, Plugin] = {}
+        with open(self.cache_file) as f:
+            data = json.load(f)
+            for attr in data.values():
+                p = Plugin(
+                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
+                )
+                downloads[attr["commit"]] = p
+        return downloads
+
+    def store(self) -> None:
+        if self.cache_file is None:
+            return
+
+        os.makedirs(self.cache_file.parent, exist_ok=True)
+        with open(self.cache_file, "w+") as f:
+            data = {}
+            for name, attr in self.downloads.items():
+                data[name] = attr.as_json()
+            json.dump(data, f, indent=4, sort_keys=True)
+
+    def __getitem__(self, key: str) -> Optional[Plugin]:
+        return self.downloads.get(key, None)
+
+    def __setitem__(self, key: str, value: Plugin) -> None:
+        self.downloads[key] = value
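+
+# The cache file is JSON mapping commit hashes to plugin entries, e.g.
+# (sketch):
+#   { "<commit>": { "name": "...", "commit": "<commit>",
+#                   "has_submodules": false, "sha256": "..." } }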
+
+
+def prefetch(
+    pluginDesc: PluginDesc, cache: Cache
+) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
+    try:
+        plugin, redirect = prefetch_plugin(pluginDesc, cache)
+        cache[plugin.commit] = plugin
+        return (pluginDesc, plugin, redirect)
+    except Exception as e:
+        return (pluginDesc, e, None)
+
+
+def rewrite_input(
+    config: FetchConfig,
+    input_file: Path,
+    deprecated: Path,
+    # old pluginDesc and the new
+    redirects: Redirects = {},
+    append: List[PluginDesc] = [],
+):
+    plugins = load_plugins_from_csv(
+        config,
+        input_file,
+    )
+
+    plugins.extend(append)
+
+    if redirects:
+        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
+        with open(deprecated, "r") as f:
+            deprecations = json.load(f)
+        for pdesc, new_repo in redirects.items():
+            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
+            old_plugin, _ = prefetch_plugin(pdesc)
+            new_plugin, _ = prefetch_plugin(new_pdesc)
+            if old_plugin.normalized_name != new_plugin.normalized_name:
+                deprecations[old_plugin.normalized_name] = {
+                    "new": new_plugin.normalized_name,
+                    "date": cur_date_iso,
+                }
+        with open(deprecated, "w") as f:
+            json.dump(deprecations, f, indent=4, sort_keys=True)
+            f.write("\n")
+
+    with open(input_file, "w") as f:
+        log.debug("Writing into %s", input_file)
+        # fields = dataclasses.fields(PluginDesc)
+        fieldnames = ["repo", "branch", "alias"]
+        writer = csv.DictWriter(f, fieldnames, dialect="unix", quoting=csv.QUOTE_NONE)
+        writer.writeheader()
+        for plugin in sorted(plugins):
+            writer.writerow(asdict(plugin))
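+
+# The rewritten CSV looks like (sketch):
+#   repo,branch,alias
+#   https://github.com/owner/repo/,HEAD,myalias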
+
+
+def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
+    repo.index.add([str(f.resolve()) for f in files])
+
+    if repo.index.diff("HEAD"):
+        print(f'committing to nixpkgs "{message}"')
+        repo.index.commit(message)
+    else:
+        print("no changes in working tree to commit")
+
+
+def update_plugins(editor: Editor, args):
+    """The main entry function of this module.
+    All input arguments are grouped in the `Editor`."""
+
+    log.info("Start updating plugins")
+    fetch_config = FetchConfig(args.proc, args.github_token)
+    update = editor.get_update(args.input_file, args.outfile, fetch_config)
+
+    redirects = update()
+    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)
+
+    autocommit = not args.no_commit
+
+    if autocommit:
+        try:
+            repo = git.Repo(os.getcwd())
+            updated = datetime.now(tz=UTC).strftime('%Y-%m-%d')
+            print(args.outfile)
+            commit(repo,
+                   f"{editor.attr_path}: update on {updated}", [args.outfile]
+                   )
+        except git.InvalidGitRepositoryError as e:
+            print(f"Not in a git repository: {e}", file=sys.stderr)
+            sys.exit(1)
+
+    if redirects:
+        update()
+        if autocommit:
+            commit(
+                editor.nixpkgs_repo,
+                f"{editor.attr_path}: resolve github repository redirects",
+                [args.outfile, args.input_file, editor.deprecated],
+            )
diff --git a/nixpkgs/maintainers/scripts/rebuild-amount.sh b/nixpkgs/maintainers/scripts/rebuild-amount.sh
new file mode 100755
index 000000000000..32810f6b98c0
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/rebuild-amount.sh
@@ -0,0 +1,133 @@
+#!/usr/bin/env bash
+set -e
+
+# --print: avoid dependency on environment
+optPrint=
+if [ "$1" == "--print" ]; then
+    optPrint=true
+    shift
+fi
+
+if [ "$#" != 1 ] && [ "$#" != 2 ]; then
+    cat <<EOF
+    Usage: $0 [--print] from-commit-spec [to-commit-spec]
+        You need to be in a git-controlled nixpkgs tree.
+        The current state of the tree will be used if the second commit is missing.
+
+        Examples:
+          effect of latest commit:
+              $ $0 HEAD^
+              $ $0 --print HEAD^
+          effect of the whole patch series for 'staging' branch:
+              $ $0 origin/staging staging
+EOF
+    exit 1
+fi
+
+# A slightly hacky way to get the config.
+parallel="$(echo 'config.rebuild-amount.parallel or false' | nix-repl . 2>/dev/null \
+            | grep -v '^\(nix-repl.*\)\?$' | tail -n 1 || true)"
+
+echo "Estimating rebuild amount by counting changed Hydra jobs (parallel=${parallel:-unset})."
+
+toRemove=()
+
+cleanup() {
+    rm -rf "${toRemove[@]}"
+}
+trap cleanup EXIT
+
+MKTEMP='mktemp --tmpdir nix-rebuild-amount-XXXXXXXX'
+
+nixexpr() {
+    cat <<EONIX
+        let
+          lib = import $1/lib;
+          hydraJobs = import $1/pkgs/top-level/release.nix
+            # Compromise: accuracy vs. resources needed for evaluation.
+            { supportedSystems = cfg.systems or [ "x86_64-linux" "x86_64-darwin" ]; };
+          cfg = (import $1 {}).config.rebuild-amount or {};
+
+          recurseIntoAttrs = attrs: attrs // { recurseForDerivations = true; };
+
+          # hydraJobs leaves recurseForDerivations as empty attrmaps;
+          # that would break nix-env and we also need to recurse everywhere.
+          tweak = lib.mapAttrs
+            (name: val:
+              if name == "recurseForDerivations" then true
+              else if lib.isAttrs val && val.type or null != "derivation"
+                      then recurseIntoAttrs (tweak val)
+              else val
+            );
+
+          # Some of these contain explicit references to platform(s) we want to avoid;
+          # some even (transitively) depend on ~/.nixpkgs/config.nix (!)
+          blacklist = [
+            "tarball" "metrics" "manual"
+            "darwin-tested" "unstable" "stdenvBootstrapTools"
+            "moduleSystem" "lib-tests" # these just confuse the output
+          ];
+
+        in
+          tweak (builtins.removeAttrs hydraJobs blacklist)
+EONIX
+}
+
+# Output packages in tree $2 that weren't in $1.
+# Changing the output hash or name is taken as a change.
+# Extra nix-env parameters can be in $3
+newPkgs() {
+    # We use files instead of pipes, as running multiple nix-env processes
+    # could eat too much memory for a standard 4GiB machine.
+    local -a list
+    for i in 1 2; do
+        local l="$($MKTEMP)"
+        list[$i]="$l"
+        toRemove+=("$l")
+
+        local expr="$($MKTEMP)"
+        toRemove+=("$expr")
+        nixexpr "${!i}" > "$expr"
+
+        nix-env -f "$expr" -qaP --no-name --out-path --show-trace $3 \
+            | sort > "${list[$i]}" &
+
+        if [ "$parallel" != "true" ]; then
+            wait
+        fi
+    done
+
+    wait
+    comm -13 "${list[@]}"
+}
+
+# Prepare nixpkgs trees.
+declare -a tree
+for i in 1 2; do
+    if [ -n "${!i}" ]; then # use the given commit
+        dir="$($MKTEMP -d)"
+        tree[$i]="$dir"
+        toRemove+=("$dir")
+
+        git clone --shared --no-checkout --quiet . "${tree[$i]}"
+        (cd "${tree[$i]}" && git checkout --quiet "${!i}")
+    else #use the current tree
+        tree[$i]="$(pwd)"
+    fi
+done
+
+newlist="$($MKTEMP)"
+toRemove+=("$newlist")
+# Notes:
+#    - the evaluation is done on x86_64-linux, like on Hydra.
+#    - using $newlist file so that newPkgs() isn't in a sub-shell (because of toRemove)
+newPkgs "${tree[1]}" "${tree[2]}" '--argstr system "x86_64-linux"' > "$newlist"
+
+# Hacky: keep only the last word of each attribute path and sort.
+sed -n 's/\([^. ]*\.\)*\([^. ]*\) .*$/\2/p' < "$newlist" \
+    | sort | uniq -c
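+
+# e.g. a changed "haskellPackages.pandoc" is counted under "pandoc", so the
+# summary contains lines like "      3 pandoc" (counts and names illustrative).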
+
+if [ -n "$optPrint" ]; then
+    echo
+    cat "$newlist"
+fi
diff --git a/nixpkgs/maintainers/scripts/remove-old-aliases.py b/nixpkgs/maintainers/scripts/remove-old-aliases.py
new file mode 100755
index 000000000000..3c5f8edc50ad
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/remove-old-aliases.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i python3 -p "python3.withPackages(ps: with ps; [ ])" nix
+"""
+A program to remove old aliases or convert old aliases to throws
+Example usage:
+./maintainers/scripts/remove-old-aliases.py --year 2018 --file ./pkgs/top-level/aliases.nix
+
+Check this file with mypy after every change!
+$ mypy --strict maintainers/scripts/remove-old-aliases.py
+"""
+import argparse
+import shutil
+import subprocess
+from datetime import date as datetimedate
+from datetime import datetime
+from pathlib import Path
+
+
+def process_args() -> argparse.Namespace:
+    """process args"""
+    arg_parser = argparse.ArgumentParser()
+    arg_parser.add_argument(
+        "--year", required=True, type=int, help="operate on aliases older than $year"
+    )
+    arg_parser.add_argument(
+        "--month",
+        type=int,
+        default=1,
+        help="operate on aliases older than $year-$month",
+    )
+    arg_parser.add_argument(
+        "--only-throws",
+        action="store_true",
+        help="only operate on throws. e.g remove throws older than $date",
+    )
+    arg_parser.add_argument("--file", required=True, type=Path, help="alias file")
+    arg_parser.add_argument(
+        "--dry-run", action="store_true", help="don't modify files, only print results"
+    )
+    return arg_parser.parse_args()
+
+
+def get_date_lists(
+    txt: list[str], cutoffdate: datetimedate, only_throws: bool
+) -> tuple[list[str], list[str], list[str]]:
+    """get a list of lines in which the date is older than $cutoffdate"""
+    date_older_list: list[str] = []
+    date_older_throw_list: list[str] = []
+    date_sep_line_list: list[str] = []
+
+    for lineno, line in enumerate(txt, start=1):
+        line = line.rstrip()
+        my_date = None
+        for string in line.split():
+            string = string.strip(":")
+            try:
+                # strip ':' in case there is a string like "2019-11-01:"
+                my_date = datetime.strptime(string, "%Y-%m-%d").date()
+            except ValueError:
+                try:
+                    my_date = datetime.strptime(string, "%Y-%m").date()
+                except ValueError:
+                    continue
+
+        if (
+            my_date is None
+            or my_date > cutoffdate
+            or "preserve, reason:" in line.lower()
+        ):
+            continue
+
+        if "=" not in line:
+            date_sep_line_list.append(f"{lineno} {line}")
+        # 'if' lines could be complicated
+        elif "if " in line and "if =" not in line:
+            print(f"RESOLVE MANUALLY {line}")
+        elif "throw" in line:
+            date_older_throw_list.append(line)
+        elif not only_throws:
+            date_older_list.append(line)
+
+    return (
+        date_older_list,
+        date_sep_line_list,
+        date_older_throw_list,
+    )
+
+
+def convert_to_throw(date_older_list: list[str]) -> list[tuple[str, str]]:
+    """convert a list of lines to throws"""
+    converted_list = []
+    for line in date_older_list.copy():
+        indent: str = " " * (len(line) - len(line.lstrip()))
+        before_equal = ""
+        after_equal = ""
+        try:
+            before_equal, after_equal = (x.strip() for x in line.split("=", maxsplit=2))
+        except ValueError as err:
+            print(err, line, "\n")
+            date_older_list.remove(line)
+            continue
+
+        alias = before_equal
+        alias_unquoted = before_equal.strip('"')
+        replacement = next(x.strip(";:") for x in after_equal.split())
+        replacement = replacement.removeprefix("pkgs.")
+
+        converted = (
+            f"{indent}{alias} = throw \"'{alias_unquoted}' has been"
+            f" renamed to/replaced by '{replacement}'\";"
+            f" # Converted to throw {datetime.today().strftime('%Y-%m-%d')}"
+        )
+        converted_list.append((line, converted))
+
+    return converted_list
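+
+# Example (hypothetical alias): the input line
+#     foo = bar; # added 2017-01-01
+# is converted to
+#     foo = throw "'foo' has been renamed to/replaced by 'bar'"; # Converted to throw <today>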
+
+
+def generate_text_to_write(
+    txt: list[str],
+    date_older_list: list[str],
+    converted_to_throw: list[tuple[str, str]],
+    date_older_throw_list: list[str],
+) -> list[str]:
+    """generate a list of text to be written to the aliasfile"""
+    text_to_write: list[str] = []
+    for line in txt:
+        text_to_append: str = ""
+        if converted_to_throw:
+            for tupl in converted_to_throw:
+                if line == tupl[0]:
+                    text_to_append = f"{tupl[1]}\n"
+        if line not in date_older_list and line not in date_older_throw_list:
+            text_to_append = f"{line}\n"
+        if text_to_append:
+            text_to_write.append(text_to_append)
+
+    return text_to_write
+
+
+def write_file(
+    aliasfile: Path,
+    text_to_write: list[str],
+) -> None:
+    """write file"""
+    temp_aliasfile = Path(f"{aliasfile}.raliases")
+    with open(temp_aliasfile, "w", encoding="utf-8") as far:
+        for line in text_to_write:
+            far.write(line)
+    print("\nChecking the syntax of the new aliasfile")
+    try:
+        subprocess.run(
+            ["nix-instantiate", "--eval", temp_aliasfile],
+            check=True,
+            stdout=subprocess.DEVNULL,
+        )
+    except subprocess.CalledProcessError:
+        print(
+            "\nSyntax check failed,",
+            "there may have been a line which only has\n"
+            'aliasname = "reason why";\n'
+            "when it should have been\n"
+            'aliasname = throw "reason why";',
+        )
+        temp_aliasfile.unlink()
+        return
+    shutil.move(f"{aliasfile}.raliases", aliasfile)
+    print(f"{aliasfile} modified! please verify with 'git diff'.")
+
+
+def main() -> None:
+    """main"""
+    args = process_args()
+
+    only_throws = args.only_throws
+    aliasfile = Path(args.file).absolute()
+    cutoffdate = (datetime.strptime(f"{args.year}-{args.month}-01", "%Y-%m-%d")).date()
+
+    txt: list[str] = (aliasfile.read_text(encoding="utf-8")).splitlines()
+
+    date_older_list: list[str] = []
+    date_sep_line_list: list[str] = []
+    date_older_throw_list: list[str] = []
+
+    date_older_list, date_sep_line_list, date_older_throw_list = get_date_lists(
+        txt, cutoffdate, only_throws
+    )
+
+    converted_to_throw: list[tuple[str, str]] = []
+    if date_older_list:
+        converted_to_throw = convert_to_throw(date_older_list)
+        print(" Will be converted to throws. ".center(100, "-"))
+        for l_n in date_older_list:
+            print(l_n)
+
+    if date_older_throw_list:
+        print(" Will be removed. ".center(100, "-"))
+        for l_n in date_older_throw_list:
+            print(l_n)
+
+    if date_sep_line_list:
+        print(" On separate line, resolve manually. ".center(100, "-"))
+        for l_n in date_sep_line_list:
+            print(l_n)
+
+    if not args.dry_run:
+        text_to_write = generate_text_to_write(
+            txt, date_older_list, converted_to_throw, date_older_throw_list
+        )
+        write_file(aliasfile, text_to_write)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/nixpkgs/maintainers/scripts/sha-to-sri.py b/nixpkgs/maintainers/scripts/sha-to-sri.py
new file mode 100755
index 000000000000..1af7ff215ad3
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/sha-to-sri.py
@@ -0,0 +1,228 @@
+#!/usr/bin/env nix-shell
+#! nix-shell -i "python3 -I" -p "python3.withPackages(p: with p; [ rich structlog ])"
+
+from abc import ABC, abstractmethod
+from contextlib import contextmanager
+from pathlib import Path
+from structlog.contextvars import bound_contextvars as log_context
+from typing import ClassVar, List, Tuple
+
+import hashlib, re, structlog
+
+
+logger = structlog.getLogger("sha-to-SRI")
+
+
+class Encoding(ABC):
+    alphabet: ClassVar[str]
+
+    @classmethod
+    @property
+    def name(cls) -> str:
+        return cls.__name__.lower()
+
+    def toSRI(self, s: str) -> str:
+        digest = self.decode(s)
+        assert len(digest) == self.n
+
+        from base64 import b64encode
+        return f"{self.hashName}-{b64encode(digest).decode()}"
+
+    @classmethod
+    def all(cls, h) -> 'List[Encoding]':
+        return [ c(h) for c in cls.__subclasses__() ]
+
+    def __init__(self, h):
+        self.n = h.digest_size
+        self.hashName = h.name
+
+    @property
+    @abstractmethod
+    def length(self) -> int:
+        ...
+
+    @property
+    def regex(self) -> str:
+        return f"[{self.alphabet}]{{{self.length}}}"
+
+    @abstractmethod
+    def decode(self, s: str) -> bytes:
+        ...
+
+
+class Nix32(Encoding):
+    alphabet = "0123456789abcdfghijklmnpqrsvwxyz"
+    inverted = { c: i for i, c in enumerate(alphabet) }
+
+    @property
+    def length(self):
+        return 1 + (8 * self.n) // 5
+    def decode(self, s: str):
+        assert len(s) == self.length
+        out = [ 0 for _ in range(self.n) ]
+        # TODO: Do better than a list of byte-sized ints
+
+        for n, c in enumerate(reversed(s)):
+            digit = self.inverted[c]
+            i, j = divmod(5 * n, 8)
+            out[i] = out[i] | (digit << j) & 0xff
+            rem = digit >> (8 - j)
+            if rem == 0:
+                continue
+            elif i + 1 < self.n:
+                out[i+1] = rem
+            else:
+                raise ValueError(f"Invalid nix32 hash: '{s}'")
+
+        return bytes(out)
+
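+# A usage sketch (the all-zero digest below is purely illustrative): nix32 is
+# Nix's little-endian base32 variant, so decoding walks the string in reverse,
+# 5 bits per character:
+#
+#     h = hashlib.new('sha256')
+#     Nix32(h).toSRI(52 * "0")
+#     # -> "sha256-AAA...=" (43 'A's and one '=': the all-zero digest)
+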
+class Hex(Encoding):
+    alphabet = "0-9A-Fa-f"
+
+    @property
+    def length(self):
+        return 2 * self.n
+    def decode(self, s: str):
+        from binascii import unhexlify
+        return unhexlify(s)
+
+class Base64(Encoding):
+    alphabet = "A-Za-z0-9+/"
+
+    @property
+    def format(self) -> Tuple[int, int]:
+        """Number of characters in data and padding."""
+        i, k = divmod(self.n, 3)
+        return 4 * i + (0 if k == 0 else k + 1), (3 - k) % 3
+    @property
+    def length(self):
+        return sum(self.format)
+    @property
+    def regex(self):
+        data, padding = self.format
+        return f"[{self.alphabet}]{{{data}}}={{{padding}}}"
+    def decode(self, s):
+        from base64 import b64decode
+        return b64decode(s, validate = True)
+
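+# Worked example for `Base64.format` above: a sha256 digest is 32 bytes, and
+# divmod(32, 3) == (10, 2), so its base64 form has 4*10 + (2+1) = 43 data
+# characters plus (3 - 2) % 3 = 1 padding '=', i.e. 44 characters in total.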
+
+_HASHES = (hashlib.new(n) for n in ('SHA-256', 'SHA-512'))
+ENCODINGS = {
+    h.name: Encoding.all(h)
+    for h in _HASHES
+}
+
+RE = {
+    h: "|".join(
+        (f"({h}-)?" if e.name == 'base64' else '') +
+        f"(?P<{h}_{e.name}>{e.regex})"
+        for e in encodings
+    ) for h, encodings in ENCODINGS.items()
+}
+
+_DEF_RE = re.compile("|".join(
+    f"(?P<{h}>{h} = (?P<{h}_quote>['\"])({re})(?P={h}_quote);)"
+    for h, re in RE.items()
+))
+
+
+def defToSRI(s: str) -> str:
+    def f(m: re.Match[str]) -> str:
+        try:
+            for h, encodings in ENCODINGS.items():
+                if m.group(h) is None:
+                    continue
+
+                for e in encodings:
+                    s = m.group(f"{h}_{e.name}")
+                    if s is not None:
+                        return f'hash = "{e.toSRI(s)}";'
+
+                raise ValueError(f"Match with '{h}' but no subgroup")
+            raise ValueError("Match with no hash")
+
+        except ValueError as exn:
+            logger.error(
+                "Skipping",
+                exc_info = exn,
+            )
+            return m.group()
+
+    return _DEF_RE.sub(f, s)
+
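+# Example of the rewrite performed by defToSRI (hash values elided):
+#
+#     sha256 = "<64 hex chars>";   ->   hash = "sha256-<44 base64 chars>";
+#
+# Nix32 and bare-base64 forms are matched too; anything that fails to decode
+# is logged and returned unchanged.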
+
+@contextmanager
+def atomicFileUpdate(target: Path):
+    '''Atomically replace the contents of a file.
+
+    Guarantees that no temporary files are left behind, and `target` is either
+    left untouched, or overwritten with new content if no exception was raised.
+
+    Yields a pair `(original, new)` of open files.
+    `original` is the pre-existing file at `target`, open for reading;
+    `new` is an empty, temporary file in the same folder, open for writing.
+
+    Upon exiting the context, the files are closed; if no exception was
+    raised, `new` (atomically) replaces the `target`, otherwise it is deleted.
+    '''
+    # That's mostly copied from noto-emoji.py, should DRY it out
+    from tempfile import mkstemp
+    fd, _p = mkstemp(
+        dir = target.parent,
+        prefix = target.name,
+    )
+    tmpPath = Path(_p)
+
+    try:
+        with target.open() as original:
+            with tmpPath.open('w') as new:
+                yield (original, new)
+
+        tmpPath.replace(target)
+
+    except Exception:
+        tmpPath.unlink(missing_ok = True)
+        raise
+
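+# Usage sketch (mirrors fileToSRI below):
+#
+#     with atomicFileUpdate(Path("default.nix")) as (original, new):
+#         for line in original:
+#             new.write(line)   # transform each line as needed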
+
+def fileToSRI(p: Path):
+    with atomicFileUpdate(p) as (og, new):
+        for i, line in enumerate(og):
+            with log_context(line=i):
+                new.write(defToSRI(line))
+
+
+_SKIP_RE = re.compile(
+    "(generated by)|(do not edit)",
+    re.IGNORECASE
+)
+
+if __name__ == "__main__":
+    from sys import argv, stderr
+    logger.info("Starting!")
+
+    for arg in argv[1:]:
+        p = Path(arg)
+        with log_context(path=str(p)):
+            try:
+                if p.name == "yarn.nix" or p.name.find("generated") != -1:
+                    logger.warning("File looks autogenerated, skipping!")
+                    continue
+
+                with p.open() as f:
+                    for line in f:
+                        if line.strip():
+                            break
+
+                    if _SKIP_RE.search(line):
+                        logger.warning("File looks autogenerated, skipping!")
+                        continue
+
+                fileToSRI(p)
+            except Exception as exn:
+                logger.error(
+                    "Unhandled exception, skipping file!",
+                    exc_info = exn,
+                )
+            else:
+                logger.info("Finished processing file")
diff --git a/nixpkgs/maintainers/scripts/update-channel-branches.sh b/nixpkgs/maintainers/scripts/update-channel-branches.sh
new file mode 100755
index 000000000000..eaa731adccce
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-channel-branches.sh
@@ -0,0 +1,112 @@
+#!/usr/bin/env bash
+set -e
+
+: ${NIXOS_CHANNELS:=https://nixos.org/channels/}
+: ${CHANNELS_NAMESPACE:=refs/heads/channels/}
+
+# List all channels which are currently in the repository which we would
+# have to remove if they are not found again.
+deadChannels=$(git for-each-ref --format="%(refname)" "$CHANNELS_NAMESPACE")
+
+updateRef() {
+    local channelName=$1
+    local newRev=$2
+
+    # if the inputs are not valid, then we do not update any branch.
+    test -z "$newRev" -o -z "$channelName" && return;
+
+    # Update the local refs/heads/channels/* branches to be in-sync with the
+    # channel references.
+    local branch=$CHANNELS_NAMESPACE$channelName
+    oldRev=$(git rev-parse --short "$branch" 2>/dev/null || true)
+    if test "$oldRev" != "$newRev"; then
+        if git update-ref "$branch" "$newRev" 2>/dev/null; then
+            if test -z "$oldRev"; then
+                echo " * [new branch]      $newRev           -> ${branch#refs/heads/}"
+            else
+                echo "                     $oldRev..$newRev  -> ${branch#refs/heads/}"
+            fi
+        else
+            if test -z "$oldRev"; then
+                echo " * [missing rev]     $newRev           -> ${branch#refs/heads/}"
+            else
+                echo "   [missing rev]     $oldRev..$newRev  -> ${branch#refs/heads/}"
+            fi
+        fi
+    fi
+
+    # Filter out the current channel from the list of dead channels.
+    deadChannels=$(grep -v "$CHANNELS_NAMESPACE$channelName" <<EOF
+$deadChannels
+EOF
+) || true
+}
+
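+# For example (revision hypothetical), `updateRef remotes/nixos-unstable abc1234`
+# points refs/heads/channels/remotes/nixos-unstable at abc1234 and prints a
+# `git fetch`-style summary line; it also keeps that channel off the dead list.
+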
+# Find the name of all channels which are listed in the directory.
+echo "Fetching channels from $NIXOS_CHANNELS:"
+for channelName in : $(curl -s "$NIXOS_CHANNELS" | sed -n '/folder/ { s,.*href=",,; s,/".*,,; p }'); do
+    test "$channelName" = : && continue;
+
+    # Do not follow redirections, such that we can extract the
+    # short-changeset from the name of the directory where we are
+    # redirected to.
+    sha1=$(curl -sI "$NIXOS_CHANNELS$channelName" | sed -n '/Location/ { s,.*\.\([a-f0-9]*\)[ \r]*$,\1,; p; }')
+
+    updateRef "remotes/$channelName" "$sha1"
+done
+
+echo "Fetching channels from nixos-version:"
+if currentSystem=$(nixos-version 2>/dev/null); then
+    # If the system is built entirely from a custom nixpkgs checkout,
+    # the version string carries no git annotation. This sed expression
+    # matches version strings that end with ".<sha1> (Name)" and
+    # extracts the sha1.
+    sha1=$(echo "$currentSystem" | sed -n 's,^.*\.\([a-f0-9]*\) *(.*)$,\1,; T skip; p; :skip;')
+
+    updateRef current-system "$sha1"
+fi
+
+echo "Fetching channels from $HOME/.nix-defexpr:"
+for revFile in : $(find -L "$HOME/.nix-defexpr/" -maxdepth 4 -name svn-revision); do
+    test "$revFile" = : && continue;
+
+    # Deconstruct a path such as the following into a channel name:
+    #
+    #   /home/luke/.nix-defexpr/channels_root/nixos/nixpkgs/svn-revision
+    #     channelName = root/nixos
+    #
+    #   /home/luke/.nix-defexpr/channels/nixpkgs/svn-revision
+    #     channelName = nixpkgs
+    #
+    user=${revFile#*.nix-defexpr/channels}
+    repo=${user#*/}
+    repo=${repo%%/*}
+    user=${user%%/*}
+    user=${user#_}
+    test -z "$user" && user=$USER
+    channelName="$user${user:+/}$repo"
+
+    sha1=$(sed -n 's,^.*\.\([a-f0-9]*\)$,\1,; T skip; p; :skip;' "$revFile")
+
+    updateRef "$channelName" "$sha1"
+done
+
+# Suggest to remove channel branches which are no longer found by this
+# script. This is to handle the cases where a local/remote channel
+# disappear. We should not attempt to remove manually any branches, as they
+# might be user branches.
+if test -n "$deadChannels"; then
+
+    echo "
+Some old channel branches are still in your repository, if you
+want to remove them, run the following command(s):
+"
+
+    while read branch; do
+        echo "    git update-ref -d $branch"
+    done <<EOF
+$deadChannels
+EOF
+
+    echo
+fi
diff --git a/nixpkgs/maintainers/scripts/update-dotnet-lockfiles.nix b/nixpkgs/maintainers/scripts/update-dotnet-lockfiles.nix
new file mode 100644
index 000000000000..22ceff1ffa99
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-dotnet-lockfiles.nix
@@ -0,0 +1,72 @@
+/*
+  To run:
+
+      nix-shell maintainers/scripts/update-dotnet-lockfiles.nix
+
+  This script finds all the derivations in nixpkgs that have a 'fetch-deps'
+  attribute, and runs all of them sequentially. This is useful to test changes
+  to 'fetch-deps', 'nuget-to-nix', or other changes to the dotnet build
+  infrastructure. Regular updates should be done through the individual packages'
+  update scripts.
+ */
+let
+  pkgs = import ../.. {};
+
+  inherit (pkgs) lib;
+
+  packagesWith = cond: pkgs:
+    let
+      packagesWithInner = attrs:
+        lib.unique (
+          lib.concatLists (
+            lib.mapAttrsToList (name: elem:
+              let
+                result = builtins.tryEval elem;
+              in
+                if result.success then
+                  let
+                    value = result.value;
+                  in
+                    if lib.isDerivation value then
+                      lib.optional (cond value) value
+                    else
+                      if lib.isAttrs value && (value.recurseForDerivations or false || value.recurseForRelease or false) then
+                        packagesWithInner value
+                      else []
+                else []) attrs));
+    in
+      packagesWithInner pkgs;
+
+  packages =
+    packagesWith (pkgs: pkgs ? fetch-deps) pkgs;
+
+  helpText = ''
+    Please run:
+
+        % nix-shell maintainers/scripts/update-dotnet-lockfiles.nix
+  '';
+
+  fetchScripts = map (p: p.fetch-deps) packages;
+
+in pkgs.stdenv.mkDerivation {
+  name = "nixpkgs-update-dotnet-lockfiles";
+  buildCommand = ''
+    echo ""
+    echo "----------------------------------------------------------------"
+    echo ""
+    echo "Not possible to update packages using \`nix-build\`"
+    echo ""
+    echo "${helpText}"
+    echo "----------------------------------------------------------------"
+    exit 1
+  '';
+  shellHook = ''
+    unset shellHook # do not contaminate nested shells
+    set -e
+    for x in $fetchScripts; do
+      $x
+    done
+    exit
+  '';
+  inherit fetchScripts;
+}
diff --git a/nixpkgs/maintainers/scripts/update-octave-packages b/nixpkgs/maintainers/scripts/update-octave-packages
new file mode 100755
index 000000000000..00a1646184d5
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-octave-packages
@@ -0,0 +1,468 @@
+#!/usr/bin/env nix-shell
+#!nix-shell update-octave-shell.nix -i python3
+
+"""
+Update an Octave package expression by passing in the `.nix` file, or the directory containing it.
+You can pass in multiple files or paths.
+
+You'll likely want to use
+``
+  $ ./update-octave-packages ../../pkgs/development/octave-modules/**/default.nix
+``
+to update all non-pinned libraries in that folder.
+"""
+
+import argparse
+import os
+import pathlib
+import re
+import requests
+import yaml
+from concurrent.futures import ThreadPoolExecutor as Pool
+from packaging.version import Version as _Version
+from packaging.version import InvalidVersion
+from packaging.specifiers import SpecifierSet
+import collections
+import subprocess
+import tempfile
+
+INDEX = "https://raw.githubusercontent.com/gnu-octave/packages/main/packages"
+"""url of Octave packages' source on GitHub"""
+
+EXTENSIONS = ['tar.gz', 'tar.bz2', 'tar', 'zip']
+"""Permitted file extensions. These are evaluated from left to right and the first occurance is returned."""
+
+PRERELEASES = False
+
+GIT = "git"
+
+NIXPKGS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip()
+
+import logging
+logging.basicConfig(level=logging.INFO)
+
+
+class Version(_Version, collections.abc.Sequence):
+
+    def __init__(self, version):
+        super().__init__(version)
+        # We cannot use `str(Version(0.04.21))` because that becomes `0.4.21`
+        # https://github.com/avian2/unidecode/issues/13#issuecomment-354538882
+        self.raw_version = version
+
+    def __getitem__(self, i):
+        return self._version.release[i]
+
+    def __len__(self):
+        return len(self._version.release)
+
+    def __iter__(self):
+        yield from self._version.release
+
+
+def _get_values(attribute, text):
+    """Match attribute in text and return all matches.
+
+    :returns: List of matches.
+    """
+    regex = r'{}\s+=\s+"(.*)";'.format(attribute)
+    regex = re.compile(regex)
+    values = regex.findall(text)
+    return values
+
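+# For example:
+#
+#     _get_values("pname", 'pname = "arduino";')  # -> ["arduino"]
+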
+def _get_unique_value(attribute, text):
+    """Match attribute in text and return unique match.
+
+    :returns: Single match.
+    """
+    values = _get_values(attribute, text)
+    n = len(values)
+    if n > 1:
+        raise ValueError("found too many values for {}".format(attribute))
+    elif n == 1:
+        return values[0]
+    else:
+        raise ValueError("no value found for {}".format(attribute))
+
+def _get_line_and_value(attribute, text):
+    """Match attribute in text. Return the line and the value of the attribute."""
+    regex = r'({}\s+=\s+"(.*)";)'.format(attribute)
+    regex = re.compile(regex)
+    value = regex.findall(text)
+    n = len(value)
+    if n > 1:
+        raise ValueError("found too many values for {}".format(attribute))
+    elif n == 1:
+        return value[0]
+    else:
+        raise ValueError("no value found for {}".format(attribute))
+
+
+def _replace_value(attribute, value, text):
+    """Search and replace value of attribute in text."""
+    old_line, old_value = _get_line_and_value(attribute, text)
+    new_line = old_line.replace(old_value, value)
+    new_text = text.replace(old_line, new_line)
+    return new_text
+
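+# For example:
+#
+#     _replace_value("version", "2.0.0", 'version = "1.0.0";')
+#     # -> 'version = "2.0.0";'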
+
+def _fetch_page(url):
+    r = requests.get(url)
+    if r.status_code == requests.codes.ok:
+        return list(yaml.safe_load_all(r.content))[0]
+    else:
+        raise ValueError("request for {} failed".format(url))
+
+
+def _fetch_github(url):
+    headers = {}
+    token = os.environ.get('GITHUB_API_TOKEN')
+    if token:
+        headers["Authorization"] = f"token {token}"
+    r = requests.get(url, headers=headers)
+
+    if r.status_code == requests.codes.ok:
+        return r.json()
+    else:
+        raise ValueError("request for {} failed".format(url))
+
+
+SEMVER = {
+    'major' : 0,
+    'minor' : 1,
+    'patch' : 2,
+}
+
+
+def _determine_latest_version(current_version, target, versions):
+    """Determine latest version, given `target`, returning the more recent version.
+    """
+    current_version = Version(current_version)
+
+    def _parse_versions(versions):
+        for v in versions:
+            try:
+                yield Version(v)
+            except InvalidVersion:
+                pass
+
+    versions = _parse_versions(versions)
+
+    index = SEMVER[target]
+
+    ceiling = list(current_version[0:index])
+    if len(ceiling) == 0:
+        ceiling = None
+    else:
+        ceiling[-1]+=1
+        ceiling = Version(".".join(map(str, ceiling)))
+
+    # We do not want prereleases
+    versions = SpecifierSet(prereleases=PRERELEASES).filter(versions)
+
+    if ceiling is not None:
+        versions = SpecifierSet(f"<{ceiling}").filter(versions)
+
+    return (max(sorted(versions))).raw_version
+
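+# Worked example: with current_version "1.2.3" and target "minor", the index is
+# 1, the ceiling becomes Version("2"), and only releases < 2 are considered;
+# target "major" yields no ceiling, so any newer release qualifies.
+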
+
+def _get_latest_version_octave_packages(package, extension, current_version, target):
+    """Get latest version and hash from Octave Packages."""
+    url = "{}/{}.yaml".format(INDEX, package)
+    yaml = _fetch_page(url)
+
+    versions = list(map(lambda pv: pv['id'], yaml['versions']))
+    version = _determine_latest_version(current_version, target, versions)
+
+    try:
+        releases = [v for v in yaml['versions'] if v['id'] == version]
+    except KeyError as e:
+        raise KeyError('Could not find version {} for {}'.format(version, package)) from e
+    for release in releases:
+        if release['url'].endswith(extension):
+            sha256 = release['sha256']
+            break
+    else:
+        sha256 = None
+    return version, sha256, None
+
+
+def _get_latest_version_github(package, extension, current_version, target):
+    def strip_prefix(tag):
+        return re.sub("^[^0-9]*", "", tag)
+
+    def get_prefix(string):
+        matches = re.findall(r"^([^0-9]*)", string)
+        return next(iter(matches), "")
+
+    # when invoked as an updateScript, UPDATE_NIX_ATTR_PATH will be set
+    # this allows us to work with packages which live outside of octave-modules
+    attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"octavePackages.{package}")
+    try:
+        homepage = subprocess.check_output(
+            ["nix", "eval", "-f", f"{NIXPGKS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\
+            .decode('utf-8')
+    except Exception as e:
+        raise ValueError(f"Unable to determine homepage: {e}")
+    owner_repo = homepage[len("https://github.com/"):]  # remove prefix
+    owner, repo = owner_repo.split("/")
+
+    url = f"https://api.github.com/repos/{owner}/{repo}/releases"
+    all_releases = _fetch_github(url)
+    releases = list(filter(lambda x: not x['prerelease'], all_releases))
+
+    if len(releases) == 0:
+        raise ValueError(f"{homepage} does not contain any stable releases")
+
+    versions = map(lambda x: strip_prefix(x['tag_name']), releases)
+    version = _determine_latest_version(current_version, target, versions)
+
+    release = next(filter(lambda x: strip_prefix(x['tag_name']) == version, releases))
+    prefix = get_prefix(release['tag_name'])
+    try:
+        sha256 = subprocess.check_output(["nix-prefetch-url", "--type", "sha256", "--unpack", f"{release['tarball_url']}"], stderr=subprocess.DEVNULL)\
+            .decode('utf-8').strip()
+    except subprocess.CalledProcessError:
+        # this may fail if they have both a branch and a tag of the same name, attempt tag name
+        tag_url = str(release['tarball_url']).replace("tarball","tarball/refs/tags")
+        sha256 = subprocess.check_output(["nix-prefetch-url", "--type", "sha256", "--unpack", tag_url], stderr=subprocess.DEVNULL)\
+            .decode('utf-8').strip()
+
+    return version, sha256, prefix
+
+def _get_latest_version_git(package, extension, current_version, target):
+    """NOTE: Unimplemented!"""
+    # attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"octavePackages.{package}")
+    # try:
+    #     download_url = subprocess.check_output(
+    #         ["nix", "--extra-experimental-features", "nix-command", "eval", "-f", f"{NIXPGKS_ROOT}/default.nix", "--raw", f"{attr_path}.src.url"])\
+    #         .decode('utf-8')
+    # except Exception as e:
+    #     raise ValueError(f"Unable to determine download link: {e}")
+
+    # with tempfile.TemporaryDirectory(prefix=attr_path) as new_clone_location:
+    #     subprocess.run(["git", "clone", download_url, new_clone_location])
+    #     newest_commit = subprocess.check_output(
+    #         ["git" "rev-parse" "$(git branch -r)" "|" "tail" "-n" "1"]).decode('utf-8')
+    pass
+
+
+FETCHERS = {
+    'fetchFromGitHub'   :   _get_latest_version_github,
+    'fetchurl'          :   _get_latest_version_octave_packages,
+    'fetchgit'          :   _get_latest_version_git,
+}
+
+
+DEFAULT_SETUPTOOLS_EXTENSION = 'tar.gz'
+
+
+FORMATS = {
+    'setuptools'        :   DEFAULT_SETUPTOOLS_EXTENSION,
+}
+
+def _determine_fetcher(text):
+    # Count occurrences of fetchers.
+    nfetchers = sum(text.count('src = {}'.format(fetcher)) for fetcher in FETCHERS.keys())
+    if nfetchers == 0:
+        raise ValueError("no fetcher.")
+    elif nfetchers > 1:
+        raise ValueError("multiple fetchers.")
+    else:
+        # Then we check which fetcher to use.
+        for fetcher in FETCHERS.keys():
+            if 'src = {}'.format(fetcher) in text:
+                return fetcher
+
+
+def _determine_extension(text, fetcher):
+    """Determine what extension is used in the expression.
+
+    If we use:
+    - fetchurl, we determine the extension from the url.
+    - fetchFromGitHub or fetchgit, we simply use `.tar.gz`.
+    """
+    if fetcher == 'fetchurl':
+        url = _get_unique_value('url', text)
+        extension = os.path.splitext(url)[1]
+
+    elif fetcher == 'fetchFromGitHub' or fetcher == 'fetchgit':
+        if "fetchSubmodules" in text:
+            raise ValueError("fetchFromGitHub fetcher doesn't support submodules")
+        extension = "tar.gz"
+
+    return extension
+
+
+def _update_package(path, target):
+
+    # Read the expression
+    with open(path, 'r') as f:
+        text = f.read()
+
+    # Determine pname. Many files have more than one pname
+    pnames = _get_values('pname', text)
+
+    # Determine version.
+    version = _get_unique_value('version', text)
+
+    # First we check how many fetchers are mentioned.
+    fetcher = _determine_fetcher(text)
+
+    extension = _determine_extension(text, fetcher)
+
+    # Attempt a fetch using each pname, e.g. backports-zoneinfo vs backports.zoneinfo
+    successful_fetch = False
+    for pname in pnames:
+        if fetcher == "fetchgit":
+            logging.warning(f"You must update {pname} MANUALLY!")
+            return { 'path': path, 'target': target, 'pname': pname,
+                     'old_version': version, 'new_version': version }
+        try:
+            new_version, new_sha256, prefix = FETCHERS[fetcher](pname, extension, version, target)
+            successful_fetch = True
+            break
+        except ValueError:
+            continue
+
+    if not successful_fetch:
+        raise ValueError(f"Unable to find correct package using these pnames: {pnames}")
+
+    if new_version == version:
+        logging.info("Path {}: no update available for {}.".format(path, pname))
+        return False
+    elif Version(new_version) <= Version(version):
+        raise ValueError("downgrade for {}.".format(pname))
+    if not new_sha256:
+        raise ValueError("no file available for {}.".format(pname))
+
+    text = _replace_value('version', new_version, text)
+    # upstream hashes are base16- or base32-encoded sha256s; normalize them to SRI to avoid merge conflicts
+    # SRI hashes have been the default format since nix 2.4+
+    sri_hash = subprocess.check_output(["nix", "--extra-experimental-features", "nix-command", "hash", "to-sri", "--type", "sha256", new_sha256]).decode('utf-8').strip()
+
+    # fetchers can specify a sha256, or a sri hash
+    try:
+        text = _replace_value('sha256', sri_hash, text)
+    except ValueError:
+        text = _replace_value('hash', sri_hash, text)
+
+    if fetcher == 'fetchFromGitHub':
+        # in the case of fetchFromGitHub, it's common to see `rev = version;` or `rev = "v${version}";`
+        # in which no string value is meant to be substituted. However, we can just overwrite the previous value.
+        regex = r'(rev\s+=\s+[^;]*;)'
+        regex = re.compile(regex)
+        matches = regex.findall(text)
+        n = len(matches)
+
+        if n == 0:
+            raise ValueError("Unable to find rev value for {}.".format(pname))
+        else:
+            # forcefully rewrite rev, in case tagging conventions changed for a release
+            match = matches[0]
+            text = text.replace(match, f'rev = "refs/tags/{prefix}${{version}}";')
+            # in case there's no prefix, just rewrite without interpolation
+            text = text.replace('"${version}";', 'version;')
+
+    with open(path, 'w') as f:
+        f.write(text)
+
+        logging.info("Path {}: updated {} from {} to {}".format(path, pname, version, new_version))
+
+    result = {
+        'path'  : path,
+        'target': target,
+        'pname': pname,
+        'old_version'   : version,
+        'new_version'   : new_version,
+        #'fetcher'       : fetcher,
+        }
+
+    return result
+
+
+def _update(path, target):
+
+    # We need to read and modify a Nix expression.
+    if os.path.isdir(path):
+        path = os.path.join(path, 'default.nix')
+
+    # If a default.nix does not exist, we quit.
+    if not os.path.isfile(path):
+        logging.info("Path {}: does not exist.".format(path))
+        return False
+
+    # If file is not a Nix expression, we quit.
+    if not path.endswith(".nix"):
+        logging.info("Path {}: does not end with `.nix`.".format(path))
+        return False
+
+    try:
+        return _update_package(path, target)
+    except ValueError as e:
+        logging.warning("Path {}: {}".format(path, e))
+        return False
+
+
+def _commit(path, pname, old_version, new_version, pkgs_prefix="octave: ", **kwargs):
+    """Commit result.
+    """
+
+    msg = f'{pkgs_prefix}{pname}: {old_version} -> {new_version}'
+
+    try:
+        subprocess.check_call([GIT, 'add', path])
+        subprocess.check_call([GIT, 'commit', '-m', msg])
+    except subprocess.CalledProcessError as e:
+        subprocess.check_call([GIT, 'checkout', path])
+        raise subprocess.CalledProcessError(f'Could not commit {path}') from e
+
+    return True
+
+
+def main():
+
+    epilog = """
+environment variables:
+  GITHUB_API_TOKEN\tGitHub API token used when updating github packages
+    """
+    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog)
+    parser.add_argument('package', type=str, nargs='+')
+    parser.add_argument('--target', type=str, choices=SEMVER.keys(), default='major')
+    parser.add_argument('--commit', action='store_true', help='Create a commit for each package update')
+    parser.add_argument('--use-pkgs-prefix', action='store_true', help='Use octavePackages.${pname}: instead of octave: ${pname}: when making commits')
+
+    args = parser.parse_args()
+    target = args.target
+
+    packages = list(map(os.path.abspath, args.package))
+
+    logging.info("Updating packages...")
+
+    # Use threads to update packages concurrently
+    with Pool() as p:
+        results = list(filter(bool, p.map(lambda pkg: _update(pkg, target), packages)))
+
+    logging.info("Finished updating packages.")
+
+    commit_options = {}
+    if args.use_pkgs_prefix:
+        logging.info("Using octavePackages. prefix for commits")
+        commit_options["pkgs_prefix"] = "octavePackages."
+
+    # Commits are created sequentially.
+    if args.commit:
+        logging.info("Committing updates...")
+        # list forces evaluation
+        list(map(lambda x: _commit(**x, **commit_options), results))
+        logging.info("Finished committing updates")
+
+    count = len(results)
+    logging.info("{} package(s) updated".format(count))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/nixpkgs/maintainers/scripts/update-octave-shell.nix b/nixpkgs/maintainers/scripts/update-octave-shell.nix
new file mode 100644
index 000000000000..51d4844c79f3
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-octave-shell.nix
@@ -0,0 +1,12 @@
+{ nixpkgs ? import ../.. { }
+}:
+with nixpkgs;
+let
+  pyEnv = python3.withPackages(ps: with ps; [ packaging requests toolz pyyaml ]);
+in
+mkShell {
+  packages = [
+    pyEnv
+    nix-prefetch-scripts
+  ];
+}
diff --git a/nixpkgs/maintainers/scripts/update-python-libraries b/nixpkgs/maintainers/scripts/update-python-libraries
new file mode 100755
index 000000000000..8717808daaf0
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-python-libraries
@@ -0,0 +1,3 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -I nixpkgs=channel:nixpkgs-unstable -i bash -p "python3.withPackages (ps: with ps; [ packaging requests ])" -p nix-prefetch-git
+exec python3 pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py "$@"
diff --git a/nixpkgs/maintainers/scripts/update-redirected-urls.sh b/nixpkgs/maintainers/scripts/update-redirected-urls.sh
new file mode 100755
index 000000000000..5ffa9aca5f68
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-redirected-urls.sh
@@ -0,0 +1,12 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -p bash curl ripgrep jq -i bash
+
+set -euxo pipefail
+
+# Possibly also handle non-https redirects, but there were none of those when I
+# first wrote this script. Feel free to add that when it becomes relevant.
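+#
+# Each line of the generated script.sed has the form (URLs hypothetical):
+#
+#   s@https://example.org/old@https://example.org/new@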
+curl https://repology.org/api/v1/repository/nix_unstable/problems \
+   | jq -r '.[] | select(.type == "homepage_permanent_https_redirect") | .data | "s@\(.url)@\(.target)@"' \
+   | sort | uniq | tee script.sed
+find -name '*.nix' | xargs -P4 -- sed -f script.sed -i
+rm script.sed
diff --git a/nixpkgs/maintainers/scripts/update-ruby-packages b/nixpkgs/maintainers/scripts/update-ruby-packages
new file mode 100755
index 000000000000..60da1a1b5938
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-ruby-packages
@@ -0,0 +1,16 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p bundler bundix
+
+set -euf -o pipefail
+
+(
+  cd pkgs/development/ruby-modules/with-packages
+  rm -f gemset.nix Gemfile.lock
+  # Since bundler 2+, the lock command generates a platform-dependent
+  # Gemfile.lock, causing bundix to generate a gemset tied to the
+  # platform where it was executed.
+  BUNDLE_FORCE_RUBY_PLATFORM=1 bundle lock
+  bundix
+  mv gemset.nix ../../../top-level/ruby-packages.nix
+  rm -f Gemfile.lock
+)
diff --git a/nixpkgs/maintainers/scripts/update.nix b/nixpkgs/maintainers/scripts/update.nix
new file mode 100755
index 000000000000..3aff32caf581
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update.nix
@@ -0,0 +1,227 @@
+/*
+  To run:
+
+      nix-shell maintainers/scripts/update.nix
+
+  See https://nixos.org/manual/nixpkgs/unstable/#var-passthru-updateScript
+*/
+{ package ? null
+, maintainer ? null
+, predicate ? null
+, path ? null
+, max-workers ? null
+, include-overlays ? false
+, keep-going ? null
+, commit ? null
+}:
+
+let
+  pkgs = import ./../../default.nix (
+    if include-overlays == false then
+      { overlays = []; }
+    else if include-overlays == true then
+      { } # Let Nixpkgs include overlays impurely.
+    else { overlays = include-overlays; }
+  );
+
+  inherit (pkgs) lib;
+
+  /* Remove duplicate elements from the list based on some extracted value. O(n^2) complexity.
+   */
+  nubOn = f: list:
+    if list == [] then
+      []
+    else
+      let
+        x = lib.head list;
+        xs = lib.filter (p: f x != f p) (lib.drop 1 list);
+      in
+        [x] ++ nubOn f xs;
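+  # For example:
+  #   nubOn (x: x.name) [ { name = "a"; } { name = "a"; n = 2; } { name = "b"; } ]
+  #   => [ { name = "a"; } { name = "b"; } ]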
+
+  /* Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
+
+    Type: packagesWithPath :: AttrPath → (AttrPath → derivation → bool) → AttrSet → List<AttrSet{attrPath :: str; package :: derivation; }>
+          AttrPath :: [str]
+
+    The packages will be returned as a list of named pairs comprising of:
+      - attrPath: stringified attribute path (based on `rootPath`)
+      - package: corresponding derivation
+   */
+  packagesWithPath = rootPath: cond: pkgs:
+    let
+      packagesWithPathInner = path: pathContent:
+        let
+          result = builtins.tryEval pathContent;
+
+          somewhatUniqueRepresentant =
+            { package, attrPath }: {
+              inherit (package) updateScript;
+              # Some updaters use the same `updateScript` value for all packages.
+              # Also compare `meta.description`.
+              position = package.meta.position or null;
+              # We cannot always use `meta.position` since it might not be available
+              # or it might be shared among multiple packages.
+            };
+
+          dedupResults = lst: nubOn somewhatUniqueRepresentant (lib.concatLists lst);
+        in
+          if result.success then
+            let
+              evaluatedPathContent = result.value;
+            in
+              if lib.isDerivation evaluatedPathContent then
+                lib.optional (cond path evaluatedPathContent) { attrPath = lib.concatStringsSep "." path; package = evaluatedPathContent; }
+              else if lib.isAttrs evaluatedPathContent then
+                # If user explicitly points to an attrSet or it is marked for recursion, we recur.
+                if path == rootPath || evaluatedPathContent.recurseForDerivations or false || evaluatedPathContent.recurseForRelease or false then
+                  dedupResults (lib.mapAttrsToList (name: elem: packagesWithPathInner (path ++ [name]) elem) evaluatedPathContent)
+                else []
+              else []
+          else [];
+    in
+      packagesWithPathInner rootPath pkgs;
+
+  /* Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
+   */
+  packagesWith = packagesWithPath [];
+
+  /* Recursively find all packages in `pkgs` with updateScript matching given predicate.
+   */
+  packagesWithUpdateScriptMatchingPredicate = cond:
+    packagesWith (path: pkg: builtins.hasAttr "updateScript" pkg && cond path pkg);
+
+  /* Recursively find all packages in `pkgs` with updateScript by given maintainer.
+   */
+  packagesWithUpdateScriptAndMaintainer = maintainer':
+    let
+      maintainer =
+        if ! builtins.hasAttr maintainer' lib.maintainers then
+          builtins.throw "Maintainer with name `${maintainer'} does not exist in `maintainers/maintainer-list.nix`."
+        else
+          builtins.getAttr maintainer' lib.maintainers;
+    in
+      packagesWithUpdateScriptMatchingPredicate (path: pkg:
+                         (if builtins.hasAttr "maintainers" pkg.meta
+                           then (if builtins.isList pkg.meta.maintainers
+                                   then builtins.elem maintainer pkg.meta.maintainers
+                                   else maintainer == pkg.meta.maintainers
+                                )
+                           else false
+                         )
+                   );
+
+  /* Recursively find all packages under `path` in `pkgs` with updateScript.
+   */
+  packagesWithUpdateScript = path: pkgs:
+    let
+      prefix = lib.splitString "." path;
+      pathContent = lib.attrByPath prefix null pkgs;
+    in
+      if pathContent == null then
+        builtins.throw "Attribute path `${path}` does not exist."
+      else
+        packagesWithPath prefix (path: pkg: builtins.hasAttr "updateScript" pkg)
+                       pathContent;
+
+  /* Find a package under `path` in `pkgs` and require that it has an updateScript.
+   */
+  packageByName = path: pkgs:
+    let
+        package = lib.attrByPath (lib.splitString "." path) null pkgs;
+    in
+      if package == null then
+        builtins.throw "Package with an attribute name `${path}` does not exist."
+      else if ! builtins.hasAttr "updateScript" package then
+        builtins.throw "Package with an attribute name `${path}` does not have a `passthru.updateScript` attribute defined."
+      else
+        { attrPath = path; inherit package; };
+
+  /* List of packages matched based on the CLI arguments.
+   */
+  packages =
+    if package != null then
+      [ (packageByName package pkgs) ]
+    else if predicate != null then
+      packagesWithUpdateScriptMatchingPredicate predicate pkgs
+    else if maintainer != null then
+      packagesWithUpdateScriptAndMaintainer maintainer pkgs
+    else if path != null then
+      packagesWithUpdateScript path pkgs
+    else
+      builtins.throw "No arguments provided.\n\n${helpText}";
+
+  helpText = ''
+    Please run:
+
+        % nix-shell maintainers/scripts/update.nix --argstr maintainer garbas
+
+    to run all update scripts for all packages that list \`garbas\` as a maintainer
+    and have \`updateScript\` defined, or:
+
+        % nix-shell maintainers/scripts/update.nix --argstr package gnome.nautilus
+
+    to run the update script for a specific package, or
+
+        % nix-shell maintainers/scripts/update.nix --arg predicate '(path: pkg: pkg.updateScript.name or null == "gnome-update-script")'
+
+    to run update scripts for all packages matching the given predicate, or
+
+        % nix-shell maintainers/scripts/update.nix --argstr path gnome
+
+    to run update scripts for all packages under an attribute path.
+
+    You can also add
+
+        --argstr max-workers 8
+
+    to increase the number of jobs in parallel, or
+
+        --argstr keep-going true
+
+    to continue running when a single update fails.
+
+    You can also make the updater automatically commit on your behalf from updateScripts
+    that support it by adding
+
+        --argstr commit true
+  '';
+
+  /* Transform a matched package into an object for update.py.
+   */
+  packageData = { package, attrPath }: {
+    name = package.name;
+    pname = lib.getName package;
+    oldVersion = lib.getVersion package;
+    updateScript = map builtins.toString (lib.toList (package.updateScript.command or package.updateScript));
+    supportedFeatures = package.updateScript.supportedFeatures or [];
+    attrPath = package.updateScript.attrPath or attrPath;
+  };
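+  # A sketch of one element of the resulting list (values hypothetical):
+  #   { name = "hello-2.12"; pname = "hello"; oldVersion = "2.12";
+  #     updateScript = [ "/nix/store/...-update.sh" ];
+  #     supportedFeatures = [ ]; attrPath = "hello"; }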
+
+  /* JSON file with data for update.py.
+   */
+  packagesJson = pkgs.writeText "packages.json" (builtins.toJSON (map packageData packages));
+
+  optionalArgs =
+    lib.optional (max-workers != null) "--max-workers=${max-workers}"
+    ++ lib.optional (keep-going == "true") "--keep-going"
+    ++ lib.optional (commit == "true") "--commit";
+
+  args = [ packagesJson ] ++ optionalArgs;
+
+in pkgs.stdenv.mkDerivation {
+  name = "nixpkgs-update-script";
+  buildCommand = ''
+    echo ""
+    echo "----------------------------------------------------------------"
+    echo ""
+    echo "Not possible to update packages using \`nix-build\`"
+    echo ""
+    echo "${helpText}"
+    echo "----------------------------------------------------------------"
+    exit 1
+  '';
+  shellHook = ''
+    unset shellHook # do not contaminate nested shells
+    exec ${pkgs.python3.interpreter} ${./update.py} ${builtins.concatStringsSep " " args}
+  '';
+}
diff --git a/nixpkgs/maintainers/scripts/update.py b/nixpkgs/maintainers/scripts/update.py
new file mode 100644
index 000000000000..bbed2bda5e03
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update.py
@@ -0,0 +1,247 @@
+from __future__ import annotations
+from typing import Dict, Generator, List, Optional, Tuple
+import argparse
+import asyncio
+import contextlib
+import json
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+class CalledProcessError(Exception):
+    process: asyncio.subprocess.Process
+
+class UpdateFailedException(Exception):
+    pass
+
+def eprint(*args, **kwargs):
+    print(*args, file=sys.stderr, **kwargs)
+
+async def check_subprocess(*args, **kwargs):
+    """
+    Emulate check argument of subprocess.run function.
+    """
+    process = await asyncio.create_subprocess_exec(*args, **kwargs)
+    returncode = await process.wait()
+
+    if returncode != 0:
+        error = CalledProcessError()
+        error.process = process
+
+        raise error
+
+    return process
+
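+# Usage sketch, mirroring subprocess.run(..., check=True):
+#
+#     process = await check_subprocess('git', 'status', cwd=worktree)
+#     # raises CalledProcessError (carrying .process) on a non-zero exit code
+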
+async def run_update_script(nixpkgs_root: str, merge_lock: asyncio.Lock, temp_dir: Optional[Tuple[str, str]], package: Dict, keep_going: bool):
+    worktree: Optional[str] = None
+
+    update_script_command = package['updateScript']
+
+    if temp_dir is not None:
+        worktree, _branch = temp_dir
+
+        # Ensure the worktree is clean before update.
+        await check_subprocess('git', 'reset', '--hard', '--quiet', 'HEAD', cwd=worktree)
+
+        # Update scripts can use $(dirname $0) to get their location but we want to run
+        # their clones in the git worktree, not in the main nixpkgs repo.
+        update_script_command = map(lambda arg: re.sub(r'^{0}'.format(re.escape(nixpkgs_root)), worktree, arg), update_script_command)
+
+    eprint(f" - {package['name']}: UPDATING ...")
+
+    try:
+        update_process = await check_subprocess(
+            'env',
+            f"UPDATE_NIX_NAME={package['name']}",
+            f"UPDATE_NIX_PNAME={package['pname']}",
+            f"UPDATE_NIX_OLD_VERSION={package['oldVersion']}",
+            f"UPDATE_NIX_ATTR_PATH={package['attrPath']}",
+            *update_script_command,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE,
+            cwd=worktree,
+        )
+        update_info = await update_process.stdout.read()
+
+        await merge_changes(merge_lock, package, update_info, temp_dir)
+    except KeyboardInterrupt as e:
+        eprint('Cancelling…')
+        raise asyncio.exceptions.CancelledError()
+    except CalledProcessError as e:
+        eprint(f" - {package['name']}: ERROR")
+        eprint()
+        eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
+        eprint()
+        stderr = await e.process.stderr.read()
+        eprint(stderr.decode('utf-8'))
+        with open(f"{package['pname']}.log", 'wb') as logfile:
+            logfile.write(stderr)
+        eprint()
+        eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
+
+        if not keep_going:
+            raise UpdateFailedException(f"The update script for {package['name']} failed with exit code {e.process.returncode}")
+
+@contextlib.contextmanager
+def make_worktree() -> Generator[Tuple[str, str], None, None]:
+    with tempfile.TemporaryDirectory() as wt:
+        branch_name = f'update-{os.path.basename(wt)}'
+        target_directory = f'{wt}/nixpkgs'
+
+        subprocess.run(['git', 'worktree', 'add', '-b', branch_name, target_directory])
+        yield (target_directory, branch_name)
+        subprocess.run(['git', 'worktree', 'remove', '--force', target_directory])
+        subprocess.run(['git', 'branch', '-D', branch_name])
+
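+# Usage sketch: the worktree and its branch exist only inside the block.
+#
+#     with make_worktree() as (worktree, branch):
+#         ...  # run update scripts against `worktree`, cherry-pick from `branch`
+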
+async def commit_changes(name: str, merge_lock: asyncio.Lock, worktree: str, branch: str, changes: List[Dict]) -> None:
+    for change in changes:
+        # Git can only handle a single index operation at a time
+        async with merge_lock:
+            await check_subprocess('git', 'add', *change['files'], cwd=worktree)
+            commit_message = '{attrPath}: {oldVersion} -> {newVersion}'.format(**change)
+            if 'commitMessage' in change:
+                commit_message = change['commitMessage']
+            elif 'commitBody' in change:
+                commit_message = commit_message + '\n\n' + change['commitBody']
+            await check_subprocess('git', 'commit', '--quiet', '-m', commit_message, cwd=worktree)
+            await check_subprocess('git', 'cherry-pick', branch)
+
+async def check_changes(package: Dict, worktree: str, update_info: str):
+    if 'commit' in package['supportedFeatures']:
+        changes = json.loads(update_info)
+    else:
+        changes = [{}]
+
+    # Try to fill in missing attributes when there is just a single change.
+    if len(changes) == 1:
+        # Dynamic data from the updater takes precedence over static data from passthru.updateScript.
+        if 'attrPath' not in changes[0]:
+            # update.nix is always passing attrPath
+            changes[0]['attrPath'] = package['attrPath']
+
+        if 'oldVersion' not in changes[0]:
+            # update.nix is always passing oldVersion
+            changes[0]['oldVersion'] = package['oldVersion']
+
+        if 'newVersion' not in changes[0]:
+            attr_path = changes[0]['attrPath']
+            obtain_new_version_process = await check_subprocess('nix-instantiate', '--expr', f'with import ./. {{}}; lib.getVersion {attr_path}', '--eval', '--strict', '--json', stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, cwd=worktree)
+            changes[0]['newVersion'] = json.loads((await obtain_new_version_process.stdout.read()).decode('utf-8'))
+
+        if 'files' not in changes[0]:
+            changed_files_process = await check_subprocess('git', 'diff', '--name-only', 'HEAD', stdout=asyncio.subprocess.PIPE, cwd=worktree)
+            changed_files = (await changed_files_process.stdout.read()).splitlines()
+            changes[0]['files'] = changed_files
+
+            if len(changed_files) == 0:
+                return []
+
+    return changes
+
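+# With the 'commit' feature, update_info is expected to be a JSON list like
+# (values hypothetical):
+#
+#     [ { "attrPath": "hello", "oldVersion": "2.12", "newVersion": "2.12.1",
+#         "files": [ "pkgs/applications/misc/hello/default.nix" ] } ]
+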
+async def merge_changes(merge_lock: asyncio.Lock, package: Dict, update_info: str, temp_dir: Optional[Tuple[str, str]]) -> None:
+    if temp_dir is not None:
+        worktree, branch = temp_dir
+        changes = await check_changes(package, worktree, update_info)
+
+        if len(changes) > 0:
+            await commit_changes(package['name'], merge_lock, worktree, branch, changes)
+        else:
+            eprint(f" - {package['name']}: DONE, no changes.")
+    else:
+        eprint(f" - {package['name']}: DONE.")
+
+async def updater(nixpkgs_root: str, temp_dir: Optional[Tuple[str, str]], merge_lock: asyncio.Lock, packages_to_update: asyncio.Queue[Optional[Dict]], keep_going: bool, commit: bool):
+    while True:
+        package = await packages_to_update.get()
+        if package is None:
+            # A sentinel received, we are done.
+            return
+
+        if not ('commit' in package['supportedFeatures'] or 'attrPath' in package):
+            temp_dir = None
+
+        await run_update_script(nixpkgs_root, merge_lock, temp_dir, package, keep_going)
+
+async def start_updates(max_workers: int, keep_going: bool, commit: bool, packages: List[Dict]):
+    merge_lock = asyncio.Lock()
+    packages_to_update: asyncio.Queue[Optional[Dict]] = asyncio.Queue()
+
+    with contextlib.ExitStack() as stack:
+        temp_dirs: List[Optional[Tuple[str, str]]] = []
+
+        # Do not create more workers than there are packages.
+        num_workers = min(max_workers, len(packages))
+
+        nixpkgs_root_process = await check_subprocess('git', 'rev-parse', '--show-toplevel', stdout=asyncio.subprocess.PIPE)
+        nixpkgs_root = (await nixpkgs_root_process.stdout.read()).decode('utf-8').strip()
+
+        # Set up temporary directories when using auto-commit.
+        for i in range(num_workers):
+            temp_dir = stack.enter_context(make_worktree()) if commit else None
+            temp_dirs.append(temp_dir)
+
+        # Fill up the update queue.
+        for package in packages:
+            await packages_to_update.put(package)
+
+        # Add sentinels, one for each worker.
+        # A worker will terminate when it gets a sentinel from the queue.
+        for i in range(num_workers):
+            await packages_to_update.put(None)
+
+        # Prepare updater workers for each temp_dir directory.
+        # At most `num_workers` instances of `run_update_script` will be running at one time.
+        updaters = asyncio.gather(*[updater(nixpkgs_root, temp_dir, merge_lock, packages_to_update, keep_going, commit) for temp_dir in temp_dirs])
+
+        try:
+            # Start updater workers.
+            await updaters
+        except asyncio.exceptions.CancelledError:
+            # When one worker is cancelled, cancel the others too.
+            updaters.cancel()
+        except UpdateFailedException as e:
+            # When one worker fails, cancel the others, as this exception is only thrown when keep_going is false.
+            updaters.cancel()
+            eprint(e)
+            sys.exit(1)
+
+def main(max_workers: int, keep_going: bool, commit: bool, packages_path: str) -> None:
+    with open(packages_path) as f:
+        packages = json.load(f)
+
+    eprint()
+    eprint('Going to run updates for the following packages:')
+    for package in packages:
+        eprint(f" - {package['name']}")
+    eprint()
+
+    confirm = input('Press Enter key to continue...')
+    if confirm == '':
+        eprint()
+        eprint('Running update for:')
+
+        asyncio.run(start_updates(max_workers, keep_going, commit, packages))
+
+        eprint()
+        eprint('Packages updated!')
+        sys.exit()
+    else:
+        eprint('Aborting!')
+        sys.exit(130)
+
+parser = argparse.ArgumentParser(description='Update packages')
+parser.add_argument('--max-workers', '-j', dest='max_workers', type=int, help='Number of updates to run concurrently', nargs='?', default=4)
+parser.add_argument('--keep-going', '-k', dest='keep_going', action='store_true', help='Do not stop after first failure')
+parser.add_argument('--commit', '-c', dest='commit', action='store_true', help='Commit the changes')
+parser.add_argument('packages', help='JSON file containing the list of package names and their update scripts')
+
+if __name__ == '__main__':
+    args = parser.parse_args()
+
+    try:
+        main(args.max_workers, args.keep_going, args.commit, args.packages)
+    except KeyboardInterrupt as e:
+        # Let’s cancel outside of the main loop too.
+        sys.exit(130)
diff --git a/nixpkgs/maintainers/scripts/vanity-manual-equalities.txt b/nixpkgs/maintainers/scripts/vanity-manual-equalities.txt
new file mode 100644
index 000000000000..4a7bc3aea44e
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/vanity-manual-equalities.txt
@@ -0,0 +1,7 @@
+viric	viriketo@gmail.com
+Pjotr Prins	pjotr.public01@thebird.nl
+Pjotr Prins	pjotr.public05@thebird.nl
+Wouter den Breejen	wbreejen
+MarcWeber	marcweber
+Ricardo Correia	Ricardo M. Correia
+ertesx@gmx.de	ertes
diff --git a/nixpkgs/maintainers/scripts/vanity.sh b/nixpkgs/maintainers/scripts/vanity.sh
new file mode 100755
index 000000000000..b879488165df
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/vanity.sh
@@ -0,0 +1,122 @@
+#! /bin/sh
+
+export LANG=C LC_ALL=C LC_COLLATE=C
+
+# Load git log
+raw_git_log="$(git log)"
+git_data="$(echo "$raw_git_log" | grep 'Author:' |
+  sed -e 's/^ *Author://; s/\\//g; s/^ *//; s/ *$//;
+  s/ @ .*//; s/ *[<]/\t/; s/[>]//')"
+
+# Name - nick - email correspondence from log and from maintainer list
+# Also there are a few manual entries
+maintainers="$(cat "$(dirname "$0")/../maintainer-list.nix" |
+  grep '=' | sed -re 's/\\"/''/g;
+  s/[ 	]*([^ 	=]*)[ 	]*=[ 	]*" *(.*[^ ]) *[<](.*)[>] *".*/\1\t\2\t\3/')"
+git_lines="$( ( echo "$git_data";
+    cat "$(dirname "$0")/vanity-manual-equalities.txt") | sort |uniq)"
+
+emails="$(
+    ( echo "$maintainers" | cut -f 3; echo "$git_data" | cut -f 2 ) |
+    sort | uniq | grep -E ".+@.+[.].+"
+    )"
+
+fetchGithubName () {
+    commitid="$(
+        echo "$raw_git_log" | grep -B3 "Author: .*[<]$1[>]" | head -n 3 |
+            grep '^commit ' | tail -n 1 | sed -e 's/^commit //'
+    )"
+    userid="$(
+        curl https://github.com/NixOS/nixpkgs/commit/"$commitid" 2>/dev/null |
+        grep committed -B10 | grep 'href="/' |
+        sed -re 's@.* href="/@@; s@".*@@' |
+        grep -v "/commit/"
+    )";
+    echo "$userid"
+}
+
+[ -n "$NIXPKGS_GITHUB_NAME_CACHE" ] && {
+    echo "$emails" | while read email; do
+        line="$(grep "$email	" "$NIXPKGS_GITHUB_NAME_CACHE")"
+        [ -z "$line" ] && {
+            echo "$email	$(fetchGithubName "$email")" >> \
+                "$NIXPKGS_GITHUB_NAME_CACHE"
+        }
+    done
+}
+
+# For RDF
+normalize_name () {
+    sed -e 's/%/%25/g; s/ /%20/g; s/'\''/%27/g; s/"/%22/g; s/`/%60/g; s/\^/%5e/g; '
+}
+
+denormalize_name () {
+    sed -e 's/%20/ /g; s/%27/'\''/g; s/%22/"/g; s/%60/`/g; s/%5e/^/g; s/%25/%/g;';
+}
+
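+# Example round-trip: normalize_name turns 'Pjotr Prins' into 'Pjotr%20Prins' so
+# names survive the whitespace-separated RDF triples below; denormalize_name
+# reverses the escaping.
+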
+n3="$(mktemp --suffix .n3)"
+
+# «The same person» relation and a sorting hint
+# Full name is something with a space
+(
+echo "$git_lines" | sed -re 's@(.*)\t(.*)@<my://name/\1>	<my://can-be>	<my://name/\2>.@'
+echo "$git_lines" | sed -re 's@(.*)\t(.*)@<my://name/\2>	<my://can-be>	<my://name/\1>.@'
+echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\1>	<my://can-be>	<my://name/\2>.@'
+echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\2>	<my://can-be>	<my://name/\3>.@'
+echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\3>	<my://can-be>	<my://name/\1>.@'
+echo "$git_lines" | grep ' ' | cut -f 1 | sed -e 's@.*@<my://name/&>	<my://is-name>	<my://0>.@'
+echo "$git_lines" | grep -v ' ' | cut -f 1 | sed -e 's@.*@<my://name/&>	<my://is-name>	<my://1>.@'
+echo "$maintainers" | cut -f 2 | sed -e 's@.*@<my://name/&>	<my://is-name>	<my://0>.@'
+[ -n "$NIXPKGS_GITHUB_NAME_CACHE" ] && cat "$NIXPKGS_GITHUB_NAME_CACHE" |
+    grep -v "	$" |
+    sed -re 's@(.*)\t(.*)@<my://name/\1>	<my://at-github>	<my://github/\2>.@'
+) | normalize_name | grep -E '<my://[-a-z]+>' | sort | uniq > "$n3"
+
+# Get transitive closure
+sparql="$(nix-build '<nixpkgs>' -Q -A apache-jena --no-out-link)/bin/sparql"
+name_list="$(
+    "$sparql" --results=TSV --data="$n3" "
+    select ?x ?y ?g where {
+      ?x <my://can-be>+ ?y.
+      ?x <my://is-name> ?g.
+        }
+    " | tail -n +2 |
+    sed -re 's@<my://name/@@g; s@<my://@@g; s@>@@g;' |
+    sort -k 2,3 -t '	'
+)"
+github_name_list="$(
+    "$sparql" --results=TSV --data="$n3" "
+    select ?x ?y where {
+      ?x (<my://can-be>+ / <my://at-github>) ?y.
+        }
+    " | tail -n +2 |
+    sed -re 's@<my://(name|github)/@@g; s@<my://@@g; s@>@@g;'
+)"
+
+# Take first spelling option for every person
+name_list_canonical="$(echo "$name_list" | cut -f 1,2 | uniq -f1)"
+
+cleaner_script="$(echo "$name_list_canonical" | denormalize_name |
+  sed -re 's/(.*)\t(.*)/s#^\2$#\1#g/g')"
+
+# Add github usernames
+if [ -n "$NIXPKGS_GITHUB_NAME_CACHE" ]; then
+    github_adder_script="$(mktemp)"
+    echo "$github_name_list" |
+        grep -E "$(echo "$name_list_canonical" | cut -f 2 |
+        tr '\n' '|' )" |
+    sort | uniq |
+        sed -re 's/(.*)\t(.*)/s| \1$| \1\t\2|g;/' |
+    denormalize_name > "$github_adder_script"
+else
+    github_adder_script='/dev/null'
+fi
+
+echo "$name_list" | denormalize_name
+
+echo
+
+echo "$git_data" | cut -f 1 |
+    sed -e "$cleaner_script" |
+    sort | uniq -c | sort -k1n | sed -rf "$github_adder_script" |
+    sed -re 's/^ *([0-9]+) /\1\t/'