author     Alyssa Ross <hi@alyssa.is>  2023-06-16 06:56:35 +0000
committer  Alyssa Ross <hi@alyssa.is>  2023-06-16 06:56:35 +0000
commit     99fcaeccb89621dd492203ce1f2d551c06f228ed (patch)
tree       41cb730ae07383004789779b0f6e11cb3f4642a3 /nixpkgs/doc
parent     59c5f5ac8682acc13bb22bc29c7cf02f7d75f01f (diff)
parent     75a5ebf473cd60148ba9aec0d219f72e5cf52519 (diff)
Merge branch 'nixos-unstable' of https://github.com/NixOS/nixpkgs
Conflicts:
	nixpkgs/nixos/modules/config/console.nix
	nixpkgs/nixos/modules/services/mail/mailman.nix
	nixpkgs/nixos/modules/services/mail/public-inbox.nix
	nixpkgs/nixos/modules/services/mail/rss2email.nix
	nixpkgs/nixos/modules/services/networking/ssh/sshd.nix
	nixpkgs/pkgs/applications/networking/instant-messengers/dino/default.nix
	nixpkgs/pkgs/applications/networking/irc/weechat/default.nix
	nixpkgs/pkgs/applications/window-managers/sway/default.nix
	nixpkgs/pkgs/build-support/go/module.nix
	nixpkgs/pkgs/build-support/rust/build-rust-package/default.nix
	nixpkgs/pkgs/development/interpreters/python/default.nix
	nixpkgs/pkgs/development/node-packages/overrides.nix
	nixpkgs/pkgs/development/tools/b4/default.nix
	nixpkgs/pkgs/servers/dict/dictd-db.nix
	nixpkgs/pkgs/servers/mail/public-inbox/default.nix
	nixpkgs/pkgs/tools/security/pinentry/default.nix
	nixpkgs/pkgs/tools/text/unoconv/default.nix
	nixpkgs/pkgs/top-level/all-packages.nix
Diffstat (limited to 'nixpkgs/doc')
-rw-r--r-- nixpkgs/doc/.gitignore | 3
-rw-r--r-- nixpkgs/doc/Makefile | 7
-rw-r--r-- nixpkgs/doc/build-aux/pandoc-filters/docbook-writer/rst-roles.lua | 4
-rw-r--r-- nixpkgs/doc/build-aux/pandoc-filters/link-manpages.nix | 28
-rw-r--r-- nixpkgs/doc/build-aux/pandoc-filters/link-unix-man-references.lua | 17
-rw-r--r-- nixpkgs/doc/build-aux/pandoc-filters/myst-reader/roles.lua | 13
-rw-r--r-- nixpkgs/doc/builders/fetchers.chapter.md | 81
-rw-r--r-- nixpkgs/doc/builders/images.xml | 3
-rw-r--r-- nixpkgs/doc/builders/images/appimagetools.section.md | 2
-rw-r--r-- nixpkgs/doc/builders/images/binarycache.section.md | 49
-rw-r--r-- nixpkgs/doc/builders/images/dockertools.section.md | 190
-rw-r--r-- nixpkgs/doc/builders/images/makediskimage.section.md | 108
-rw-r--r-- nixpkgs/doc/builders/images/ocitools.section.md | 2
-rw-r--r-- nixpkgs/doc/builders/images/portableservice.section.md | 81
-rw-r--r-- nixpkgs/doc/builders/packages/cataclysm-dda.section.md | 2
-rw-r--r-- nixpkgs/doc/builders/packages/citrix.section.md | 4
-rw-r--r-- nixpkgs/doc/builders/packages/dlib.section.md | 2
-rw-r--r-- nixpkgs/doc/builders/packages/eclipse.section.md | 6
-rw-r--r-- nixpkgs/doc/builders/packages/firefox.section.md | 2
-rw-r--r-- nixpkgs/doc/builders/packages/ibus.section.md | 2
-rw-r--r-- nixpkgs/doc/builders/packages/weechat.section.md | 2
-rw-r--r-- nixpkgs/doc/builders/special.xml | 3
-rw-r--r-- nixpkgs/doc/builders/special/darwin-builder.section.md | 149
-rw-r--r-- nixpkgs/doc/builders/special/fhs-environments.section.md | 43
-rw-r--r-- nixpkgs/doc/builders/special/makesetuphook.section.md | 37
-rw-r--r-- nixpkgs/doc/builders/special/mkshell.section.md | 4
-rw-r--r-- nixpkgs/doc/builders/special/vm-tools.section.md | 148
-rw-r--r-- nixpkgs/doc/builders/testers.chapter.md | 119
-rw-r--r-- nixpkgs/doc/contributing/coding-conventions.chapter.md | 61
-rw-r--r-- nixpkgs/doc/contributing/contributing-to-documentation.chapter.md | 20
-rw-r--r-- nixpkgs/doc/contributing/quick-start.chapter.md | 2
-rw-r--r-- nixpkgs/doc/contributing/reviewing-contributions.chapter.md | 12
-rw-r--r-- nixpkgs/doc/contributing/submitting-changes.chapter.md | 12
-rw-r--r-- nixpkgs/doc/default.nix | 8
-rw-r--r-- nixpkgs/doc/doc-support/default.nix | 26
-rw-r--r-- nixpkgs/doc/doc-support/lib-function-docs.nix | 37
-rw-r--r-- nixpkgs/doc/doc-support/lib-function-locations.nix | 24
-rw-r--r-- nixpkgs/doc/doc-support/parameters.xml | 10
-rw-r--r-- nixpkgs/doc/doc-support/xmlformat.conf | 72
-rw-r--r-- nixpkgs/doc/functions/generators.section.md | 2
-rw-r--r-- nixpkgs/doc/functions/library.xml | 22
-rw-r--r-- nixpkgs/doc/functions/library/.gitkeep | 0
-rw-r--r-- nixpkgs/doc/functions/library/asserts.xml | 112
-rw-r--r-- nixpkgs/doc/functions/library/attrsets.xml | 1751
-rw-r--r-- nixpkgs/doc/functions/nix-gitignore.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/autoconf.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/automake.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/autopatchelf.section.md | 12
-rw-r--r-- nixpkgs/doc/hooks/breakpoint.section.md | 16
-rw-r--r-- nixpkgs/doc/hooks/cmake.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/gdk-pixbuf.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/ghc.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/gnome.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/index.xml | 27
-rw-r--r-- nixpkgs/doc/hooks/installShellFiles.section.md | 26
-rw-r--r-- nixpkgs/doc/hooks/libiconv.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/libxml2.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/meson.section.md | 26
-rw-r--r-- nixpkgs/doc/hooks/ninja.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/patch-rc-path-hooks.section.md | 50
-rw-r--r-- nixpkgs/doc/hooks/perl.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/pkg-config.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/postgresql-test-hook.section.md | 10
-rw-r--r-- nixpkgs/doc/hooks/python.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/qt-4.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/scons.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/tetex-tex-live.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/unzip.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/validatePkgConfig.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/waf.section.md | 4
-rw-r--r-- nixpkgs/doc/hooks/xcbuild.section.md | 4
-rw-r--r-- nixpkgs/doc/languages-frameworks/agda.section.md | 6
-rw-r--r-- nixpkgs/doc/languages-frameworks/android.section.md | 7
-rw-r--r-- nixpkgs/doc/languages-frameworks/beam.section.md | 33
-rw-r--r-- nixpkgs/doc/languages-frameworks/bower.section.md | 2
-rw-r--r-- nixpkgs/doc/languages-frameworks/chicken.section.md | 6
-rw-r--r-- nixpkgs/doc/languages-frameworks/coq.section.md | 67
-rw-r--r-- nixpkgs/doc/languages-frameworks/crystal.section.md | 4
-rw-r--r-- nixpkgs/doc/languages-frameworks/cuda.section.md | 25
-rw-r--r-- nixpkgs/doc/languages-frameworks/cuelang.section.md | 93
-rw-r--r-- nixpkgs/doc/languages-frameworks/dart.section.md | 65
-rw-r--r-- nixpkgs/doc/languages-frameworks/dhall.section.md | 10
-rw-r--r-- nixpkgs/doc/languages-frameworks/dotnet.section.md | 78
-rw-r--r-- nixpkgs/doc/languages-frameworks/emscripten.section.md | 6
-rw-r--r-- nixpkgs/doc/languages-frameworks/gnome.section.md | 10
-rw-r--r-- nixpkgs/doc/languages-frameworks/go.section.md | 33
-rw-r--r-- nixpkgs/doc/languages-frameworks/haskell.section.md | 1164
-rw-r--r-- nixpkgs/doc/languages-frameworks/hy.section.md | 4
-rw-r--r-- nixpkgs/doc/languages-frameworks/idris.section.md | 2
-rw-r--r-- nixpkgs/doc/languages-frameworks/index.xml | 7
-rw-r--r-- nixpkgs/doc/languages-frameworks/ios.section.md | 4
-rw-r--r-- nixpkgs/doc/languages-frameworks/javascript.section.md | 85
-rw-r--r-- nixpkgs/doc/languages-frameworks/lisp.section.md | 304
-rw-r--r-- nixpkgs/doc/languages-frameworks/lua.section.md | 19
-rw-r--r-- nixpkgs/doc/languages-frameworks/nim.section.md | 4
-rw-r--r-- nixpkgs/doc/languages-frameworks/ocaml.section.md | 24
-rw-r--r-- nixpkgs/doc/languages-frameworks/perl.section.md | 16
-rw-r--r-- nixpkgs/doc/languages-frameworks/pkg-config.section.md | 51
-rw-r--r-- nixpkgs/doc/languages-frameworks/python.section.md | 500
-rw-r--r-- nixpkgs/doc/languages-frameworks/qt.section.md | 74
-rw-r--r-- nixpkgs/doc/languages-frameworks/ruby.section.md | 2
-rw-r--r-- nixpkgs/doc/languages-frameworks/rust.section.md | 385
-rw-r--r-- nixpkgs/doc/languages-frameworks/swift.section.md | 176
-rw-r--r-- nixpkgs/doc/languages-frameworks/texlive.section.md | 20
-rw-r--r-- nixpkgs/doc/languages-frameworks/vim.section.md | 84
-rw-r--r-- nixpkgs/doc/manpage-urls.json | 32
-rw-r--r-- nixpkgs/doc/manual.xml | 15
-rw-r--r-- nixpkgs/doc/module-system/module-system.chapter.md | 105
-rw-r--r-- nixpkgs/doc/old/cross.txt | 10
-rw-r--r-- nixpkgs/doc/preface.chapter.md | 6
-rw-r--r-- nixpkgs/doc/stdenv/cross-compilation.chapter.md | 4
-rw-r--r-- nixpkgs/doc/stdenv/meta.chapter.md | 106
-rw-r--r-- nixpkgs/doc/stdenv/multiple-output.chapter.md | 2
-rw-r--r-- nixpkgs/doc/stdenv/stdenv.chapter.md | 515
-rw-r--r-- nixpkgs/doc/using/configuration.chapter.md | 12
-rw-r--r-- nixpkgs/doc/using/overrides.chapter.md | 6
116 files changed, 4754 insertions, 2958 deletions
diff --git a/nixpkgs/doc/.gitignore b/nixpkgs/doc/.gitignore
index b5c58be03d15..b08285995f66 100644
--- a/nixpkgs/doc/.gitignore
+++ b/nixpkgs/doc/.gitignore
@@ -6,3 +6,6 @@ functions/library/locations.xml
 highlightjs
 manual-full.xml
 out
+result
+result-*
+media
diff --git a/nixpkgs/doc/Makefile b/nixpkgs/doc/Makefile
index f8d2d7248fab..208f23f5023a 100644
--- a/nixpkgs/doc/Makefile
+++ b/nixpkgs/doc/Makefile
@@ -3,7 +3,7 @@ MD_TARGETS=$(addsuffix .xml, $(basename $(shell find . -type f -regex '.*\.md$$'
 PANDOC ?= pandoc
 
 pandoc_media_dir = media
-# NOTE: Keep in sync with NixOS manual (/nixos/doc/manual/md-to-db.sh) and conversion script (/maintainers/scripts/db-to-md.sh).
+# NOTE: Keep in sync with conversion script (/maintainers/scripts/db-to-md.sh).
 # TODO: Remove raw-attribute when we can get rid of DocBook altogether.
 pandoc_commonmark_enabled_extensions = +attributes+fenced_divs+footnotes+bracketed_spans+definition_lists+pipe_tables+raw_attribute
 # Not needed:
@@ -11,7 +11,7 @@ pandoc_commonmark_enabled_extensions = +attributes+fenced_divs+footnotes+bracket
 pandoc_flags = --extract-media=$(pandoc_media_dir) \
 	--lua-filter=$(PANDOC_LUA_FILTERS_DIR)/diagram-generator.lua \
 	--lua-filter=build-aux/pandoc-filters/myst-reader/roles.lua \
-	--lua-filter=build-aux/pandoc-filters/link-unix-man-references.lua \
+	--lua-filter=$(PANDOC_LINK_MANPAGES_FILTER) \
 	--lua-filter=build-aux/pandoc-filters/docbook-writer/rst-roles.lua \
 	--lua-filter=build-aux/pandoc-filters/docbook-writer/labelless-link-is-xref.lua \
 	-f commonmark$(pandoc_commonmark_enabled_extensions)+smart
@@ -19,6 +19,9 @@ pandoc_flags = --extract-media=$(pandoc_media_dir) \
 .PHONY: all
 all: validate format out/html/index.html out/epub/manual.epub
 
+.PHONY: render-md
+render-md: ${MD_TARGETS}
+
 .PHONY: debug
 debug:
 	nix-shell --run "xmloscopy --docbook5 ./manual.xml ./manual-full.xml"
diff --git a/nixpkgs/doc/build-aux/pandoc-filters/docbook-writer/rst-roles.lua b/nixpkgs/doc/build-aux/pandoc-filters/docbook-writer/rst-roles.lua
index 1c745393a04b..5c1b034d0792 100644
--- a/nixpkgs/doc/build-aux/pandoc-filters/docbook-writer/rst-roles.lua
+++ b/nixpkgs/doc/build-aux/pandoc-filters/docbook-writer/rst-roles.lua
@@ -31,6 +31,10 @@ function Code(elem)
       tag = 'command'
     elseif elem.attributes['role'] == 'option' then
       tag = 'option'
+    elseif elem.attributes['role'] == 'var' then
+      tag = 'varname'
+    elseif elem.attributes['role'] == 'env' then
+      tag = 'envar'
     end
 
     if tag ~= nil then
diff --git a/nixpkgs/doc/build-aux/pandoc-filters/link-manpages.nix b/nixpkgs/doc/build-aux/pandoc-filters/link-manpages.nix
new file mode 100644
index 000000000000..2589a7c34251
--- /dev/null
+++ b/nixpkgs/doc/build-aux/pandoc-filters/link-manpages.nix
@@ -0,0 +1,28 @@
+{ pkgs ? import ../../.. {} }:
+let
+  inherit (pkgs) lib;
+  manpageURLs = lib.importJSON (pkgs.path + "/doc/manpage-urls.json");
+in pkgs.writeText "link-manpages.lua" ''
+  --[[
+  Adds links to known man pages that aren't already in a link.
+  ]]
+
+  local manpage_urls = {
+  ${lib.concatStringsSep "\n" (lib.mapAttrsToList (man: url:
+    "  [${builtins.toJSON man}] = ${builtins.toJSON url},") manpageURLs)}
+  }
+
+  traverse = 'topdown'
+
+  -- Returning false as the second value aborts processing of child elements.
+  function Link(elem)
+    return elem, false
+  end
+
+  function Code(elem)
+    local is_man_role = elem.classes:includes('interpreted-text') and elem.attributes['role'] == 'manpage'
+    if is_man_role and manpage_urls[elem.text] ~= nil then
+      return pandoc.Link(elem, manpage_urls[elem.text]), false
+    end
+  end
+''
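
How `$(PANDOC_LINK_MANPAGES_FILTER)` in the Makefile change above gets populated is not shown in this commit; presumably the documentation build realises the generator above. A minimal sketch of doing that by hand (the invocation itself is an assumption, not part of the commit):

```nix
# Evaluate the filter generator with its default nixpkgs argument, yielding a
# derivation for link-manpages.lua; nix-building this expression produces the
# store path that pandoc would receive via --lua-filter.
import ./build-aux/pandoc-filters/link-manpages.nix { }
```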
diff --git a/nixpkgs/doc/build-aux/pandoc-filters/link-unix-man-references.lua b/nixpkgs/doc/build-aux/pandoc-filters/link-unix-man-references.lua
deleted file mode 100644
index e437ac73a1cb..000000000000
--- a/nixpkgs/doc/build-aux/pandoc-filters/link-unix-man-references.lua
+++ /dev/null
@@ -1,17 +0,0 @@
---[[
-Turns a manpage reference into a link, when a mapping is defined below.
-]]
-
-local man_urls = {
-  ["tmpfiles.d(5)"] = "https://www.freedesktop.org/software/systemd/man/tmpfiles.d.html",
-  ["nix.conf(5)"] = "https://nixos.org/manual/nix/stable/#sec-conf-file",
-  ["systemd.time(7)"] = "https://www.freedesktop.org/software/systemd/man/systemd.time.html",
-  ["systemd.timer(5)"] = "https://www.freedesktop.org/software/systemd/man/systemd.timer.html",
-}
-
-function Code(elem)
-  local is_man_role = elem.classes:includes('interpreted-text') and elem.attributes['role'] == 'manpage'
-  if is_man_role and man_urls[elem.text] ~= nil then
-    return pandoc.Link(elem, man_urls[elem.text])
-  end
-end
diff --git a/nixpkgs/doc/build-aux/pandoc-filters/myst-reader/roles.lua b/nixpkgs/doc/build-aux/pandoc-filters/myst-reader/roles.lua
index c33a688eeba7..f4ef6d390b40 100644
--- a/nixpkgs/doc/build-aux/pandoc-filters/myst-reader/roles.lua
+++ b/nixpkgs/doc/build-aux/pandoc-filters/myst-reader/roles.lua
@@ -17,9 +17,16 @@ function Inlines(inlines)
     if correct_tags then
       -- docutils supports alphanumeric strings separated by [-._:]
       -- We are slightly more liberal for simplicity.
-      local role = first.text:match('^{([-._+:%w]+)}$')
-      if role ~= nil then
-        inlines:remove(i)
+      -- Allow preceding punctuation (e.g. '('), otherwise '({file}`...`)'
+      -- does not match. Also allow anything followed by a non-breaking space
+      -- since pandoc emits those after certain abbreviations (e.g. e.g.).
+      local prefix, role = first.text:match('^(.*){([-._+:%w]+)}$')
+      if role ~= nil and (prefix == '' or prefix:match("^.*[%p ]$") ~= nil) then
+        if prefix == '' then
+          inlines:remove(i)
+        else
+          first.text = prefix
+        end
         second.attributes['role'] = role
         second.classes:insert('interpreted-text')
       end
diff --git a/nixpkgs/doc/builders/fetchers.chapter.md b/nixpkgs/doc/builders/fetchers.chapter.md
index 12d8a5d887fd..4d4f3f427cd4 100644
--- a/nixpkgs/doc/builders/fetchers.chapter.md
+++ b/nixpkgs/doc/builders/fetchers.chapter.md
@@ -3,7 +3,7 @@
 Building software with Nix often requires downloading source code and other files from the internet.
 `nixpkgs` provides *fetchers* for different protocols and services. Fetchers are functions that simplify downloading files.
 
-## Caveats
+## Caveats {#chap-pkgs-fetchers-caveats}
 
 Fetchers create [fixed output derivations](https://nixos.org/manual/nix/stable/#fixed-output-drvs) from downloaded files.
 Nix can reuse the downloaded files via the hash of the resulting derivation.
@@ -14,7 +14,7 @@ For example, consider the following fetcher:
 ```nix
 fetchurl {
   url = "http://www.example.org/hello-1.0.tar.gz";
-  sha256 = "0v6r3wwnsk5pdjr188nip3pjgn1jrn5pc5ajpcfy6had6b3v4dwm";
+  hash = "sha256-lTeyxzJNQeMdu1IVdovNMtgn77jRIhSybLdMbTkf2Ww=";
 };
 ```
 
@@ -23,17 +23,17 @@ A common mistake is to update a fetcher’s URL, or a version parameter, without
 ```nix
 fetchurl {
   url = "http://www.example.org/hello-1.1.tar.gz";
-  sha256 = "0v6r3wwnsk5pdjr188nip3pjgn1jrn5pc5ajpcfy6had6b3v4dwm";
+  hash = "sha256-lTeyxzJNQeMdu1IVdovNMtgn77jRIhSybLdMbTkf2Ww=";
 };
 ```
 
 **This will reuse the old contents**.
-Remember to invalidate the hash argument, in this case by setting the `sha256` attribute to an empty string.
+Remember to invalidate the hash argument, in this case by setting the `hash` attribute to an empty string.
 
 ```nix
 fetchurl {
   url = "http://www.example.org/hello-1.1.tar.gz";
-  sha256 = "";
+  hash = "";
 };
 ```
 
@@ -42,14 +42,14 @@ Use the resulting error message to determine the correct hash.
 ```
 error: hash mismatch in fixed-output derivation '/path/to/my.drv':
          specified: sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
-            got:    sha256-RApQUm78dswhBLC/rfU9y0u6pSAzHceIJqgmetRD24E=
+            got:    sha256-lTeyxzJNQeMdu1IVdovNMtgn77jRIhSybLdMbTkf2Ww=
 ```
 
 A similar problem arises while testing changes to a fetcher's implementation. If the output of the derivation already exists in the Nix store, test failures can go undetected. The [`invalidateFetcherByDrvHash`](#tester-invalidateFetcherByDrvHash) function helps prevent reusing cached derivations.
 
 ## `fetchurl` and `fetchzip` {#fetchurl}
 
-Two basic fetchers are `fetchurl` and `fetchzip`. Both of these have two required arguments, a URL and a hash. The hash is typically `sha256`, although many more hash algorithms are supported. Nixpkgs contributors are currently recommended to use `sha256`. This hash will be used by Nix to identify your source. A typical usage of `fetchurl` is provided below.
+Two basic fetchers are `fetchurl` and `fetchzip`. Both of these have two required arguments, a URL and a hash. The hash is typically given via the `hash` attribute (an SRI hash), although attributes for specific hash algorithms are also supported. Nixpkgs contributors are currently recommended to use `hash`. This hash will be used by Nix to identify your source. A typical usage of `fetchurl` is provided below.
 
 ```nix
 { stdenv, fetchurl }:
@@ -58,7 +58,7 @@ stdenv.mkDerivation {
   name = "hello";
   src = fetchurl {
     url = "http://www.example.org/hello.tar.gz";
-    sha256 = "1111111111111111111111111111111111111111111111111111";
+    hash = "sha256-BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB=";
   };
 }
 ```
@@ -71,23 +71,24 @@ The main difference between `fetchurl` and `fetchzip` is in how they store the c
 
 - `relative`: Similar to using `git-diff`'s `--relative` flag, only keep changes inside the specified directory, making paths relative to it.
 - `stripLen`: Remove the first `stripLen` components of pathnames in the patch.
+- `decode`: Pipe the downloaded data through this command before processing it as a patch.
 - `extraPrefix`: Prefix pathnames by this string.
 - `excludes`: Exclude files matching these patterns (applies after the above arguments).
 - `includes`: Include only files matching these patterns (applies after the above arguments).
 - `revert`: Revert the patch.
 
-Note that because the checksum is computed after applying these effects, using or modifying these arguments will have no effect unless the `sha256` argument is changed as well.
+Note that because the checksum is computed after applying these effects, using or modifying these arguments will have no effect unless the `hash` argument is changed as well.
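
In the manual these arguments belong to `fetchpatch`; a hedged sketch combining a few of them (the URL, paths, and patterns are made up, and the empty `hash` follows the invalidation advice above):

```nix
fetchpatch {
  # Hypothetical upstream patch URL.
  url = "https://example.org/patches/fix-build.patch";
  # Drop the first path component, then re-root the paths under a vendored tree.
  stripLen = 1;
  extraPrefix = "vendor/mylib/";
  # Skip hunks touching documentation.
  excludes = [ "docs/*" ];
  # Leave empty on the first build and copy the correct hash from the error message.
  hash = "";
}
```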
 
 
 Most other fetchers return a directory rather than a single file.
 
 ## `fetchsvn` {#fetchsvn}
 
-Used with Subversion. Expects `url` to a Subversion directory, `rev`, and `sha256`.
+Used with Subversion. Expects `url` to a Subversion directory, `rev`, and `hash`.
 
 ## `fetchgit` {#fetchgit}
 
-Used with Git. Expects `url` to a Git repo, `rev`, and `sha256`. `rev` in this case can be full the git commit id (SHA1 hash) or a tag name like `refs/tags/v1.0`.
+Used with Git. Expects `url` to a Git repo, `rev`, and `hash`. `rev` in this case can be the full Git commit id (SHA-1 hash) or a tag name like `refs/tags/v1.0`.
 
 Additionally, the following optional arguments can be given: `fetchSubmodules = true` makes `fetchgit` also fetch the submodules of a repository. If `deepClone` is set to true, the entire repository is cloned as opposed to just creating a shallow clone. `deepClone = true` also implies `leaveDotGit = true` which means that the `.git` directory of the clone won't be removed after checkout.
 
@@ -100,42 +101,47 @@ stdenv.mkDerivation {
   name = "hello";
   src = fetchgit {
     url = "https://...";
-    sparseCheckout = ''
-      directory/to/be/included
-      another/directory
-    '';
-    sha256 = "0000000000000000000000000000000000000000000000000000";
+    sparseCheckout = [
+      "directory/to/be/included"
+      "another/directory"
+    ];
+    hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
   };
 }
 ```
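
A hedged sketch of the submodule-related options mentioned above (the repository URL and hash are placeholders):

```nix
fetchgit {
  url = "https://example.org/some/repo.git";
  rev = "refs/tags/v1.0";
  # Also fetch the repository's submodules.
  fetchSubmodules = true;
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
}
```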
 
 ## `fetchfossil` {#fetchfossil}
 
-Used with Fossil. Expects `url` to a Fossil archive, `rev`, and `sha256`.
+Used with Fossil. Expects `url` to a Fossil archive, `rev`, and `hash`.
 
 ## `fetchcvs` {#fetchcvs}
 
-Used with CVS. Expects `cvsRoot`, `tag`, and `sha256`.
+Used with CVS. Expects `cvsRoot`, `tag`, and `hash`.
 
 ## `fetchhg` {#fetchhg}
 
-Used with Mercurial. Expects `url`, `rev`, and `sha256`.
+Used with Mercurial. Expects `url`, `rev`, and `hash`.
 
 A number of fetcher functions wrap part of `fetchurl` and `fetchzip`. They are mainly convenience functions intended for commonly used destinations of source code in Nixpkgs. These wrapper fetchers are listed below.
 
 ## `fetchFromGitea` {#fetchfromgitea}
 
-`fetchFromGitea` expects five arguments. `domain` is the gitea server name. `owner` is a string corresponding to the Gitea user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every Gitea HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g `v1.0`) that will be downloaded from Git. Finally, `sha256` corresponds to the hash of the extracted directory. Again, other hash algorithms are also available but `sha256` is currently preferred.
+`fetchFromGitea` expects five arguments. `domain` is the Gitea server name. `owner` is a string corresponding to the Gitea user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every Gitea HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g. `v1.0`) that will be downloaded from Git. Finally, `hash` corresponds to the hash of the extracted directory. Again, attributes for other hash algorithms are also available, but `hash` is currently preferred.
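
A hedged sketch (the instance, repository, and hash below are placeholders):

```nix
fetchFromGitea {
  domain = "codeberg.org";   # any Gitea-compatible instance
  owner = "someuser";
  repo = "somerepo";
  rev = "v1.0";
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
}
```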
 
 ## `fetchFromGitHub` {#fetchfromgithub}
 
-`fetchFromGitHub` expects four arguments. `owner` is a string corresponding to the GitHub user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every GitHub HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g `v1.0`) that will be downloaded from Git. Finally, `sha256` corresponds to the hash of the extracted directory. Again, other hash algorithms are also available, but `sha256` is currently preferred.
+`fetchFromGitHub` expects four arguments. `owner` is a string corresponding to the GitHub user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every GitHub HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g. `v1.0`) that will be downloaded from Git. Finally, `hash` corresponds to the hash of the extracted directory. Again, attributes for other hash algorithms are also available, but `hash` is currently preferred.
+
+To use a different GitHub instance, use `githubBase` (defaults to `"github.com"`).
 
 `fetchFromGitHub` uses `fetchzip` to download the source archive generated by GitHub for the specified revision. If `leaveDotGit`, `deepClone` or `fetchSubmodules` are set to `true`, `fetchFromGitHub` will use `fetchgit` instead. Refer to its section for documentation of these options.
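
For illustration, a typical call might look like the following (the hash is a placeholder):

```nix
fetchFromGitHub {
  owner = "NixOS";
  repo = "nix";
  rev = "2.11.1";   # a tag or a full commit hash
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
}
```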
 
 ## `fetchFromGitLab` {#fetchfromgitlab}
 
-This is used with GitLab repositories. The arguments expected are very similar to `fetchFromGitHub` above.
+This is used with GitLab repositories. It behaves similarly to `fetchFromGitHub`, and expects `owner`, `repo`, `rev`, and `hash`.
+
+To use a specific GitLab instance, use `domain` (defaults to `"gitlab.com"`).
+
 
 ## `fetchFromGitiles` {#fetchfromgitiles}
 
@@ -143,7 +149,7 @@ This is used with Gitiles repositories. The arguments expected are similar to `f
 
 ## `fetchFromBitbucket` {#fetchfrombitbucket}
 
-This is used with BitBucket repositories. The arguments expected are very similar to fetchFromGitHub above.
+This is used with BitBucket repositories. The arguments expected are very similar to `fetchFromGitHub` above.
 
 ## `fetchFromSavannah` {#fetchfromsavannah}
 
@@ -156,10 +162,37 @@ This is used with repo.or.cz repositories. The arguments expected are very simil
 ## `fetchFromSourcehut` {#fetchfromsourcehut}
 
 This is used with sourcehut repositories. Similar to `fetchFromGitHub` above,
-it expects `owner`, `repo`, `rev` and `sha256`, but don't forget the tilde (~)
+it expects `owner`, `repo`, `rev` and `hash`, but don't forget the tilde (~)
 in front of the username! Expected arguments also include `vc` ("git" (default)
 or "hg"), `domain` and `fetchSubmodules`.
 
 If `fetchSubmodules` is `true`, `fetchFromSourcehut` uses `fetchgit`
 or `fetchhg` with `fetchSubmodules` or `fetchSubrepos` set to `true`,
 respectively. Otherwise, the fetcher uses `fetchzip`.
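
A hedged sketch (values are placeholders; note the leading tilde in `owner`):

```nix
fetchFromSourcehut {
  owner = "~someuser";
  repo = "somerepo";
  rev = "v1.0";
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
}
```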
+
+## `requireFile` {#requirefile}
+
+`requireFile` allows requesting files that cannot be fetched automatically, but whose content is known.
+This is a useful last-resort workaround for license restrictions that prohibit redistribution, or for downloads that are only accessible after authenticating interactively in a browser.
+If the requested file is present in the Nix store, the resulting derivation will not be built, because its expected output is already available.
+Otherwise, the builder will run, but fail with a message explaining to the user how to provide the file. The following code, for example:
+
+```
+requireFile {
+  name = "jdk-${version}_linux-x64_bin.tar.gz";
+  url = "https://www.oracle.com/java/technologies/javase-jdk11-downloads.html";
+  sha256 = "94bd34f85ee38d3ef59e5289ec7450b9443b924c55625661fffe66b03f2c8de2";
+}
+```
+results in this error message:
+```
+***
+Unfortunately, we cannot download file jdk-11.0.10_linux-x64_bin.tar.gz automatically.
+Please go to https://www.oracle.com/java/technologies/javase-jdk11-downloads.html to download it yourself, and add it to the Nix store
+using either
+  nix-store --add-fixed sha256 jdk-11.0.10_linux-x64_bin.tar.gz
+or
+  nix-prefetch-url --type sha256 file:///path/to/jdk-11.0.10_linux-x64_bin.tar.gz
+
+***
+```
diff --git a/nixpkgs/doc/builders/images.xml b/nixpkgs/doc/builders/images.xml
index cd10d69a96dd..a4661ab5a7af 100644
--- a/nixpkgs/doc/builders/images.xml
+++ b/nixpkgs/doc/builders/images.xml
@@ -9,4 +9,7 @@
  <xi:include href="images/dockertools.section.xml" />
  <xi:include href="images/ocitools.section.xml" />
  <xi:include href="images/snaptools.section.xml" />
+ <xi:include href="images/portableservice.section.xml" />
+ <xi:include href="images/makediskimage.section.xml" />
+ <xi:include href="images/binarycache.section.xml" />
 </chapter>
diff --git a/nixpkgs/doc/builders/images/appimagetools.section.md b/nixpkgs/doc/builders/images/appimagetools.section.md
index 67e63dc5f61a..0c72315a26e8 100644
--- a/nixpkgs/doc/builders/images/appimagetools.section.md
+++ b/nixpkgs/doc/builders/images/appimagetools.section.md
@@ -35,7 +35,7 @@ appimageTools.wrapType2 { # or wrapType1
   name = "patchwork";
   src = fetchurl {
     url = "https://github.com/ssbc/patchwork/releases/download/v3.11.4/Patchwork-3.11.4-linux-x86_64.AppImage";
-    sha256 = "1blsprpkvm0ws9b96gb36f0rbf8f5jgmw4x6dsb1kswr4ysf591s";
+    hash = "sha256-OqTitCeZ6xmWbqYTXp8sDrmVgTNjPZNW0hzUPW++mq4=";
   };
   extraPkgs = pkgs: with pkgs; [ ];
 }
diff --git a/nixpkgs/doc/builders/images/binarycache.section.md b/nixpkgs/doc/builders/images/binarycache.section.md
new file mode 100644
index 000000000000..62e47dad7c66
--- /dev/null
+++ b/nixpkgs/doc/builders/images/binarycache.section.md
@@ -0,0 +1,49 @@
+# pkgs.mkBinaryCache {#sec-pkgs-binary-cache}
+
+`pkgs.mkBinaryCache` is a function for creating Nix flat-file binary caches. Such a cache exists as a directory on disk, and can be used as a Nix substituter by passing `--substituter file:///path/to/cache` to Nix commands.
+
+Nix packages are most commonly shared between machines using [HTTP, SSH, or S3](https://nixos.org/manual/nix/stable/package-management/sharing-packages.html), but a flat-file binary cache can still be useful in some situations. For example, you can copy it directly to another machine, or make it available on a network file system. It can also be a convenient way to make some Nix packages available inside a container via bind-mounting.
+
+Note that this function is meant for advanced use-cases. The more idiomatic way to work with flat-file binary caches is via the [nix-copy-closure](https://nixos.org/manual/nix/stable/command-ref/nix-copy-closure.html) command. You may also want to consider [dockerTools](#sec-pkgs-dockerTools) for your containerization needs.
+
+## Example {#sec-pkgs-binary-cache-example}
+
+The following derivation will construct a flat-file binary cache containing the closure of `hello`.
+
+```nix
+mkBinaryCache {
+  rootPaths = [hello];
+}
+```
+
+- `rootPaths` specifies a list of root derivations. The transitive closure of these derivations' outputs will be copied into the cache.
+
+Here's an example of building and using the cache.
+
+Build the cache on one machine, `host1`:
+
+```shellSession
+nix-build -E 'with import <nixpkgs> {}; mkBinaryCache { rootPaths = [hello]; }'
+```
+
+```shellSession
+/nix/store/cc0562q828rnjqjyfj23d5q162gb424g-binary-cache
+```
+
+Copy the resulting directory to the other machine, `host2`:
+
+```shellSession
+scp result host2:/tmp/hello-cache
+```
+
+Substitute the derivation using the flat-file binary cache on the other machine, `host2`:
+```shellSession
+nix-build -A hello '<nixpkgs>' \
+  --option require-sigs false \
+  --option trusted-substituters file:///tmp/hello-cache \
+  --option substituters file:///tmp/hello-cache
+```
+
+```shellSession
+/nix/store/gl5a41azbpsadfkfmbilh9yk40dh5dl0-hello-2.12.1
+```
diff --git a/nixpkgs/doc/builders/images/dockertools.section.md b/nixpkgs/doc/builders/images/dockertools.section.md
index 6fdd4b5cadd3..3ac4f224b5de 100644
--- a/nixpkgs/doc/builders/images/dockertools.section.md
+++ b/nixpkgs/doc/builders/images/dockertools.section.md
@@ -62,6 +62,8 @@ The above example will build a Docker image `redis/latest` from the given base i
 
 - `config` is used to specify the configuration of the containers that will be started off the built image in Docker. The available options are listed in the [Docker Image Specification v1.2.0](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions).
 
+- `architecture` is _optional_ and used to specify the image architecture; this is useful for multi-architecture builds that don't need cross compiling. If not specified, it will default to `hostPlatform`. See the example after this list.
+
 - `diskSize` is used to specify the disk size of the VM used to build the image in megabytes. By default it's 1024 MiB.
 
 - `buildVMMemorySize` is used to specify the memory size of the VM to build the image in megabytes. By default it's 512 MiB.
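
A hedged sketch of the new `architecture` argument (the image name, command, and the `"arm64"` value are assumptions, not taken from this commit):

```nix
dockerTools.buildImage {
  name = "hello-arm64";
  tag = "latest";
  # Assumption: produce an arm64 image from a non-arm64 host without cross compiling.
  architecture = "arm64";
  copyToRoot = [ pkgs.hello ];
  config.Cmd = [ "${pkgs.hello}/bin/hello" ];
}
```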
@@ -141,7 +143,9 @@ Create a Docker image with many of the store paths being on their own layer to i
 
 `config` _optional_
 
-: Run-time configuration of the container. A full list of the options are available at in the [Docker Image Specification v1.2.0](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions).
+: Run-time configuration of the container. A full list of the options available is in the [Docker Image Specification v1.2.0](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions).
 
     *Default:* `{}`
 
+`architecture` _optional_
+
+: Used to specify the image architecture. This is useful for multi-architecture builds that don't need cross compiling. If not specified, it will default to `hostPlatform`.
+
@@ -245,10 +249,10 @@ Its parameters are described in the example below:
 pullImage {
   imageName = "nixos/nix";
   imageDigest =
-    "sha256:20d9485b25ecfd89204e843a962c1bd70e9cc6858d65d7f5fadc340246e2116b";
+    "sha256:473a2b527958665554806aea24d0131bacec46d23af09fef4598eeab331850fa";
   finalImageName = "nix";
-  finalImageTag = "1.11";
-  sha256 = "0mqjy3zq2v6rrhizgb9nvhczl87lcfphq9601wcprdika2jz7qh8";
+  finalImageTag = "2.11.1";
+  sha256 = "sha256-qvhj+Hlmviz+KEBVmsyPIzTB3QlVAFzwAY1zDPIBGxc=";
   os = "linux";
   arch = "x86_64";
 }
@@ -308,7 +312,44 @@ The parameters relative to the base image have the same synopsis as described in
 
 The `name` argument is the name of the derivation output, which defaults to `fromImage.name`.
 
-## shadowSetup {#ssec-pkgs-dockerTools-shadowSetup}
+## Environment Helpers {#ssec-pkgs-dockerTools-helpers}
+
+Some packages expect certain files to be available globally.
+When building an image from scratch (i.e. without `fromImage`), these files are missing.
+`pkgs.dockerTools` provides some helpers to set up an environment with the necessary files.
+You can include them in `copyToRoot` like this:
+
+```nix
+buildImage {
+  name = "environment-example";
+  copyToRoot = with pkgs.dockerTools; [
+    usrBinEnv
+    binSh
+    caCertificates
+    fakeNss
+  ];
+}
+```
+
+### usrBinEnv {#sssec-pkgs-dockerTools-helpers-usrBinEnv}
+
+This provides the `env` utility at `/usr/bin/env`.
+
+### binSh {#sssec-pkgs-dockerTools-helpers-binSh}
+
+This provides `bashInteractive` at `/bin/sh`.
+
+### caCertificates {#sssec-pkgs-dockerTools-helpers-caCertificates}
+
+This sets up `/etc/ssl/certs/ca-certificates.crt`.
+
+### fakeNss {#sssec-pkgs-dockerTools-helpers-fakeNss}
+
+Provides `/etc/passwd` and `/etc/group` that contain root and nobody.
+Useful when packaging binaries that insist on using nss to look up
+username/groups (like nginx).
+
+### shadowSetup {#ssec-pkgs-dockerTools-shadowSetup}
 
 This constant string is a helper for setting up the base files for managing users and groups, only if such files don't exist already. It is suitable for being used in a [`buildImage` `runAsRoot`](#ex-dockerTools-buildImage-runAsRoot) script for cases like in the example below:
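
The example referenced above falls outside this hunk; a hedged sketch of such a `runAsRoot` script (the image name, group, and user are made up) could look like:

```nix
dockerTools.buildImage {
  name = "shadow-example";
  runAsRoot = ''
    #!${pkgs.runtimeShell}
    # Create the base passwd/group/shadow files before adding users.
    ${pkgs.dockerTools.shadowSetup}
    groupadd -r service
    useradd -r -g service service
  '';
}
```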
 
@@ -357,3 +398,142 @@ buildImage {
   };
 }
 ```
+
+## buildNixShellImage {#ssec-pkgs-dockerTools-buildNixShellImage}
+
+Create a Docker image that sets up an environment similar to that of running `nix-shell` on a derivation.
+When run in Docker, this environment somewhat resembles the Nix sandbox typically used by `nix-build`, with a major difference being that access to the internet is allowed.
+It additionally behaves like an interactive `nix-shell`, running things like `shellHook` and setting an interactive prompt.
+If the derivation is fully buildable (i.e. `nix-build` can be used on it), running `buildDerivation` inside such a Docker image will build the derivation, with all its outputs being available in the correct `/nix/store` paths, pointed to by the respective environment variables like `$out`, etc.
+
+::: {.warning}
+The behavior doesn't match `nix-shell` or `nix-build` exactly and this function is known not to work correctly for e.g. fixed-output derivations, content-addressed derivations, impure derivations and other special types of derivations.
+:::
+
+### Arguments {#ssec-pkgs-dockerTools-buildNixShellImage-arguments}
+
+`drv`
+
+: The derivation on which to base the Docker image.
+
+    Adding packages to the Docker image is possible by e.g. extending the list of `nativeBuildInputs` of this derivation like
+
+    ```nix
+    buildNixShellImage {
+      drv = someDrv.overrideAttrs (old: {
+        nativeBuildInputs = old.nativeBuildInputs or [] ++ [
+          somethingExtra
+        ];
+      });
+      # ...
+    }
+    ```
+
+    Similarly, you can extend the image initialization script by extending `shellHook`
+
+`name` _optional_
+
+: The name of the resulting image.
+
+    *Default:* `drv.name + "-env"`
+
+`tag` _optional_
+
+: Tag of the generated image.
+
+    *Default:* the resulting image derivation output path's hash
+
+`uid`/`gid` _optional_
+
+: The user/group ID to run the container as. This is like a `nixbld` build user.
+
+    *Default:* 1000/1000
+
+`homeDirectory` _optional_
+
+: The home directory of the user the container is running as
+
+    *Default:* `/build`
+
+`shell` _optional_
+
+: The path to the `bash` binary to use as the shell. This shell is started when running the image.
+
+    *Default:* `pkgs.bashInteractive + "/bin/bash"`
+
+`command` _optional_
+
+: Run this command in the environment of the derivation, in an interactive shell. See the `--command` option in the [`nix-shell` documentation](https://nixos.org/manual/nix/stable/command-ref/nix-shell.html?highlight=nix-shell#options).
+
+    *Default:* (none)
+
+`run` _optional_
+
+: Same as `command`, but runs the command in a non-interactive shell instead. See the `--run` option in the [`nix-shell` documentation](https://nixos.org/manual/nix/stable/command-ref/nix-shell.html?highlight=nix-shell#options).
+
+    *Default:* (none)
+
+### Example {#ssec-pkgs-dockerTools-buildNixShellImage-example}
+
+The following shows how to build the `pkgs.hello` package inside a Docker container built with `buildNixShellImage`.
+
+```nix
+with import <nixpkgs> {};
+dockerTools.buildNixShellImage {
+  drv = hello;
+}
+```
+
+Build the derivation:
+
+```console
+nix-build hello.nix
+```
+
+    these 8 derivations will be built:
+      /nix/store/xmw3a5ln29rdalavcxk1w3m4zb2n7kk6-nix-shell-rc.drv
+    ...
+    Creating layer 56 from paths: ['/nix/store/crpnj8ssz0va2q0p5ibv9i6k6n52gcya-stdenv-linux']
+    Creating layer 57 with customisation...
+    Adding manifests...
+    Done.
+    /nix/store/cpyn1lc897ghx0rhr2xy49jvyn52bazv-hello-2.12-env.tar.gz
+
+Load the image:
+
+```console
+docker load -i result
+```
+
+    0d9f4c4cd109: Loading layer [==================================================>]   2.56MB/2.56MB
+    ...
+    ab1d897c0697: Loading layer [==================================================>]  10.24kB/10.24kB
+    Loaded image: hello-2.12-env:pgj9h98nal555415faa43vsydg161bdz
+
+Run the container:
+
+```console
+docker run -it hello-2.12-env:pgj9h98nal555415faa43vsydg161bdz
+```
+
+    [nix-shell:/build]$
+
+In the running container, run the build:
+
+```console
+buildDerivation
+```
+
+    unpacking sources
+    unpacking source archive /nix/store/8nqv6kshb3vs5q5bs2k600xpj5bkavkc-hello-2.12.tar.gz
+    ...
+    patching script interpreter paths in /nix/store/z5wwy5nagzy15gag42vv61c2agdpz2f2-hello-2.12
+    checking for references to /build/ in /nix/store/z5wwy5nagzy15gag42vv61c2agdpz2f2-hello-2.12...
+
+Check the build result:
+
+```console
+$out/bin/hello
+```
+
+    Hello, world!
diff --git a/nixpkgs/doc/builders/images/makediskimage.section.md b/nixpkgs/doc/builders/images/makediskimage.section.md
new file mode 100644
index 000000000000..e50479c4e83e
--- /dev/null
+++ b/nixpkgs/doc/builders/images/makediskimage.section.md
@@ -0,0 +1,108 @@
+# `<nixpkgs/nixos/lib/make-disk-image.nix>` {#sec-make-disk-image}
+
+`<nixpkgs/nixos/lib/make-disk-image.nix>` is a function to create _disk images_ in multiple formats: raw, QCOW2 (QEMU), QCOW2-Compressed (compressed version), VDI (VirtualBox), VPC (VirtualPC).
+
+This function can create images in two ways:
+
+- using `cptofs` without any virtual machine to create a Nix store disk image,
+- using a virtual machine to create a full NixOS installation.
+
+When testing early-boot or lifecycle parts of NixOS such as a bootloader or multiple generations, it is necessary to opt for a full NixOS system installation.
+For many web servers and applications, on the other hand, a Nix-store-only disk image is sufficient and faster to build.
+
+NixOS tests also use this function when preparing the VM. The `cptofs` method is used when `virtualisation.useBootLoader` is false (the default). Otherwise the second method is used.
+
+## Features {#sec-make-disk-image-features}
+
+For reference, read the function signature in the source code for documentation on its arguments: <https://github.com/NixOS/nixpkgs/blob/master/nixos/lib/make-disk-image.nix>.
+Features are separated into sections depending on whether you opt for a Nix-store-only image or a full NixOS image.
+
+### Common {#sec-make-disk-image-features-common}
+
+- arbitrary NixOS configuration
+- automatic or bound disk size: `diskSize` parameter; `additionalSpace` can be set when `diskSize` is `auto` to add a constant amount of extra disk space
+- multiple partition table layouts: EFI, legacy, legacy + GPT, hybrid, none through `partitionTableType` parameter
+- OVMF or EFI firmware and variable templates can be customized
+- root filesystem `fsType` can be customized to any filesystem for which `mkfs.${fsType}` is available during image creation
+- root filesystem label can be customized, defaults to `nix-store` if it's a Nix store image, otherwise `nixpkgs/nixos`
+- arbitrary code can be executed after the disk image has been produced, with `postVM`
+- the current nixpkgs can be realized as a channel in the disk image, which will change the hash of the image when the sources are updated
+- additional store paths can be provided through `additionalPaths`
+
+### Full NixOS image {#sec-make-disk-image-features-full-image}
+
+- arbitrary contents with permissions can be placed in the target filesystem using `contents`
+- a `/etc/nixpkgs/nixos/configuration.nix` can be provided through `configFile`
+- bootloaders are supported
+- EFI variables can be mutated during image production and the result is exposed in `$out`
+- the boot partition size can be customized when the partition table type is `efi` or `hybrid`
+
+### On bit-to-bit reproducibility {#sec-make-disk-image-features-reproducibility}
+
+Images are **NOT** bit-to-bit deterministic; please do not hesitate to try to fix this. Sources of non-determinism include (non-exhaustive):
+
+- bootloader installation leaves timestamps
+- the SQLite Nix store database contains registration times
+- `/etc/shadow` is in a non-deterministic order
+
+A `deterministic` flag is available for best-effort determinism.
+
+## Usage {#sec-make-disk-image-usage}
+
+To produce a Nix-store only image:
+```nix
+let
+  pkgs = import <nixpkgs> {};
+  lib = pkgs.lib;
+  make-disk-image = import <nixpkgs/nixos/lib/make-disk-image.nix>;
+in
+  make-disk-image {
+    inherit pkgs lib;
+    config = {};
+    additionalPaths = [ ];
+    format = "qcow2";
+    onlyNixStore = true;
+    partitionTableType = "none";
+    installBootLoader = false;
+    touchEFIVars = false;
+    diskSize = "auto";
+    additionalSpace = "0M"; # Defaults to 512M.
+    copyChannel = false;
+  }
+```
+
+Some arguments can be left out; they are shown explicitly for the sake of the example.
+
+Building this derivation will provide a QCOW2 disk image containing only the Nix store and its registration information.
+
+To produce a NixOS installation disk image with UEFI and the bootloader installed:
+```nix
+let
+  pkgs = import <nixpkgs> {};
+  lib = pkgs.lib;
+  make-disk-image = import <nixpkgs/nixos/lib/make-disk-image.nix>;
+  evalConfig = import <nixpkgs/nixos/lib/eval-config.nix>;
+in
+  make-disk-image {
+    inherit pkgs lib;
+    config = evalConfig {
+      modules = [
+        {
+          fileSystems."/" = { device = "/dev/vda"; fsType = "ext4"; autoFormat = true; };
+          boot.grub.device = "/dev/vda";
+        }
+      ];
+    };
+    format = "qcow2";
+    onlyNixStore = false;
+    partitionTableType = "legacy+gpt";
+    installBootLoader = true;
+    touchEFIVars = true;
+    diskSize = "auto";
+    additionalSpace = "0M"; # Defaults to 512M.
+    copyChannel = false;
+    memSize = 2048; # Qemu VM memory size in megabytes. Defaults to 1024M.
+  }
+```
+
+
diff --git a/nixpkgs/doc/builders/images/ocitools.section.md b/nixpkgs/doc/builders/images/ocitools.section.md
index d3ab8776786b..c35f65bce007 100644
--- a/nixpkgs/doc/builders/images/ocitools.section.md
+++ b/nixpkgs/doc/builders/images/ocitools.section.md
@@ -34,4 +34,4 @@ buildContainer {
 
 - `mounts` specifies additional mount points chosen by the user. By default only a minimal set of necessary filesystems are mounted into the container (e.g procfs, cgroupfs)
 
-- `readonly` makes the container\'s rootfs read-only if it is set to true. The default value is false `false`.
+- `readonly` makes the container's rootfs read-only if it is set to true. The default value is `false`.
diff --git a/nixpkgs/doc/builders/images/portableservice.section.md b/nixpkgs/doc/builders/images/portableservice.section.md
new file mode 100644
index 000000000000..5400928b158f
--- /dev/null
+++ b/nixpkgs/doc/builders/images/portableservice.section.md
@@ -0,0 +1,81 @@
+# pkgs.portableService {#sec-pkgs-portableService}
+
+`pkgs.portableService` is a function to create _portable service images_,
+as read-only, immutable, `squashfs` archives.
+
+systemd supports a concept of [Portable Services](https://systemd.io/PORTABLE_SERVICES/).
+Portable Services are a delivery method for system services that uses two specific features of container management:
+
+* Applications are bundled. I.e. multiple services, their binaries and
+  all their dependencies are packaged in an image, and are run directly from it.
+* Stricter default security policies, i.e. sandboxing of applications.
+
+This allows using Nix to build images which can be run on many recent Linux distributions.
+
+The primary tool for interacting with Portable Services is `portablectl`,
+and they are managed by the `systemd-portabled` system service.
+
+::: {.note}
+Portable services are supported starting with systemd 239 (released on 2018-06-22).
+:::
+
+A very simple example of using `portableService` is described below:
+
+[]{#ex-pkgs-portableService}
+
+```nix
+pkgs.portableService {
+  pname = "demo";
+  version = "1.0";
+  units = [ demo-service demo-socket ];
+}
+```
+
+The above example will build a squashfs archive image in `result/$pname_$version.raw`. The image will contain the
+file system structure as required by the portable service specification, and a subset of the Nix store with all the
+dependencies of the two derivations in the `units` list.
+`units` must be a list of derivations, and their names must be prefixed with the service name (`"demo"` in this case).
+Otherwise `systemd-portabled` will ignore them.
+
+::: {.note}
+The `.raw` file extension of the image is required by the portable services specification.
+:::
+
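
How `demo-service` and `demo-socket` above are defined is not shown here; one plausible sketch (the unit contents and the use of `pkgs.writeText` are assumptions, not taken from this commit):

```nix
let
  # Unit derivations whose names are prefixed with the service name ("demo").
  demo-service = pkgs.writeText "demo.service" ''
    [Unit]
    Description=Demo service

    [Service]
    ExecStart=${pkgs.hello}/bin/hello
  '';
  demo-socket = pkgs.writeText "demo.socket" ''
    [Socket]
    ListenStream=8080

    [Install]
    WantedBy=sockets.target
  '';
in pkgs.portableService {
  pname = "demo";
  version = "1.0";
  units = [ demo-service demo-socket ];
}
```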
+Some other options available are:
+- `description`, `homepage`
+
+  Are added to the `/etc/os-release` in the image and are shown by the portable services tooling.
+  Default to empty values, in which case they are not added to os-release.
+- `symlinks`
+
+  A list of attribute sets `{ object, symlink }`. Symlinks will be created in the root filesystem of the image to
+  objects in the Nix store. Defaults to an empty list.
+- `contents`
+
+  A list of additional derivations to be included in the image Nix store, as-is. Defaults to an empty list.
+- `squashfsTools`
+
+  Defaults to `pkgs.squashfsTools`, allows you to override the package that provides `mksquashfs`.
+- `squash-compression`, `squash-block-size`
+
+  Options to `mksquashfs`. Default to `"xz -Xdict-size 100%"` and `"1M"` respectively.
+
+A typical usage of `symlinks` would be:
+```nix
+  symlinks = [
+    { object = "${pkgs.cacert}/etc/ssl"; symlink = "/etc/ssl"; }
+    { object = "${pkgs.bash}/bin/bash"; symlink = "/bin/sh"; }
+    { object = "${pkgs.php}/bin/php"; symlink = "/usr/bin/php"; }
+  ];
+```
+to create these symlinks for legacy applications that assume they exist globally.
+
+Once the image is created, and deployed on a host in `/var/lib/portables/`, you can attach the image and run the service. As root run:
+```console
+portablectl attach demo_1.0.raw
+systemctl enable --now demo.socket
+systemctl enable --now demo.service
+```
+::: {.note}
+See the [man page](https://www.freedesktop.org/software/systemd/man/portablectl.html) of `portablectl` for more info on its usage.
+:::
diff --git a/nixpkgs/doc/builders/packages/cataclysm-dda.section.md b/nixpkgs/doc/builders/packages/cataclysm-dda.section.md
index bfeacb47feff..f401e9b9efa5 100644
--- a/nixpkgs/doc/builders/packages/cataclysm-dda.section.md
+++ b/nixpkgs/doc/builders/packages/cataclysm-dda.section.md
@@ -103,7 +103,7 @@ let
         owner = "Someone";
         repo = "AwesomeMod";
         rev = "...";
-        sha256 = "...";
+        hash = "...";
       };
       # Path to be installed in the unpacked source (default: ".")
       modRoot = "contents/under/this/path/will/be/installed";
diff --git a/nixpkgs/doc/builders/packages/citrix.section.md b/nixpkgs/doc/builders/packages/citrix.section.md
index 4721f7e90f7a..bcf0924249bc 100644
--- a/nixpkgs/doc/builders/packages/citrix.section.md
+++ b/nixpkgs/doc/builders/packages/citrix.section.md
@@ -4,7 +4,7 @@ The [Citrix Workspace App](https://www.citrix.com/products/workspace-app/) is a
 
 ## Basic usage {#sec-citrix-base}
 
-The tarball archive needs to be downloaded manually, as the license agreements of the vendor for [Citrix Workspace](https://www.citrix.de/downloads/workspace-app/linux/workspace-app-for-linux-latest.html) needs to be accepted first. Then run `nix-prefetch-url file://$PWD/linuxx64-$version.tar.gz`. With the archive available in the store, the package can be built and installed with Nix.
+The tarball archive needs to be downloaded manually, as the license agreements of the vendor for [Citrix Workspace](https://www.citrix.com/downloads/workspace-app/linux/workspace-app-for-linux-latest.html) need to be accepted first. Then run `nix-prefetch-url file://$PWD/linuxx64-$version.tar.gz`. With the archive available in the store, the package can be built and installed with Nix.
 
 ## Citrix Self-service {#sec-citrix-selfservice}
 
@@ -19,7 +19,7 @@ $ selfservice
 
 ## Custom certificates {#sec-citrix-custom-certs}
 
-The `Citrix Workspace App` in `nixpkgs` trusts several certificates [from the Mozilla database](https://curl.haxx.se/docs/caextract.html) by default. However, several companies using Citrix might require their own corporate certificate. On distros with imperative packaging, these certs can be stored easily in [`$ICAROOT`](https://developer-docs.citrix.com/projects/receiver-for-linux-command-reference/en/13.7/), however this directory is a store path in `nixpkgs`. In order to work around this issue, the package provides a simple mechanism to add custom certificates without rebuilding the entire package using `symlinkJoin`:
+The `Citrix Workspace App` in `nixpkgs` trusts several certificates [from the Mozilla database](https://curl.haxx.se/docs/caextract.html) by default. However, several companies using Citrix might require their own corporate certificate. On distros with imperative packaging, these certs can be stored easily in [`$ICAROOT`](https://citrix.github.io/receiver-for-linux-command-reference/), however this directory is a store path in `nixpkgs`. In order to work around this issue, the package provides a simple mechanism to add custom certificates without rebuilding the entire package using `symlinkJoin`:
 
 ```nix
 with import <nixpkgs> { config.allowUnfree = true; };
diff --git a/nixpkgs/doc/builders/packages/dlib.section.md b/nixpkgs/doc/builders/packages/dlib.section.md
index 8f0aa8610180..022195310a71 100644
--- a/nixpkgs/doc/builders/packages/dlib.section.md
+++ b/nixpkgs/doc/builders/packages/dlib.section.md
@@ -4,7 +4,7 @@
 
 ## Compiling without AVX support {#compiling-without-avx-support}
 
-Especially older CPUs don\'t support [AVX](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) (Advanced Vector Extensions) instructions that are used by DLib to optimize their algorithms.
+Especially older CPUs don't support [AVX](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) (Advanced Vector Extensions) instructions that are used by DLib to optimize their algorithms.
 
 On the affected hardware errors like `Illegal instruction` will occur. In those cases AVX support needs to be disabled:
 
diff --git a/nixpkgs/doc/builders/packages/eclipse.section.md b/nixpkgs/doc/builders/packages/eclipse.section.md
index 8cf7426833b8..e19510e131a0 100644
--- a/nixpkgs/doc/builders/packages/eclipse.section.md
+++ b/nixpkgs/doc/builders/packages/eclipse.section.md
@@ -43,11 +43,11 @@ packageOverrides = pkgs: {
         name = "myplugin1-1.0";
         srcFeature = fetchurl {
           url = "http://…/features/myplugin1.jar";
-          sha256 = "123…";
+          hash = "sha256-123…";
         };
         srcPlugin = fetchurl {
           url = "http://…/plugins/myplugin1.jar";
-          sha256 = "123…";
+          hash = "sha256-123…";
         };
       });
       (plugins.buildEclipseUpdateSite {
@@ -55,7 +55,7 @@ packageOverrides = pkgs: {
         src = fetchurl {
           stripRoot = false;
           url = "http://…/myplugin2.zip";
-          sha256 = "123…";
+          hash = "sha256-123…";
         };
       });
     ];
diff --git a/nixpkgs/doc/builders/packages/firefox.section.md b/nixpkgs/doc/builders/packages/firefox.section.md
index 0dd786a599d0..46bc0457a3dc 100644
--- a/nixpkgs/doc/builders/packages/firefox.section.md
+++ b/nixpkgs/doc/builders/packages/firefox.section.md
@@ -12,7 +12,7 @@ The `wrapFirefox` function allows to pass policies, preferences and extensions t
       (fetchFirefoxAddon {
         name = "ublock"; # Has to be unique!
         url = "https://addons.mozilla.org/firefox/downloads/file/3679754/ublock_origin-1.31.0-an+fx.xpi";
-        sha256 = "1h768ljlh3pi23l27qp961v1hd0nbj2vasgy11bmcrlqp40zgvnr";
+        hash = "sha256-2e73AbmYZlZXCP5ptYVcFjQYdjDp4iPoEPEOSCVF5sA=";
       })
     ];
 
diff --git a/nixpkgs/doc/builders/packages/ibus.section.md b/nixpkgs/doc/builders/packages/ibus.section.md
index 1b09d3fbbab9..ec78cd0c9a96 100644
--- a/nixpkgs/doc/builders/packages/ibus.section.md
+++ b/nixpkgs/doc/builders/packages/ibus.section.md
@@ -4,7 +4,7 @@ This package is an ibus-based completion method to speed up typing.
 
 ## Activating the engine {#sec-ibus-typing-booster-activate}
 
-IBus needs to be configured accordingly to activate `typing-booster`. The configuration depends on the desktop manager in use. For detailed instructions, please refer to the [upstream docs](https://mike-fabian.github.io/ibus-typing-booster/documentation.html).
+IBus needs to be configured accordingly to activate `typing-booster`. The configuration depends on the desktop manager in use. For detailed instructions, please refer to the [upstream docs](https://mike-fabian.github.io/ibus-typing-booster/).
 
 On NixOS, you need to explicitly enable `ibus` with given engines before customizing your desktop to use `typing-booster`. This can be achieved using the `ibus` module:
 
diff --git a/nixpkgs/doc/builders/packages/weechat.section.md b/nixpkgs/doc/builders/packages/weechat.section.md
index 767cc604ab45..755b6e6ad1ea 100644
--- a/nixpkgs/doc/builders/packages/weechat.section.md
+++ b/nixpkgs/doc/builders/packages/weechat.section.md
@@ -73,7 +73,7 @@ stdenv.mkDerivation {
   name = "exemplary-weechat-script";
   src = fetchurl {
     url = "https://scripts.tld/your-scripts.tar.gz";
-    sha256 = "...";
+    hash = "...";
   };
   passthru.scripts = [ "foo.py" "bar.lua" ];
   installPhase = ''
diff --git a/nixpkgs/doc/builders/special.xml b/nixpkgs/doc/builders/special.xml
index 8902ce5c8132..18cf6cfd39c7 100644
--- a/nixpkgs/doc/builders/special.xml
+++ b/nixpkgs/doc/builders/special.xml
@@ -6,5 +6,8 @@
   This chapter describes several special builders.
  </para>
  <xi:include href="special/fhs-environments.section.xml" />
+ <xi:include href="special/makesetuphook.section.xml" />
  <xi:include href="special/mkshell.section.xml" />
+ <xi:include href="special/darwin-builder.section.xml" />
+ <xi:include href="special/vm-tools.section.xml" />
 </chapter>
diff --git a/nixpkgs/doc/builders/special/darwin-builder.section.md b/nixpkgs/doc/builders/special/darwin-builder.section.md
new file mode 100644
index 000000000000..30bf2d095102
--- /dev/null
+++ b/nixpkgs/doc/builders/special/darwin-builder.section.md
@@ -0,0 +1,149 @@
+# darwin.builder {#sec-darwin-builder}
+
+`darwin.builder` provides a way to bootstrap a Linux builder on a macOS machine.
+
+This requires macOS version 12.4 or later.
+
+This also requires that port 22 on your machine is free (since Nix does not
+permit specifying a non-default SSH port for builders).
+
+You will also need to be a trusted user for your Nix installation.  In other
+words, your `/etc/nix/nix.conf` should have something like:
+
+```
+extra-trusted-users = <your username goes here>
+```
+
+To launch the builder, run the following flake:
+
+```ShellSession
+$ nix run nixpkgs#darwin.builder
+```
+
+That will prompt you to enter your `sudo` password:
+
+```
++ sudo --reset-timestamp /nix/store/…-install-credentials.sh ./keys
+Password:
+```
+
+… so that it can install a private key used to `ssh` into the build server.
+After that the script will launch the virtual machine and automatically log you
+in as the `builder` user:
+
+```
+<<< Welcome to NixOS 22.11.20220901.1bd8d11 (aarch64) - ttyAMA0 >>>
+
+Run 'nixos-help' for the NixOS manual.
+
+nixos login: builder (automatic login)
+
+
+[builder@nixos:~]$
+```
+
+> Note: When you need to stop the VM, run `shutdown now` as the `builder` user.
+
+To delegate builds to the remote builder, add the following options to your
+`nix.conf` file:
+
+```
+# - Replace ${ARCH} with either aarch64 or x86_64 to match your host machine
+# - Replace ${MAX_JOBS} with the maximum number of builds (pick 4 if you're not sure)
+builders = ssh-ng://builder@localhost ${ARCH}-linux /etc/nix/builder_ed25519 ${MAX_JOBS} - - - c3NoLWVkMjU1MTkgQUFBQUMzTnphQzFsWkRJMU5URTVBQUFBSUpCV2N4Yi9CbGFxdDFhdU90RStGOFFVV3JVb3RpQzVxQkorVXVFV2RWQ2Igcm9vdEBuaXhvcwo=
+
+# Not strictly necessary, but this will reduce your disk utilization
+builders-use-substitutes = true
+```
+
+… and then restart your Nix daemon to apply the change:
+
+```ShellSession
+$ sudo launchctl kickstart -k system/org.nixos.nix-daemon
+```
+
+## Example flake usage {#sec-darwin-builder-example-flake}
+
+```nix
+{
+  inputs = {
+    nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-22.11-darwin";
+    darwin.url = "github:lnl7/nix-darwin/master";
+    darwin.inputs.nixpkgs.follows = "nixpkgs";
+  };
+
+  outputs = { self, darwin, nixpkgs, ... }@inputs:
+  let
+
+    inherit (darwin.lib) darwinSystem;
+    system = "aarch64-darwin";
+    pkgs = nixpkgs.legacyPackages."${system}";
+    linuxSystem = builtins.replaceStrings [ "darwin" ] [ "linux" ] system;
+
+    darwin-builder = nixpkgs.lib.nixosSystem {
+      system = linuxSystem;
+      modules = [
+        "${nixpkgs}/nixos/modules/profiles/macos-builder.nix"
+        { virtualisation.host.pkgs = pkgs; }
+      ];
+    };
+  in {
+
+    darwinConfigurations = {
+      machine1 = darwinSystem {
+        inherit system;
+        modules = [
+          {
+            nix.distributedBuilds = true;
+            nix.buildMachines = [{
+              hostName = "ssh://builder@localhost";
+              system = linuxSystem;
+              maxJobs = 4;
+              supportedFeatures = [ "kvm" "benchmark" "big-parallel" ];
+            }];
+
+            launchd.daemons.darwin-builder = {
+              command = "${darwin-builder.config.system.build.macos-builder-installer}/bin/create-builder";
+              serviceConfig = {
+                KeepAlive = true;
+                RunAtLoad = true;
+                StandardOutPath = "/var/log/darwin-builder.log";
+                StandardErrorPath = "/var/log/darwin-builder.log";
+              };
+            };
+          }
+        ];
+      };
+    };
+
+  };
+}
+```
+
+## Reconfiguring the builder {#sec-darwin-builder-reconfiguring}
+
+Initially you should not change the builder configuration, or else you will not be
+able to use the binary cache. However, after you have the builder running locally,
+you may use it to build a modified builder with additional storage or memory.
+
+To do this, you just need to set the `virtualisation.darwin-builder.*` parameters as
+in the example below and rebuild.
+
+```nix
+    darwin-builder = nixpkgs.lib.nixosSystem {
+      system = linuxSystem;
+      modules = [
+        "${nixpkgs}/nixos/modules/profiles/macos-builder.nix"
+        {
+          virtualisation.host.pkgs = pkgs;
+          virtualisation.darwin-builder.diskSize = 5120;
+          virtualisation.darwin-builder.memorySize = 1024;
+          virtualisation.darwin-builder.hostPort = 33022;
+          virtualisation.darwin-builder.workingDirectory = "/var/lib/darwin-builder";
+        }
+      ];
+    };
+```
+
+You may make any other changes to your VM in this attribute set. For example,
+you could enable Docker or X11 forwarding to your Darwin host.
+
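+For instance, a minimal sketch of extending the same module fragment shown above (the Docker option is only an illustration, not part of the builder profile):
+
+```nix
+        {
+          virtualisation.host.pkgs = pkgs;
+          virtualisation.darwin-builder.diskSize = 5120;
+          # Any other NixOS option can be set in this module as well, e.g.:
+          virtualisation.docker.enable = true;
+        }
+```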
diff --git a/nixpkgs/doc/builders/special/fhs-environments.section.md b/nixpkgs/doc/builders/special/fhs-environments.section.md
index cacad261e28f..5a248e4ead92 100644
--- a/nixpkgs/doc/builders/special/fhs-environments.section.md
+++ b/nixpkgs/doc/builders/special/fhs-environments.section.md
@@ -1,9 +1,12 @@
-# buildFHSUserEnv {#sec-fhs-environments}
+# buildFHSEnv {#sec-fhs-environments}
 
-`buildFHSUserEnv` provides a way to build and run FHS-compatible lightweight sandboxes. It creates an isolated root with bound `/nix/store`, so its footprint in terms of disk space needed is quite small. This allows one to run software which is hard or unfeasible to patch for NixOS -- 3rd-party source trees with FHS assumptions, games distributed as tarballs, software with integrity checking and/or external self-updated binaries. It uses Linux namespaces feature to create temporary lightweight environments which are destroyed after all child processes exit, without root user rights requirement. Accepted arguments are:
+`buildFHSEnv` provides a way to build and run FHS-compatible lightweight sandboxes. It creates an isolated root filesystem with the host's `/nix/store`, so its footprint in terms of disk space is quite small. This allows you to run software which is hard or unfeasible to patch for NixOS: for instance, 3rd-party source trees with FHS assumptions, games distributed as tarballs, and software with integrity checking and/or external self-updating binaries.
+It uses the Linux namespaces feature to create temporary lightweight environments which are destroyed after all child processes exit, without requiring elevated privileges. It works similarly to containerisation technology such as Docker or Flatpak, but provides no security-relevant separation from the host system.
+
+Accepted arguments are:
 
 - `name`
-        Environment name.
+        The name of the environment and the wrapper executable.
 - `targetPkgs`
         Packages to be installed for the main host's architecture (i.e. x86_64 on x86_64 installations). Along with libraries binaries are also installed.
 - `multiPkgs`
@@ -17,33 +20,35 @@
 - `extraInstallCommands`
         Additional commands to be executed for finalizing the derivation with runner script.
 - `runScript`
-        A command that would be executed inside the sandbox and passed all the command line arguments. It defaults to `bash`.
+        A shell command to be executed inside the sandbox. It defaults to `bash`. Command line arguments passed to the resulting wrapper are appended to this command by default.
+        This command must be shell-escaped, e.g. `"foo app" --do-stuff --with "some file"`; see `lib.escapeShellArgs`.
 - `profile`
         Optional script for `/etc/profile` within the sandbox.
 
-One can create a simple environment using a `shell.nix` like that:
+You can create a simple environment using a `shell.nix` like this:
 
 ```nix
 { pkgs ? import <nixpkgs> {} }:
 
-(pkgs.buildFHSUserEnv {
+(pkgs.buildFHSEnv {
   name = "simple-x11-env";
-  targetPkgs = pkgs: (with pkgs;
-    [ udev
-      alsa-lib
-    ]) ++ (with pkgs.xorg;
-    [ libX11
-      libXcursor
-      libXrandr
-    ]);
-  multiPkgs = pkgs: (with pkgs;
-    [ udev
-      alsa-lib
-    ]);
+  targetPkgs = pkgs: (with pkgs; [
+    udev
+    alsa-lib
+  ]) ++ (with pkgs.xorg; [
+    libX11
+    libXcursor
+    libXrandr
+  ]);
+  multiPkgs = pkgs: (with pkgs; [
+    udev
+    alsa-lib
+  ]);
   runScript = "bash";
 }).env
 ```
 
-Running `nix-shell` would then drop you into a shell with these libraries and binaries available. You can use this to run closed-source applications which expect FHS structure without hassles: simply change `runScript` to the application path, e.g. `./bin/start.sh` -- relative paths are supported.
+Running `nix-shell` on it would drop you into a shell inside an FHS env where those libraries and binaries are available in FHS-compliant paths. Applications that expect an FHS structure (e.g. proprietary binaries) can run inside this environment without modification.
+You can build a wrapper by running your binary in `runScript`, e.g. `./bin/start.sh`. Relative paths work as expected.
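+
+For instance, a minimal sketch of such a wrapper (the script path and flag are hypothetical), with the command pre-escaped via `lib.escapeShellArgs`:
+
+```nix
+{ pkgs ? import <nixpkgs> {} }:
+
+pkgs.buildFHSEnv {
+  name = "some-game";
+  targetPkgs = pkgs: [ pkgs.libGL ];
+  # Escaping keeps arguments containing spaces intact inside the wrapper.
+  runScript = pkgs.lib.escapeShellArgs [ "./bin/start.sh" "--no-intro" ];
+}
+```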
 
 Additionally, the FHS builder links all relocated gsettings-schemas (the glib setup-hook moves them to `share/gsettings-schemas/${name}/glib-2.0/schemas`) to their standard FHS location. This means you don't need to wrap binaries with `wrapGAppsHook`.
diff --git a/nixpkgs/doc/builders/special/makesetuphook.section.md b/nixpkgs/doc/builders/special/makesetuphook.section.md
new file mode 100644
index 000000000000..fee508dc29c2
--- /dev/null
+++ b/nixpkgs/doc/builders/special/makesetuphook.section.md
@@ -0,0 +1,37 @@
+# pkgs.makeSetupHook {#sec-pkgs.makeSetupHook}
+
+`pkgs.makeSetupHook` is a builder that produces hooks that go into `nativeBuildInputs`.
+
+## Usage {#sec-pkgs.makeSetupHook-usage}
+
+```nix
+pkgs.makeSetupHook {
+  name = "something-hook";
+  propagatedBuildInputs = [ pkgs.commandsomething ];
+  depsTargetTargetPropagated = [ pkgs.libsomething ];
+} ./script.sh
+```
+
+#### A setup hook that depends on the `hello` package, runs `hello`, and substitutes `@shell@` with the path to `bash` {#sec-pkgs.makeSetupHook-usage-example}
+
+```nix
+pkgs.makeSetupHook {
+    name = "run-hello-hook";
+    propagatedBuildInputs = [ pkgs.hello ];
+    substitutions = { shell = "${pkgs.bash}/bin/bash"; };
+    passthru.tests.greeting = callPackage ./test { };
+    meta.platforms = lib.platforms.linux;
+} (writeScript "run-hello-hook.sh" ''
+    #!@shell@
+    hello
+'')
+```
+
+## Attributes {#sec-pkgs.makeSetupHook-attributes}
+
+* `name` Set the name of the hook.
+* `propagatedBuildInputs` Runtime dependencies (such as binaries) of the hook.
+* `depsTargetTargetPropagated` Non-binary dependencies.
+* `meta` Standard `meta` attributes to set on the resulting derivation.
+* `passthru` Extra attributes (such as `tests`) to attach to the resulting derivation.
+* `substitutions` Variables for `substituteAll`.
diff --git a/nixpkgs/doc/builders/special/mkshell.section.md b/nixpkgs/doc/builders/special/mkshell.section.md
index 73cc57f485bd..96d43535955f 100644
--- a/nixpkgs/doc/builders/special/mkshell.section.md
+++ b/nixpkgs/doc/builders/special/mkshell.section.md
@@ -20,7 +20,7 @@ pkgs.mkShell {
 }
 ```
 
-## Attributes
+## Attributes {#sec-pkgs-mkShell-attributes}
 
 * `name` (default: `nix-shell`). Set the name of the derivation.
 * `packages` (default: `[]`). Add executable packages to the `nix-shell` environment.
@@ -29,7 +29,7 @@ pkgs.mkShell {
 
 ... all the attributes of `stdenv.mkDerivation`.
 
-## Building the shell
+## Building the shell {#sec-pkgs-mkShell-building}
 
 This derivation output will contain a text file that contains a reference to
 all the build inputs. This is useful in CI where we want to make sure that
diff --git a/nixpkgs/doc/builders/special/vm-tools.section.md b/nixpkgs/doc/builders/special/vm-tools.section.md
new file mode 100644
index 000000000000..3b6fb0d2556b
--- /dev/null
+++ b/nixpkgs/doc/builders/special/vm-tools.section.md
@@ -0,0 +1,148 @@
+# vmTools {#sec-vm-tools}
+
+A set of VM-related utilities that help build some packages in more advanced scenarios.
+
+## `vmTools.createEmptyImage` {#vm-tools-createEmptyImage}
+
+A bash script fragment that produces a disk image at `destination`.
+
+### Attributes
+
+* `size`. The disk size, in MiB.
+* `fullName`. Name that will be written to `${destination}/nix-support/full-name`.
+* `destination` (optional, default `$out`). Where to write the image files.
+
+## `vmTools.runInLinuxVM` {#vm-tools-runInLinuxVM}
+
+Run a derivation in a Linux virtual machine (using Qemu/KVM).
+By default, there is no disk image; the root filesystem is a `tmpfs`, and the Nix store is shared with the host (via the [9P protocol](https://wiki.qemu.org/Documentation/9p#9p_Protocol)).
+Thus, any pure Nix derivation should run unmodified.
+
+If the build fails and Nix is run with the `-K/--keep-failed` option, a script `run-vm` will be left behind in the temporary build directory, which allows you to boot into the VM and debug it interactively.
+
+### Attributes
+
+* `preVM` (optional). Shell command to be evaluated *before* the VM is started (i.e., on the host).
+* `memSize` (optional, default `512`). The memory size of the VM in MiB.
+* `diskImage` (optional). A file system image to be attached to `/dev/sda`.
+  Note that currently we expect the image to contain a filesystem, not a full disk image with a partition table etc.
+
+### Examples
+
+Build the derivation `hello` inside a VM:
+```nix
+{ pkgs }: with pkgs; with vmTools;
+runInLinuxVM hello
+```
+
+Build inside a VM with extra memory:
+```nix
+{ pkgs }: with pkgs; with vmTools;
+runInLinuxVM (hello.overrideAttrs (_: { memSize = 1024; }))
+```
+
+Use a VM with a disk image (implicitly sets `diskImage`, see [`vmTools.createEmptyImage`](#vm-tools-createEmptyImage)):
+```nix
+{ pkgs }: with pkgs; with vmTools;
+runInLinuxVM (hello.overrideAttrs (_: {
+  preVM = createEmptyImage {
+    size = 1024;
+    fullName = "vm-image";
+  };
+}))
+```
+
+## `vmTools.extractFs` {#vm-tools-extractFs}
+
+Takes a file, such as an ISO, and extracts its contents into the store.
+
+### Attributes
+
+* `file`. Path to the file to be extracted.
+  Note that currently we expect the image to contain a filesystem, not a full disk image with a partition table etc.
+* `fs` (optional). Filesystem of the contents of the file.
+
+### Examples
+
+Extract the contents of an ISO file:
+```nix
+{ pkgs }: with pkgs; with vmTools;
+extractFs { file = ./image.iso; }
+```
+
+## `vmTools.extractMTDfs` {#vm-tools-extractMTDfs}
+
+Like [](#vm-tools-extractFs), but it makes use of a [Memory Technology Device (MTD)](https://en.wikipedia.org/wiki/Memory_Technology_Device).
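+
+### Examples
+
+Extract the contents of an MTD filesystem image (a minimal sketch; the image filename is hypothetical):
+```nix
+{ pkgs }: with pkgs; with vmTools;
+extractMTDfs { file = ./rootfs.jffs2; }
+```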
+
+## `vmTools.runInLinuxImage` {#vm-tools-runInLinuxImage}
+
+Like [](#vm-tools-runInLinuxVM), but instead of using `stdenv` from the Nix store, run the build using the tools provided by `/bin`, `/usr/bin`, etc. from the specified filesystem image, which typically is a filesystem containing a [FHS](https://en.wikipedia.org/wiki/Filesystem_Hierarchy_Standard)-based Linux distribution.
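+
+### Examples
+
+Build `hello` with the toolchain from a distribution image (a minimal sketch; it assumes the image already provides all required build tools):
+```nix
+{ pkgs }: with pkgs; with vmTools;
+runInLinuxImage (hello.overrideAttrs (_: {
+  diskImage = diskImages.ubuntu2004x86_64;
+}))
+```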
+
+## `vmTools.makeImageTestScript` {#vm-tools-makeImageTestScript}
+
+Generate a script that can be used to run an interactive session in the given image.
+
+### Examples
+
+Create a script for running a Fedora 27 VM:
+```nix
+{ pkgs }: with pkgs; with vmTools;
+makeImageTestScript diskImages.fedora27x86_64
+```
+
+Create a script for running an Ubuntu 20.04 VM:
+```nix
+{ pkgs }: with pkgs; with vmTools;
+makeImageTestScript diskImages.ubuntu2004x86_64
+```
+
+## `vmTools.diskImageFuns` {#vm-tools-diskImageFuns}
+
+A set of functions that build a predefined set of minimal Linux distribution images.
+
+### Images
+
+* Fedora
+  * `fedora26x86_64`
+  * `fedora27x86_64`
+* CentOS
+  * `centos6i386`
+  * `centos6x86_64`
+  * `centos7x86_64`
+* Ubuntu
+  * `ubuntu1404i386`
+  * `ubuntu1404x86_64`
+  * `ubuntu1604i386`
+  * `ubuntu1604x86_64`
+  * `ubuntu1804i386`
+  * `ubuntu1804x86_64`
+  * `ubuntu2004i386`
+  * `ubuntu2004x86_64`
+  * `ubuntu2204i386`
+  * `ubuntu2204x86_64`
+* Debian
+  * `debian10i386`
+  * `debian10x86_64`
+  * `debian11i386`
+  * `debian11x86_64`
+
+### Attributes
+
+* `size` (optional, defaults to `4096`). The size of the image, in MiB.
+* `extraPackages` (optional). A list of names of additional packages from the distribution that should be included in the image.
+
+### Examples
+
+An 8 GiB image containing Firefox in addition to the default packages:
+```nix
+{ pkgs }: with pkgs; with vmTools;
+diskImageFuns.ubuntu2004x86_64 { extraPackages = [ "firefox" ]; size = 8192; }
+```
+
+## `vmTools.diskImageExtraFuns` {#vm-tools-diskImageExtraFuns}
+
+Shorthand for `vmTools.diskImageFuns.<attr> { extraPackages = ... }`.
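+
+### Examples
+
+The extra package from the [](#vm-tools-diskImageFuns) example, passed directly as a list (a minimal sketch using the default image size):
+```nix
+{ pkgs }: with pkgs; with vmTools;
+diskImageExtraFuns.ubuntu2004x86_64 [ "firefox" ]
+```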
+
+## `vmTools.diskImages` {#vm-tools-diskImages}
+
+Shorthand for `vmTools.diskImageFuns.<attr> { }`.
diff --git a/nixpkgs/doc/builders/testers.chapter.md b/nixpkgs/doc/builders/testers.chapter.md
index c6fb71de0180..928a57673e77 100644
--- a/nixpkgs/doc/builders/testers.chapter.md
+++ b/nixpkgs/doc/builders/testers.chapter.md
@@ -1,6 +1,19 @@
 # Testers {#chap-testers}
 This chapter describes several testing builders which are available in the <literal>testers</literal> namespace.
 
+## `hasPkgConfigModule` {#tester-hasPkgConfigModule}
+
+Checks whether a package exposes a certain `pkg-config` module.
+
+Example:
+
+```nix
+passthru.tests.pkg-config = testers.hasPkgConfigModule {
+  package = finalAttrs.finalPackage;
+  moduleName = "libfoo";
+};
+```
+
 ## `testVersion` {#tester-testVersion}
 
 Checks the command output contains the specified version
@@ -14,19 +27,89 @@ for example when using an 'old' hash in a fixed-output derivation.
 Examples:
 
 ```nix
-passthru.tests.version = testVersion { package = hello; };
+passthru.tests.version = testers.testVersion { package = hello; };
 
-passthru.tests.version = testVersion {
+passthru.tests.version = testers.testVersion {
   package = seaweedfs;
   command = "weed version";
 };
 
-passthru.tests.version = testVersion {
+passthru.tests.version = testers.testVersion {
   package = key;
   command = "KeY --help";
   # Wrong '2.5' version in the code. Drop on next version.
   version = "2.5";
 };
+
+passthru.tests.version = testers.testVersion {
+  package = ghr;
+  # The output needs to contain the 'version' string without any prefix or suffix.
+  version = "v${version}";
+};
+```
+
+## `testBuildFailure` {#tester-testBuildFailure}
+
+Make sure that a build does not succeed. This is useful for testing testers.
+
+This returns a derivation with an override on the builder, with the following effects:
+
+ - Fail the build when the original builder succeeds
+ - Move `$out` to `$out/result`, if it exists (assuming `out` is the default output)
+ - Save the build log to `$out/testBuildFailure.log` (same)
+
+Example:
+
+```nix
+runCommand "example" {
+  failed = testers.testBuildFailure (runCommand "fail" {} ''
+    echo ok-ish >$out
+    echo failing though
+    exit 3
+  '');
+} ''
+  grep -F 'ok-ish' $failed/result
+  grep -F 'failing though' $failed/testBuildFailure.log
+  [[ 3 = $(cat $failed/testBuildFailure.exit) ]]
+  touch $out
+'';
+```
+
+While `testBuildFailure` is designed to keep changes to the original builder's
+environment to a minimum, some small changes are inevitable.
+
+ - The file `$TMPDIR/testBuildFailure.log` is present. It should not be deleted.
+ - `stdout` and `stderr` are a pipe instead of a tty. This could be improved.
+ - One or two extra processes are present in the sandbox during the original
+   builder's execution.
+ - The derivation and output hashes are different, but not unusual.
+ - The derivation includes a dependency on `buildPackages.bash` and
+   `expect-failure.sh`, which is built to include a transitive dependency on
+   `buildPackages.coreutils` and possibly more. These are not added to `PATH`
+   or any other environment variable, so they should be hard to observe.
+
+## `testEqualContents` {#tester-equalContents}
+
+Check that two paths have the same contents.
+
+Example:
+
+```nix
+testers.testEqualContents {
+  assertion = "sed -e performs replacement";
+  expected = writeText "expected" ''
+    foo baz baz
+  '';
+  actual = runCommand "actual" {
+    # not really necessary for a package that's in stdenv
+    nativeBuildInputs = [ gnused ];
+    base = writeText "base" ''
+      foo bar baz
+    '';
+  } ''
+    sed -e 's/bar/baz/g' $base >$out
+  '';
+}
 ```
 
 ## `testEqualDerivation` {#tester-testEqualDerivation}
@@ -42,7 +125,7 @@ Otherwise, the build log explains the difference via `nix-diff`.
 Example:
 
 ```nix
-testEqualDerivation
+testers.testEqualDerivation
   "The hello package must stay the same when enabling checks."
   hello
   (hello.overrideAttrs(o: { doCheck = true; }))
@@ -73,14 +156,34 @@ fixed output derivation.
 Example:
 
 ```nix
-tests.fetchgit = invalidateFetcherByDrvHash fetchgit {
+tests.fetchgit = testers.invalidateFetcherByDrvHash fetchgit {
   name = "nix-source";
   url = "https://github.com/NixOS/nix";
   rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a";
-  sha256 = "sha256-7DszvbCNTjpzGRmpIVAWXk20P0/XTrWZ79KSOGLrUWY=";
+  hash = "sha256-7DszvbCNTjpzGRmpIVAWXk20P0/XTrWZ79KSOGLrUWY=";
 };
 ```
 
+## `runNixOSTest` {#tester-runNixOSTest}
+
+A helper function that behaves exactly like the NixOS `runTest`, except it also assigns this Nixpkgs package set as the `pkgs` of the test and makes the `nixpkgs.*` options read-only.
+
+If your test is part of the Nixpkgs repository, or if you need a more general entrypoint, see ["Calling a test" in the NixOS manual](https://nixos.org/manual/nixos/stable/index.html#sec-calling-nixos-tests).
+
+Example:
+
+```nix
+pkgs.testers.runNixOSTest ({ lib, ... }: {
+  name = "hello";
+  nodes.machine = { pkgs, ... }: {
+    environment.systemPackages = [ pkgs.hello ];
+  };
+  testScript = ''
+    machine.succeed("hello")
+  '';
+})
+```
+
 ## `nixosTest` {#tester-nixosTest}
 
 Run a NixOS VM network test using this evaluation of Nixpkgs.
@@ -95,7 +198,7 @@ letting NixOS invoke Nixpkgs anew.
 If a test machine needs to set NixOS options under `nixpkgs`, it must set only the
 `nixpkgs.pkgs` option.
 
-### Parameter
+### Parameter {#tester-nixosTest-parameter}
 
 A [NixOS VM test network](https://nixos.org/nixos/manual/index.html#sec-nixos-tests), or path to it. Example:
 
@@ -117,7 +220,7 @@ A [NixOS VM test network](https://nixos.org/nixos/manual/index.html#sec-nixos-te
 }
 ```
 
-### Result
+### Result {#tester-nixosTest-result}
 
 A derivation that runs the VM test.
 
diff --git a/nixpkgs/doc/contributing/coding-conventions.chapter.md b/nixpkgs/doc/contributing/coding-conventions.chapter.md
index 6473fa151a43..03cd3dd458c8 100644
--- a/nixpkgs/doc/contributing/coding-conventions.chapter.md
+++ b/nixpkgs/doc/contributing/coding-conventions.chapter.md
@@ -204,13 +204,13 @@ The key words _must_, _must not_, _required_, _shall_, _shall not_, _should_, _s
 
 In Nixpkgs, there are generally three different names associated with a package:
 
-- The `name` attribute of the derivation (excluding the version part). This is what most users see, in particular when using `nix-env`.
+- The `pname` attribute of the derivation. This is what most users see, in particular when using `nix-env`.
 
 - The variable name used for the instantiated package in `all-packages.nix`, and when passing it as a dependency to other functions. Typically this is called the _package attribute name_. This is what Nix expression authors see. It can also be used when installing using `nix-env -iA`.
 
 - The filename for (the directory containing) the Nix expression.
 
-Most of the time, these are the same. For instance, the package `e2fsprogs` has a `name` attribute `"e2fsprogs-version"`, is bound to the variable name `e2fsprogs` in `all-packages.nix`, and the Nix expression is in `pkgs/os-specific/linux/e2fsprogs/default.nix`.
+Most of the time, these are the same. For instance, the package `e2fsprogs` has a `pname` attribute `"e2fsprogs"`, is bound to the variable name `e2fsprogs` in `all-packages.nix`, and the Nix expression is in `pkgs/os-specific/linux/e2fsprogs/default.nix`.
 
 There are a few naming guidelines:
 
@@ -220,7 +220,9 @@ There are a few naming guidelines:
 
 - The `version` attribute _must_ start with a digit, e.g. `"0.3.1rc2"`.
 
-- If a package is not a release but a commit from a repository, then the `version` attribute _must_ be the date of that (fetched) commit. The date _must_ be in `"unstable-YYYY-MM-DD"` format.
+- If a package is a commit from a repository without a version assigned, then the `version` attribute _should_ be the latest upstream version preceding that commit, followed by `-unstable-` and the date of the (fetched) commit. The date _must_ be in `"YYYY-MM-DD"` format.
+
+Example: Given a project had its latest releases `2.2` in November 2021, and `3.0` in January 2022, a commit authored on March 15, 2022 for an upcoming bugfix release `2.2.1` would have `version = "2.2-unstable-2022-03-15"`.
 
 - Dashes in the package `pname` _should_ be preserved in new variable names, rather than converted to underscores or camel cased — e.g., `http-parser` instead of `http_parser` or `httpParser`. The hyphenated style is preferred in all three package names.
 
@@ -260,6 +262,10 @@ When in doubt, consider refactoring the `pkgs/` tree, e.g. creating new categori
 
     - `development/tools/build-managers` (e.g. `gnumake`)
 
+  - **If it’s a _language server_:**
+
+    - `development/tools/language-servers` (e.g. `ccls` or `rnix-lsp`)
+
   - **Else:**
 
     - `development/tools/misc` (e.g. `binutils`)
@@ -426,9 +432,10 @@ In the file `pkgs/top-level/all-packages.nix` you can find fetch helpers, these
 
   ```nix
   src = fetchgit {
+    url = "git@github.com:NixOS/nix.git"
     url = "git://github.com/NixOS/nix.git";
     rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
-    sha256 = "1cw5fszffl5pkpa6s6wjnkiv6lm5k618s32sp60kvmvpy7a2v9kg";
+    hash = "sha256-7D4m+saJjbSFP5hOwpQq2FGR2rr+psQMTcyb1ZvtXsQ=";
   }
   ```
 
@@ -438,7 +445,7 @@ In the file `pkgs/top-level/all-packages.nix` you can find fetch helpers, these
   src = fetchgit {
     url = "https://github.com/NixOS/nix.git";
     rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
-    sha256 = "1cw5fszffl5pkpa6s6wjnkiv6lm5k618s32sp60kvmvpy7a2v9kg";
+    hash = "sha256-7D4m+saJjbSFP5hOwpQq2FGR2rr+psQMTcyb1ZvtXsQ=";
   }
   ```
 
@@ -449,11 +456,14 @@ In the file `pkgs/top-level/all-packages.nix` you can find fetch helpers, these
     owner = "NixOS";
     repo = "nix";
     rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
-    sha256 = "1i2yxndxb6yc9l6c99pypbd92lfq5aac4klq7y2v93c9qvx2cgpc";
+    hash = "ha256-7D4m+saJjbSFP5hOwpQq2FGR2rr+psQMTcyb1ZvtXsQ=";
   }
   ```
 
-Find the value to put as `sha256` by running `nix-shell -p nix-prefetch-github --run "nix-prefetch-github --rev 1f795f9f44607cc5bec70d1300150bfefcef2aae NixOS nix"`. 
+When fetching from GitHub, commits must always be referenced by their full commit hash. This is because GitHub shares commit hashes among all forks and returns `404 Not Found` when a short commit hash is ambiguous. This already happens for some short, 6-character commit hashes in `nixpkgs`.
+Short hashes are also a practical vector for a denial-of-service attack: pushing large amounts of auto-generated commits into forks can make them ambiguous, as was already [demonstrated against GitHub Actions Beta](https://blog.teddykatz.com/2019/11/12/github-actions-dos.html).
+
+Find the value to put as `hash` by running `nix-shell -p nix-prefetch-github --run "nix-prefetch-github --rev 1f795f9f44607cc5bec70d1300150bfefcef2aae NixOS nix"`.
 
 ## Obtaining source hash {#sec-source-hashes}
 
@@ -477,15 +487,23 @@ Preferred source hash type is sha256. There are several ways to get it.
 
 4. Extracting hash from local source tarball can be done with `sha256sum`. Use `nix-prefetch-url file:///path/to/tarball` if you want base32 hash.
 
-5. Fake hash: set fake hash in package expression, perform build and extract correct hash from error Nix prints.
+5. Fake hash: set the hash to one of
+
+   - `""`
+   - `lib.fakeHash`
+   - `lib.fakeSha256`
+   - `lib.fakeSha512`
+
+   in the package expression, attempt the build, and extract the correct hash from the error message Nix prints.
+
+   ::: {.warning}
+   You must use one of these four fake hashes and not some arbitrarily-chosen hash.
 
-    For package updates it is enough to change one symbol to make hash fake. For new packages, you can use `lib.fakeSha256`, `lib.fakeSha512` or any other fake hash.
+   See [](#sec-source-hashes-security).
+   :::
 
     This is last resort method when reconstructing source URL is non-trivial and `nix-prefetch-url -A` isn’t applicable (for example, [one of `kodi` dependencies](https://github.com/NixOS/nixpkgs/blob/d2ab091dd308b99e4912b805a5eb088dd536adb9/pkgs/applications/video/kodi/default.nix#L73)). The easiest way then would be replace hash with a fake one and rebuild. Nix build will fail and error message will contain desired hash.
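+
+   For illustration, a minimal sketch of this workflow (the URL is hypothetical):
+
+   ```nix
+   src = fetchurl {
+     url = "https://example.org/foo-1.0.tar.gz"; # hypothetical
+     # Build once with the fake hash, then copy the real hash from the error message.
+     hash = lib.fakeHash;
+   };
+   ```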
 
-::: {.warning}
-This method has security problems. Check below for details.
-:::
 
 ### Obtaining hashes securely {#sec-source-hashes-security}
 
@@ -497,7 +515,7 @@ Let's say Man-in-the-Middle (MITM) sits close to your network. Then instead of f
 
 - `https://` URLs are secure in methods 1, 2, 3;
 
-- `https://` URLs are not secure in method 5. When obtaining hashes with fake hash method, TLS checks are disabled. So refetch source hash from several different networks to exclude MITM scenario. Alternatively, use fake hash method to make Nix error, but instead of extracting hash from error, extract `https://` URL and prefetch it with method 1.
+- `https://` URLs are secure in method 5 *only if* you use one of the listed fake hashes. If you use any other hash, `fetchurl` will pass `--insecure` to `curl` and may then degrade to HTTP in case of TLS certificate expiration.
 
 ## Patches {#sec-patches}
 
@@ -508,7 +526,7 @@ patches = [
   (fetchpatch {
     name = "fix-check-for-using-shared-freetype-lib.patch";
     url = "http://git.ghostscript.com/?p=ghostpdl.git;a=patch;h=8f5d285";
-    sha256 = "1f0k043rng7f0rfl9hhb89qzvvksqmkrikmm38p61yfx51l325xr";
+    hash = "sha256-uRcxaCjd+WAuGrXOmGfFeu79cUILwkRdBu48mwcBE7g=";
   })
 ];
 ```
@@ -658,3 +676,18 @@ stdenv.mkDerivation {
   ...
 }
 ```
+
+### Import From Derivation {#ssec-import-from-derivation}
+
+Import From Derivation (IFD) is disallowed in Nixpkgs for performance reasons:
+[Hydra] evaluates the entire package set, and sequential builds during evaluation would make evaluation times impractical.
+
+[Hydra]: https://github.com/NixOS/hydra
+
+Import From Derivation can be worked around in some cases by committing generated intermediate files to version control and reading those instead.
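+
+For illustration, a minimal sketch of this workaround (the file name is hypothetical):
+
+```nix
+# `deps.nix` was generated once by a script and committed to the repository,
+# instead of being imported from a derivation at evaluation time.
+import ./deps.nix
+```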
+
+<!-- TODO: remove the following and link to Nix manual once https://github.com/NixOS/nix/pull/7332 is merged -->
+
+See also [NixOS Wiki: Import From Derivation].
+
+[NixOS Wiki: Import From Derivation]: https://nixos.wiki/wiki/Import_From_Derivation
diff --git a/nixpkgs/doc/contributing/contributing-to-documentation.chapter.md b/nixpkgs/doc/contributing/contributing-to-documentation.chapter.md
index db16f13b474b..a732eee4b962 100644
--- a/nixpkgs/doc/contributing/contributing-to-documentation.chapter.md
+++ b/nixpkgs/doc/contributing/contributing-to-documentation.chapter.md
@@ -27,7 +27,7 @@ If the build succeeds, the manual will be in `./result/share/doc/nixpkgs/manual.
 
 As per [RFC 0072](https://github.com/NixOS/rfcs/pull/72), all new documentation content should be written in [CommonMark](https://commonmark.org/) Markdown dialect.
 
-Additional syntax extensions are available, though not all extensions can be used in NixOS option documentation. The following extensions are currently used:
+Additional syntax extensions are available, all of which can be used in NixOS option documentation. The following extensions are currently used:
 
 - []{#ssec-contributing-markup-anchors}
   Explicitly defined **anchors** on headings, to allow linking to sections. These should be always used, to ensure the anchors can be linked even when the heading text changes, and to prevent conflicts between [automatically assigned identifiers](https://github.com/jgm/commonmark-hs/blob/master/commonmark-extensions/test/auto_identifiers.md).
@@ -38,6 +38,10 @@ Additional syntax extensions are available, though not all extensions can be use
   ## Syntax {#sec-contributing-markup}
   ```
 
+  ::: {.note}
+  NixOS option documentation does not support headings in general.
+  :::
+
 - []{#ssec-contributing-markup-anchors-inline}
   **Inline anchors**, which allow linking arbitrary place in the text (e.g. individual list items, sentences…).
 
@@ -53,22 +57,20 @@ Additional syntax extensions are available, though not all extensions can be use
   This syntax is taken from [MyST](https://myst-parser.readthedocs.io/en/latest/using/syntax.html#targets-and-cross-referencing).
 
 - []{#ssec-contributing-markup-inline-roles}
-  If you want to link to a man page, you can use `` {manpage}`nix.conf(5)` ``, which will turn into {manpage}`nix.conf(5)`. The references will turn into links when a mapping exists in {file}`doc/build-aux/pandoc-filters/link-unix-man-references.lua`.
+  If you want to link to a man page, you can use `` {manpage}`nix.conf(5)` ``, which will turn into {manpage}`nix.conf(5)`. The references will turn into links when a mapping exists in {file}`doc/manpage-urls.json`.
 
   A few markups for other kinds of literals are also available:
 
   - `` {command}`rm -rfi` `` turns into {command}`rm -rfi`
-  - `` {option}`networking.useDHCP` `` turns into {option}`networking.useDHCP`
+  - `` {env}`XDG_DATA_DIRS` `` turns into {env}`XDG_DATA_DIRS`
   - `` {file}`/etc/passwd` `` turns into {file}`/etc/passwd`
+  - `` {option}`networking.useDHCP` `` turns into {option}`networking.useDHCP`
+  - `` {var}`/etc/passwd` `` turns into {var}`/etc/passwd`
 
   These literal kinds are used mostly in NixOS option documentation.
 
   This syntax is taken from [MyST](https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html#roles-an-in-line-extension-point). Though, the feature originates from [reStructuredText](https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#role-manpage) with slightly different syntax.
 
-  ::: {.note}
-  Inline roles are available for option documentation.
-  :::
-
 - []{#ssec-contributing-markup-admonitions}
   **Admonitions**, set off from the text to bring attention to something.
 
@@ -94,10 +96,6 @@ Additional syntax extensions are available, though not all extensions can be use
     - [`tip`](https://tdg.docbook.org/tdg/5.0/tip.html)
     - [`warning`](https://tdg.docbook.org/tdg/5.0/warning.html)
 
-  ::: {.note}
-  Admonitions are available for option documentation.
-  :::
-
 - []{#ssec-contributing-markup-definition-lists}
   [**Definition lists**](https://github.com/jgm/commonmark-hs/blob/master/commonmark-extensions/test/definition_lists.md), for defining a group of terms:
 
diff --git a/nixpkgs/doc/contributing/quick-start.chapter.md b/nixpkgs/doc/contributing/quick-start.chapter.md
index 96b30d3822c1..e6bb5f2b0b60 100644
--- a/nixpkgs/doc/contributing/quick-start.chapter.md
+++ b/nixpkgs/doc/contributing/quick-start.chapter.md
@@ -34,7 +34,7 @@ To add a package to Nixpkgs:
 
    - Apache HTTPD: [`pkgs/servers/http/apache-httpd/2.4.nix`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/http/apache-httpd/2.4.nix). A bunch of optional features, variable substitutions in the configure flags, a post-install hook, and miscellaneous hackery.
 
-   - Thunderbird: [`pkgs/applications/networking/mailreaders/thunderbird/default.nix`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/networking/mailreaders/thunderbird/default.nix). Lots of dependencies.
+   - buildMozillaMach: [`pkgs/applications/networking/browsers/firefox/common.nix`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/networking/browsers/firefox/common.nix). A reusable build function for Firefox, Thunderbird and Librewolf.
 
    - JDiskReport, a Java utility: [`pkgs/tools/misc/jdiskreport/default.nix`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/misc/jdiskreport/default.nix). Nixpkgs doesn’t have a decent `stdenv` for Java yet so this is pretty ad-hoc.
 
diff --git a/nixpkgs/doc/contributing/reviewing-contributions.chapter.md b/nixpkgs/doc/contributing/reviewing-contributions.chapter.md
index 4452695a6f38..b4caf11f6d4b 100644
--- a/nixpkgs/doc/contributing/reviewing-contributions.chapter.md
+++ b/nixpkgs/doc/contributing/reviewing-contributions.chapter.md
@@ -12,7 +12,7 @@ When reviewing a pull request, please always be nice and polite. Controversial c
 
 GitHub provides reactions as a simple and quick way to provide feedback to pull requests or any comments. The thumb-down reaction should be used with care and if possible accompanied with some explanation so the submitter has directions to improve their contribution.
 
-pull request reviews should include a list of what has been reviewed in a comment, so other reviewers and mergers can know the state of the review.
+Pull request reviews should include a list of what has been reviewed in a comment, so other reviewers and mergers can know the state of the review.
 
 All the review template samples provided in this section are generic and meant as examples. Their usage is optional and the reviewer is free to adapt them to their liking.
 
@@ -185,7 +185,7 @@ Sample template for a new module review is provided below.
 ##### Comments
 ```
 
-## Individual maintainer list {#reviewing-contributions-indvidual-maintainer-list}
+## Individual maintainer list {#reviewing-contributions-individual-maintainer-list}
 
 When adding users to `maintainers/maintainer-list.nix`, the following
 checks should be performed:
@@ -201,7 +201,7 @@ checks should be performed:
     them to either recommit using that key or to remove their key
     information.
 
-    Given a maintainter entry like this:
+    Given a maintainer entry like this:
 
     ``` nix
     {
@@ -302,6 +302,12 @@ Container system, boot system and library changes are some examples of the pull
 
 It is possible for community members that have enough knowledge and experience on a special topic to contribute by merging pull requests.
 
+In case the PR is stuck waiting for the original author to apply a trivial
+change (a typo, capitalisation change, etc.) and the author allowed the members
+to modify the PR, consider applying it yourself (or committing the existing
+review suggestion). Pay extra attention to make sure the addition doesn't go
+against the idea of the original PR and would not be opposed by the author.
+
 <!--
 The following paragraphs about how to deal with unactive contributors is just a proposition and should be modified to what the community agrees to be the right policy.
 
diff --git a/nixpkgs/doc/contributing/submitting-changes.chapter.md b/nixpkgs/doc/contributing/submitting-changes.chapter.md
index d1aa701f0b35..30fe4fa47d0d 100644
--- a/nixpkgs/doc/contributing/submitting-changes.chapter.md
+++ b/nixpkgs/doc/contributing/submitting-changes.chapter.md
@@ -76,7 +76,7 @@ Security fixes are submitted in the same way as other changes and thus the same
   (fetchpatch {
     name = "CVE-2019-11068.patch";
     url = "https://gitlab.gnome.org/GNOME/libxslt/commit/e03553605b45c88f0b4b2980adfbbb8f6fca2fd6.patch";
-    sha256 = "0pkpb4837km15zgg6h57bncp66d5lwrlvkr73h0lanywq7zrwhj8";
+    hash = "sha256-SEKe/8HcW0UBHCfPTTOnpRlzmV2nQPPeL6HOMxBZd14=";
   })
   ```
 
@@ -199,7 +199,7 @@ It’s important to test any executables generated by a build when you change or
 
 ### Meets Nixpkgs contribution standards {#submitting-changes-contribution-standards}
 
-The last checkbox is fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md). The contributing document has detailed information on standards the Nix community has for commit messages, reviews, licensing of contributions you make to the project, etc\... Everyone should read and understand the standards the community has for contributing before submitting a pull request.
+The last checkbox is "Fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md)". The contributing document has detailed information on standards the Nix community has for commit messages, reviews, licensing of contributions you make to the project, etc. Everyone should read and understand the standards the community has for contributing before submitting a pull request.
 
 ## Hotfixing pull requests {#submitting-changes-hotfixing-pull-requests}
 
@@ -244,12 +244,16 @@ The `master` branch is the main development branch. It should only see non-break
 
 The `staging` branch is a development branch where mass-rebuilds go. Mass rebuilds are commits that cause rebuilds for many packages, like more than 500 (or perhaps, if it's 'light' packages, 1000). It should only see non-breaking mass-rebuild commits. That means it is not to be used for testing, and changes must have been well tested already. If the branch is already in a broken state, please refrain from adding extra new breakages.
 
+During the process of releasing a new NixOS version, this branch or the release-critical packages can be restricted to non-breaking changes.
+
 ### Staging-next branch {#submitting-changes-staging-next-branch}
 
-The `staging-next` branch is for stabilizing mass-rebuilds submitted to the `staging` branch prior to merging them into `master`. Mass-rebuilds must go via the `staging` branch. It must only see non-breaking commits that are fixing issues blocking it from being merged into the `master ` branch.
+The `staging-next` branch is for stabilizing mass-rebuilds submitted to the `staging` branch prior to merging them into `master`. Mass-rebuilds must go via the `staging` branch. It must only see non-breaking commits that are fixing issues blocking it from being merged into the `master` branch.
 
 If the branch is already in a broken state, please refrain from adding extra new breakages. Stabilize it for a few days and then merge into master.
 
+During the process of releasing a new NixOS version, this branch or the release-critical packages can be restricted to non-breaking changes.
+
 ### Stable release branches {#submitting-changes-stable-release-branches}
 
 The same staging workflow applies to stable release branches, but the main branch is called `release-*` instead of `master`.
@@ -286,7 +290,7 @@ Other examples of reasons are:
 - The previous download links were all broken
 - Crash when starting on some X11 systems
 
-#### Acceptable backport criteria
+#### Acceptable backport criteria {#acceptable-backport-criteria}
 
 The stable branch does have some changes which cannot be backported. Most notable are breaking changes. The desire is to have stable users be uninterrupted when updating packages.
 
diff --git a/nixpkgs/doc/default.nix b/nixpkgs/doc/default.nix
index ac382ec8519c..4f55c95a04c1 100644
--- a/nixpkgs/doc/default.nix
+++ b/nixpkgs/doc/default.nix
@@ -1,6 +1,5 @@
 { pkgs ? (import ./.. { }), nixpkgs ? { }}:
 let
-  lib = pkgs.lib;
   doc-support = import ./doc-support { inherit pkgs nixpkgs; };
 in pkgs.stdenv.mkDerivation {
   name = "nixpkgs-manual";
@@ -15,12 +14,16 @@ in pkgs.stdenv.mkDerivation {
     xmlformat
   ];
 
-  src = lib.cleanSource ./.;
+  src = pkgs.nix-gitignore.gitignoreSource [] ./.;
 
   postPatch = ''
     ln -s ${doc-support} ./doc-support/result
   '';
 
+  preBuild = ''
+    make -j$NIX_BUILD_CORES render-md
+  '';
+
   installPhase = ''
     dest="$out/share/doc/nixpkgs"
     mkdir -p "$(dirname "$dest")"
@@ -36,4 +39,5 @@ in pkgs.stdenv.mkDerivation {
 
   # Environment variables
   PANDOC_LUA_FILTERS_DIR = "${pkgs.pandoc-lua-filters}/share/pandoc/filters";
+  PANDOC_LINK_MANPAGES_FILTER = import build-aux/pandoc-filters/link-manpages.nix { inherit pkgs; };
 }
diff --git a/nixpkgs/doc/doc-support/default.nix b/nixpkgs/doc/doc-support/default.nix
index 429c7a5fbe80..cfa7cbdc8283 100644
--- a/nixpkgs/doc/doc-support/default.nix
+++ b/nixpkgs/doc/doc-support/default.nix
@@ -3,8 +3,23 @@ let
   inherit (pkgs) lib;
   inherit (lib) hasPrefix removePrefix;
 
-  locationsXml = import ./lib-function-locations.nix { inherit pkgs nixpkgs; };
-  functionDocs = import ./lib-function-docs.nix { inherit locationsXml pkgs; };
+  libsets = [
+    { name = "asserts"; description = "assertion functions"; }
+    { name = "attrsets"; description = "attribute set functions"; }
+    { name = "strings"; description = "string manipulation functions"; }
+    { name = "versions"; description = "version string functions"; }
+    { name = "trivial"; description = "miscellaneous functions"; }
+    { name = "lists"; description = "list manipulation functions"; }
+    { name = "debug"; description = "debugging functions"; }
+    { name = "options"; description = "NixOS / nixpkgs option handling"; }
+    { name = "path"; description = "path functions"; }
+    { name = "filesystem"; description = "filesystem functions"; }
+    { name = "sources"; description = "source filtering functions"; }
+    { name = "cli"; description = "command-line serialization functions"; }
+  ];
+
+  locationsXml = import ./lib-function-locations.nix { inherit pkgs nixpkgs libsets; };
+  functionDocs = import ./lib-function-docs.nix { inherit locationsXml pkgs libsets; };
   version = pkgs.lib.version;
 
   epub-xsl = pkgs.writeText "epub.xsl" ''
@@ -30,7 +45,10 @@ let
   # NB: This file describes the Nixpkgs manual, which happens to use module
   #     docs infra originally developed for NixOS.
   optionsDoc = pkgs.nixosOptionsDoc {
-    inherit (pkgs.lib.evalModules { modules = [ ../../pkgs/top-level/config.nix ]; }) options;
+    inherit (pkgs.lib.evalModules {
+      modules = [ ../../pkgs/top-level/config.nix ];
+      class = "nixpkgsConfig";
+    }) options;
     documentType = "none";
     transformOptions = opt:
       opt // {
@@ -60,7 +78,7 @@ in pkgs.runCommand "doc-support" {}
     ln -s ${epub-xsl} ./epub.xsl
     ln -s ${xhtml-xsl} ./xhtml.xsl
 
-    ln -s ${../../nixos/doc/xmlformat.conf} ./xmlformat.conf
+    ln -s ${./xmlformat.conf} ./xmlformat.conf
     ln -s ${pkgs.documentation-highlighter} ./highlightjs
 
     echo -n "${version}" > ./version
diff --git a/nixpkgs/doc/doc-support/lib-function-docs.nix b/nixpkgs/doc/doc-support/lib-function-docs.nix
index f6d613cac0b6..cf218fa70401 100644
--- a/nixpkgs/doc/doc-support/lib-function-docs.nix
+++ b/nixpkgs/doc/doc-support/lib-function-docs.nix
@@ -1,27 +1,36 @@
-# Generates the documentation for library functons via nixdoc. To add
-# another library function file to this list, the include list in the
-# file `doc/functions/library.xml` must also be updated.
+# Generates the documentation for library functions via nixdoc.
 
-{ pkgs ? import ./.. {}, locationsXml }:
+{ pkgs, locationsXml, libsets }:
 
 with pkgs; stdenv.mkDerivation {
   name = "nixpkgs-lib-docs";
-  src = ./../../lib;
+  src = ../../lib;
 
   buildInputs = [ nixdoc ];
   installPhase = ''
     function docgen {
-      nixdoc -c "$1" -d "$2" -f "../lib/$1.nix"  > "$out/$1.xml"
+      # TODO: wrap lib.$1 in <literal>, make nixdoc not escape it
+      if [[ -e "../lib/$1.nix" ]]; then
+        nixdoc -c "$1" -d "lib.$1: $2" -f "$1.nix" > "$out/$1.xml"
+      else
+        nixdoc -c "$1" -d "lib.$1: $2" -f "$1/default.nix" > "$out/$1.xml"
+      fi
+      echo "<xi:include href='$1.xml' />" >> "$out/index.xml"
     }
 
-    mkdir -p $out
-    ln -s ${locationsXml} $out/locations.xml
+    mkdir -p "$out"
+
+    cat > "$out/index.xml" << 'EOF'
+    <?xml version="1.0" encoding="utf-8"?>
+    <root xmlns:xi="http://www.w3.org/2001/XInclude">
+    EOF
+
+    ${lib.concatMapStrings ({ name, description }: ''
+      docgen ${name} ${lib.escapeShellArg description}
+    '') libsets}
 
-    docgen strings 'String manipulation functions'
-    docgen trivial 'Miscellaneous functions'
-    docgen lists 'List manipulation functions'
-    docgen debug 'Debugging functions'
-    docgen options 'NixOS / nixpkgs option handling'
-    docgen sources 'Source filtering functions'
+    echo "</root>" >> "$out/index.xml"
+
+    ln -s ${locationsXml} $out/locations.xml
   '';
 }
diff --git a/nixpkgs/doc/doc-support/lib-function-locations.nix b/nixpkgs/doc/doc-support/lib-function-locations.nix
index 68edd2709854..1ee59648330a 100644
--- a/nixpkgs/doc/doc-support/lib-function-locations.nix
+++ b/nixpkgs/doc/doc-support/lib-function-locations.nix
@@ -1,24 +1,24 @@
-{ pkgs ? (import ./.. { }), nixpkgs ? { }}:
+{ pkgs, nixpkgs ? { }, libsets }:
 let
-  revision = pkgs.lib.trivial.revisionWithDefault (nixpkgs.revision or "master");
+  revision = pkgs.lib.trivial.revisionWithDefault (nixpkgs.rev or "master");
 
-  libDefPos = set:
-    builtins.map
-      (name: {
-        name = name;
+  libDefPos = prefix: set:
+    builtins.concatMap
+      (name: [{
+        name = builtins.concatStringsSep "." (prefix ++ [name]);
         location = builtins.unsafeGetAttrPos name set;
-      })
-      (builtins.attrNames set);
+      }] ++ nixpkgsLib.optionals
+        (builtins.length prefix == 0 && builtins.isAttrs set.${name})
+        (libDefPos (prefix ++ [name]) set.${name})
+      ) (builtins.attrNames set);
 
   libset = toplib:
     builtins.map
       (subsetname: {
         subsetname = subsetname;
-        functions = libDefPos toplib.${subsetname};
+        functions = libDefPos [] toplib.${subsetname};
       })
-      (builtins.filter
-        (name: builtins.isAttrs toplib.${name})
-        (builtins.attrNames toplib));
+      (builtins.map (x: x.name) libsets);
 
   nixpkgsLib = pkgs.lib;
 
diff --git a/nixpkgs/doc/doc-support/parameters.xml b/nixpkgs/doc/doc-support/parameters.xml
index 8b413dcd337a..5b39d2f7f1a5 100644
--- a/nixpkgs/doc/doc-support/parameters.xml
+++ b/nixpkgs/doc/doc-support/parameters.xml
@@ -2,12 +2,16 @@
 <xsl:stylesheet
     xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
     version="1.0">
- <xsl:param name="section.autolabel" select="1" />
- <xsl:param name="section.label.includes.component.label" select="1" />
+ <xsl:param name="chapter.autolabel" select="0" />
+ <xsl:param name="part.autolabel" select="0" />
+ <xsl:param name="preface.autolabel" select="0" />
+ <xsl:param name="reference.autolabel" select="0" />
+ <xsl:param name="section.autolabel" select="0" />
  <xsl:param name="html.stylesheet" select="'style.css overrides.css highlightjs/mono-blue.css'" />
  <xsl:param name="html.script" select="'./highlightjs/highlight.pack.js ./highlightjs/loader.js'" />
- <xsl:param name="xref.with.number.and.title" select="1" />
+ <xsl:param name="xref.with.number.and.title" select="0" />
  <xsl:param name="use.id.as.filename" select="1" />
+ <xsl:param name="generate.section.toc.level" select="1" />
  <xsl:param name="toc.section.depth" select="0" />
  <xsl:param name="admon.style" select="''" />
  <xsl:param name="callout.graphics.extension" select="'.svg'" />
diff --git a/nixpkgs/doc/doc-support/xmlformat.conf b/nixpkgs/doc/doc-support/xmlformat.conf
new file mode 100644
index 000000000000..c3f39c7fd81b
--- /dev/null
+++ b/nixpkgs/doc/doc-support/xmlformat.conf
@@ -0,0 +1,72 @@
+#
+# DocBook Configuration file for "xmlformat"
+# see http://www.kitebird.com/software/xmlformat/
+# 10 Sept. 2004
+#
+
+# Only block elements
+ackno address appendix article biblioentry bibliography bibliomixed \
+biblioset blockquote book bridgehead callout calloutlist caption caution \
+chapter chapterinfo classsynopsis cmdsynopsis colophon constraintdef \
+constructorsynopsis dedication destructorsynopsis entry epigraph equation example \
+figure formalpara funcsynopsis glossary glossdef glossdiv glossentry glosslist \
+glosssee glossseealso graphic graphicco highlights imageobjectco important \
+index indexdiv indexentry indexinfo info informalequation informalexample \
+informalfigure informaltable legalnotice literallayout lot lotentry mediaobject \
+mediaobjectco msgmain msgset note orderedlist para part preface primaryie \
+procedure qandadiv qandaentry qandaset refentry refentrytitle reference \
+refnamediv refsect1 refsect2 refsect3 refsection revhistory screenshot sect1 \
+sect2 sect3 sect4 sect5 section seglistitem set setindex sidebar simpara \
+simplesect step substeps synopfragment synopsis table term title \
+toc variablelist varlistentry warning itemizedlist listitem \
+footnote colspec partintro row simplelist subtitle tbody tgroup thead tip
+  format      block
+  normalize   no
+
+
+#appendix bibliography chapter glossary preface reference
+#  element-break   3
+
+sect1 section
+  element-break   2
+
+
+#
+para abstract
+  format       block
+  entry-break  1
+  exit-break   1
+  normalize    yes
+
+title
+  format       block
+  normalize = yes
+  entry-break = 0
+  exit-break = 0
+
+# Inline elements
+abbrev accel acronym action application citation citebiblioid citerefentry citetitle \
+classname co code command computeroutput constant country database date email emphasis \
+envar errorcode errorname errortext errortype exceptionname fax filename \
+firstname firstterm footnoteref foreignphrase funcdef funcparams function \
+glossterm group guibutton guiicon guilabel guimenu guimenuitem guisubmenu \
+hardware holder honorific indexterm inlineequation inlinegraphic inlinemediaobject \
+interface interfacename \
+keycap keycode keycombo keysym lineage link literal manvolnum markup medialabel \
+menuchoice methodname methodparam modifier mousebutton olink ooclass ooexception \
+oointerface option optional otheraddr othername package paramdef parameter personname \
+phrase pob postcode productname prompt property quote refpurpose replaceable \
+returnvalue revnumber sgmltag state street structfield structname subscript \
+superscript surname symbol systemitem token trademark type ulink userinput \
+uri varargs varname void wordasword xref year mathphrase member tag
+  format       inline
+
+programlisting screen
+  format       verbatim
+  entry-break = 0
+  exit-break = 0
+
+# This is needed so that the spacing inside those tags is kept.
+term cmdsynopsis arg
+  normalize yes
+  format    block
diff --git a/nixpkgs/doc/functions/generators.section.md b/nixpkgs/doc/functions/generators.section.md
index d54e5027c799..8b3ae6843a22 100644
--- a/nixpkgs/doc/functions/generators.section.md
+++ b/nixpkgs/doc/functions/generators.section.md
@@ -16,7 +16,7 @@ let
              if v == true then ''"yes"''
         else if v == false then ''"no"''
         else if isString v then ''"${v}"''
-        # and delegats all other values to the default generator
+        # and delegates all other values to the default generator
         else generators.mkValueStringDefault {} v;
     } ":";
   };
diff --git a/nixpkgs/doc/functions/library.xml b/nixpkgs/doc/functions/library.xml
index 21bcf5b88c9d..788ea0b94f1f 100644
--- a/nixpkgs/doc/functions/library.xml
+++ b/nixpkgs/doc/functions/library.xml
@@ -8,23 +8,7 @@
   Nixpkgs provides a standard library at <varname>pkgs.lib</varname>, or through <code>import &lt;nixpkgs/lib&gt;</code>.
  </para>
 
- <xi:include href="./library/asserts.xml" />
-
- <xi:include href="./library/attrsets.xml" />
-
-<!-- These docs are generated via nixdoc. To add another generated
-      library function file to this list, the file
-      `lib-function-docs.nix` must also be updated. -->
-
- <xi:include href="./library/generated/strings.xml" />
-
- <xi:include href="./library/generated/trivial.xml" />
-
- <xi:include href="./library/generated/lists.xml" />
-
- <xi:include href="./library/generated/debug.xml" />
-
- <xi:include href="./library/generated/options.xml" />
-
- <xi:include href="./library/generated/sources.xml" />
+ <!-- The index must have a root element to declare namespaces, but we
+      don't want to include it, so we select all of its children. -->
+ <xi:include href="./library/generated/index.xml" xpointer="xpointer(/root/*)" />
 </section>
diff --git a/nixpkgs/doc/functions/library/.gitkeep b/nixpkgs/doc/functions/library/.gitkeep
new file mode 100644
index 000000000000..e69de29bb2d1
--- /dev/null
+++ b/nixpkgs/doc/functions/library/.gitkeep
diff --git a/nixpkgs/doc/functions/library/asserts.xml b/nixpkgs/doc/functions/library/asserts.xml
deleted file mode 100644
index 7c94222ef139..000000000000
--- a/nixpkgs/doc/functions/library/asserts.xml
+++ /dev/null
@@ -1,112 +0,0 @@
-<section xmlns="http://docbook.org/ns/docbook"
-         xmlns:xlink="http://www.w3.org/1999/xlink"
-         xmlns:xi="http://www.w3.org/2001/XInclude"
-         xml:id="sec-functions-library-asserts">
- <title>Assert functions</title>
-
- <section xml:id="function-library-lib.asserts.assertMsg">
-  <title><function>lib.asserts.assertMsg</function></title>
-
-  <subtitle><literal>assertMsg :: Bool -> String -> Bool</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.asserts.assertMsg" />
-
-  <para>
-   Print a trace message if <literal>pred</literal> is false.
-  </para>
-
-  <para>
-   Intended to be used to augment asserts with helpful error messages.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>pred</varname>
-    </term>
-    <listitem>
-     <para>
-      Condition under which the <varname>msg</varname> should <emphasis>not</emphasis> be printed.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>msg</varname>
-    </term>
-    <listitem>
-     <para>
-      Message to print.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.asserts.assertMsg-example-false">
-   <title>Printing when the predicate is false</title>
-<programlisting><![CDATA[
-assert lib.asserts.assertMsg ("foo" == "bar") "foo is not bar, silly"
-stderr> trace: foo is not bar, silly
-stderr> assert failed
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.asserts.assertOneOf">
-  <title><function>lib.asserts.assertOneOf</function></title>
-
-  <subtitle><literal>assertOneOf :: String -> String ->
-      StringList -> Bool</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.asserts.assertOneOf" />
-
-  <para>
-   Specialized <function>asserts.assertMsg</function> for checking if <varname>val</varname> is one of the elements of <varname>xs</varname>. Useful for checking enums.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>name</varname>
-    </term>
-    <listitem>
-     <para>
-      The name of the variable the user entered <varname>val</varname> into, for inclusion in the error message.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>val</varname>
-    </term>
-    <listitem>
-     <para>
-      The value of what the user provided, to be compared against the values in <varname>xs</varname>.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>xs</varname>
-    </term>
-    <listitem>
-     <para>
-      The list of valid values.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.asserts.assertOneOf-example">
-   <title>Ensuring a user provided a possible value</title>
-<programlisting><![CDATA[
-let sslLibrary = "bearssl";
-in lib.asserts.assertOneOf "sslLibrary" sslLibrary [ "openssl" "libressl" ];
-=> false
-stderr> trace: sslLibrary must be one of "openssl", "libressl", but is: "bearssl"
-        ]]></programlisting>
-  </example>
- </section>
-</section>
diff --git a/nixpkgs/doc/functions/library/attrsets.xml b/nixpkgs/doc/functions/library/attrsets.xml
deleted file mode 100644
index 052bfa1f6ae3..000000000000
--- a/nixpkgs/doc/functions/library/attrsets.xml
+++ /dev/null
@@ -1,1751 +0,0 @@
-<section xmlns="http://docbook.org/ns/docbook"
-         xmlns:xlink="http://www.w3.org/1999/xlink"
-         xmlns:xi="http://www.w3.org/2001/XInclude"
-         xml:id="sec-functions-library-attrset">
- <title>Attribute-Set Functions</title>
-
- <section xml:id="function-library-lib.attrsets.attrByPath">
-  <title><function>lib.attrset.attrByPath</function></title>
-
-  <subtitle><literal>attrByPath :: [String] -> Any -> AttrSet -> Any</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.attrByPath" />
-
-  <para>
-   Return an attribute from within nested attribute sets.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>attrPath</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of strings representing the path through the nested attribute set <varname>set</varname>.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>default</varname>
-    </term>
-    <listitem>
-     <para>
-      Default value if <varname>attrPath</varname> does not resolve to an existing value.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The nested attributeset to select values from.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrset.attrByPath-example-value-exists">
-   <title>Extracting a value from a nested attribute set</title>
-<programlisting><![CDATA[
-let set = { a = { b = 3; }; };
-in lib.attrsets.attrByPath [ "a" "b" ] 0 set
-=> 3
-]]></programlisting>
-  </example>
-
-  <example xml:id="function-library-lib.attrset.attrByPath-example-default-value">
-   <title>No value at the path, instead using the default</title>
-<programlisting><![CDATA[
-lib.attrsets.attrByPath [ "a" "b" ] 0 {}
-=> 0
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.hasAttrByPath">
-  <title><function>lib.attrsets.hasAttrByPath</function></title>
-
-  <subtitle><literal>hasAttrByPath :: [String] -> AttrSet -> Bool</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.hasAttrByPath" />
-
-  <para>
-   Determine if an attribute exists within a nested attribute set.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>attrPath</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of strings representing the path through the nested attribute set <varname>set</varname>.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The nested attributeset to check.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.hasAttrByPath-example">
-   <title>A nested value does exist inside a set</title>
-<programlisting><![CDATA[
-lib.attrsets.hasAttrByPath
-  [ "a" "b" "c" "d" ]
-  { a = { b = { c = { d = 123; }; }; }; }
-=> true
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.setAttrByPath">
-  <title><function>lib.attrsets.setAttrByPath</function></title>
-
-  <subtitle><literal>setAttrByPath :: [String] -> Any -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.setAttrByPath" />
-
-  <para>
-   Create a new attribute set with <varname>value</varname> set at the nested attribute location specified in <varname>attrPath</varname>.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>attrPath</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of strings representing the path through the nested attribute set.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>value</varname>
-    </term>
-    <listitem>
-     <para>
-      The value to set at the location described by <varname>attrPath</varname>.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.setAttrByPath-example">
-   <title>Creating a new nested attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.setAttrByPath [ "a" "b" ] 3
-=> { a = { b = 3; }; }
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.getAttrFromPath">
-  <title><function>lib.attrsets.getAttrFromPath</function></title>
-
-  <subtitle><literal>getAttrFromPath :: [String] -> AttrSet -> Value</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.getAttrFromPath" />
-
-  <para>
-   Like <xref linkend="function-library-lib.attrsets.attrByPath" /> except without a default, and it will throw if the value doesn't exist.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>attrPath</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of strings representing the path through the nested attribute set <varname>set</varname>.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The nested attribute set to find the value in.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.getAttrPath-example-success">
-   <title>Succesfully getting a value from an attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.getAttrFromPath [ "a" "b" ] { a = { b = 3; }; }
-=> 3
-]]></programlisting>
-  </example>
-
-  <example xml:id="function-library-lib.attrsets.getAttrPath-example-throw">
-   <title>Throwing after failing to get a value from an attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.getAttrFromPath [ "x" "y" ] { }
-=> error: cannot find attribute `x.y'
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.attrVals">
-  <title><function>lib.attrsets.attrVals</function></title>
-
-  <subtitle><literal>attrVals :: [String] -> AttrSet -> [Any]</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.attrVals" />
-
-  <para>
-   Return the specified attributes from a set. All values must exist.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>nameList</varname>
-    </term>
-    <listitem>
-     <para>
-      The list of attributes to fetch from <varname>set</varname>. Each attribute name must exist on the attrbitue set.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The set to get attribute values from.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.attrVals-example-success">
-   <title>Getting several values from an attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.attrVals [ "a" "b" "c" ] { a = 1; b = 2; c = 3; }
-=> [ 1 2 3 ]
-]]></programlisting>
-  </example>
-
-  <example xml:id="function-library-lib.attrsets.attrVals-failure">
-   <title>Getting missing values from an attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.attrVals [ "d" ] { }
-error: attribute 'd' missing
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.attrValues">
-  <title><function>lib.attrsets.attrValues</function></title>
-
-  <subtitle><literal>attrValues :: AttrSet -> [Any]</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.attrValues" />
-
-  <para>
-   Get all the attribute values from an attribute set.
-  </para>
-
-  <para>
-   Provides a backwards-compatible interface of <function>builtins.attrValues</function> for Nix version older than 1.8.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>attrs</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.attrValues-example">
-   <title></title>
-<programlisting><![CDATA[
-lib.attrsets.attrValues { a = 1; b = 2; c = 3; }
-=> [ 1 2 3 ]
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.catAttrs">
-  <title><function>lib.attrsets.catAttrs</function></title>
-
-  <subtitle><literal>catAttrs :: String -> [AttrSet] -> [Any]</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.catAttrs" />
-
-  <para>
-   Collect each attribute named `attr' from the list of attribute sets, <varname>sets</varname>. Sets that don't contain the named attribute are ignored.
-  </para>
-
-  <para>
-   Provides a backwards-compatible interface of <function>builtins.catAttrs</function> for Nix version older than 1.9.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>attr</varname>
-    </term>
-    <listitem>
-     <para>
-      Attribute name to select from each attribute set in <varname>sets</varname>.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>sets</varname>
-    </term>
-    <listitem>
-     <para>
-      The list of attribute sets to select <varname>attr</varname> from.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.catAttrs-example">
-   <title>Collect an attribute from a list of attribute sets.</title>
-   <para>
-    Attribute sets which don't have the attribute are ignored.
-   </para>
-<programlisting><![CDATA[
-catAttrs "a" [{a = 1;} {b = 0;} {a = 2;}]
-=> [ 1 2 ]
-      ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.filterAttrs">
-  <title><function>lib.attrsets.filterAttrs</function></title>
-
-  <subtitle><literal>filterAttrs :: (String -> Any -> Bool) -> AttrSet -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.filterAttrs" />
-
-  <para>
-   Filter an attribute set by removing all attributes for which the given predicate return false.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>pred</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>String -> Any -> Bool</literal>
-     </para>
-     <para>
-      Predicate which returns true to include an attribute, or returns false to exclude it.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's name
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>value</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's value
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-     <para>
-      Returns <literal>true</literal> to include the attribute, <literal>false</literal> to exclude the attribute.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set to filter
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.filterAttrs-example">
-   <title>Filtering an attributeset</title>
-<programlisting><![CDATA[
-filterAttrs (n: v: n == "foo") { foo = 1; bar = 2; }
-=> { foo = 1; }
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.filterAttrsRecursive">
-  <title><function>lib.attrsets.filterAttrsRecursive</function></title>
-
-  <subtitle><literal>filterAttrsRecursive :: (String -> Any -> Bool) -> AttrSet -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.filterAttrsRecursive" />
-
-  <para>
-   Filter an attribute set recursively by removing all attributes for which the given predicate return false.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>pred</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>String -> Any -> Bool</literal>
-     </para>
-     <para>
-      Predicate which returns true to include an attribute, or returns false to exclude it.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's name
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>value</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's value
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-     <para>
-      Returns <literal>true</literal> to include the attribute, <literal>false</literal> to exclude the attribute.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set to filter
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.filterAttrsRecursive-example">
-   <title>Recursively filtering an attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.filterAttrsRecursive
-  (n: v: v != null)
-  {
-    levelA = {
-      example = "hi";
-      levelB = {
-        hello = "there";
-        this-one-is-present = {
-          this-is-excluded = null;
-        };
-      };
-      this-one-is-also-excluded = null;
-    };
-    also-excluded = null;
-  }
-=> {
-     levelA = {
-       example = "hi";
-       levelB = {
-         hello = "there";
-         this-one-is-present = { };
-       };
-     };
-   }
-     ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.foldAttrs">
-  <title><function>lib.attrsets.foldAttrs</function></title>
-
-  <subtitle><literal>foldAttrs :: (Any -> Any -> Any) -> Any -> [AttrSets] -> Any</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.foldAttrs" />
-
-  <para>
-   Apply fold function to values grouped by key.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>op</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>Any -> Any -> Any</literal>
-     </para>
-     <para>
-      Given a value <varname>val</varname> and a collector <varname>col</varname>, combine the two.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>val</varname>
-       </term>
-       <listitem>
-        <para>
-         An attribute's value
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>col</varname>
-       </term>
-       <listitem>
-<!-- TODO: make this not bad, use more fold-ey terms -->
-        <para>
-         The result of previous <function>op</function> calls with other values and <function>nul</function>.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>nul</varname>
-    </term>
-    <listitem>
-     <para>
-      The null-value, the starting value.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>list_of_attrs</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of attribute sets to fold together by key.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.foldAttrs-example">
-   <title>Combining an attribute of lists in to one attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.foldAttrs
-  (n: a: [n] ++ a) []
-  [
-    { a = 2; b = 7; }
-    { a = 3; }
-    { b = 6; }
-  ]
-=> { a = [ 2 3 ]; b = [ 7 6 ]; }
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.collect">
-  <title><function>lib.attrsets.collect</function></title>
-
-  <subtitle><literal>collect :: (Any -> Bool) -> AttrSet -> [Any]</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.collect" />
-
-  <para>
-   Recursively collect sets that verify a given predicate named <varname>pred</varname> from the set <varname>attrs</varname>. The recursion stops when <varname>pred</varname> returns <literal>true</literal>.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>pred</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>Any -> Bool</literal>
-     </para>
-     <para>
-      Given an attribute's value, determine if recursion should stop.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>value</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute set value.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>attrs</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set to recursively collect.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.collect-example-lists">
-   <title>Collecting all lists from an attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.collect isList { a = { b = ["b"]; }; c = [1]; }
-=> [["b"] [1]]
-]]></programlisting>
-  </example>
-
-  <example xml:id="function-library-lib.attrsets.collect-example-outpath">
-   <title>Collecting all attribute-sets which contain the <literal>outPath</literal> attribute name.</title>
-<programlisting><![CDATA[
-collect (x: x ? outPath)
-  { a = { outPath = "a/"; }; b = { outPath = "b/"; }; }
-=> [{ outPath = "a/"; } { outPath = "b/"; }]
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.nameValuePair">
-  <title><function>lib.attrsets.nameValuePair</function></title>
-
-  <subtitle><literal>nameValuePair :: String -> Any -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.nameValuePair" />
-
-  <para>
-   Utility function that creates a <literal>{name, value}</literal> pair as expected by <function>builtins.listToAttrs</function>.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>name</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute name.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>value</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute value.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.nameValuePair-example">
-   <title>Creating a name value pair</title>
-<programlisting><![CDATA[
-nameValuePair "some" 6
-=> { name = "some"; value = 6; }
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.mapAttrs">
-  <title><function>lib.attrsets.mapAttrs</function></title>
-
-  <subtitle><literal></literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.mapAttrs" />
-
-  <para>
-   Apply a function to each element in an attribute set, creating a new attribute set.
-  </para>
-
-  <para>
-   Provides a backwards-compatible interface of <function>builtins.mapAttrs</function> for Nix version older than 2.1.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>fn</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>String -> Any -> Any</literal>
-     </para>
-     <para>
-      Given an attribute's name and value, return a new value.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name</varname>
-       </term>
-       <listitem>
-        <para>
-         The name of the attribute.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>value</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's value.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.mapAttrs-example">
-   <title>Modifying each value of an attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.mapAttrs
-  (name: value: name + "-" + value)
-  { x = "foo"; y = "bar"; }
-=> { x = "x-foo"; y = "y-bar"; }
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.mapAttrs-prime">
-  <title><function>lib.attrsets.mapAttrs&apos;</function></title>
-
-  <subtitle><literal>mapAttrs' :: (String -> Any -> { name = String; value = Any }) -> AttrSet -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.mapAttrs-prime" />
-
-  <para>
-   Like <function>mapAttrs</function>, but allows the name of each attribute to be changed in addition to the value. The applied function should return both the new name and value as a <function>nameValuePair</function>.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>fn</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>String -> Any -> { name = String; value = Any }</literal>
-     </para>
-     <para>
-      Given an attribute's name and value, return a new <link
-       linkend="function-library-lib.attrsets.nameValuePair">name value pair</link>.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name</varname>
-       </term>
-       <listitem>
-        <para>
-         The name of the attribute.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>value</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's value.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set to map over.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.mapAttrs-prime-example">
-   <title>Change the name and value of each attribute of an attribute set</title>
-<programlisting><![CDATA[
-lib.attrsets.mapAttrs' (name: value: lib.attrsets.nameValuePair ("foo_" + name) ("bar-" + value))
-   { x = "a"; y = "b"; }
-=> { foo_x = "bar-a"; foo_y = "bar-b"; }
-
-    ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.mapAttrsToList">
-  <title><function>lib.attrsets.mapAttrsToList</function></title>
-
-  <subtitle><literal>mapAttrsToList :: (String -> Any -> Any) ->
-   AttrSet -> [Any]</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.mapAttrsToList" />
-
-  <para>
-   Call <varname>fn</varname> for each attribute in the given <varname>set</varname> and return the result in a list.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>fn</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>String -> Any -> Any</literal>
-     </para>
-     <para>
-      Given an attribute's name and value, return a new value.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name</varname>
-       </term>
-       <listitem>
-        <para>
-         The name of the attribute.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>value</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's value.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set to map over.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.mapAttrsToList-example">
-   <title>Combine attribute values and names in to a list</title>
-<programlisting><![CDATA[
-lib.attrsets.mapAttrsToList (name: value: "${name}=${value}")
-   { x = "a"; y = "b"; }
-=> [ "x=a" "y=b" ]
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.mapAttrsRecursive">
-  <title><function>lib.attrsets.mapAttrsRecursive</function></title>
-
-  <subtitle><literal>mapAttrsRecursive :: ([String] > Any -> Any) -> AttrSet -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.mapAttrsRecursive" />
-
-  <para>
-   Like <function>mapAttrs</function>, except that it recursively applies itself to attribute sets. Also, the first argument of the argument function is a <emphasis>list</emphasis> of the names of the containing attributes.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>f</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>[ String ] -> Any -> Any</literal>
-     </para>
-     <para>
-      Given a list of attribute names and value, return a new value.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name_path</varname>
-       </term>
-       <listitem>
-        <para>
-         The list of attribute names to this value.
-        </para>
-        <para>
-         For example, the <varname>name_path</varname> for the <literal>example</literal> string in the attribute set <literal>{ foo = { bar = "example"; }; }</literal> is <literal>[ "foo" "bar" ]</literal>.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>value</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's value.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set to recursively map over.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.mapAttrsRecursive-example">
-   <title>A contrived example of using <function>lib.attrsets.mapAttrsRecursive</function></title>
-<programlisting><![CDATA[
-mapAttrsRecursive
-  (path: value: concatStringsSep "-" (path ++ [value]))
-  {
-    n = {
-      a = "A";
-      m = {
-        b = "B";
-        c = "C";
-      };
-    };
-    d = "D";
-  }
-=> {
-     n = {
-       a = "n-a-A";
-       m = {
-         b = "n-m-b-B";
-         c = "n-m-c-C";
-       };
-     };
-     d = "d-D";
-   }
-    ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.mapAttrsRecursiveCond">
-  <title><function>lib.attrsets.mapAttrsRecursiveCond</function></title>
-
-  <subtitle><literal>mapAttrsRecursiveCond :: (AttrSet -> Bool) -> ([ String ] -> Any -> Any) -> AttrSet -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.mapAttrsRecursiveCond" />
-
-  <para>
-   Like <function>mapAttrsRecursive</function>, but it takes an additional predicate function that tells it whether to recursive into an attribute set. If it returns false, <function>mapAttrsRecursiveCond</function> does not recurse, but does apply the map function. It is returns true, it does recurse, and does not apply the map function.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>cond</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>(AttrSet -> Bool)</literal>
-     </para>
-     <para>
-      Determine if <function>mapAttrsRecursive</function> should recurse deeper in to the attribute set.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>attributeset</varname>
-       </term>
-       <listitem>
-        <para>
-         An attribute set.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>f</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>[ String ] -> Any -> Any</literal>
-     </para>
-     <para>
-      Given a list of attribute names and value, return a new value.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name_path</varname>
-       </term>
-       <listitem>
-        <para>
-         The list of attribute names to this value.
-        </para>
-        <para>
-         For example, the <varname>name_path</varname> for the <literal>example</literal> string in the attribute set <literal>{ foo = { bar = "example"; }; }</literal> is <literal>[ "foo" "bar" ]</literal>.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>value</varname>
-       </term>
-       <listitem>
-        <para>
-         The attribute's value.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set to recursively map over.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.mapAttrsRecursiveCond-example">
-   <title>Only convert attribute values to JSON if the containing attribute set is marked for recursion</title>
-<programlisting><![CDATA[
-lib.attrsets.mapAttrsRecursiveCond
-  ({ recurse ? false, ... }: recurse)
-  (name: value: builtins.toJSON value)
-  {
-    dorecur = {
-      recurse = true;
-      hello = "there";
-    };
-    dontrecur = {
-      converted-to- = "json";
-    };
-  }
-=> {
-     dorecur = {
-       hello = "\"there\"";
-       recurse = "true";
-     };
-     dontrecur = "{\"converted-to\":\"json\"}";
-   }
-    ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.genAttrs">
-  <title><function>lib.attrsets.genAttrs</function></title>
-
-  <subtitle><literal>genAttrs :: [ String ] -> (String -> Any) -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.genAttrs" />
-
-  <para>
-   Generate an attribute set by mapping a function over a list of attribute names.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>names</varname>
-    </term>
-    <listitem>
-     <para>
-      Names of values in the resulting attribute set.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>f</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>String -> Any</literal>
-     </para>
-     <para>
-      Takes the name of the attribute and return the attribute's value.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name</varname>
-       </term>
-       <listitem>
-        <para>
-         The name of the attribute to generate a value for.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.genAttrs-example">
-   <title>Generate an attrset based on names only</title>
-<programlisting><![CDATA[
-lib.attrsets.genAttrs [ "foo" "bar" ] (name: "x_${name}")
-=> { foo = "x_foo"; bar = "x_bar"; }
-     ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.isDerivation">
-  <title><function>lib.attrsets.isDerivation</function></title>
-
-  <subtitle><literal>isDerivation :: Any -> Bool</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.isDerivation" />
-
-  <para>
-   Check whether the argument is a derivation. Any set with <code>{ type = "derivation"; }</code> counts as a derivation.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>value</varname>
-    </term>
-    <listitem>
-     <para>
-      The value which is possibly a derivation.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.isDerivation-example-true">
-   <title>A package is a derivation</title>
-<programlisting><![CDATA[
-lib.attrsets.isDerivation (import <nixpkgs> {}).ruby
-=> true
-     ]]></programlisting>
-  </example>
-
-  <example xml:id="function-library-lib.attrsets.isDerivation-example-false">
-   <title>Anything else is not a derivation</title>
-<programlisting><![CDATA[
-lib.attrsets.isDerivation "foobar"
-=> false
-     ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.toDerivation">
-  <title><function>lib.attrsets.toDerivation</function></title>
-
-  <subtitle><literal>toDerivation :: Path -> Derivation</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.toDerivation" />
-
-  <para>
-   Converts a store path to a fake derivation.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>path</varname>
-    </term>
-    <listitem>
-     <para>
-      A store path to convert to a derivation.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
- </section>
-
- <section xml:id="function-library-lib.attrsets.optionalAttrs">
-  <title><function>lib.attrsets.optionalAttrs</function></title>
-
-  <subtitle><literal>optionalAttrs :: Bool -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.optionalAttrs" />
-
-  <para>
-   Conditionally return an attribute set or an empty attribute set.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>cond</varname>
-    </term>
-    <listitem>
-     <para>
-      Condition under which the <varname>as</varname> attribute set is returned.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>as</varname>
-    </term>
-    <listitem>
-     <para>
-      The attribute set to return if <varname>cond</varname> is true.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.optionalAttrs-example-true">
-   <title>Return the provided attribute set when <varname>cond</varname> is true</title>
-<programlisting><![CDATA[
-lib.attrsets.optionalAttrs true { my = "set"; }
-=> { my = "set"; }
-     ]]></programlisting>
-  </example>
-
-  <example xml:id="function-library-lib.attrsets.optionalAttrs-example-false">
-   <title>Return an empty attribute set when <varname>cond</varname> is false</title>
-<programlisting><![CDATA[
-lib.attrsets.optionalAttrs false { my = "set"; }
-=> { }
-     ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.zipAttrsWithNames">
-  <title><function>lib.attrsets.zipAttrsWithNames</function></title>
-
-  <subtitle><literal>zipAttrsWithNames :: [ String ] -> (String -> [ Any ] -> Any) -> [ AttrSet ] -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.zipAttrsWithNames" />
-
-  <para>
-   Merge sets of attributes and use the function <varname>f</varname> to merge attribute values where the attribute name is in <varname>names</varname>.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>names</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of attribute names to zip.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>f</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>(String -> [ Any ] -> Any</literal>
-     </para>
-     <para>
-      Accepts an attribute name, all the values, and returns a combined value.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name</varname>
-       </term>
-       <listitem>
-        <para>
-         The name of the attribute each value came from.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>vs</varname>
-       </term>
-       <listitem>
-        <para>
-         A list of values collected from the list of attribute sets.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>sets</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of attribute sets to zip together.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.zipAttrsWithNames-example">
-   <title>Summing a list of attribute sets of numbers</title>
-<programlisting><![CDATA[
-lib.attrsets.zipAttrsWithNames
-  [ "a" "b" ]
-  (name: vals: "${name} ${toString (builtins.foldl' (a: b: a + b) 0 vals)}")
-  [
-    { a = 1; b = 1; c = 1; }
-    { a = 10; }
-    { b = 100; }
-    { c = 1000; }
-  ]
-=> { a = "a 11"; b = "b 101"; }
-     ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.zipAttrsWith">
-  <title><function>lib.attrsets.zipAttrsWith</function></title>
-
-  <subtitle><literal>zipAttrsWith :: (String -> [ Any ] -> Any) -> [ AttrSet ] -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.zipAttrsWith" />
-
-  <para>
-   Merge sets of attributes and use the function <varname>f</varname> to merge attribute values. Similar to <xref
-   linkend="function-library-lib.attrsets.zipAttrsWithNames" /> where all key names are passed for <varname>names</varname>.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>f</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>(String -> [ Any ] -> Any</literal>
-     </para>
-     <para>
-      Accepts an attribute name, all the values, and returns a combined value.
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>name</varname>
-       </term>
-       <listitem>
-        <para>
-         The name of the attribute each value came from.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>vs</varname>
-       </term>
-       <listitem>
-        <para>
-         A list of values collected from the list of attribute sets.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>sets</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of attribute sets to zip together.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.zipAttrsWith-example">
-   <title>Summing a list of attribute sets of numbers</title>
-<programlisting><![CDATA[
-lib.attrsets.zipAttrsWith
-  (name: vals: "${name} ${toString (builtins.foldl' (a: b: a + b) 0 vals)}")
-  [
-    { a = 1; b = 1; c = 1; }
-    { a = 10; }
-    { b = 100; }
-    { c = 1000; }
-  ]
-=> { a = "a 11"; b = "b 101"; c = "c 1001"; }
-     ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.zipAttrs">
-  <title><function>lib.attrsets.zipAttrs</function></title>
-
-  <subtitle><literal>zipAttrs :: [ AttrSet ] -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.zipAttrs" />
-
-  <para>
-   Merge sets of attributes and combine each attribute value in to a list. Similar to <xref linkend="function-library-lib.attrsets.zipAttrsWith" /> where the merge function returns a list of all values.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>sets</varname>
-    </term>
-    <listitem>
-     <para>
-      A list of attribute sets to zip together.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.zipAttrs-example">
-   <title>Combining a list of attribute sets</title>
-<programlisting><![CDATA[
-lib.attrsets.zipAttrs
-  [
-    { a = 1; b = 1; c = 1; }
-    { a = 10; }
-    { b = 100; }
-    { c = 1000; }
-  ]
-=> { a = [ 1 10 ]; b = [ 1 100 ]; c = [ 1 1000 ]; }
-     ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.recursiveUpdateUntil">
-  <title><function>lib.attrsets.recursiveUpdateUntil</function></title>
-
-  <subtitle><literal>recursiveUpdateUntil :: ( [ String ] -> AttrSet -> AttrSet -> Bool ) -> AttrSet -> AttrSet -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.recursiveUpdateUntil" />
-
-  <para>
-   Does the same as the update operator <literal>//</literal> except that attributes are merged until the given predicate is verified. The predicate should accept 3 arguments which are the path to reach the attribute, a part of the first attribute set and a part of the second attribute set. When the predicate is verified, the value of the first attribute set is replaced by the value of the second attribute set.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>pred</varname>
-    </term>
-    <listitem>
-     <para>
-      <literal>[ String ] -> AttrSet -> AttrSet -> Bool</literal>
-     </para>
-     <variablelist>
-      <varlistentry>
-       <term>
-        <varname>path</varname>
-       </term>
-       <listitem>
-        <para>
-         The path to the values in the left and right hand sides.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>l</varname>
-       </term>
-       <listitem>
-        <para>
-         The left hand side value.
-        </para>
-       </listitem>
-      </varlistentry>
-      <varlistentry>
-       <term>
-        <varname>r</varname>
-       </term>
-       <listitem>
-        <para>
-         The right hand side value.
-        </para>
-       </listitem>
-      </varlistentry>
-     </variablelist>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>lhs</varname>
-    </term>
-    <listitem>
-     <para>
-      The left hand attribute set of the merge.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>rhs</varname>
-    </term>
-    <listitem>
-     <para>
-      The right hand attribute set of the merge.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.recursiveUpdateUntil-example">
-   <title>Recursively merging two attribute sets</title>
-<programlisting><![CDATA[
-lib.attrsets.recursiveUpdateUntil (path: l: r: path == ["foo"])
-  {
-    # first attribute set
-    foo.bar = 1;
-    foo.baz = 2;
-    bar = 3;
-  }
-  {
-    #second attribute set
-    foo.bar = 1;
-    foo.quz = 2;
-    baz = 4;
-  }
-=> {
-  foo.bar = 1; # 'foo.*' from the second set
-  foo.quz = 2; #
-  bar = 3;     # 'bar' from the first set
-  baz = 4;     # 'baz' from the second set
-}
-     ]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.recursiveUpdate">
-  <title><function>lib.attrsets.recursiveUpdate</function></title>
-
-  <subtitle><literal>recursiveUpdate :: AttrSet -> AttrSet -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.recursiveUpdate" />
-
-  <para>
-   A recursive variant of the update operator <literal>//</literal>. The recursion stops when one of the attribute values is not an attribute set, in which case the right hand side value takes precedence over the left hand side value.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>lhs</varname>
-    </term>
-    <listitem>
-     <para>
-      The left hand attribute set of the merge.
-     </para>
-    </listitem>
-   </varlistentry>
-   <varlistentry>
-    <term>
-     <varname>rhs</varname>
-    </term>
-    <listitem>
-     <para>
-      The right hand attribute set of the merge.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.recursiveUpdate-example">
-   <title>Recursively merging two attribute sets</title>
-<programlisting><![CDATA[
-recursiveUpdate
-  {
-    boot.loader.grub.enable = true;
-    boot.loader.grub.device = "/dev/hda";
-  }
-  {
-    boot.loader.grub.device = "";
-  }
-=> {
-  boot.loader.grub.enable = true;
-  boot.loader.grub.device = "";
-}
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.recurseIntoAttrs">
-  <title><function>lib.attrsets.recurseIntoAttrs</function></title>
-
-  <subtitle><literal>recurseIntoAttrs :: AttrSet -> AttrSet</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.recurseIntoAttrs" />
-
-  <para>
-   Make various Nix tools consider the contents of the resulting attribute set when looking for what to build, find, etc.
-  </para>
-
-  <para>
-   This function only affects a single attribute set; it does not apply itself recursively for nested attribute sets.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>attrs</varname>
-    </term>
-    <listitem>
-     <para>
-      An attribute set to scan for derivations.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.recurseIntoAttrs-example">
-   <title>Making Nix look inside an attribute set</title>
-<programlisting><![CDATA[
-{ pkgs ? import <nixpkgs> {} }:
-{
-  myTools = pkgs.lib.recurseIntoAttrs {
-    inherit (pkgs) hello figlet;
-  };
-}
-]]></programlisting>
-  </example>
- </section>
-
- <section xml:id="function-library-lib.attrsets.cartesianProductOfSets">
-  <title><function>lib.attrsets.cartesianProductOfSets</function></title>
-
-  <subtitle><literal>cartesianProductOfSets :: AttrSet -> [ AttrSet ]</literal>
-  </subtitle>
-
-  <xi:include href="./locations.xml" xpointer="lib.attrsets.cartesianProductOfSets" />
-
-  <para>
-   Return the cartesian product of attribute set value combinations.
-  </para>
-
-  <variablelist>
-   <varlistentry>
-    <term>
-     <varname>set</varname>
-    </term>
-    <listitem>
-     <para>
-      An attribute set with attributes that carry lists of values.
-     </para>
-    </listitem>
-   </varlistentry>
-  </variablelist>
-
-  <example xml:id="function-library-lib.attrsets.cartesianProductOfSets-example">
-   <title>Creating the cartesian product of a list of attribute values</title>
-<programlisting><![CDATA[
-cartesianProductOfSets { a = [ 1 2 ]; b = [ 10 20 ]; }
-=> [
-     { a = 1; b = 10; }
-     { a = 1; b = 20; }
-     { a = 2; b = 10; }
-     { a = 2; b = 20; }
-   ]
-]]></programlisting>
-  </example>
- </section>
-</section>
diff --git a/nixpkgs/doc/functions/nix-gitignore.section.md b/nixpkgs/doc/functions/nix-gitignore.section.md
index 2fb833b23000..8eb4081d2878 100644
--- a/nixpkgs/doc/functions/nix-gitignore.section.md
+++ b/nixpkgs/doc/functions/nix-gitignore.section.md
@@ -4,7 +4,7 @@
 
 ## Usage {#sec-pkgs-nix-gitignore-usage}
 
-`pkgs.nix-gitignore` exports a number of functions, but you\'ll most likely need either `gitignoreSource` or `gitignoreSourcePure`. As their first argument, they both accept either 1. a file with gitignore lines or 2. a string with gitignore lines, or 3. a list of either of the two. They will be concatenated into a single big string.
+`pkgs.nix-gitignore` exports a number of functions, but you'll most likely need either `gitignoreSource` or `gitignoreSourcePure`. As their first argument, they both accept either 1. a file with gitignore lines or 2. a string with gitignore lines, or 3. a list of either of the two. They will be concatenated into a single big string.
 
 ```nix
 { pkgs ? import <nixpkgs> {} }:
@@ -30,7 +30,7 @@ gitignoreSourcePure = gitignoreFilterSourcePure (_: _: true);
 gitignoreSource = gitignoreFilterSource (_: _: true);
 ```
 
-Those filter functions accept the same arguments the `builtins.filterSource` function would pass to its filters, thus `fn: gitignoreFilterSourcePure fn ""` should be extensionally equivalent to `filterSource`. The file is blacklisted if it\'s blacklisted by either your filter or the gitignoreFilter.
+Those filter functions accept the same arguments the `builtins.filterSource` function would pass to its filters, thus `fn: gitignoreFilterSourcePure fn ""` should be extensionally equivalent to `filterSource`. The file is blacklisted if it's blacklisted by either your filter or the gitignoreFilter.
 
 If you want to make your own filter from scratch, you may use
 
diff --git a/nixpkgs/doc/hooks/autoconf.section.md b/nixpkgs/doc/hooks/autoconf.section.md
new file mode 100644
index 000000000000..13d75910f192
--- /dev/null
+++ b/nixpkgs/doc/hooks/autoconf.section.md
@@ -0,0 +1,4 @@
+
+### Autoconf {#setup-hook-autoconf}
+
+The `autoreconfHook` derivation adds `autoreconfPhase`, which runs autoreconf, libtoolize and automake, essentially preparing the configure script in autotools-based builds. Most autotools-based packages come with the configure script pre-generated, but this hook is necessary for a few packages and when you need to patch the package’s configure scripts.
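+
+As with other setup hooks, adding the hook to `nativeBuildInputs` is all that is needed; a minimal sketch:
+
+```nix
+# regenerates ./configure (and friends) via autoreconfPhase before configurePhase runs
+nativeBuildInputs = [ autoreconfHook ];
+```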
diff --git a/nixpkgs/doc/hooks/automake.section.md b/nixpkgs/doc/hooks/automake.section.md
new file mode 100644
index 000000000000..562ac18fcd93
--- /dev/null
+++ b/nixpkgs/doc/hooks/automake.section.md
@@ -0,0 +1,4 @@
+
+### Automake {#setup-hook-automake}
+
+Adds the `share/aclocal` subdirectory of each build input to the `ACLOCAL_PATH` environment variable.
diff --git a/nixpkgs/doc/hooks/autopatchelf.section.md b/nixpkgs/doc/hooks/autopatchelf.section.md
new file mode 100644
index 000000000000..9c2852ccf279
--- /dev/null
+++ b/nixpkgs/doc/hooks/autopatchelf.section.md
@@ -0,0 +1,12 @@
+
+### autoPatchelfHook {#setup-hook-autopatchelfhook}
+
+This is a special setup hook that helps with packaging proprietary software: it automatically tries to find missing shared library dependencies of ELF files based on the given `buildInputs` and `nativeBuildInputs`.
+
+You can also specify a `runtimeDependencies` variable which lists dependencies to be unconditionally added to the rpath of all executables. This is useful for programs that use dlopen(3) to load libraries at runtime.
+
+In certain situations you may want to run the main command (`autoPatchelf`) of the setup hook on a file or a set of directories instead of unconditionally patching all outputs. This can be done by setting the `dontAutoPatchelf` environment variable to a non-empty value.
+
+By default `autoPatchelf` will fail as soon as any ELF file requires a dependency which cannot be resolved via the given build inputs. In some situations you might prefer to just leave missing dependencies unpatched and continue to patch the rest. This can be achieved by setting the `autoPatchelfIgnoreMissingDeps` environment variable to a non-empty value. `autoPatchelfIgnoreMissingDeps` can be set to a list like `autoPatchelfIgnoreMissingDeps = [ "libcuda.so.1" "libcudart.so.1" ];` or to simply `[ "*" ]` to ignore all missing dependencies.
+
+The `autoPatchelf` command also recognizes a `--no-recurse` command line flag, which prevents it from recursing into subdirectories.
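+
+A minimal sketch combining the variables described above (`zlib` and `curl` are only illustrative dependencies; the other attribute names are the ones this hook reads):
+
+```nix
+nativeBuildInputs = [ autoPatchelfHook ];
+buildInputs = [ zlib ];                              # searched when resolving missing ELF dependencies
+runtimeDependencies = [ curl ];                      # unconditionally added to the rpath of all executables
+autoPatchelfIgnoreMissingDeps = [ "libcuda.so.1" ];  # leave this dependency unpatched instead of failing
+```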
diff --git a/nixpkgs/doc/hooks/breakpoint.section.md b/nixpkgs/doc/hooks/breakpoint.section.md
new file mode 100644
index 000000000000..9600e06b7934
--- /dev/null
+++ b/nixpkgs/doc/hooks/breakpoint.section.md
@@ -0,0 +1,16 @@
+
+### breakpointHook {#breakpointhook}
+
+This hook will make a build pause instead of stopping when a failure happens. It prevents Nix from cleaning up the build environment immediately and allows the user to attach to a build environment using the `cntr` command. Upon build error it will print instructions on how to use `cntr`, which can be used to enter the environment for debugging. Installing cntr and running the command will provide shell access to the build sandbox of the failed build. The sandboxed filesystem is mounted at `/var/lib/cntr`. All commands and files of the system are still accessible within the shell. To execute commands from the sandbox, use the `cntr exec` subcommand. `cntr` is only supported on Linux-based platforms. To use it, first add `cntr` to your `environment.systemPackages` on NixOS, or alternatively install it for the root user on non-NixOS systems. Then, in the package that is supposed to be inspected, add `breakpointHook` to `nativeBuildInputs`.
+
+```nix
+nativeBuildInputs = [ breakpointHook ];
+```
+
+When a build failure happens there will be an instruction printed that shows how to attach with `cntr` to the build sandbox.
+
+::: {.note}
+Caution with remote builds
+
+This won’t work with remote builds as the build environment is on a different machine and can’t be accessed by `cntr`. Remote builds can be turned off by setting `--option builders ''` for `nix-build` or `--builders ''` for `nix build`.
+:::
diff --git a/nixpkgs/doc/hooks/cmake.section.md b/nixpkgs/doc/hooks/cmake.section.md
new file mode 100644
index 000000000000..58fbfa45a2e0
--- /dev/null
+++ b/nixpkgs/doc/hooks/cmake.section.md
@@ -0,0 +1,4 @@
+
+### cmake {#cmake}
+
+Overrides the default configure phase to run the CMake command. By default, we use the Make generator of CMake. In addition, dependencies are added automatically to `CMAKE_PREFIX_PATH` so that packages are correctly detected by CMake. Some additional flags are passed in to give similar behavior to configure-based packages. You can disable this hook’s behavior by setting `configurePhase` to a custom value, or by setting `dontUseCmakeConfigure`. `cmakeFlags` controls flags passed only to CMake. By default, parallel building is enabled as CMake supports parallel building almost everywhere. When Ninja is also in use, CMake will detect that and use the ninja generator.
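+
+A minimal sketch of a CMake-based package (the flag shown is only an example of something you might pass through `cmakeFlags`):
+
+```nix
+nativeBuildInputs = [ cmake ];
+# passed only to the cmake invocation performed by the overridden configurePhase
+cmakeFlags = [ "-DBUILD_TESTING=OFF" ];
+```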
diff --git a/nixpkgs/doc/hooks/gdk-pixbuf.section.md b/nixpkgs/doc/hooks/gdk-pixbuf.section.md
new file mode 100644
index 000000000000..565216560abc
--- /dev/null
+++ b/nixpkgs/doc/hooks/gdk-pixbuf.section.md
@@ -0,0 +1,4 @@
+
+### gdk-pixbuf {#setup-hook-gdk-pixbuf}
+
+Exports the `GDK_PIXBUF_MODULE_FILE` environment variable to the builder. Add the librsvg package to `buildInputs` to get SVG support. See also the [setup hook description in GNOME platform docs](#ssec-gnome-hooks-gdk-pixbuf).
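+
+A minimal sketch, assuming the package loads images through gdk-pixbuf and should also handle SVG files:
+
+```nix
+buildInputs = [ gdk-pixbuf librsvg ];
+```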
diff --git a/nixpkgs/doc/hooks/ghc.section.md b/nixpkgs/doc/hooks/ghc.section.md
new file mode 100644
index 000000000000..a4b0841ea486
--- /dev/null
+++ b/nixpkgs/doc/hooks/ghc.section.md
@@ -0,0 +1,4 @@
+
+### GHC {#ghc}
+
+Creates a temporary package database and registers every Haskell build input in it (TODO: how?).
diff --git a/nixpkgs/doc/hooks/gnome.section.md b/nixpkgs/doc/hooks/gnome.section.md
new file mode 100644
index 000000000000..8c209d9b472c
--- /dev/null
+++ b/nixpkgs/doc/hooks/gnome.section.md
@@ -0,0 +1,4 @@
+
+### GNOME platform {#gnome-platform}
+
+Hooks related to GNOME platform and related libraries like GLib, GTK and GStreamer are described in [](#sec-language-gnome).
diff --git a/nixpkgs/doc/hooks/index.xml b/nixpkgs/doc/hooks/index.xml
index 6a046eae2885..0917fac6c0ac 100644
--- a/nixpkgs/doc/hooks/index.xml
+++ b/nixpkgs/doc/hooks/index.xml
@@ -6,5 +6,32 @@
  <para>
   Nixpkgs has several hook packages that augment the stdenv phases.
  </para>
+ <para>
+  The stdenv built-in hooks are documented in <xref linkend="ssec-setup-hooks"/>.
+ </para>
+ <xi:include href="./autoconf.section.xml" />
+ <xi:include href="./automake.section.xml" />
+ <xi:include href="./autopatchelf.section.xml" />
+ <xi:include href="./breakpoint.section.xml" />
+ <xi:include href="./cmake.section.xml" />
+ <xi:include href="./gdk-pixbuf.section.xml" />
+ <xi:include href="./ghc.section.xml" />
+ <xi:include href="./gnome.section.xml" />
+ <xi:include href="./installShellFiles.section.xml" />
+ <xi:include href="./libiconv.section.xml" />
+ <xi:include href="./libxml2.section.xml" />
+ <xi:include href="./meson.section.xml" />
+ <xi:include href="./ninja.section.xml" />
+ <xi:include href="./patch-rc-path-hooks.section.xml" />
+ <xi:include href="./perl.section.xml" />
+ <xi:include href="./pkg-config.section.xml" />
  <xi:include href="./postgresql-test-hook.section.xml" />
+ <xi:include href="./python.section.xml" />
+ <xi:include href="./qt-4.section.xml" />
+ <xi:include href="./scons.section.xml" />
+ <xi:include href="./tetex-tex-live.section.xml" />
+ <xi:include href="./unzip.section.xml" />
+ <xi:include href="./validatePkgConfig.section.xml" />
+ <xi:include href="./waf.section.xml" />
+ <xi:include href="./xcbuild.section.xml" />
 </chapter>
diff --git a/nixpkgs/doc/hooks/installShellFiles.section.md b/nixpkgs/doc/hooks/installShellFiles.section.md
new file mode 100644
index 000000000000..d27527503fed
--- /dev/null
+++ b/nixpkgs/doc/hooks/installShellFiles.section.md
@@ -0,0 +1,26 @@
+
+### `installShellFiles` {#installshellfiles}
+
+This hook helps with installing manpages and shell completion files. It exposes two shell functions, `installManPage` and `installShellCompletion`, that can be used from your `postInstall` hook.
+
+The `installManPage` function takes one or more paths to manpages to install. The manpages must have a section suffix, and may optionally be compressed (with `.gz` suffix). This function will place them into the correct directory.
+
+The `installShellCompletion` function takes one or more paths to shell completion files. By default it will autodetect the shell type from the completion file extension, but you may also specify it by passing one of `--bash`, `--fish`, or `--zsh`. These flags apply to all paths listed after them (up until another shell flag is given). Each path may also be given a custom installation name by passing the flag `--name NAME` before the path. If this flag is not provided, zsh completions will be renamed automatically such that `foobar.zsh` becomes `_foobar`. A root name may be provided for all paths using the flag `--cmd NAME`; this synthesizes the appropriate name depending on the shell (e.g. `--cmd foo` will synthesize the name `foo.bash` for bash and `_foo` for zsh). The path may also be a fifo or named fd (such as produced by `<(cmd)`), in which case the shell and name must be provided.
+
+```nix
+nativeBuildInputs = [ installShellFiles ];
+postInstall = ''
+  installManPage doc/foobar.1 doc/barfoo.3
+  # explicit behavior
+  installShellCompletion --bash --name foobar.bash share/completions.bash
+  installShellCompletion --fish --name foobar.fish share/completions.fish
+  installShellCompletion --zsh --name _foobar share/completions.zsh
+  # implicit behavior
+  installShellCompletion share/completions/foobar.{bash,fish,zsh}
+  # using named fd
+  installShellCompletion --cmd foobar \
+    --bash <($out/bin/foobar --bash-completion) \
+    --fish <($out/bin/foobar --fish-completion) \
+    --zsh <($out/bin/foobar --zsh-completion)
+'';
+```
diff --git a/nixpkgs/doc/hooks/libiconv.section.md b/nixpkgs/doc/hooks/libiconv.section.md
new file mode 100644
index 000000000000..c228fe339e14
--- /dev/null
+++ b/nixpkgs/doc/hooks/libiconv.section.md
@@ -0,0 +1,4 @@
+
+### libiconv, libintl {#libiconv-libintl}
+
+A few libraries automatically add their library to `NIX_LDFLAGS`, making their symbols automatically available to the linker. This includes libiconv and libintl (gettext). This is done to provide compatibility between GNU/Linux, where libiconv and libintl are bundled in, and other systems where that might not be the case. Sometimes, this behavior is not desired. To disable it, set `dontAddExtraLibs`.
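+
+A sketch of opting out in a derivation:
+
+```nix
+stdenv.mkDerivation {
+  # ...
+  # do not let libiconv/libintl add themselves to NIX_LDFLAGS
+  dontAddExtraLibs = true;
+}
+```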
diff --git a/nixpkgs/doc/hooks/libxml2.section.md b/nixpkgs/doc/hooks/libxml2.section.md
new file mode 100644
index 000000000000..770ef9ff3ffe
--- /dev/null
+++ b/nixpkgs/doc/hooks/libxml2.section.md
@@ -0,0 +1,4 @@
+
+### libxml2 {#setup-hook-libxml2}
+
+Adds every file named `catalog.xml` found under the `xml/dtd` and `xml/xsl` subdirectories of each build input to the `XML_CATALOG_FILES` environment variable.
diff --git a/nixpkgs/doc/hooks/meson.section.md b/nixpkgs/doc/hooks/meson.section.md
new file mode 100644
index 000000000000..32804b5e32f2
--- /dev/null
+++ b/nixpkgs/doc/hooks/meson.section.md
@@ -0,0 +1,26 @@
+
+### Meson {#meson}
+
+Overrides the configure phase to run Meson to generate Ninja files. To build from these files, you should accompany Meson with ninja; a minimal usage sketch is shown at the end of this section. By default, `enableParallelBuilding` is enabled as Meson supports parallel building almost everywhere.
+
+#### Variables controlling Meson {#variables-controlling-meson}
+
+##### `mesonFlags` {#mesonflags}
+
+Controls the flags passed to meson.
+
+##### `mesonBuildType` {#mesonbuildtype}
+
+Which [`--buildtype`](https://mesonbuild.com/Builtin-options.html#core-options) to pass to Meson. We default to `plain`.
+
+##### `mesonAutoFeatures` {#mesonautofeatures}
+
+What value to set [`-Dauto_features=`](https://mesonbuild.com/Builtin-options.html#core-options) to. We default to `enabled`.
+
+##### `mesonWrapMode` {#mesonwrapmode}
+
+What value to set [`-Dwrap_mode=`](https://mesonbuild.com/Builtin-options.html#core-options) to. We default to `nodownload` as we disallow network access.
+
+##### `dontUseMesonConfigure` {#dontusemesonconfigure}
+
+Disables using Meson’s `configurePhase`.
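+
+Putting the variables above together, a minimal sketch of a Meson-based package could look like this (the `-Ddocs=false` option is hypothetical):
+
+```nix
+stdenv.mkDerivation {
+  # ...
+  nativeBuildInputs = [ meson ninja ];
+  mesonBuildType = "debugoptimized";
+  mesonFlags = [ "-Ddocs=false" ];
+}
+```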
diff --git a/nixpkgs/doc/hooks/ninja.section.md b/nixpkgs/doc/hooks/ninja.section.md
new file mode 100644
index 000000000000..5ea1ee87070a
--- /dev/null
+++ b/nixpkgs/doc/hooks/ninja.section.md
@@ -0,0 +1,4 @@
+
+### ninja {#ninja}
+
+Overrides the build, install, and check phases to run ninja instead of make. You can disable this behavior with the `dontUseNinjaBuild`, `dontUseNinjaInstall`, and `dontUseNinjaCheck` variables, respectively. Parallel building is enabled by default in Ninja.
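+
+For example, a sketch of keeping the Ninja-driven build while opting out of its install phase (useful when a project provides its own install mechanism):
+
+```nix
+nativeBuildInputs = [ ninja ];
+dontUseNinjaInstall = true;
+```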
diff --git a/nixpkgs/doc/hooks/patch-rc-path-hooks.section.md b/nixpkgs/doc/hooks/patch-rc-path-hooks.section.md
new file mode 100644
index 000000000000..5c870dc782c2
--- /dev/null
+++ b/nixpkgs/doc/hooks/patch-rc-path-hooks.section.md
@@ -0,0 +1,50 @@
+
+# `patchRcPath` hooks {#sec-patchRcPathHooks}
+
+These hooks provide shell-specific utilities (with the same name as the hook) to patch shell scripts meant to be sourced by software users.
+
+The typical usage is to patch initialisation or [rc](https://unix.stackexchange.com/questions/3467/what-does-rc-in-bashrc-stand-for) scripts inside `$out/bin` or `$out/etc`.
+Such scripts, when being sourced, would insert the binary locations of certain commands into `PATH`, modify other environment variables or run a series of start-up commands.
+As shipped by upstream, they sometimes use commands that might not be available in the environment in which they are sourced.
+
+The compatible shells for each hook are:
+
+ - `patchRcPathBash`: [Bash](https://www.gnu.org/software/bash/), [ksh](http://www.kornshell.org/), [zsh](https://www.zsh.org/) and other shells supporting the Bash-like parameter expansions.
+ - `patchRcPathCsh`: Csh scripts, such as those targeting [tcsh](https://www.tcsh.org/).
+ - `patchRcPathFish`: [Fish](https://fishshell.com/) scripts.
+ - `patchRcPathPosix`: POSIX-conformant shells supporting the limited parameter expansions specified by the POSIX standard. The current implementation uses only the parameter expansion `${foo-}`.
+
+For each supported shell, it modifies the script with a `PATH` prefix that is later removed when the script ends.
+It allows nested patching, which guarantees that a patched script may source another patched script.
+
+Syntax to apply the utility to a script:
+
+```sh
+patchRcPath<shell> <file> <PATH-prefix>
+```
+
+Example usage:
+
+Given a package `foo` containing an init script `this-foo.fish` that depends on `coreutils`, `man` and `which`,
+patch the init script for users to source without having the above dependencies in their `PATH`:
+
+```nix
+{ lib, stdenv, patchRcPathFish, coreutils, man, which }:
+stdenv.mkDerivation {
+
+  # ...
+
+  nativeBuildInputs = [
+    patchRcPathFish
+  ];
+
+  postFixup = ''
+    patchRcPathFish $out/bin/this-foo.fish ${lib.makeBinPath [ coreutils man which ]}
+  '';
+}
+```
+
+::: {.note}
+The `patchRcPathCsh` and `patchRcPathPosix` implementations depend on `sed` for string processing.
+The others are written in vanilla shell and have no third-party dependencies.
+:::
diff --git a/nixpkgs/doc/hooks/perl.section.md b/nixpkgs/doc/hooks/perl.section.md
new file mode 100644
index 000000000000..403227a9bf18
--- /dev/null
+++ b/nixpkgs/doc/hooks/perl.section.md
@@ -0,0 +1,4 @@
+
+### Perl {#setup-hook-perl}
+
+Adds the `lib/site_perl` subdirectory of each build input to the `PERL5LIB` environment variable, so that Perl modules provided by build inputs can be found at build time.
diff --git a/nixpkgs/doc/hooks/pkg-config.section.md b/nixpkgs/doc/hooks/pkg-config.section.md
new file mode 100644
index 000000000000..969c81f6d18a
--- /dev/null
+++ b/nixpkgs/doc/hooks/pkg-config.section.md
@@ -0,0 +1,4 @@
+
+### pkg-config {#setup-hook-pkg-config}
+
+Adds the `lib/pkgconfig` and `share/pkgconfig` subdirectories of each build input to the `PKG_CONFIG_PATH` environment variable.
diff --git a/nixpkgs/doc/hooks/postgresql-test-hook.section.md b/nixpkgs/doc/hooks/postgresql-test-hook.section.md
index 077fac14ebbf..8b37ca1e4b3e 100644
--- a/nixpkgs/doc/hooks/postgresql-test-hook.section.md
+++ b/nixpkgs/doc/hooks/postgresql-test-hook.section.md
@@ -9,7 +9,7 @@ stdenv.mkDerivation {
 
   # ...
 
-  checkInputs = [
+  nativeCheckInputs = [
     postgresql
     postgresqlTestHook
   ];
@@ -40,12 +40,18 @@ Exported variables:
 
 Bash-only variables:
 
- - `postgresqlTestUserOptions`: SQL options to use when creating the `$PGUSER` role, default: `LOGIN`.
+ - `postgresqlTestUserOptions`: SQL options to use when creating the `$PGUSER` role, default: `"LOGIN"`. Example: `"LOGIN SUPERUSER"`
  - `postgresqlTestSetupSQL`: SQL commands to run as database administrator after startup, default: statements that create `$PGUSER` and `$PGDATABASE`.
  - `postgresqlTestSetupCommands`: bash commands to run after database start, defaults to running `$postgresqlTestSetupSQL` as database administrator.
  - `postgresqlEnableTCP`: set to `1` to enable TCP listening. Flaky; not recommended.
  - `postgresqlStartCommands`: defaults to `pg_ctl start`.
 
+## Hooks {#sec-postgresqlTestHook-hooks}
+
+A number of additional hooks are run in `postgresqlTestHook`:
+
+ - `postgresqlTestSetupPost`: run after PostgreSQL has been set up; a usage sketch follows below.
+
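+A sketch of using this hook point, assuming the variable is evaluated as shell code like other stdenv hooks (the extension name is only an example):
+
+```nix
+postgresqlTestSetupPost = ''
+  psql -c 'CREATE EXTENSION IF NOT EXISTS pgcrypto;'
+'';
+```
+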
 ## TCP and the Nix sandbox {#sec-postgresqlTestHook-tcp}
 
 `postgresqlEnableTCP` relies on network sandboxing, which is not available on macOS and some custom Nix installations, resulting in flaky tests.
diff --git a/nixpkgs/doc/hooks/python.section.md b/nixpkgs/doc/hooks/python.section.md
new file mode 100644
index 000000000000..a46a727e95b1
--- /dev/null
+++ b/nixpkgs/doc/hooks/python.section.md
@@ -0,0 +1,4 @@
+
+### Python {#setup-hook-python}
+
+Adds the `lib/${python.libPrefix}/site-packages` subdirectory of each build input to the `PYTHONPATH` environment variable.
diff --git a/nixpkgs/doc/hooks/qt-4.section.md b/nixpkgs/doc/hooks/qt-4.section.md
new file mode 100644
index 000000000000..f15d858e2377
--- /dev/null
+++ b/nixpkgs/doc/hooks/qt-4.section.md
@@ -0,0 +1,4 @@
+
+### Qt 4 {#qt-4}
+
+Sets the `QTDIR` environment variable to Qt’s path.
diff --git a/nixpkgs/doc/hooks/scons.section.md b/nixpkgs/doc/hooks/scons.section.md
new file mode 100644
index 000000000000..1392269e5d55
--- /dev/null
+++ b/nixpkgs/doc/hooks/scons.section.md
@@ -0,0 +1,4 @@
+
+### scons {#scons}
+
+Overrides the build, install, and check phases. This uses the scons build system as a replacement for make. scons does not provide a configure phase, so everything is managed at build and install time.
diff --git a/nixpkgs/doc/hooks/tetex-tex-live.section.md b/nixpkgs/doc/hooks/tetex-tex-live.section.md
new file mode 100644
index 000000000000..0ecdcc12e45a
--- /dev/null
+++ b/nixpkgs/doc/hooks/tetex-tex-live.section.md
@@ -0,0 +1,4 @@
+
+### teTeX / TeX Live {#tetex-tex-live}
+
+Adds the `share/texmf-nix` subdirectory of each build input to the `TEXINPUTS` environment variable.
diff --git a/nixpkgs/doc/hooks/unzip.section.md b/nixpkgs/doc/hooks/unzip.section.md
new file mode 100644
index 000000000000..91dc072de662
--- /dev/null
+++ b/nixpkgs/doc/hooks/unzip.section.md
@@ -0,0 +1,4 @@
+
+### unzip {#unzip}
+
+This setup hook will allow you to unzip .zip files specified in `$src`. There are many similar packages like `unrar`, `undmg`, etc.
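+
+As a sketch, adding the hook is enough for the generic unpack phase to handle a `.zip` `$src`:
+
+```nix
+nativeBuildInputs = [ unzip ];
+```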
diff --git a/nixpkgs/doc/hooks/validatePkgConfig.section.md b/nixpkgs/doc/hooks/validatePkgConfig.section.md
new file mode 100644
index 000000000000..8719ae930fcb
--- /dev/null
+++ b/nixpkgs/doc/hooks/validatePkgConfig.section.md
@@ -0,0 +1,4 @@
+
+### validatePkgConfig {#validatepkgconfig}
+
+The `validatePkgConfig` hook validates all pkg-config (`.pc`) files in a package. This helps catch some common errors in pkg-config files, such as undefined variables.
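+
+Using it is a matter of adding the hook to the build, as in this sketch:
+
+```nix
+nativeBuildInputs = [ validatePkgConfig ];
+```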
diff --git a/nixpkgs/doc/hooks/waf.section.md b/nixpkgs/doc/hooks/waf.section.md
new file mode 100644
index 000000000000..de65abde4502
--- /dev/null
+++ b/nixpkgs/doc/hooks/waf.section.md
@@ -0,0 +1,4 @@
+
+### wafHook {#wafhook}
+
+Overrides the configure, build, and install phases. This will run the “waf” script used by many projects. If `wafPath` (default `./waf`) doesn’t exist, it will copy the version of waf available in Nixpkgs. `wafFlags` can be used to pass flags to the waf script.
diff --git a/nixpkgs/doc/hooks/xcbuild.section.md b/nixpkgs/doc/hooks/xcbuild.section.md
new file mode 100644
index 000000000000..1426431f6dce
--- /dev/null
+++ b/nixpkgs/doc/hooks/xcbuild.section.md
@@ -0,0 +1,4 @@
+
+### xcbuildHook {#xcbuildhook}
+
+Overrides the build and install phases to run the "xcbuild" command. This hook is needed when a project only comes with build files for the Xcode build system. You can disable this behavior by setting `buildPhase` and `configurePhase` to a custom value. `xcbuildFlags` controls flags passed only to xcbuild.
diff --git a/nixpkgs/doc/languages-frameworks/agda.section.md b/nixpkgs/doc/languages-frameworks/agda.section.md
index 775a7a1a6429..ff3d70ef0c62 100644
--- a/nixpkgs/doc/languages-frameworks/agda.section.md
+++ b/nixpkgs/doc/languages-frameworks/agda.section.md
@@ -52,7 +52,7 @@ agda.withPackages (p: [
       repo = "agda-stdlib";
       owner = "agda";
       rev = "v1.5";
-      sha256 = "16fcb7ssj6kj687a042afaa2gq48rc8abihpm14k684ncihb2k4w";
+      hash = "sha256-nEyxYGSWIDNJqBfGpRDLiOAnlHJKEKAOMnIaqfVZzJk=";
     };
   }))
 ])
@@ -83,7 +83,7 @@ agda.withPackages (p: [
       owner = "owner";
       version = "...";
       rev = "...";
-      sha256 = "...";
+      hash = "...";
     };
   })
 ])
@@ -216,7 +216,7 @@ you can test whether it builds correctly by writing in a comment:
 @ofborg build agdaPackages.iowa-stdlib
 ```
 
-### Maintaining Agda packages
+### Maintaining Agda packages {#agda-maintaining-packages}
 
 As mentioned before, the aim is to have a compatible, and up-to-date package set.
 These two conditions sometimes exclude each other:
diff --git a/nixpkgs/doc/languages-frameworks/android.section.md b/nixpkgs/doc/languages-frameworks/android.section.md
index 28128ead6631..6f9717ca09cc 100644
--- a/nixpkgs/doc/languages-frameworks/android.section.md
+++ b/nixpkgs/doc/languages-frameworks/android.section.md
@@ -13,6 +13,7 @@ with import <nixpkgs> {};
 
 let
   androidComposition = androidenv.composeAndroidPackages {
+    cmdLineToolsVersion = "8.0";
     toolsVersion = "26.1.1";
     platformToolsVersion = "30.0.5";
     buildToolsVersions = [ "30.0.3" ];
@@ -42,7 +43,10 @@ exceptions are the tools, platform-tools and build-tools sub packages.
 
 The following parameters are supported:
 
-* `toolsVersion`, specifies the version of the tools package to use
+* `cmdLineToolsVersion`, specifies the version of the `cmdline-tools` package to use
+* `toolsVersion`, specifies the version of the `tools` package. Note that `tools` is
+  obsolete and currently only `26.1.1` is available, so there are not many options
+  here; however, you can set it to `null` if you don't want it.
 * `platformsToolsVersion` specifies the version of the `platform-tools` plugin
 * `buildToolsVersions` specifies the versions of the `build-tools` plugins to
   use.
@@ -232,7 +236,6 @@ androidenv.emulateApp {
   platformVersion = "24";
   abiVersion = "armeabi-v7a"; # mips, x86, x86_64
   systemImageType = "default";
-  useGoogleAPIs = false;
   app = ./MyApp.apk;
   package = "MyApp";
   activity = "MainActivity";
diff --git a/nixpkgs/doc/languages-frameworks/beam.section.md b/nixpkgs/doc/languages-frameworks/beam.section.md
index f6c74cb01e40..4c1650781f05 100644
--- a/nixpkgs/doc/languages-frameworks/beam.section.md
+++ b/nixpkgs/doc/languages-frameworks/beam.section.md
@@ -14,7 +14,7 @@ nixpkgs follows the [official elixir deprecation schedule](https://hexdocs.pm/el
 
 All BEAM-related expressions are available via the top-level `beam` attribute, which includes:
 
-- `interpreters`: a set of compilers running on the BEAM, including multiple Erlang/OTP versions (`beam.interpreters.erlangR22`, etc), Elixir (`beam.interpreters.elixir`) and LFE (Lisp Flavoured Erlang) (`beam.interpreters.lfe`).
+- `interpreters`: a set of compilers running on the BEAM, including multiple Erlang/OTP versions (`beam.interpreters.erlang_22`, etc), Elixir (`beam.interpreters.elixir`) and LFE (Lisp Flavoured Erlang) (`beam.interpreters.lfe`).
 
 - `packages`: a set of package builders (Mix and rebar3), each compiled with a specific Erlang/OTP version, e.g. `beam.packages.erlang22`.
 
@@ -22,7 +22,7 @@ The default Erlang compiler, defined by `beam.interpreters.erlang`, is aliased a
 
 To create a package builder built with a custom Erlang version, use the lambda, `beam.packagesWith`, which accepts an Erlang/OTP derivation and produces a package builder similar to `beam.packages.erlang`.
 
-Many Erlang/OTP distributions available in `beam.interpreters` have versions with ODBC and/or Java enabled or without wx (no observer support). For example, there's `beam.interpreters.erlangR22_odbc_javac`, which corresponds to `beam.interpreters.erlangR22` and `beam.interpreters.erlangR22_nox`, which corresponds to `beam.interpreters.erlangR22`.
+Many Erlang/OTP distributions available in `beam.interpreters` have versions with ODBC and/or Java enabled or without wx (no observer support). For example, there's `beam.interpreters.erlang_22_odbc_javac`, which corresponds to `beam.interpreters.erlang_22` and `beam.interpreters.erlang_22_nox`, which corresponds to `beam.interpreters.erlang_22`.
 
 ## Build Tools {#build-tools}
 
@@ -93,7 +93,7 @@ Practical steps:
 - run `mix2nix > mix_deps.nix` in the upstream repo.
 - pass `mixNixDeps = with pkgs; import ./mix_deps.nix { inherit lib beamPackages; };` as an argument to mixRelease.
 
-If there are git depencencies.
+If there are git dependencies.
 
 - You'll need to fix the version artificially in mix.exs and regenerate the mix.lock with fixed version (on upstream). This will enable you to run `mix2nix > mix_deps.nix`.
 - From the mix_deps.nix file, remove the dependencies that had git versions and pass them as an override to the import function.
@@ -115,7 +115,7 @@ If there are git depencencies.
           owner = "elixir-libraries";
           repo = "prometheus.ex";
           rev = "a4e9beb3c1c479d14b352fd9d6dd7b1f6d7deee5";
-          sha256 = "1v0q4bi7sb253i8q016l7gwlv5562wk5zy3l2sa446csvsacnpjk";
+          hash = "sha256-U17LlN6aGUKUFnT4XyYXppRN+TvUBIBRHEUsfeIiGOw=";
         };
         # you can re-use the same beamDeps argument as generated
         beamDeps = with final; [ prometheus ];
@@ -124,11 +124,11 @@ If there are git depencencies.
 };
 ```
 
-You will need to run the build process once to fix the sha256 to correspond to your new git src.
+You will need to run the build process once to fix the hash to correspond to your new git src.
 
 ###### FOD {#fixed-output-derivation}
 
-A fixed output derivation will download mix dependencies from the internet. To ensure reproducibility, a hash will be supplied. Note that mix is relatively reproducible. An FOD generating a different hash on each run hasn't been observed (as opposed to npm where the chances are relatively high). See [elixir_ls](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/beam-modules/elixir_ls.nix) for a usage example of FOD.
+A fixed output derivation will download mix dependencies from the internet. To ensure reproducibility, a hash will be supplied. Note that mix is relatively reproducible. An FOD generating a different hash on each run hasn't been observed (as opposed to npm where the chances are relatively high). See [elixir-ls](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/beam-modules/elixir-ls/default.nix) for a usage example of FOD.
 
 Practical steps
 
@@ -138,13 +138,13 @@ Practical steps
   mixFodDeps = fetchMixDeps {
     pname = "mix-deps-${pname}";
     inherit src version;
-    sha256 = lib.fakeSha256;
+    hash = lib.fakeHash;
   };
 ```
 
-The first build will complain about the sha256 value, you can replace with the suggested value after that.
+The first build will complain about the hash value; you can replace it with the suggested value after that.
 
-Note that if after you've replaced the value, nix suggests another sha256, then mix is not fetching the dependencies reproducibly. An FOD will not work in that case and you will have to use mix2nix.
+Note that if after you've replaced the value, nix suggests another hash, then mix is not fetching the dependencies reproducibly. An FOD will not work in that case and you will have to use mix2nix.
 
 ##### mixRelease - example {#mix-release-example}
 
@@ -154,7 +154,7 @@ Here is how your `default.nix` file would look for a phoenix project.
 with import <nixpkgs> { };
 
 let
-  # beam.interpreters.erlangR23 is available if you need a particular version
+  # beam.interpreters.erlang_23 is available if you need a particular version
   packages = beam.packagesWith beam.interpreters.erlang;
 
   pname = "your_project";
@@ -170,7 +170,8 @@ let
     pname = "mix-deps-${pname}";
     inherit src version;
     # nix will complain and tell you the right value to replace this with
-    sha256 = lib.fakeSha256;
+    hash = lib.fakeHash;
+    mixEnv = ""; # default is "prod", when empty includes all dependencies, such as "dev", "test".
     # if you have build time environment variables add them here
     MY_ENV_VAR="my_value";
   };
@@ -273,25 +274,25 @@ Usually, we need to create a `shell.nix` file and do our development inside of t
 
 with pkgs;
 let
-  elixir = beam.packages.erlangR24.elixir_1_12;
+  elixir = beam.packages.erlang_24.elixir_1_12;
 in
 mkShell {
   buildInputs = [ elixir ];
 }
 ```
 
-### Using an overlay
+### Using an overlay {#beam-using-overlays}
 
-If you need to use an overlay to change some attributes of a derivation, e.g. if you need a bugfix from a version that is not yet available in nixpkgs, you can override attributes such as `version` (and the corresponding `sha256`) and then use this overlay in your development environment:
+If you need to use an overlay to change some attributes of a derivation, e.g. if you need a bugfix from a version that is not yet available in nixpkgs, you can override attributes such as `version` (and the corresponding `hash`) and then use this overlay in your development environment:
 
-#### `shell.nix`
+#### `shell.nix` {#beam-using-overlays-shell.nix}
 
 ```nix
 let
   elixir_1_13_1_overlay = (self: super: {
       elixir_1_13 = super.elixir_1_13.override {
         version = "1.13.1";
-        sha256 = "0z0b1w2vvw4vsnb99779c2jgn9bgslg7b1pmd9vlbv02nza9qj5p";
+        sha256 = "sha256-t0ic1LcC7EV3avWGdR7VbyX7pGDpnJSW1ZvwvQUPC3w=";
       };
     });
   pkgs = import <nixpkgs> { overlays = [ elixir_1_13_1_overlay ]; };
diff --git a/nixpkgs/doc/languages-frameworks/bower.section.md b/nixpkgs/doc/languages-frameworks/bower.section.md
index 6226dc0702d7..f39539059c04 100644
--- a/nixpkgs/doc/languages-frameworks/bower.section.md
+++ b/nixpkgs/doc/languages-frameworks/bower.section.md
@@ -1,6 +1,6 @@
 # Bower {#sec-bower}
 
-[Bower](https://bower.io) is a package manager for web site front-end components. Bower packages (comprising of build artefacts and sometimes sources) are stored in `git` repositories, typically on Github. The package registry is run by the Bower team with package metadata coming from the `bower.json` file within each package.
+[Bower](https://bower.io) is a package manager for web site front-end components. Bower packages (comprising build artifacts and sometimes sources) are stored in `git` repositories, typically on GitHub. The package registry is run by the Bower team with package metadata coming from the `bower.json` file within each package.
 
 The end result of running Bower is a `bower_components` directory which can be included in the web app's build process.
 
diff --git a/nixpkgs/doc/languages-frameworks/chicken.section.md b/nixpkgs/doc/languages-frameworks/chicken.section.md
index d8c35bd20c50..d329943dc3c2 100644
--- a/nixpkgs/doc/languages-frameworks/chicken.section.md
+++ b/nixpkgs/doc/languages-frameworks/chicken.section.md
@@ -4,7 +4,7 @@
 [R⁵RS](https://schemers.org/Documents/Standards/R5RS/HTML/)-compliant Scheme
 compiler. It includes an interactive mode and a custom package format, "eggs".
 
-## Using Eggs
+## Using Eggs {#sec-chicken-using}
 
 Eggs described in nixpkgs are available inside the
 `chickenPackages.chickenEggs` attrset. Including an egg as a build input is
@@ -22,7 +22,7 @@ might write:
 Both `chicken` and its eggs have a setup hook which configures the environment
 variables `CHICKEN_INCLUDE_PATH` and `CHICKEN_REPOSITORY_PATH`.
 
-## Updating Eggs
+## Updating Eggs {#sec-chicken-updating-eggs}
 
 nixpkgs only knows about a subset of all published eggs. It uses
 [egg2nix](https://github.com/the-kenny/egg2nix) to generate a
@@ -36,7 +36,7 @@ $ cd pkgs/development/compilers/chicken/5/
 $ egg2nix eggs.scm > eggs.nix
 ```
 
-## Adding Eggs
+## Adding Eggs {#sec-chicken-adding-eggs}
 
 When we run `egg2nix`, we obtain one collection of eggs with
 mutually-compatible versions. This means that when we add new eggs, we may
diff --git a/nixpkgs/doc/languages-frameworks/coq.section.md b/nixpkgs/doc/languages-frameworks/coq.section.md
index 901332a7d34f..6ca199708377 100644
--- a/nixpkgs/doc/languages-frameworks/coq.section.md
+++ b/nixpkgs/doc/languages-frameworks/coq.section.md
@@ -8,7 +8,7 @@ The Coq derivation is overridable through the `coq.override overrides`, where ov
 * `customOCamlPackages` (optional, defaults to `null`, which lets Coq choose a version automatically), which can be set to any of the ocaml packages attribute of `ocaml-ng` (such as `ocaml-ng.ocamlPackages_4_10` which is the default for Coq 8.11 for example).
 * `coq-version` (optional, defaults to the short version e.g. "8.10"), is a version number of the form "x.y" that indicates which Coq's version build behavior to mimic when using a source which is not a release. E.g. `coq.override { version = "d370a9d1328a4e1cdb9d02ee032f605a9d94ec7a"; coq-version = "8.10"; }`.
 
-The associated package set can be optained using `mkCoqPackages coq`, where `coq` is the derivation to use.
+The associated package set can be obtained using `mkCoqPackages coq`, where `coq` is the derivation to use.
 
 ## Coq packages attribute sets: `coqPackages` {#coq-packages-attribute-sets-coqpackages}
 
@@ -24,23 +24,23 @@ The recommended way of defining a derivation for a Coq library, is to use the `c
   * if it is a string of the form `"#N"`, and the domain is github, then it tries to download the current head of the pull request `#N` from github,
 * `defaultVersion` (optional). Coq libraries may be compatible with some specific versions of Coq only. The `defaultVersion` attribute is used when no `version` is provided (or if `version = null`) to select the version of the library to use by default, depending on the context. This selection will mainly depend on a `coq` version number but also possibly on other packages versions (e.g. `mathcomp`). If its value ends up to be `null`, the package is marked for removal in end-user `coqPackages` attribute set.
 * `release` (optional, defaults to `{}`), lists all the known releases of the library and for each of them provides an attribute set with at least a `sha256` attribute (you may put the empty string `""` in order to automatically insert a fake sha256, this will trigger an error which will allow you to find the correct sha256), each attribute set of the list of releases also takes optional overloading arguments for the fetcher as below (i.e.`domain`, `owner`, `repo`, `rev` assuming the default fetcher is used) and optional overrides for the result of the fetcher (i.e. `version` and `src`).
-* `fetcher` (optional, defaults to a generic fetching mechanism supporting github or gitlab based infrastructures), is a function that takes at least an `owner`, a `repo`, a `rev`, and a `sha256` and returns an attribute set with a `version` and `src`.
+* `fetcher` (optional, defaults to a generic fetching mechanism supporting github or gitlab based infrastructures), is a function that takes at least an `owner`, a `repo`, a `rev`, and a `hash` and returns an attribute set with a `version` and `src`.
 * `repo` (optional, defaults to the value of `pname`),
 * `owner` (optional, defaults to `"coq-community"`).
 * `domain` (optional, defaults to `"github.com"`), domains including the strings `"github"` or `"gitlab"` in their names are automatically supported, otherwise, one must change the `fetcher` argument to support them (cf `pkgs/development/coq-modules/heq/default.nix` for an example),
 * `releaseRev` (optional, defaults to `(v: v)`), provides a default mapping from release names to revision hashes/branch names/tags,
 * `displayVersion` (optional), provides a way to alter the computation of `name` from `pname`, by explaining how to display version numbers,
 * `namePrefix` (optional, defaults to `[ "coq" ]`), provides a way to alter the computation of `name` from `pname`, by explaining which dependencies must occur in `name`,
-* `nativeBuildInputs` (optional), is a list of executables that are required to build the current derivation, in addition to the default ones (namely `which`, `dune` and `ocaml` depending on whether `useDune2`, `useDune2ifVersion` and `mlPlugin` are set).
+* `nativeBuildInputs` (optional), is a list of executables that are required to build the current derivation, in addition to the default ones (namely `which`, `dune` and `ocaml` depending on whether `useDune`, `useDuneifVersion` and `mlPlugin` are set).
 * `extraNativeBuildInputs` (optional, deprecated), an additional list of derivation to add to `nativeBuildInputs`,
 * `overrideNativeBuildInputs` (optional) replaces the default list of derivation to which `nativeBuildInputs` and `extraNativeBuildInputs` adds extra elements,
 * `buildInputs` (optional), is a list of libraries and dependencies that are required to build and run the current derivation, in addition to the default one `[ coq ]`,
 * `extraBuildInputs` (optional, deprecated), an additional list of derivation to add to `buildInputs`,
 * `overrideBuildInputs` (optional) replaces the default list of derivation to which `buildInputs` and `extraBuildInputs` adds extras elements,
-* `propagatedBuildInputs` (optional) is passed as is to `mkDerivation`, we recommend to use this for Coq libraries and Coq plugin dependencies, as this makes sure the paths of the compiled libraries and plugins will always be added to the build environements of subsequent derivation, which is necessary for Coq packages to work correctly,
+* `propagatedBuildInputs` (optional) is passed as is to `mkDerivation`; we recommend using this for Coq libraries and Coq plugin dependencies, as it makes sure the paths of the compiled libraries and plugins are always added to the build environments of subsequent derivations, which is necessary for Coq packages to work correctly,
 * `mlPlugin` (optional, defaults to `false`). Some extensions (plugins) might require OCaml and sometimes other OCaml packages. Standard dependencies can be added by setting the current option to `true`. For a finer grain control, the `coq.ocamlPackages` attribute can be used in `nativeBuildInputs`, `buildInputs`, and `propagatedBuildInputs` to depend on the same package set Coq was built against.
-* `useDune2ifVersion` (optional, default to `(x: false)` uses Dune2 to build the package if the provided predicate evaluates to true on the version, e.g. `useDune2ifVersion = versions.isGe "1.1"`  will use dune if the version of the package is greater or equal to `"1.1"`,
-* `useDune2` (optional, defaults to `false`) uses Dune2 to build the package if set to true, the presence of this attribute overrides the behavior of the previous one.
+* `useDuneifVersion` (optional, defaults to `(x: false)`) uses Dune to build the package if the provided predicate evaluates to true on the version, e.g. `useDuneifVersion = versions.isGe "1.1"` will use dune if the version of the package is greater or equal to `"1.1"`,
+* `useDune` (optional, defaults to `false`) uses Dune to build the package if set to true; the presence of this attribute overrides the behavior of the previous one.
 * `opam-name` (optional, defaults to concatenating with a dash separator the components of `namePrefix` and `pname`), name of the Dune package to build.
 * `enableParallelBuilding` (optional, defaults to `true`), since it is activated by default, we provide a way to disable it.
 * `extraInstallFlags` (optional), allows to extend `installFlags` which initializes the variable `COQMF_COQLIB` so as to install in the proper subdirectory. Indeed Coq libraries should be installed in `$(out)/lib/coq/${coq.coq-version}/user-contrib/`. Such directories are automatically added to the `$COQPATH` environment variable by the hook defined in the Coq derivation.
@@ -88,3 +88,58 @@ with lib; mkCoqDerivation {
   };
 }
 ```
+
+## Three ways of overriding Coq packages {#coq-overriding-packages}
+
+There are three distinct ways of changing a Coq package by overriding one of its values: `.override`, `overrideCoqDerivation`, and `.overrideAttrs`.  This section explains what sort of values can be overridden with each of these methods.
+
+### `.override` {#coq-override}
+
+`.override` lets you change arguments to a Coq derivation.  In the case of the `multinomials` package above, `.override` would let you override arguments like `mkCoqDerivation`, `version`, `coq`, `mathcomp`, `mathcomp-finmap`, etc.
+
+For example, assuming you have a special `mathcomp` dependency you want to use, here is how you could override the `mathcomp` dependency:
+
+```nix
+multinomials.override {
+  mathcomp = my-special-mathcomp;
+}
+```
+
+In Nixpkgs, all Coq derivations take a `version` argument.  This can be overridden in order to easily use a different version:
+
+```nix
+coqPackages.multinomials.override {
+  version = "1.5.1";
+}
+```
+
+Refer to [](#coq-packages-attribute-sets-coqpackages) for all the different formats that you can potentially pass to `version`, as well as the restrictions.
+
+### `overrideCoqDerivation` {#coq-overrideCoqDerivation}
+
+The `overrideCoqDerivation` function lets you easily change arguments to `mkCoqDerivation`.  These arguments are described in [](#coq-packages-attribute-sets-coqpackages).
+
+For example, here is how you could locally add a new release of the `multinomials` library, and set the `defaultVersion` to use this release:
+
+```nix
+coqPackages.lib.overrideCoqDerivation
+  {
+    defaultVersion = "2.0";
+    release."2.0".sha256 = "1lq8x86vd3vqqh2yq6hvyagpnhfq5wmk5pg2z0xq7b7dbbbhyfkk";
+  }
+  coqPackages.multinomials
+```
+
+### `.overrideAttrs` {#coq-overrideAttrs}
+
+`.overrideAttrs` lets you override arguments to the underlying `stdenv.mkDerivation` call. Internally, `mkCoqDerivation` uses `stdenv.mkDerivation` to create derivations for Coq libraries.  You can override arguments to `stdenv.mkDerivation` with `.overrideAttrs`.
+
+For instance, here is how you could add some code to be performed in the derivation after installation is complete:
+
+```nix
+coqPackages.multinomials.overrideAttrs (oldAttrs: {
+  postInstall = oldAttrs.postInstall or "" + ''
+    echo "you can do anything you want here"
+  '';
+})
+```
diff --git a/nixpkgs/doc/languages-frameworks/crystal.section.md b/nixpkgs/doc/languages-frameworks/crystal.section.md
index cbabba24f0c1..b97e75a58da1 100644
--- a/nixpkgs/doc/languages-frameworks/crystal.section.md
+++ b/nixpkgs/doc/languages-frameworks/crystal.section.md
@@ -27,7 +27,7 @@ crystal.buildCrystalPackage rec {
     owner = "mint-lang";
     repo = "mint";
     rev = version;
-    sha256 = "0vxbx38c390rd2ysvbwgh89v2232sh5rbsp3nk9wzb70jybpslvl";
+    hash = "sha256-dFN9l5fgrM/TtOPqlQvUYgixE4KPr629aBmkwdDoq28=";
   };
 
   # Insert the path to your shards.nix file here
@@ -62,7 +62,7 @@ crystal.buildCrystalPackage rec {
     owner = "mint-lang";
     repo = "mint";
     rev = version;
-    sha256 = "0vxbx38c390rd2ysvbwgh89v2232sh5rbsp3nk9wzb70jybpslvl";
+    hash = "sha256-dFN9l5fgrM/TtOPqlQvUYgixE4KPr629aBmkwdDoq28=";
   };
 
   shardsFile = ./shards.nix;
diff --git a/nixpkgs/doc/languages-frameworks/cuda.section.md b/nixpkgs/doc/languages-frameworks/cuda.section.md
index fccf66bf79d2..6b19e02e74e9 100644
--- a/nixpkgs/doc/languages-frameworks/cuda.section.md
+++ b/nixpkgs/doc/languages-frameworks/cuda.section.md
@@ -8,7 +8,7 @@ A package set is available for each CUDA version, so for example
 `cudaPackages_11_6`. Within each set is a matching version of the above listed
 packages. Additionally, other versions of the packages that are packaged and
 compatible are available as well. For example, there can be a
-`cudaPackages.cudnn_8_3_2` package.
+`cudaPackages.cudnn_8_3` package.
 
 To use one or more CUDA packages in an expression, give the expression a `cudaPackages` parameter, and in case CUDA is optional
 ```nix
@@ -27,8 +27,27 @@ package set to make it the default. This guarantees you get a consistent package
 set.
 ```nix
 mypkg = let
-  cudaPackages = cudaPackages_11_5.overrideScope' (final: prev {
-    cudnn = prev.cudnn_8_3_2;
+  cudaPackages = cudaPackages_11_5.overrideScope' (final: prev: {
+    cudnn = prev.cudnn_8_3;
   }});
 in callPackage { inherit cudaPackages; };
 ```
+
+The CUDA NVCC compiler requires flags to determine which hardware you
+want to target, in terms of SASS (real hardware) or PTX (JIT kernels).
+
+Nixpkgs tries to provide sensible real-architecture defaults based on the
+CUDA toolkit version, with PTX support for future hardware.  Experienced
+users may optimize this configuration for a variety of reasons such as
+reducing binary size and compile time, supporting legacy hardware, or
+optimizing for specific hardware.
+
+You may provide capabilities to add support or reduce binary size through
+`config` using `cudaCapabilities = [ "6.0" "7.0" ];` and
+`cudaForwardCompat = true;` if you want PTX support for future hardware.
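+
+For instance, a sketch of pinning the targeted capabilities when importing Nixpkgs (the capability list is only an example):
+
+```nix
+import <nixpkgs> {
+  config = {
+    allowUnfree = true;
+    cudaCapabilities = [ "6.0" "7.0" ];
+    cudaForwardCompat = true;
+  };
+}
+```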
+
+Please consult [GPUs supported](https://en.wikipedia.org/wiki/CUDA#GPUs_supported)
+for your specific card(s).
+
+Library maintainers should consult [NVCC Docs](https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/)
+and release notes for their software package.
diff --git a/nixpkgs/doc/languages-frameworks/cuelang.section.md b/nixpkgs/doc/languages-frameworks/cuelang.section.md
new file mode 100644
index 000000000000..86304208aa20
--- /dev/null
+++ b/nixpkgs/doc/languages-frameworks/cuelang.section.md
@@ -0,0 +1,93 @@
+# Cue (Cuelang) {#cuelang}
+
+[Cuelang](https://cuelang.org/) is a language to:
+
+- describe schemas and validate backward-compatibility
+- generate code and schemas in various formats (e.g. JSON Schema, OpenAPI)
+- do configuration akin to [Dhall Lang](https://dhall-lang.org/)
+- perform data validation
+
+## Cuelang schema quick start {#cuelang-quickstart}
+
+Cuelang schemas are similar to JSON; here is a quick cheatsheet:
+
+- Default types include: `null`, `string`, `bool`, `bytes`, `number`, `int`, `float`, and lists as `[...T]` where `T` is a type.
+- All structures, defined by `myStructName: { <fields> }`, are **open** -- they accept fields which are not specified.
+- Closed structures can be built by doing `myStructName: close({ <fields> })` -- they are strict in what they accept.
+- `#X` are **definitions**, referenced definitions are **recursively closed**, i.e. all its children structures are **closed**.
+- `&` operator is the [unification operator](https://cuelang.org/docs/references/spec/#unification) (similar to a type-level merging operator), `|` is the [disjunction operator](https://cuelang.org/docs/references/spec/#disjunction) (similar to a type-level union operator).
+- Values **are** types, i.e. `myStruct: { a: 3 }` is a valid type definition that only allows `3` as a value.
+
+- Read <https://cuelang.org/docs/concepts/logic/> to learn more about the semantics.
+- Read <https://cuelang.org/docs/references/spec/> to learn about the language specification.
+
+## `writeCueValidator` {#cuelang-writeCueValidator}
+
+Nixpkgs provides a `pkgs.writeCueValidator` helper, which will write a validation script based on the provided Cuelang schema.
+
+Here is an example:
+```nix
+pkgs.writeCueValidator
+  (pkgs.writeText "schema.cue" ''
+    #Def1: {
+      field1: string
+    }
+  '')
+  { document = "#Def1"; }
+```
+
+- The first parameter is the Cue schema file.
+- The second parameter is an options parameter; currently, only `document` can be passed.
+
+`document`: match your input data against this fragment of structure or definition, e.g. you may use the same schema file but different documents based on the data you are validating.
+
+Another example, given the following `validator.nix`:
+```nix
+{ pkgs ? import <nixpkgs> {} }:
+let
+  genericValidator = version:
+  pkgs.writeCueValidator
+    (pkgs.writeText "schema.cue" ''
+      #Version1: {
+        field1: string
+      }
+      #Version2: #Version1 & {
+        field1: "unused"
+      }''
+    )
+    { document = "#Version${toString version}"; };
+in
+{
+  validateV1 = genericValidator 1;
+  validateV2 = genericValidator 2;
+}
+```
+
+The result is a script that will validate the file you pass as the first argument against the schema you provided to `writeCueValidator`.
+
+The input can be in any format that `cue vet` supports, e.g. YAML or JSON.
+
+Here is an example file, named `example.json`, containing the following JSON:
+```json
+{ "field1": "abc" }
+```
+
+You can run the result script (named `validate`) as follows:
+
+```console
+$ nix-build validator.nix
+$ ./result example.json
+$ ./result-2 example.json
+field1: conflicting values "unused" and "abc":
+    ./example.json:1:13
+    ../../../../../../nix/store/v64dzx3vr3glpk0cq4hzmh450lrwh6sg-schema.cue:5:11
+$ sed -i 's/"abc"/3/' example.json
+$ ./result example.json
+field1: conflicting values 3 and string (mismatched types int and string):
+    ./example.json:1:13
+    ../../../../../../nix/store/v64dzx3vr3glpk0cq4hzmh450lrwh6sg-schema.cue:5:11
+```
+
+**Known limitations**
+
+* The script will enforce **concrete** values and will not accept lossy transformations (strictness). You can add these options if you need them.
diff --git a/nixpkgs/doc/languages-frameworks/dart.section.md b/nixpkgs/doc/languages-frameworks/dart.section.md
new file mode 100644
index 000000000000..b00327b78eb2
--- /dev/null
+++ b/nixpkgs/doc/languages-frameworks/dart.section.md
@@ -0,0 +1,65 @@
+# Dart {#sec-language-dart}
+
+## Dart applications {#ssec-dart-applications}
+
+The function `buildDartApplication` builds Dart applications managed with pub.
+
+It fetches its Dart dependencies automatically through `fetchDartDeps`, and (through a series of hooks) builds and installs the executables specified in the pubspec file. The hooks can be used in other derivations, if needed. The phases can also be overridden to do something different from installing binaries.
+
+If you are packaging a Flutter desktop application, use [`buildFlutterApplication`](#ssec-dart-flutter) instead.
+
+`vendorHash`: the hash of the output of the dependency fetcher derivation. To obtain it, simply set it to `lib.fakeHash` (or omit it) and run the build ([more details here](#sec-source-hashes)).
+
+If the upstream source is missing a `pubspec.lock` file, you'll have to vendor one and specify it using `pubspecLockFile`. If it is needed, one will be generated for you and printed when attempting to build the derivation.
+
+The `dart` commands that are run can be overridden through `pubGetScript` and `dartCompileCommand`; you can also add flags using `dartCompileFlags` or `dartJitFlags`.
+
+Dart supports multiple [output types](https://dart.dev/tools/dart-compile#types-of-output); you can choose between them using `dartOutputType` (defaults to `exe`). If you want to override the binaries path or the source path they come from, you can use `dartEntryPoints`. Outputs that require a runtime will automatically be wrapped with the relevant runtime (`dartaotruntime` for `aot-snapshot`, `dart run` for `jit-snapshot` and `kernel`, `node` for `js`); this can be overridden through `dartRuntimeCommand`.
+
+```nix
+{ buildDartApplication, fetchFromGitHub }:
+
+buildDartApplication rec {
+  pname = "dart-sass";
+  version = "1.62.1";
+
+  src = fetchFromGitHub {
+    owner = "sass";
+    repo = pname;
+    rev = version;
+    hash = "sha256-U6enz8yJcc4Wf8m54eYIAnVg/jsGi247Wy8lp1r1wg4=";
+  };
+
+  pubspecLockFile = ./pubspec.lock;
+  vendorHash = "sha256-Atm7zfnDambN/BmmUf4BG0yUz/y6xWzf0reDw3Ad41s=";
+}
+```
+
+## Flutter applications {#ssec-dart-flutter}
+
+The function `buildFlutterApplication` builds Flutter applications.
+
+The `deps.json` file must always be provided when packaging in Nixpkgs. It will be generated and printed if you attempt to build the derivation without one. Alternatively, `autoDepsList` may be set to `true` when outside of Nixpkgs, as it relies on import-from-derivation.
+
+A `pubspec.lock` file must be available. See the [Dart documentation](#ssec-dart-applications) for more details.
+
+```nix
+{ flutter, fetchFromGitHub }:
+
+flutter.buildFlutterApplication {
+  pname = "firmware-updater";
+  version = "unstable-2023-04-30";
+
+  src = fetchFromGitHub {
+    owner = "canonical";
+    repo = "firmware-updater";
+    rev = "6e7dbdb64e344633ea62874b54ff3990bd3b8440";
+    sha256 = "sha256-s5mwtr5MSPqLMN+k851+pFIFFPa0N1hqz97ys050tFA=";
+    fetchSubmodules = true;
+  };
+
+  pubspecLockFile = ./pubspec.lock;
+  depsListFile = ./deps.json;
+  vendorHash = "sha256-cdMO+tr6kYiN5xKXa+uTMAcFf2C75F3wVPrn21G4QPQ=";
+}
+```
diff --git a/nixpkgs/doc/languages-frameworks/dhall.section.md b/nixpkgs/doc/languages-frameworks/dhall.section.md
index 4b49908b0b0c..846b8cfd3163 100644
--- a/nixpkgs/doc/languages-frameworks/dhall.section.md
+++ b/nixpkgs/doc/languages-frameworks/dhall.section.md
@@ -91,7 +91,7 @@ buildDhallPackage {
 let
   nixpkgs = builtins.fetchTarball {
     url    = "https://github.com/NixOS/nixpkgs/archive/94b2848559b12a8ed1fe433084686b2a81123c99.tar.gz";
-    sha256 = "1pbl4c2dsaz2lximgd31m96jwbps6apn3anx8cvvhk1gl9rkg107";
+    sha256 = "sha256-B4Q3c6IvTLg3Q92qYa8y+i4uTaphtFdjp+Ir3QQjdN0=";
   };
 
   dhallOverlay = self: super: {
@@ -295,7 +295,7 @@ terms of `buildDhallPackage` that accepts the following arguments:
 * `document`: Set to `true` to generate documentation for the package
 
 Additionally, `buildDhallGitHubPackage` accepts the same arguments as
-`fetchFromGitHub`, such as `sha256` or `fetchSubmodules`.
+`fetchFromGitHub`, such as `hash` or `fetchSubmodules`.
 
 ## `dhall-to-nixpkgs` {#ssec-dhall-dhall-to-nixpkgs}
 
@@ -307,16 +307,16 @@ $ nix-env --install --attr haskellPackages.dhall-nixpkgs
 
 $ nix-env --install --attr nix-prefetch-git  # Used by dhall-to-nixpkgs
 
-$ dhall-to-nixpkgs github https://github.com/Gabriel439/dhall-semver.git
+$ dhall-to-nixpkgs github https://github.com/Gabriella439/dhall-semver.git
 { buildDhallGitHubPackage, Prelude }:
   buildDhallGitHubPackage {
     name = "dhall-semver";
     githubBase = "github.com";
-    owner = "Gabriel439";
+    owner = "Gabriella439";
     repo = "dhall-semver";
     rev = "2d44ae605302ce5dc6c657a1216887fbb96392a4";
     fetchSubmodules = false;
-    sha256 = "0y8shvp8srzbjjpmnsvz9c12ciihnx1szs0yzyi9ashmrjvd0jcz";
+    hash = "sha256-n0nQtswVapWi/x7or0O3MEYmAkt/a1uvlOtnje6GGnk=";
     directory = "";
     file = "package.dhall";
     source = false;
diff --git a/nixpkgs/doc/languages-frameworks/dotnet.section.md b/nixpkgs/doc/languages-frameworks/dotnet.section.md
index 4c245a7544e1..b6a622875a76 100644
--- a/nixpkgs/doc/languages-frameworks/dotnet.section.md
+++ b/nixpkgs/doc/languages-frameworks/dotnet.section.md
@@ -11,7 +11,7 @@ with import <nixpkgs> {};
 mkShell {
   name = "dotnet-env";
   packages = [
-    dotnet-sdk_3
+    dotnet-sdk
   ];
 }
 ```
@@ -27,36 +27,57 @@ mkShell {
   name = "dotnet-env";
   packages = [
     (with dotnetCorePackages; combinePackages [
-      sdk_3_1
-      sdk_5_0
+      sdk_6_0
+      sdk_7_0
     ])
   ];
 }
 ```
 
-This will produce a dotnet installation that has the dotnet 3.1, 3.0, and 2.1 sdk. The first sdk listed will have it's cli utility present in the resulting environment. Example info output:
+This will produce a dotnet installation that has the dotnet 6.0 and 7.0 SDKs. The first SDK listed will have its CLI utility present in the resulting environment. Example info output:
 
 ```ShellSession
 $ dotnet --info
-.NET Core SDK (reflecting any global.json):
- Version:   3.1.101
- Commit:    b377529961
-
-...
-
-.NET Core SDKs installed:
-  2.1.803 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/sdk]
-  3.0.102 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/sdk]
-  3.1.101 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/sdk]
-
-.NET Core runtimes installed:
-  Microsoft.AspNetCore.All 2.1.15 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/shared/Microsoft.AspNetCore.All]
-  Microsoft.AspNetCore.App 2.1.15 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/shared/Microsoft.AspNetCore.App]
-  Microsoft.AspNetCore.App 3.0.2 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/shared/Microsoft.AspNetCore.App]
-  Microsoft.AspNetCore.App 3.1.1 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/shared/Microsoft.AspNetCore.App]
-  Microsoft.NETCore.App 2.1.15 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/shared/Microsoft.NETCore.App]
-  Microsoft.NETCore.App 3.0.2 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/shared/Microsoft.NETCore.App]
-  Microsoft.NETCore.App 3.1.1 [/nix/store/iiv98i2jdi226dgh4jzkkj2ww7f8jgpd-dotnet-core-combined/shared/Microsoft.NETCore.App]
+.NET SDK:
+ Version:   7.0.202
+ Commit:    6c74320bc3
+
+Runtime Environment:
+ OS Name:     nixos
+ OS Version:  23.05
+ OS Platform: Linux
+ RID:         linux-x64
+ Base Path:   /nix/store/n2pm44xq20hz7ybsasgmd7p3yh31gnh4-dotnet-sdk-7.0.202/sdk/7.0.202/
+
+Host:
+  Version:      7.0.4
+  Architecture: x64
+  Commit:       0a396acafe
+
+.NET SDKs installed:
+  6.0.407 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/sdk]
+  7.0.202 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/sdk]
+
+.NET runtimes installed:
+  Microsoft.AspNetCore.App 6.0.15 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/shared/Microsoft.AspNetCore.App]
+  Microsoft.AspNetCore.App 7.0.4 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/shared/Microsoft.AspNetCore.App]
+  Microsoft.NETCore.App 6.0.15 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/shared/Microsoft.NETCore.App]
+  Microsoft.NETCore.App 7.0.4 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/shared/Microsoft.NETCore.App]
+
+Other architectures found:
+  None
+
+Environment variables:
+  Not set
+
+global.json file:
+  Not found
+
+Learn more:
+  https://aka.ms/dotnet/info
+
+Download .NET:
+  https://aka.ms/dotnet/download
 ```
 
 ## dotnet-sdk vs dotnetCorePackages.sdk {#dotnet-sdk-vs-dotnetcorepackages.sdk}
@@ -71,7 +92,7 @@ The `dotnetCorePackages.sdk` contains both a runtime and the full sdk of a given
 
 To package Dotnet applications, you can use `buildDotnetModule`. This has similar arguments to `stdenv.mkDerivation`, with the following additions:
 
-* `projectFile` has to be used for specifying the dotnet project file relative to the source root. These usually have `.sln` or `.csproj` file extensions. This can be an array of multiple projects as well.
+* `projectFile` is used for specifying the dotnet project file, relative to the source root. These usually have `.sln` or `.csproj` file extensions. This can be a list of multiple projects as well. Most of the time dotnet can figure this location out by itself, so this should only be set if necessary.
 * `nugetDeps` takes either a path to a `deps.nix` file, or a derivation. The `deps.nix` file can be generated using the script attached to `passthru.fetch-deps`. This file can also be generated manually using `nuget-to-nix` tool, which is available in nixpkgs. If the argument is a derivation, it will be used directly and assume it has the same output as `mkNugetDeps`.
 * `packNupkg` is used to pack project as a `nupkg`, and installs it to `$out/share`. If set to `true`, the derivation can be used as a dependency for another dotnet project by adding it to `projectReferences`.
 * `projectReferences` can be used to resolve `ProjectReference` project items. Referenced projects can be packed with `buildDotnetModule` by setting the `packNupkg = true` attribute and passing a list of derivations to `projectReferences`. Since we are sharing referenced projects as NuGets they must be added to csproj/fsproj files as `PackageReference` as well.
@@ -88,7 +109,7 @@ To package Dotnet applications, you can use `buildDotnetModule`. This has simila
 * `runtimeDeps` is used to wrap libraries into `LD_LIBRARY_PATH`. This is how dotnet usually handles runtime dependencies.
 * `buildType` is used to change the type of build. Possible values are `Release`, `Debug`, etc. By default, this is set to `Release`.
 * `selfContainedBuild` allows to enable the [self-contained](https://docs.microsoft.com/en-us/dotnet/core/deploying/#publish-self-contained) build flag. By default, it is set to false and generated applications have a dependency on the selected dotnet runtime. If enabled, the dotnet runtime is bundled into the executable and the built app has no dependency on Dotnet.
-* `dotnet-sdk` is useful in cases where you need to change what dotnet SDK is being used.
+* `dotnet-sdk` is useful in cases where you need to change what dotnet SDK is being used. You can also set this to the result of `dotnetCorePackages.combinePackages`, if the project uses multiple SDKs to build.
 * `dotnet-runtime` is useful in cases where you need to change what dotnet runtime is being used. This can be either a regular dotnet runtime, or an aspnetcore.
 * `dotnet-test-sdk` is useful in cases where unit tests expect a different dotnet SDK. By default, this is set to the `dotnet-sdk` attribute.
 * `testProjectFile` is useful in cases where the regular project file does not contain the unit tests. It gets restored and built, but not installed. You may need to regenerate your nuget lockfile after setting this.
@@ -100,7 +121,7 @@ To package Dotnet applications, you can use `buildDotnetModule`. This has simila
 * `dotnetPackFlags` can be used to pass flags to `dotnet pack`. Used only if `packNupkg` is set to `true`.
 * `dotnetFlags` can be used to pass flags to all of the above phases.
 
-When packaging a new application, you need to fetch it's dependencies. You can set `nugetDeps` to an empty string to make the derivation temporarily evaluate, and then run `nix-build -A package.passthru.fetch-deps` to generate it's dependency fetching script. After running the script, you should have the location of the generated lockfile printed to the console. This can be copied to a stable directory. Note that if either `projectFile` or `nugetDeps` are unset, this script cannot be generated!
+When packaging a new application, you need to fetch its dependencies. You can run `nix-build -A package.fetch-deps` to generate a script that will build a lockfile for you. After running the script, the location of the generated lockfile is printed to the console; copy it to a stable directory. Then set `nugetDeps = ./deps.nix` and you're ready to build the derivation.
 
 Here is an example `default.nix`, using some of the previously discussed arguments:
 ```nix
@@ -119,9 +140,8 @@ in buildDotnetModule rec {
 
   projectReferences = [ referencedProject ]; # `referencedProject` must contain `nupkg` in the folder structure.
 
-  dotnet-sdk = dotnetCorePackages.sdk_3_1;
-  dotnet-runtime = dotnetCorePackages.net_5_0;
-  dotnetFlags = [ "--runtime linux-x64" ];
+  dotnet-sdk = dotnetCorePackages.sdk_6_0;
+  dotnet-runtime = dotnetCorePackages.runtime_6_0;
 
   executables = [ "foo" ]; # This wraps "$out/lib/$pname/foo" to `$out/bin/foo`.
   executables = []; # Don't install any executables.
diff --git a/nixpkgs/doc/languages-frameworks/emscripten.section.md b/nixpkgs/doc/languages-frameworks/emscripten.section.md
index c96f689c4c00..5f93dd5ff315 100644
--- a/nixpkgs/doc/languages-frameworks/emscripten.section.md
+++ b/nixpkgs/doc/languages-frameworks/emscripten.section.md
@@ -56,11 +56,11 @@ See the `zlib` example:
 
     zlib = (pkgs.zlib.override {
       stdenv = pkgs.emscriptenStdenv;
-    }).overrideDerivation
+    }).overrideAttrs
     (old: rec {
       buildInputs = old.buildInputs ++ [ pkg-config ];
       # we need to reset this setting!
-      NIX_CFLAGS_COMPILE="";
+      env = (old.env or { }) // { NIX_CFLAGS_COMPILE = ""; };
       configurePhase = ''
         # FIXME: Some tests require writing at $HOME
         HOME=$TMPDIR
@@ -121,7 +121,7 @@ This `xmlmirror` example features a emscriptenPackage which is defined completel
       src = pkgs.fetchgit {
         url = "https://gitlab.com/odfplugfest/xmlmirror.git";
         rev = "4fd7e86f7c9526b8f4c1733e5c8b45175860a8fd";
-        sha256 = "1jasdqnbdnb83wbcnyrp32f36w3xwhwp0wq8lwwmhqagxrij1r4b";
+        hash = "sha256-i+QgY+5PYVg5pwhzcDnkfXAznBg3e8sWH2jZtixuWsk=";
       };
 
       configurePhase = ''
diff --git a/nixpkgs/doc/languages-frameworks/gnome.section.md b/nixpkgs/doc/languages-frameworks/gnome.section.md
index d5996cce13cf..897ebd7861fa 100644
--- a/nixpkgs/doc/languages-frameworks/gnome.section.md
+++ b/nixpkgs/doc/languages-frameworks/gnome.section.md
@@ -27,14 +27,14 @@ The modules are typically installed to `lib/gio/modules/` directory of a package
 
 In particular, we recommend:
 
-* adding `dconf.lib` for any software on Linux that reads [GSettings](#ssec-gnome-settings) (even transitivily through e.g. GTK’s file manager)
+* adding `dconf.lib` for any software on Linux that reads [GSettings](#ssec-gnome-settings) (even transitively through e.g. GTK’s file manager)
 * adding `glib-networking` for any software that accesses network using GIO or libsoup – glib-networking contains a module that implements TLS support and loads system-wide proxy settings
 
 To allow software to use various virtual file systems, the `gvfs` package can also be added. But that is usually an optional feature, so we typically use `gvfs` from the system (e.g. installed globally using the NixOS module).
 
 ### GdkPixbuf loaders {#ssec-gnome-gdk-pixbuf-loaders}
 
-GTK applications typically use [GdkPixbuf](https://developer.gnome.org/gdk-pixbuf/stable/) to load images. But `gdk-pixbuf` package only supports basic bitmap formats like JPEG, PNG or TIFF, requiring to use third-party loader modules for other formats. This is especially painful since GTK itself includes SVG icons, which cannot be rendered without a loader provided by `librsvg`.
+GTK applications typically use [GdkPixbuf](https://gitlab.gnome.org/GNOME/gdk-pixbuf/) to load images. But the `gdk-pixbuf` package only supports basic bitmap formats like JPEG, PNG or TIFF, requiring the use of third-party loader modules for other formats. This is especially painful since GTK itself includes SVG icons, which cannot be rendered without a loader provided by `librsvg`.
 
 Unlike other libraries mentioned in this section, GdkPixbuf only supports a single value in its controlling environment variable `GDK_PIXBUF_MODULE_FILE`. It is supposed to point to a cache file containing information about the available loaders. Each loader package will contain a `lib/gdk-pixbuf-2.0/2.10.0/loaders.cache` file describing the default loaders in `gdk-pixbuf` package plus the loader contained in the package itself. If you want to use multiple third-party loaders, you will need to create your own cache file manually. Fortunately, this is pretty rare as [not many loaders exist](https://gitlab.gnome.org/federico/gdk-pixbuf-survey/blob/master/src/modules.md).
 
@@ -70,7 +70,7 @@ Also make sure that `icon-theme.cache` is installed for each theme provided by t
 
 ### GTK Themes {#ssec-gnome-themes}
 
-Previously, a GTK theme needed to be in `XDG_DATA_DIRS`. This is no longer necessary for most programs since GTK incorporated Adwaita theme. Some programs (for example, those designed for [elementary HIG](https://elementary.io/docs/human-interface-guidelines#human-interface-guidelines)) might require a special theme like `pantheon.elementary-gtk-theme`.
+Previously, a GTK theme needed to be in `XDG_DATA_DIRS`. This is no longer necessary for most programs since GTK incorporated Adwaita theme. Some programs (for example, those designed for [elementary HIG](https://docs.elementary.io/hig)) might require a special theme like `pantheon.elementary-gtk-theme`.
 
 ### GObject introspection typelibs {#ssec-gnome-typelibs}
 
@@ -116,10 +116,6 @@ For convenience, it also adds `dconf.lib` for a GIO module implementing a GSetti
 
 - []{#ssec-gnome-hooks-gobject-introspection} `gobject-introspection` setup hook populates `GI_TYPELIB_PATH` variable with `lib/girepository-1.0` directories of dependencies, which is then added to wrapper by `wrapGAppsHook`. It also adds `share` directories of dependencies to `XDG_DATA_DIRS`, which is intended to promote GIR files but it also [pollutes the closures](https://github.com/NixOS/nixpkgs/issues/32790) of packages using `wrapGAppsHook`.
 
-  ::: {.warning}
-  The setup hook [currently](https://github.com/NixOS/nixpkgs/issues/56943) does not work in expressions with `strictDeps` enabled, like Python packages. In those cases, you will need to disable it with `strictDeps = false;`.
-  :::
-
 - []{#ssec-gnome-hooks-gst-grl-plugins} Setup hooks of `gst_all_1.gstreamer` and `grilo` will populate the `GST_PLUGIN_SYSTEM_PATH_1_0` and `GRL_PLUGIN_PATH` variables, respectively, which will then be added to the wrapper by `wrapGAppsHook`.
 
 You can also pass additional arguments to `makeWrapper` using `gappsWrapperArgs` in `preFixup` hook:
diff --git a/nixpkgs/doc/languages-frameworks/go.section.md b/nixpkgs/doc/languages-frameworks/go.section.md
index 8616d64e7c4e..cf1808414234 100644
--- a/nixpkgs/doc/languages-frameworks/go.section.md
+++ b/nixpkgs/doc/languages-frameworks/go.section.md
@@ -11,8 +11,16 @@ The function `buildGoModule` builds Go programs managed with Go modules. It buil
 
 In the following is an example expression using `buildGoModule`, the following arguments are of special significance to the function:
 
-- `vendorHash`: is the hash of the output of the intermediate fetcher derivation. `vendorHash` can also take `null` as an input. When `null` is used as a value, rather than fetching the dependencies and vendoring them, we use the vendoring included within the source repo. If you'd like to not have to update this field on dependency changes, run `go mod vendor` in your source repo and set `vendorHash = null;`
-- `proxyVendor`: Fetches (go mod download) and proxies the vendor directory. This is useful if your code depends on c code and go mod tidy does not include the needed sources to build or if any dependency has case-insensitive conflicts which will produce platform dependant `vendorHash` checksums.
+- `vendorHash`: is the hash of the output of the intermediate fetcher derivation.
+
+  `vendorHash` can also be set to `null`.
+  In that case, rather than fetching the dependencies and vendoring them, the dependencies vendored in the source repo will be used.
+
+  To avoid updating this field when dependencies change, run `go mod vendor` in your source repo and set `vendorHash = null;`
+
+  To obtain the actual hash, set `vendorHash = lib.fakeSha256;` and run the build ([more details here](#sec-source-hashes)).
+- `proxyVendor`: Fetches (go mod download) and proxies the vendor directory. This is useful if your code depends on c code and go mod tidy does not include the needed sources to build or if any dependency has case-insensitive conflicts which will produce platform-dependent `vendorHash` checksums.
+- `modPostBuild`: Shell commands to run after the build of the go-modules executes `go mod vendor`, and before calculating the fixed-output derivation's `vendorHash` (or `vendorSha256`). Note that if you change this attribute, you need to update the `vendorHash` (or `vendorSha256`) attribute.
 
 ```nix
 pet = buildGoModule rec {
@@ -23,7 +31,7 @@ pet = buildGoModule rec {
     owner = "knqyf263";
     repo = "pet";
     rev = "v${version}";
-    sha256 = "0m2fzpqxk7hrbxsgqplkg7h2p7gv6s1miymv3gvw0cz039skag0s";
+    hash = "sha256-Gjw1dRrgM8D3G7v6WIM2+50r4HmTXvx0Xxme2fH9TlQ=";
   };
 
   vendorHash = "sha256-ciBIR+a1oaYH+H1PcC8cD8ncfJczk1IiJ8iYNM+R6aA=";
@@ -59,7 +67,7 @@ deis = buildGoPackage rec {
     owner = "deis";
     repo = "deis";
     rev = "v${version}";
-    sha256 = "1qv9lxqx7m18029lj8cw3k7jngvxs4iciwrypdy0gd2nnghc68sw";
+    hash = "sha256-XCPD4LNWtAd8uz7zyCLRfT8rzxycIUmTACjU03GnaeM=";
   };
 
   goDeps = ./deps.nix;
@@ -76,11 +84,11 @@ The `goDeps` attribute can be imported from a separate `nix` file that defines w
     goPackagePath = "gopkg.in/yaml.v2";
     fetch = {
       # `fetch type` that needs to be used to get package source.
-      # If `git` is used there should be `url`, `rev` and `sha256` defined next to it.
+      # If `git` is used there should be `url`, `rev` and `hash` defined next to it.
       type = "git";
       url = "https://gopkg.in/yaml.v2";
       rev = "a83829b6f1293c91addabc89d0571c246397bbf4";
-      sha256 = "1m4dsmk90sbi17571h6pld44zxz7jc4lrnl4f27dpd1l8g5xvjhh";
+      hash = "sha256-EMrdy0M0tNuOcITaTAmT5/dPSKPXwHDKCXFpkGbVjdQ=";
     };
   }
   {
@@ -89,7 +97,7 @@ The `goDeps` attribute can be imported from a separate `nix` file that defines w
       type = "git";
       url = "https://github.com/docopt/docopt-go";
       rev = "784ddc588536785e7299f7272f39101f7faccc3f";
-      sha256 = "0wwz48jl9fvl1iknvn9dqr4gfy1qs03gxaikrxxp9gry6773v3sj";
+      hash = "sha256-Uo89zjE+v3R7zzOq/gbQOHj3SMYt2W1nDHS7RCUin3M=";
     };
   }
 ]
@@ -107,7 +115,16 @@ done
 
 ## Attributes used by the builders {#ssec-go-common-attributes}
 
-Both `buildGoModule` and `buildGoPackage` can be tweaked to behave slightly differently, if the following attributes are used:
+Many attributes [controlling the build phase](#variables-controlling-the-build-phase) are respected by both `buildGoModule` and `buildGoPackage`. Note that `buildGoModule` also reads the following attributes when building the `vendor/` go-modules fixed-output derivation:
+
+- [`sourceRoot`](#var-stdenv-sourceRoot)
+- [`prePatch`](#var-stdenv-prePatch)
+- [`patches`](#var-stdenv-patches)
+- [`patchFlags`](#var-stdenv-patchFlags)
+- [`postPatch`](#var-stdenv-postPatch)
+- [`preBuild`](#var-stdenv-preBuild)
+
+In addition to the above attributes, and the many more variables also respected by `stdenv.mkDerivation`, both `buildGoModule` and `buildGoPackage` respect Go-specific attributes that tweak them to behave slightly differently:
 
 ### `ldflags` {#var-go-ldflags}
 
diff --git a/nixpkgs/doc/languages-frameworks/haskell.section.md b/nixpkgs/doc/languages-frameworks/haskell.section.md
index 1fda505a2255..87da2e63663a 100644
--- a/nixpkgs/doc/languages-frameworks/haskell.section.md
+++ b/nixpkgs/doc/languages-frameworks/haskell.section.md
@@ -1,7 +1,1161 @@
 # Haskell {#haskell}
 
-The documentation for the Haskell infrastructure is published at
-<https://haskell4nix.readthedocs.io/>. The source code for that
-site lives in the `doc/` sub-directory of the
-[`cabal2nix` Git repository](https://github.com/NixOS/cabal2nix)
-and changes can be submitted there.
+The Haskell infrastructure in Nixpkgs has two main purposes: The primary purpose
+is to provide a Haskell compiler and build tools as well as infrastructure for
+packaging Haskell-based packages.
+
+The secondary purpose is to provide support for Haskell development environments
+including prebuilt Haskell libraries. However, in this area sacrifices have been
+made due to self-imposed restrictions in Nixpkgs, to lessen the maintenance
+effort and to improve performance. (More details in the subsection
+[Limitations](#haskell-limitations).)
+
+## Available packages {#haskell-available-packages}
+
+The compiler and most build tools are exposed at the top level:
+
+* `ghc` is the default version of GHC
+* Language specific tools: `cabal-install`, `stack`, `hpack`, …
+
+Many “normal” user-facing packages written in Haskell, like `niv` or `cachix`,
+are also exposed at the top level, and there is nothing Haskell-specific to
+installing and using them.
+
+All of these packages are originally defined in the `haskellPackages` package
+set and are re-exposed with a reduced dependency closure for convenience
+(see `justStaticExecutables` below).
+
+The `haskellPackages` set includes at least one version of every package from
+Hackage as well as some manually injected packages. This amounts to a lot of
+packages, so it is hidden from `nix-env -qa` by default for performance reasons.
+You can still list all packages in the set like this:
+
+```console
+$ nix-env -f '<nixpkgs>' -qaP -A haskellPackages
+haskellPackages.a50                                                         a50-0.5
+haskellPackages.AAI                                                         AAI-0.2.0.1
+haskellPackages.aasam                                                       aasam-0.2.0.0
+haskellPackages.abacate                                                     abacate-0.0.0.0
+haskellPackages.abc-puzzle                                                  abc-puzzle-0.2.1
+…
+```
+Also, the `haskellPackages` set is included on [search.nixos.org].
+
+The attribute names in `haskellPackages` always correspond to the package name
+on Hackage. Since Hackage allows names that are not valid Nix identifiers
+unless escaped, you need to take care when handling attribute names like
+`3dmodels`.
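+
+For example, such a name has to be quoted to form a valid Nix attribute path:
+
+```nix
+# `3dmodels` starts with a digit, so the attribute name must be quoted:
+haskellPackages."3dmodels"
+```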
+
+For packages that are part of [Stackage], we use the version prescribed by a
+Stackage solver (usually the current LTS one) as the default version. For all
+other packages we use the latest version from Hackage. See
+[below](#haskell-available-versions) to learn which versions are provided
+exactly.
+
+Roughly half of the 16K packages contained in `haskellPackages` don't actually
+build and are marked as broken semi-automatically. Most of those packages are
+deprecated or unmaintained, but sometimes packages that should build do not
+build. Very often, fixing them is not a lot of work.
+
+<!--
+TODO(@sternenseemann):
+How you can help with that is
+described in [Fixing a broken package](#haskell-fixing-a-broken-package).
+-->
+
+`haskellPackages` is built with our default compiler, but we also provide other
+releases of GHC and package sets built with them. You can list all available
+compilers like this:
+
+```console
+$ nix-env -f '<nixpkgs>' -qaP -A haskell.compiler
+haskell.compiler.ghc810                  ghc-8.10.7
+haskell.compiler.ghc88                   ghc-8.8.4
+haskell.compiler.ghc90                   ghc-9.0.2
+haskell.compiler.ghc924                  ghc-9.2.4
+haskell.compiler.ghc925                  ghc-9.2.5
+haskell.compiler.ghc926                  ghc-9.2.6
+haskell.compiler.ghc92                   ghc-9.2.7
+haskell.compiler.ghc942                  ghc-9.4.2
+haskell.compiler.ghc943                  ghc-9.4.3
+haskell.compiler.ghc94                   ghc-9.4.4
+haskell.compiler.ghcHEAD                 ghc-9.7.20221224
+haskell.compiler.ghc8102Binary           ghc-binary-8.10.2
+haskell.compiler.ghc8102BinaryMinimal    ghc-binary-8.10.2
+haskell.compiler.ghc8107BinaryMinimal    ghc-binary-8.10.7
+haskell.compiler.ghc8107Binary           ghc-binary-8.10.7
+haskell.compiler.ghc865Binary            ghc-binary-8.6.5
+haskell.compiler.ghc924Binary            ghc-binary-9.2.4
+haskell.compiler.ghc924BinaryMinimal     ghc-binary-9.2.4
+haskell.compiler.integer-simple.ghc810   ghc-integer-simple-8.10.7
+haskell.compiler.integer-simple.ghc8107  ghc-integer-simple-8.10.7
+haskell.compiler.integer-simple.ghc88    ghc-integer-simple-8.8.4
+haskell.compiler.integer-simple.ghc884   ghc-integer-simple-8.8.4
+haskell.compiler.native-bignum.ghc90     ghc-native-bignum-9.0.2
+haskell.compiler.native-bignum.ghc902    ghc-native-bignum-9.0.2
+haskell.compiler.native-bignum.ghc924    ghc-native-bignum-9.2.4
+haskell.compiler.native-bignum.ghc925    ghc-native-bignum-9.2.5
+haskell.compiler.native-bignum.ghc926    ghc-native-bignum-9.2.6
+haskell.compiler.native-bignum.ghc92     ghc-native-bignum-9.2.7
+haskell.compiler.native-bignum.ghc927    ghc-native-bignum-9.2.7
+haskell.compiler.native-bignum.ghc942    ghc-native-bignum-9.4.2
+haskell.compiler.native-bignum.ghc943    ghc-native-bignum-9.4.3
+haskell.compiler.native-bignum.ghc94     ghc-native-bignum-9.4.4
+haskell.compiler.native-bignum.ghc944    ghc-native-bignum-9.4.4
+haskell.compiler.native-bignum.ghcHEAD   ghc-native-bignum-9.7.20221224
+haskell.compiler.ghcjs                   ghcjs-8.10.7
+```
+
+Each of those compiler versions has a corresponding attribute set built using
+it. However, the non-standard package sets are not tested regularly and, as a
+result, contain fewer working packages. The corresponding package set for GHC
+9.4.5 is `haskell.packages.ghc945`. In fact `haskellPackages` is just an alias
+for `haskell.packages.ghc927`:
+
+```console
+$ nix-env -f '<nixpkgs>' -qaP -A haskell.packages.ghc927
+haskell.packages.ghc927.a50                                                         a50-0.5
+haskell.packages.ghc927.AAI                                                         AAI-0.2.0.1
+haskell.packages.ghc927.aasam                                                       aasam-0.2.0.0
+haskell.packages.ghc927.abacate                                                     abacate-0.0.0.0
+haskell.packages.ghc927.abc-puzzle                                                  abc-puzzle-0.2.1
+…
+```
+
+Every package set also re-exposes the GHC used to build its packages as `haskell.packages.*.ghc`.
+
+### Available package versions {#haskell-available-versions}
+
+We aim for a “blessed” package set which only contains one version of each
+package, like Stackage (and based on it), but with more packages. Normally the
+number of Haskell packages that build in nixpkgs is roughly two to three times
+the size of Stackage. For choosing the version to use for a certain package, we
+use the following rules:
+
+1. By default, `haskellPackages.foo` is the newest version of the package found
+on Hackage (at the time of the last update of our package set).
+2. If the Stackage snapshot that we use (usually the newest LTS snapshot)
+contains a package, we use the Stackage version as default version for that
+package.
+3. For some packages, which are not on Stackage, we have manual overrides to
+set the default version to a version older than the newest on Hackage. We do
+this to get them or their reverse dependencies to compile in our package set.
+4. For all packages whose newest Hackage version is not the default version,
+there will also be a `haskellPackages.foo_x_y_z` package with the newest
+version. The `x_y_z` part encodes the version with dots replaced by
+underscores. When a new version is released to Hackage, the old package
+disappears under that name and is replaced by a newer one under the name with
+the new version. The versioned attribute also disappears when the default
+version (e.g. from Stackage) catches up with the newest version from Hackage.
+5. For some packages, we also manually add other `haskellPackages.foo_x_y_z`
+versions, if they are required for a certain build.
+
+Relying on `haskellPackages.foo_x_y_z` attributes in derivations outside
+nixpkgs is discouraged because they may change or disappear with every package
+set update.
+<!-- TODO(@maralorn) We should add a link to callHackage, etc. once we added
+them to the docs. -->
+
+All `haskell.packages.*` package sets use the same package descriptions and the same sets
+of versions by default. There are, however, GHC-version-specific override `.nix`
+files to loosen this a bit.
+
+### Dependency resolution {#haskell-dependency-resolution}
+
+Normally when you build Haskell packages with `cabal-install`, `cabal-install`
+does dependency resolution. It will look at all Haskell package versions known
+on Hackage and try to pick, for every (transitive) dependency of your build,
+exactly one version. Those versions need to satisfy all the version constraints
+given in the `.cabal` file of your package and all its dependencies.
+
+The [Haskell builder in nixpkgs](#haskell-mkderivation) does no such thing.
+It will simply take as input packages with the names of the desired dependencies
+and just check whether they fulfill the version bounds, failing if they don’t
+(by default; see `jailbreak` to circumvent this).
+
+The `haskellPackages.callPackage` function does the package resolution.
+It will, e.g., use `haskellPackages.aeson`, which has the default version as
+described above, for a package input of name `aeson`. (More generally,
+`<packages>.callPackage f` will call `f` with named inputs provided from the
+package set `<packages>`.)
+While this is the default behavior, it is possible to override the dependencies
+for a specific package, see
+[`override` and `overrideScope`](#haskell-overriding-haskell-packages).
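+
+For illustration, here is a minimal sketch of a `callPackage`-ready expression
+(the package name and contents are made up); the `base` and `aeson` arguments
+are filled in from `haskellPackages` as described above:
+
+```nix
+haskellPackages.callPackage
+  ({ mkDerivation, lib, base, aeson }:
+    mkDerivation {
+      pname = "my-package";   # hypothetical package
+      version = "0.1.0.0";
+      src = ./.;
+      libraryHaskellDepends = [ base aeson ];
+      license = lib.licenses.bsd3;
+    })
+  { }
+```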
+
+### Limitations {#haskell-limitations}
+
+Our main objective with `haskellPackages` is to package Haskell software in
+nixpkgs. This entails some limitations, partially due to self-imposed
+restrictions of nixpkgs, partially in the name of maintainability:
+
+* Only the packages built with the default compiler see extensive testing of the
+  whole package set. For other GHC versions only a few essential packages are
+  tested and cached.
+* As described above we only build one version of most packages.
+
+The experience using an older or newer packaged compiler or using different
+versions may be worse, because builds will not be cached on `cache.nixos.org`
+or may fail.
+
+Thus, to get the best experience, make sure that your project can be compiled
+using the default compiler of nixpkgs and recent versions of its dependencies.
+
+A result of this setup is that getting a valid build plan for a given
+package can sometimes be quite painful, and in fact this is where most of the
+maintenance work for `haskellPackages` is required. Besides that, it is not
+possible to get the dependencies of a legacy project from nixpkgs or to use a
+specific stack solver for compiling a project.
+
+Even though we couldn’t use them directly in nixpkgs, it would be desirable
+to have tooling to generate working Nix package sets from build plans generated
+by `cabal-install` or a specific Stackage snapshot via import-from-derivation.
+Sadly we currently don’t have such tooling. If you need this, you might be
+interested in the alternative [haskell.nix] framework, which, be warned, is
+completely incompatible with packages from `haskellPackages`.
+
+<!-- TODO(@maralorn) Link to package set generation docs in the contributers guide below. -->
+
+## `haskellPackages.mkDerivation` {#haskell-mkderivation}
+
+Every haskell package set has its own haskell-aware `mkDerivation` which is used
+to build its packages. Generally you won't have to interact with this builder
+since [cabal2nix][cabal2nix] can generate packages
+using it for an arbitrary cabal package definition. Still it is useful to know
+the parameters it takes when you need to
+[override](#haskell-overriding-haskell-packages) a generated Nix expression.
+
+`haskellPackages.mkDerivation` is a wrapper around `stdenv.mkDerivation` which
+re-defines the default phases to be Haskell-aware and handles dependency
+specification, test suites, benchmarks etc. by compiling and invoking the
+package's `Setup.hs`. It does *not* use or invoke the `cabal-install` binary,
+but uses the underlying `Cabal` library instead.
+
+### General arguments {#haskell-derivation-args}
+
+`pname`
+: Package name, assumed to be the same as on Hackage (if applicable)
+
+`version`
+: Packaged version, assumed to be the same as on Hackage (if applicable)
+
+`src`
+: Source of the package. If omitted, the package corresponding to `pname`
+and `version` is fetched from Hackage.
+
+`sha256`
+: Hash to use for the default case of `src`.
+
+`revision`
+: Revision number of the updated cabal file to fetch from Hackage.
+If `null` (which is the default value), the one included in `src` is used.
+
+`editedCabalFile`
+: `sha256` hash of the cabal file identified by `revision` or `null`.
+
+`configureFlags`
+: Extra flags passed when executing the `configure` command of `Setup.hs`.
+
+`buildFlags`
+: Extra flags passed when executing the `build` command of `Setup.hs`.
+
+`haddockFlags`
+: Extra flags passed to `Setup.hs haddock` when building the documentation.
+
+`doCheck`
+: Whether to execute the package's test suite if it has one. Defaults to `true` unless cross-compiling.
+
+`doBenchmark`
+: Whether to execute the package's benchmark if it has one. Defaults to `false`.
+
+`doHoogle`
+: Whether to generate an index file for [hoogle][hoogle] as part of
+`haddockPhase` by passing the [`--hoogle` option][haddock-hoogle-option].
+Defaults to `true`.
+
+`doHaddockQuickjump`
+: Whether to generate an index for interactive navigation of the HTML documentation.
+Defaults to `true` if supported.
+
+`doInstallIntermediates`
+: Whether to install intermediate build products (files written to `dist/build`
+by GHC during the build process). With `enableSeparateIntermediatesOutput`,
+these files are instead installed to [a separate `intermediates`
+output.][multiple-outputs] The output can then be passed into a future build of
+the same package with the `previousIntermediates` argument to support
+incremental builds. See [“Incremental builds”](#haskell-incremental-builds) for
+more information. Defaults to `false`.
+
+`enableLibraryProfiling`
+: Whether to enable [profiling][profiling] for libraries contained in the
+package. Enabled by default if supported.
+
+`enableExecutableProfiling`
+: Whether to enable [profiling][profiling] for executables contained in the
+package. Disabled by default.
+
+`profilingDetail`
+: [Profiling detail level][profiling-detail] to set. Defaults to `exported-functions`.
+
+`enableSharedExecutables`
+: Whether to link executables dynamically. By default, executables are linked statically.
+
+`enableSharedLibraries`
+: Whether to build shared Haskell libraries. This is enabled by default unless we are using
+`pkgsStatic` or shared libraries have been disabled in GHC.
+
+`enableStaticLibraries`
+: Whether to build static libraries. Enabled by default if supported.
+
+`enableDeadCodeElimination`
+: Whether to enable linker based dead code elimination in GHC.
+Enabled by default if supported.
+
+`enableHsc2hsViaAsm`
+: Whether to pass `--via-asm` to `hsc2hs`. Enabled by default only on Windows.
+
+`hyperlinkSource`
+: Whether to render the source as well as part of the haddock documentation
+by passing the [`--hyperlinked-source` flag][haddock-hyperlinked-source-option].
+Defaults to `true`.
+
+`isExecutable`
+: Whether the package contains an executable.
+
+`isLibrary`
+: Whether the package contains a library.
+
+`jailbreak`
+: Whether to execute [jailbreak-cabal][jailbreak-cabal] before `configurePhase`
+to lift any version constraints in the cabal file. Note that this can't
+lift version bounds if they are conditional, i.e. if a dependency is hidden
+behind a flag.
+
+`enableParallelBuilding`
+: Whether to use the `-j` flag to make GHC/Cabal start multiple jobs in parallel.
+
+`maxBuildCores`
+: Upper limit of jobs to use in parallel for compilation regardless of
+`$NIX_BUILD_CORES`. Defaults to 16 as Haskell compilation with GHC currently
+sees a [performance regression](https://gitlab.haskell.org/ghc/ghc/-/issues/9221)
+if too many parallel jobs are used.
+
+`doCoverage`
+: Whether to generate and install files needed for [HPC][haskell-program-coverage].
+Defaults to `false`.
+
+`doHaddock`
+: Whether to build (HTML) documentation using [haddock][haddock].
+Defaults to `true` if supported.
+
+`testTarget`
+: Name of the test suite to build and run. If unset, all test suites will be executed.
+
+`preCompileBuildDriver`
+: Shell code to run before compiling `Setup.hs`.
+
+`postCompileBuildDriver`
+: Shell code to run after compiling `Setup.hs`.
+
+`preHaddock`
+: Shell code to run before building documentation using haddock.
+
+`postHaddock`
+: Shell code to run after building documentation using haddock.
+
+`coreSetup`
+: Whether to only allow core libraries to be used while building `Setup.hs`.
+Defaults to `false`.
+
+`useCpphs`
+: Whether to enable the [cpphs][cpphs] preprocessor. Defaults to `false`.
+
+`enableSeparateBinOutput`
+: Whether to install executables to a separate `bin` output. Defaults to `false`.
+
+`enableSeparateDataOutput`
+: Whether to install data files shipped with the package to a separate `data` output.
+Defaults to `false`.
+
+`enableSeparateDocOutput`
+: Whether to install documentation to a separate `doc` output.
+Is automatically enabled if `doHaddock` is `true`.
+
+`enableSeparateIntermediatesOutput`
+: When `doInstallIntermediates` is true, whether to install intermediate build
+products to a separate `intermediates` output. See [“Incremental
+builds”](#haskell-incremental-builds) for more information. Defaults to
+`false`.
+
+`allowInconsistentDependencies`
+: If enabled, allow multiple versions of the same Haskell package in the
+dependency tree at configure time. Often in such a situation compilation would
+later fail because of type mismatches. Defaults to `false`.
+
+`enableLibraryForGhci`
+: Build and install a special object file for GHCi. This improves performance
+when loading the library in the REPL, but requires extra build time and
+disk space. Defaults to `false`.
+
+`previousIntermediates`
+: If non-null, intermediate build artifacts are copied from this input to
+`dist/build` before compiling. See [“Incremental
+builds”](#haskell-incremental-builds) for more information. Defaults to `null`.
+
+`buildTarget`
+: Name of the executable or library to build and install.
+If unset, all available targets are built and installed.
+
+### Specifying dependencies {#haskell-derivation-deps}
+
+Since `haskellPackages.mkDerivation` is intended to be generated from cabal
+files, it reflects cabal's way of specifying dependencies. For one, dependencies
+are grouped by what part of the package they belong to. This helps to reduce the
+dependency closure of a derivation, for example benchmark dependencies are not
+included if `doBenchmark == false`.
+
+`setup*Depends`
+: dependencies necessary to compile `Setup.hs`
+
+`library*Depends`
+: dependencies of a library contained in the package
+
+`executable*Depends`
+: dependencies of an executable contained in the package
+
+`test*Depends`
+: dependencies of a test suite contained in the package
+
+`benchmark*Depends`
+: dependencies of a benchmark contained in the package
+
+The other categorization relates to the way the package depends on the dependency:
+
+`*ToolDepends`
+: Tools we need to run as part of the build process.
+They are added to the derivation's `nativeBuildInputs`.
+
+`*HaskellDepends`
+: Haskell libraries the package depends on.
+They are added to `propagatedBuildInputs`.
+
+`*SystemDepends`
+: Non-Haskell libraries the package depends on.
+They are added to `buildInputs`.
+
+`*PkgconfigDepends`
+: `*SystemDepends` which are discovered using `pkg-config`.
+They are added to `buildInputs` and it is additionally
+ensured that `pkg-config` is available at build time.
+
+`*FrameworkDepends`
+: Apple SDK frameworks which the package depends on when compiling it on Darwin.
+
+Using these two distinctions, you should be able to categorize most of the dependency
+specifications that are available:
+`benchmarkFrameworkDepends`,
+`benchmarkHaskellDepends`,
+`benchmarkPkgconfigDepends`,
+`benchmarkSystemDepends`,
+`benchmarkToolDepends`,
+`executableFrameworkDepends`,
+`executableHaskellDepends`,
+`executablePkgconfigDepends`,
+`executableSystemDepends`,
+`executableToolDepends`,
+`libraryFrameworkDepends`,
+`libraryHaskellDepends`,
+`libraryPkgconfigDepends`,
+`librarySystemDepends`,
+`libraryToolDepends`,
+`setupHaskellDepends`,
+`testFrameworkDepends`,
+`testHaskellDepends`,
+`testPkgconfigDepends`,
+`testSystemDepends` and
+`testToolDepends`.
+
+That only leaves the following extra ways for specifying dependencies:
+
+`buildDepends`
+: Allows specifying Haskell dependencies which are added to `propagatedBuildInputs` unconditionally.
+
+`buildTools`
+: Like `*ToolDepends`, but are added to `nativeBuildInputs` unconditionally.
+
+`extraLibraries`
+: Like `*SystemDepends`, but are added to `buildInputs` unconditionally.
+
+`pkg-configDepends`
+: Like `*PkgconfigDepends`, but are added to `buildInputs` unconditionally.
+
+`testDepends`
+: Deprecated, use either `testHaskellDepends` or `testSystemDepends`.
+
+`benchmarkDepends`
+: Deprecated, use either `benchmarkHaskellDepends` or `benchmarkSystemDepends`.
+
+The dependency specification methods in this list which are unconditional
+are especially useful when writing [overrides](#haskell-overriding-haskell-packages)
+when you want to make sure that they are definitely included. However, it is
+recommended to use the more accurate ones listed above when possible.
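+
+To illustrate how the groups fit together, here is a sketch of a generated
+expression with made-up contents (the dependency names are just examples;
+system libraries like `zlib` are normally passed in from `pkgs` by the caller):
+
+```nix
+{ mkDerivation, lib, base, aeson, hspec, hspec-discover, zlib }:
+
+mkDerivation {
+  pname = "my-example";   # hypothetical package
+  version = "0.1.0.0";
+  src = ./.;
+  # Haskell libraries the library component needs (propagatedBuildInputs)
+  libraryHaskellDepends = [ base aeson ];
+  # a non-Haskell library (buildInputs)
+  librarySystemDepends = [ zlib ];
+  # dependencies of the test suite
+  testHaskellDepends = [ base hspec ];
+  # a tool that has to run during the build (nativeBuildInputs)
+  testToolDepends = [ hspec-discover ];
+  license = lib.licenses.bsd3;
+}
+```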
+
+### Meta attributes {#haskell-derivation-meta}
+
+`haskellPackages.mkDerivation` accepts the following attributes as direct
+arguments which are transparently set in `meta` of the resulting derivation. See
+the [Meta-attributes section](#chap-meta) for their documentation.
+
+* These attributes are populated with a default value if omitted:
+    * `homepage`: defaults to the Hackage page for `pname`.
+    * `platforms`: defaults to `lib.platforms.all` (since GHC can cross-compile)
+* These attributes are only set if given:
+    * `description`
+    * `license`
+    * `changelog`
+    * `maintainers`
+    * `broken`
+    * `hydraPlatforms`
+
+### Incremental builds {#haskell-incremental-builds}
+
+`haskellPackages.mkDerivation` supports incremental builds for GHC 9.4 and
+newer with the `doInstallIntermediates`, `enableSeparateIntermediatesOutput`,
+and `previousIntermediates` arguments.
+
+The basic idea is to first perform a full build of the package in question,
+save its intermediate build products for later, and then copy those build
+products into the build directory of an incremental build performed later.
+Then, GHC will use those build artifacts to avoid recompiling unchanged
+modules.
+
+For more detail on how to store and use incremental build products, as well as
+the motivation behind this feature, see
+[Gabriella Gonzalez’ blog post “Nixpkgs support for incremental Haskell
+builds”][incremental-builds].
+
+An incremental build for [the `turtle` package][turtle] can be performed like
+so:
+
+```nix
+let
+  pkgs = import <nixpkgs> {};
+  inherit (pkgs) haskell;
+  inherit (haskell.lib.compose) overrideCabal;
+
+  # Incremental builds work with GHC >=9.4.
+  turtle = haskell.packages.ghc944.turtle;
+
+  # This will do a full build of `turtle`, while writing the intermediate build products
+  # (compiled modules, etc.) to the `intermediates` output.
+  turtle-full-build-with-incremental-output = overrideCabal (drv: {
+    doInstallIntermediates = true;
+    enableSeparateIntermediatesOutput = true;
+  }) turtle;
+
+  # This will do an incremental build of `turtle` by copying the previously
+  # compiled modules and intermediate build products into the source tree
+  # before running the build.
+  #
+  # GHC will then naturally pick up and reuse these products, making this build
+  # complete much more quickly than the previous one.
+  turtle-incremental-build = overrideCabal (drv: {
+    previousIntermediates = turtle-full-build-with-incremental-output.intermediates;
+  }) turtle;
+in
+  turtle-incremental-build
+```
+
+## Development environments {#haskell-development-environments}
+
+In addition to building and installing Haskell software, nixpkgs can also
+provide development environments for Haskell projects. This has the obvious
+advantage that you benefit from `cache.nixos.org` and no longer need to compile
+all project dependencies yourself. While it is often very useful, this is not
+the primary use case of our package set. Have a look at the section
+[available package versions](#haskell-available-versions) to learn which
+versions of packages we provide and at the section
+[limitations](#haskell-limitations) to judge whether a `haskellPackages`-based
+development environment for your project is feasible.
+
+By default, every derivation built using
+[`haskellPackages.mkDerivation`](#haskell-mkderivation) exposes an environment
+suitable for building it interactively as the `env` attribute. For example, if
+you have a local checkout of `random`, you can enter a development environment
+for it like this (if the dependencies in the development and packaged version
+match):
+
+```console
+$ cd ~/src/random
+$ nix-shell -A haskellPackages.random.env '<nixpkgs>'
+[nix-shell:~/src/random]$ ghc-pkg list
+/nix/store/a8hhl54xlzfizrhcf03c1l3f6l9l8qwv-ghc-9.2.4-with-packages/lib/ghc-9.2.4/package.conf.d
+    Cabal-3.6.3.0
+    array-0.5.4.0
+    base-4.16.3.0
+    binary-0.8.9.0
+    …
+    ghc-9.2.4
+    …
+```
+
+As you can see, the environment contains a GHC which is set up so it finds all
+dependencies of `random`. Note that this environment does not mirror
+the environment used to build the package, but is intended as a convenient
+tool for development and simple debugging. `env` relies on the `ghcWithPackages`
+wrapper which automatically injects a pre-populated package-db into every
+GHC invocation. In contrast, using `nix-shell -A haskellPackages.random` will
+not result in an environment in which the dependencies are in GHC’s package
+database. Instead, the Haskell builder will pass in all dependencies explicitly
+via configure flags.
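+
+As a rough sketch of what `env` relies on (assuming `pkgs` is an imported
+nixpkgs and the chosen libraries are just examples), `ghcWithPackages` builds
+a GHC whose package database already contains the given libraries:
+
+```nix
+# A GHC that can find `random` and `aeson` out of the box, similar to the
+# compiler provided inside `haskellPackages.random.env`.
+pkgs.haskellPackages.ghcWithPackages (hpkgs: [ hpkgs.random hpkgs.aeson ])
+```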
+
+`env` mirrors the normal derivation environment in one aspect: It does not include
+familiar development tools like `cabal-install`, since we rely on plain `Setup.hs`
+to build all packages. However, `cabal-install` will work as expected if in
+`PATH` (e.g. when installed globally and using a `nix-shell` without `--pure`).
+A declarative and pure way of adding arbitrary development tools is provided
+via [`shellFor`](#haskell-shellFor).
+
+When using `cabal-install` for dependency resolution you need to be a bit
+careful to achieve build purity. `cabal-install` will find and use all
+dependencies installed from the package’s `env` via Nix, but it will also
+consult Hackage to potentially download and compile dependencies if it can’t
+find a valid build plan locally. To prevent this you can either never run
+`cabal update`, remove the cabal database from your `~/.cabal` folder or run
+`cabal` with `--offline`. Note, though, that for some use cases `cabal2nix` needs
+the local Hackage db.
+
+Often you won't work on a package that is already part of `haskellPackages` or
+Hackage, so we first need to write a Nix expression to obtain the development
+environment from. Luckily, we can generate one very easily from an already
+existing cabal file using `cabal2nix`:
+
+```console
+$ ls
+my-project.cabal src …
+$ cabal2nix ./. > my-project.nix
+```
+
+The generated Nix expression evaluates to a function ready to be
+`callPackage`-ed. For now, we can add a minimal `default.nix` which does just
+that:
+
+```nix
+# Retrieve nixpkgs impurely from NIX_PATH for now, you can pin it instead, of course.
+{ pkgs ? import <nixpkgs> {} }:
+
+# use the nixpkgs default haskell package set
+pkgs.haskellPackages.callPackage ./my-project.nix { }
+```
+
+Using `nix-build default.nix` we can now build our project, but we can also
+enter a shell with all the package's dependencies available using `nix-shell
+-A env default.nix`. If you have `cabal-install` installed globally, it'll work
+inside the shell as expected.
+
+### shellFor {#haskell-shellFor}
+
+Having to install tools globally is obviously not great, especially if you want
+to provide a batteries-included `shell.nix` with your project. Luckily there's a
+proper tool for making development environments out of packages' build
+environments: `shellFor`, a function exposed by every haskell package set. It
+takes the following arguments and returns a derivation which is suitable as a
+development environment inside `nix-shell`:
+
+`packages`
+: This argument is used to select the packages for which to build the
+development environment. This should be a function which takes a haskell package
+set and returns a list of packages. `shellFor` will pass the used package set to
+this function and include all dependencies of the returned package in the build
+environment. This means you can reuse Nix expressions of packages included in
+nixpkgs, but also use local Nix expressions like this: `hpkgs: [
+(hpkgs.callPackage ./my-project.nix { }) ]`.
+
+`nativeBuildInputs`
+: Expects a list of derivations to add as build tools to the build environment.
+This is the place to add packages like `cabal-install`, `doctest` or `hlint`.
+Defaults to `[]`.
+
+`buildInputs`
+: Expects a list of derivations to add as library dependencies, like `openssl`.
+This is rarely necessary as the haskell package expressions usually track system
+dependencies as well. Defaults to `[]`. (see also
+[derivation dependencies](#haskell-derivation-deps))
+
+`withHoogle`
+: If this is true, `hoogle` will be added to `nativeBuildInputs`.
+Additionally, its database will be populated with all included dependencies,
+so you'll be able to search through the documentation of your dependencies.
+Defaults to `false`.
+
+`genericBuilderArgsModifier`
+: This argument accepts a function allowing you to modify the arguments passed
+to `mkDerivation` in order to create the development environment. For example,
+`args: { doCheck = false; }` would cause the environment to not include any test
+dependencies. Defaults to `lib.id`.
+
+`doBenchmark`
+: This is a shortcut for enabling `doBenchmark` via `genericBuilderArgsModifier`.
+Setting it to `true` will cause the development environment to include all
+benchmark dependencies which would be excluded by default. Defaults to `false`.
+
+One neat property of `shellFor` is that it allows you to work on multiple
+packages using the same environment in conjunction with
+[cabal.project files][cabal-project-files].
+Say our example above depends on `distribution-nixpkgs` and we have a project
+file set up for both; then we can add the following `shell.nix` expression:
+
+```nix
+{ pkgs ? import <nixpkgs> {} }:
+
+pkgs.haskellPackages.shellFor {
+  packages = hpkgs: [
+    # reuse the nixpkgs for this package
+    hpkgs.distribution-nixpkgs
+    # call our generated Nix expression manually
+    (hpkgs.callPackage ./my-project/my-project.nix { })
+  ];
+
+  # development tools we use
+  nativeBuildInputs = [
+    pkgs.cabal-install
+    pkgs.haskellPackages.doctest
+    pkgs.cabal2nix
+  ];
+
+  # Extra arguments are added to mkDerivation's arguments as-is.
+  # Since it adds all passed arguments to the shell environment,
+  # we can use this to set the environment variable the `Paths_`
+  # module of distribution-nixpkgs uses to search for bundled
+  # files.
+  # See also: https://cabal.readthedocs.io/en/latest/cabal-package.html#accessing-data-files-from-package-code
+  distribution_nixpkgs_datadir = toString ./distribution-nixpkgs;
+}
+```
+
+<!-- TODO(@sternenseemann): deps are not included if not selected -->
+
+### haskell-language-server {#haskell-language-server}
+
+To use HLS in short: Install `pkgs.haskell-language-server` e.g. in
+`nativeBuildInputs` in `shellFor` and use the `haskell-language-server-wrapper`
+command to run it. See the [HLS user guide] on how to configure your text
+editor to use HLS and how to test your setup.
+
+HLS needs to be compiled with the GHC version of the project you use it
+on.
+
+`pkgs.haskell-language-server` provides
+`haskell-language-server-wrapper`, `haskell-language-server`
+and `haskell-language-server-x.x.x`
+binaries, where `x.x.x` is the GHC version for which it is compiled. By
+default, it only includes binaries for the current GHC version, to reduce
+closure size. The closure size is large, because HLS needs to be dynamically
+linked to work reliably. You can override the list of supported GHC versions
+with e.g.
+
+```nix
+pkgs.haskell-language-server.override { supportedGhcVersions = [ "90" "94" ]; }
+```
+
+Any string `version` is allowed here for which
+`haskell.packages.ghc${version}` is an existing package set.
+
+When you run `haskell-language-server-wrapper` it will detect the GHC
+version used by the project you are working on (by asking e.g. cabal or
+stack) and pick the appropriate versioned binary from your path.
+
+Be careful when installing HLS globally and using a pinned nixpkgs for a
+Haskell project in a `nix-shell`. If the nixpkgs versions deviate too much
+(e.g., use different `glibc` versions) the `haskell-language-server-?.?.?`
+executable will try to detect these situations and refuse to start. It is
+recommended to obtain HLS via `nix-shell` from the nixpkgs version pinned in
+there instead.
+
+The top level `pkgs.haskell-language-server` attribute is just a convenience
+wrapper to make it possible to install HLS for multiple GHC versions at the
+same time. If you know that you only use one GHC version, e.g., in a
+project-specific `nix-shell`, you can simply use
+`pkgs.haskellPackages.haskell-language-server` or
+`pkgs.haskell.packages.*.haskell-language-server` from the package set you use.
+
+If you use `nix-shell` for your development environments remember to start your
+editor in that environment. You may want to use something like `direnv` and/or an
+editor plugin to achieve this.
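+
+For instance, here is a sketch of a project shell with HLS included (reusing
+the hypothetical `./my-project.nix` expression from the `shellFor` section
+above):
+
+```nix
+{ pkgs ? import <nixpkgs> {} }:
+
+pkgs.haskellPackages.shellFor {
+  packages = hpkgs: [ (hpkgs.callPackage ./my-project.nix { }) ];
+  nativeBuildInputs = [
+    pkgs.cabal-install
+    # HLS from the same package set, i.e. built with the same GHC
+    pkgs.haskellPackages.haskell-language-server
+  ];
+}
+```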
+
+## Overriding Haskell packages {#haskell-overriding-haskell-packages}
+
+### Overriding a single package {#haskell-overriding-a-single-package}
+
+<!-- TODO(@sternenseemann): we should document /somewhere/ that base == null etc. -->
+
+Like many language specific subsystems in nixpkgs, the Haskell infrastructure
+also has its own quirks when it comes to overriding. Overriding of the *inputs*
+to a package at least follows the standard procedure. For example, imagine you
+need to build `nix-tree` with a more recent version of `brick` than the default
+one provided by `haskellPackages`:
+
+```nix
+haskellPackages.nix-tree.override {
+  brick = haskellPackages.brick_0_67;
+}
+```
+
+<!-- TODO(@sternenseemann): This belongs in the next section
+One common problem you may run into with such an override is the build failing
+with “abort because of serious configure-time warning from Cabal”. When scrolling
+up, you'll usually notice that Cabal noticed that more than one versions of the same
+package was present in the dependency graph. This typically causes a later compilation
+failure (the error message `haskellPackages.mkDerivation` produces tries to save
+you the time of finding this out yourself, but if you wish to do so, you can
+disable it using `allowInconsistentDependencies`). Luckily, `haskellPackages` provides
+you with a tool to deal with this. `overrideScope` creates a new `haskellPackages`
+instance with the override applied *globally* for this package, so the dependency
+closure automatically uses a consistent version of the overridden package. E. g.
+if `haskell-ci` needs a recent version of `Cabal`, but also uses other packages
+that depend on that library, you may want to use:
+
+```nix
+haskellPackages.haskell-ci.overrideScope (self: super: {
+  Cabal = self.Cabal_3_6_2_0;
+})
+```
+
+-->
+
+The custom interface comes into play when you want to override the arguments
+passed to `haskellPackages.mkDerivation`. For this, the function `overrideCabal`
+from `haskell.lib.compose` is used. E.g., if you want to install a man page
+that is distributed with the package, you can do something like this:
+
+```nix
+haskell.lib.compose.overrideCabal (drv: {
+  postInstall = ''
+    ${drv.postInstall or ""}
+    install -Dm644 man/pnbackup.1 -t $out/share/man/man1
+  '';
+}) haskellPackages.pnbackup
+```
+
+`overrideCabal` takes two arguments:
+
+1. A function which receives all arguments passed to `haskellPackages.mkDerivation`
+   before and returns a set of arguments to replace (or add) with a new value.
+2. The Haskell derivation to override.
+
+The arguments are ordered so that you can easily create helper functions by making
+use of currying:
+
+```nix
+let
+  installManPage = haskell.lib.compose.overrideCabal (drv: {
+    postInstall = ''
+      ${drv.postInstall or ""}
+      install -Dm644 man/${drv.pname}.1 -t "$out/share/man/man1"
+    '';
+  });
+in
+
+installManPage haskellPackages.pnbackup
+```
+
+In fact, `haskell.lib.compose` already provides lots of useful helpers for common
+tasks, detailed in the next section. They are also structured in such a way that
+they can be combined using `lib.pipe`:
+
+```nix
+lib.pipe my-haskell-package [
+  # lift version bounds on dependencies
+  haskell.lib.compose.doJailbreak
+  # disable building the haddock documentation
+  haskell.lib.compose.dontHaddock
+  # pass extra package flag to Cabal's configure step
+  (haskell.lib.compose.enableCabalFlag "myflag")
+]
+```
+
+#### `haskell.lib.compose` {#haskell-haskell.lib.compose}
+
+The base interface for all overriding is the following function:
+
+`overrideCabal f drv`
+: Passes the arguments that were used to obtain `drv` to `f` and uses the
+resulting attribute set to update the argument set. Then a recomputed version
+of `drv` using the new argument set is returned.
+
+<!--
+TODO(@sternenseemann): ideally we want to be more detailed here as well, but
+I want to avoid the documentation having to be kept in sync in too many places.
+We already document this stuff in the mkDerivation section and lib/compose.nix.
+Ideally this section would be generated from the latter in the future.
+-->
+
+All other helper functions are implemented in terms of `overrideCabal` and make
+common overrides shorter and more complicated ones trivial. The simple overrides
+which only change a single argument are only described very briefly in the
+following overview. Refer to the
+[documentation of `haskellPackages.mkDerivation`](#haskell-mkderivation)
+for a more detailed description of the effects of the respective arguments.
+
+##### Packaging Helpers {#haskell-packaging-helpers}
+
+`overrideSrc { src, version } drv`
+: Replace the source used for building `drv` with the path or derivation given
+as `src`. The `version` attribute is optional. Prefer this function over
+overriding `src` via `overrideCabal`, since it also automatically takes care of
+removing any Hackage revisions.
+
+<!-- TODO(@sternenseemann): deprecated
+
+`generateOptparseApplicativeCompletions list drv`
+: Generate and install shell completion files for the installed executables whose
+names are given via `list`. The executables need to be using `optparse-applicative`
+for this to work.
+-->
+
+`justStaticExecutables drv`
+: Only build and install the executables produced by `drv`, removing everything
+that may refer to other Haskell packages' store paths (like libraries and
+documentation). This dramatically reduces the closure size of the resulting
+derivation. Note that the executables are only statically linked against their
+Haskell dependencies, but will still link dynamically against libc, GMP and
+other system library dependencies. If dependencies use their Cabal-generated
+`Paths_*` module, this may not work as well if GHC's dead code elimination
+is unable to remove the references to the dependency's store path that module
+contains.
+
+`enableSeparateBinOutput drv`
+: Install executables produced by `drv` to a separate `bin` output. This
+has a similar effect as `justStaticExecutables`, but preserves the libraries
+and documentation in the `out` output alongside the `bin` output with a
+much smaller closure size.
+
+`markBroken drv`
+: Sets the `broken` flag to `true` for `drv`.
+
+`markUnbroken drv`, `unmarkBroken drv`
+: Set the `broken` flag to `false` for `drv`.
+
+`doDistribute drv`
+: Updates `hydraPlatforms` so that Hydra will build `drv`. This is
+sometimes necessary when working with versioned packages in
+`haskellPackages` which are not built by default.
+
+`dontDistribute drv`
+: Sets `hydraPlatforms` to `[]`, causing Hydra to skip this package
+altogether. Useful if it fails to evaluate cleanly and is causing
+noise in the evaluation errors tab on Hydra.
+
+##### Development Helpers {#haskell-development-helpers}
+
+`sdistTarball drv`
+: Create a source distribution tarball like those found on Hackage
+instead of building the package `drv`.
+
+`documentationTarball drv`
+: Create a documentation tarball suitable for uploading to Hackage
+instead of building the package `drv`.
+
+`buildFromSdist drv`
+: Uses `sdistTarball drv` as the source to compile `drv`. This helps to catch
+packaging bugs when building from a local directory, e.g. when required files
+are missing from `extra-source-files`.
+
+`failOnAllWarnings drv`
+: Enables all warnings GHC supports and makes it fail the build if any of them
+are emitted.
+
+<!-- TODO(@sternenseemann):
+`checkUnusedPackages opts drv`
+: Adds an extra check to `postBuild` which fails the build if any dependency
+taken as an input is not used. The `opts` attribute set allows relaxing this
+check.
+-->
+
+`enableDWARFDebugging drv`
+: Compiles the package with additional debug symbols enabled, useful
+for debugging with e.g. `gdb`.
+
+`doStrip drv`
+: Sets `doStrip` to `true` for `drv`.
+
+`dontStrip drv`
+: Sets `doStrip` to `false` for `drv`.
+
+<!-- TODO(@sternenseemann): shellAware -->
+
+##### Trivial Helpers {#haskell-trivial-helpers}
+
+`doJailbreak drv`
+: Sets the `jailbreak` argument to `true` for `drv`.
+
+`dontJailbreak drv`
+: Sets the `jailbreak` argument to `false` for `drv`.
+
+`doHaddock drv`
+: Sets `doHaddock` to `true` for `drv`.
+
+`dontHaddock drv`
+: Sets `doHaddock` to `false` for `drv`. Useful if the build of a package is
+failing because of e.g. a syntax error in the Haddock documentation.
+
+`doHyperlinkSource drv`
+: Sets `hyperlinkSource` to `true` for `drv`.
+
+`dontHyperlinkSource drv`
+: Sets `hyperlinkSource` to `false` for `drv`.
+
+`doCheck drv`
+: Sets `doCheck` to `true` for `drv`.
+
+`dontCheck drv`
+: Sets `doCheck` to `false` for `drv`. Useful if a package has a broken,
+flaky or otherwise problematic test suite breaking the build.
+
+<!-- Purposefully omitting the non-list variants here. They are a bit
+ugly, and we may want to deprecate them at some point. -->
+
+`appendConfigureFlags list drv`
+: Adds the strings in `list` to the `configureFlags` argument for `drv`.
+
+`enableCabalFlag flag drv`
+: Makes sure that the Cabal flag `flag` is enabled in Cabal's configure step.
+
+`disableCabalFlag flag drv`
+: Makes sure that the Cabal flag `flag` is disabled in Cabal's configure step.
+
+`appendBuildFlags list drv`
+: Adds the strings in `list` to the `buildFlags` argument for `drv`.
+
+<!-- TODO(@sternenseemann): removeConfigureFlag -->
+
+`appendPatches list drv`
+: Adds the `list` of derivations or paths to the `patches` argument for `drv`.
+
+<!-- TODO(@sternenseemann): link dep section -->
+
+`addBuildTools list drv`
+: Adds the `list` of derivations to the `buildTools` argument for `drv`.
+
+`addExtraLibraries list drv`
+: Adds the `list` of derivations to the `extraLibraries` argument for `drv`.
+
+`addBuildDepends list drv`
+: Adds the `list` of derivations to the `buildDepends` argument for `drv`.
+
+`addTestToolDepends list drv`
+: Adds the `list` of derivations to the `testToolDepends` argument for `drv`.
+
+`addPkgconfigDepends list drv`
+: Adds the `list` of derivations to the `pkg-configDepends` argument for `drv`.
+
+`addSetupDepends list drv`
+: Adds the `list` of derivations to the `setupHaskellDepends` argument for `drv`.
+
+`doBenchmark drv`
+: Set `doBenchmark` to `true` for `drv`. Useful if your development
+environment is missing the dependencies necessary for compiling the
+benchmark component.
+
+`dontBenchmark drv`
+: Set `doBenchmark` to `false` for `drv`.
+
+`setBuildTargets drv list`
+: Sets the `buildTarget` argument for `drv` so that the targets specified in `list` are built.
+
+`doCoverage drv`
+: Sets the `doCoverage` argument to `true` for `drv`.
+
+`dontCoverage drv`
+: Sets the `doCoverage` argument to `false` for `drv`.
+
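+All of the helpers in this section take the derivation as their final argument,
+so they compose well. A minimal sketch (the overlay shape is just one common way
+to apply them; `my-package` is a hypothetical attribute in `haskellPackages`):
+
+```nix
+final: prev: {
+  haskellPackages = prev.haskellPackages.override {
+    overrides = hfinal: hprev: {
+      # Hypothetical package: relax its version bounds and skip its test suite.
+      my-package = prev.lib.pipe hprev.my-package [
+        prev.haskell.lib.compose.doJailbreak
+        prev.haskell.lib.compose.dontCheck
+      ];
+    };
+  };
+}
+```
+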
+#### Library functions in the Haskell package sets {#haskell-package-set-lib-functions}
+
+Some library functions depend on packages from the Haskell package sets. They are therefore
+exposed from those sets instead of from `haskell.lib.compose`, which can only access what is
+passed directly to it. When using the functions below, make sure that you obtain them
+from the same package set (`haskellPackages`, `haskell.packages.ghc944`, etc.) as the packages
+you are working with, or – even better – from the `self`/`final` fixed point of your overlay to
+`haskellPackages`.
+
+Note: Some functions that are not intended for overriding per se, such as `shellFor`, are omitted
+from this section. <!-- TODO(@sternenseemann): note about ifd section -->
+
+`cabalSdist { src, name ? ... }`
+: Generates the Cabal sdist tarball for `src`, suitable for uploading to Hackage.
+Unlike `haskell.lib.compose.sdistTarball`, it uses `cabal-install` instead of `Setup.hs`,
+so it is usually faster: No build dependencies need to be downloaded, and we can
+skip compiling `Setup.hs`.
+
+`buildFromCabalSdist drv`
+: Build `drv`, but run its `src` attribute through `cabalSdist` first. Useful for catching
+files necessary for compilation that are missing from the sdist.
+
+`generateOptparseApplicativeCompletions list drv`
+: Generate and install shell completion files for the installed executables whose
+names are given via `list`. The executables need to be using `optparse-applicative`
+for [this to work][optparse-applicative-completions].
+Note that this feature is automatically disabled when cross-compiling, since it
+requires executing the binaries in question.
+
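+As a hedged usage sketch (`my-cli-package` and the executable name `my-cli` are
+hypothetical), both helpers are taken from the package set itself:
+
+```nix
+{
+  # Catch files missing from the sdist by building from it.
+  checked = haskellPackages.buildFromCabalSdist haskellPackages.my-cli-package;
+
+  # Generate and install shell completions for the executable `my-cli`.
+  withCompletions =
+    haskellPackages.generateOptparseApplicativeCompletions [ "my-cli" ]
+      haskellPackages.my-cli-package;
+}
+```
+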
+<!--
+
+TODO(@NixOS/haskell): finish these planned sections
+### Overriding the entire package set
+
+
+## Import-from-Derivation helpers
+
+* `callCabal2nix`
+* `callHackage`, `callHackageDirect`
+* `developPackage`
+
+## Contributing {#haskell-contributing}
+
+### Fixing a broken package {#haskell-fixing-a-broken-package}
+
+### Package set generation {#haskell-package-set-generation}
+
+### Packaging a Haskell project
+
+### Backporting {#haskell-backporting}
+
+Backporting changes to a stable NixOS version is covered
+in nixpkgs' `CONTRIBUTING.md`. In particular, refer to the
+[backporting policy](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md#criteria-for-backporting-changes)
+to check if the change you have in mind may be backported.
+
+This section focuses on how to backport a package update (e.g. a
+bug fix or security release). Fixing a broken package works like
+it does for the unstable branches.
+
+-->
+
+## F.A.Q. {#haskell-faq}
+
+### Why is topic X not covered in this section? Why is section Y missing? {#haskell-why-not-covered}
+
+We have been working on [moving the nixpkgs Haskell documentation back into the
+nixpkgs manual](https://github.com/NixOS/nixpkgs/issues/121403). Since this
+process has not been completed yet, some topics that are missing here may still be
+covered in the old [haskell4nix docs](https://haskell4nix.readthedocs.io/).
+
+If you feel any important topic is not documented at all, feel free to comment
+on the issue linked above.
+
+[Stackage]: https://www.stackage.org
+[cabal-project-files]: https://cabal.readthedocs.io/en/latest/cabal-project.html
+[cabal2nix]: https://github.com/nixos/cabal2nix
+[cpphs]: https://Hackage.haskell.org/package/cpphs
+[haddock-hoogle-option]: https://haskell-haddock.readthedocs.io/en/latest/invoking.html#cmdoption-hoogle
+[haddock-hyperlinked-source-option]: https://haskell-haddock.readthedocs.io/en/latest/invoking.html#cmdoption-hyperlinked-source
+[haddock]: https://www.haskell.org/haddock/
+[haskell-program-coverage]: https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/profiling.html#observing-code-coverage
+[haskell.nix]: https://input-output-hk.github.io/haskell.nix/index.html
+[HLS user guide]: https://haskell-language-server.readthedocs.io/en/latest/configuration.html#configuring-your-editor
+[hoogle]: https://wiki.haskell.org/Hoogle
+[incremental-builds]: https://www.haskellforall.com/2022/12/nixpkgs-support-for-incremental-haskell.html
+[jailbreak-cabal]: https://github.com/NixOS/jailbreak-cabal/
+[multiple-outputs]: https://nixos.org/manual/nixpkgs/stable/#chap-multiple-output
+[optparse-applicative-completions]: https://github.com/pcapriotti/optparse-applicative/blob/7726b63796aa5d0df82e926d467f039b78ca09e2/README.md#bash-zsh-and-fish-completions
+[profiling-detail]: https://cabal.readthedocs.io/en/latest/cabal-project.html#cfg-field-profiling-detail
+[profiling]: https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/profiling.html
+[search.nixos.org]: https://search.nixos.org
+[turtle]: https://hackage.haskell.org/package/turtle
diff --git a/nixpkgs/doc/languages-frameworks/hy.section.md b/nixpkgs/doc/languages-frameworks/hy.section.md
index a851ff24dfc2..49309e4819f5 100644
--- a/nixpkgs/doc/languages-frameworks/hy.section.md
+++ b/nixpkgs/doc/languages-frameworks/hy.section.md
@@ -4,10 +4,10 @@
 
 ### Installation without packages {#installation-without-packages}
 
-You can install `hy` via nix-env or by adding it to `configuration.nix` by reffering to it as a `hy` attribute. This kind of installation adds `hy` to your environment and it succesfully works with `python3`.
+You can install `hy` via nix-env or by adding it to `configuration.nix` by referring to it as a `hy` attribute. This kind of installation adds `hy` to your environment and it successfully works with `python3`.
 
 ::: {.caution}
-Packages that are installed with your python derivation, are not accesible by `hy` this way.
+Packages that are installed with your python derivation, are not accessible by `hy` this way.
 :::
 
 ### Installation with packages {#installation-with-packages}
diff --git a/nixpkgs/doc/languages-frameworks/idris.section.md b/nixpkgs/doc/languages-frameworks/idris.section.md
index 19146844cff5..447a3e7bb8a3 100644
--- a/nixpkgs/doc/languages-frameworks/idris.section.md
+++ b/nixpkgs/doc/languages-frameworks/idris.section.md
@@ -90,7 +90,7 @@ build-idris-package  {
     owner = "Heather";
     repo = "Idris.Yaml";
     rev = "5afa51ffc839844862b8316faba3bafa15656db4";
-    sha256 = "1g4pi0swmg214kndj85hj50ccmckni7piprsxfdzdfhg87s0avw7";
+    hash = "sha256-h28F9EEPuvab6zrfeE+0k1XGQJGwINnsJEG8yjWIl7w=";
   };
 
   meta = with lib; {
diff --git a/nixpkgs/doc/languages-frameworks/index.xml b/nixpkgs/doc/languages-frameworks/index.xml
index 3d5b2f738976..94c4e303027f 100644
--- a/nixpkgs/doc/languages-frameworks/index.xml
+++ b/nixpkgs/doc/languages-frameworks/index.xml
@@ -3,7 +3,7 @@
          xml:id="chap-language-support">
  <title>Languages and frameworks</title>
  <para>
-  The <link linkend="chap-stdenv">standard build environment</link> makes it easy to build typical Autotools-based packages with very little code. Any other kind of package can be accomodated by overriding the appropriate phases of <literal>stdenv</literal>. However, there are specialised functions in Nixpkgs to easily build packages for other programming languages, such as Perl or Haskell. These are described in this chapter.
+  The <link linkend="chap-stdenv">standard build environment</link> makes it easy to build typical Autotools-based packages with very little code. Any other kind of package can be accommodated by overriding the appropriate phases of <literal>stdenv</literal>. However, there are specialised functions in Nixpkgs to easily build packages for other programming languages, such as Perl or Haskell. These are described in this chapter.
  </para>
  <xi:include href="agda.section.xml" />
  <xi:include href="android.section.xml" />
@@ -13,6 +13,8 @@
  <xi:include href="coq.section.xml" />
  <xi:include href="crystal.section.xml" />
  <xi:include href="cuda.section.xml" />
+ <xi:include href="cuelang.section.xml" />
+ <xi:include href="dart.section.xml" />
  <xi:include href="dhall.section.xml" />
  <xi:include href="dotnet.section.xml" />
  <xi:include href="emscripten.section.xml" />
@@ -24,6 +26,7 @@
  <xi:include href="ios.section.xml" />
  <xi:include href="java.section.xml" />
  <xi:include href="javascript.section.xml" />
+ <xi:include href="lisp.section.xml" />
  <xi:include href="lua.section.xml" />
  <xi:include href="maven.section.xml" />
  <xi:include href="nim.section.xml" />
@@ -31,11 +34,13 @@
  <xi:include href="octave.section.xml" />
  <xi:include href="perl.section.xml" />
  <xi:include href="php.section.xml" />
+ <xi:include href="pkg-config.section.xml" />
  <xi:include href="python.section.xml" />
  <xi:include href="qt.section.xml" />
  <xi:include href="r.section.xml" />
  <xi:include href="ruby.section.xml" />
  <xi:include href="rust.section.xml" />
+ <xi:include href="swift.section.xml" />
  <xi:include href="texlive.section.xml" />
  <xi:include href="titanium.section.xml" />
  <xi:include href="vim.section.xml" />
diff --git a/nixpkgs/doc/languages-frameworks/ios.section.md b/nixpkgs/doc/languages-frameworks/ios.section.md
index 04b013be12e2..eb8e2ca55326 100644
--- a/nixpkgs/doc/languages-frameworks/ios.section.md
+++ b/nixpkgs/doc/languages-frameworks/ios.section.md
@@ -104,7 +104,7 @@ The above function takes a variety of parameters:
   and the location where the source code resides
 * `sdkVersion` specifies which version of the iOS SDK to use.
 
-It also possile to adjust the `xcodebuild` parameters. This is only needed in
+It is also possible to adjust the `xcodebuild` parameters. This is only needed in
 rare circumstances. In most cases the default values should suffice:
 
 * Specifies which `xcodebuild` target to build. By default it takes the target
@@ -130,7 +130,7 @@ In addition, you need to set the following parameters:
   store certificates.
 * `generateIPA` specifies that we want to produce an IPA file (this is probably
   what you want)
-* `generateXCArchive` specifies thet we want to produce an xcarchive file.
+* `generateXCArchive` specifies that we want to produce an xcarchive file.
 
 When building IPA files on Hydra and when it is desired to allow iOS devices to
 install IPAs by browsing to the Hydra build products page, you can enable the
diff --git a/nixpkgs/doc/languages-frameworks/javascript.section.md b/nixpkgs/doc/languages-frameworks/javascript.section.md
index 9d16b951e8dd..a6c5aad15c15 100644
--- a/nixpkgs/doc/languages-frameworks/javascript.section.md
+++ b/nixpkgs/doc/languages-frameworks/javascript.section.md
@@ -6,16 +6,16 @@ This contains instructions on how to package javascript applications.
 
 The various tools available will be listed in the [tools-overview](#javascript-tools-overview). Some general principles for packaging will follow. Finally some tool specific instructions will be given.
 
-## Getting unstuck / finding code examples
+## Getting unstuck / finding code examples {#javascript-finding-examples}
 
 If you find you are lacking inspiration for packing javascript applications, the links below might prove useful. Searching online for prior art can be helpful if you are running into solved problems.
 
-### Github
+### Github {#javascript-finding-examples-github}
 
 - Searching Nix files for `mkYarnPackage`: <https://github.com/search?q=mkYarnPackage+language%3ANix&type=code>
 - Searching just `flake.nix` files for `mkYarnPackage`: <https://github.com/search?q=mkYarnPackage+filename%3Aflake.nix&type=code>
 
-### Gitlab
+### Gitlab {#javascript-finding-examples-gitlab}
 
 - Searching Nix files for `mkYarnPackage`: <https://gitlab.com/search?scope=blobs&search=mkYarnPackage+extension%3Anix>
 - Searching just `flake.nix` files for `mkYarnPackage`: <https://gitlab.com/search?scope=blobs&search=mkYarnPackage+filename%3Aflake.nix>
@@ -105,7 +105,7 @@ After you have identified the correct system, you need to override your package
     });
 ```
 
-### Adding and Updating Javascript packages in nixpkgs
+### Adding and Updating Javascript packages in nixpkgs {#javascript-adding-or-updating-packages}
 
 To add a package from NPM to nixpkgs:
 
@@ -140,10 +140,10 @@ To update NPM packages in nixpkgs, run the same `generate.sh` script:
 ./pkgs/development/node-packages/generate.sh
 ```
 
-#### Git protocol error
+#### Git protocol error {#javascript-git-error}
 
 Some packages may have Git dependencies from GitHub specified with `git://`.
-GitHub has [disabled unecrypted Git connections](https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git), so you may see the following error when running the generate script:
+GitHub has [disabled unencrypted Git connections](https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git), so you may see the following error when running the generate script:
 
 ```
 The unauthenticated git protocol on port 9418 is no longer supported
@@ -157,6 +157,62 @@ git config --global url."https://github.com/".insteadOf git://github.com/
 
 ## Tool specific instructions {#javascript-tool-specific}
 
+### buildNpmPackage {#javascript-buildNpmPackage}
+
+`buildNpmPackage` allows you to package npm-based projects in Nixpkgs without the use of an auto-generated dependencies file (as used in [node2nix](#javascript-node2nix)). It works by utilizing npm's cache functionality -- creating a reproducible cache that contains the dependencies of a project, and pointing npm to it.
+
+```nix
+{ lib, buildNpmPackage, fetchFromGitHub }:
+
+buildNpmPackage rec {
+  pname = "flood";
+  version = "4.7.0";
+
+  src = fetchFromGitHub {
+    owner = "jesec";
+    repo = pname;
+    rev = "v${version}";
+    hash = "sha256-BR+ZGkBBfd0dSQqAvujsbgsEPFYw/ThrylxUbOksYxM=";
+  };
+
+  npmDepsHash = "sha256-tuEfyePwlOy2/mOPdXbqJskO6IowvAP4DWg8xSZwbJw=";
+
+  # The prepack script runs the build script, which we'd rather do in the build phase.
+  npmPackFlags = [ "--ignore-scripts" ];
+
+  NODE_OPTIONS = "--openssl-legacy-provider";
+
+  meta = with lib; {
+    description = "A modern web UI for various torrent clients with a Node.js backend and React frontend";
+    homepage = "https://flood.js.org";
+    license = licenses.gpl3Only;
+    maintainers = with maintainers; [ winter ];
+  };
+}
+```
+
+#### Arguments {#javascript-buildNpmPackage-arguments}
+
+* `npmDepsHash`: The output hash of the dependencies for this project. Can be calculated in advance with [`prefetch-npm-deps`](#javascript-buildNpmPackage-prefetch-npm-deps).
+* `makeCacheWritable`: Whether to make the cache writable prior to installing dependencies. Don't set this unless npm tries to write to the cache directory, as it can slow down the build.
+* `npmBuildScript`: The script to run to build the project. Defaults to `"build"`.
+* `npmFlags`: Flags to pass to all npm commands.
+* `npmInstallFlags`: Flags to pass to `npm ci` and `npm prune`.
+* `npmBuildFlags`: Flags to pass to `npm run ${npmBuildScript}`.
+* `npmPackFlags`: Flags to pass to `npm pack`.
+
+#### prefetch-npm-deps {#javascript-buildNpmPackage-prefetch-npm-deps}
+
+`prefetch-npm-deps` can calculate the hash of the dependencies of an npm project ahead of time.
+
+```console
+$ ls
+package.json package-lock.json index.js
+$ prefetch-npm-deps package-lock.json
+...
+sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
+```
+
 ### node2nix {#javascript-node2nix}
 
 #### Preparation {#javascript-node2nix-preparation}
@@ -173,7 +229,7 @@ See `node2nix` [docs](https://github.com/svanderburg/node2nix) for more info.
 #### Pitfalls {#javascript-node2nix-pitfalls}
 
 - If upstream package.json does not have a "version" attribute, `node2nix` will crash. You will need to add it like shown in [the package.json section](#javascript-upstream-package-json).
-- `node2nix` has some [bugs](https://github.com/svanderburg/node2nix/issues/238) related to working with lock files from NPM distributed with `nodejs-16_x`.
+- `node2nix` has some [bugs](https://github.com/svanderburg/node2nix/issues/238) related to working with lock files from NPM distributed with `nodejs_16`.
 - `node2nix` does not like missing packages from NPM. If you see something like `Cannot resolve version: vue-loader-v16@undefined` then you might want to try another tool. The package might have been pulled off of NPM.
 
 ### yarn2nix {#javascript-yarn2nix}
@@ -187,7 +243,7 @@ If the downloaded files contain the `package.json` and `yarn.lock` files they ca
 ```nix
 offlineCache = fetchYarnDeps {
   yarnLock = src + "/yarn.lock";
-  sha256 = "....";
+  hash = "....";
 };
 ```
 
@@ -232,7 +288,7 @@ configurePhase = ''
 This will generate a derivation including the `node_modules` directory.
 If you have to build a derivation for an integrated web framework (rails, phoenix..), this is probably the easiest way.
 
-#### Overriding dependency behavior
+#### Overriding dependency behavior {#javascript-mkYarnPackage-overriding-dependencies}
 
 In the `mkYarnPackage` record the property `pkgConfig` can be used to override packages when you encounter problems building.
 
@@ -275,13 +331,16 @@ mkYarnPackage rec {
   - The `echo 9` steps comes from this answer: <https://stackoverflow.com/a/49139496>
   - Exporting the headers in `npm_config_nodedir` comes from this issue: <https://github.com/nodejs/node-gyp/issues/1191#issuecomment-301243919>
 
-## Outside of nixpkgs {#javascript-outside-nixpkgs}
+## Outside Nixpkgs {#javascript-outside-nixpkgs}
+
+There are some other tools available that are written in the Nix language.
+These can't be used inside Nixpkgs because they require [Import From Derivation](#ssec-import-from-derivation), which is not allowed in Nixpkgs.
 
-There are some other options available that can't be used inside nixpkgs. Those other options are written in Nix. Importing them in nixpkgs will require moving the source code into nixpkgs. Using [Import From Derivation](https://nixos.wiki/wiki/Import_From_Derivation) is not allowed in Hydra at present. If you are packaging something outside nixpkgs, those can be considered
+If you are packaging something outside Nixpkgs, consider the following:
 
 ### npmlock2nix {#javascript-npmlock2nix}
 
-[npmlock2nix](https://github.com/nix-community/npmlock2nix) aims at building node_modules without code generation. It hasn't reached v1 yet, the API might be subject to change.
+[npmlock2nix](https://github.com/nix-community/npmlock2nix) aims at building `node_modules` without code generation. It hasn't reached v1 yet, so the API might be subject to change.
 
 #### Pitfalls {#javascript-npmlock2nix-pitfalls}
 
@@ -289,7 +348,7 @@ There are some [problems with npm v7](https://github.com/tweag/npmlock2nix/issue
 
 ### nix-npm-buildpackage {#javascript-nix-npm-buildpackage}
 
-[nix-npm-buildpackage](https://github.com/serokell/nix-npm-buildpackage) aims at building node_modules without code generation. It hasn't reached v1 yet, the API might change. It supports both package-lock.json and yarn.lock.
+[nix-npm-buildpackage](https://github.com/serokell/nix-npm-buildpackage) aims at building `node_modules` without code generation. It hasn't reached v1 yet, so the API might change. It supports both `package-lock.json` and `yarn.lock`.
 
 #### Pitfalls {#javascript-nix-npm-buildpackage-pitfalls}
 
diff --git a/nixpkgs/doc/languages-frameworks/lisp.section.md b/nixpkgs/doc/languages-frameworks/lisp.section.md
new file mode 100644
index 000000000000..3c408eaa09da
--- /dev/null
+++ b/nixpkgs/doc/languages-frameworks/lisp.section.md
@@ -0,0 +1,304 @@
+# lisp-modules {#lisp}
+
+This document describes the Nixpkgs infrastructure for building Common Lisp
+libraries that use ASDF (Another System Definition Facility). It lives in
+`pkgs/development/lisp-modules`.
+
+## Overview {#lisp-overview}
+
+The main entry point of the API are the Common Lisp implementation packages
+(e.g. `abcl`, `ccl`, `clasp-common-lisp`, `clisp`, `ecl`, `sbcl`)
+themselves. They have the `pkgs` and `withPackages` attributes, which can be
+used to discover available packages and to build wrappers, respectively.
+
+The `pkgs` attribute set contains packages that were automatically imported from
+Quicklisp, and any other manually defined ones. Not every package works for all
+the CL implementations (e.g. `nyxt` only makes sense for `sbcl`).
+
+The `withPackages` function is of primary utility. It is used to build runnable
+wrappers, with a pinned and pre-built ASDF FASL available in the `ASDF`
+environment variable, and `CL_SOURCE_REGISTRY`/`ASDF_OUTPUT_TRANSLATIONS`
+configured to find the desired systems at runtime.
+
+With a few exceptions, the primary thing that the infrastructure does is to run
+`asdf:load-system` for each system specified in the `systems` argument to
+`build-asdf-system`, and save the FASLs to the Nix store. Then, it makes these
+FASLs available to wrappers. Any other use-cases, such as producing SBCL
+executables with `sb-ext:save-lisp-and-die`, are achieved via overriding the
+`buildPhase` etc.
+
+In addition, Lisps have the `withOverrides` function, which can be used to
+substitute any package in the scope of their `pkgs`. This will be useful
+together with `overrideLispAttrs` when dealing with slashy ASDF systems, because
+they should stay in the main package and be built by specifying the `systems`
+argument to `build-asdf-system`.
+
+## The 90% use case example {#lisp-use-case-example}
+
+The most common way to use the library is to run ad-hoc wrappers like this:
+
+`nix-shell -p 'sbcl.withPackages (ps: with ps; [ alexandria ])'`
+
+Then, in a shell:
+
+```
+$ result/bin/sbcl
+* (load (sb-ext:posix-getenv "ASDF"))
+* (asdf:load-system 'alexandria)
+```
+
+Also one can create a `pkgs.mkShell` environment in `shell.nix`/`flake.nix`:
+
+```
+let
+  sbcl' = sbcl.withPackages (ps: [ ps.alexandria ]);
+in mkShell {
+  buildInputs = [ sbcl' ];
+}
+```
+
+Such a Lisp can now be used, e.g., to compile your sources:
+
+```
+buildPhase = ''
+  ${sbcl'}/bin/sbcl --load my-build-file.lisp
+'';
+```
+
+## Importing packages from Quicklisp {#lisp-importing-packages-from-quicklisp}
+
+The library is able to very quickly import all the packages distributed by
+Quicklisp by parsing its `releases.txt` and `systems.txt` files. These files are
+available from <http://beta.quicklisp.org/dist/quicklisp.txt>.
+
+The import process is implemented in the `import` directory as Common Lisp
+functions in the `org.lispbuilds.nix` ASDF system. To run the script, one can
+execute `ql-import.lisp`:
+
+```
+nix-shell --run 'sbcl --script ql-import.lisp'
+```
+
+The script will:
+
+1. Download the latest Quicklisp `systems.txt` and `releases.txt` files
+2. Generate an SQLite database of all QL systems in `packages.sqlite`
+3. Generate an `imported.nix` file from the database
+
+The maintainer's job there is to:
+
+1. Re-run the `ql-import.lisp` script
+2. Add missing native dependencies in `ql.nix`
+3. For packages that still don't build, package them manually in `packages.nix`
+
+Also, the `imported.nix` file **must not be edited manually**! It should only be
+generated as described in this section.
+
+### Adding native dependencies {#lisp-quicklisp-adding-native-dependencies}
+
+The Quicklisp files contain ASDF dependency data, but don't include native
+library (CFFI) dependencies, and, in the case of ABCL, Java dependencies.
+
+The `ql.nix` file contains a long list of overrides, where these dependencies
+can be added.
+
+Packages defined in `packages.nix` contain these dependencies naturally.
+
+### Trusting `systems.txt` and `releases.txt` {#lisp-quicklisp-trusting}
+
+The previous implementation of `lisp-modules` didn't fully trust the Quicklisp
+data, because there were times where the dependencies specified were not
+complete, and caused broken builds. It instead used a `nix-shell` environment to
+discover real dependencies by using the ASDF APIs.
+
+The current implementation has chosen to trust this data, because it's faster to
+parse a text file than to build each system to generate its Nix file, and
+because that way packages can be mass-imported. Because of that, there may come
+a day where some packages will break, due to bugs in Quicklisp. In that case,
+the fix could be a manual override in `packages.nix` and `ql.nix`.
+
+A known fact is that Quicklisp doesn't include dependencies on slashy systems in
+its data. This is an example of a situation where such fixes were used, e.g. to
+replace the `systems` attribute of the affected packages. (See the definition of
+`iolib`).
+
+### Quirks {#lisp-quicklisp-quirks}
+
+During Quicklisp import:
+
+- `+` in names is converted to `_plus{_,}`: `cl+ssl`->`cl_plus_ssl`, `alexandria+`->`alexandria_plus`
+- `.` to `_dot_`: `iolib.base`->`iolib_dot_base`
+- names starting with a number have a `_` prepended (`3d-vectors`->`_3d-vectors`)
+- `_` in names is converted to `__` for reversibility
+
+
+## Defining packages manually inside Nixpkgs {#lisp-defining-packages-inside}
+
+New packages that for some reason are not in Quicklisp, and so cannot be
+auto-imported, can be written in the `packages.nix` file.
+
+In that file, use the `build-asdf-system` function, which is a wrapper around
+`mkDerivation` for building ASDF systems. Various other hacks are present, such
+as `build-with-compile-into-pwd` for systems which create files during
+compilation.
+
+The `build-asdf-system` function is documented with comments in
+`nix-cl.nix`. Also, `packages.nix` is full of examples of how to use it.
+
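+Purely as a hedged illustration of the arguments mentioned above (`systems`,
+`lispLibs`), a hypothetical entry could look roughly like this; real entries
+live in `packages.nix`, where `build-asdf-system`, the fetchers and the other
+Lisp packages are in scope:
+
+```
+my-lib = build-asdf-system {
+  pname = "my-lib";                 # hypothetical package
+  version = "1.0.0";
+  src = fetchzip {
+    url = "https://example.org/my-lib-1.0.0.tar.gz";  # hypothetical source
+    hash = lib.fakeHash;            # replace with the real hash
+  };
+  systems = [ "my-lib" ];           # ASDF systems built from this source
+  lispLibs = [ alexandria ];        # other Lisp dependencies from the same scope
+};
+```
+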
+## Defining packages manually outside Nixpkgs {#lisp-defining-packages-outside}
+
+Lisp derivations (`abcl`, `sbcl` etc.) also export the `buildASDFSystem`
+function, which is the same as `build-asdf-system`, except for the `lisp`
+argument which is set to the given CL implementation.
+
+It can be used to define packages outside Nixpkgs, and, for example, add them
+into the package scope with `withOverrides` which will be discussed later on.
+
+### Including an external package in scope {#lisp-including-external-pkg-in-scope}
+
+A package defined outside Nixpkgs using `buildASDFSystem` can be woven into the
+Nixpkgs-provided scope like this:
+
+```
+let
+  alexandria = sbcl.buildASDFSystem rec {
+    pname = "alexandria";
+    version = "1.4";
+    src = fetchFromGitLab {
+      domain = "gitlab.common-lisp.net";
+      owner = "alexandria";
+      repo = "alexandria";
+      rev = "v${version}";
+      hash = "sha256-1Hzxt65dZvgOFIljjjlSGgKYkj+YBLwJCACi5DZsKmQ=";
+    };
+  };
+  sbcl' = sbcl.withOverrides (self: super: {
+    inherit alexandria;
+  });
+in sbcl'.pkgs.alexandria
+```
+
+## Overriding package attributes {#lisp-overriding-package-attributes}
+
+Packages export the `overrideLispAttrs` function, which can be used to build a
+new package with different parameters.
+
+Example of overriding `alexandria`:
+
+```
+sbcl.pkgs.alexandria.overrideLispAttrs (oldAttrs: rec {
+  version = "1.4";
+  src = fetchFromGitLab {
+    domain = "gitlab.common-lisp.net";
+    owner = "alexandria";
+    repo = "alexandria";
+    rev = "v${version}";
+    hash = "sha256-1Hzxt65dZvgOFIljjjlSGgKYkj+YBLwJCACi5DZsKmQ=";
+  };
+})
+```
+
+## Overriding packages in scope {#lisp-overriding-packages-in-scope}
+
+Packages can be woven into a new scope by using `withOverrides`:
+
+```
+let
+  sbcl' = sbcl.withOverrides (self: super: {
+    alexandria = super.alexandria.overrideLispAttrs (oldAttrs: rec {
+      pname = "alexandria";
+      version = "1.4";
+      src = fetchFromGitLab {
+        domain = "gitlab.common-lisp.net";
+        owner = "alexandria";
+        repo = "alexandria";
+        rev = "v${version}";
+        hash = "sha256-1Hzxt65dZvgOFIljjjlSGgKYkj+YBLwJCACi5DZsKmQ=";
+      };
+    });
+  });
+in builtins.elemAt sbcl'.pkgs.bordeaux-threads.lispLibs 0
+```
+
+### Dealing with slashy systems {#lisp-dealing-with-slashy-systems}
+
+Slashy (secondary) systems should not exist in their own packages! Instead, they
+should be included in the parent package as an extra entry in the `systems`
+argument to the `build-asdf-system`/`buildASDFSystem` functions.
+
+The reason is that ASDF searches for a secondary system in the `.asd` of the
+parent package. Thus, having them separate would cause either one of them not to
+load cleanly, because one would contain FASLs of itself but not the other, and
+vice versa.
+
+To package slashy systems, use `overrideLispAttrs`, like so:
+
+```
+ecl.pkgs.alexandria.overrideLispAttrs (oldAttrs: {
+  systems = oldAttrs.systems ++ [ "alexandria/tests" ];
+  lispLibs = oldAttrs.lispLibs ++ [ ecl.pkgs.rt ];
+})
+```
+
+See the respective section on using `withOverrides` for how to weave it back
+into `ecl.pkgs`.
+
+Note that sometimes the slashy systems might not only have more dependencies
+than the main one, but also create a circular dependency between `.asd`
+files. Unfortunately, in this case an ad hoc solution becomes necessary.
+
+## Building Wrappers {#lisp-building-wrappers}
+
+Wrappers can be built using the `withPackages` function of Common Lisp
+implementations (`abcl`, `ecl`, `sbcl` etc.):
+
+```
+sbcl.withPackages (ps: [ ps.alexandria ps.bordeaux-threads ])
+```
+
+Such a wrapper can then be executed like this:
+
+```
+result/bin/sbcl
+```
+
+### Loading ASDF {#lisp-loading-asdf}
+
+For best results, avoid calling `(require 'asdf)` when using the
+library-generated wrappers.
+
+Use `(load (ext:getenv "ASDF"))` instead, supplying your implementation's way of
+getting an environment variable for `ext:getenv`. This will load the
+(pre-compiled to FASL) Nixpkgs-provided version of ASDF.
+
+### Loading systems {#lisp-loading-systems}
+
+In such a wrapper, you can simply use `asdf:load-system`. This works by setting the right
+values for the `CL_SOURCE_REGISTRY`/`ASDF_OUTPUT_TRANSLATIONS` environment
+variables, so that systems are found in the Nix store and pre-compiled FASLs are
+loaded.
+
+## Adding a new Lisp {#lisp-adding-a-new-lisp}
+
+The function `wrapLisp` is used to wrap Common Lisp implementations. It adds the
+`pkgs`, `withPackages`, `withOverrides` and `buildASDFSystem` attributes to the
+derivation.
+
+`wrapLisp` takes these arguments:
+
+- `pkg`: the Lisp package
+- `faslExt`: Implementation-specific extension for FASL files
+- `program`: The name of the executable file in `${pkg}/bin/` (Default: `pkg.pname`)
+- `flags`: A list of flags to always pass to `program` (Default: `[]`)
+- `asdf`: The ASDF version to use (Default: `pkgs.asdf_3_3`)
+- `packageOverrides`: Package overrides config (Default: `(self: super: {})`)
+
+This example wraps CLISP:
+
+```
+wrapLisp {
+  pkg = clisp;
+  faslExt = "fas";
+  flags = ["-E" "UTF8"];
+}
+```
diff --git a/nixpkgs/doc/languages-frameworks/lua.section.md b/nixpkgs/doc/languages-frameworks/lua.section.md
index 17b80f07d3e1..2ed02ab9d6c7 100644
--- a/nixpkgs/doc/languages-frameworks/lua.section.md
+++ b/nixpkgs/doc/languages-frameworks/lua.section.md
@@ -129,16 +129,21 @@ Let's present the luarocks way first and the manual one in a second time.
 ### Packaging a library on luarocks {#packaging-a-library-on-luarocks}
 
 [Luarocks.org](https://luarocks.org/) is the main repository of lua packages.
-The site proposes two types of packages, the rockspec and the src.rock
+The site proposes two types of packages, the `rockspec` and the `src.rock`
 (equivalent of a [rockspec](https://github.com/luarocks/luarocks/wiki/Rockspec-format) but with the source).
-These packages can have different build types such as `cmake`, `builtin` etc .
 
-Luarocks-based packages are generated in pkgs/development/lua-modules/generated-packages.nix from
-the whitelist maintainers/scripts/luarocks-packages.csv and updated by running maintainers/scripts/update-luarocks-packages.
+Luarocks-based packages are generated in [pkgs/development/lua-modules/generated-packages.nix](https://github.com/NixOS/nixpkgs/tree/master/pkgs/development/lua-modules/generated-packages.nix) from
+the whitelist maintainers/scripts/luarocks-packages.csv and updated by running
+the script
+[maintainers/scripts/update-luarocks-packages](https://github.com/NixOS/nixpkgs/tree/master/maintainers/scripts/update-luarocks-packages):
+
+```sh
+./maintainers/scripts/update-luarocks-packages update
+```
 
 [luarocks2nix](https://github.com/nix-community/luarocks) is a tool capable of generating nix derivations from both rockspec and src.rock (and favors the src.rock).
 The automation only goes so far though and some packages need to be customized.
-These customizations go in `pkgs/development/lua-modules/overrides.nix`.
+These customizations go in [pkgs/development/lua-modules/overrides.nix](https://github.com/NixOS/nixpkgs/tree/master/pkgs/development/lua-modules/overrides.nix).
 For instance if the rockspec defines `external_dependencies`, these need to be manually added to the overrides.nix.
 
 You can try converting luarocks packages to nix packages with the command `nix-shell -p luarocks-nix` and then `luarocks nix PKG_NAME`.
@@ -183,7 +188,7 @@ luaposix = buildLuarocksPackage {
 
   src = fetchurl {
     url    = "https://raw.githubusercontent.com/rocks-moonscript-org/moonrocks-mirror/master/luaposix-34.0.4-1.src.rock";
-    sha256 = "0yrm5cn2iyd0zjd4liyj27srphvy0gjrjx572swar6zqr4dwjqp2";
+    hash = "sha256-4mLJG8n4m6y4Fqd0meUDfsOb9RHSR0qa/KD5KCwrNXs=";
   };
   disabled = (luaOlder "5.1") || (luaAtLeast "5.4");
   propagatedBuildInputs = [ bit32 lua std_normalize ];
@@ -200,7 +205,7 @@ luaposix = buildLuarocksPackage {
 The `buildLuarocksPackage` delegates most tasks to luarocks:
 
 * it adds `luarocks` as an unpacker for `src.rock` files (zip files really).
-* configurePhase` writes a temporary luarocks configuration file which location
+* `configurePhase` writes a temporary luarocks configuration file which location
 is exported via the environment variable `LUAROCKS_CONFIG`.
 * the `buildPhase` does nothing.
 * `installPhase` calls `luarocks make --deps-mode=none --tree $out` to build and
diff --git a/nixpkgs/doc/languages-frameworks/nim.section.md b/nixpkgs/doc/languages-frameworks/nim.section.md
index 16dce61d71c9..4f97c7585f33 100644
--- a/nixpkgs/doc/languages-frameworks/nim.section.md
+++ b/nixpkgs/doc/languages-frameworks/nim.section.md
@@ -25,7 +25,7 @@ nimPackages.buildNimPackage rec {
 
   src = fetchurl {
     url = "https://git.sr.ht/~ehmry/hottext/archive/v${version}.tar.gz";
-    sha256 = "sha256-hIUofi81zowSMbt1lUsxCnVzfJGN3FEiTtN8CEFpwzY=";
+    hash = "sha256-hIUofi81zowSMbt1lUsxCnVzfJGN3FEiTtN8CEFpwzY=";
   };
 
   buildInputs = with nimPackages; [
@@ -65,7 +65,7 @@ buildNimPackage rec {
   version = "2.0.4";
   src = fetchNimble {
     inherit pname version;
-    hash = "sha256-Vtcj8goI4zZPQs2TbFoBFlcR5UqDtOldaXSH/+/xULk=";
+    hash = "sha256-qDtVSnf+7rTq36WAxgsUZ8XoUk4sKwHyt8EJcY5WP+o=";
   };
   propagatedBuildInputs = [ SDL2 ];
 }
diff --git a/nixpkgs/doc/languages-frameworks/ocaml.section.md b/nixpkgs/doc/languages-frameworks/ocaml.section.md
index c6e40eaa20d0..cbdc64bf5dd3 100644
--- a/nixpkgs/doc/languages-frameworks/ocaml.section.md
+++ b/nixpkgs/doc/languages-frameworks/ocaml.section.md
@@ -38,12 +38,12 @@ Here is a simple package example.
 
 - It uses the `fetchFromGitHub` fetcher to get its source.
 
-- `duneVersion = "2"` ensures that Dune version 2 is used for the
-  build (this is the default; valid values are `"1"`, `"2"`, and `"3"`);
-  note that there is also a legacy `useDune2` boolean attribute:
-  set to `false` it corresponds to `duneVersion = "1"`; set to `true` it
-  corresponds to `duneVersion = "2"`. If both arguments (`duneVersion` and
-  `useDune2`) are given, the second one (`useDune2`) is silently ignored.
+- It also accepts a `duneVersion` parameter (valid values are `"1"`, `"2"`, and
+  `"3"`). The recommended practice is to set it only if you don't want the default
+  value and/or it depends on something else like the package version. You might see
+  a no-longer-supported argument `useDune2`. Its behavior was `useDune2 = true;` =>
+  `duneVersion = "2";` and `useDune2 = false;` => `duneVersion = "1";`. It was
+  used at the time when Dune 3 didn't exist.
 
 - It sets the optional `doCheck` attribute such that tests will be run with
   `dune runtest -p angstrom` after the build (`dune build -p angstrom`) is
@@ -71,7 +71,6 @@ Here is a simple package example.
 buildDunePackage rec {
   pname = "angstrom";
   version = "0.15.0";
-  duneVersion = "2";
 
   minimalOCamlVersion = "4.04";
 
@@ -79,7 +78,7 @@ buildDunePackage rec {
     owner  = "inhabitedtype";
     repo   = pname;
     rev    = version;
-    sha256 = "1hmrkdcdlkwy7rxhngf3cv3sa61cznnd9p5lmqhx20664gx2ibrh";
+    hash   = "sha256-MK8o+iPGANEhrrTc1Kz9LBilx2bDPQt7Pp5P2libucI=";
   };
 
   checkInputs = [ alcotest ppx_let ];
@@ -104,13 +103,11 @@ buildDunePackage rec {
   pname = "wtf8";
   version = "1.0.2";
 
-  useDune2 = true;
-
   minimalOCamlVersion = "4.02";
 
   src = fetchurl {
     url = "https://github.com/flowtype/ocaml-${pname}/releases/download/v${version}/${pname}-v${version}.tbz";
-    sha256 = "09ygcxxd5warkdzz17rgpidrd0pg14cy2svvnvy1hna080lzg7vp";
+    hash = "sha256-d5/3KUBAWRj8tntr4RkJ74KWW7wvn/B/m1nx0npnzyc=";
   };
 
   meta = with lib; {
@@ -129,3 +126,8 @@ packaged libraries may still use the old spelling: maintainers are invited to
 fix this when updating packages. Massive renaming is strongly discouraged as it
 would be challenging to review, difficult to test, and will cause unnecessary
 rebuild.
+
+The build will automatically fail if two distinct versions of the same library
+are added to `buildInputs` (which usually happens transitively because of
+`propagatedBuildInputs`). Set `dontDetectOcamlConflicts` to true to disable this
+behavior.
diff --git a/nixpkgs/doc/languages-frameworks/perl.section.md b/nixpkgs/doc/languages-frameworks/perl.section.md
index 28a78cc23441..c188e228112c 100644
--- a/nixpkgs/doc/languages-frameworks/perl.section.md
+++ b/nixpkgs/doc/languages-frameworks/perl.section.md
@@ -39,7 +39,7 @@ ClassC3 = buildPerlPackage rec {
   version = "0.21";
   src = fetchurl {
     url = "mirror://cpan/authors/id/F/FL/FLORA/${pname}-${version}.tar.gz";
-    sha256 = "1bl8z095y4js66pwxnm7s853pi9czala4sqc743fdlnk27kq94gz";
+    hash = "sha256-/5GE5xHT0uYGOQxroqj6LMU7CtKn2s6vMVoSXxL4iK4=";
   };
 };
 ```
@@ -78,7 +78,7 @@ buildPerlPackage rec {
 
   src = fetchurl {
     url = "mirror://cpan/authors/id/P/PM/PMQS/${pname}-${version}.tar.gz";
-    sha256 = "07xf50riarb60l1h6m2dqmql8q5dij619712fsgw7ach04d8g3z1";
+    hash = "sha256-4Y+HGgGQqcOfdiKcFIyMrWBEccVNVAMDBWZlFTMorh8=";
   };
 
   preConfigure = ''
@@ -96,7 +96,7 @@ ClassC3Componentised = buildPerlPackage rec {
   version = "1.0004";
   src = fetchurl {
     url = "mirror://cpan/authors/id/A/AS/ASH/${pname}-${version}.tar.gz";
-    sha256 = "0xql73jkcdbq4q9m0b0rnca6nrlvf5hyzy8is0crdk65bynvs8q1";
+    hash = "sha256-ASO9rV/FzJYZ0BH572Fxm2ZrFLMZLFATJng1NuU4FHc=";
   };
   propagatedBuildInputs = [
     ClassC3 ClassInspector TestException MROCompat
@@ -111,14 +111,14 @@ On Darwin, if a script has too many `-Idir` flags in its first line (its “sheb
 
 ImageExifTool = buildPerlPackage {
   pname = "Image-ExifTool";
-  version = "11.50";
+  version = "12.50";
 
   src = fetchurl {
-    url = "https://www.sno.phy.queensu.ca/~phil/exiftool/${pname}-${version}.tar.gz";
-    sha256 = "0d8v48y94z8maxkmw1rv7v9m0jg2dc8xbp581njb6yhr7abwqdv3";
+    url = "https://exiftool.org/${pname}-${version}.tar.gz";
+    hash = "sha256-vOhB/FwQMC8PPvdnjDvxRpU6jAZcC6GMQfc0AH4uwKg=";
   };
 
-  buildInputs = lib.optional stdenv.isDarwin shortenPerlShebang;
+  nativeBuildInputs = lib.optional stdenv.isDarwin shortenPerlShebang;
   postInstall = lib.optionalString stdenv.isDarwin ''
     shortenPerlShebang $out/bin/exiftool
   '';
@@ -146,7 +146,7 @@ $ nix-generate-from-cpan XML::Simple
     version = "2.22";
     src = fetchurl {
       url = "mirror://cpan/authors/id/G/GR/GRANTM/XML-Simple-2.22.tar.gz";
-      sha256 = "b9450ef22ea9644ae5d6ada086dc4300fa105be050a2030ebd4efd28c198eb49";
+      hash = "sha256-uUUO8i6pZErl1q2ghtxDAPoQW+BQogMOvU79KMGY60k=";
     };
     propagatedBuildInputs = [ XMLNamespaceSupport XMLSAX XMLSAXExpat ];
     meta = {
diff --git a/nixpkgs/doc/languages-frameworks/pkg-config.section.md b/nixpkgs/doc/languages-frameworks/pkg-config.section.md
new file mode 100644
index 000000000000..75cbdaeb6fe8
--- /dev/null
+++ b/nixpkgs/doc/languages-frameworks/pkg-config.section.md
@@ -0,0 +1,51 @@
+# pkg-config {#sec-pkg-config}
+
+*pkg-config* is a unified interface for declaring and querying built C/C++ libraries.
+
+Nixpkgs provides a couple of facilities for working with this tool.
+
+## Writing packages providing pkg-config modules {#pkg-config-writing-packages}
+
+Packages should set `meta.pkgConfigModules` with the list of pkg-config modules they provide.
+They should also use `testers.testMetaPkgConfig` to check that the final built package matches that list.
+Additionally, the [`validatePkgConfig` setup hook](https://nixos.org/manual/nixpkgs/stable/#validatepkgconfig) will do extra checks on to-be-installed pkg-config modules.
+
+A good example of all these things is zlib:
+
+```
+{ stdenv, pkg-config, validatePkgConfig, testers, ... }:
+
+stdenv.mkDerivation (finalAttrs: {
+  ...
+
+  nativeBuildInputs = [ pkg-config validatePkgConfig ];
+
+  passthru.tests.pkg-config = testers.testMetaPkgConfig finalAttrs.finalPackage;
+
+  meta = {
+    ...
+    pkgConfigModules = [ "zlib" ];
+  };
+})
+```
+
+## Accessing packages via pkg-config module name {#sec-pkg-config-usage}
+
+### Within Nixpkgs {#sec-pkg-config-usage-internal}
+
+A [setup hook](#setup-hook-pkg-config) is bundled in the `pkg-config` package to bring a derivation's declared build inputs into the environment.
+This will populate environment variables like `PKG_CONFIG_PATH`, `PKG_CONFIG_PATH_FOR_BUILD`, and `PKG_CONFIG_PATH_HOST` based on:
+
+ - how `pkg-config` itself is depended upon
+
+ - how other dependencies are depended upon
+
+For more details see the section on [specifying dependencies in general](#ssec-stdenv-dependencies).
+
+Normal pkg-config commands to look up dependencies by name will then work with those environment variables defined by the hook.
+
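+For example, a minimal sketch (the package `my-tool` and its source are
+hypothetical): with `pkg-config` in `nativeBuildInputs` and `zlib` in
+`buildInputs`, commands like `pkg-config --cflags --libs zlib` work inside the
+build:
+
+```
+{ stdenv, pkg-config, zlib }:
+
+stdenv.mkDerivation {
+  pname = "my-tool";    # hypothetical package
+  version = "0.1";
+  src = ./.;            # hypothetical source
+
+  # The setup hook bundled with pkg-config uses these dependency declarations
+  # to populate PKG_CONFIG_PATH and related variables during the build.
+  nativeBuildInputs = [ pkg-config ];
+  buildInputs = [ zlib ];
+}
+```
+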
+### Externally {#sec-pkg-config-usage-external}
+
+The `defaultPkgConfigPackages` package set is a set of aliases, named after the modules they provide.
+This is meant to be used by language-to-nix integrations.
+Hand-written packages should use the normal Nixpkgs attribute name instead.
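+
+As a hedged illustration, such an integration could resolve the pkg-config
+module name `zlib` to the Nixpkgs package that declares it in
+`meta.pkgConfigModules`:
+
+```
+pkgs.defaultPkgConfigPackages."zlib"
+```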
diff --git a/nixpkgs/doc/languages-frameworks/python.section.md b/nixpkgs/doc/languages-frameworks/python.section.md
index 7fb8ba2e7c27..10f5e3938ce4 100644
--- a/nixpkgs/doc/languages-frameworks/python.section.md
+++ b/nixpkgs/doc/languages-frameworks/python.section.md
@@ -10,7 +10,7 @@ Several versions of the Python interpreter are available on Nix, as well as a
 high amount of packages. The attribute `python3` refers to the default
 interpreter, which is currently CPython 3.10. The attribute `python` refers to
 CPython 2.7 for backwards-compatibility. It is also possible to refer to
-specific versions, e.g. `python39` refers to CPython 3.9, and `pypy` refers to
+specific versions, e.g. `python311` refers to CPython 3.11, and `pypy` refers to
 the default PyPy interpreter.
 
 Python is used a lot, and in different ways. This affects also how it is
@@ -26,10 +26,10 @@ however, are in separate sets, with one set per interpreter version.
 The interpreters have several common attributes. One of these attributes is
 `pkgs`, which is a package set of Python libraries for this specific
 interpreter. E.g., the `toolz` package corresponding to the default interpreter
-is `python.pkgs.toolz`, and the CPython 3.9 version is `python39.pkgs.toolz`.
+is `python.pkgs.toolz`, and the CPython 3.11 version is `python311.pkgs.toolz`.
 The main package set contains aliases to these package sets, e.g.
-`pythonPackages` refers to `python.pkgs` and `python39Packages` to
-`python39.pkgs`.
+`pythonPackages` refers to `python.pkgs` and `python311Packages` to
+`python311.pkgs`.
 
 #### Installing Python and packages {#installing-python-and-packages}
 
@@ -54,11 +54,11 @@ with `python.buildEnv` or `python.withPackages` where the interpreter and other
 executables are wrapped to be able to find each other and all of the modules.
 
 In the following examples we will start by creating a simple, ad-hoc environment
-with a nix-shell that has `numpy` and `toolz` in Python 3.9; then we will create
+with a nix-shell that has `numpy` and `toolz` in Python 3.11; then we will create
 a re-usable environment in a single-file Python script; then we will create a
 full Python environment for development with this same environment.
 
-Philosphically, this should be familiar to users who are used to a `venv` style
+Philosophically, this should be familiar to users who are used to a `venv` style
 of development: individual projects create their own Python environments without
 impacting the global environment or each other.
 
@@ -70,10 +70,10 @@ temporary shell session with a Python and a *precise* list of packages (plus
 their runtime dependencies), with no other Python packages in the Python
 interpreter's scope.
 
-To create a Python 3.9 session with `numpy` and `toolz` available, run:
+To create a Python 3.11 session with `numpy` and `toolz` available, run:
 
 ```sh
-$ nix-shell -p 'python39.withPackages(ps: with ps; [ numpy toolz ])'
+$ nix-shell -p 'python311.withPackages(ps: with ps; [ numpy toolz ])'
 ```
 
 By default `nix-shell` will start a `bash` session with this interpreter in our
@@ -81,8 +81,7 @@ By default `nix-shell` will start a `bash` session with this interpreter in our
 
 ```Python console
 [nix-shell:~/src/nixpkgs]$ python3
-Python 3.9.12 (main, Mar 23 2022, 21:36:19)
-[GCC 11.3.0] on linux
+Python 3.11.3 (main, Apr  4 2023, 22:36:41) [GCC 12.2.0] on linux
 Type "help", "copyright", "credits" or "license" for more information.
 >>> import numpy; import toolz
 ```
@@ -102,16 +101,12 @@ will still get 1 wrapped Python interpreter. We can start the interpreter
 directly like so:
 
 ```sh
-$ nix-shell -p "python39.withPackages (ps: with ps; [ numpy toolz requests ])" --run python3
+$ nix-shell -p "python311.withPackages (ps: with ps; [ numpy toolz requests ])" --run python3
 this derivation will be built:
-  /nix/store/mpn7k6bkjl41fm51342rafaqfsl10qs4-python3-3.9.12-env.drv
-this path will be fetched (0.09 MiB download, 0.41 MiB unpacked):
-  /nix/store/5gaiacnzi096b6prc6aa1pwrhncmhc8b-python3.9-toolz-0.11.2
-copying path '/nix/store/5gaiacnzi096b6prc6aa1pwrhncmhc8b-python3.9-toolz-0.11.2' from 'https://cache.nixos.org'...
-building '/nix/store/mpn7k6bkjl41fm51342rafaqfsl10qs4-python3-3.9.12-env.drv'...
-created 279 symlinks in user environment
-Python 3.9.12 (main, Mar 23 2022, 21:36:19)
-[GCC 11.3.0] on linux
+  /nix/store/r19yf5qgfiakqlhkgjahbg3zg79549n4-python3-3.11.2-env.drv
+building '/nix/store/r19yf5qgfiakqlhkgjahbg3zg79549n4-python3-3.11.2-env.drv'...
+created 273 symlinks in user environment
+Python 3.11.2 (main, Feb  7 2023, 13:52:42) [GCC 12.2.0] on linux
 Type "help", "copyright", "credits" or "license" for more information.
 >>> import requests
 >>>
@@ -150,7 +145,7 @@ Executing this script requires a `python3` that has `numpy`. Using what we learn
 in the previous section, we could startup a shell and just run it like so:
 
 ```ShellSession
-$ nix-shell -p 'python39.withPackages(ps: with ps; [ numpy ])' --run 'python3 foo.py'
+$ nix-shell -p 'python311.withPackages (ps: with ps; [ numpy ])' --run 'python3 foo.py'
 The dot product of [1 2] and [3 4] is: 11
 ```
 
@@ -190,17 +185,17 @@ can make it fully reproducible by pinning the `nixpkgs` import:
 
 ```python
 #!/usr/bin/env nix-shell
-#!nix-shell -i python3 -p "python3.withPackages(ps: [ ps.numpy ])"
-#!nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/d373d80b1207d52621961b16aa4a3438e4f98167.tar.gz
+#!nix-shell -i python3 -p "python3.withPackages (ps: [ ps.numpy ])"
+#!nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/e51209796c4262bfb8908e3d6d72302fe4e96f5f.tar.gz
 import numpy as np
 a = np.array([1,2])
 b = np.array([3,4])
 print(f"The dot product of {a} and {b} is: {np.dot(a, b)}")
 ```
 
-This will execute with the exact same versions of Python 3.8, numpy, and system
+This will execute with the exact same versions of Python 3.10, numpy, and system
 dependencies a year from now as it does today, because it will always use
-exactly git commit `d373d80b1207d52621961b16aa4a3438e4f98167` of Nixpkgs for all
+exactly git commit `e51209796c4262bfb8908e3d6d72302fe4e96f5f` of Nixpkgs for all
 of the package versions.
 
 This is also a great way to ensure the script executes identically on different
@@ -213,12 +208,15 @@ create a single script with Python dependencies, but in the course of normal
 development we're usually working in an entire package repository.
 
 As explained in the Nix manual, `nix-shell` can also load an expression from a
-`.nix` file. Say we want to have Python 3.9, `numpy` and `toolz`, like before,
+`.nix` file. Say we want to have Python 3.11, `numpy` and `toolz`, like before,
 in an environment. We can add a `shell.nix` file describing our dependencies:
 
 ```nix
 with import <nixpkgs> {};
-(python39.withPackages (ps: [ps.numpy ps.toolz])).env
+(python311.withPackages (ps: with ps; [
+  numpy
+  toolz
+])).env
 ```
 
 And then at the command line, just typing `nix-shell` produces the same
@@ -232,7 +230,7 @@ What's happening here?
    imports the `<nixpkgs>` function, `{}` calls it and the `with` statement
    brings all attributes of `nixpkgs` in the local scope. These attributes form
    the main package set.
-2. Then we create a Python 3.9 environment with the `withPackages` function, as before.
+2. Then we create a Python 3.11 environment with the `withPackages` function, as before.
 3. The `withPackages` function expects us to provide a function as an argument
    that takes the set of all Python packages and returns a list of packages to
    include in the environment. Here, we select the packages `numpy` and `toolz`
@@ -243,7 +241,7 @@ To combine this with `mkShell` you can:
 ```nix
 with import <nixpkgs> {};
 let
-  pythonEnv = python39.withPackages (ps: [
+  pythonEnv = python311.withPackages (ps: [
     ps.numpy
     ps.toolz
   ]);
@@ -327,7 +325,7 @@ on NixOS.
 { # ...
 
   environment.systemPackages = with pkgs; [
-    (python38.withPackages(ps: with ps; [ numpy toolz ]))
+    (python310.withPackages(ps: with ps; [ numpy toolz ]))
   ];
 }
 ```
@@ -348,20 +346,32 @@ building Python libraries is `buildPythonPackage`. Let's see how we can build th
 `toolz` package.
 
 ```nix
-{ lib, buildPythonPackage, fetchPypi }:
+{ lib
+, buildPythonPackage
+, fetchPypi
+}:
 
 buildPythonPackage rec {
   pname = "toolz";
   version = "0.10.0";
+  format = "setuptools";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "08fdd5ef7c96480ad11c12d472de21acd32359996f69a5259299b540feba4560";
+    hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
   };
 
+  # has no tests
   doCheck = false;
 
+  pythonImportsCheck = [
+    "toolz.itertoolz"
+    "toolz.functoolz"
+    "toolz.dicttoolz"
+  ];
+
   meta = with lib; {
+    changelog = "https://github.com/pytoolz/toolz/releases/tag/${version}";
     homepage = "https://github.com/pytoolz/toolz";
     description = "List processing tools and functional utilities";
     license = licenses.bsd3;
@@ -376,13 +386,14 @@ arguments is the name of the package, which consists of a basename (generally
 following the name on PyPi) and a version. Another argument, `src` specifies the
 source, which in this case is fetched from PyPI using the helper function
 `fetchPypi`. The argument `doCheck` is used to set whether tests should be run
-when building the package. Furthermore, we specify some (optional) meta
+when building the package. Since there are no tests, we rely on `pythonImportsCheck`
+to test whether the package can be imported. Furthermore, we specify some meta
 information. The output of the function is a derivation.
 
 An expression for `toolz` can be found in the Nixpkgs repository. As explained
 in the introduction of this Python section, a derivation of `toolz` is available
-for each interpreter version, e.g. `python39.pkgs.toolz` refers to the `toolz`
-derivation corresponding to the CPython 3.9 interpreter.
+for each interpreter version, e.g. `python311.pkgs.toolz` refers to the `toolz`
+derivation corresponding to the CPython 3.11 interpreter.
 
 The above example works when you're directly working on
 `pkgs/top-level/python-packages.nix` in the Nixpkgs repository. Often though,
@@ -395,29 +406,35 @@ and adds it along with a `numpy` package to a Python environment.
 with import <nixpkgs> {};
 
 ( let
-    my_toolz = python39.pkgs.buildPythonPackage rec {
+    my_toolz = python311.pkgs.buildPythonPackage rec {
       pname = "toolz";
       version = "0.10.0";
+      format = "setuptools";
 
-      src = python39.pkgs.fetchPypi {
+      src = fetchPypi {
         inherit pname version;
-        sha256 = "08fdd5ef7c96480ad11c12d472de21acd32359996f69a5259299b540feba4560";
+        hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
       };
 
+      # has no tests
       doCheck = false;
 
       meta = {
         homepage = "https://github.com/pytoolz/toolz/";
         description = "List processing tools and functional utilities";
+        # [...]
       };
     };
 
-  in python38.withPackages (ps: [ps.numpy my_toolz])
+  in python311.withPackages (ps: with ps; [
+    numpy
+    my_toolz
+  ])
 ).env
 ```
 
 Executing `nix-shell` will result in an environment in which you can use
-Python 3.9 and the `toolz` package. As you can see we had to explicitly mention
+Python 3.11 and the `toolz` package. As you can see we had to explicitly mention
 for which Python version we want to build a package.
 
 So, what did we do here? Well, we took the Nix expression that we used earlier
@@ -436,27 +453,45 @@ arguments `buildInputs` and `propagatedBuildInputs` to specify dependencies. If
 something is exclusively a build-time dependency, then the dependency should be
 included in `buildInputs`, but if it is (also) a runtime dependency, then it
 should be added to `propagatedBuildInputs`. Test dependencies are considered
-build-time dependencies and passed to `checkInputs`.
+build-time dependencies and passed to `nativeCheckInputs`.
 
 The following example shows which arguments are given to `buildPythonPackage` in
 order to build [`datashape`](https://github.com/blaze/datashape).
 
 ```nix
-{ lib, buildPythonPackage, fetchPypi, numpy, multipledispatch, python-dateutil, pytest }:
+{ lib
+, buildPythonPackage
+, fetchPypi
+
+# dependencies
+, numpy, multipledispatch, python-dateutil
+
+# tests
+, pytest
+}:
 
 buildPythonPackage rec {
   pname = "datashape";
   version = "0.4.7";
+  format = "setuptools";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "14b2ef766d4c9652ab813182e866f493475e65e558bed0822e38bf07bba1a278";
+    hash = "sha256-FLLvdm1MllKrgTGC6Gb0k0deZeVYvtCCLji/B7uhong=";
   };
 
-  checkInputs = [ pytest ];
-  propagatedBuildInputs = [ numpy multipledispatch python-dateutil ];
+  propagatedBuildInputs = [
+    multipledispatch
+    numpy
+    python-dateutil
+  ];
+
+  nativeCheckInputs = [
+    pytest
+  ];
 
   meta = with lib; {
+    changelog = "https://github.com/blaze/datashape/releases/tag/${version}";
     homepage = "https://github.com/ContinuumIO/datashape";
     description = "A data description language";
     license = licenses.bsd2;
@@ -466,9 +501,9 @@ buildPythonPackage rec {
 ```
 
 We can see several runtime dependencies, `numpy`, `multipledispatch`, and
-`python-dateutil`. Furthermore, we have one `checkInputs`, i.e. `pytest`. `pytest` is a
-test runner and is only used during the `checkPhase` and is therefore not added
-to `propagatedBuildInputs`.
+`python-dateutil`. Furthermore, we have `nativeCheckInputs` with `pytest`.
+`pytest` is a test runner and is only used during the `checkPhase` and is
+therefore not added to `propagatedBuildInputs`.
 
 In the previous case we had only dependencies on other Python packages to consider.
 Occasionally you also have system libraries to consider. E.g., `lxml` provides
@@ -476,20 +511,29 @@ Python bindings to `libxml2` and `libxslt`. These libraries are only required
 when building the bindings and are therefore added as `buildInputs`.
 
 ```nix
-{ lib, pkgs, buildPythonPackage, fetchPypi }:
+{ lib
+, pkgs
+, buildPythonPackage
+, fetchPypi
+}:
 
 buildPythonPackage rec {
   pname = "lxml";
   version = "3.4.4";
+  format = "setuptools";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "16a0fa97hym9ysdk3rmqz32xdjqmy4w34ld3rm3jf5viqjx65lxk";
+    hash = "sha256-s9NiusRxFydHzaNRMjjxFcvWxfi45jGb9ql6eJJyQJk=";
   };
 
-  buildInputs = [ pkgs.libxml2 pkgs.libxslt ];
+  buildInputs = [
+    pkgs.libxml2
+    pkgs.libxslt
+  ];
 
   meta = with lib; {
+    changelog = "https://github.com/lxml/lxml/releases/tag/lxml-${version}";
     description = "Pythonic binding for the libxml2 and libxslt libraries";
     homepage = "https://lxml.de";
     license = licenses.bsd3;
@@ -509,30 +553,47 @@ The bindings don't expect to find each of them in a different folder, and
 therefore we have to set `LDFLAGS` and `CFLAGS`.
 
 ```nix
-{ lib, pkgs, buildPythonPackage, fetchPypi, numpy, scipy }:
+{ lib
+, pkgs
+, buildPythonPackage
+, fetchPypi
+
+# dependencies
+, numpy
+, scipy
+}:
 
 buildPythonPackage rec {
   pname = "pyFFTW";
   version = "0.9.2";
+  format = "setuptools";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "f6bbb6afa93085409ab24885a1a3cdb8909f095a142f4d49e346f2bd1b789074";
+    hash = "sha256-9ru2r6kwhUCaskiFoaPNuJCfCVoUL01J40byvRt4kHQ=";
   };
 
-  buildInputs = [ pkgs.fftw pkgs.fftwFloat pkgs.fftwLongDouble];
-
-  propagatedBuildInputs = [ numpy scipy ];
+  buildInputs = [
+    pkgs.fftw
+    pkgs.fftwFloat
+    pkgs.fftwLongDouble
+  ];
 
-  # Tests cannot import pyfftw. pyfftw works fine though.
-  doCheck = false;
+  propagatedBuildInputs = [
+    numpy
+    scipy
+  ];
 
   preConfigure = ''
     export LDFLAGS="-L${pkgs.fftw.dev}/lib -L${pkgs.fftwFloat.out}/lib -L${pkgs.fftwLongDouble.out}/lib"
     export CFLAGS="-I${pkgs.fftw.dev}/include -I${pkgs.fftwFloat.dev}/include -I${pkgs.fftwLongDouble.dev}/include"
   '';
 
+  # Tests cannot import pyfftw. pyfftw works fine though.
+  doCheck = false;
+
   meta = with lib; {
+    changelog = "https://github.com/pyFFTW/pyFFTW/releases/tag/v${version}";
     description = "A pythonic wrapper around FFTW, the FFT library, presenting a unified interface for all the supported transforms";
     homepage = "http://hgomersall.github.com/pyFFTW";
     license = with licenses; [ bsd2 bsd3 ];
@@ -569,8 +630,14 @@ Pytest is the most common test runner for python repositories. A trivial
 test run would be:
 
 ```
-  checkInputs = [ pytest ];
-  checkPhase = "pytest";
+  nativeCheckInputs = [ pytest ];
+  checkPhase = ''
+    runHook preCheck
+
+    pytest
+
+    runHook postCheck
+  '';
 ```
 
 However, many repositories' test suites do not translate well to nix's build
@@ -579,10 +646,14 @@ sandbox, and will generally need many tests to be disabled.
 To filter tests using pytest, one can do the following:
 
 ```
-  checkInputs = [ pytest ];
+  nativeCheckInputs = [ pytest ];
   # avoid tests which need additional data or touch network
   checkPhase = ''
-    pytest tests/ --ignore=tests/integration -k 'not download and not update'
+    runHook preCheck
+
+    pytest tests/ --ignore=tests/integration -k 'not download and not update' --ignore=tests/test_failing.py
+
+    runHook postCheck
   '';
 ```
 
@@ -605,13 +676,18 @@ been removed, in this case, it's recommended to use `pytestCheckHook`.
 `test` command for a `checkPhase` which runs `pytest`. This is also beneficial
 when a package may need many items disabled to run the test suite.
 
-Using the example above, the analagous `pytestCheckHook` usage would be:
+Using the example above, the analogous `pytestCheckHook` usage would be:
 
 ```
-  checkInputs = [ pytestCheckHook ];
+  nativeCheckInputs = [
+    pytestCheckHook
+  ];
 
   # requires additional data
-  pytestFlagsArray = [ "tests/" "--ignore=tests/integration" ];
+  pytestFlagsArray = [
+    "tests/"
+    "--ignore=tests/integration"
+  ];
 
   disabledTests = [
     # touches network
@@ -624,7 +700,7 @@ Using the example above, the analagous `pytestCheckHook` usage would be:
   ];
 ```
 
-This is expecially useful when tests need to be conditionally disabled,
+This is especially useful when tests need to be conditionally disabled,
 for example:
 
 ```
@@ -653,7 +729,10 @@ To help ensure the package still works, `pythonImportsCheck` can attempt to impo
 the listed modules.
 
 ```
-  pythonImportsCheck = [ "requests" "urllib" ];
+  pythonImportsCheck = [
+    "requests"
+    "urllib"
+  ];
 ```
 
 roughly translates to:
@@ -694,9 +773,16 @@ pkg3>=1.0,<=2.0
 we can do:
 
 ```
-  nativeBuildInputs = [ pythonRelaxDepsHook ];
-  pythonRelaxDeps = [ "pkg1" "pkg3" ];
-  pythonRemoveDeps = [ "pkg2" ];
+  nativeBuildInputs = [
+    pythonRelaxDepsHook
+  ];
+  pythonRelaxDeps = [
+    "pkg1"
+    "pkg3"
+  ];
+  pythonRemoveDeps = [
+    "pkg2"
+  ];
 ```
 
 which would result in the following `requirements.txt` file:
@@ -734,6 +820,67 @@ work in any of the formats supported by `buildPythonPackage` currently,
 with the exception of `other` (see `format` in
 [`buildPythonPackage` parameters](#buildpythonpackage-parameters) for more details).
 
+#### Using unittestCheckHook {#using-unittestcheckhook}
+
+`unittestCheckHook` is a hook which will substitute the setuptools `test` command for a `checkPhase` which runs `python -m unittest discover`:
+
+```
+  nativeCheckInputs = [
+    unittestCheckHook
+  ];
+
+  unittestFlagsArray = [
+    "-s" "tests" "-v"
+  ];
+```
+
+#### Using sphinxHook {#using-sphinxhook}
+
+The `sphinxHook` is a helpful tool to build documentation and manpages
+using the popular Sphinx documentation generator.
+It is set up to automatically find common documentation source paths and
+render them using the default `html` builder.
+
+```
+  outputs = [
+    "out"
+    "doc"
+  ];
+
+  nativeBuildInputs = [
+    sphinxHook
+  ];
+```
+
+The hook will automatically build and install the artifact into the
+`doc` output, if it exists. It also provides an automatic diversion
+for the artifacts of the `man` builder into the `man` output.
+
+```
+  outputs = [
+    "out"
+    "doc"
+    "man"
+  ];
+
+  # Use multiple builders
+  sphinxBuilders = [
+    "singlehtml"
+    "man"
+  ];
+```
+
+Overwrite `sphinxRoot` when the hook is unable to find your
+documentation source root.
+
+```
+  # Configure sphinxRoot for uncommon paths
+  sphinxRoot = "weird/docs/path";
+```
+
+The hook is also available to packages outside the Python ecosystem by
+referencing the top-level `sphinxHook` attribute.
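+
+As a rough sketch (the package name and version are placeholders, and `# ...`
+elides the usual `src` and build attributes), a non-Python derivation could
+consume the top-level hook like this:
+
+```nix
+stdenv.mkDerivation {
+  pname = "mytool";
+  version = "1.2.3";
+  # ...
+
+  outputs = [ "out" "doc" ];
+
+  # referenced from the top level here, as described above
+  nativeBuildInputs = [ sphinxHook ];
+}
+```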
+
 ### Develop local package {#develop-local-package}
 
 As a Python developer you're likely aware of [development mode](http://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode)
@@ -749,7 +896,7 @@ If we create a `shell.nix` file which calls `buildPythonPackage`, and if `src`
 is a local source, and if the local source has a `setup.py`, then development
 mode is activated.
 
-In the following example, we create a simple environment that has a Python 3.9
+In the following example, we create a simple environment that has a Python 3.11
 version of our package in it, as well as its dependencies and other packages we
 like to have in the environment, all specified with `propagatedBuildInputs`.
 Indeed, we can just add any package we like to have in our environment to
@@ -757,12 +904,16 @@ Indeed, we can just add any package we like to have in our environment to
 
 ```nix
 with import <nixpkgs> {};
-with python39Packages;
+with python311Packages;
 
 buildPythonPackage rec {
   name = "mypackage";
   src = ./path/to/package/source;
-  propagatedBuildInputs = [ pytest numpy pkgs.libsndfile ];
+  propagatedBuildInputs = [
+    pytest
+    numpy
+    pkgs.libsndfile
+  ];
 }
 ```
 
@@ -790,18 +941,22 @@ Let's split the package definition from the environment definition.
 We first create a function that builds `toolz` in `~/path/to/toolz/release.nix`
 
 ```nix
-{ lib, buildPythonPackage }:
+{ lib
+, buildPythonPackage
+, fetchPypi
+}:
 
 buildPythonPackage rec {
   pname = "toolz";
   version = "0.10.0";
+  format = "setuptools";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "08fdd5ef7c96480ad11c12d472de21acd32359996f69a5259299b540feba4560";
+    hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
   };
 
   meta = with lib; {
+    changelog = "https://github.com/pytoolz/toolz/releases/tag/${version}";
     homepage = "https://github.com/pytoolz/toolz/";
     description = "List processing tools and functional utilities";
     license = licenses.bsd3;
@@ -818,9 +973,13 @@ with import <nixpkgs> {};
 
 ( let
     toolz = callPackage /path/to/toolz/release.nix {
-      buildPythonPackage = python38Packages.buildPythonPackage;
+      buildPythonPackage = python310Packages.buildPythonPackage;
     };
-  in python38.withPackages (ps: [ ps.numpy toolz ])
+  in python310.withPackages (ps: [
+    ps.numpy
+    toolz
+  ])
 ).env
 ```
 
@@ -828,17 +987,17 @@ Important to remember is that the Python version for which the package is made
 depends on the `python` derivation that is passed to `buildPythonPackage`. Nix
 tries to automatically pass arguments when possible, which is why generally you
 don't explicitly define which `python` derivation should be used. In the above
-example we use `buildPythonPackage` that is part of the set `python38Packages`,
-and in this case the `python38` interpreter is automatically used.
+example we use `buildPythonPackage` that is part of the set `python310Packages`,
+and in this case the `python310` interpreter is automatically used.
 
 ## Reference {#reference}
 
 ### Interpreters {#interpreters}
 
-Versions 2.7, 3.7, 3.8, 3.9 and 3.10 of the CPython interpreter are available
-as respectively `python27`, `python37`, `python38`, `python39` and `python310`.
+Versions 2.7, 3.8, 3.9, 3.10 and 3.11 of the CPython interpreter are available
+as respectively `python27`, `python38`, `python39`, `python310` and `python311`.
 The aliases `python2` and `python3` correspond to respectively `python27` and
-`python39`. The attribute `python` maps to `python2`. The PyPy interpreters
+`python310`. The attribute `python` maps to `python2`. The PyPy interpreters
 compatible with Python 2.7 and 3 are available as `pypy27` and `pypy3`, with
 aliases `pypy2` mapping to `pypy27` and `pypy` mapping to `pypy2`. The Nix
 expressions for the interpreters can be found in
@@ -861,7 +1020,7 @@ Each interpreter has the following attributes:
 - `buildEnv`. Function to build python interpreter environments with extra packages bundled together. See section *python.buildEnv function* for usage and documentation.
 - `withPackages`. Simpler interface to `buildEnv`. See section *python.withPackages function* for usage and documentation.
 - `sitePackages`. Alias for `lib/${libPrefix}/site-packages`.
-- `executable`. Name of the interpreter executable, e.g. `python3.8`.
+- `executable`. Name of the interpreter executable, e.g. `python3.10`.
 - `pkgs`. Set of Python packages for that specific interpreter. The package set can be modified by overriding the interpreter and passing `packageOverrides`.
 
 ### Optimizations {#optimizations}
@@ -901,7 +1060,7 @@ attribute set is created for each available Python interpreter. The available
 sets are
 
 * `pkgs.python27Packages`
-* `pkgs.python37Packages`
+* `pkgs.python3Packages`
 * `pkgs.python38Packages`
 * `pkgs.python39Packages`
 * `pkgs.python310Packages`
@@ -911,7 +1070,7 @@ sets are
 and the aliases
 
 * `pkgs.python2Packages` pointing to `pkgs.python27Packages`
-* `pkgs.python3Packages` pointing to `pkgs.python39Packages`
+* `pkgs.python3Packages` pointing to `pkgs.python310Packages`
 * `pkgs.pythonPackages` pointing to `pkgs.python2Packages`
 
 #### `buildPythonPackage` function {#buildpythonpackage-function}
@@ -923,15 +1082,32 @@ using setup hooks.
 The following is an example:
 
 ```nix
-{ lib, buildPythonPackage, fetchPypi, hypothesis, setuptools-scm, attrs, py, setuptools, six, pluggy }:
+{ lib
+, buildPythonPackage
+, fetchPypi
+
+# build-system
+, setuptools-scm
+
+# dependencies
+, attrs
+, pluggy
+, py
+, setuptools
+, six
+
+# tests
+, hypothesis
+}:
 
 buildPythonPackage rec {
   pname = "pytest";
   version = "3.3.1";
+  format = "setuptools";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "cf8436dc59d8695346fcd3ab296de46425ecab00d64096cebe79fb51ecb2eb93";
+    hash = "sha256-z4Q23FnYaVNG/NOrKW3kZCXsqwDWQJbOvnn7Ueyy65M=";
   };
 
   postPatch = ''
@@ -939,20 +1115,35 @@ buildPythonPackage rec {
     rm testing/test_argcomplete.py
   '';
 
-  checkInputs = [ hypothesis ];
-  nativeBuildInputs = [ setuptools-scm ];
-  propagatedBuildInputs = [ attrs py setuptools six pluggy ];
+  nativeBuildInputs = [
+    setuptools-scm
+  ];
+
+  propagatedBuildInputs = [
+    attrs
+    py
+    setuptools
+    six
+    pluggy
+  ];
+
+  nativeCheckInputs = [
+    hypothesis
+  ];
 
   meta = with lib; {
-    maintainers = with maintainers; [ domenkozar lovek323 madjar lsix ];
+    changelog = "https://github.com/pytest-dev/pytest/releases/tag/${version}";
     description = "Framework for writing tests";
+    homepage = "https://github.com/pytest-dev/pytest";
+    license = licenses.mit;
+    maintainers = with maintainers; [ domenkozar lovek323 madjar lsix ];
   };
 }
 ```
 
 The `buildPythonPackage` mainly does four things:
 
-* In the `buildPhase`, it calls `${python.interpreter} setup.py bdist_wheel` to
+* In the `buildPhase`, it calls `${python.pythonForBuild.interpreter} setup.py bdist_wheel` to
   build a wheel binary zipfile.
 * In the `installPhase`, it installs the wheel file using `pip install *.whl`.
 * In the `postFixup` phase, the `wrapPythonPrograms` bash function is called to
@@ -961,7 +1152,7 @@ The `buildPythonPackage` mainly does four things:
 * In the `installCheck` phase, `${python.interpreter} setup.py test` is run.
 
 By default tests are run because `doCheck = true`. Test dependencies, like
-e.g. the test runner, should be added to `checkInputs`.
+e.g. the test runner, should be added to `nativeCheckInputs`.
 
 By default `meta.platforms` is set to the same value
 as the interpreter unless overridden otherwise.
@@ -1015,7 +1206,7 @@ because their behaviour is different:
 * `buildInputs ? []`: Build and/or run-time dependencies that need to be
   compiled for the host machine. Typically non-Python libraries which are being
   linked.
-* `checkInputs ? []`: Dependencies needed for running the `checkPhase`. These
+* `nativeCheckInputs ? []`: Dependencies needed for running the `checkPhase`. These
   are added to `nativeBuildInputs` when `doCheck = true`. Items listed in
   `tests_require` go here.
 * `propagatedBuildInputs ? []`: Aside from propagating dependencies,
@@ -1038,19 +1229,19 @@ with import <nixpkgs> {};
     packageOverrides = self: super: {
       pandas = super.pandas.overridePythonAttrs(old: rec {
         version = "0.19.1";
-        src =  super.fetchPypi {
+        src =  fetchPypi {
           pname = "pandas";
           inherit version;
-          sha256 = "08blshqj9zj1wyjhhw3kl2vas75vhhicvv72flvf1z3jvapgw295";
+          hash = "sha256-JQn+rtpy/OA2deLszSKEuxyttqBzcAil50H+JDHUdCE=";
         };
       });
     };
   in pkgs.python3.override {inherit packageOverrides; self = python;};
 
-in python.withPackages(ps: [ps.blaze])).env
+in python.withPackages(ps: [ ps.blaze ])).env
 ```
 
-#### Optional extra dependencies
+#### Optional extra dependencies {#python-optional-dependencies}
 
 Some packages define optional dependencies for additional features. With
 `setuptools` this is called `extras_require` and `flit` calls it
@@ -1093,18 +1284,25 @@ called with `callPackage` and passed `python` or `pythonPackages` (possibly
 specifying an interpreter version), like this:
 
 ```nix
-{ lib, python3 }:
+{ lib
+, python3
+, fetchPypi
+}:
 
 python3.pkgs.buildPythonApplication rec {
   pname = "luigi";
   version = "2.7.9";
+  format = "setuptools";
 
-  src = python3.pkgs.fetchPypi {
+  src = fetchPypi {
     inherit pname version;
-    sha256 = "035w8gqql36zlan0xjrzz9j4lh9hs0qrsgnbyw07qs7lnkvbdv9x";
+    hash = "sha256-Pe229rT0aHwA98s+nTHQMEFKZPo/yw6sot8MivFDvAw=";
   };
 
-  propagatedBuildInputs = with python3.pkgs; [ tornado python-daemon ];
+  propagatedBuildInputs = with python3.pkgs; [
+    tornado
+    python-daemon
+  ];
 
   meta = with lib; {
     ...
@@ -1186,7 +1384,10 @@ running `nix-shell` with the following `shell.nix`
 with import <nixpkgs> {};
 
 (python3.buildEnv.override {
-  extraLibs = with python3Packages; [ numpy requests ];
+  extraLibs = with python3Packages; [
+    numpy
+    requests
+  ];
 }).env
 ```
 
@@ -1212,7 +1413,7 @@ example for the Pyramid Web Framework environment can be written like this:
 ```nix
 with import <nixpkgs> {};
 
-python.withPackages (ps: [ps.pyramid])
+python.withPackages (ps: [ ps.pyramid ])
 ```
 
 `withPackages` passes the correct package set for the specific interpreter
@@ -1222,7 +1423,7 @@ version as an argument to the function. In the above example, `ps` equals
 ```nix
 with import <nixpkgs> {};
 
-python3.withPackages (ps: [ps.pyramid])
+python3.withPackages (ps: [ ps.pyramid ])
 ```
 
 Now, `ps` is set to `python3Packages`, matching the version of the interpreter.
@@ -1234,7 +1435,10 @@ thus be also written like this:
 ```nix
 with import <nixpkgs> {};
 
-(python38.withPackages (ps: [ps.numpy ps.requests])).env
+(python3.withPackages (ps: with ps; [
+  numpy
+  requests
+])).env
 ```
 
 In contrast to `python.buildEnv`, `python.withPackages` does not support the
@@ -1260,16 +1464,18 @@ are used in `buildPythonPackage`.
 - `pytestCheckHook` to run tests with `pytest`. See [example usage](#using-pytestcheckhook).
 - `pythonCatchConflictsHook` to check whether a Python package is not already existing.
 - `pythonImportsCheckHook` to check whether importing the listed modules works.
+- `pythonRelaxDepsHook` will relax Python dependency restrictions for the package.
+  See [example usage](#using-pythonrelaxdepshook).
 - `pythonRemoveBinBytecode` to remove bytecode from the `/bin` folder.
 - `setuptoolsBuildHook` to build a wheel using `setuptools`.
 - `setuptoolsCheckHook` to run tests with `python setup.py test`.
+- `sphinxHook` to build documentation and manpages using Sphinx.
 - `venvShellHook` to source a Python 3 `venv` at the `venvDir` location. A
   `venv` is created if it does not yet exist. `postVenvCreation` can be used to
   run commands only after the venv is first created.
 - `wheelUnpackHook` to move a wheel to the correct folder so it can be installed
   with the `pipInstallHook`.
-- `pythonRelaxDepsHook` will relax Python dependencies restrictions for the package.
-  See [example usage](#using-pythonrelaxdepshook).
+- `unittestCheckHook` will run tests with `python -m unittest discover`. See [example usage](#using-unittestcheckhook).
 
 ### Development mode {#development-mode}
 
@@ -1308,10 +1514,6 @@ Note: There is a boolean value `lib.inNixShell` set to `true` if nix-shell is in
 Packages inside nixpkgs are written by hand. However, many tools exist in the
 community to help save time. No tool is preferred at the moment.
 
-- [pypi2nix](https://github.com/nix-community/pypi2nix): Generate Nix
-  expressions for your Python project. Note that [sharing derivations from
-  pypi2nix with nixpkgs is possible but not
-  encouraged](https://github.com/nix-community/pypi2nix/issues/222#issuecomment-443497376).
 - [nixpkgs-pytools](https://github.com/nix-community/nixpkgs-pytools)
 - [poetry2nix](https://github.com/nix-community/poetry2nix)
 
@@ -1324,7 +1526,7 @@ has security implications and is relevant for those using Python in a
 
 When the environment variable `DETERMINISTIC_BUILD` is set, all bytecode will
 have timestamp 1. The `buildPythonPackage` function sets `DETERMINISTIC_BUILD=1`
-and [PYTHONHASHSEED=0](https://docs.python.org/3.8/using/cmdline.html#envvar-PYTHONHASHSEED).
+and [PYTHONHASHSEED=0](https://docs.python.org/3.11/using/cmdline.html#envvar-PYTHONHASHSEED).
 Both are also exported in `nix-shell`.
 
 ### Automatic tests {#automatic-tests}
@@ -1339,18 +1541,27 @@ example of such a situation is when `py.test` is used.
 #### Common issues {#common-issues}
 
 * Non-working tests can often be deselected. By default `buildPythonPackage`
-  runs `python setup.py test`. Most Python modules follows the standard test
-  protocol where the pytest runner can be used instead. `py.test` supports a
-  `-k` parameter to ignore test methods or classes:
+  runs `python setup.py test`, which is deprecated. Most Python modules, however,
+  do follow the standard test protocol where the pytest runner can be used
+  instead. `pytest` supports the `-k` and `--ignore` parameters to ignore test
+  methods or classes as well as whole files. For `pytestCheckHook` these are
+  conveniently exposed as `disabledTests` and `disabledTestPaths` respectively.
 
   ```nix
   buildPythonPackage {
     # ...
-    # assumes the tests are located in tests
-    checkInputs = [ pytest ];
-    checkPhase = ''
-      py.test -k 'not function_name and not other_function' tests
-    '';
+    nativeCheckInputs = [
+      pytestCheckHook
+    ];
+
+    disabledTests = [
+      "function_name"
+      "other_function"
+    ];
+
+    disabledTestPaths = [
+      "this/file.py"
+    ];
   }
   ```
 
@@ -1378,9 +1589,13 @@ with import <nixpkgs> {};
     packageOverrides = self: super: {
       pandas = super.pandas.overridePythonAttrs(old: {name="foo";});
     };
-  in pkgs.python38.override {inherit packageOverrides;};
+  in pkgs.python310.override {
+    inherit packageOverrides;
+  };
 
-in python.withPackages(ps: [ps.pandas])).env
+in python.withPackages (ps: [
+  ps.pandas
+])).env
 ```
 
 Using `nix-build` on this expression will build an environment that contains the
@@ -1400,7 +1615,11 @@ with import <nixpkgs> {};
     packageOverrides = self: super: {
       scipy = super.scipy_0_17;
     };
-  in (pkgs.python38.override {inherit packageOverrides;}).withPackages (ps: [ps.blaze])
+  in (pkgs.python310.override {
+    inherit packageOverrides;
+  }).withPackages (ps: [
+    ps.blaze
+  ])
 ).env
 ```
 
@@ -1414,11 +1633,11 @@ If you want the whole of Nixpkgs to use your modifications, then you can use
 let
   pkgs = import <nixpkgs> {};
   newpkgs = import pkgs.path { overlays = [ (self: super: {
-    python38 = let
+    python310 = let
       packageOverrides = python-self: python-super: {
         numpy = python-super.numpy_1_18;
       };
-    in super.python38.override {inherit packageOverrides;};
+    in super.python310.override {inherit packageOverrides;};
   } ) ]; };
 in newpkgs.inkscape
 ```
@@ -1473,7 +1692,7 @@ of such package using the feature is `pkgs/tools/X11/xpra/default.nix`.
 As a workaround, install it as an extra `preInstall` step:
 
 ```shell
-${python.interpreter} setup.py install_data --install-dir=$out --root=$out
+${python.pythonForBuild.interpreter} setup.py install_data --install-dir=$out --root=$out
 sed -i '/ = data\_files/d' setup.py
 ```
 
@@ -1611,13 +1830,13 @@ If you need to change a package's attribute(s) from `configuration.nix` you coul
 
 ```nix
   nixpkgs.config.packageOverrides = super: {
-    python = super.python.override {
+    python3 = super.python3.override {
       packageOverrides = python-self: python-super: {
-        twisted = python-super.twisted.overrideAttrs (oldAttrs: {
+        twisted = python-super.twisted.overridePythonAttrs (oldAttrs: {
           src = super.fetchPypi {
-            pname = "twisted";
+            pname = "Twisted";
             version = "19.10.0";
-            sha256 = "7394ba7f272ae722a74f3d969dcf599bc4ef093bc392038748a490f1724a515d";
+            hash = "sha256-c5S6fycq5yKnTz2Wnc9Zm8TvCTvDkgOHSKSQ8XJKUV0=";
             extension = "tar.bz2";
           };
         });
@@ -1653,9 +1872,9 @@ self: super: {
     packageOverrides = python-self: python-super: {
       twisted = python-super.twisted.overrideAttrs (oldAttrs: {
         src = super.fetchPypi {
-          pname = "twisted";
+          pname = "Twisted";
           version = "19.10.0";
-          sha256 = "7394ba7f272ae722a74f3d969dcf599bc4ef093bc392038748a490f1724a515d";
+          hash = "sha256-c5S6fycq5yKnTz2Wnc9Zm8TvCTvDkgOHSKSQ8XJKUV0=";
           extension = "tar.bz2";
         };
       });
@@ -1695,7 +1914,7 @@ In a `setup.py` or `setup.cfg` it is common to declare dependencies:
 
 * `setup_requires` corresponds to `nativeBuildInputs`
 * `install_requires` corresponds to `propagatedBuildInputs`
-* `tests_require` corresponds to `checkInputs`
+* `tests_require` corresponds to `nativeCheckInputs`
 
 ## Contributing {#contributing}
 
@@ -1721,17 +1940,21 @@ The following rules are desired to be respected:
   that characters should be converted to lowercase and `.` and `_` should be
   replaced by a single `-` (foo-bar-baz instead of Foo__Bar.baz).
   If necessary, `pname` has to be given a different value within `fetchPypi`.
+* Packages from sources such as GitHub and GitLab that do not exist on PyPI
+  should not use a name that is already used on PyPI. When possible, they should
+  use the package repository name prefixed with the owner (e.g. organization) name
+  and use `-` as the delimiter.
 * Attribute names in `python-packages.nix` should be sorted alphanumerically to
   avoid merge conflicts and ease locating attributes.
 
-## Package set maintenance
+## Package set maintenance {#python-package-set-maintenance}
 
 The whole Python package set has a lot of packages that do not see regular
 updates, because they either are a very fragile component in the Python
 ecosystem, like for example the `hypothesis` package, or packages that have
 no maintainer, so maintenance falls back to the package set maintainers.
 
-### Updating packages in bulk
+### Updating packages in bulk {#python-package-bulk-updates}
 
 There is a tool to update a lot of Python libraries in bulk; it exists at
 `maintainers/scripts/update-python-libraries` in this repository.
@@ -1744,6 +1967,11 @@ hosted on GitHub, exporting a `GITHUB_API_TOKEN` is highly recommended.
 Updating packages in bulk leads to lots of breakages, which is why a
 stabilization period on the `python-unstable` branch is required.
 
+If a package is fragile and often breaks during these bulk updates, it
+may be reasonable to set `passthru.skipBulkUpdate = true` in the
+derivation. This decision should not be made on a whim and should
+always be supported by a qualifying comment.
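+
+A minimal, illustrative sketch (the qualifying comment below is hypothetical):
+
+```nix
+buildPythonPackage rec {
+  # ...
+
+  passthru = {
+    # Fragile: breaks on nearly every bulk update of its dependencies,
+    # so it is updated manually instead.
+    skipBulkUpdate = true;
+  };
+}
+```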
+
 Once the branch is sufficiently stable it should normally be merged
 into the `staging` branch.
 
@@ -1754,7 +1982,7 @@ would be:
 $ maintainers/scripts/update-python-libraries --target minor --commit --use-pkgs-prefix pkgs/development/python-modules/**/default.nix
 ```
 
-## CPython Update Schedule
+## CPython Update Schedule {#python-cpython-update-schedule}
 
 With [PEP 602](https://www.python.org/dev/peps/pep-0602/), CPython now
 follows a yearly release cadence. In nixpkgs, all supported interpreters
diff --git a/nixpkgs/doc/languages-frameworks/qt.section.md b/nixpkgs/doc/languages-frameworks/qt.section.md
index 986deeb0d4b2..e09194e391e1 100644
--- a/nixpkgs/doc/languages-frameworks/qt.section.md
+++ b/nixpkgs/doc/languages-frameworks/qt.section.md
@@ -2,14 +2,11 @@
 
 Writing Nix expressions for Qt libraries and applications is largely similar as for other C++ software.
 This section assumes some knowledge of the latter.
-There are two problems that the Nixpkgs Qt infrastructure addresses,
-which are not shared by other C++ software:
 
-1.  There are usually multiple supported versions of Qt in Nixpkgs.
-    All of a package's dependencies must be built with the same version of Qt.
-    This is similar to the version constraints imposed on interpreted languages like Python.
-2.  Qt makes extensive use of runtime dependency detection.
-    Runtime dependencies are made into build dependencies through wrappers.
+The major caveat with Qt applications is that Qt uses a plugin system to load additional modules at runtime,
+from a list of well-known locations. In Nixpkgs, we patch QtCore to instead use an environment variable,
+and wrap Qt applications to set it to the right paths. This effectively makes the runtime dependencies
+pure and explicit at build-time, at the cost of introducing an extra indirection.
 
 ## Nix expression for a Qt package (default.nix) {#qt-default-nix}
 
@@ -95,66 +92,3 @@ stdenv.mkDerivation {
 This means that scripts won't be automatically wrapped so you'll need to manually wrap them as previously mentioned.
 An example of when you'd always need to do this is with Python applications that use PyQt.
 :::
-
-## Adding a library to Nixpkgs {#adding-a-library-to-nixpkgs}
-
-Add Qt libraries to `qt5-packages.nix` to make them available for every
-supported Qt version.
-
-### Example adding a Qt library {#qt-library-all-packages-nix}
-
-The following represents the contents of `qt5-packages.nix`.
-
-```nix
-{
-  # ...
-
-  mylib = callPackage ../path/to/mylib {};
-
-  # ...
-}
-```
-
-Libraries are built with every available version of Qt.
-Use the `meta.broken` attribute to disable the package for unsupported Qt versions:
-
-```nix
-{ stdenv, lib, qtbase }:
-
-stdenv.mkDerivation {
-  # ...
-  # Disable this library with Qt < 5.9.0
-  meta.broken = lib.versionOlder qtbase.version "5.9.0";
-}
-```
-
-## Adding an application to Nixpkgs {#adding-an-application-to-nixpkgs}
-
-Add Qt applications to `qt5-packages.nix`. Add an alias to `all-packages.nix`
-to select the Qt 5 version used for the application.
-
-### Example adding a Qt application {#qt-application-all-packages-nix}
-
-The following represents the contents of `qt5-packages.nix`.
-
-```nix
-{
-  # ...
-
-  myapp = callPackage ../path/to/myapp {};
-
-  # ...
-}
-```
-
-The following represents the contents of `all-packages.nix`.
-
-```nix
-{
-  # ...
-
-  myapp = libsForQt5.myapp;
-
-  # ...
-}
-```
diff --git a/nixpkgs/doc/languages-frameworks/ruby.section.md b/nixpkgs/doc/languages-frameworks/ruby.section.md
index d1265097d206..f1953500fa32 100644
--- a/nixpkgs/doc/languages-frameworks/ruby.section.md
+++ b/nixpkgs/doc/languages-frameworks/ruby.section.md
@@ -201,7 +201,7 @@ $ nix-shell --run 'ruby -rpg -e "puts PG.library_version"'
 
 Of course for this use-case one could also use overlays since the configuration for `pg` depends on the `postgresql` alias, but for demonstration purposes this has to suffice.
 
-### Platform-specific gems
+### Platform-specific gems {#ruby-platform-specif-gems}
 
 Right now, bundix has some issues with pre-built, platform-specific gems: [bundix PR #68](https://github.com/nix-community/bundix/pull/68).
 Until this is solved, you can tell bundler to not use platform-specific gems and instead build them from source each time:
diff --git a/nixpkgs/doc/languages-frameworks/rust.section.md b/nixpkgs/doc/languages-frameworks/rust.section.md
index e19783e29e6a..7d46bdbd4d48 100644
--- a/nixpkgs/doc/languages-frameworks/rust.section.md
+++ b/nixpkgs/doc/languages-frameworks/rust.section.md
@@ -13,9 +13,9 @@ into your `configuration.nix` or bring them into scope with `nix-shell -p rustc
 
 For other versions such as daily builds (beta and nightly),
 use either `rustup` from nixpkgs (which will manage the rust installation in your home directory),
-or use a community maintained [Rust overlay](#using-community-rust-overlays).
+or use [community maintained Rust toolchains](#using-community-maintained-rust-toolchains).
 
-## Compiling Rust applications with Cargo {#compiling-rust-applications-with-cargo}
+## `buildRustPackage`: Compiling Rust applications with Cargo {#compiling-rust-applications-with-cargo}
 
 Rust applications are packaged by using the `buildRustPackage` helper from `rustPlatform`:
 
@@ -30,10 +30,10 @@ rustPlatform.buildRustPackage rec {
     owner = "BurntSushi";
     repo = pname;
     rev = version;
-    sha256 = "1hqps7l5qrjh9f914r5i6kmcz6f1yb951nv4lby0cjnp5l253kps";
+    hash = "sha256-+s5RBC3XSgb8omTbUNLywZnP6jSxZBKSS1BmXOjRF8M=";
   };
 
-  cargoSha256 = "03wf9r2csi6jpa7v5sw5lpxkrk4wfzwmzx7k3991q3bdjzcwnnwp";
+  cargoHash = "sha256-jtBw4ahSl88L0iuCXxQgZVm1EcboWRJMNtjxLVTtzts=";
 
   meta = with lib; {
     description = "A fast line-oriented regex search tool, similar to ag and ack";
@@ -50,6 +50,11 @@ package. `cargoHash256` is used for traditional Nix SHA-256 hashes,
 such as the one in the example above. `cargoHash` should instead be
 used for [SRI](https://www.w3.org/TR/SRI/) hashes. For example:
 
+Exception: If the application has cargo `git` dependencies, the `cargoHash`/`cargoSha256`
+approach will not work, and you will need to copy the `Cargo.lock` file of the application
+to nixpkgs and continue with the next section for specifying the options of the `cargoLock`
+section.
+
 ```nix
   cargoHash = "sha256-l1vL2ZdtDRxSGvP0X/l3nMw8+6WF67KPutJEzUROjg8=";
 ```
@@ -97,10 +102,10 @@ rustPlatform.buildRustPackage rec {
 
   src = fetchCrate {
     inherit pname version;
-    sha256 = "1mqaynrqaas82f5957lx31x80v74zwmwmjxxlbywajb61vh00d38";
+    sha256 = "sha256-aDQA4A5mScX9or3Lyiv/5GyAehidnpKKE0grhbP1Ctc=";
   };
 
-  cargoHash = "sha256-JmBZcDVYJaK1cK05cxx5BrnGWp4t8ca6FLUbvIot67s=";
+  cargoHash = "sha256-tbrTbutUs5aPSV+yE0IBUZAAytgmZV7Eqxia7g+9zRs=";
   cargoDepsName = pname;
 
   # ...
@@ -157,7 +162,7 @@ required to build a rust package. A simple fix is to use:
 
 ```nix
 postPatch = ''
-  cp ${./Cargo.lock} Cargo.lock
+  ln -s ${./Cargo.lock} Cargo.lock
 '';
 ```
 
@@ -186,6 +191,23 @@ added. To find the correct hash, you can first use `lib.fakeSha256` or
 `lib.fakeHash` as a stub hash. Building the package (and thus the
 vendored dependencies) will then inform you of the correct hash.
 
+For usage outside nixpkgs, `allowBuiltinFetchGit` could be used to
+avoid having to specify `outputHashes`. For example:
+
+```nix
+rustPlatform.buildRustPackage rec {
+  pname = "myproject";
+  version = "1.0.0";
+
+  cargoLock = {
+    lockFile = ./Cargo.lock;
+    allowBuiltinFetchGit = true;
+  };
+
+  # ...
+}
+```
+
 ### Cargo features {#cargo-features}
 
 You can disable default features using `buildNoDefaultFeatures`, and
@@ -319,6 +341,32 @@ The above are just guidelines, and exceptions may be granted on a case-by-case b
 However, please check if it's possible to disable a problematic subset of the
 test suite and leave a comment explaining your reasoning.
 
+This can be achieved with `--skip` in `checkFlags`:
+
+```nix
+rustPlatform.buildRustPackage {
+  /* ... */
+  checkFlags = [
+    # reason for disabling test
+    "--skip=example::tests::example_test"
+  ];
+}
+```
+
+#### Using `cargo-nextest` {#using-cargo-nextest}
+
+Tests can be run with [cargo-nextest](https://github.com/nextest-rs/nextest)
+by setting `useNextest = true`. The same options still apply, but nextest
+accepts a different set of arguments and the settings might need to be
+adapted to be compatible with cargo-nextest.
+
+```nix
+rustPlatform.buildRustPackage {
+  /* ... */
+  useNextest = true;
+}
+```
+
 #### Setting `test-threads` {#setting-test-threads}
 
 `buildRustPackage` will use parallel test threads by default,
@@ -368,13 +416,13 @@ rustPlatform.buildRustPackage rec {
 }
 ```
 
-## Compiling non-Rust packages that include Rust code {#compiling-non-rust-packages-that-include-rust-code}
+### Compiling non-Rust packages that include Rust code {#compiling-non-rust-packages-that-include-rust-code}
 
 Several non-Rust packages incorporate Rust code for performance- or
 security-sensitive parts. `rustPlatform` exposes several functions and
 hooks that can be used to integrate Cargo in non-Rust packages.
 
-### Vendoring of dependencies {#vendoring-of-dependencies}
+#### Vendoring of dependencies {#vendoring-of-dependencies}
 
 Since network access is not allowed in sandboxed builds, Rust crate
 dependencies need to be retrieved using a fetcher. `rustPlatform`
@@ -391,8 +439,8 @@ cargoDeps = rustPlatform.fetchCargoTarball {
 ```
 
 The `src` attribute is required, as well as a hash specified through
-one of the `sha256` or `hash` attributes. The following optional
-attributes can also be used:
+the `hash` attribute. The following optional attributes can
+also be used:
 
 * `name`: the name that is used for the dependencies tarball.  If
   `name` is not specified, then the name `cargo-deps` will be used.
@@ -434,7 +482,7 @@ added. To find the correct hash, you can first use `lib.fakeSha256` or
 `lib.fakeHash` as a stub hash. Building `cargoDeps` will then inform
 you of the correct hash.
 
-### Hooks {#hooks}
+#### Hooks {#hooks}
 
 `rustPlatform` provides the following hooks to automate Cargo builds:
 
@@ -458,16 +506,19 @@ you of the correct hash.
   `maturinBuildFlags`.
 * `cargoCheckHook`: run tests using Cargo. The build type for checks
   can be set using `cargoCheckType`. Features can be specified with
-  `cargoCheckNoDefaultFeaatures` and `cargoCheckFeatures`. Additional
+  `cargoCheckNoDefaultFeatures` and `cargoCheckFeatures`. Additional
   flags can be passed to the tests using `checkFlags` and
   `checkFlagsArray`. By default, tests are run in parallel. This can
   be disabled by setting `dontUseCargoParallelTests`.
+* `cargoNextestHook`: run tests using
+  [cargo-nextest](https://github.com/nextest-rs/nextest). The same
+  options for `cargoCheckHook` also applies to `cargoNextestHook`.
 * `cargoInstallHook`: install binaries and static/shared libraries
   that were built using `cargoBuildHook`.
 * `bindgenHook`: for crates which use `bindgen` as a build dependency, lets
   `bindgen` find `libclang` and `libclang` find the libraries in `buildInputs`.
 
-### Examples {#examples}
+#### Examples {#examples}
 
 #### Python package using `setuptools-rust` {#python-package-using-setuptools-rust}
 
@@ -484,7 +535,9 @@ directory of the `tokenizers` project's source archive, we use
 ```nix
 { fetchFromGitHub
 , buildPythonPackage
+, cargo
 , rustPlatform
+, rustc
 , setuptools-rust
 }:
 
@@ -502,16 +555,17 @@ buildPythonPackage rec {
   cargoDeps = rustPlatform.fetchCargoTarball {
     inherit src sourceRoot;
     name = "${pname}-${version}";
-    hash = "sha256-BoHIN/519Top1NUBjpB/oEMqi86Omt3zTQcXFWqrek0=";
+    hash = "sha256-miW//pnOmww2i6SOGbkrAIdc/JMDT4FJLqdMFojZeoY=";
   };
 
   sourceRoot = "source/bindings/python";
 
-  nativeBuildInputs = [ setuptools-rust ] ++ (with rustPlatform; [
-    cargoSetupHook
-    rust.cargo
-    rust.rustc
-  ]);
+  nativeBuildInputs = [
+    cargo
+    rustPlatform.cargoSetupHook
+    rustc
+    setuptools-rust
+  ];
 
   # ...
 }
@@ -539,7 +593,7 @@ buildPythonPackage rec {
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "1i1mx5y9hkyfi9jrrkcw804hmkcglxi6rmf7vin7jfnbr2bf4q64";
+    hash = "sha256-xGDilsjLOnls3MfVbGKnj80KCUCczZxlis5PmHzpNcQ=";
   };
 
   cargoDeps = rustPlatform.fetchCargoTarball {
@@ -579,7 +633,7 @@ buildPythonPackage rec {
     owner = "Qiskit";
     repo = "retworkx";
     rev = version;
-    sha256 = "11n30ldg3y3y6qxg3hbj837pnbwjkqw3nxq6frds647mmmprrd20";
+    hash = "sha256-11n30ldg3y3y6qxg3hbj837pnbwjkqw3nxq6frds647mmmprrd20=";
   };
 
   cargoDeps = rustPlatform.fetchCargoTarball {
@@ -596,105 +650,19 @@ buildPythonPackage rec {
 }
 ```
 
-## Compiling Rust crates using Nix instead of Cargo {#compiling-rust-crates-using-nix-instead-of-cargo}
+## `buildRustCrate`: Compiling Rust crates using Nix instead of Cargo {#compiling-rust-crates-using-nix-instead-of-cargo}
 
 ### Simple operation {#simple-operation}
 
 When run, `cargo build` produces a file called `Cargo.lock`,
 containing pinned versions of all dependencies. Nixpkgs contains a
-tool called `carnix` (`nix-env -iA nixos.carnix`), which can be used
-to turn a `Cargo.lock` into a Nix expression.
-
-That Nix expression calls `rustc` directly (hence bypassing Cargo),
-and can be used to compile a crate and all its dependencies. Here is
-an example for a minimal `hello` crate:
-
-```ShellSession
-$ cargo new hello
-$ cd hello
-$ cargo build
-     Compiling hello v0.1.0 (file:///tmp/hello)
-     Finished dev [unoptimized + debuginfo] target(s) in 0.20 secs
-$ carnix -o hello.nix --src ./. Cargo.lock --standalone
-$ nix-build hello.nix -A hello_0_1_0
-```
-
-Now, the file produced by the call to `carnix`, called `hello.nix`, looks like:
-
-```nix
-# Generated by carnix 0.6.5: carnix -o hello.nix --src ./. Cargo.lock --standalone
-{ stdenv, buildRustCrate, fetchgit }:
-let kernel = stdenv.buildPlatform.parsed.kernel.name;
-    # ... (content skipped)
-in
-rec {
-  hello = f: hello_0_1_0 { features = hello_0_1_0_features { hello_0_1_0 = f; }; };
-  hello_0_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
-    crateName = "hello";
-    version = "0.1.0";
-    authors = [ "pe@pijul.org <pe@pijul.org>" ];
-    src = ./.;
-    inherit dependencies buildDependencies features;
-  };
-  hello_0_1_0 = { features?(hello_0_1_0_features {}) }: hello_0_1_0_ {};
-  hello_0_1_0_features = f: updateFeatures f (rec {
-        hello_0_1_0.default = (f.hello_0_1_0.default or true);
-    }) [ ];
-}
-```
+tool called `crate2Nix` (`nix-shell -p crate2nix`), which can be
+used to turn a `Cargo.lock` into a Nix expression.  That Nix
+expression calls `rustc` directly (hence bypassing Cargo), and can
+be used to compile a crate and all its dependencies.
 
-In particular, note that the argument given as `--src` is copied
-verbatim to the source. If we look at a more complicated
-dependencies, for instance by adding a single line `libc="*"` to our
-`Cargo.toml`, we first need to run `cargo build` to update the
-`Cargo.lock`. Then, `carnix` needs to be run again, and produces the
-following nix file:
-
-```nix
-# Generated by carnix 0.6.5: carnix -o hello.nix --src ./. Cargo.lock --standalone
-{ stdenv, buildRustCrate, fetchgit }:
-let kernel = stdenv.buildPlatform.parsed.kernel.name;
-    # ... (content skipped)
-in
-rec {
-  hello = f: hello_0_1_0 { features = hello_0_1_0_features { hello_0_1_0 = f; }; };
-  hello_0_1_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
-    crateName = "hello";
-    version = "0.1.0";
-    authors = [ "pe@pijul.org <pe@pijul.org>" ];
-    src = ./.;
-    inherit dependencies buildDependencies features;
-  };
-  libc_0_2_36_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate {
-    crateName = "libc";
-    version = "0.2.36";
-    authors = [ "The Rust Project Developers" ];
-    sha256 = "01633h4yfqm0s302fm0dlba469bx8y6cs4nqc8bqrmjqxfxn515l";
-    inherit dependencies buildDependencies features;
-  };
-  hello_0_1_0 = { features?(hello_0_1_0_features {}) }: hello_0_1_0_ {
-    dependencies = mapFeatures features ([ libc_0_2_36 ]);
-  };
-  hello_0_1_0_features = f: updateFeatures f (rec {
-    hello_0_1_0.default = (f.hello_0_1_0.default or true);
-    libc_0_2_36.default = true;
-  }) [ libc_0_2_36_features ];
-  libc_0_2_36 = { features?(libc_0_2_36_features {}) }: libc_0_2_36_ {
-    features = mkFeatures (features.libc_0_2_36 or {});
-  };
-  libc_0_2_36_features = f: updateFeatures f (rec {
-    libc_0_2_36.default = (f.libc_0_2_36.default or true);
-    libc_0_2_36.use_std =
-      (f.libc_0_2_36.use_std or false) ||
-      (f.libc_0_2_36.default or false) ||
-      (libc_0_2_36.default or false);
-  }) [];
-}
-```
-
-Here, the `libc` crate has no `src` attribute, so `buildRustCrate`
-will fetch it from [crates.io](https://crates.io). A `sha256`
-attribute is still needed for Nix purity.
+See [`crate2nix`'s documentation](https://github.com/kolloch/crate2nix#known-restrictions)
+for instructions on how to use it.
 
 ### Handling external dependencies {#handling-external-dependencies}
 
@@ -802,22 +770,7 @@ general. A number of other parameters can be overridden:
   };
   ```
 
-### Features {#features}
-
-One can also supply features switches. For example, if we want to
-compile `diesel_cli` only with the `postgres` feature, and no default
-features, we would write:
-
-```nix
-(callPackage ./diesel.nix {}).diesel {
-  default = false;
-  postgres = true;
-}
-```
-
-Where `diesel.nix` is the file generated by Carnix, as explained above.
-
-## Setting Up `nix-shell` {#setting-up-nix-shell}
+### Setting Up `nix-shell` {#setting-up-nix-shell}
 
 Oftentimes you want to develop code from within `nix-shell`. Unfortunately
 `buildRustCrate` does not support common `nix-shell` operations directly
@@ -861,31 +814,61 @@ $ cargo build
 $ cargo test
 ```
 
-### Controlling Rust Version Inside `nix-shell` {#controlling-rust-version-inside-nix-shell}
+## Using community maintained Rust toolchains {#using-community-maintained-rust-toolchains}
+
+::: {.note}
+Note: The following projects cannot be used within nixpkgs since [IFD](#ssec-import-from-derivation) is disallowed.
+To package things that require Rust nightly, `RUSTC_BOOTSTRAP = true;` can sometimes be used as a hack.
+:::
+
+There are two community maintained approaches to Rust toolchain management:
+- [oxalica's Rust overlay](https://github.com/oxalica/rust-overlay)
+- [fenix](https://github.com/nix-community/fenix)
+
+Despite their names, both projects provide a similar set of packages and overlays under different APIs.
+
+Oxalica's overlay allows you to select a particular Rust version without you providing a hash or a flake input,
+but comes with a larger git repository than fenix.
+
+Fenix also provides rust-analyzer nightly in addition to the Rust toolchains.
+
+Both oxalica's overlay and fenix integrate better with Nix and its caching.
+Because of this and their ergonomics, either of those community projects
+should be preferred to Mozilla's Rust overlay ([nixpkgs-mozilla](https://github.com/mozilla/nixpkgs-mozilla)).
+
+The following documentation demonstrates examples using fenix and oxalica's Rust overlay
+with `nix-shell` and building derivations. More advanced usages like flake usage
+are documented in their own repositories.
+
+### Using Rust nightly with `nix-shell` {#using-rust-nightly-with-nix-shell}
 
-To control your rust version (i.e. use nightly) from within `shell.nix` (or
-other nix expressions) you can use the following `shell.nix`
+Here is a simple `shell.nix` that provides Rust nightly (default profile) using fenix:
 
 ```nix
-# Latest Nightly
-with import <nixpkgs> {};
-let src = fetchFromGitHub {
-      owner = "mozilla";
-      repo = "nixpkgs-mozilla";
-      # commit from: 2019-05-15
-      rev = "9f35c4b09fd44a77227e79ff0c1b4b6a69dff533";
-      sha256 = "18h0nvh55b5an4gmlgfbvwbyqj91bklf1zymis6lbdh75571qaz0";
-   };
+with import <nixpkgs> { };
+let
+  fenix = callPackage
+    (fetchFromGitHub {
+      owner = "nix-community";
+      repo = "fenix";
+      # commit from: 2023-03-03
+      rev = "e2ea04982b892263c4d939f1cc3bf60a9c4deaa1";
+      hash = "sha256-AsOim1A8KKtMWIxG+lXh5Q4P2bhOZjoUhFWJ1EuZNNk=";
+    })
+    { };
 in
-with import "${src.out}/rust-overlay.nix" pkgs pkgs;
-stdenv.mkDerivation {
+mkShell {
   name = "rust-env";
-  buildInputs = [
-    # Note: to use stable, just replace `nightly` with `stable`
-    latest.rustChannels.nightly.rust
+  nativeBuildInputs = [
+    # Note: to use stable, just replace `default` with `stable`
+    fenix.default.toolchain
 
-    # Add some extra dependencies from `pkgs`
-    pkg-config openssl
+    # Example Build-time Additional Dependencies
+    pkg-config
+  ];
+  buildInputs = [
+    # Example Run-time Additional Dependencies
+    openssl
   ];
 
   # Set Environment Variables
@@ -893,116 +876,66 @@ stdenv.mkDerivation {
 }
 ```
 
-Now run:
+Save this to `shell.nix`, then run:
 
 ```ShellSession
 $ rustc --version
-rustc 1.26.0-nightly (188e693b3 2018-03-26)
+rustc 1.69.0-nightly (13471d3b2 2023-03-02)
 ```
 
 To see that you are using nightly.
 
-## Using community Rust overlays {#using-community-rust-overlays}
-
-There are two community maintained approaches to Rust toolchain management:
-- [oxalica's Rust overlay](https://github.com/oxalica/rust-overlay)
-- [fenix](https://github.com/nix-community/fenix)
-
-Oxalica's overlay allows you to select a particular Rust version and components.
-See [their documentation](https://github.com/oxalica/rust-overlay#rust-overlay) for more
-detailed usage.
+Oxalica's Rust overlay has more complete examples of `shell.nix` (and cross compilation) under its
+[`examples` directory](https://github.com/oxalica/rust-overlay/tree/e53e8853aa7b0688bc270e9e6a681d22e01cf299/examples).
 
-Fenix is an alternative to `rustup` and can also be used as an overlay.
+### Using Rust nightly in a derivation with `buildRustPackage` {#using-rust-nightly-in-a-derivation-with-buildrustpackage}
 
-Both oxalica's overlay and fenix better integrate with nix and cache optimizations.
-Because of this and ergonomics, either of those community projects
-should be preferred to the Mozilla's Rust overlay (`nixpkgs-mozilla`).
+You can also use Rust nightly to build rust packages using `makeRustPlatform`.
+The below snippet demonstrates invoking `buildRustPackage` with a Rust toolchain from oxalica's overlay:
 
-### How to select a specific `rustc` and toolchain version {#how-to-select-a-specific-rustc-and-toolchain-version}
-
-You can consume the oxalica overlay and use it to grab a specific Rust toolchain version.
-Here is an example `shell.nix` showing how to grab the current stable toolchain:
 ```nix
-{ pkgs ? import <nixpkgs> {
-    overlays = [
-      (import (fetchTarball "https://github.com/oxalica/rust-overlay/archive/master.tar.gz"))
-    ];
-  }
-}:
-pkgs.mkShell {
-  nativeBuildInputs = with pkgs; [
-    pkg-config
-    rust-bin.stable.latest.minimal
-  ];
-}
-```
-
-You can try this out by:
-1. Saving that to `shell.nix`
-2. Executing `nix-shell --pure --command 'rustc --version'`
-
-As of writing, this prints out `rustc 1.56.0 (09c42c458 2021-10-18)`.
-
-### How to use an overlay toolchain in a derivation  {#how-to-use-an-overlay-toolchain-in-a-derivation}
-
-You can also use an overlay's Rust toolchain with `buildRustPackage`.
-The below snippet demonstrates invoking `buildRustPackage` with an oxalica overlay selected Rust toolchain:
-```nix
-with import <nixpkgs> {
+with import <nixpkgs>
+{
   overlays = [
     (import (fetchTarball "https://github.com/oxalica/rust-overlay/archive/master.tar.gz"))
   ];
 };
+let
+  rustPlatform = makeRustPlatform {
+    cargo = rust-bin.stable.latest.minimal;
+    rustc = rust-bin.stable.latest.minimal;
+  };
+in
 
 rustPlatform.buildRustPackage rec {
   pname = "ripgrep";
   version = "12.1.1";
-  nativeBuildInputs = [
-    rust-bin.stable.latest.minimal
-  ];
 
   src = fetchFromGitHub {
     owner = "BurntSushi";
     repo = "ripgrep";
     rev = version;
-    sha256 = "1hqps7l5qrjh9f914r5i6kmcz6f1yb951nv4lby0cjnp5l253kps";
+    hash = "sha256-+s5RBC3XSgb8omTbUNLywZnP6jSxZBKSS1BmXOjRF8M=";
   };
 
-  cargoSha256 = "03wf9r2csi6jpa7v5sw5lpxkrk4wfzwmzx7k3991q3bdjzcwnnwp";
+  cargoHash = "sha256-l1vL2ZdtDRxSGvP0X/l3nMw8+6WF67KPutJEzUROjg8=";
+
+  doCheck = false;
 
   meta = with lib; {
     description = "A fast line-oriented regex search tool, similar to ag and ack";
     homepage = "https://github.com/BurntSushi/ripgrep";
-    license = licenses.unlicense;
-    maintainers = [ maintainers.tailhook ];
+    license = with licenses; [ mit unlicense ];
+    maintainers = with maintainers; [ tailhook ];
   };
 }
 ```
 
 Follow the below steps to try that snippet.
-1. create a new directory
 1. save the above snippet as `default.nix` in that directory
-1. cd into that directory and run `nix-build`
-
-### Rust overlay installation {#rust-overlay-installation}
-
-You can use this overlay by either changing your local nixpkgs configuration,
-or by adding the overlay declaratively in a nix expression,  e.g. in `configuration.nix`.
-For more information see [the manual on installing overlays](#sec-overlays-install).
-
-### Declarative Rust overlay installation {#declarative-rust-overlay-installation}
-
-This snippet shows how to use oxalica's Rust overlay.
-Add the following to your `configuration.nix`, `home-configuration.nix`, `shell.nix`, or similar:
-
-```nix
-{ pkgs ? import <nixpkgs> {
-    overlays = [
-      (import (builtins.fetchTarball "https://github.com/oxalica/rust-overlay/archive/master.tar.gz"))
-      # Further overlays go here
-    ];
-  };
-};
-```
+2. cd into that directory and run `nix-build`
 
-Note that this will fetch the latest overlay version when rebuilding your system.
+Fenix also has examples with `buildRustPackage`,
+[crane](https://github.com/ipetkov/crane),
+[naersk](https://github.com/nix-community/naersk),
+and cross compilation in its [Examples](https://github.com/nix-community/fenix#examples) section.
diff --git a/nixpkgs/doc/languages-frameworks/swift.section.md b/nixpkgs/doc/languages-frameworks/swift.section.md
new file mode 100644
index 000000000000..1cc452cc9b9b
--- /dev/null
+++ b/nixpkgs/doc/languages-frameworks/swift.section.md
@@ -0,0 +1,176 @@
+# Swift {#swift}
+
+The Swift compiler is provided by the `swift` package:
+
+```sh
+# Compile and link a simple executable.
+nix-shell -p swift --run 'swiftc -' <<< 'print("Hello world!")'
+# Run it!
+./main
+```
+
+The `swift` package also provides the `swift` command, with some caveats:
+
+- Swift Package Manager (SwiftPM) is packaged separately as `swiftpm`. If you
+  need functionality like `swift build`, `swift run`, `swift test`, you must
+  also add the `swiftpm` package to your closure.
+- On Darwin, the `swift repl` command requires an Xcode installation. This is
+  because it uses the system LLDB debugserver, which has special entitlements.
+
+## Module search paths {#ssec-swift-module-search-paths}
+
+Like other toolchains in Nixpkgs, the Swift compiler executables are wrapped
+to help Swift find your application's dependencies in the Nix store. These
+wrappers scan the `buildInputs` of your package derivation for specific
+directories where Swift modules are placed by convention, and automatically
+add those directories to the Swift compiler search paths.
+
+Swift follows different conventions depending on the platform. The wrappers
+look for the following directories:
+
+- On Darwin platforms: `lib/swift/macosx`
+  (If not targeting macOS, replace `macosx` with the Xcode platform name.)
+- On other platforms: `lib/swift/linux/x86_64`
+  (Where `linux` and `x86_64` are from lowercase `uname -sm`.)
+- For convenience, Nixpkgs also adds simply `lib/swift` to the search path.
+  This can save a bit of work packaging Swift modules, because many Nix builds
+  will produce output for just one target anyway.
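+
+As an illustrative sketch (the module and file names are hypothetical), a
+library derivation can install its build products into one of these
+directories; dependent packages then only need to list it in `buildInputs`:
+
+```nix
+stdenv.mkDerivation {
+  pname = "mymodule";
+  version = "0.1.0";
+  # ...
+
+  installPhase = ''
+    # Use the convenience path that the wrappers always search.
+    mkdir -p $out/lib/swift
+    cp MyModule.swiftmodule libMyModule.so $out/lib/swift/
+  '';
+}
+```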
+
+## Core libraries {#ssec-swift-core-libraries}
+
+In addition to the standard library, the Swift toolchain contains some
+additional 'core libraries' that, on Apple platforms, are normally distributed
+as part of the OS or Xcode. These are packaged separately in Nixpkgs, and can
+be found (for use in `buildInputs`) as:
+
+- `swiftPackages.Dispatch`
+- `swiftPackages.Foundation`
+- `swiftPackages.XCTest`
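+
+As a brief illustration of how these fit together, here is a minimal sketch
+(the package name and source layout are illustrative, not taken from the manual)
+of a derivation that compiles a program importing Foundation. The wrapped
+`swiftc` finds the module because `swiftPackages.Foundation` is listed in
+`buildInputs`:
+
+```nix
+{ stdenv, swift, swiftPackages }:
+
+stdenv.mkDerivation {
+  pname = "hello-foundation";
+  version = "0.1";
+
+  # Assumes a directory containing a single `main.swift` that imports Foundation.
+  src = ./.;
+
+  # The compiler runs at build time.
+  nativeBuildInputs = [ swift ];
+  # Core libraries are picked up via the wrapper's buildInputs scan.
+  buildInputs = [ swiftPackages.Foundation ];
+
+  buildPhase = ''
+    swiftc main.swift -o hello
+  '';
+
+  installPhase = ''
+    mkdir -p $out/bin
+    cp hello $out/bin/
+  '';
+}
+```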
+
+## Packaging with SwiftPM {#ssec-swift-packaging-with-swiftpm}
+
+Nixpkgs includes a small helper `swiftpm2nix` that can fetch your SwiftPM
+dependencies for you, when you need to write a Nix expression to package your
+application.
+
+The first step is to run the generator:
+
+```sh
+cd /path/to/my/project
+# Enter a Nix shell with the required tools.
+nix-shell -p swift swiftpm swiftpm2nix
+# First, make sure the workspace is up-to-date.
+swift package resolve
+# Now generate the Nix code.
+swiftpm2nix
+```
+
+This produces some files in a directory `nix`, which will be part of your Nix
+expression. The next step is to write that expression:
+
+```nix
+{ stdenv, swift, swiftpm, swiftpm2nix, fetchFromGitHub }:
+
+let
+  # Pass the generated files to the helper.
+  generated = swiftpm2nix.helpers ./nix;
+in
+
+stdenv.mkDerivation rec {
+  pname = "myproject";
+  version = "0.0.0";
+
+  src = fetchFromGitHub {
+    owner = "nixos";
+    repo = pname;
+    rev = version;
+    hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
+  };
+
+  # Including SwiftPM as a nativeBuildInput provides a buildPhase for you.
+  # This by default performs a release build using SwiftPM, essentially:
+  #   swift build -c release
+  nativeBuildInputs = [ swift swiftpm ];
+
+  # The helper provides a configure snippet that will prepare all dependencies
+  # in the correct place, where SwiftPM expects them.
+  configurePhase = generated.configure;
+
+  installPhase = ''
+    # This is a special function that invokes swiftpm to find the location
+    # of the binaries it produced.
+    binPath="$(swiftpmBinPath)"
+    # Now perform any installation steps.
+    mkdir -p $out/bin
+    cp $binPath/myproject $out/bin/
+  '';
+}
+```
+
+### Custom build flags {#ssec-swiftpm-custom-build-flags}
+
+If you'd like to build a different configuration than `release`:
+
+```nix
+swiftpmBuildConfig = "debug";
+```
+
+It is also possible to provide additional flags to `swift build`:
+
+```nix
+swiftpmFlags = [ "--disable-dead-strip" ];
+```
+
+The default `buildPhase` already passes `-j` for parallel building.
+
+If these two customization options are insufficient, simply provide your own
+`buildPhase` that invokes `swift build`.
+
+### Running tests {#ssec-swiftpm-running-tests}
+
+Including `swiftpm` in your `nativeBuildInputs` also provides a default
+`checkPhase`, but it must be enabled with:
+
+```nix
+doCheck = true;
+```
+
+This essentially runs: `swift test -c release`
+
+### Patching dependencies {#ssec-swiftpm-patching-dependencies}
+
+In some cases, it may be necessary to patch a SwiftPM dependency. SwiftPM
+dependencies are located in `.build/checkouts`, but the `swiftpm2nix` helper
+provides these as symlinks to read-only `/nix/store` paths. In order to patch
+them, we need to make them writable.
+
+A special function `swiftpmMakeMutable` is available to replace the symlink
+with a writable copy:
+
+```nix
+configurePhase = generated.configure + ''
+  # Replace the dependency symlink with a writable copy.
+  swiftpmMakeMutable swift-crypto
+  # Now apply a patch.
+  patch -p1 -d .build/checkouts/swift-crypto -i ${./some-fix.patch}
+'';
+```
+
+## Considerations for custom build tools {#ssec-swift-considerations-for-custom-build-tools}
+
+### Linking the standard library {#ssec-swift-linking-the-standard-library}
+
+The `swift` package has a separate `lib` output containing just the Swift
+standard library, to prevent Swift applications needing a dependency on the
+full Swift compiler at run-time. Linking with the Nixpkgs Swift toolchain
+already ensures binaries correctly reference the `lib` output.
+
+Sometimes, Swift is used only to compile part of a mixed codebase, and the
+link step is performed manually. Custom build tools often locate the standard
+library relative to the `swift` compiler executable. The result will work, but
+because that path ends up in the binary, the binary gains the Swift compiler
+as an unintended dependency.
+
+In this case, you should investigate how your build process discovers the
+standard library, and override the path. The correct path will be something
+like: `"${swift.swift.lib}/${swift.swiftModuleSubdir}"`
diff --git a/nixpkgs/doc/languages-frameworks/texlive.section.md b/nixpkgs/doc/languages-frameworks/texlive.section.md
index 060f5c647c29..72ab14126bed 100644
--- a/nixpkgs/doc/languages-frameworks/texlive.section.md
+++ b/nixpkgs/doc/languages-frameworks/texlive.section.md
@@ -40,26 +40,33 @@ Since release 15.09 there is a new TeX Live packaging that lives entirely under
 
 ## Custom packages {#sec-language-texlive-custom-packages}
 
+You may find that you need to use an external TeX package. A derivation for such a package has to provide the contents of the "texmf" directory in its output and set the appropriate `tlType` attribute (one of `"run"`, `"bin"`, `"doc"`, `"source"`). Dependencies on other TeX packages can be listed in the attribute `tlDeps`.
 
-You may find that you need to use an external TeX package. A derivation for such package has to provide contents of the "texmf" directory in its output and provide the `tlType` attribute. Here is a (very verbose) example:
+Such a derivation must then be listed in the attribute `pkgs` of an attribute set passed to `texlive.combine`, for instance by passing `extraPkgs = { pkgs = [ custom_package ]; };`. Within Nixpkgs, `pkgs` should be part of the derivation itself, allowing users to call `texlive.combine { inherit (texlive) scheme-small; inherit some_tex_package; }`.
+
+Here is a (very verbose) example where the attribute `pkgs` is attached to the derivation itself, which requires creating a fixed point. See also the packages `auctex`, `eukleides`, `mftrace` for more examples.
 
 ```nix
 with import <nixpkgs> {};
 
 let
-  foiltex_run = stdenvNoCC.mkDerivation {
+  foiltex = stdenvNoCC.mkDerivation (finalAttrs: {
     pname = "latex-foiltex";
     version = "2.1.4b";
-    passthru.tlType = "run";
+    passthru = {
+      pkgs = [ finalAttrs.finalPackage ];
+      tlDeps = with texlive; [ latex ];
+      tlType = "run";
+    };
 
     srcs = [
       (fetchurl {
         url = "http://mirrors.ctan.org/macros/latex/contrib/foiltex/foiltex.dtx";
-        sha256 = "07frz0krpz7kkcwlayrwrj2a2pixmv0icbngyw92srp9fp23cqpz";
+        hash = "sha256-/2I2xHXpZi0S988uFsGuPV6hhMw8e0U5m/P8myf42R0=";
       })
       (fetchurl {
         url = "http://mirrors.ctan.org/macros/latex/contrib/foiltex/foiltex.ins";
-        sha256 = "09wkyidxk3n3zvqxfs61wlypmbhi1pxmjdi1kns9n2ky8ykbff99";
+        hash = "sha256-KTm3pkd+Cpu0nSE2WfsNEa56PeXBaNfx/sOO2Vv0kyc=";
       })
     ];
 
@@ -102,8 +109,7 @@ let
       maintainers = with maintainers; [ veprbl ];
       platforms = platforms.all;
     };
-  };
-  foiltex = { pkgs = [ foiltex_run ]; };
+  });
 
   latex_with_foiltex = texlive.combine {
     inherit (texlive) scheme-small;
diff --git a/nixpkgs/doc/languages-frameworks/vim.section.md b/nixpkgs/doc/languages-frameworks/vim.section.md
index ec0e60389155..bf0d663179b9 100644
--- a/nixpkgs/doc/languages-frameworks/vim.section.md
+++ b/nixpkgs/doc/languages-frameworks/vim.section.md
@@ -8,14 +8,23 @@ Loading can be deferred; see examples.
 At the moment we support two different methods for managing plugins:
 
 - Vim packages (*recommended*)
-- vim-plug
+- vim-plug (vim only)
+
+Right now, two Vim packages are available: `vim`, which has most features that require extra
+dependencies disabled, and `vim-full`, which has them configurable and enabled by default.
+
+::: {.note}
+`vim_configurable` is a deprecated alias for `vim-full` and refers to the fact that its
+build-time features are configurable. It has nothing to do with user configuration,
+and both the `vim` and `vim-full` packages can be customized as explained in the next section.
+:::
 
 ## Custom configuration {#custom-configuration}
 
 Adding custom .vimrc lines can be done using the following code:
 
 ```nix
-vim_configurable.customize {
+vim-full.customize {
   # `name` optionally specifies the name of the executable and package
   name = "vim-with-plugins";
 
@@ -62,7 +71,7 @@ neovim-qt.override {
 To store your plugins in Vim packages (the native Vim plugin manager, see `:help packages`) the following example can be used:
 
 ```nix
-vim_configurable.customize {
+vim-full.customize {
   vimrcConfig.packages.myVimPackage = with pkgs.vimPlugins; {
     # loaded on launch
     start = [ youcompleteme fugitive ];
@@ -101,7 +110,7 @@ The resulting package can be added to `packageOverrides` in `~/.nixpkgs/config.n
 ```nix
 {
   packageOverrides = pkgs: with pkgs; {
-    myVim = vim_configurable.customize {
+    myVim = vim-full.customize {
       # `name` specifies the name of the executable and package
       name = "vim-with-plugins";
       # add here code from the example section
@@ -125,13 +134,13 @@ If one of your favourite plugins isn't packaged, you can package it yourself:
 { config, pkgs, ... }:
 
 let
-  easygrep = pkgs.vimUtils.buildVimPlugin {
+  easygrep = pkgs.vimUtils.buildVimPluginFrom2Nix {
     name = "vim-easygrep";
     src = pkgs.fetchFromGitHub {
       owner = "dkprice";
       repo = "vim-easygrep";
       rev = "d0c36a77cc63c22648e792796b1815b44164653a";
-      sha256 = "0y2p5mz0d5fhg6n68lhfhl8p4mlwkb82q337c22djs4w5zyzggbc";
+      hash = "sha256-bL33/S+caNmEYGcMLNCanFZyEYUOUmSsedCVBn4tV3g=";
     };
   };
 in
@@ -155,8 +164,10 @@ in
 }
 ```
 
-### Specificities for some plugins
-#### Treesitter
+If your package requires building specific parts, use `pkgs.vimUtils.buildVimPlugin` instead.
+
+### Specificities for some plugins {#vim-plugin-specificities}
+#### Treesitter {#vim-plugin-treesitter}
 
 By default `nvim-treesitter` encourages you to download, compile and install
 the required Treesitter grammars at run time with `:TSInstall`. This works
@@ -170,8 +181,8 @@ of precompiled grammars, you can use `nvim-treesitter.withPlugins` function:
       start = [
         (nvim-treesitter.withPlugins (
           plugins: with plugins; [
-            tree-sitter-nix
-            tree-sitter-python
+            nix
+            python
           ]
         ))
       ];
@@ -180,7 +191,7 @@ of precompiled grammars, you can use `nvim-treesitter.withPlugins` function:
 })
 ```
 
-To enable all grammars packaged in nixpkgs, use `(pkgs.vimPlugins.nvim-treesitter.withPlugins (plugins: pkgs.tree-sitter.allGrammars))`.
+To enable all grammars packaged in nixpkgs, use `pkgs.vimPlugins.nvim-treesitter.withAllGrammars`.
 
 ## Managing plugins with vim-plug {#managing-plugins-with-vim-plug}
 
@@ -188,7 +199,7 @@ To use [vim-plug](https://github.com/junegunn/vim-plug) to manage your Vim
 plugins the following example can be used:
 
 ```nix
-vim_configurable.customize {
+vim-full.customize {
   vimrcConfig.packages.myVimPackage = with pkgs.vimPlugins; {
     # loaded on launch
     plug.plugins = [ youcompleteme fugitive phpCompletion elm-vim ];
@@ -196,24 +207,14 @@ vim_configurable.customize {
 }
 ```
 
-For Neovim the syntax is:
+Note: this is no longer possible for Neovim.
 
-```nix
-neovim.override {
-  configure = {
-    customRC = ''
-      # your custom configuration goes here! 
-    '';
-    plug.plugins = with pkgs.vimPlugins; [
-      vim-go
-    ];
-  };
-}
-```
 
 ## Adding new plugins to nixpkgs {#adding-new-plugins-to-nixpkgs}
 
-Nix expressions for Vim plugins are stored in [pkgs/applications/editors/vim/plugins](https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/editors/vim/plugins). For the vast majority of plugins, Nix expressions are automatically generated by running [`./update.py`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/update.py). This creates a [generated.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/generated.nix) file based on the plugins listed in [vim-plugin-names](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/vim-plugin-names). Plugins are listed in alphabetical order in `vim-plugin-names` using the format `[github username]/[repository]@[gitref]`. For example https://github.com/scrooloose/nerdtree becomes `scrooloose/nerdtree`.
+Nix expressions for Vim plugins are stored in [pkgs/applications/editors/vim/plugins](https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/editors/vim/plugins). For the vast majority of plugins, Nix expressions are automatically generated by running [`./update.py`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/update.py). This creates a [generated.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/generated.nix) file based on the plugins listed in [vim-plugin-names](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/vim-plugin-names).
+
+After running `./update.py`, if nvim-treesitter received an update, also run [`nvim-treesitter/update.py`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/update.py) to update the tree sitter grammars for `nvim-treesitter`.
 
 Some plugins require overrides in order to function properly. Overrides are placed in [overrides.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/overrides.nix). Overrides are most often required when a plugin requires some dependencies, or extra steps are required during the build process. For example `deoplete-fish` requires both `deoplete-nvim` and `vim-fish`, and so the following override was added:
 
@@ -225,9 +226,9 @@ deoplete-fish = super.deoplete-fish.overrideAttrs(old: {
 
 Sometimes plugins require an override that must be changed when the plugin is updated. This can cause issues when Vim plugins are auto-updated but the associated override isn't updated. For these plugins, the override should be written so that it specifies all information required to install the plugin, and running `./update.py` doesn't change the derivation for the plugin. Manually updating the override is required to update these types of plugins. An example of such a plugin is `LanguageClient-neovim`.
 
-To add a new plugin, run `./update.py --add "[owner]/[name]"`. **NOTE**: This script automatically commits to your git repository. Be sure to check out a fresh branch before running.
+To add a new plugin, run `./update.py add "[owner]/[name]"`. **NOTE**: This script automatically commits to your git repository. Be sure to check out a fresh branch before running.
 
-Finally, there are some plugins that are also packaged in nodePackages because they have Javascript-related build steps, such as running webpack. Those plugins are not listed in `vim-plugin-names` or managed by `update.py` at all, and are included separately in `overrides.nix`. Currently, all these plugins are related to the `coc.nvim` ecosystem of the Language Server Protocol integration with vim/neovim.
+Finally, there are some plugins that are also packaged in nodePackages because they have Javascript-related build steps, such as running webpack. Those plugins are not listed in `vim-plugin-names` or managed by `update.py` at all, and are included separately in `overrides.nix`. Currently, all these plugins are related to the `coc.nvim` ecosystem of the Language Server Protocol integration with Vim/Neovim.
 
 ## Updating plugins in nixpkgs {#updating-plugins-in-nixpkgs}
 
@@ -243,10 +244,27 @@ Alternatively, set the number of processes to a lower count to avoid rate-limiti
 ./pkgs/applications/editors/vim/plugins/update.py --proc 1
 ```
 
-## Important repositories {#important-repositories}
+## How to maintain an out-of-tree overlay of Vim plugins? {#vim-out-of-tree-overlays}
+
+You can use the updater script to generate basic packages out of a custom vim
+plugin list:
+
+```
+pkgs/applications/editors/vim/plugins/update.py -i vim-plugin-names -o generated.nix --no-commit
+```
+
+with the contents of `vim-plugin-names` being for example:
 
-- [vim-pi](https://bitbucket.org/vimcommunity/vim-pi) is a plugin repository
-  from VAM plugin manager meant to be used by others as well used by
+```
+repo,branch,alias
+pwntester/octo.nvim,,
+```
+
+You can then reference the generated vim plugins via:
+
+```nix
+myVimPlugins = pkgs.vimPlugins.extend (
+  (pkgs.callPackage ./generated.nix {})
+);
+```
 
-- [vim2nix](https://github.com/MarcWeber/vim-addon-vim2nix) which generates the
-  .nix code
diff --git a/nixpkgs/doc/manpage-urls.json b/nixpkgs/doc/manpage-urls.json
new file mode 100644
index 000000000000..e83708dd64d6
--- /dev/null
+++ b/nixpkgs/doc/manpage-urls.json
@@ -0,0 +1,32 @@
+{
+  "gnunet.conf(5)": "https://docs.gnunet.org/users/configuration.html",
+  "mpd(1)": "https://mpd.readthedocs.io/en/latest/mpd.1.html",
+  "mpd.conf(5)": "https://mpd.readthedocs.io/en/latest/mpd.conf.5.html",
+  "nix.conf(5)": "https://nixos.org/manual/nix/stable/command-ref/conf-file.html",
+
+  "journald.conf(5)": "https://www.freedesktop.org/software/systemd/man/journald.conf.html",
+  "logind.conf(5)": "https://www.freedesktop.org/software/systemd/man/logind.conf.html",
+  "networkd.conf(5)": "https://www.freedesktop.org/software/systemd/man/networkd.conf.html",
+  "systemd.automount(5)": "https://www.freedesktop.org/software/systemd/man/systemd.automount.html",
+  "systemd.exec(5)": "https://www.freedesktop.org/software/systemd/man/systemd.exec.html",
+  "systemd.link(5)": "https://www.freedesktop.org/software/systemd/man/systemd.link.html",
+  "systemd.mount(5)": "https://www.freedesktop.org/software/systemd/man/systemd.mount.html",
+  "systemd.netdev(5)": "https://www.freedesktop.org/software/systemd/man/systemd.netdev.html",
+  "systemd.network(5)": "https://www.freedesktop.org/software/systemd/man/systemd.network.html",
+  "systemd.nspawn(5)": "https://www.freedesktop.org/software/systemd/man/systemd.nspawn.html",
+  "systemd.path(5)": "https://www.freedesktop.org/software/systemd/man/systemd.path.html",
+  "systemd.resource-control(5)": "https://www.freedesktop.org/software/systemd/man/systemd.resource-control.html",
+  "systemd.scope(5)": "https://www.freedesktop.org/software/systemd/man/systemd.scope.html",
+  "systemd.service(5)": "https://www.freedesktop.org/software/systemd/man/systemd.service.html",
+  "systemd.slice(5)": "https://www.freedesktop.org/software/systemd/man/systemd.slice.html",
+  "systemd.socket(5)": "https://www.freedesktop.org/software/systemd/man/systemd.socket.html",
+  "systemd.timer(5)": "https://www.freedesktop.org/software/systemd/man/systemd.timer.html",
+  "systemd.unit(5)": "https://www.freedesktop.org/software/systemd/man/systemd.unit.html",
+  "systemd-system.conf(5)": "https://www.freedesktop.org/software/systemd/man/systemd-system.conf.html",
+  "systemd-user.conf(5)": "https://www.freedesktop.org/software/systemd/man/systemd-user.conf.html",
+  "timesyncd.conf(5)": "https://www.freedesktop.org/software/systemd/man/timesyncd.conf.html",
+  "tmpfiles.d(5)": "https://www.freedesktop.org/software/systemd/man/tmpfiles.d.html",
+  "systemd.time(7)": "https://www.freedesktop.org/software/systemd/man/systemd.time.html",
+  "systemd-fstab-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-fstab-generator.html",
+  "systemd-networkd-wait-online.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-networkd-wait-online.service.html"
+}
diff --git a/nixpkgs/doc/manual.xml b/nixpkgs/doc/manual.xml
index ccbaf40586d1..de3d40f553c0 100644
--- a/nixpkgs/doc/manual.xml
+++ b/nixpkgs/doc/manual.xml
@@ -1,19 +1,24 @@
 <book xmlns="http://docbook.org/ns/docbook"
-      xmlns:xi="http://www.w3.org/2001/XInclude">
+      xmlns:xi="http://www.w3.org/2001/XInclude"
+      xml:id="nixpkgs-manual">
  <info>
   <title>Nixpkgs Manual</title>
   <subtitle>Version <xi:include href=".version" parse="text" />
   </subtitle>
  </info>
  <xi:include href="preface.chapter.xml" />
- <part>
+ <part xml:id="part-using">
   <title>Using Nixpkgs</title>
   <xi:include href="using/configuration.chapter.xml" />
   <xi:include href="using/overlays.chapter.xml" />
   <xi:include href="using/overrides.chapter.xml" />
-  <xi:include href="functions.xml" />
  </part>
  <part>
+  <title>Nixpkgs <code>lib</code></title>
+  <xi:include href="functions.xml" />
+  <xi:include href="module-system/module-system.chapter.xml" />
+ </part>
+ <part xml:id="part-stdenv">
   <title>Standard environment</title>
   <xi:include href="stdenv/stdenv.chapter.xml" />
   <xi:include href="stdenv/meta.chapter.xml" />
@@ -21,7 +26,7 @@
   <xi:include href="stdenv/cross-compilation.chapter.xml" />
   <xi:include href="stdenv/platform-notes.chapter.xml" />
  </part>
- <part>
+ <part xml:id="part-builders">
   <title>Builders</title>
   <xi:include href="builders/fetchers.chapter.xml" />
   <xi:include href="builders/trivial-builders.chapter.xml" />
@@ -32,7 +37,7 @@
   <xi:include href="languages-frameworks/index.xml" />
   <xi:include href="builders/packages/index.xml" />
  </part>
- <part>
+ <part xml:id="part-contributing">
   <title>Contributing to Nixpkgs</title>
   <xi:include href="contributing/quick-start.chapter.xml" />
   <xi:include href="contributing/coding-conventions.chapter.xml" />
diff --git a/nixpkgs/doc/module-system/module-system.chapter.md b/nixpkgs/doc/module-system/module-system.chapter.md
new file mode 100644
index 000000000000..927f66073748
--- /dev/null
+++ b/nixpkgs/doc/module-system/module-system.chapter.md
@@ -0,0 +1,105 @@
+# Module System {#module-system}
+
+## Introduction {#module-system-introduction}
+
+The module system is a language for handling configuration, implemented as a Nix library.
+
+Compared to plain Nix, it adds documentation, type checking and composition or extensibility.
+
+::: {.note}
+This chapter is new and not complete yet. For a gentle introduction to the module system, in the context of NixOS, see [Writing NixOS Modules](https://nixos.org/manual/nixos/unstable/index.html#sec-writing-modules) in the NixOS manual.
+:::
+
+
+## `lib.evalModules` {#module-system-lib-evalModules}
+
+Evaluate a set of modules. This function is typically only used once per application (e.g. once in NixOS, once in Home Manager, ...).
+
+### Parameters {#module-system-lib-evalModules-parameters}
+
+#### `modules` {#module-system-lib-evalModules-param-modules}
+
+A list of modules. These are merged together to form the final configuration.
+<!-- TODO link to section about merging, TBD -->
+
+#### `specialArgs` {#module-system-lib-evalModules-param-specialArgs}
+
+An attribute set of module arguments that can be used in `imports`.
+
+This is in contrast to `config._module.args`, which is only available after all `imports` have been resolved.
+
+#### `class` {#module-system-lib-evalModules-param-class}
+
+If the `class` attribute is set and non-`null`, the module system will reject `imports` with a different `_class` declaration.
+
+The `class` value should be a string in lower [camel case](https://en.wikipedia.org/wiki/Camel_case).
+
+If applicable, the `class` should match the "prefix" of the attributes used in (experimental) [flakes](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#description). Some examples are:
+
+ - `nixos` as in `flake.nixosModules`
+ - `nixosTest`: modules that constitute a [NixOS VM test](https://nixos.org/manual/nixos/stable/index.html#sec-nixos-tests)
+<!-- We've only just started with `class`. You're invited to add a few more. -->
+
+#### `prefix` {#module-system-lib-evalModules-param-prefix}
+
+A list of strings representing the location at or below which all options are evaluated. This is used by `types.submodule` to improve error reporting and find the implicit `name` module argument.
+
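+A minimal sketch (standalone, assuming `<nixpkgs>` is available) tying these
+parameters together:
+
+```nix
+let
+  lib = import <nixpkgs/lib>;
+  result = lib.evalModules {
+    modules = [
+      # A module declaring an option.
+      { options.greeting = lib.mkOption { type = lib.types.str; default = "Hello"; }; }
+      # A module defining a value for that option.
+      { greeting = "Hi"; }
+    ];
+  };
+in
+result.config.greeting  # evaluates to "Hi"
+```
+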
+### Return value {#module-system-lib-evalModules-return-value}
+
+The result is an attribute set with the following attributes:
+
+#### `options` {#module-system-lib-evalModules-return-value-options}
+
+The nested attribute set of all option declarations.
+
+#### `config` {#module-system-lib-evalModules-return-value-config}
+
+The nested attribute set of all option values.
+
+#### `type` {#module-system-lib-evalModules-return-value-type}
+
+A module system type. This type is an instance of `types.submoduleWith` containing the current [`modules`](#module-system-lib-evalModules-param-modules).
+
+The option definitions that are typed with this type will extend the current set of modules, like [`extendModules`](#module-system-lib-evalModules-return-value-extendModules).
+
+However, the value returned from the type is just the [`config`](#module-system-lib-evalModules-return-value-config), like any submodule.
+
+If you're familiar with prototype inheritance, you can think of this `evalModules` invocation as the prototype, and usages of this type as the instances.
+
+This type is also available to the [`modules`](#module-system-lib-evalModules-param-modules) as the module argument `moduleType`.
+<!-- TODO: document the module arguments. Using moduleType is like saying: suppose this configuration was extended. -->
+
+#### `extendModules` {#module-system-lib-evalModules-return-value-extendModules}
+
+A function similar to `evalModules` but building on top of the already passed [`modules`](#module-system-lib-evalModules-param-modules). Its arguments, `modules` and `specialArgs`, are added to the existing values.
+
+If you're familiar with prototype inheritance, you can think of the current, actual `evalModules` invocation as the prototype, and the return value of `extendModules` as the instance.
+
+This functionality is also available to modules as the `extendModules` module argument.
+
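+A small sketch (standalone, assuming `<nixpkgs>` is available) of extending an
+existing evaluation with one more module:
+
+```nix
+let
+  lib = import <nixpkgs/lib>;
+  base = lib.evalModules {
+    modules = [
+      { options.greeting = lib.mkOption { type = lib.types.str; default = "Hello"; }; }
+    ];
+  };
+  # Re-evaluate with an additional module on top of the original ones.
+  extended = base.extendModules {
+    modules = [ { greeting = "Howdy"; } ];
+  };
+in
+[ base.config.greeting extended.config.greeting ]  # [ "Hello" "Howdy" ]
+```
+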
+::: {.note}
+
+**Evaluation Performance**
+
+`extendModules` returns a configuration that shares very little with the original `evalModules` invocation, because the module arguments may be different.
+
+So if you have a configuration that has been (or will be) largely evaluated, almost none of the computation is shared with the configuration returned by `extendModules`.
+
+The real work of module evaluation happens while computing the values in `config` and `options`, so multiple invocations of `extendModules` have a particularly small cost, as long as only the final `config` and `options` are evaluated.
+
+If you do reference multiple `config` (or `options`) from before and after `extendModules`, evaluation performance is the same as with multiple `evalModules` invocations, because the new modules' ability to override existing configuration fundamentally requires constructing a new `config` and `options` fixpoint.
+:::
+
+#### `_module` {#module-system-lib-evalModules-return-value-_module}
+
+A portion of the configuration tree which is elided from `config`.
+
+<!-- TODO: when markdown migration is complete, make _module docs visible again and reference _module docs. Maybe move those docs into this chapter? -->
+
+#### `_type` {#module-system-lib-evalModules-return-value-_type}
+
+A nominal type marker, always `"configuration"`.
+
+#### `class` {#module-system-lib-evalModules-return-value-_configurationClass}
+
+The [`class` argument](#module-system-lib-evalModules-param-class).
diff --git a/nixpkgs/doc/old/cross.txt b/nixpkgs/doc/old/cross.txt
index 064ca8d37b01..0f958e772b78 100644
--- a/nixpkgs/doc/old/cross.txt
+++ b/nixpkgs/doc/old/cross.txt
@@ -61,7 +61,7 @@ stdenv.mkDerivation {
   builder = ./builder.sh;
   src = fetchurl {
     url = "http://ftp.nluug.nl/gnu/binutils/binutils-2.16.1.tar.bz2";
-    sha256 = "1ian3kwh2vg6hr3ymrv48s04gijs539vzrq62xr76bxbhbwnz2np";
+    hash = "sha256-14pv+YKrL3NyFwbnv9MoWsZHgEZk5+pHhuZtAfkcVsU=";
   };
   inherit noSysDirs;
   configureFlags = [ "--target=arm-linux" ];
@@ -85,7 +85,7 @@ stdenv.mkDerivation {
   builder = ./builder.sh;
   src = fetchurl {
     url = "http://www.kernel.org/pub/linux/kernel/v2.6/linux-2.6.13.1.tar.bz2";
-    sha256 = "12qxmc827fjhaz53kjy7vyrzsaqcg78amiqsb3qm20z26w705lma";
+    hash = "sha256-qtICDjfiA1HxWBrHqtB5DCv9s9/HyznKV1C6IxCrHYs=";
   };
 }
 ---
@@ -151,8 +151,8 @@ stdenv.mkDerivation {
   name = "gcc-4.0.2-arm";
   builder = ./builder.sh;
   src = fetchurl {
-    url = ftp://ftp.nluug.nl/pub/gnu/gcc/gcc-4.0.2/gcc-core-4.0.2.tar.bz2;
-    sha256 = "02fxh0asflm8825w23l2jq1wvs7hbnam0jayrivg7zdv2ifnc0rc";
+    url = "ftp://ftp.nluug.nl/pub/gnu/gcc/gcc-4.0.2/gcc-core-4.0.2.tar.bz2";
+    hash = "sha256-LANmXRS7/fN2zF5JUJVd8OjNA5aCDsGLQKhSpxWA3Qk=";
   };
   # !!! apply only if noSysDirs is set
   patches = [./no-sys-dirs.patch ./gcc-inhibit.patch];
@@ -303,7 +303,7 @@ One of the compiler flags that GCC uses for this compiler is called X_CFLAGS.
 This is used by the Nix build process to set the dynamic linker, glibc
 in the case of i686-linux using the default Nix packages collection.
 
-Obiously, since we need to compile libstc++ for arm-linux with uClibc linking
+Obviously, since we need to compile libstdc++ for arm-linux with uClibc linking
 will not be done correctly: you can't link object files built for arm-linux
 with a glibc built for i686-linux.
 
diff --git a/nixpkgs/doc/preface.chapter.md b/nixpkgs/doc/preface.chapter.md
index 16f228272b3d..7aae7fa90591 100644
--- a/nixpkgs/doc/preface.chapter.md
+++ b/nixpkgs/doc/preface.chapter.md
@@ -24,8 +24,8 @@ Packages, including the Nix packages collection, are distributed through
 [channels](https://nixos.org/nix/manual/#sec-channels). The collection is
 distributed for users of Nix on non-NixOS distributions through the channel
 `nixpkgs`. Users of NixOS generally use one of the `nixos-*` channels, e.g.
-`nixos-19.09`, which includes all packages and modules for the stable NixOS
-19.09. Stable NixOS releases are generally only given
+`nixos-22.11`, which includes all packages and modules for the stable NixOS
+22.11. Stable NixOS releases are generally only given
 security updates. More up to date packages and modules are available via the
 `nixos-unstable` channel.
 
@@ -43,4 +43,4 @@ The binaries are made available via a [binary cache](https://cache.nixos.org).
 
 The current Nix expressions of the channels are available in the
 [`nixpkgs`](https://github.com/NixOS/nixpkgs) repository in branches
-that correspond to the channel names (e.g. `nixos-19.09-small`).
+that correspond to the channel names (e.g. `nixos-22.11-small`).
diff --git a/nixpkgs/doc/stdenv/cross-compilation.chapter.md b/nixpkgs/doc/stdenv/cross-compilation.chapter.md
index 0eff70de5ca1..e659e1803807 100644
--- a/nixpkgs/doc/stdenv/cross-compilation.chapter.md
+++ b/nixpkgs/doc/stdenv/cross-compilation.chapter.md
@@ -150,7 +150,7 @@ depsBuildBuild = [ buildPackages.stdenv.cc ];
 Add the following to your `mkDerivation` invocation.
 
 ```nix
-doCheck = stdenv.hostPlatform == stdenv.buildPlatform;
+doCheck = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
 ```
 
 #### Package using Meson needs to run binaries for the host platform during build. {#cross-meson-runs-host-code}
@@ -250,5 +250,5 @@ Thirdly, it is because everything target-mentioning only exists to accommodate c
 :::
 
 ::: {.note}
-If one explores Nixpkgs, they will see derivations with names like `gccCross`. Such `*Cross` derivations is a holdover from before we properly distinguished between the host and target platforms—the derivation with “Cross” in the name covered the `build = host != target` case, while the other covered the `host = target`, with build platform the same or not based on whether one was using its `.nativeDrv` or `.crossDrv`. This ugliness will disappear soon.
+If one explores Nixpkgs, they will see derivations with names like `gccCross`. Such `*Cross` derivations are a holdover from before we properly distinguished between the host and target platforms—the derivation with “Cross” in the name covered the `build = host != target` case, while the other covered the `host = target`, with build platform the same or not based on whether one was using its `.__spliced.buildHost` or `.__spliced.hostTarget`.
 :::
diff --git a/nixpkgs/doc/stdenv/meta.chapter.md b/nixpkgs/doc/stdenv/meta.chapter.md
index 475006b1259b..0cb2d6573dfc 100644
--- a/nixpkgs/doc/stdenv/meta.chapter.md
+++ b/nixpkgs/doc/stdenv/meta.chapter.md
@@ -11,58 +11,12 @@ meta = with lib; {
   '';
   homepage = "https://www.gnu.org/software/hello/manual/";
   license = licenses.gpl3Plus;
-  maintainers = [ maintainers.eelco ];
+  maintainers = with maintainers; [ eelco ];
   platforms = platforms.all;
 };
 ```
 
-Meta-attributes are not passed to the builder of the package. Thus, a change to a meta-attribute doesn’t trigger a recompilation of the package. The value of a meta-attribute must be a string.
-
-The meta-attributes of a package can be queried from the command-line using `nix-env`:
-
-```ShellSession
-$ nix-env -qa hello --json
-{
-    "hello": {
-        "meta": {
-            "description": "A program that produces a familiar, friendly greeting",
-            "homepage": "https://www.gnu.org/software/hello/manual/",
-            "license": {
-                "fullName": "GNU General Public License version 3 or later",
-                "shortName": "GPLv3+",
-                "url": "http://www.fsf.org/licensing/licenses/gpl.html"
-            },
-            "longDescription": "GNU Hello is a program that prints \"Hello, world!\" when you run it.\nIt is fully customizable.\n",
-            "maintainers": [
-                "Ludovic Court\u00e8s <ludo@gnu.org>"
-            ],
-            "platforms": [
-                "i686-linux",
-                "x86_64-linux",
-                "armv5tel-linux",
-                "armv7l-linux",
-                "mips32-linux",
-                "x86_64-darwin",
-                "i686-cygwin",
-                "i686-freebsd",
-                "x86_64-freebsd",
-                "i686-openbsd",
-                "x86_64-openbsd"
-            ],
-            "position": "/home/user/dev/nixpkgs/pkgs/applications/misc/hello/default.nix:14"
-        },
-        "name": "hello-2.9",
-        "system": "x86_64-linux"
-    }
-}
-```
-
-`nix-env` knows about the `description` field specifically:
-
-```ShellSession
-$ nix-env -qa hello --description
-hello-2.3  A program that produces a familiar, friendly greeting
-```
+Meta-attributes are not passed to the builder of the package. Thus, a change to a meta-attribute doesn’t trigger a recompilation of the package.
 
 ## Standard meta-attributes {#sec-standard-meta-attributes}
 
@@ -80,7 +34,7 @@ Right: `"A library for decoding PNG images"`
 
 ### `longDescription` {#var-meta-longDescription}
 
-An arbitrarily long description of the package.
+An arbitrarily long description of the package in [CommonMark](https://commonmark.org) Markdown.
 
 ### `branch` {#var-meta-branch}
 
@@ -112,11 +66,11 @@ For details, see [Licenses](#sec-meta-license).
 
 ### `maintainers` {#var-meta-maintainers}
 
-A list of the maintainers of this Nix expression. Maintainers are defined in [`nixpkgs/maintainers/maintainer-list.nix`](https://github.com/NixOS/nixpkgs/blob/master/maintainers/maintainer-list.nix). There is no restriction to becoming a maintainer, just add yourself to that list in a separate commit titled “maintainers: add alice”, and reference maintainers with `maintainers = with lib.maintainers; [ alice bob ]`.
+A list of the maintainers of this Nix expression. Maintainers are defined in [`nixpkgs/maintainers/maintainer-list.nix`](https://github.com/NixOS/nixpkgs/blob/master/maintainers/maintainer-list.nix). There are no restrictions on becoming a maintainer; just add yourself to that list in a separate commit titled “maintainers: add alice” in the same pull request, and reference maintainers with `maintainers = with lib.maintainers; [ alice bob ]`.
 
 ### `mainProgram` {#var-meta-mainProgram}
 
-The name of the main binary for the package. This effects the binary `nix run` executes and falls back to the name of the package. Example: `"rg"`
+The name of the main binary for the package. This affects which binary `nix run` executes; if unset, it falls back to the name of the package. Example: `"rg"`
 
 ### `priority` {#var-meta-priority}
 
@@ -132,6 +86,23 @@ meta.platforms = lib.platforms.linux;
 
 Attribute Set `lib.platforms` defines [various common lists](https://github.com/NixOS/nixpkgs/blob/master/lib/systems/doubles.nix) of platforms types.
 
+### `badPlatforms` {#var-meta-badPlatforms}
+
+The list of Nix [platform types](https://github.com/NixOS/nixpkgs/blob/b03ac42b0734da3e7be9bf8d94433a5195734b19/lib/meta.nix#L75-L81) on which the package is known not to be buildable.
+Hydra will never create prebuilt binaries for these platform types, even if they are in [`meta.platforms`](#var-meta-platforms).
+In general it is preferable to set `meta.platforms = lib.platforms.all` and then exclude any platforms on which the package is known not to build.
+For example, a package which requires dynamic linking and cannot be linked statically could use this:
+
+```nix
+meta.platforms = lib.platforms.all;
+meta.badPlatforms = [ lib.systems.inspect.patterns.isStatic ];
+```
+
+The [`lib.meta.availableOn`](https://github.com/NixOS/nixpkgs/blob/b03ac42b0734da3e7be9bf8d94433a5195734b19/lib/meta.nix#L95-L106) function can be used to test whether or not a package is available (i.e. buildable) on a given platform.
+Some packages use this to automatically detect the maximum set of features with which they can be built.
+For example, `systemd` [requires dynamic linking](https://github.com/systemd/systemd/issues/20600#issuecomment-912338965), and [has a `meta.badPlatforms` setting](https://github.com/NixOS/nixpkgs/blob/b03ac42b0734da3e7be9bf8d94433a5195734b19/pkgs/os-specific/linux/systemd/default.nix#L752) similar to the one above.
+Packages which can be built with or without `systemd` support will use `lib.meta.availableOn` to detect whether or not `systemd` is available on the [`hostPlatform`](#ssec-cross-platform-parameters) for which they are being built; if it is not available (e.g. due to a statically-linked host platform like `pkgsStatic`) this support will be disabled by default.
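+
+As a sketch of that pattern (the package and option names here are illustrative,
+not taken from Nixpkgs), an optional dependency can be guarded like this:
+
+```nix
+{ lib, stdenv, systemd
+, withSystemd ? lib.meta.availableOn stdenv.hostPlatform systemd
+}:
+
+stdenv.mkDerivation {
+  pname = "example";
+  version = "1.0";
+  src = ./.;  # placeholder source
+
+  # systemd support is enabled only where systemd can actually be built.
+  buildInputs = lib.optionals withSystemd [ systemd ];
+}
+```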
+
 ### `tests` {#var-meta-tests}
 
 ::: {.warning}
@@ -147,7 +118,7 @@ $ cd path/to/nixpkgs
 $ nix-build -A your-package.tests
 ```
 
-#### Package tests
+#### Package tests {#var-meta-tests-packages}
 
 Tests that are part of the source package are often executed in the `installCheckPhase`.
 
@@ -159,7 +130,7 @@ Prefer `passthru.tests` for tests that are introduced in nixpkgs because:
 
 For more on how to write and run package tests, see <xref linkend="sec-package-tests"/>.
 
-#### NixOS tests
+#### NixOS tests {#var-meta-tests-nixos}
 
 The NixOS tests are available as `nixosTests` in parameters of derivations. For instance, the OpenSMTPD derivation includes lines similar to:
 
@@ -211,11 +182,15 @@ runCommand "my-package-test" {
 
 ### `timeout` {#var-meta-timeout}
 
-A timeout (in seconds) for building the derivation. If the derivation takes longer than this time to build, it can fail due to breaking the timeout. However, all computers do not have the same computing power, hence some builders may decide to apply a multiplicative factor to this value. When filling this value in, try to keep it approximately consistent with other values already present in `nixpkgs`.
+A timeout (in seconds) for building the derivation. If the derivation takes longer than this time to build, Hydra will fail it for exceeding the timeout. However, not all computers have the same computing power, so some builders may apply a multiplicative factor to this value. When filling this value in, try to keep it approximately consistent with other values already present in `nixpkgs`.
+
+`meta` attributes are not stored in the instantiated derivation.
+Therefore, this setting may be lost when the package is used as a dependency.
+To be effective, it must be presented directly to an evaluation process that handles the `meta.timeout` attribute.
 
 ### `hydraPlatforms` {#var-meta-hydraPlatforms}
 
-The list of Nix platform types for which the Hydra instance at `hydra.nixos.org` will build the package. (Hydra is the Nix-based continuous build system.) It defaults to the value of `meta.platforms`. Thus, the only reason to set `meta.hydraPlatforms` is if you want `hydra.nixos.org` to build the package on a subset of `meta.platforms`, or not at all, e.g.
+The list of Nix platform types for which the [Hydra](https://github.com/nixos/hydra) [instance at `hydra.nixos.org`](https://nixos.org/hydra) will build the package. (Hydra is the Nix-based continuous build system.) It defaults to the value of `meta.platforms`. Thus, the only reason to set `meta.hydraPlatforms` is if you want `hydra.nixos.org` to build the package on a subset of `meta.platforms`, or not at all, e.g.
 
 ```nix
 meta.platforms = lib.platforms.linux;
@@ -224,7 +199,26 @@ meta.hydraPlatforms = [];
 
 ### `broken` {#var-meta-broken}
 
-If set to `true`, the package is marked as "broken", meaning that it won’t show up in `nix-env -qa`, and cannot be built or installed. Such packages should be removed from Nixpkgs eventually unless they are fixed.
+If set to `true`, the package is marked as "broken", meaning that it won’t show up in [search.nixos.org](https://search.nixos.org/packages), and cannot be built or installed unless the environment variable [`NIXPKGS_ALLOW_BROKEN`](#opt-allowBroken) is set.
+Such unconditionally-broken packages should be removed from Nixpkgs eventually unless they are fixed.
+
+The value of this attribute can depend on a package's arguments, including `stdenv`.
+This means that `broken` can be used to express constraints, for example:
+
+- Does not cross compile
+
+  ```nix
+  meta.broken = !(stdenv.buildPlatform.canExecute stdenv.hostPlatform);
+  ```
+
+- Broken if all of a certain set of its dependencies are broken
+
+  ```nix
+  meta.broken = lib.all (p: p.meta.broken) [ glibc musl ];
+  ```
+
+This makes `broken` strictly more powerful than `meta.badPlatforms`.
+However, `meta.availableOn` currently examines only `meta.platforms` and `meta.badPlatforms`, so `meta.broken` does not influence the default values for optional dependencies.
 
 ## Licenses {#sec-meta-license}
 
diff --git a/nixpkgs/doc/stdenv/multiple-output.chapter.md b/nixpkgs/doc/stdenv/multiple-output.chapter.md
index 65156816b991..c19d497ab61e 100644
--- a/nixpkgs/doc/stdenv/multiple-output.chapter.md
+++ b/nixpkgs/doc/stdenv/multiple-output.chapter.md
@@ -29,7 +29,7 @@ NixOS provides two ways to select the outputs to install for packages listed in
 `nix-env` lacks an easy way to select the outputs to install. When installing a package, `nix-env` always installs the outputs listed in `meta.outputsToInstall`, even when the user explicitly selects an output.
 
 ::: {.warning}
-`nix-env` silenty disregards the outputs selected by the user, and instead installs the outputs from `meta.outputsToInstall`. For example,
+`nix-env` silently disregards the outputs selected by the user, and instead installs the outputs from `meta.outputsToInstall`. For example,
 
 ```ShellSession
 $ nix-env -iA nixpkgs.coreutils.info
diff --git a/nixpkgs/doc/stdenv/stdenv.chapter.md b/nixpkgs/doc/stdenv/stdenv.chapter.md
index b4cc50b509d4..0b3776b530ca 100644
--- a/nixpkgs/doc/stdenv/stdenv.chapter.md
+++ b/nixpkgs/doc/stdenv/stdenv.chapter.md
@@ -11,12 +11,13 @@ stdenv.mkDerivation {
   name = "libfoo-1.2.3";
   src = fetchurl {
     url = "http://example.org/libfoo-1.2.3.tar.bz2";
-    sha256 = "0x2g1jqygyr5wiwg4ma1nd7w4ydpy82z9gkcv8vh2v8dn3y58v5m";
+    hash = "sha256-tWxU/LANbQE32my+9AXyt3nCT7NBVfJ45CX757EMT3Q=";
   };
 }
 ```
 
-(`stdenv` needs to be in scope, so if you write this in a separate Nix expression from `pkgs/all-packages.nix`, you need to pass it as a function argument.) Specifying a `name` and a `src` is the absolute minimum Nix requires. For convenience, you can also use `pname` and `version` attributes and `mkDerivation` will automatically set `name` to `"${pname}-${version}"` by default. Since [RFC 0035](https://github.com/NixOS/rfcs/pull/35), this is preferred for packages in Nixpkgs, as it allows us to reuse the version easily:
+(`stdenv` needs to be in scope, so if you write this in a separate Nix expression from `pkgs/all-packages.nix`, you need to pass it as a function argument.) Specifying a `name` and a `src` is the absolute minimum Nix requires. For convenience, you can also use `pname` and `version` attributes and `mkDerivation` will automatically set `name` to `"${pname}-${version}"` by default.
+**Since [RFC 0035](https://github.com/NixOS/rfcs/pull/35), this is preferred for packages in Nixpkgs**, as it allows us to reuse the version easily:
 
 ```nix
 stdenv.mkDerivation rec {
@@ -24,7 +25,7 @@ stdenv.mkDerivation rec {
   version = "1.2.3";
   src = fetchurl {
     url = "http://example.org/libfoo-source-${version}.tar.bz2";
-    sha256 = "0x2g1jqygyr5wiwg4ma1nd7w4ydpy82z9gkcv8vh2v8dn3y58v5m";
+    hash = "sha256-tWxU/LANbQE32my+9AXyt3nCT7NBVfJ45CX757EMT3Q=";
   };
 }
 ```
@@ -33,7 +34,8 @@ Many packages have dependencies that are not provided in the standard environmen
 
 ```nix
 stdenv.mkDerivation {
-  name = "libfoo-1.2.3";
+  pname = "libfoo";
+  version = "1.2.3";
   ...
   buildInputs = [libbar perl ncurses];
 }
@@ -45,7 +47,8 @@ Often it is necessary to override or modify some aspect of the build. To make th
 
 ```nix
 stdenv.mkDerivation {
-  name = "fnord-4.5";
+  pname = "fnord";
+  version = "4.5";
   ...
   buildPhase = ''
     gcc foo.c -o foo
@@ -65,7 +68,8 @@ While the standard environment provides a generic builder, you can still supply
 
 ```nix
 stdenv.mkDerivation {
-  name = "libfoo-1.2.3";
+  pname = "libfoo";
+  version = "1.2.3";
   ...
   builder = ./builder.sh;
 }
@@ -95,6 +99,27 @@ installPhase() {
 genericBuild
 ```
 
+### Building a `stdenv` package in `nix-shell` {#sec-building-stdenv-package-in-nix-shell}
+
+To build a `stdenv` package in a [`nix-shell`](https://nixos.org/manual/nix/unstable/command-ref/nix-shell.html), use
+
+```bash
+nix-shell '<nixpkgs>' -A some_package
+eval "${unpackPhase:-unpackPhase}"
+cd $sourceRoot
+eval "${patchPhase:-patchPhase}"
+eval "${configurePhase:-configurePhase}"
+eval "${buildPhase:-buildPhase}"
+```
+
+To modify a [phase](#sec-stdenv-phases), first print it with
+
+```bash
+type buildPhase
+```
+
+then change it in a text editor, and paste it back to the terminal.
+
 ## Tools provided by `stdenv` {#sec-tools-of-stdenv}
 
 The standard environment provides the following packages:
@@ -116,6 +141,82 @@ On Linux, `stdenv` also includes the `patchelf` utility.
 
 ## Specifying dependencies {#ssec-stdenv-dependencies}
 
+Build systems often require more dependencies than just what `stdenv` provides. This section describes attributes accepted by `stdenv.mkDerivation` that can be used to make these dependencies available to the build system.
+
+### Overview {#ssec-stdenv-dependencies-overview}
+
+A full reference of the different kinds of dependencies is provided in [](#ssec-stdenv-dependencies-reference), but here is an overview of the most common ones.
+It should cover most use cases.
+
+Add dependencies to `nativeBuildInputs` if they are executed during the build:
+- those which are needed on `$PATH` during the build, for example `cmake` and `pkg-config`
+- [setup hooks](#ssec-setup-hooks), for example [`makeWrapper`](#fun-makeWrapper)
+- interpreters needed by [`patchShebangs`](#patch-shebangs.sh) for build scripts (with the `--build` flag), which can be the case for e.g. `perl`
+
+Add dependencies to `buildInputs` if they will end up copied or linked into the final output or otherwise used at runtime:
+- libraries used by compilers, for example `zlib`,
+- interpreters needed by [`patchShebangs`](#patch-shebangs.sh) for scripts which are installed, which can be the case for e.g. `perl`
+
+::: {.note}
+These criteria are independent.
+
+For example, software using Wayland usually needs the `wayland` library at runtime, so `wayland` should be added to `buildInputs`.
+But it also executes the `wayland-scanner` program as part of the build to generate code, so `wayland` should also be added to `nativeBuildInputs`.
+:::
+
+Dependencies needed only to run tests are similarly classified between native (executed during build) and non-native (executed at runtime):
+- `nativeCheckInputs` for test tools needed on `$PATH` (such as `ctest`) and [setup hooks](#ssec-setup-hooks) (for example [`pytestCheckHook`](#python))
+- `checkInputs` for libraries linked into test executables (for example the `qcheck` OCaml package)
+
+These dependencies are only injected when [`doCheck`](#var-stdenv-doCheck) is set to `true`.
+
+#### Example {#ssec-stdenv-dependencies-overview-example}
+
+Consider for example this simplified derivation for `solo5`, a sandboxing tool:
+```nix
+stdenv.mkDerivation rec {
+  pname = "solo5";
+  version = "0.7.5";
+
+  src = fetchurl {
+    url = "https://github.com/Solo5/solo5/releases/download/v${version}/solo5-v${version}.tar.gz";
+    sha256 = "sha256-viwrS9lnaU8sTGuzK/+L/PlMM/xRRtgVuK5pixVeDEw=";
+  };
+
+  nativeBuildInputs = [ makeWrapper pkg-config ];
+  buildInputs = [ libseccomp ];
+
+  postInstall = ''
+    substituteInPlace $out/bin/solo5-virtio-mkimage \
+      --replace "/usr/lib/syslinux" "${syslinux}/share/syslinux" \
+      --replace "/usr/share/syslinux" "${syslinux}/share/syslinux" \
+      --replace "cp " "cp --no-preserve=mode "
+
+    wrapProgram $out/bin/solo5-virtio-mkimage \
+      --prefix PATH : ${lib.makeBinPath [ dosfstools mtools parted syslinux ]}
+  '';
+
+  doCheck = true;
+  nativeCheckInputs = [ util-linux qemu ];
+  checkPhase = '' [elided] '';
+}
+```
+
+- `makeWrapper` is a setup hook, i.e., a shell script sourced by the generic builder of `stdenv`.
+  It is thus executed during the build and must be added to `nativeBuildInputs`.
+- `pkg-config` is a build tool which the configure script of `solo5` expects to be on `$PATH` during the build:
+  therefore, it must be added to `nativeBuildInputs`.
+- `libseccomp` is a library linked into `$out/bin/solo5-elftool`.
+  As it is used at runtime, it must be added to `buildInputs`.
+- Tests need `qemu` and `getopt` (from `util-linux`) on `$PATH`; these must be added to `nativeCheckInputs`.
+- Some dependencies are injected directly in the shell code of phases: `syslinux`, `dosfstools`, `mtools`, and `parted`.
+  In this specific case, they will end up in the output of the derivation (`$out` here).
+  As Nix marks dependencies whose absolute path is present in the output as runtime dependencies, adding them to `buildInputs` is not required.
+
+For more complex cases, like libraries linked into an executable which is then executed as part of the build system, see [](#ssec-stdenv-dependencies-reference).
+
+### Reference {#ssec-stdenv-dependencies-reference}
+
 As described in the Nix manual, almost any `*.drv` store path in a derivation’s attribute set will induce a dependency on that derivation. `mkDerivation`, however, takes a few attributes intended to include all the dependencies of a package. This is done both for structure and consistency, but also so that certain other setup can take place. For example, certain dependencies need their bin directories added to the `PATH`. That is built-in, but other setup is done via a pluggable mechanism that works in conjunction with these dependency attributes. See [](#ssec-setup-hooks) for details.
 
 Dependencies can be broken down along three axes: their host and target platforms relative to the new derivation’s, and whether they are propagated. The platform distinctions are motivated by cross compilation; see [](#chap-cross) for exactly what each platform means. [^footnote-stdenv-ignored-build-platform] But even if one is not cross compiling, the platforms imply whether or not the dependency is needed at run-time or build-time, a concept that makes perfect sense outside of cross compilation. By default, the run-time/build-time distinction is just a hint for mental clarity, but with `strictDeps` set it is mostly enforced even in the native case.
@@ -156,7 +257,7 @@ propagated-dep(mapOffset(h0, t0, h1),
 ```
 let mapOffset(h, t, i) = i + (if i <= 0 then h else t - 1)
 
-dep(h0, _, A, B)
+dep(h0, t0, A, B)
 propagated-dep(h1, t1, B, C)
 h0 + h1 in {-1, 0, 1}
 h0 + t1 in {-1, 0, -1}
@@ -185,23 +286,23 @@ This is where “sum-like” comes in from above: We can just sum all of the hos
 
 Because of the bounds checks, the uncommon cases are `h = t` and `h + 2 = t`. In the former case, the motivation for `mapOffset` is that since its host and target platforms are the same, no transitive dependency of it should be able to “discover” an offset greater than its reduced target offsets. `mapOffset` effectively “squashes” all its transitive dependencies’ offsets so that none will ever be greater than the target offset of the original `h = t` package. In the other case, `h + 1` is skipped over between the host and target offsets. Instead of squashing the offsets, we need to “rip” them apart so no transitive dependencies’ offset is that one.
 
-Overall, the unifying theme here is that propagation shouldn’t be introducing transitive dependencies involving platforms the depending package is unaware of. \[One can imagine the dependending package asking for dependencies with the platforms it knows about; other platforms it doesn’t know how to ask for. The platform description in that scenario is a kind of unforagable capability.\] The offset bounds checking and definition of `mapOffset` together ensure that this is the case. Discovering a new offset is discovering a new platform, and since those platforms weren’t in the derivation “spec” of the needing package, they cannot be relevant. From a capability perspective, we can imagine that the host and target platforms of a package are the capabilities a package requires, and the depending package must provide the capability to the dependency.
+Overall, the unifying theme here is that propagation shouldn’t be introducing transitive dependencies involving platforms the depending package is unaware of. \[One can imagine the depending package asking for dependencies with the platforms it knows about; other platforms it doesn’t know how to ask for. The platform description in that scenario is a kind of unforgeable capability.\] The offset bounds checking and definition of `mapOffset` together ensure that this is the case. Discovering a new offset is discovering a new platform, and since those platforms weren’t in the derivation “spec” of the needing package, they cannot be relevant. From a capability perspective, we can imagine that the host and target platforms of a package are the capabilities a package requires, and the depending package must provide the capability to the dependency.
 
-### Variables specifying dependencies {#variables-specifying-dependencies}
+#### Variables specifying dependencies {#variables-specifying-dependencies}
 
-#### `depsBuildBuild` {#var-stdenv-depsBuildBuild}
+##### `depsBuildBuild` {#var-stdenv-depsBuildBuild}
 
 A list of dependencies whose host and target platforms are the new derivation’s build platform. These are programs and libraries used at build time that produce programs and libraries also used at build time. If the dependency doesn’t care about the target platform (i.e. isn’t a compiler or similar tool), put it in `nativeBuildInputs` instead. The most common use of this `buildPackages.stdenv.cc`, the default C compiler for this role. That example crops up more than one might think in old commonly used C libraries.
 
 Since these packages are able to be run at build-time, they are always added to the `PATH`, as described above. But since these packages are only guaranteed to be able to run then, they shouldn’t persist as run-time dependencies. This isn’t currently enforced, but could be in the future.
 
-#### `nativeBuildInputs` {#var-stdenv-nativeBuildInputs}
+##### `nativeBuildInputs` {#var-stdenv-nativeBuildInputs}
 
 A list of dependencies whose host platform is the new derivation’s build platform, and target platform is the new derivation’s host platform. These are programs and libraries used at build-time that, if they are a compiler or similar tool, produce code to run at run-time—i.e. tools used to build the new derivation. If the dependency doesn’t care about the target platform (i.e. isn’t a compiler or similar tool), put it here, rather than in `depsBuildBuild` or `depsBuildTarget`. This could be called `depsBuildHost` but `nativeBuildInputs` is used for historical continuity.
 
 Since these packages are able to be run at build-time, they are added to the `PATH`, as described above. But since these packages are only guaranteed to be able to run then, they shouldn’t persist as run-time dependencies. This isn’t currently enforced, but could be in the future.
 
-#### `depsBuildTarget` {#var-stdenv-depsBuildTarget}
+##### `depsBuildTarget` {#var-stdenv-depsBuildTarget}
 
 A list of dependencies whose host platform is the new derivation’s build platform, and target platform is the new derivation’s target platform. These are programs used at build time that produce code to run with code produced by the depending package. Most commonly, these are tools used to build the runtime or standard library that the currently-being-built compiler will inject into any code it compiles. In many cases, the currently-being-built compiler is itself employed for that task, but when that compiler won’t run (i.e. its build and host platform differ), this is not possible. Other times, the compiler relies on some other tool, like binutils, that is always built separately so that the dependency is unconditional.
 
@@ -209,41 +310,41 @@ This is a somewhat confusing concept to wrap one’s head around, and for good r
 
 Since these packages are able to run at build time, they are added to the `PATH`, as described above. But since these packages are only guaranteed to be able to run then, they shouldn’t persist as run-time dependencies. This isn’t currently enforced, but could be in the future.
 
-#### `depsHostHost` {#var-stdenv-depsHostHost}
+##### `depsHostHost` {#var-stdenv-depsHostHost}
 
 A list of dependencies whose host and target platforms match the new derivation’s host platform. In practice, this would usually be tools used by compilers for macros or a metaprogramming system, or libraries used by the macros or metaprogramming code itself. It’s always preferable to use a `depsBuildBuild` dependency in the derivation being built over a `depsHostHost` on the tool doing the building for this purpose.
 
-#### `buildInputs` {#var-stdenv-buildInputs}
+##### `buildInputs` {#var-stdenv-buildInputs}
 
 A list of dependencies whose host platform and target platform match the new derivation’s. This would be called `depsHostTarget` but for historical continuity. If the dependency doesn’t care about the target platform (i.e. isn’t a compiler or similar tool), put it here, rather than in `depsBuildBuild`.
 
 These are often programs and libraries used by the new derivation at *run*-time, but that isn’t always the case. For example, the machine code in a statically-linked library is only used at run-time, but the derivation containing the library is only needed at build-time. Even in the dynamic case, the library may also be needed at build-time to appease the linker.
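 
 As a hedged, schematic sketch contrasting the two most common dependency lists (the concrete packages are only illustrative):
 
 ```nix
 { stdenv, cmake, pkg-config, zlib }:
 
 stdenv.mkDerivation {
   pname = "example";
   version = "0.1";
   # Tools executed on the build platform while building.
   nativeBuildInputs = [ cmake pkg-config ];
   # Libraries built for the host platform and linked into the result.
   buildInputs = [ zlib ];
   # src and the remaining attributes are omitted here.
 }
 ```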
 
-#### `depsTargetTarget` {#var-stdenv-depsTargetTarget}
+##### `depsTargetTarget` {#var-stdenv-depsTargetTarget}
 
 A list of dependencies whose host platform matches the new derivation’s target platform. These are packages that run on the target platform, e.g. the standard library or run-time deps of the standard library that a compiler insists on knowing about. It’s poor form in almost all cases for a package to depend on another from a future stage \[future stage corresponding to positive offset\]. Do not use this attribute unless you are packaging a compiler and are sure it is needed.
 
-#### `depsBuildBuildPropagated` {#var-stdenv-depsBuildBuildPropagated}
+##### `depsBuildBuildPropagated` {#var-stdenv-depsBuildBuildPropagated}
 
 The propagated equivalent of `depsBuildBuild`. This perhaps never ought to be used, but it is included for consistency \[see below for the others\].
 
-#### `propagatedNativeBuildInputs` {#var-stdenv-propagatedNativeBuildInputs}
+##### `propagatedNativeBuildInputs` {#var-stdenv-propagatedNativeBuildInputs}
 
 The propagated equivalent of `nativeBuildInputs`. This would be called `depsBuildHostPropagated` but for historical continuity. For example, if package `Y` has `propagatedNativeBuildInputs = [X]`, and package `Z` has `buildInputs = [Y]`, then package `Z` will be built as if it included package `X` in its `nativeBuildInputs`. If instead, package `Z` has `nativeBuildInputs = [Y]`, then `Z` will be built as if it included `X` in the `depsBuildBuild` of package `Z`, because of the sum of the two `-1` host offsets.
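+
+A minimal, hedged sketch of that example, with the real package `hello` standing in for `X` and two dummy derivations standing in for `Y` and `Z`:
+
+```nix
+with import <nixpkgs> { };
+
+let
+  Y = stdenv.mkDerivation {
+    name = "Y";
+    # hello (our X) is propagated to build-time consumers of Y.
+    propagatedNativeBuildInputs = [ hello ];
+    dontUnpack = true;
+    installPhase = "mkdir -p $out";
+  };
+in
+stdenv.mkDerivation {
+  name = "Z";
+  # Because Y sits in buildInputs, hello behaves as if it were listed in
+  # Z's nativeBuildInputs, so it should be on PATH while Z builds.
+  buildInputs = [ Y ];
+  dontUnpack = true;
+  installPhase = "mkdir -p $out";
+}
+```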
 
-#### `depsBuildTargetPropagated` {#var-stdenv-depsBuildTargetPropagated}
+##### `depsBuildTargetPropagated` {#var-stdenv-depsBuildTargetPropagated}
 
 The propagated equivalent of `depsBuildTarget`. This is prefixed for the same reason of alerting potential users.
 
-#### `depsHostHostPropagated` {#var-stdenv-depsHostHostPropagated}
+##### `depsHostHostPropagated` {#var-stdenv-depsHostHostPropagated}
 
 The propagated equivalent of `depsHostHost`.
 
-#### `propagatedBuildInputs` {#var-stdenv-propagatedBuildInputs}
+##### `propagatedBuildInputs` {#var-stdenv-propagatedBuildInputs}
 
 The propagated equivalent of `buildInputs`. This would be called `depsHostTargetPropagated` but for historical continuity.
 
-#### `depsTargetTargetPropagated` {#var-stdenv-depsTargetTargetPropagated}
+##### `depsTargetTargetPropagated` {#var-stdenv-depsTargetTargetPropagated}
 
 The propagated equivalent of `depsTargetTarget`. This is prefixed for the same reason of alerting potential users.
 
@@ -253,7 +354,7 @@ The propagated equivalent of `depsTargetTarget`. This is prefixed for the same r
 
 #### `NIX_DEBUG` {#var-stdenv-NIX_DEBUG}
 
-A natural number indicating how much information to log. If set to 1 or higher, `stdenv` will print moderate debugging information during the build. In particular, the `gcc` and `ld` wrapper scripts will print out the complete command line passed to the wrapped tools. If set to 6 or higher, the `stdenv` setup script will be run with `set -x` tracing. If set to 7 or higher, the `gcc` and `ld` wrapper scripts will also be run with `set -x` tracing.
+A number between 0 and 7 indicating how much information to log. If set to 1 or higher, `stdenv` will print moderate debugging information during the build. In particular, the `gcc` and `ld` wrapper scripts will print out the complete command line passed to the wrapped tools. If set to 6 or higher, the `stdenv` setup script will be run with `set -x` tracing. If set to 7 or higher, the `gcc` and `ld` wrapper scripts will also be run with `set -x` tracing.
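+
+For example, to rebuild a package with moderate tracing enabled (a sketch using `overrideAttrs`; `hello` is just a stand-in):
+
+```nix
+with import <nixpkgs> { };
+
+hello.overrideAttrs (oldAttrs: {
+  # Print the full command lines passed to the gcc and ld wrappers.
+  NIX_DEBUG = 1;
+})
+```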
 
 ### Attributes affecting build properties {#attributes-affecting-build-properties}
 
@@ -283,39 +384,107 @@ Values inside it are not passed to the builder, so you can change them without t
 
 #### `passthru.updateScript` {#var-passthru-updateScript}
 
-A script to be run by `maintainers/scripts/update.nix` when the package is matched. It needs to be an executable file, either on the file system:
+A script to be run by `maintainers/scripts/update.nix` when the package is matched. The attribute can contain one of the following:
 
-```nix
-passthru.updateScript = ./update.sh;
-```
+- []{#var-passthru-updateScript-command} an executable file, either on the file system:
 
-or inside the expression itself:
+  ```nix
+  passthru.updateScript = ./update.sh;
+  ```
 
-```nix
-passthru.updateScript = writeScript "update-zoom-us" ''
-  #!/usr/bin/env nix-shell
-  #!nix-shell -i bash -p curl pcre common-updater-scripts
+  or inside the expression itself:
 
-  set -eu -o pipefail
+  ```nix
+  passthru.updateScript = writeScript "update-zoom-us" ''
+    #!/usr/bin/env nix-shell
+    #!nix-shell -i bash -p curl pcre common-updater-scripts
 
-  version="$(curl -sI https://zoom.us/client/latest/zoom_x86_64.tar.xz | grep -Fi 'Location:' | pcregrep -o1 '/(([0-9]\.?)+)/')"
-  update-source-version zoom-us "$version"
-'';
-```
+    set -eu -o pipefail
 
-The attribute can also contain a list, a script followed by arguments to be passed to it:
+    version="$(curl -sI https://zoom.us/client/latest/zoom_x86_64.tar.xz | grep -Fi 'Location:' | pcregrep -o1 '/(([0-9]\.?)+)/')"
+    update-source-version zoom-us "$version"
+  '';
+  ```
 
-```nix
-passthru.updateScript = [ ../../update.sh pname "--requested-release=unstable" ];
-```
+- a list consisting of a script followed by the arguments to be passed to it:
+
+  ```nix
+  passthru.updateScript = [ ../../update.sh pname "--requested-release=unstable" ];
+  ```
+
+- an attribute set containing:
+  - [`command`]{#var-passthru-updateScript-set-command} – a string or list in the [format expected by `passthru.updateScript`](#var-passthru-updateScript-command).
+  - [`attrPath`]{#var-passthru-updateScript-set-attrPath} (optional) – a string containing the canonical attribute path for the package. If present, it will be passed to the update script instead of the attribute path on which the package was discovered during Nixpkgs traversal.
+  - [`supportedFeatures`]{#var-passthru-updateScript-set-supportedFeatures} (optional) – a list of the [extra features](#var-passthru-updateScript-supported-features) the script supports.
 
-The script will be run with `UPDATE_NIX_ATTR_PATH` environment variable set to the attribute path it is supposed to update.
+  ```nix
+  passthru.updateScript = {
+    command = [ ../../update.sh pname ];
+    attrPath = pname;
+    supportedFeatures = [ … ];
+  };
+  ```
+
+##### How are update scripts executed? {#var-passthru-updateScript-execution}
+
+Update scripts are invoked by the `maintainers/scripts/update.nix` script. You can run `nix-shell maintainers/scripts/update.nix` in the root of the Nixpkgs repository for information on how to use it. `update.nix` offers several modes for selecting packages to update (e.g. select by attribute path, traverse Nixpkgs and filter by maintainer, etc.), and it will execute update scripts for all matched packages that have an `updateScript` attribute.
+
+Each update script will be passed the following environment variables:
+
+- [`UPDATE_NIX_NAME`]{#var-passthru-updateScript-env-UPDATE_NIX_NAME} – content of the `name` attribute of the updated package.
+- [`UPDATE_NIX_PNAME`]{#var-passthru-updateScript-env-UPDATE_NIX_PNAME} – content of the `pname` attribute of the updated package.
+- [`UPDATE_NIX_OLD_VERSION`]{#var-passthru-updateScript-env-UPDATE_NIX_OLD_VERSION} – content of the `version` attribute of the updated package.
+- [`UPDATE_NIX_ATTR_PATH`]{#var-passthru-updateScript-env-UPDATE_NIX_ATTR_PATH} – the attribute path under which `update.nix` discovered the package (or the [canonical `attrPath`](#var-passthru-updateScript-set-attrPath) when available). Example: `pantheon.elementary-terminal`
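+
+For illustration, here is a hedged sketch modelled on the `writeScript` example above (the URL is a placeholder) that reads these variables instead of hard-coding the package name:
+
+```nix
+passthru.updateScript = writeScript "update-example" ''
+  #!/usr/bin/env nix-shell
+  #!nix-shell -i bash -p curl common-updater-scripts
+
+  set -eu -o pipefail
+
+  # UPDATE_NIX_OLD_VERSION and UPDATE_NIX_ATTR_PATH are provided by update.nix.
+  version="$(curl -s https://example.org/latest-version.txt)"
+  if [ "$version" != "$UPDATE_NIX_OLD_VERSION" ]; then
+    update-source-version "$UPDATE_NIX_ATTR_PATH" "$version"
+  fi
+'';
+```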
 
 ::: {.note}
-The script will be usually run from the root of the Nixpkgs repository but you should not rely on that. Also note that the update scripts will be run in parallel by default; you should avoid running `git commit` or any other commands that cannot handle that.
+An update script will usually be run from the root of the Nixpkgs repository, but you should not rely on that. Also note that `update.nix` executes update scripts in parallel by default, so you should avoid running `git commit` or any other commands that cannot handle that.
 :::
 
-For information about how to run the updates, execute `nix-shell maintainers/scripts/update.nix`.
+::: {.tip}
+While update scripts should not create commits themselves, `maintainers/scripts/update.nix` supports automatically creating commits when running it with `--argstr commit true`. If you need to customize the commit message, you can have the update script implement the [`commit`](#var-passthru-updateScript-commit) feature.
+:::
+
+##### Supported features {#var-passthru-updateScript-supported-features}
+
+###### `commit` {#var-passthru-updateScript-commit}
+
+This feature allows update scripts to *ask* `update.nix` to create Git commits.
+
+When support for this feature is declared, the update script is expected, whenever it exits with a `0` return status, to print to standard output a JSON list containing one object (described below) for each updated attribute.
+
+When `update.nix` is run with the `--argstr commit true` argument, it will create a separate commit for each of the objects. An empty list can be returned when the script did not update any files, for example, when the package is already at the latest version.
+
+The commit object contains the following values:
+
+- [`attrPath`]{#var-passthru-updateScript-commit-attrPath} – a string containing the attribute path.
+- [`oldVersion`]{#var-passthru-updateScript-commit-oldVersion} – a string containing the old version.
+- [`newVersion`]{#var-passthru-updateScript-commit-newVersion} – a string containing the new version.
+- [`files`]{#var-passthru-updateScript-commit-files} – a non-empty list of file paths (as strings) to add to the commit.
+- [`commitBody`]{#var-passthru-updateScript-commit-commitBody} (optional) – a string with extra content to be appended to the default commit message (useful for adding changelog links).
+- [`commitMessage`]{#var-passthru-updateScript-commit-commitMessage} (optional) – a string to use instead of the default commit message.
+
+If the returned array contains exactly one object (e.g. `[{}]`), all values are optional and will be determined automatically.
+
+```{=docbook}
+<example>
+<title>Standard output of an update script using the commit feature</title>
+```
+
+```json
+[
+  {
+    "attrPath": "volume_key",
+    "oldVersion": "0.3.11",
+    "newVersion": "0.3.12",
+    "files": [
+      "/path/to/nixpkgs/pkgs/development/libraries/volume-key/default.nix"
+    ]
+  }
+]
+```
+
+```{=docbook}
+</example>
+```
 
 ### Recursive attributes in `mkDerivation` {#mkderivation-recursive-attributes}
 
@@ -452,6 +621,8 @@ The list of source files or directories to be unpacked or copied. One of these m
 
 After running `unpackPhase`, the generic builder changes the current directory to the directory created by unpacking the sources. If there are multiple source directories, you should set `sourceRoot` to the name of the intended directory. Set `sourceRoot = ".";` if you use `srcs` and control the unpack phase yourself.
 
+By default, `sourceRoot` is set to `"source"`. If you want to point to a sub-directory inside your project, you therefore need to set `sourceRoot = "source/my-sub-directory"`.
+
 ##### `setSourceRoot` {#var-stdenv-setSourceRoot}
 
 Alternatively to setting `sourceRoot`, you can set `setSourceRoot` to a shell command to be evaluated by the unpack phase after the sources have been unpacked. This command must set `sourceRoot`.
@@ -536,7 +707,7 @@ The prefix under which the package must be installed, passed via the `--prefix`
 
 The key to use when specifying the prefix. By default, this is set to `--prefix=` as that is used by the majority of packages.
 
-##### `dontAddStaticConfigureFlags`
+##### `dontAddStaticConfigureFlags` {#var-stdenv-dontAddStaticConfigureFlags}
 
 By default, when building statically, stdenv will try to add build system appropriate configure flags to try to enable static builds.
 
@@ -624,7 +795,7 @@ Before and after running `make`, the hooks `preBuild` and `postBuild` are called
 
 ### The check phase {#ssec-check-phase}
 
-The check phase checks whether the package was built correctly by running its test suite. The default `checkPhase` calls `make check`, but only if the `doCheck` variable is enabled.
+The check phase checks whether the package was built correctly by running its test suite. The default `checkPhase` calls `make $checkTarget`, but only if the [`doCheck` variable](#var-stdenv-doCheck) is enabled.
 
 #### Variables controlling the check phase {#variables-controlling-the-check-phase}
 
@@ -644,7 +815,8 @@ See the [build phase](#var-stdenv-makeFlags) for details.
 
 ##### `checkTarget` {#var-stdenv-checkTarget}
 
-The make target that runs the tests. Defaults to `check`.
+The `make` target that runs the tests.
+If unset, use `check` if it exists, otherwise `test`; if neither is found, do nothing.
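+
+For example, if a project exposes its test suite under a different target name (the name below is illustrative):
+
+```nix
+{
+  doCheck = true;
+  # The project's Makefile runs its tests via `make tests`.
+  checkTarget = "tests";
+}
+```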
 
 ##### `checkFlags` / `checkFlagsArray` {#var-stdenv-checkFlags}
 
@@ -652,7 +824,11 @@ A list of strings passed as additional flags to `make`. Like `makeFlags` and `ma
 
 ##### `checkInputs` {#var-stdenv-checkInputs}
 
-A list of dependencies used by the phase. This gets included in `nativeBuildInputs` when `doCheck` is set.
+A list of host dependencies used by the phase, usually libraries linked into executables built during tests. This gets included in `buildInputs` when `doCheck` is set.
+
+##### `nativeCheckInputs` {#var-stdenv-nativeCheckInputs}
+
+A list of native dependencies used by the phase, notably tools needed on `$PATH`. This gets included in `nativeBuildInputs` when `doCheck` is set.
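+
+A hedged sketch combining the two (the concrete packages are only illustrative):
+
+```nix
+{
+  doCheck = true;
+  # Tools the test suite needs on PATH, run on the build machine.
+  nativeCheckInputs = [ git ];
+  # Host libraries that test executables are linked against.
+  checkInputs = [ openssl ];
+}
+```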
 
 ##### `preCheck` {#var-stdenv-preCheck}
 
@@ -717,11 +893,11 @@ If set, libraries and executables are not stripped. By default, they are.
 
 ##### `dontStripHost` {#var-stdenv-dontStripHost}
 
-Like `dontStrip`, but only affects the `strip` command targetting the package’s host platform. Useful when supporting cross compilation, but otherwise feel free to ignore.
+Like `dontStrip`, but only affects the `strip` command targeting the package’s host platform. Useful when supporting cross compilation, but otherwise feel free to ignore.
 
 ##### `dontStripTarget` {#var-stdenv-dontStripTarget}
 
-Like `dontStrip`, but only affects the `strip` command targetting the packages’ target platform. Useful when supporting cross compilation, but otherwise feel free to ignore.
+Like `dontStrip`, but only affects the `strip` command targeting the package’s target platform. Useful when supporting cross compilation, but otherwise feel free to ignore.
 
 ##### `dontMoveSbin` {#var-stdenv-dontMoveSbin}
 
@@ -819,7 +995,11 @@ A list of strings passed as additional flags to `make`. Like `makeFlags` and `ma
 
 ##### `installCheckInputs` {#var-stdenv-installCheckInputs}
 
-A list of dependencies used by the phase. This gets included in `nativeBuildInputs` when `doInstallCheck` is set.
+A list of host dependencies used by the phase, usually libraries linked into executables built during tests. This gets included in `buildInputs` when `doInstallCheck` is set.
+
+##### `nativeInstallCheckInputs` {#var-stdenv-nativeInstallCheckInputs}
+
+A list of native dependencies used by the phase, notably tools needed on `$PATH`. This gets included in `nativeBuildInputs` when `doInstallCheck` is set.
 
 ##### `preInstallCheck` {#var-stdenv-preInstallCheck}
 
@@ -835,6 +1015,10 @@ The distribution phase is intended to produce a source distribution of the packa
 
 #### Variables controlling the distribution phase {#variables-controlling-the-distribution-phase}
 
+##### `doDist` {#var-stdenv-doDist}
+
+If set, the distribution phase is executed.
+
 ##### `distTarget` {#var-stdenv-distTarget}
 
 The make target that produces the distribution. Defaults to `dist`.
@@ -871,12 +1055,27 @@ Constructs a wrapper for a program with various possible arguments. It is define
 # adds `FOOBAR=baz` to `$out/bin/foo`’s environment
 makeWrapper $out/bin/foo $wrapperfile --set FOOBAR baz
 
-# prefixes the binary paths of `hello` and `git`
+# Prefixes the binary paths of `hello` and `git`
+# and suffixes the binary path of `xdg-utils`.
 # Be advised that paths often should be patched in directly
 # (via string replacements or in `configurePhase`).
-makeWrapper $out/bin/foo $wrapperfile --prefix PATH : ${lib.makeBinPath [ hello git ]}
+makeWrapper $out/bin/foo $wrapperfile \
+  --prefix PATH : ${lib.makeBinPath [ hello git ]} \
+  --suffix PATH : ${lib.makeBinPath [ xdg-utils ]}
 ```
 
+Packages may expect or require other utilities to be available at runtime.
+`makeWrapper` can be used to add packages to a `PATH` environment variable local to a wrapper.
+
+Use `--prefix` to explicitly set dependencies in `PATH`.
+
+::: {.note}
+`--prefix` essentially hard-codes dependencies into the wrapper.
+They cannot be overridden without rebuilding the package.
+:::
+
+If dependencies should be resolved at runtime, use `--suffix` to append fallback values to `PATH`.
+
 There are many more kinds of arguments; they are documented in `nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh` for the `makeWrapper` implementation and in `nixpkgs/pkgs/build-support/setup-hooks/make-binary-wrapper/make-binary-wrapper.sh` for the `makeBinaryWrapper` implementation.
 
 `wrapProgram` is a convenience function you probably want to use most of the time, implemented by both `makeWrapper` and `makeBinaryWrapper`.
@@ -900,15 +1099,15 @@ postInstall = ''
 
 Performs string substitution on the contents of \<infile\>, writing the result to \<outfile\>. The substitutions in \<subs\> are of the following form:
 
-#### `--replace` \<s1\> \<s2\>
+#### `--replace` \<s1\> \<s2\> {#fun-substitute-replace}
 
 Replace every occurrence of the string \<s1\> by \<s2\>.
 
-#### `--subst-var` \<varName\>
+#### `--subst-var` \<varName\> {#fun-substitute-subst-var}
 
 Replace every occurrence of `@varName@` by the contents of the environment variable \<varName\>. This is useful for generating files from templates, using `@...@` in the template as placeholders.
 
-#### `--subst-var-by` \<varName\> \<s\>
+#### `--subst-var-by` \<varName\> \<s\> {#fun-substitute-subst-var-by}
 
 Replace every occurrence of `@varName@` by the string \<s\>.
 
@@ -973,13 +1172,39 @@ Convenience function for `makeWrapper` that replaces `<\executable\>` with a wra
 
 If you apply it multiple times, it will overwrite the wrapper file and you will end up with double wrapping, which should be avoided.
 
+### `prependToVar` \<variableName\> \<elements...\> {#fun-prependToVar}
+
+Prepend elements to a variable.
+
+Example:
+
+```shellSession
+$ configureFlags="--disable-static"
+$ prependToVar configureFlags --disable-dependency-tracking --enable-foo
+$ echo $configureFlags
+--disable-dependency-tracking --enable-foo --disable-static
+```
+
+### `appendToVar` \<variableName\> \<elements...\> {#fun-appendToVar}
+
+Append elements to a variable.
+
+Example:
+
+```shellSession
+$ configureFlags="--disable-static"
+$ appendToVar configureFlags --disable-dependency-tracking --enable-foo
+$ echo $configureFlags
+--disable-static --disable-dependency-tracking --enable-foo
+```
+
 ## Package setup hooks {#ssec-setup-hooks}
 
 Nix itself considers a build-time dependency as merely something that should previously be built and accessible at build time—packages themselves are on their own to perform any additional setup. In most cases, that is fine, and the downstream derivation can deal with its own dependencies. But for a few common tasks, that would result in almost every package doing the same sort of setup work—depending not on the package itself, but entirely on which dependencies were used.
 
 In order to alleviate this burden, the setup hook mechanism was written, where any package can include a shell script that \[by convention rather than enforcement by Nix\], any downstream reverse-dependency will source as part of its build process. That allows the downstream dependency to merely specify its dependencies, and lets those dependencies effectively initialize themselves. No boilerplate mirroring the list of dependencies is needed.
 
-The setup hook mechanism is a bit of a sledgehammer though: a powerful feature with a broad and indiscriminate area of effect. The combination of its power and implicit use may be expedient, but isn’t without costs. Nix itself is unchanged, but the spirit of added dependencies being effect-free is violated even if the letter isn’t. For example, if a derivation path is mentioned more than once, Nix itself doesn’t care and simply makes sure the dependency derivation is already built just the same—depending is just needing something to exist, and needing is idempotent. However, a dependency specified twice will have its setup hook run twice, and that could easily change the build environment (though a well-written setup hook will therefore strive to be idempotent so this is in fact not observable). More broadly, setup hooks are anti-modular in that multiple dependencies, whether the same or different, should not interfere and yet their setup hooks may well do so.
+The setup hook mechanism is a bit of a sledgehammer though: a powerful feature with a broad and indiscriminate area of effect. The combination of its power and implicit use may be expedient, but isn’t without costs. Nix itself is unchanged, but the spirit of added dependencies being effect-free is violated even if the letter isn’t. For example, if a derivation path is mentioned more than once, Nix itself doesn’t care and simply makes sure the dependency derivation is already built just the same—depending is just needing something to exist, and needing is idempotent. However, a dependency specified twice will have its setup hook run twice, and that could easily change the build environment (though a well-written setup hook will therefore strive to be idempotent so this is in fact not observable). More broadly, setup hooks are anti-modular in that multiple dependencies, whether the same or different, should not interfere and yet their setup hooks may well do so.
 
 The most typical use of the setup hook is actually to add other hooks which are then run (i.e. after all the setup hooks) on each dependency. For example, the C compiler wrapper’s setup hook feeds itself flags for each dependency that contains relevant libraries and headers. This is done by defining a bash function, and appending its name to one of `envBuildBuildHooks`, `envBuildHostHooks`, `envBuildTargetHooks`, `envHostHostHooks`, `envHostTargetHooks`, or `envTargetTargetHooks`. These 6 bash variables correspond to the 6 sorts of dependencies by platform (there’s 12 total but we ignore the propagated/non-propagated axis).
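 
 As a hedged sketch of that pattern (all names here are invented; real hooks live under `pkgs/build-support/setup-hooks` and in the wrappers), a package could ship a setup hook that registers such a per-dependency function:
 
 ```nix
 { writeTextFile }:
 
 writeTextFile {
   name = "example-setup-hook";
   destination = "/nix-support/setup-hook";
   text = ''
     exampleAddDataDir() {
       # $1 is the store path of a single dependency.
       if [ -d "$1/share/example" ]; then
         addToSearchPath EXAMPLE_DATA_PATH "$1/share/example"
       fi
     }
     # Run the function once per (host, target) dependency.
     envHostTargetHooks+=(exampleAddDataDir)
   '';
 }
 ```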
 
@@ -1023,7 +1248,7 @@ Multiple paths can be specified.
 patchShebangs [--build | --host] PATH...
 ```
 
-##### Flags
+##### Flags {#patch-shebangs.sh-invocation-flags}
 
 `--build`
 : Look up commands available at build time
@@ -1031,7 +1256,7 @@ patchShebangs [--build | --host] PATH...
 `--host`
 : Look up commands available at run time
 
-##### Examples
+##### Examples {#patch-shebangs.sh-invocation-examples}
 
 ```sh
 patchShebangs --host /nix/store/<hash>-hello-1.0/bin
@@ -1092,13 +1317,15 @@ This setup hook moves any libraries installed in the `lib64/` subdirectory into
 
 This setup hook moves any systemd user units installed in the `lib/` subdirectory into `share/`. In addition, a link is provided from `share/` to `lib/` for compatibility. This is needed for systemd to find user services when installed into the user profile.
 
+This hook only runs when compiling for Linux.
+
 ### `set-source-date-epoch-to-latest.sh` {#set-source-date-epoch-to-latest.sh}
 
 This sets `SOURCE_DATE_EPOCH` to the modification time of the most recent file.
 
-### Bintools Wrapper {#bintools-wrapper}
+### Bintools Wrapper and hook {#bintools-wrapper}
 
-The Bintools Wrapper wraps the binary utilities for a bunch of miscellaneous purposes. These are GNU Binutils when targetting Linux, and a mix of cctools and GNU binutils for Darwin. \[The “Bintools” name is supposed to be a compromise between “Binutils” and “cctools” not denoting any specific implementation.\] Specifically, the underlying bintools package, and a C standard library (glibc or Darwin’s libSystem, just for the dynamic loader) are all fed in, and dependency finding, hardening (see below), and purity checks for each are handled by the Bintools Wrapper. Packages typically depend on CC Wrapper, which in turn (at run time) depends on the Bintools Wrapper.
+The Bintools Wrapper wraps the binary utilities for a bunch of miscellaneous purposes. These are GNU Binutils when targeting Linux, and a mix of cctools and GNU binutils for Darwin. \[The “Bintools” name is supposed to be a compromise between “Binutils” and “cctools” not denoting any specific implementation.\] Specifically, the underlying bintools package, and a C standard library (glibc or Darwin’s libSystem, just for the dynamic loader) are all fed in, and dependency finding, hardening (see below), and purity checks for each are handled by the Bintools Wrapper. Packages typically depend on CC Wrapper, which in turn (at run time) depends on the Bintools Wrapper.
 
 The Bintools Wrapper was only just recently split off from CC Wrapper, so the division of labor is still being worked out. For example, it shouldn’t care about the C standard library, but just take a derivation with the dynamic loader (which happens to be the glibc on Linux). Dependency finding, however, is a task both wrappers will continue to need to share, and probably the most important to understand. It is currently accomplished by collecting directories of host-platform dependencies (i.e. `buildInputs` and `nativeBuildInputs`) in environment variables. The Bintools Wrapper’s setup hook causes any `lib` and `lib64` subdirectories to be added to `NIX_LDFLAGS`. Since the CC Wrapper and the Bintools Wrapper use the same strategy, most of the Bintools Wrapper code is sparsely commented and refers to the CC Wrapper. But the CC Wrapper’s code, by contrast, has quite lengthy comments. The Bintools Wrapper merely cites those, rather than repeating them, to avoid falling out of sync.
 
@@ -1106,173 +1333,27 @@ A final task of the setup hook is defining a number of standard environment vari
 
 A problem with this final task is that the Bintools Wrapper is honest and defines `LD` as `ld`. Most packages, however, firstly use the C compiler for linking, secondly use `LD` anyways, defining it as the C compiler, and thirdly, only so define `LD` when it is undefined as a fallback. This triple-threat means Bintools Wrapper will break those packages, as LD is already defined as the actual linker which the package won’t override yet doesn’t want to use. The workaround is to define, just for the problematic package, `LD` as the C compiler. A good way to do this would be `preConfigure = "LD=$CC"`.
 
-### CC Wrapper {#cc-wrapper}
+### CC Wrapper and hook {#cc-wrapper}
 
 The CC Wrapper wraps a C toolchain for a bunch of miscellaneous purposes. Specifically, a C compiler (GCC or Clang), wrapped binary tools, and a C standard library (glibc or Darwin’s libSystem, just for the dynamic loader) are all fed in, and dependency finding, hardening (see below), and purity checks for each are handled by the CC Wrapper. Packages typically depend on the CC Wrapper, which in turn (at run-time) depends on the Bintools Wrapper.
 
-Dependency finding is undoubtedly the main task of the CC Wrapper. This works just like the Bintools Wrapper, except that any `include` subdirectory of any relevant dependency is added to `NIX_CFLAGS_COMPILE`. The setup hook itself contains some lengthy comments describing the exact convoluted mechanism by which this is accomplished.
+Dependency finding is undoubtedly the main task of the CC Wrapper. This works just like the Bintools Wrapper, except that any `include` subdirectory of any relevant dependency is added to `NIX_CFLAGS_COMPILE`. The setup hook itself contains elaborate comments describing the exact mechanism by which this is accomplished.
 
 Similarly, the CC Wrapper follows the Bintools Wrapper in defining standard environment variables with the names of the tools it wraps, for the same reasons described above. Importantly, while it includes a `cc` symlink to the C compiler for portability, the `CC` will be defined using the compiler’s “real name” (i.e. `gcc` or `clang`). This helps lousy build systems that inspect the name of the compiler rather than run it.
 
 Here are some more packages that provide a setup hook. Since the list of hooks is extensible, this is not an exhaustive list. The mechanism is only to be used as a last resort, so it might cover most uses.
 
-### Perl {#setup-hook-perl}
-
-Adds the `lib/site_perl` subdirectory of each build input to the `PERL5LIB` environment variable. For instance, if `buildInputs` contains Perl, then the `lib/site_perl` subdirectory of each input is added to the `PERL5LIB` environment variable.
-
-### Python {#setup-hook-python}
-
-Adds the `lib/${python.libPrefix}/site-packages` subdirectory of each build input to the `PYTHONPATH` environment variable.
-
-### pkg-config {#setup-hook-pkg-config}
-
-Adds the `lib/pkgconfig` and `share/pkgconfig` subdirectories of each build input to the `PKG_CONFIG_PATH` environment variable.
-
-### Automake {#setup-hook-automake}
-
-Adds the `share/aclocal` subdirectory of each build input to the `ACLOCAL_PATH` environment variable.
-
-### Autoconf {#setup-hook-autoconf}
-
-The `autoreconfHook` derivation adds `autoreconfPhase`, which runs autoreconf, libtoolize and automake, essentially preparing the configure script in autotools-based builds. Most autotools-based packages come with the configure script pre-generated, but this hook is necessary for a few packages and when you need to patch the package’s configure scripts.
-
-### libxml2 {#setup-hook-libxml2}
-
-Adds every file named `catalog.xml` found under the `xml/dtd` and `xml/xsl` subdirectories of each build input to the `XML_CATALOG_FILES` environment variable.
-
-### teTeX / TeX Live {#tetex-tex-live}
-
-Adds the `share/texmf-nix` subdirectory of each build input to the `TEXINPUTS` environment variable.
-
-### Qt 4 {#qt-4}
-
-Sets the `QTDIR` environment variable to Qt’s path.
-
-### gdk-pixbuf {#setup-hook-gdk-pixbuf}
-
-Exports `GDK_PIXBUF_MODULE_FILE` environment variable to the builder. Add librsvg package to `buildInputs` to get svg support. See also the [setup hook description in GNOME platform docs](#ssec-gnome-hooks-gdk-pixbuf).
-
-### GHC {#ghc}
-
-Creates a temporary package database and registers every Haskell build input in it (TODO: how?).
-
-### GNOME platform {#gnome-platform}
-
-Hooks related to GNOME platform and related libraries like GLib, GTK and GStreamer are described in [](#sec-language-gnome).
-
-### autoPatchelfHook {#setup-hook-autopatchelfhook}
-
-This is a special setup hook which helps in packaging proprietary software in that it automatically tries to find missing shared library dependencies of ELF files based on the given `buildInputs` and `nativeBuildInputs`.
-
-You can also specify a `runtimeDependencies` variable which lists dependencies to be unconditionally added to rpath of all executables. This is useful for programs that use dlopen 3 to load libraries at runtime.
-
-In certain situations you may want to run the main command (`autoPatchelf`) of the setup hook on a file or a set of directories instead of unconditionally patching all outputs. This can be done by setting the `dontAutoPatchelf` environment variable to a non-empty value.
-
-By default `autoPatchelf` will fail as soon as any ELF file requires a dependency which cannot be resolved via the given build inputs. In some situations you might prefer to just leave missing dependencies unpatched and continue to patch the rest. This can be achieved by setting the `autoPatchelfIgnoreMissingDeps` environment variable to a non-empty value. `autoPatchelfIgnoreMissingDeps` can be set to a list like `autoPatchelfIgnoreMissingDeps = [ "libcuda.so.1" "libcudart.so.1" ];` or to simply `[ "*" ]` to ignore all missing dependencies.
-
-The `autoPatchelf` command also recognizes a `--no-recurse` command line flag, which prevents it from recursing into subdirectories.
-
-### breakpointHook {#breakpointhook}
-
-This hook will make a build pause instead of stopping when a failure happens. It prevents nix from cleaning up the build environment immediately and allows the user to attach to a build environment using the `cntr` command. Upon build error it will print instructions on how to use `cntr`, which can be used to enter the environment for debugging. Installing cntr and running the command will provide shell access to the build sandbox of failed build. At `/var/lib/cntr` the sandboxed filesystem is mounted. All commands and files of the system are still accessible within the shell. To execute commands from the sandbox use the cntr exec subcommand. `cntr` is only supported on Linux-based platforms. To use it first add `cntr` to your `environment.systemPackages` on NixOS or alternatively to the root user on non-NixOS systems. Then in the package that is supposed to be inspected, add `breakpointHook` to `nativeBuildInputs`.
-
-```nix
-nativeBuildInputs = [ breakpointHook ];
-```
-
-When a build failure happens there will be an instruction printed that shows how to attach with `cntr` to the build sandbox.
-
-::: {.note}
-::: {.title}
-Caution with remote builds
-:::
-
-This won’t work with remote builds as the build environment is on a different machine and can’t be accessed by `cntr`. Remote builds can be turned off by setting `--option builders ''` for `nix-build` or `--builders ''` for `nix build`.
-:::
-
-### installShellFiles {#installshellfiles}
-
-This hook helps with installing manpages and shell completion files. It exposes 2 shell functions `installManPage` and `installShellCompletion` that can be used from your `postInstall` hook.
-
-The `installManPage` function takes one or more paths to manpages to install. The manpages must have a section suffix, and may optionally be compressed (with `.gz` suffix). This function will place them into the correct directory.
-
-The `installShellCompletion` function takes one or more paths to shell completion files. By default it will autodetect the shell type from the completion file extension, but you may also specify it by passing one of `--bash`, `--fish`, or `--zsh`. These flags apply to all paths listed after them (up until another shell flag is given). Each path may also have a custom installation name provided by providing a flag `--name NAME` before the path. If this flag is not provided, zsh completions will be renamed automatically such that `foobar.zsh` becomes `_foobar`. A root name may be provided for all paths using the flag `--cmd NAME`; this synthesizes the appropriate name depending on the shell (e.g. `--cmd foo` will synthesize the name `foo.bash` for bash and `_foo` for zsh). The path may also be a fifo or named fd (such as produced by `<(cmd)`), in which case the shell and name must be provided.
-
-```nix
-nativeBuildInputs = [ installShellFiles ];
-postInstall = ''
-  installManPage doc/foobar.1 doc/barfoo.3
-  # explicit behavior
-  installShellCompletion --bash --name foobar.bash share/completions.bash
-  installShellCompletion --fish --name foobar.fish share/completions.fish
-  installShellCompletion --zsh --name _foobar share/completions.zsh
-  # implicit behavior
-  installShellCompletion share/completions/foobar.{bash,fish,zsh}
-  # using named fd
-  installShellCompletion --cmd foobar \
-    --bash <($out/bin/foobar --bash-completion) \
-    --fish <($out/bin/foobar --fish-completion) \
-    --zsh <($out/bin/foobar --zsh-completion)
-'';
-```
-
-### libiconv, libintl {#libiconv-libintl}
-
-A few libraries automatically add to `NIX_LDFLAGS` their library, making their symbols automatically available to the linker. This includes libiconv and libintl (gettext). This is done to provide compatibility between GNU Linux, where libiconv and libintl are bundled in, and other systems where that might not be the case. Sometimes, this behavior is not desired. To disable this behavior, set `dontAddExtraLibs`.
-
-### validatePkgConfig {#validatepkgconfig}
-
-The `validatePkgConfig` hook validates all pkg-config (`.pc`) files in a package. This helps catching some common errors in pkg-config files, such as undefined variables.
-
-### cmake {#cmake}
-
-Overrides the default configure phase to run the CMake command. By default, we use the Make generator of CMake. In addition, dependencies are added automatically to `CMAKE_PREFIX_PATH` so that packages are correctly detected by CMake. Some additional flags are passed in to give similar behavior to configure-based packages. You can disable this hook’s behavior by setting `configurePhase` to a custom value, or by setting `dontUseCmakeConfigure`. `cmakeFlags` controls flags passed only to CMake. By default, parallel building is enabled as CMake supports parallel building almost everywhere. When Ninja is also in use, CMake will detect that and use the ninja generator.
-
-### xcbuildHook {#xcbuildhook}
-
-Overrides the build and install phases to run the "xcbuild" command. This hook is needed when a project only comes with build files for the XCode build system. You can disable this behavior by setting buildPhase and configurePhase to a custom value. xcbuildFlags controls flags passed only to xcbuild.
-
-### Meson {#meson}
-
-Overrides the configure phase to run meson to generate Ninja files. To run these files, you should accompany Meson with ninja. By default, `enableParallelBuilding` is enabled as Meson supports parallel building almost everywhere.
-
-#### Variables controlling Meson {#variables-controlling-meson}
-
-##### `mesonFlags` {#mesonflags}
-
-Controls the flags passed to meson.
-
-##### `mesonBuildType` {#mesonbuildtype}
-
-Which [`--buildtype`](https://mesonbuild.com/Builtin-options.html#core-options) to pass to Meson. We default to `plain`.
-
-##### `mesonAutoFeatures` {#mesonautofeatures}
-
-What value to set [`-Dauto_features=`](https://mesonbuild.com/Builtin-options.html#core-options) to. We default to `enabled`.
-
-##### `mesonWrapMode` {#mesonwrapmode}
-
-What value to set [`-Dwrap_mode=`](https://mesonbuild.com/Builtin-options.html#core-options) to. We default to `nodownload` as we disallow network access.
-
-##### `dontUseMesonConfigure` {#dontusemesonconfigure}
-
-Disables using Meson’s `configurePhase`.
-
-### ninja {#ninja}
-
-Overrides the build, install, and check phase to run ninja instead of make. You can disable this behavior with the `dontUseNinjaBuild`, `dontUseNinjaInstall`, and `dontUseNinjaCheck`, respectively. Parallel building is enabled by default in Ninja.
-
-### unzip {#unzip}
-
-This setup hook will allow you to unzip .zip files specified in `$src`. There are many similar packages like `unrar`, `undmg`, etc.
-
-### wafHook {#wafhook}
+### Other hooks {#stdenv-other-hooks}
 
-Overrides the configure, build, and install phases. This will run the “waf” script used by many projects. If `wafPath` (default `./waf`) doesn’t exist, it will copy the version of waf available in Nixpkgs. `wafFlags` can be used to pass flags to the waf script.
+Many other packages provide hooks, that are not part of `stdenv`. You can find
+these in the [Hooks Reference](#chap-hooks).
 
-### scons {#scons}
+### Compiler and Linker wrapper hooks {#compiler-linker-wrapper-hooks}
 
-Overrides the build, install, and check phases. This uses the scons build system as a replacement for make. scons does not provide a configure phase, so everything is managed at build and install time.
+If the file `${cc}/nix-support/cc-wrapper-hook` exists, it will be run at the end of the [compiler wrapper](#cc-wrapper).
+If the file `${binutils}/nix-support/post-link-hook` exists, it will be run at the end of the linker wrapper.
+These hooks allow a user to inject code into the wrappers.
+As an example, these hooks can be used to extract `extraBefore`, `params` and `extraAfter` which store all the command line arguments passed to the compiler and linker respectively.
 
 ## Purity in Nixpkgs {#sec-purity-in-nixpkgs}
 
@@ -1320,7 +1401,7 @@ bin/blib.a(bios_console.o): In function `bios_handle_cup':
 
 Adds the `-O2 -D_FORTIFY_SOURCE=2` compiler options. During code generation the compiler knows a great deal of information about buffer sizes (where possible), and attempts to replace insecure unlimited length buffer function calls with length-limited ones. This is especially useful for old, crufty code. Additionally, format strings in writable memory that contain `%n` are blocked. If an application depends on such a format string, it will need to be worked around.
 
-Additionally, some warnings are enabled which might trigger build failures if compiler warnings are treated as errors in the package build. In this case, set `NIX_CFLAGS_COMPILE` to `-Wno-error=warning-type`.
+Additionally, some warnings are enabled which might trigger build failures if compiler warnings are treated as errors in the package build. In this case, set `env.NIX_CFLAGS_COMPILE` to `-Wno-error=warning-type`.
 
 This needs to be turned off or fixed for errors similar to:
 
diff --git a/nixpkgs/doc/using/configuration.chapter.md b/nixpkgs/doc/using/configuration.chapter.md
index 3c46dc3227a6..e657cb21c295 100644
--- a/nixpkgs/doc/using/configuration.chapter.md
+++ b/nixpkgs/doc/using/configuration.chapter.md
@@ -73,7 +73,7 @@ There are also two ways to try compiling a package which has been marked as unsu
     }
     ```
 
-The difference between a package being unsupported on some system and being broken is admittedly a bit fuzzy. If a program *ought* to work on a certain platform, but doesn't, the platform should be included in `meta.platforms`, but marked as broken with e.g.  `meta.broken = !hostPlatform.isWindows`. Of course, this begs the question of what \"ought\" means exactly. That is left to the package maintainer.
+The difference between a package being unsupported on some system and being broken is admittedly a bit fuzzy. If a program *ought* to work on a certain platform, but doesn't, the platform should be included in `meta.platforms`, but marked as broken with e.g.  `meta.broken = !hostPlatform.isWindows`. Of course, this begs the question of what "ought" means exactly. That is left to the package maintainer.
 
 ## Installing unfree packages {#sec-allow-unfree}
 
@@ -310,16 +310,16 @@ For this to work fully, you must also have this script sourced when you are logg
 
 ```ShellSession
 #!/bin/sh
-if [ -d $HOME/.nix-profile/etc/profile.d ]; then
-  for i in $HOME/.nix-profile/etc/profile.d/*.sh; do
-    if [ -r $i ]; then
-      . $i
+if [ -d "${HOME}/.nix-profile/etc/profile.d" ]; then
+  for i in "${HOME}/.nix-profile/etc/profile.d/"*.sh; do
+    if [ -r "$i" ]; then
+      . "$i"
     fi
   done
 fi
 ```
 
-Now just run `source $HOME/.profile` and you can starting loading man pages from your environment.
+Now just run `. "${HOME}/.profile"` and you can start loading man pages from your environment.
 
 ### GNU info setup {#sec-gnu-info-setup}
 
diff --git a/nixpkgs/doc/using/overrides.chapter.md b/nixpkgs/doc/using/overrides.chapter.md
index a97a39354a9d..198b4504197d 100644
--- a/nixpkgs/doc/using/overrides.chapter.md
+++ b/nixpkgs/doc/using/overrides.chapter.md
@@ -63,7 +63,7 @@ You should prefer `overrideAttrs` in almost all cases, see its documentation for
 :::
 
 ::: {.warning}
-Do not use this function in Nixpkgs as it evaluates a Derivation before modifying it, which breaks package abstraction and removes error-checking of function arguments. In addition, this evaluation-per-function application incurs a performance penalty, which can become a problem if many overrides are used. It is only intended for ad-hoc customisation, such as in `~/.config/nixpkgs/config.nix`.
+Do not use this function in Nixpkgs as it evaluates a derivation before modifying it, which breaks package abstraction. In addition, this evaluation-per-function application incurs a performance penalty, which can become a problem if many overrides are used. It is only intended for ad-hoc customisation, such as in `~/.config/nixpkgs/config.nix`.
 :::
 
 The function `overrideDerivation` creates a new derivation based on an existing one by overriding the original's attributes with the attribute set produced by the specified function. This function is available on all derivations defined using the `makeOverridable` function. Most standard derivation-producing functions, such as `stdenv.mkDerivation`, are defined using this function, which means most packages in the nixpkgs expression, `pkgs`, have this function.
@@ -74,8 +74,8 @@ Example usage:
 mySed = pkgs.gnused.overrideDerivation (oldAttrs: {
   name = "sed-4.2.2-pre";
   src = fetchurl {
-    url = ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2;
-    sha256 = "11nq06d131y4wmf3drm0yk502d2xc6n5qy82cg88rb9nqd2lj41k";
+    url = "ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2";
+    hash = "sha256-MxBJRcM2rYzQYwJ5XKxhXTQByvSg5jZc5cSHEZoB2IY=";
   };
   patches = [];
 });