author    Alyssa Ross <hi@alyssa.is>  2023-11-16 12:53:32 +0100
committer Alyssa Ross <hi@alyssa.is>  2023-11-16 12:53:32 +0100
commit    67419f0e56f99b0ebbe14574d3492110ac84c8d6 (patch)
tree      3abc8e1606a2c80b6f5d14fef175e50800202163 /nixpkgs/lib
parent    a2c1eff83c3118a9aee8076c7f84f58137416b6e (diff)
parent    9008bc4eb62c878d0812105ea1b34255d651df88 (diff)
Merge branch 'nixos-unstable-small' of https://github.com/NixOS/nixpkgs into HEAD
Diffstat (limited to 'nixpkgs/lib')
-rw-r--r--  nixpkgs/lib/README.md             |  20
-rw-r--r--  nixpkgs/lib/asserts.nix           |  29
-rw-r--r--  nixpkgs/lib/customisation.nix     |  35
-rw-r--r--  nixpkgs/lib/default.nix           |   4
-rw-r--r--  nixpkgs/lib/fileset/README.md     |   4
-rw-r--r--  nixpkgs/lib/fileset/default.nix   | 136
-rw-r--r--  nixpkgs/lib/fileset/internal.nix  | 110
-rwxr-xr-x  nixpkgs/lib/fileset/tests.sh      | 361
-rw-r--r--  nixpkgs/lib/licenses.nix          |  25
-rw-r--r--  nixpkgs/lib/lists.nix             |  13
-rw-r--r--  nixpkgs/lib/strings.nix           |  14
-rw-r--r--  nixpkgs/lib/systems/inspect.nix   |  26
-rw-r--r--  nixpkgs/lib/tests/misc.nix        |  14
-rw-r--r--  nixpkgs/lib/trivial.nix           |  34
14 files changed, 705 insertions, 120 deletions
diff --git a/nixpkgs/lib/README.md b/nixpkgs/lib/README.md
index 627086843db7..220940bc2122 100644
--- a/nixpkgs/lib/README.md
+++ b/nixpkgs/lib/README.md
@@ -74,3 +74,23 @@ path/tests/prop.sh
 # Run the lib.fileset tests
 fileset/tests.sh
 ```
+
+## Commit conventions
+
+- Make sure you read about the [commit conventions](../CONTRIBUTING.md#commit-conventions) common to Nixpkgs as a whole.
+
+- Format the commit messages in the following way:
+
+  ```
+  lib.(section): (init | add additional argument | refactor | etc)
+
+  (Motivation for change. Additional information.)
+  ```
+
+  Examples:
+
+  * lib.getExe': check arguments
+  * lib.fileset: Add an additional argument in the design docs
+
+    Closes #264537
+
diff --git a/nixpkgs/lib/asserts.nix b/nixpkgs/lib/asserts.nix
index 98e0b490acf2..8d0a621f4c1c 100644
--- a/nixpkgs/lib/asserts.nix
+++ b/nixpkgs/lib/asserts.nix
@@ -50,4 +50,33 @@ rec {
       lib.generators.toPretty {} xs}, but is: ${
         lib.generators.toPretty {} val}";
 
+  /* Specialized `assertMsg` for checking if every one of `vals` is one of the elements
+     of the list `xs`. Useful for checking lists of supported attributes.
+
+     Example:
+       let sslLibraries = [ "libressl" "bearssl" ];
+       in assertEachOneOf "sslLibraries" sslLibraries [ "openssl" "bearssl" ]
+       stderr> error: each element in sslLibraries must be one of [
+       stderr>   "openssl"
+       stderr>   "bearssl"
+       stderr> ], but is: [
+       stderr>   "libressl"
+       stderr>   "bearssl"
+       stderr> ]
+
+     Type:
+       assertEachOneOf :: String -> List ComparableVal -> List ComparableVal -> Bool
+  */
+  assertEachOneOf =
+    # The name of the variable the user entered `vals` into, for inclusion in the error message
+    name:
+    # The list of values the user provided, to be compared against the valid values in `xs`
+    vals:
+    # The list of valid values
+    xs:
+    assertMsg
+    (lib.all (val: lib.elem val xs) vals)
+    "each element in ${name} must be one of ${
+      lib.generators.toPretty {} xs}, but is: ${
+        lib.generators.toPretty {} vals}";
 }
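
A minimal usage sketch of the new `assertEachOneOf` (the `validSslLibraries` list and the surrounding function are made up for illustration; the call matches the docstring above):

```nix
{ lib, sslLibraries ? [ "openssl" ] }:

let
  validSslLibraries = [ "openssl" "bearssl" "libressl" ];
in
# Evaluates to true, or aborts with the pretty-printed error shown in the docstring
# if any element of sslLibraries is not listed in validSslLibraries.
assert lib.asserts.assertEachOneOf "sslLibraries" sslLibraries validSslLibraries;
{
  inherit sslLibraries;
}
```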
diff --git a/nixpkgs/lib/customisation.nix b/nixpkgs/lib/customisation.nix
index 61bb531d2f62..08fc5db0614d 100644
--- a/nixpkgs/lib/customisation.nix
+++ b/nixpkgs/lib/customisation.nix
@@ -76,19 +76,22 @@ rec {
      Type:
        makeOverridable :: (AttrSet -> a) -> AttrSet -> a
   */
-  makeOverridable = f: lib.setFunctionArgs
-    (origArgs: let
+  makeOverridable = f:
+    let
+      # Creates a functor with the same arguments as f
+      mirrorArgs = lib.mirrorFunctionArgs f;
+    in
+    mirrorArgs (origArgs:
+    let
       result = f origArgs;
 
-      # Creates a functor with the same arguments as f
-      copyArgs = g: lib.setFunctionArgs g (lib.functionArgs f);
       # Changes the original arguments with (potentially a function that returns) a set of new attributes
       overrideWith = newArgs: origArgs // (if lib.isFunction newArgs then newArgs origArgs else newArgs);
 
       # Re-call the function but with different arguments
-      overrideArgs = copyArgs (newArgs: makeOverridable f (overrideWith newArgs));
+      overrideArgs = mirrorArgs (newArgs: makeOverridable f (overrideWith newArgs));
       # Change the result of the function call by applying g to it
-      overrideResult = g: makeOverridable (copyArgs (args: g (f args))) origArgs;
+      overrideResult = g: makeOverridable (mirrorArgs (args: g (f args))) origArgs;
     in
       if builtins.isAttrs result then
         result // {
@@ -102,8 +105,7 @@ rec {
         lib.setFunctionArgs result (lib.functionArgs result) // {
           override = overrideArgs;
         }
-      else result)
-    (lib.functionArgs f);
+      else result);
 
 
   /* Call the package function in the file `fn` with the required
@@ -343,7 +345,24 @@ rec {
     , newScope
     }:
     { otherSplices
+    # Attrs from `self` which won't be spliced.
+    # Avoid using `keep`; it's only used for a Python hook workaround, added in PR #104201.
+    # ex: `keep = (self: { inherit (self) aAttr; })`
     , keep ? (_self: {})
+    # Additional attrs to add to the set's `callPackage`.
+    # When the package is from a subset within `spliced0` (but not a subset within
+    # a package, see issue #211340), it will be spliced.
+    # When using a package outside the set that is available from `pkgs`, the package from `pkgs.__splicedPackages` is used.
+    # If the package is not available within the set or in `pkgs`, such as a package in a let binding, it will not be spliced.
+    # ex:
+    # ```
+    # nix-repl> darwin.apple_sdk.frameworks.CoreFoundation
+    #   «derivation ...CoreFoundation-11.0.0.drv»
+    # nix-repl> darwin.CoreFoundation
+    #   error: attribute 'CoreFoundation' missing
+    # nix-repl> darwin.callPackage ({ CoreFoundation }: CoreFoundation) { }
+    #   «derivation ...CoreFoundation-11.0.0.drv»
+    # ```
     , extra ? (_spliced0: {})
     , f
     }:
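
A small sketch of the behaviour this hunk preserves while switching to `mirrorFunctionArgs`: the overridable wrapper keeps `f`'s argument metadata and still supports `.override` (the `lib` import path is an assumption about where this tree is checked out):

```nix
let
  lib = import ./nixpkgs/lib;  # assumed checkout layout
  f = { a, b ? 2 }: { sum = a + b; };
  g = lib.makeOverridable f;
in {
  # Argument metadata is mirrored from f, so callPackage-style machinery keeps working
  args = lib.functionArgs g;                               # => { a = false; b = true; }
  plain = (g { a = 1; }).sum;                              # => 3
  overridden = ((g { a = 1; }).override { b = 10; }).sum;  # => 11
}
```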
diff --git a/nixpkgs/lib/default.nix b/nixpkgs/lib/default.nix
index fe737a125e68..a2958e561cf3 100644
--- a/nixpkgs/lib/default.nix
+++ b/nixpkgs/lib/default.nix
@@ -74,7 +74,7 @@ let
       importJSON importTOML warn warnIf warnIfNot throwIf throwIfNot checkListOfEnum
       info showWarnings nixpkgsVersion version isInOldestRelease
       mod compare splitByAndCompare
-      functionArgs setFunctionArgs isFunction toFunction
+      functionArgs setFunctionArgs isFunction toFunction mirrorFunctionArgs
       toHexString toBaseDigits inPureEvalMode;
     inherit (self.fixedPoints) fix fix' converge extends composeExtensions
       composeManyExtensions makeExtensible makeExtensibleWithCustomName;
@@ -92,7 +92,7 @@ let
       concatMap flatten remove findSingle findFirst any all count
       optional optionals toList range replicate partition zipListsWith zipLists
       reverseList listDfs toposort sort naturalSort compareLists take
-      drop sublist last init crossLists unique intersectLists
+      drop sublist last init crossLists unique allUnique intersectLists
       subtractLists mutuallyExclusive groupBy groupBy';
     inherit (self.strings) concatStrings concatMapStrings concatImapStrings
       intersperse concatStringsSep concatMapStringsSep
diff --git a/nixpkgs/lib/fileset/README.md b/nixpkgs/lib/fileset/README.md
index 16ab58e2f266..91f892a1be95 100644
--- a/nixpkgs/lib/fileset/README.md
+++ b/nixpkgs/lib/fileset/README.md
@@ -241,8 +241,4 @@ Arguments:
 ## To update in the future
 
 Here's a list of places in the library that need to be updated in the future:
-- > The file set library is currently somewhat limited but is being expanded to include more functions over time.
-
-  in [the manual](../../doc/functions/fileset.section.md)
-- If/Once a function to convert `lib.sources` values into file sets exists, the `_coerce` and `toSource` functions should be updated to mention that function in the error when such a value is passed
 - If/Once a function exists that can optionally include a path depending on whether it exists, the error message for the path not existing in `_coerce` should mention the new function
diff --git a/nixpkgs/lib/fileset/default.nix b/nixpkgs/lib/fileset/default.nix
index 4a97633b4a89..d90c770633d8 100644
--- a/nixpkgs/lib/fileset/default.nix
+++ b/nixpkgs/lib/fileset/default.nix
@@ -3,8 +3,10 @@ let
 
   inherit (import ./internal.nix { inherit lib; })
     _coerce
+    _singleton
     _coerceMany
     _toSourceFilter
+    _fromSourceFilter
     _unionMany
     _fileFilter
     _printFileset
@@ -122,11 +124,10 @@ in {
       Paths in [strings](https://nixos.org/manual/nix/stable/language/values.html#type-string), including Nix store paths, cannot be passed as `root`.
       `root` has to be a directory.
 
-<!-- Ignore the indentation here, this is a nixdoc rendering bug that needs to be fixed: https://github.com/nix-community/nixdoc/issues/75 -->
-:::{.note}
-Changing `root` only affects the directory structure of the resulting store path, it does not change which files are added to the store.
-The only way to change which files get added to the store is by changing the `fileset` attribute.
-:::
+      :::{.note}
+      Changing `root` only affects the directory structure of the resulting store path, it does not change which files are added to the store.
+      The only way to change which files get added to the store is by changing the `fileset` attribute.
+      :::
     */
     root,
     /*
@@ -135,10 +136,9 @@ The only way to change which files get added to the store is by changing the `fi
       This argument can also be a path,
       which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
 
-<!-- Ignore the indentation here, this is a nixdoc rendering bug that needs to be fixed: https://github.com/nix-community/nixdoc/issues/75 -->
-:::{.note}
-If a directory does not recursively contain any file, it is omitted from the store path contents.
-:::
+      :::{.note}
+      If a directory does not recursively contain any file, it is omitted from the store path contents.
+      :::
 
     */
     fileset,
@@ -154,9 +154,14 @@ If a directory does not recursively contain any file, it is omitted from the sto
       sourceFilter = _toSourceFilter fileset;
     in
     if ! isPath root then
-      if isStringLike root then
+      if root ? _isLibCleanSourceWith then
         throw ''
-          lib.fileset.toSource: `root` ("${toString root}") is a string-like value, but it should be a path instead.
+          lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
+              To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
+              Note that this only works for sources created from paths.''
+      else if isStringLike root then
+        throw ''
+          lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead.
               Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
       else
         throw ''
@@ -165,13 +170,13 @@ If a directory does not recursively contain any file, it is omitted from the sto
     # See also ../path/README.md
     else if ! fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
       throw ''
-        lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` ("${toString root}"):
-            `root`: root "${toString rootFilesystemRoot}"
-            `fileset`: root "${toString filesetFilesystemRoot}"
-            Different roots are not supported.''
+        lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` (${toString root}):
+            `root`: Filesystem root is "${toString rootFilesystemRoot}"
+            `fileset`: Filesystem root is "${toString filesetFilesystemRoot}"
+            Different filesystem roots are not supported.''
     else if ! pathExists root then
       throw ''
-        lib.fileset.toSource: `root` (${toString root}) does not exist.''
+        lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
     else if pathType root != "directory" then
       throw ''
         lib.fileset.toSource: `root` (${toString root}) is a file, but it should be a directory instead. Potential solutions:
@@ -183,7 +188,7 @@ If a directory does not recursively contain any file, it is omitted from the sto
             - Set `root` to ${toString fileset._internalBase} or any directory higher up. This changes the layout of the resulting store path.
             - Set `fileset` to a file set that cannot contain files outside the `root` (${toString root}). This could change the files included in the result.''
     else
-      builtins.seq sourceFilter
+      seq sourceFilter
       cleanSourceWith {
         name = "source";
         src = root;
@@ -191,6 +196,75 @@ If a directory does not recursively contain any file, it is omitted from the sto
       };
 
   /*
+  Create a file set with the same files as a `lib.sources`-based value.
+  This does not import any of the files into the store.
+
+  This can be used to gradually migrate from `lib.sources`-based filtering to `lib.fileset`.
+
+  A file set can be turned back into a source using [`toSource`](#function-library-lib.fileset.toSource).
+
+  :::{.note}
+  File sets cannot represent empty directories.
+  Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
+  :::
+
+  Type:
+    fromSource :: SourceLike -> FileSet
+
+  Example:
+    # There's no cleanSource-like function for file sets yet,
+    # but we can just convert cleanSource to a file set and use it that way
+    toSource {
+      root = ./.;
+      fileset = fromSource (lib.sources.cleanSource ./.);
+    }
+
+    # Keeping a previous sourceByRegex (which could be migrated to `lib.fileset.unions`),
+    # but removing a subdirectory using file set functions
+    difference
+      (fromSource (lib.sources.sourceByRegex ./. [
+        "^README\.md$"
+        # This regex includes everything in ./doc
+        "^doc(/.*)?$"
+      ]))
+      ./doc/generated
+
+    # Use cleanSource, but limit it to only include ./Makefile and files under ./src
+    intersection
+      (fromSource (lib.sources.cleanSource ./.))
+      (unions [
+        ./Makefile
+        ./src
+      ]);
+  */
+  fromSource = source:
+    let
+      # This function uses `._isLibCleanSourceWith`, `.origSrc` and `.filter`,
+      # which are technically internal to lib.sources,
+      # but we'll allow this since both libraries are in the same code base
+      # and this function is a bridge between them.
+      isFiltered = source ? _isLibCleanSourceWith;
+      path = if isFiltered then source.origSrc else source;
+    in
+    # We can only support sources created from paths
+    if ! isPath path then
+      if isStringLike path then
+        throw ''
+          lib.fileset.fromSource: The source origin of the argument is a string-like value ("${toString path}"), but it should be a path instead.
+              Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.''
+      else
+        throw ''
+          lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
+    else if ! pathExists path then
+      throw ''
+        lib.fileset.fromSource: The source origin (${toString path}) of the argument does not exist.''
+    else if isFiltered then
+      _fromSourceFilter path source.filter
+    else
+      # If there's no filter, no need to run the expensive conversion, all subpaths will be included
+      _singleton path;
+
+  /*
     The file set containing all files that are in either of two given file sets.
     This is the same as [`unions`](#function-library-lib.fileset.unions),
     but takes just two file sets instead of a list.
@@ -223,11 +297,11 @@ If a directory does not recursively contain any file, it is omitted from the sto
     _unionMany
       (_coerceMany "lib.fileset.union" [
         {
-          context = "first argument";
+          context = "First argument";
           value = fileset1;
         }
         {
-          context = "second argument";
+          context = "Second argument";
           value = fileset2;
         }
       ]);
@@ -269,12 +343,13 @@ If a directory does not recursively contain any file, it is omitted from the sto
     # which get [implicitly coerced to file sets](#sec-fileset-path-coercion).
     filesets:
     if ! isList filesets then
-      throw "lib.fileset.unions: Expected argument to be a list, but got a ${typeOf filesets}."
+      throw ''
+        lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
     else
       pipe filesets [
         # Annotate the elements with context, used by _coerceMany for better errors
         (imap0 (i: el: {
-          context = "element ${toString i}";
+          context = "Element ${toString i}";
           value = el;
         }))
         (_coerceMany "lib.fileset.unions")
@@ -305,7 +380,7 @@ If a directory does not recursively contain any file, it is omitted from the sto
       fileFilter (file: hasPrefix "." file.name) ./.
 
       # Include all regular files (not symlinks or others) in the current directory
-      fileFilter (file: file.type == "regular")
+      fileFilter (file: file.type == "regular") ./.
   */
   fileFilter =
     /*
@@ -325,10 +400,11 @@ If a directory does not recursively contain any file, it is omitted from the sto
     # The file set to filter based on the predicate function
     fileset:
     if ! isFunction predicate then
-      throw "lib.fileset.fileFilter: Expected the first argument to be a function, but it's a ${typeOf predicate} instead."
+      throw ''
+        lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
     else
       _fileFilter predicate
-        (_coerce "lib.fileset.fileFilter: second argument" fileset);
+        (_coerce "lib.fileset.fileFilter: Second argument" fileset);
 
   /*
     The file set containing all files that are in both of two given file sets.
@@ -356,11 +432,11 @@ If a directory does not recursively contain any file, it is omitted from the sto
     let
       filesets = _coerceMany "lib.fileset.intersection" [
         {
-          context = "first argument";
+          context = "First argument";
           value = fileset1;
         }
         {
-          context = "second argument";
+          context = "Second argument";
           value = fileset2;
         }
       ];
@@ -408,11 +484,11 @@ If a directory does not recursively contain any file, it is omitted from the sto
     let
       filesets = _coerceMany "lib.fileset.difference" [
         {
-          context = "first argument (positive set)";
+          context = "First argument (positive set)";
           value = positive;
         }
         {
-          context = "second argument (negative set)";
+          context = "Second argument (negative set)";
           value = negative;
         }
       ];
@@ -456,7 +532,7 @@ If a directory does not recursively contain any file, it is omitted from the sto
     let
       # "fileset" would be a better name, but that would clash with the argument name,
       # and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
-      actualFileset = _coerce "lib.fileset.trace: argument" fileset;
+      actualFileset = _coerce "lib.fileset.trace: Argument" fileset;
     in
     seq
       (_printFileset actualFileset)
@@ -503,7 +579,7 @@ If a directory does not recursively contain any file, it is omitted from the sto
     let
       # "fileset" would be a better name, but that would clash with the argument name,
       # and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
-      actualFileset = _coerce "lib.fileset.traceVal: argument" fileset;
+      actualFileset = _coerce "lib.fileset.traceVal: Argument" fileset;
     in
     seq
       (_printFileset actualFileset)
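
A rough sketch of the behaviour added in this file: a `lib.sources`-based `root` is now rejected with a pointer to `fromSource`, and the suggested migration keeps `root` a path (the paths and the `lib` import are placeholders):

```nix
let
  lib = import ./nixpkgs/lib;  # assumed checkout layout
  cleaned = lib.sources.cleanSource ./.;
in {
  # Forcing this throws:
  # lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead. ...
  wrong = lib.fileset.toSource { root = cleaned; fileset = ./.; };

  # The suggested migration: convert the source to a file set and keep `root` a path.
  right = lib.fileset.toSource {
    root = ./.;
    fileset = lib.fileset.fromSource cleaned;
  };
}
```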
diff --git a/nixpkgs/lib/fileset/internal.nix b/nixpkgs/lib/fileset/internal.nix
index b919a5de3eef..b245caade1f5 100644
--- a/nixpkgs/lib/fileset/internal.nix
+++ b/nixpkgs/lib/fileset/internal.nix
@@ -7,7 +7,6 @@ let
     isString
     pathExists
     readDir
-    seq
     split
     trace
     typeOf
@@ -17,7 +16,6 @@ let
     attrNames
     attrValues
     mapAttrs
-    setAttrByPath
     zipAttrsWith
     ;
 
@@ -28,7 +26,6 @@ let
   inherit (lib.lists)
     all
     commonPrefix
-    drop
     elemAt
     filter
     findFirst
@@ -170,7 +167,12 @@ rec {
       else
         value
     else if ! isPath value then
-      if isStringLike value then
+      if value ? _isLibCleanSourceWith then
+        throw ''
+          ${context} is a `lib.sources`-based value, but it should be a file set or a path instead.
+              To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
+              Note that this only works for sources created from paths.''
+      else if isStringLike value then
         throw ''
           ${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
               Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
@@ -179,7 +181,7 @@ rec {
           ${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
     else if ! pathExists value then
       throw ''
-        ${context} (${toString value}) does not exist.''
+        ${context} (${toString value}) is a path that does not exist.''
     else
       _singleton value;
 
@@ -208,9 +210,9 @@ rec {
     if firstWithBase != null && differentIndex != null then
       throw ''
         ${functionContext}: Filesystem roots are not the same:
-            ${(head list).context}: root "${toString firstBaseRoot}"
-            ${(elemAt list differentIndex).context}: root "${toString (elemAt filesets differentIndex)._internalBaseRoot}"
-            Different roots are not supported.''
+            ${(head list).context}: Filesystem root is "${toString firstBaseRoot}"
+            ${(elemAt list differentIndex).context}: Filesystem root is "${toString (elemAt filesets differentIndex)._internalBaseRoot}"
+            Different filesystem roots are not supported.''
     else
       filesets;
 
@@ -473,6 +475,59 @@ rec {
     else
       nonEmpty;
 
+  # Turn a builtins.filterSource-based source filter on a root path into a file set
+  # containing only files included by the filter.
+  # The filter is lazily called as necessary to determine whether paths are included
+  # Type: Path -> (String -> String -> Bool) -> fileset
+  _fromSourceFilter = root: sourceFilter:
+    let
+      # During the recursion we need to track both:
+      # - The path value such that we can safely call `readDir` on it
+      # - The path string value such that we can correctly call the `filter` with it
+      #
+      # While we could just recurse with the path value,
+      # this would then require converting it to a path string for every path,
+      # which is a fairly expensive operation
+
+      # Create a file set from a directory entry
+      fromDirEntry = path: pathString: type:
+        # The filter needs to run on the path as a string
+        if ! sourceFilter pathString type then
+          null
+        else if type == "directory" then
+          fromDir path pathString
+        else
+          type;
+
+      # Create a file set from a directory
+      fromDir = path: pathString:
+        mapAttrs
+          # This looks a bit funny, but we need both the path-based and the path string-based values
+          (name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
+          # We need to readDir on the path value, because reading on a path string
+          # would be unspecified if there are multiple filesystem roots
+          (readDir path);
+
+      rootPathType = pathType root;
+
+      # We need to convert the path to a string to imitate what builtins.path calls the filter function with.
+      # We don't want to rely on `toString` for this though because it's not very well defined, see ../path/README.md
+      # So instead we use `lib.path.splitRoot` to safely deconstruct the path into its filesystem root and subpath
+      # We don't need the filesystem root though, builtins.path doesn't expose that in any way to the filter.
+      # So we only need the components, which we then turn into a string as one would expect.
+      rootString = "/" + concatStringsSep "/" (components (splitRoot root).subpath);
+    in
+    if rootPathType == "directory" then
+      # We imitate builtins.path not calling the filter on the root path
+      _create root (fromDir root rootString)
+    else
+      # Direct files are always included by builtins.path without calling the filter
+      # But we need to lift up the base path to its parent to satisfy the base path invariant
+      _create (dirOf root)
+        {
+          ${baseNameOf root} = rootPathType;
+        };
+
   # Transforms the filesetTree of a file set to a shorter base path, e.g.
   # _shortenTreeBase [ "foo" ] (_create /foo/bar null)
   # => { bar = null; }
@@ -731,29 +786,40 @@ rec {
         _differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)
       ) (_directoryEntries path lhs);
 
+  # Filters all files in a file set based on a predicate
+  # Type: ({ name, type, ... } -> Bool) -> FileSet -> FileSet
   _fileFilter = predicate: fileset:
     let
-      recurse = path: tree:
+      # Check the predicate for a single file
+      # Type: String -> String -> filesetTree
+      fromFile = name: type:
+        if
+          predicate {
+            inherit name type;
+            # To ensure forwards compatibility with more arguments being added in the future,
+            # adding an attribute which can't be deconstructed :)
+            "lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file }:`, use `{ name, file, ... }:` instead." = null;
+          }
+        then
+          type
+        else
+          null;
+
+      # Check the predicate for all files in a directory
+      # Type: Path -> filesetTree
+      fromDir = path: tree:
         mapAttrs (name: subtree:
           if isAttrs subtree || subtree == "directory" then
-            recurse (path + "/${name}") subtree
-          else if
-            predicate {
-              inherit name;
-              type = subtree;
-              # To ensure forwards compatibility with more arguments being added in the future,
-              # adding an attribute which can't be deconstructed :)
-              "lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file }:`, use `{ name, file, ... }:` instead." = null;
-            }
-          then
-            subtree
-          else
+            fromDir (path + "/${name}") subtree
+          else if subtree == null then
             null
+          else
+            fromFile name subtree
         ) (_directoryEntries path tree);
     in
     if fileset._internalIsEmptyWithoutBase then
       _emptyWithoutBase
     else
       _create fileset._internalBase
-        (recurse fileset._internalBase fileset._internalTree);
+        (fromDir fileset._internalBase fileset._internalTree);
 }
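
A short sketch of how the reworked `fileFilter` is meant to be called given the forwards-compatibility attribute added above; note the `...` in the predicate's pattern (the import path and the `.nix` filter are illustrative):

```nix
let
  lib = import ./nixpkgs/lib;  # assumed checkout layout
in
lib.fileset.toSource {
  root = ./.;
  # The `...` is required because the predicate also receives the reserved
  # forwards-compatibility attribute shown in the error string above.
  fileset = lib.fileset.fileFilter
    ({ name, type, ... }: type == "regular" && lib.hasSuffix ".nix" name)
    ./.;
}
```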
diff --git a/nixpkgs/lib/fileset/tests.sh b/nixpkgs/lib/fileset/tests.sh
index 2df0727bde38..5ef155d25a88 100755
--- a/nixpkgs/lib/fileset/tests.sh
+++ b/nixpkgs/lib/fileset/tests.sh
@@ -1,5 +1,7 @@
 #!/usr/bin/env bash
 # shellcheck disable=SC2016
+# shellcheck disable=SC2317
+# shellcheck disable=SC2192
 
 # Tests lib.fileset
 # Run:
@@ -224,6 +226,43 @@ withFileMonitor() {
     fi
 }
 
+
+# Create the tree structure declared in the tree variable, usage:
+#
+# tree=(
+#   [a/b] =   # Declare that file       a/b should exist
+#   [c/a] =   # Declare that file       c/a should exist
+#   [c/d/]=   # Declare that directory c/d/ should exist
+# )
+# createTree
+declare -A tree
+createTree() {
+    # Track which paths need to be created
+    local -a dirsToCreate=()
+    local -a filesToCreate=()
+    for p in "${!tree[@]}"; do
+        # If keys end with a `/` we treat them as directories, otherwise files
+        if [[ "$p" =~ /$ ]]; then
+            dirsToCreate+=("$p")
+        else
+            filesToCreate+=("$p")
+        fi
+    done
+
+    # Create all the necessary paths.
+    # This is done with only a fixed number of processes,
+    # in order to not be too slow
+    # Though this does mean we're a bit limited with how many files can be created
+    if (( ${#dirsToCreate[@]} != 0 )); then
+        mkdir -p "${dirsToCreate[@]}"
+    fi
+    if (( ${#filesToCreate[@]} != 0 )); then
+        readarray -d '' -t parentsToCreate < <(dirname -z "${filesToCreate[@]}")
+        mkdir -p "${parentsToCreate[@]}"
+        touch "${filesToCreate[@]}"
+    fi
+}
+
 # Check whether a file set includes/excludes declared paths as expected, usage:
 #
 # tree=(
@@ -232,34 +271,26 @@ withFileMonitor() {
 #   [c/d/]=   # Declare that directory c/d/ should exist and expect it to be excluded in the store path
 # )
 # checkFileset './a' # Pass the fileset as the argument
-declare -A tree
 checkFileset() {
     # New subshell so that we can have a separate trap handler, see `trap` below
     local fileset=$1
 
+    # Create the tree
+    createTree
+
     # Process the tree into separate arrays for included paths, excluded paths and excluded files.
     local -a included=()
     local -a excluded=()
     local -a excludedFiles=()
-    # Track which paths need to be created
-    local -a dirsToCreate=()
-    local -a filesToCreate=()
     for p in "${!tree[@]}"; do
-        # If keys end with a `/` we treat them as directories, otherwise files
-        if [[ "$p" =~ /$ ]]; then
-            dirsToCreate+=("$p")
-            isFile=
-        else
-            filesToCreate+=("$p")
-            isFile=1
-        fi
         case "${tree[$p]}" in
             1)
                 included+=("$p")
                 ;;
             0)
                 excluded+=("$p")
-                if [[ -n "$isFile" ]]; then
+                # If keys end with a `/` we treat them as directories, otherwise files
+                if [[ ! "$p" =~ /$ ]]; then
                     excludedFiles+=("$p")
                 fi
                 ;;
@@ -268,19 +299,6 @@ checkFileset() {
         esac
     done
 
-    # Create all the necessary paths.
-    # This is done with only a fixed number of processes,
-    # in order to not be too slow
-    # Though this does mean we're a bit limited with how many files can be created
-    if (( ${#dirsToCreate[@]} != 0 )); then
-        mkdir -p "${dirsToCreate[@]}"
-    fi
-    if (( ${#filesToCreate[@]} != 0 )); then
-        readarray -d '' -t parentsToCreate < <(dirname -z "${filesToCreate[@]}")
-        mkdir -p "${parentsToCreate[@]}"
-        touch "${filesToCreate[@]}"
-    fi
-
     expression="toSource { root = ./.; fileset = $fileset; }"
 
     # We don't have lambda's in bash unfortunately,
@@ -318,9 +336,13 @@ checkFileset() {
 #### Error messages #####
 
 # Absolute paths in strings cannot be passed as `root`
-expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \("/nix/store/foobar"\) is a string-like value, but it should be a path instead.
+expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
 \s*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
 
+expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
+\s*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
+\s*Note that this only works for sources created from paths.'
+
 # Only paths are accepted as `root`
 expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'
 
@@ -328,14 +350,14 @@ expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `r
 mkdir -p {foo,bar}/mock-root
 expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
   toSource { root = ./foo/mock-root; fileset = ./bar/mock-root; }
-' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \("'"$work"'/foo/mock-root"\):
-\s*`root`: root "'"$work"'/foo/mock-root"
-\s*`fileset`: root "'"$work"'/bar/mock-root"
-\s*Different roots are not supported.'
+' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \('"$work"'/foo/mock-root\):
+\s*`root`: Filesystem root is "'"$work"'/foo/mock-root"
+\s*`fileset`: Filesystem root is "'"$work"'/bar/mock-root"
+\s*Different filesystem roots are not supported.'
 rm -rf -- *
 
 # `root` needs to exist
-expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) does not exist.'
+expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a path that does not exist.'
 
 # `root` needs to be a file
 touch a
@@ -365,9 +387,12 @@ rm -rf -- *
 expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
 expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
 \s*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
+expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
+\s*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
+\s*Note that this only works for sources created from paths.'
 
 # Path coercion errors for non-existent paths
-expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) does not exist.'
+expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.'
 
 # File sets cannot be evaluated directly
 expectFailure 'union ./. ./.' 'lib.fileset: Directly evaluating a file set is not supported.
@@ -490,26 +515,26 @@ mkdir -p {foo,bar}/mock-root
 expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
   toSource { root = ./.; fileset = union ./foo/mock-root ./bar/mock-root; }
 ' 'lib.fileset.union: Filesystem roots are not the same:
-\s*first argument: root "'"$work"'/foo/mock-root"
-\s*second argument: root "'"$work"'/bar/mock-root"
-\s*Different roots are not supported.'
+\s*First argument: Filesystem root is "'"$work"'/foo/mock-root"
+\s*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
+\s*Different filesystem roots are not supported.'
 
 expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
   toSource { root = ./.; fileset = unions [ ./foo/mock-root ./bar/mock-root ]; }
 ' 'lib.fileset.unions: Filesystem roots are not the same:
-\s*element 0: root "'"$work"'/foo/mock-root"
-\s*element 1: root "'"$work"'/bar/mock-root"
-\s*Different roots are not supported.'
+\s*Element 0: Filesystem root is "'"$work"'/foo/mock-root"
+\s*Element 1: Filesystem root is "'"$work"'/bar/mock-root"
+\s*Different filesystem roots are not supported.'
 rm -rf -- *
 
 # Coercion errors show the correct context
-expectFailure 'toSource { root = ./.; fileset = union ./a ./.; }' 'lib.fileset.union: first argument \('"$work"'/a\) does not exist.'
-expectFailure 'toSource { root = ./.; fileset = union ./. ./b; }' 'lib.fileset.union: second argument \('"$work"'/b\) does not exist.'
-expectFailure 'toSource { root = ./.; fileset = unions [ ./a ./. ]; }' 'lib.fileset.unions: element 0 \('"$work"'/a\) does not exist.'
-expectFailure 'toSource { root = ./.; fileset = unions [ ./. ./b ]; }' 'lib.fileset.unions: element 1 \('"$work"'/b\) does not exist.'
+expectFailure 'toSource { root = ./.; fileset = union ./a ./.; }' 'lib.fileset.union: First argument \('"$work"'/a\) is a path that does not exist.'
+expectFailure 'toSource { root = ./.; fileset = union ./. ./b; }' 'lib.fileset.union: Second argument \('"$work"'/b\) is a path that does not exist.'
+expectFailure 'toSource { root = ./.; fileset = unions [ ./a ./. ]; }' 'lib.fileset.unions: Element 0 \('"$work"'/a\) is a path that does not exist.'
+expectFailure 'toSource { root = ./.; fileset = unions [ ./. ./b ]; }' 'lib.fileset.unions: Element 1 \('"$work"'/b\) is a path that does not exist.'
 
 # unions needs a list
-expectFailure 'toSource { root = ./.; fileset = unions null; }' 'lib.fileset.unions: Expected argument to be a list, but got a null.'
+expectFailure 'toSource { root = ./.; fileset = unions null; }' 'lib.fileset.unions: Argument is of type null, but it should be a list instead.'
 
 # The tree of later arguments should not be evaluated if a former argument already includes all files
 tree=()
@@ -603,14 +628,14 @@ mkdir -p {foo,bar}/mock-root
 expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
   toSource { root = ./.; fileset = intersection ./foo/mock-root ./bar/mock-root; }
 ' 'lib.fileset.intersection: Filesystem roots are not the same:
-\s*first argument: root "'"$work"'/foo/mock-root"
-\s*second argument: root "'"$work"'/bar/mock-root"
-\s*Different roots are not supported.'
+\s*First argument: Filesystem root is "'"$work"'/foo/mock-root"
+\s*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
+\s*Different filesystem roots are not supported.'
 rm -rf -- *
 
 # Coercion errors show the correct context
-expectFailure 'toSource { root = ./.; fileset = intersection ./a ./.; }' 'lib.fileset.intersection: first argument \('"$work"'/a\) does not exist.'
-expectFailure 'toSource { root = ./.; fileset = intersection ./. ./b; }' 'lib.fileset.intersection: second argument \('"$work"'/b\) does not exist.'
+expectFailure 'toSource { root = ./.; fileset = intersection ./a ./.; }' 'lib.fileset.intersection: First argument \('"$work"'/a\) is a path that does not exist.'
+expectFailure 'toSource { root = ./.; fileset = intersection ./. ./b; }' 'lib.fileset.intersection: Second argument \('"$work"'/b\) is a path that does not exist.'
 
 # The tree of later arguments should not be evaluated if a former argument already excludes all files
 tree=(
@@ -785,6 +810,13 @@ checkFileset 'difference ./. ./b'
 
 ## File filter
 
+# The first argument needs to be a function
+expectFailure 'fileFilter null (abort "this is not needed")' 'lib.fileset.fileFilter: First argument is of type null, but it should be a function instead.'
+
+# The second argument can be a file set or an existing path
+expectFailure 'fileFilter (file: abort "this is not needed") null' 'lib.fileset.fileFilter: Second argument is of type null, but it should be a file set or a path instead.'
+expectFailure 'fileFilter (file: abort "this is not needed") ./a' 'lib.fileset.fileFilter: Second argument \('"$work"'/a\) is a path that does not exist.'
+
 # The predicate is not called when there's no files
 tree=()
 checkFileset 'fileFilter (file: abort "this is not needed") ./.'
@@ -850,6 +882,26 @@ checkFileset 'union ./c/a (fileFilter (file: assert file.name != "a"; true) ./.)
 # but here we need to use ./c
 checkFileset 'union (fileFilter (file: assert file.name != "a"; true) ./.) ./c'
 
+# Also lazy, the filter isn't called on a filtered out path
+tree=(
+    [a]=1
+    [b]=0
+    [c]=0
+)
+checkFileset 'fileFilter (file: assert file.name != "c"; file.name == "a") (difference ./. ./c)'
+
+# Make sure single files are filtered correctly
+tree=(
+    [a]=1
+    [b]=0
+)
+checkFileset 'fileFilter (file: assert file.name == "a"; true) ./a'
+tree=(
+    [a]=0
+    [b]=0
+)
+checkFileset 'fileFilter (file: assert file.name == "a"; false) ./a'
+
 ## Tracing
 
 # The second trace argument is returned
@@ -995,6 +1047,217 @@ touch 0 "${filesToCreate[@]}"
 expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace"
 rm -rf -- *
 
+## lib.fileset.fromSource
+
+# Check error messages
+expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
+
+expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
+\s*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
+
+expectFailure 'fromSource (lib.cleanSource null)' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
+
+# fromSource on a path works and is the same as coercing that path
+mkdir a
+touch a/b c
+expectEqual 'trace (fromSource ./.) null' 'trace ./. null'
+rm -rf -- *
+
+# Check that converting to a file set doesn't read the included files
+mkdir a
+touch a/b
+run() {
+    expectEqual "trace (fromSource (lib.cleanSourceWith { src = ./a; })) null" "builtins.trace \"$work/a (all files in directory)\" null"
+    rm a/b
+}
+withFileMonitor run a/b
+rm -rf -- *
+
+# Check that converting to a file set doesn't read entries for directories that are filtered out
+mkdir -p a/b
+touch a/b/c
+run() {
+    expectEqual "trace (fromSource (lib.cleanSourceWith {
+      src = ./a;
+      filter = pathString: type: false;
+    })) null" "builtins.trace \"(empty)\" null"
+    rm a/b/c
+    rmdir a/b
+}
+withFileMonitor run a/b
+rm -rf -- *
+
+# The filter is not needed on empty directories
+expectEqual 'trace (fromSource (lib.cleanSourceWith {
+  src = ./.;
+  filter = abort "filter should not be needed";
+})) null' 'trace _emptyWithoutBase null'
+
+# Single files also work
+touch a b
+expectEqual 'trace (fromSource (cleanSourceWith { src = ./a; })) null' 'trace ./a null'
+rm -rf -- *
+
+# For a tree assigning each subpath true/false,
+# check whether a source filter with those results includes the same files
+# as a file set created using fromSource. Usage:
+#
+# tree=(
+#   [a]=1  # ./a is a file and the filter should return true for it
+#   [b/]=0 # ./b is a directory and the filter should return false for it
+# )
+# checkSource
+checkSource() {
+    createTree
+
+    # Serialise the tree as JSON (there's only minimal savings with jq,
+    # and we don't need to handle escapes)
+    {
+        echo "{"
+        first=1
+        for p in "${!tree[@]}"; do
+            if [[ -z "$first" ]]; then
+                echo ","
+            else
+                first=
+            fi
+            echo "\"$p\":"
+            case "${tree[$p]}" in
+                1)
+                    echo "true"
+                    ;;
+                0)
+                    echo "false"
+                    ;;
+                *)
+                    die "Unsupported tree value: ${tree[$p]}"
+            esac
+        done
+        echo "}"
+    } > "$tmp/tree.json"
+
+    # An expression to create a source value with a filter matching the tree
+    sourceExpr='
+      let
+        tree = importJSON '"$tmp"'/tree.json;
+      in
+      cleanSourceWith {
+        src = ./.;
+        filter =
+          pathString: type:
+          let
+            stripped = removePrefix (toString ./. + "/") pathString;
+            key = stripped + optionalString (type == "directory") "/";
+          in
+          tree.${key} or
+            (throw "tree key ${key} missing");
+      }
+    '
+
+    filesetExpr='
+      toSource {
+        root = ./.;
+        fileset = fromSource ('"$sourceExpr"');
+      }
+    '
+
+    # Turn both into store paths
+    sourceStorePath=$(expectStorePath "$sourceExpr")
+    filesetStorePath=$(expectStorePath "$filesetExpr")
+
+    # Loop through each path in the tree
+    while IFS= read -r -d $'\0' subpath; do
+        if [[ ! -e "$sourceStorePath"/"$subpath" ]]; then
+            # If it's not in the source store path, it's also not in the file set store path
+            if [[ -e "$filesetStorePath"/"$subpath" ]]; then
+                die "The store path $sourceStorePath created by $expr doesn't contain $subpath, but the corresponding store path $filesetStorePath created via fromSource does contain $subpath"
+            fi
+        elif [[ -z "$(find "$sourceStorePath"/"$subpath" -type f)" ]]; then
+            # If it's an empty directory in the source store path, it shouldn't be in the file set store path
+            if [[ -e "$filesetStorePath"/"$subpath" ]]; then
+                die "The store path $sourceStorePath created by $expr contains the path $subpath without any files, but the corresponding store path $filesetStorePath created via fromSource didn't omit it"
+            fi
+        else
+            # If it's non-empty directory or a file, it should be in the file set store path
+            if [[ ! -e "$filesetStorePath"/"$subpath" ]]; then
+                die "The store path $sourceStorePath created by $expr contains the non-empty path $subpath, but the corresponding store path $filesetStorePath created via fromSource doesn't include it"
+            fi
+        fi
+    done < <(find . -mindepth 1 -print0)
+
+    rm -rf -- *
+}
+
+# Check whether the filter is evaluated correctly
+tree=(
+    [a]=
+    [b/]=
+    [b/c]=
+    [b/d]=
+    [e/]=
+    [e/e/]=
+)
+# We fill out the above tree values with all possible combinations of 0 and 1
+# Then check whether a filter based on those return values gets turned into the corresponding file set
+for i in $(seq 0 $((2 ** ${#tree[@]} - 1 ))); do
+    for p in "${!tree[@]}"; do
+        tree[$p]=$(( i % 2 ))
+        (( i /= 2 )) || true
+    done
+    checkSource
+done
+
+# The filter is called with the same arguments in the same order
+mkdir a e
+touch a/b a/c d e
+expectEqual '
+  trace (fromSource (cleanSourceWith {
+    src = ./.;
+    filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
+  })) null
+' '
+  builtins.seq (cleanSourceWith {
+    src = ./.;
+    filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
+  }).outPath
+  builtins.trace "'"$work"' (all files in directory)"
+  null
+'
+rm -rf -- *
+
+# Test that if a directory is not included, the filter isn't called on its contents
+mkdir a b
+touch a/c b/d
+expectEqual 'trace (fromSource (cleanSourceWith {
+  src = ./.;
+  filter = pathString: type:
+    if pathString == toString ./a then
+      false
+    else if pathString == toString ./b then
+      true
+    else if pathString == toString ./b/d then
+      true
+    else
+      abort "This filter should not be called with path ${pathString}";
+})) null' 'trace (_create ./. { b = "directory"; }) null'
+rm -rf -- *
+
+# The filter is called lazily:
+# If, say, a later intersection removes part of the tree, the filter won't run on it
+mkdir a d
+touch a/{b,c} d/e
+expectEqual 'trace (intersection ./a (fromSource (lib.cleanSourceWith {
+  src = ./.;
+  filter = pathString: type:
+    if pathString == toString ./a || pathString == toString ./a/b then
+      true
+    else if pathString == toString ./a/c then
+      false
+    else
+      abort "filter should not be called on ${pathString}";
+}))) null' 'trace ./a/b null'
+rm -rf -- *
+
 # TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets
 
 echo >&2 tests ok
diff --git a/nixpkgs/lib/licenses.nix b/nixpkgs/lib/licenses.nix
index d9555ca66cb9..ad6922498ab4 100644
--- a/nixpkgs/lib/licenses.nix
+++ b/nixpkgs/lib/licenses.nix
@@ -516,17 +516,17 @@ in mkLicense lset) ({
 
   generaluser = {
     fullName = "GeneralUser GS License v2.0";
-    url = "http://www.schristiancollins.com/generaluser.php"; # license included in sources
+    url = "https://www.schristiancollins.com/generaluser.php"; # license included in sources
   };
 
   gfl = {
     fullName = "GUST Font License";
-    url = "http://www.gust.org.pl/fonts/licenses/GUST-FONT-LICENSE.txt";
+    url = "https://www.gust.org.pl/projects/e-foundry/licenses/GUST-FONT-LICENSE.txt";
   };
 
   gfsl = {
     fullName = "GUST Font Source License";
-    url = "http://www.gust.org.pl/fonts/licenses/GUST-FONT-SOURCE-LICENSE.txt";
+    url = "https://www.gust.org.pl/projects/e-foundry/licenses/GUST-FONT-SOURCE-LICENSE.txt";
   };
 
   gpl1Only = {
@@ -613,7 +613,7 @@ in mkLicense lset) ({
   info-zip = {
     spdxId = "Info-ZIP";
     fullName = "Info-ZIP License";
-    url = "http://www.info-zip.org/pub/infozip/license.html";
+    url = "https://infozip.sourceforge.net/license.html";
   };
 
   inria-compcert = {
@@ -877,6 +877,21 @@ in mkLicense lset) ({
     fullName = "Non-Profit Open Software License 3.0";
   };
 
+  nvidiaCuda = {
+    shortName = "CUDA EULA";
+    fullName = "CUDA Toolkit End User License Agreement (EULA)";
+    url = "https://docs.nvidia.com/cuda/eula/index.html#cuda-toolkit-supplement-license-agreement";
+    free = false;
+  };
+
+  nvidiaCudaRedist = {
+    shortName = "CUDA EULA";
+    fullName = "CUDA Toolkit End User License Agreement (EULA)";
+    url = "https://docs.nvidia.com/cuda/eula/index.html#cuda-toolkit-supplement-license-agreement";
+    free = false;
+    redistributable = true;
+  };
+
   obsidian = {
     fullName = "Obsidian End User Agreement";
     url = "https://obsidian.md/eula";
@@ -1167,7 +1182,7 @@ in mkLicense lset) ({
 
   xfig = {
     fullName = "xfig";
-    url = "http://mcj.sourceforge.net/authors.html#xfig"; # https is broken
+    url = "https://mcj.sourceforge.net/authors.html#xfig";
   };
 
   zlib = {
diff --git a/nixpkgs/lib/lists.nix b/nixpkgs/lib/lists.nix
index 3835e3ba69cb..15047f488f4d 100644
--- a/nixpkgs/lib/lists.nix
+++ b/nixpkgs/lib/lists.nix
@@ -821,6 +821,19 @@ rec {
    */
   unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];
 
+  /* Check if a list contains only unique elements. O(n^2) complexity.
+
+     Type: allUnique :: [a] -> bool
+
+     Example:
+       allUnique [ 3 2 3 4 ]
+       => false
+       allUnique [ 3 2 4 1 ]
+       => true
+   */
+  allUnique = list: (length (unique list) == length list);
+
+
   /* Intersects list 'e' and another list. O(nm) complexity.
 
      Example:
diff --git a/nixpkgs/lib/strings.nix b/nixpkgs/lib/strings.nix
index 628669d86bbd..695aaaacd348 100644
--- a/nixpkgs/lib/strings.nix
+++ b/nixpkgs/lib/strings.nix
@@ -144,6 +144,20 @@ rec {
   */
   concatLines = concatMapStrings (s: s + "\n");
 
+  /*
+    Replicate a string n times,
+    and concatenate the parts into a new string.
+
+    Type: replicate :: int -> string -> string
+
+    Example:
+      replicate 3 "v"
+      => "vvv"
+      replicate 5 "hello"
+      => "hellohellohellohellohello"
+  */
+  replicate = n: s: concatStrings (lib.lists.replicate n s);
+
   /* Construct a Unix-style, colon-separated search path consisting of
      the given `subDir` appended to each of the given paths.
 
diff --git a/nixpkgs/lib/systems/inspect.nix b/nixpkgs/lib/systems/inspect.nix
index 022e459c3945..073df78797c7 100644
--- a/nixpkgs/lib/systems/inspect.nix
+++ b/nixpkgs/lib/systems/inspect.nix
@@ -100,6 +100,32 @@ rec {
     ];
   };
 
+  # Given two patterns, return a pattern which is their logical AND.
+  # Since a pattern is a list-of-disjuncts, this needs to take the cross product of both lists and merge each pair of disjuncts.
+  patternLogicalAnd = pat1_: pat2_:
+    let
+      # Patterns can be either a list or a (bare) singleton; turn
+      # them into lists for uniform handling
+      pat1 = lib.toList pat1_;
+      pat2 = lib.toList pat2_;
+    in
+      lib.concatMap (attr1:
+        map (attr2:
+          lib.recursiveUpdateUntil
+            (path: subattr1: subattr2:
+              if (builtins.intersectAttrs subattr1 subattr2) == {} || subattr1 == subattr2
+              then true
+              else throw ''
+                pattern conflict at path ${toString path}:
+                  ${builtins.toJSON subattr1}
+                  ${builtins.toJSON subattr2}
+                '')
+            attr1
+            attr2
+            )
+          pat2)
+        pat1;
+
   matchAnyAttrs = patterns:
     if builtins.isList patterns then attrs: any (pattern: matchAttrs pattern attrs) patterns
     else matchAttrs patterns;
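
A brief sketch combining the new `patternLogicalAnd` with the existing `patterns` and `matchAnyAttrs` from this file (the `lib` import path is an assumption; `patterns.isLinux` and `patterns.isAarch64` are existing patterns):

```nix
let
  lib = import ./nixpkgs/lib;  # assumed checkout layout
  inherit (lib.systems.inspect) patterns patternLogicalAnd matchAnyAttrs;
  # Cross product of the two patterns' disjuncts, merged pairwise
  isLinuxAarch64 = patternLogicalAnd patterns.isLinux patterns.isAarch64;
in
  # true for an aarch64-linux system, false otherwise
  matchAnyAttrs isLinuxAarch64 (lib.systems.elaborate "aarch64-linux").parsed
```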
diff --git a/nixpkgs/lib/tests/misc.nix b/nixpkgs/lib/tests/misc.nix
index 47853f47278a..06cb5e763e2c 100644
--- a/nixpkgs/lib/tests/misc.nix
+++ b/nixpkgs/lib/tests/misc.nix
@@ -191,6 +191,11 @@ runTests {
     expected = "a\nb\nc\n";
   };
 
+  testReplicateString = {
+    expr = strings.replicate 5 "hello";
+    expected = "hellohellohellohellohello";
+  };
+
   testSplitStringsSimple = {
     expr = strings.splitString "." "a.b.c.d";
     expected = [ "a" "b" "c" "d" ];
@@ -721,6 +726,15 @@ runTests {
     expected = 7;
   };
 
+  testAllUnique_true = {
+    expr = allUnique [ 3 2 4 1 ];
+    expected = true;
+  };
+  testAllUnique_false = {
+    expr = allUnique [ 3 2 3 4 ];
+    expected = false;
+  };
+
 # ATTRSETS
 
   testConcatMapAttrs = {
diff --git a/nixpkgs/lib/trivial.nix b/nixpkgs/lib/trivial.nix
index c23fc6070be4..a89c1aa25b1f 100644
--- a/nixpkgs/lib/trivial.nix
+++ b/nixpkgs/lib/trivial.nix
@@ -449,6 +449,40 @@ rec {
     (f ? __functor && isFunction (f.__functor f));
 
   /*
+    `mirrorFunctionArgs f g` creates a new function `g'` with the same behavior as `g` (`g' x == g x`)
+    but its function arguments mirroring `f` (`lib.functionArgs g' == lib.functionArgs f`).
+
+    Type:
+      mirrorFunctionArgs :: (a -> b) -> (a -> c) -> (a -> c)
+
+    Example:
+      addab = {a, b}: a + b
+      addab { a = 2; b = 4; }
+      => 6
+      lib.functionArgs addab
+      => { a = false; b = false; }
+      addab1 = attrs: addab attrs + 1
+      addab1 { a = 2; b = 4; }
+      => 7
+      lib.functionArgs addab1
+      => { }
+      addab1' = lib.mirrorFunctionArgs addab addab1
+      addab1' { a = 2; b = 4; }
+      => 7
+      lib.functionArgs addab1'
+      => { a = false; b = false; }
+  */
+  mirrorFunctionArgs =
+    # Function to provide the argument metadata
+    f:
+    let
+      fArgs = functionArgs f;
+    in
+    # Function to set the argument metadata to
+    g:
+    setFunctionArgs g fArgs;
+
+  /*
     Turns any non-callable values into constant functions.
     Returns callable values as is.