Diffstat (limited to 'nixpkgs/pkgs/build-support/node')
-rw-r--r--  nixpkgs/pkgs/build-support/node/build-npm-package/default.nix | 88
-rw-r--r--  nixpkgs/pkgs/build-support/node/build-npm-package/hooks/default.nix | 48
-rw-r--r--  nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-build-hook.sh | 38
-rw-r--r--  nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-config-hook.sh | 119
-rw-r--r--  nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh | 71
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/.gitignore | 1
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.lock | 1046
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.toml | 23
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/default.nix | 207
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs | 125
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/main.rs | 432
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs | 370
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs | 353
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/util.rs | 66
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/common.js | 17
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/default.nix | 85
-rwxr-xr-x  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/fixup.js | 76
-rwxr-xr-x  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/index.js | 206
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/default.nix | 24
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/file.lock | 9
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/git.lock | 7
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/github.lock | 7
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/giturl.lock | 11
-rw-r--r--  nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/simple.lock | 8
-rw-r--r--  nixpkgs/pkgs/build-support/node/import-npm-lock/default.nix | 134
-rw-r--r--  nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/canonicalize-symlinks.js | 52
-rw-r--r--  nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/default.nix | 13
-rw-r--r--  nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/npm-config-hook.sh | 70
28 files changed, 3706 insertions, 0 deletions
diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/default.nix b/nixpkgs/pkgs/build-support/node/build-npm-package/default.nix
new file mode 100644
index 000000000000..1c7bf63e8cd6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/build-npm-package/default.nix
@@ -0,0 +1,88 @@
+{ lib
+, stdenv
+, fetchNpmDeps
+, buildPackages
+, nodejs
+, darwin
+} @ topLevelArgs:
+
+{ name ? "${args.pname}-${args.version}"
+, src ? null
+, srcs ? null
+, sourceRoot ? null
+, prePatch ? ""
+, patches ? [ ]
+, postPatch ? ""
+, nativeBuildInputs ? [ ]
+, buildInputs ? [ ]
+  # The output hash of the dependencies for this project.
+  # Can be calculated in advance with prefetch-npm-deps.
+, npmDepsHash ? ""
+  # Whether to force the usage of Git dependencies that have install scripts, but not a lockfile.
+  # Use with care.
+, forceGitDeps ? false
+  # Whether to force allow an empty dependency cache.
+  # This can be enabled if there are truly no remote dependencies, but generally an empty cache indicates something is wrong.
+, forceEmptyCache ? false
+  # Whether to make the cache writable prior to installing dependencies.
+  # Don't set this unless npm tries to write to the cache directory, as it can slow down the build.
+, makeCacheWritable ? false
+  # The script to run to build the project.
+, npmBuildScript ? "build"
+  # Flags to pass to all npm commands.
+, npmFlags ? [ ]
+  # Flags to pass to `npm ci`.
+, npmInstallFlags ? [ ]
+  # Flags to pass to `npm rebuild`.
+, npmRebuildFlags ? [ ]
+  # Flags to pass to `npm run ${npmBuildScript}`.
+, npmBuildFlags ? [ ]
+  # Flags to pass to `npm pack`.
+, npmPackFlags ? [ ]
+  # Flags to pass to `npm prune`.
+, npmPruneFlags ? npmInstallFlags
+  # Value for npm `--workspace` flag and directory in which the files to be installed are found.
+, npmWorkspace ? null
+, nodejs ? topLevelArgs.nodejs
+, npmDeps ? fetchNpmDeps {
+  inherit forceGitDeps forceEmptyCache src srcs sourceRoot prePatch patches postPatch;
+  name = "${name}-npm-deps";
+  hash = npmDepsHash;
+}
+  # Custom npmConfigHook
+, npmConfigHook ? null
+  # Custom npmBuildHook
+, npmBuildHook ? null
+  # Custom npmInstallHook
+, npmInstallHook ? null
+, ...
+} @ args:
+
+let
+  # .override {} negates splicing, so we need to use buildPackages explicitly
+  npmHooks = buildPackages.npmHooks.override {
+    inherit nodejs;
+  };
+in
+stdenv.mkDerivation (args // {
+  inherit npmDeps npmBuildScript;
+
+  nativeBuildInputs = nativeBuildInputs
+    ++ [
+      nodejs
+      # Prefer passed hooks
+      (if npmConfigHook != null then npmConfigHook else npmHooks.npmConfigHook)
+      (if npmBuildHook != null then npmBuildHook else npmHooks.npmBuildHook)
+      (if npmInstallHook != null then npmInstallHook else npmHooks.npmInstallHook)
+      nodejs.python
+    ]
+    ++ lib.optionals stdenv.isDarwin [ darwin.cctools ];
+  buildInputs = buildInputs ++ [ nodejs ];
+
+  strictDeps = true;
+
+  # Stripping takes far too long with the number of files in a typical Node.js project.
+  dontStrip = args.dontStrip or true;
+
+  meta = (args.meta or { }) // { platforms = args.meta.platforms or nodejs.meta.platforms; };
+})
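
The parameter comments above describe the public interface of buildNpmPackage. As a hedged illustration only (the package name, repository, and hashes below are placeholders, not taken from this diff), a typical consumer might look like:

  { lib, buildNpmPackage, fetchFromGitHub }:

  buildNpmPackage rec {
    pname = "example-cli";              # hypothetical package
    version = "1.0.0";

    src = fetchFromGitHub {
      owner = "example";                # placeholder owner/repo
      repo = "example-cli";
      rev = "v${version}";
      hash = lib.fakeHash;              # replace with the real source hash
    };

    # Prefetched dependency hash: build once with lib.fakeHash and copy the
    # "got: sha256-..." value reported by the hash mismatch.
    npmDepsHash = lib.fakeHash;

    # Default shown for clarity; npmDeps is derived from npmDepsHash via
    # fetchNpmDeps unless it is passed explicitly (see the argument above).
    npmBuildScript = "build";
  }
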
diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/default.nix b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/default.nix
new file mode 100644
index 000000000000..36f0319e3d23
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/default.nix
@@ -0,0 +1,48 @@
+{ lib
+, srcOnly
+, makeSetupHook
+, makeWrapper
+, nodejs
+, jq
+, prefetch-npm-deps
+, diffutils
+, installShellFiles
+}:
+
+{
+  npmConfigHook = makeSetupHook
+    {
+      name = "npm-config-hook";
+      substitutions = {
+        nodeSrc = srcOnly nodejs;
+        nodeGyp = "${nodejs}/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js";
+
+        # Specify `diff`, `jq`, and `prefetch-npm-deps` by abspath to ensure that the user's build
+        # inputs do not cause us to find the wrong binaries.
+        diff = "${diffutils}/bin/diff";
+        jq = "${jq}/bin/jq";
+        prefetchNpmDeps = "${prefetch-npm-deps}/bin/prefetch-npm-deps";
+
+        nodeVersion = nodejs.version;
+        nodeVersionMajor = lib.versions.major nodejs.version;
+      };
+    } ./npm-config-hook.sh;
+
+  npmBuildHook = makeSetupHook
+    {
+      name = "npm-build-hook";
+    } ./npm-build-hook.sh;
+
+  npmInstallHook = makeSetupHook
+    {
+      name = "npm-install-hook";
+      propagatedBuildInputs = [
+        installShellFiles
+        makeWrapper
+      ];
+      substitutions = {
+        hostNode = "${nodejs}/bin/node";
+        jq = "${jq}/bin/jq";
+      };
+    } ./npm-install-hook.sh;
+}
diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-build-hook.sh b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-build-hook.sh
new file mode 100644
index 000000000000..c341f672363a
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-build-hook.sh
@@ -0,0 +1,38 @@
+# shellcheck shell=bash
+
+npmBuildHook() {
+    echo "Executing npmBuildHook"
+
+    runHook preBuild
+
+    if [ -z "${npmBuildScript-}" ]; then
+        echo
+        echo "ERROR: no build script was specified"
+        echo 'Hint: set `npmBuildScript`, override `buildPhase`, or set `dontNpmBuild = true`.'
+        echo
+
+        exit 1
+    fi
+
+    if ! npm run ${npmWorkspace+--workspace=$npmWorkspace} "$npmBuildScript" $npmBuildFlags "${npmBuildFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"; then
+        echo
+        echo 'ERROR: `npm build` failed'
+        echo
+        echo "Here are a few things you can try, depending on the error:"
+        echo "1. Make sure your build script ($npmBuildScript) exists"
+        echo '  If there is none, set `dontNpmBuild = true`.'
+        echo '2. If the error being thrown is something similar to "error:0308010C:digital envelope routines::unsupported", add `NODE_OPTIONS = "--openssl-legacy-provider"` to your derivation'
+        echo "  See https://github.com/webpack/webpack/issues/14532 for more information."
+        echo
+
+        exit 1
+    fi
+
+    runHook postBuild
+
+    echo "Finished npmBuildHook"
+}
+
+if [ -z "${dontNpmBuild-}" ] && [ -z "${buildPhase-}" ]; then
+    buildPhase=npmBuildHook
+fi
diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-config-hook.sh b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-config-hook.sh
new file mode 100644
index 000000000000..486b0c2f8372
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-config-hook.sh
@@ -0,0 +1,119 @@
+# shellcheck shell=bash
+
+npmConfigHook() {
+    echo "Executing npmConfigHook"
+
+    # Use npm patches in the nodejs package
+    export NIX_NODEJS_BUILDNPMPACKAGE=1
+    export prefetchNpmDeps="@prefetchNpmDeps@"
+
+    if [ -n "${npmRoot-}" ]; then
+      pushd "$npmRoot"
+    fi
+
+    echo "Configuring npm"
+
+    export HOME="$TMPDIR"
+    export npm_config_nodedir="@nodeSrc@"
+    export npm_config_node_gyp="@nodeGyp@"
+
+    if [ -z "${npmDeps-}" ]; then
+        echo
+        echo "ERROR: no dependencies were specified"
+        echo 'Hint: set `npmDeps` if using these hooks individually. If this is happening with `buildNpmPackage`, please open an issue.'
+        echo
+
+        exit 1
+    fi
+
+    local -r cacheLockfile="$npmDeps/package-lock.json"
+    local -r srcLockfile="$PWD/package-lock.json"
+
+    echo "Validating consistency between $srcLockfile and $cacheLockfile"
+
+    if ! @diff@ "$srcLockfile" "$cacheLockfile"; then
+      # If the diff failed, first double-check that the file exists, so we can
+      # give a friendlier error msg.
+      if ! [ -e "$srcLockfile" ]; then
+        echo
+        echo "ERROR: Missing package-lock.json from src. Expected to find it at: $srcLockfile"
+        echo "Hint: You can copy a vendored package-lock.json file via postPatch."
+        echo
+
+        exit 1
+      fi
+
+      if ! [ -e "$cacheLockfile" ]; then
+        echo
+        echo "ERROR: Missing lockfile from cache. Expected to find it at: $cacheLockfile"
+        echo
+
+        exit 1
+      fi
+
+      echo
+      echo "ERROR: npmDepsHash is out of date"
+      echo
+      echo "The package-lock.json in src is not the same as the in $npmDeps."
+      echo
+      echo "To fix the issue:"
+      echo '1. Use `lib.fakeHash` as the npmDepsHash value'
+      echo "2. Build the derivation and wait for it to fail with a hash mismatch"
+      echo "3. Copy the 'got: sha256-' value back into the npmDepsHash field"
+      echo
+
+      exit 1
+    fi
+
+    export CACHE_MAP_PATH="$TMP/MEOW"
+    @prefetchNpmDeps@ --map-cache
+
+    @prefetchNpmDeps@ --fixup-lockfile "$srcLockfile"
+
+    local cachePath
+
+    if [ -z "${makeCacheWritable-}" ]; then
+        cachePath="$npmDeps"
+    else
+        echo "Making cache writable"
+        cp -r "$npmDeps" "$TMPDIR/cache"
+        chmod -R 700 "$TMPDIR/cache"
+        cachePath="$TMPDIR/cache"
+    fi
+
+    npm config set cache "$cachePath"
+    npm config set offline true
+    npm config set progress false
+
+    echo "Installing dependencies"
+
+    if ! npm ci --ignore-scripts $npmInstallFlags "${npmInstallFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"; then
+        echo
+        echo "ERROR: npm failed to install dependencies"
+        echo
+        echo "Here are a few things you can try, depending on the error:"
+        echo '1. Set `makeCacheWritable = true`'
+        echo "  Note that this won't help if npm is complaining about not being able to write to the logs directory -- look above that for the actual error."
+        echo '2. Set `npmFlags = [ "--legacy-peer-deps" ]`'
+        echo
+
+        exit 1
+    fi
+
+    patchShebangs node_modules
+
+    npm rebuild $npmRebuildFlags "${npmRebuildFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"
+
+    patchShebangs node_modules
+
+    rm "$CACHE_MAP_PATH"
+    unset CACHE_MAP_PATH
+
+    if [ -n "${npmRoot-}" ]; then
+      popd
+    fi
+
+    echo "Finished npmConfigHook"
+}
+
+postPatchHooks+=(npmConfigHook)
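
The remediation hints printed by this hook and by npm-build-hook.sh correspond to ordinary derivation attributes. A minimal sketch, assuming a hypothetical local project (every value below is illustrative, not a recommended default):

  { lib, buildNpmPackage }:

  buildNpmPackage {
    pname = "example-app";                        # hypothetical
    version = "0.1.0";
    src = ./.;
    npmDepsHash = lib.fakeHash;                   # placeholder

    makeCacheWritable = true;                     # only if npm insists on writing to its cache
    npmFlags = [ "--legacy-peer-deps" ];          # only if peer-dependency resolution fails
    NODE_OPTIONS = "--openssl-legacy-provider";   # for the webpack "digital envelope routines" error
  }
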
diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh
new file mode 100644
index 000000000000..750ed421789f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh
@@ -0,0 +1,71 @@
+# shellcheck shell=bash
+
+npmInstallHook() {
+    echo "Executing npmInstallHook"
+
+    runHook preInstall
+
+    local -r packageOut="$out/lib/node_modules/$(@jq@ --raw-output '.name' package.json)"
+
+    # `npm pack` writes to cache so temporarily override it
+    while IFS= read -r file; do
+        local dest="$packageOut/$(dirname "$file")"
+        mkdir -p "$dest"
+        cp "${npmWorkspace-.}/$file" "$dest"
+    done < <(@jq@ --raw-output '.[0].files | map(.path | select(. | startswith("node_modules/") | not)) | join("\n")' <<< "$(npm_config_cache="$HOME/.npm" npm pack --json --dry-run --loglevel=warn --no-foreground-scripts ${npmWorkspace+--workspace=$npmWorkspace} $npmPackFlags "${npmPackFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}")")
+
+    # Based on code from Python's buildPythonPackage wrap.sh script, for
+    # supporting both the case when makeWrapperArgs is an array and a
+    # IFS-separated string.
+    #
+    # TODO: remove the string branch when __structuredAttrs are used.
+    if [[ "${makeWrapperArgs+defined}" == "defined" && "$(declare -p makeWrapperArgs)" =~ ^'declare -a makeWrapperArgs=' ]]; then
+        local -a user_args=("${makeWrapperArgs[@]}")
+    else
+        local -a user_args="(${makeWrapperArgs:-})"
+    fi
+    while IFS=" " read -ra bin; do
+        mkdir -p "$out/bin"
+        makeWrapper @hostNode@ "$out/bin/${bin[0]}" --add-flags "$packageOut/${bin[1]}" "${user_args[@]}"
+    done < <(@jq@ --raw-output '(.bin | type) as $typ | if $typ == "string" then
+        .name + " " + .bin
+        elif $typ == "object" then .bin | to_entries | map(.key + " " + .value) | join("\n")
+        elif $typ == "null" then empty
+        else "invalid type " + $typ | halt_error end' "${npmWorkspace-.}/package.json")
+
+    while IFS= read -r man; do
+        installManPage "$packageOut/$man"
+    done < <(@jq@ --raw-output '(.man | type) as $typ | if $typ == "string" then .man
+        elif $typ == "list" then .man | join("\n")
+        elif $typ == "null" then empty
+        else "invalid type " + $typ | halt_error end' "${npmWorkspace-.}/package.json")
+
+    local -r nodeModulesPath="$packageOut/node_modules"
+
+    if [ ! -d "$nodeModulesPath" ]; then
+        if [ -z "${dontNpmPrune-}" ]; then
+            if ! npm prune --omit=dev --no-save ${npmWorkspace+--workspace=$npmWorkspace} $npmPruneFlags "${npmPruneFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"; then
+              echo
+              echo
+              echo "ERROR: npm prune step failed"
+              echo
+              echo 'If npm tried to download additional dependencies above, try setting `dontNpmPrune = true`.'
+              echo
+
+              exit 1
+            fi
+        fi
+
+        find node_modules -maxdepth 1 -type d -empty -delete
+
+        cp -r node_modules "$nodeModulesPath"
+    fi
+
+    runHook postInstall
+
+    echo "Finished npmInstallHook"
+}
+
+if [ -z "${dontNpmInstall-}" ] && [ -z "${installPhase-}" ]; then
+    installPhase=npmInstallHook
+fi
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/.gitignore b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/.gitignore
new file mode 100644
index 000000000000..ea8c4bf7f35f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/.gitignore
@@ -0,0 +1 @@
+/target
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.lock b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.lock
new file mode 100644
index 000000000000..8ba72a7b76c4
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.lock
@@ -0,0 +1,1046 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.75"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
+
+[[package]]
+name = "async-channel"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
+dependencies = [
+ "concurrent-queue",
+ "event-listener",
+ "futures-core",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "backoff"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1"
+dependencies = [
+ "getrandom",
+ "instant",
+ "rand",
+]
+
+[[package]]
+name = "base64"
+version = "0.21.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "bitflags"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
+
+[[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "bytes"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
+
+[[package]]
+name = "castaway"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6"
+
+[[package]]
+name = "cc"
+version = "1.0.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "concurrent-queue"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "crossbeam-utils",
+ "memoffset",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "curl"
+version = "0.4.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22"
+dependencies = [
+ "curl-sys",
+ "libc",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "socket2",
+ "winapi",
+]
+
+[[package]]
+name = "curl-sys"
+version = "0.4.68+curl-8.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4a0d18d88360e374b16b2273c832b5e57258ffc1d4aa4f96b108e0738d5752f"
+dependencies = [
+ "cc",
+ "libc",
+ "libz-sys",
+ "openssl-sys",
+ "pkg-config",
+ "vcpkg",
+ "windows-sys",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+]
+
+[[package]]
+name = "either"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
+
+[[package]]
+name = "env_logger"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95b3f3e67048839cb0d0781f445682a35113da7121f7c949db0e2be96a4fbece"
+dependencies = [
+ "humantime",
+ "is-terminal",
+ "log",
+ "regex",
+ "termcolor",
+]
+
+[[package]]
+name = "errno"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f258a7194e7f7c2a7837a8913aeab7fd8c383457034fa20ce4dd3dcb813e8eb8"
+dependencies = [
+ "libc",
+ "windows-sys",
+]
+
+[[package]]
+name = "event-listener"
+version = "2.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
+
+[[package]]
+name = "fastrand"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
+dependencies = [
+ "instant",
+]
+
+[[package]]
+name = "fastrand"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c"
+
+[[package]]
+name = "futures-io"
+version = "0.3.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa"
+
+[[package]]
+name = "futures-lite"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce"
+dependencies = [
+ "fastrand 1.9.0",
+ "futures-core",
+ "futures-io",
+ "memchr",
+ "parking",
+ "pin-project-lite",
+ "waker-fn",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
+
+[[package]]
+name = "http"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "humantime"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
+
+[[package]]
+name = "idna"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "is-terminal"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
+dependencies = [
+ "hermit-abi",
+ "rustix",
+ "windows-sys",
+]
+
+[[package]]
+name = "isahc"
+version = "1.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9"
+dependencies = [
+ "async-channel",
+ "castaway",
+ "crossbeam-utils",
+ "curl",
+ "curl-sys",
+ "event-listener",
+ "futures-lite",
+ "http",
+ "log",
+ "once_cell",
+ "polling",
+ "slab",
+ "sluice",
+ "tracing",
+ "tracing-futures",
+ "url",
+ "waker-fn",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
+
+[[package]]
+name = "libc"
+version = "0.2.150"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
+
+[[package]]
+name = "libz-sys"
+version = "1.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829"
+
+[[package]]
+name = "log"
+version = "0.4.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
+
+[[package]]
+name = "memchr"
+version = "2.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
+
+[[package]]
+name = "memoffset"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.95"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "parking"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae"
+
+[[package]]
+name = "percent-encoding"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
+
+[[package]]
+name = "pin-project"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
+
+[[package]]
+name = "pkg-config"
+version = "0.3.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
+
+[[package]]
+name = "polling"
+version = "2.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce"
+dependencies = [
+ "autocfg",
+ "bitflags 1.3.2",
+ "cfg-if",
+ "concurrent-queue",
+ "libc",
+ "log",
+ "pin-project-lite",
+ "windows-sys",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+
+[[package]]
+name = "prefetch-npm-deps"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "backoff",
+ "base64",
+ "digest",
+ "env_logger",
+ "isahc",
+ "log",
+ "rayon",
+ "serde",
+ "serde_json",
+ "sha1",
+ "sha2",
+ "tempfile",
+ "url",
+ "walkdir",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rayon"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1"
+dependencies = [
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed"
+dependencies = [
+ "crossbeam-deque",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+dependencies = [
+ "bitflags 1.3.2",
+]
+
+[[package]]
+name = "regex"
+version = "1.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
+
+[[package]]
+name = "rustix"
+version = "0.38.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc99bc2d4f1fed22595588a013687477aedf3cdcfb26558c559edb67b4d9b22e"
+dependencies = [
+ "bitflags 2.4.1",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "schannel"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "serde"
+version = "1.0.193"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.193"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.108"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha1"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "sha2"
+version = "0.10.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "sluice"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5"
+dependencies = [
+ "async-channel",
+ "futures-core",
+ "futures-io",
+]
+
+[[package]]
+name = "socket2"
+version = "0.4.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5"
+dependencies = [
+ "cfg-if",
+ "fastrand 2.0.1",
+ "redox_syscall",
+ "rustix",
+ "windows-sys",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
+[[package]]
+name = "tracing"
+version = "0.1.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
+dependencies = [
+ "log",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "tracing-futures"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2"
+dependencies = [
+ "pin-project",
+ "tracing",
+]
+
+[[package]]
+name = "typenum"
+version = "1.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "url"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "waker-fn"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690"
+
+[[package]]
+name = "walkdir"
+version = "2.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-sys"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.48.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.toml b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.toml
new file mode 100644
index 000000000000..ea121c510c95
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "prefetch-npm-deps"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = "1.0.75"
+backoff = "0.4.0"
+base64 = "0.21.5"
+digest = "0.10.7"
+env_logger = "0.10.1"
+isahc = { version = "1.7.2", default-features = false }
+log = "0.4.20"
+rayon = "1.8.0"
+serde = { version = "1.0.193", features = ["derive"] }
+serde_json = "1.0.108"
+sha1 = "0.10.6"
+sha2 = "0.10.8"
+tempfile = "3.8.1"
+url = { version = "2.4.1", features = ["serde"] }
+walkdir = "2.4.0"
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/default.nix b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/default.nix
new file mode 100644
index 000000000000..373d63cc59b8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/default.nix
@@ -0,0 +1,207 @@
+{ lib, stdenvNoCC, rustPlatform, makeWrapper, pkg-config, curl, gnutar, gzip, nix, testers, fetchurl, cacert, prefetch-npm-deps, fetchNpmDeps }:
+
+{
+  prefetch-npm-deps = rustPlatform.buildRustPackage {
+    pname = "prefetch-npm-deps";
+    version = (lib.importTOML ./Cargo.toml).package.version;
+
+    src = lib.cleanSourceWith {
+      src = ./.;
+      filter = name: type:
+        let
+          name' = builtins.baseNameOf name;
+        in
+        name' != "default.nix" && name' != "target";
+    };
+
+    cargoLock.lockFile = ./Cargo.lock;
+
+    nativeBuildInputs = [ makeWrapper pkg-config ];
+    buildInputs = [ curl ];
+
+    postInstall = ''
+      wrapProgram "$out/bin/prefetch-npm-deps" --prefix PATH : ${lib.makeBinPath [ gnutar gzip nix ]}
+    '';
+
+    passthru.tests =
+      let
+        makeTestSrc = { name, src }: stdenvNoCC.mkDerivation {
+          name = "${name}-src";
+
+          inherit src;
+
+          buildCommand = ''
+            mkdir -p $out
+            cp $src $out/package-lock.json
+          '';
+        };
+
+        makeTest = { name, src, hash, forceGitDeps ? false, forceEmptyCache ? false }: testers.invalidateFetcherByDrvHash fetchNpmDeps {
+          inherit name hash forceGitDeps forceEmptyCache;
+
+          src = makeTestSrc { inherit name src; };
+        };
+      in
+      {
+        lockfileV1 = makeTest {
+          name = "lockfile-v1";
+
+          src = fetchurl {
+            url = "https://raw.githubusercontent.com/jellyfin/jellyfin-web/v10.8.4/package-lock.json";
+            hash = "sha256-uQmc+S+V1co1Rfc4d82PpeXjmd1UqdsG492ADQFcZGA=";
+          };
+
+          hash = "sha256-wca1QvxUw3OrLStfYN9Co6oVBR1LbfcNUKlDqvObps4=";
+        };
+
+        lockfileV2 = makeTest {
+          name = "lockfile-v2";
+
+          src = fetchurl {
+            url = "https://raw.githubusercontent.com/jesec/flood/v4.7.0/package-lock.json";
+            hash = "sha256-qS29tq5QPnGxV+PU40VgMAtdwVLtLyyhG2z9GMeYtC4=";
+          };
+
+          hash = "sha256-tuEfyePwlOy2/mOPdXbqJskO6IowvAP4DWg8xSZwbJw=";
+        };
+
+        hashPrecedence = makeTest {
+          name = "hash-precedence";
+
+          src = fetchurl {
+            url = "https://raw.githubusercontent.com/matrix-org/matrix-appservice-irc/0.34.0/package-lock.json";
+            hash = "sha256-1+0AQw9EmbHiMPA/H8OP8XenhrkhLRYBRhmd1cNPFjk=";
+          };
+
+          hash = "sha256-oItUls7AXcCECuyA+crQO6B0kv4toIr8pBubNwB7kAM=";
+        };
+
+        hostedGitDeps = makeTest {
+          name = "hosted-git-deps";
+
+          src = fetchurl {
+            url = "https://cyberchaos.dev/yuka/trainsearch/-/raw/e3cba6427e8ecfd843d0f697251ddaf5e53c2327/package-lock.json";
+            hash = "sha256-X9mCwPqV5yP0S2GonNvpYnLSLJMd/SUIked+hMRxDpA=";
+          };
+
+          hash = "sha256-tEdElWJ+KBTxBobzXBpPopQSwK2usGW/it1+yfbVzBw=";
+        };
+
+        linkDependencies = makeTest {
+          name = "link-dependencies";
+
+          src = fetchurl {
+            url = "https://raw.githubusercontent.com/evcc-io/evcc/0.106.3/package-lock.json";
+            hash = "sha256-6ZTBMyuyPP/63gpQugggHhKVup6OB4hZ2rmSvPJ0yEs=";
+          };
+
+          hash = "sha256-VzQhArHoznYSXUT7l9HkJV4yoSOmoP8eYTLel1QwmB4=";
+        };
+
+        # This package has no resolved deps whatsoever, which will not actually work but does test the forceEmptyCache option.
+        emptyCache = makeTest {
+          name = "empty-cache";
+
+          src = fetchurl {
+            url = "https://raw.githubusercontent.com/bufbuild/protobuf-es/v1.2.1/package-lock.json";
+            hash = "sha256-UdBUEb4YRHsbvyjymIyjemJEiaI9KQRirqt+SFSK0wA=";
+          };
+
+          hash = "sha256-Cdv40lQjRszzJtJydZt25uYfcJVeJGwH54A+agdH9wI=";
+
+          forceEmptyCache = true;
+        };
+
+        # This package contains both hosted Git shorthand and a bundled dependency that happens to override an existing one.
+        etherpadLite1818 = makeTest {
+          name = "etherpad-lite-1.8.18";
+
+          src = fetchurl {
+            url = "https://raw.githubusercontent.com/ether/etherpad-lite/1.8.18/src/package-lock.json";
+            hash = "sha256-1fGNxYJi1I4cXK/jinNG+Y6tPEOhP3QAqWOBEQttS9E=";
+          };
+
+          hash = "sha256-+KA8/orSBJ4EhuSyQO8IKSxsN/FAsYU3lOzq+awuxNQ=";
+
+          forceGitDeps = true;
+        };
+
+        # This package has a lockfile v1 Git dependency with no `dependencies` attribute, since it semantically has no dependencies.
+        jitsiMeet9111 = makeTest {
+          name = "jitsi-meet-9111";
+
+          src = fetchurl {
+            url = "https://raw.githubusercontent.com/jitsi/jitsi-meet/stable/jitsi-meet_9111/package-lock.json";
+            hash = "sha256-NU+eQD4WZ4BMur8uX79uk8wUPsZvIT02KhPWHTmaihk=";
+          };
+
+          hash = "sha256-FhxlJ0HdJMPiWe7+n1HaGLWOr/2HJEPwiS65uqXZM8Y=";
+        };
+      };
+
+    meta = with lib; {
+      description = "Prefetch dependencies from npm (for use with `fetchNpmDeps`)";
+      mainProgram = "prefetch-npm-deps";
+      maintainers = with maintainers; [ lilyinstarlight winter ];
+      license = licenses.mit;
+    };
+  };
+
+  fetchNpmDeps =
+    { name ? "npm-deps"
+    , hash ? ""
+    , forceGitDeps ? false
+    , forceEmptyCache ? false
+    , ...
+    } @ args:
+    let
+      hash_ =
+        if hash != "" then {
+          outputHash = hash;
+        } else {
+          outputHash = "";
+          outputHashAlgo = "sha256";
+        };
+
+      forceGitDeps_ = lib.optionalAttrs forceGitDeps { FORCE_GIT_DEPS = true; };
+      forceEmptyCache_ = lib.optionalAttrs forceEmptyCache { FORCE_EMPTY_CACHE = true; };
+    in
+    stdenvNoCC.mkDerivation (args // {
+      inherit name;
+
+      nativeBuildInputs = [ prefetch-npm-deps ];
+
+      buildPhase = ''
+        runHook preBuild
+
+        if [[ ! -e package-lock.json ]]; then
+          echo
+          echo "ERROR: The package-lock.json file does not exist!"
+          echo
+          echo "package-lock.json is required to make sure that npmDepsHash doesn't change"
+          echo "when packages are updated on npm."
+          echo
+          echo "Hint: You can copy a vendored package-lock.json file via postPatch."
+          echo
+
+          exit 1
+        fi
+
+        prefetch-npm-deps package-lock.json $out
+
+        runHook postBuild
+      '';
+
+      dontInstall = true;
+
+      # NIX_NPM_TOKENS environment variable should be a JSON mapping in the shape of:
+      # `{ "registry.example.com": "example-registry-bearer-token", ... }`
+      impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ [ "NIX_NPM_TOKENS" ];
+
+      SSL_CERT_FILE = if (hash_.outputHash == "" || hash_.outputHash == lib.fakeSha256 || hash_.outputHash == lib.fakeSha512 || hash_.outputHash == lib.fakeHash)
+        then "${cacert}/etc/ssl/certs/ca-bundle.crt"
+        else "/no-cert-file.crt";
+
+      outputHashMode = "recursive";
+    } // hash_ // forceGitDeps_ // forceEmptyCache_);
+}
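
fetchNpmDeps can also be called directly, for instance when the resulting cache is consumed by custom build logic rather than by buildNpmPackage. A sketch with placeholder values (name, source, and hashes are illustrative):

  { lib, fetchNpmDeps, fetchFromGitHub }:

  fetchNpmDeps {
    name = "example-app-npm-deps";      # hypothetical
    src = fetchFromGitHub {
      owner = "example";                # placeholder
      repo = "example-app";
      rev = "v0.1.0";
      hash = lib.fakeHash;
    };
    hash = lib.fakeHash;                # replace after the first hash-mismatch failure
    # forceGitDeps = true;              # only for Git dependencies with install scripts but no lockfile
  }

Because the result is a fixed-output derivation, the NIX_NPM_TOKENS mapping documented above is read from the builder's environment via impureEnvVars rather than from a derivation attribute.
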
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs
new file mode 100644
index 000000000000..c49c094b85c6
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs
@@ -0,0 +1,125 @@
+use base64::prelude::{Engine, BASE64_STANDARD};
+use digest::{Digest, Update};
+use serde::{Deserialize, Serialize};
+use sha1::Sha1;
+use sha2::{Sha256, Sha512};
+use std::{
+    fmt::Write as FmtWrite,
+    fs::{self, File},
+    io::Write,
+    path::PathBuf,
+};
+use url::Url;
+
+#[derive(Serialize, Deserialize)]
+pub(super) struct Key {
+    pub(super) key: String,
+    pub(super) integrity: String,
+    pub(super) time: u8,
+    pub(super) size: usize,
+    pub(super) metadata: Metadata,
+}
+
+#[derive(Serialize, Deserialize)]
+pub(super) struct Metadata {
+    pub(super) url: Url,
+    pub(super) options: Options,
+}
+
+#[derive(Serialize, Deserialize)]
+pub(super) struct Options {
+    pub(super) compress: bool,
+}
+
+pub struct Cache(PathBuf);
+
+fn push_hash_segments(path: &mut PathBuf, hash: &str) {
+    path.push(&hash[0..2]);
+    path.push(&hash[2..4]);
+    path.push(&hash[4..]);
+}
+
+impl Cache {
+    pub fn new(path: PathBuf) -> Cache {
+        Cache(path)
+    }
+
+    pub fn init(&self) -> anyhow::Result<()> {
+        fs::create_dir_all(self.0.join("content-v2"))?;
+        fs::create_dir_all(self.0.join("index-v5"))?;
+
+        Ok(())
+    }
+
+    pub fn put(
+        &self,
+        key: String,
+        url: Url,
+        data: &[u8],
+        integrity: Option<String>,
+    ) -> anyhow::Result<()> {
+        let (algo, hash, integrity) = if let Some(integrity) = integrity {
+            let (algo, hash) = integrity.split_once('-').unwrap();
+
+            (algo.to_string(), BASE64_STANDARD.decode(hash)?, integrity)
+        } else {
+            let hash = Sha512::new().chain(data).finalize();
+
+            (
+                String::from("sha512"),
+                hash.to_vec(),
+                format!("sha512-{}", BASE64_STANDARD.encode(hash)),
+            )
+        };
+
+        let content_path = {
+            let mut p = self.0.join("content-v2");
+
+            p.push(algo);
+
+            push_hash_segments(
+                &mut p,
+                &hash.into_iter().fold(String::new(), |mut out, n| {
+                    let _ = write!(out, "{n:02x}");
+                    out
+                }),
+            );
+
+            p
+        };
+
+        fs::create_dir_all(content_path.parent().unwrap())?;
+
+        fs::write(content_path, data)?;
+
+        let index_path = {
+            let mut p = self.0.join("index-v5");
+
+            push_hash_segments(
+                &mut p,
+                &format!("{:x}", Sha256::new().chain(&key).finalize()),
+            );
+
+            p
+        };
+
+        fs::create_dir_all(index_path.parent().unwrap())?;
+
+        let data = serde_json::to_string(&Key {
+            key,
+            integrity,
+            time: 0,
+            size: data.len(),
+            metadata: Metadata {
+                url,
+                options: Options { compress: true },
+            },
+        })?;
+
+        let mut file = File::options().append(true).create(true).open(index_path)?;
+
+        write!(file, "{:x}\t{data}", Sha1::new().chain(&data).finalize())?;
+
+        Ok(())
+    }
+}
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/main.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/main.rs
new file mode 100644
index 000000000000..dc20c7297049
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/main.rs
@@ -0,0 +1,432 @@
+#![warn(clippy::pedantic)]
+
+use crate::cacache::{Cache, Key};
+use anyhow::{anyhow, bail};
+use rayon::prelude::*;
+use serde_json::{Map, Value};
+use std::{
+    collections::HashMap,
+    env, fs,
+    path::{Path, PathBuf},
+    process::{self, Command},
+};
+use tempfile::tempdir;
+use url::Url;
+use walkdir::WalkDir;
+
+mod cacache;
+mod parse;
+mod util;
+
+fn cache_map_path() -> Option<PathBuf> {
+    env::var_os("CACHE_MAP_PATH").map(PathBuf::from)
+}
+
+/// `fixup_lockfile` rewrites `integrity` hashes to match cache and removes the `integrity` field from Git dependencies.
+///
+/// Sometimes npm has multiple instances of a given `resolved` URL that have different types of `integrity` hashes (e.g. SHA-1
+/// and SHA-512) in the lockfile. Since we only cache one copy of each, the `integrity` field must be normalized to the hash
+/// we cache under (which is the strongest available one).
+///
+/// Git dependencies from specific providers can be retrieved from those providers' automatic tarball features.
+/// When these dependencies are specified with a commit identifier, npm generates a tarball, and inserts the integrity hash of that
+/// tarball into the lockfile.
+///
+/// Thus, we remove this hash, since we replace those tarballs with our own deterministic copies of dependencies from hosted Git providers.
+///
+/// If no fixups were performed, `None` is returned and the lockfile should be left as-is. If fixups were performed, the
+/// `dependencies` key (kept in v2 lockfiles for backwards compatibility with v1 parsers) is removed, since its data would no longer be consistent.
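+///
+/// As an illustration: a registry entry whose `integrity` is a SHA-1 hash is rewritten to the SHA-512
+/// hash recorded in the cache map, while a `git+ssh://` entry has its `integrity` removed entirely;
+/// see the `lockfile_fixup` tests below for complete before/after examples.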
+fn fixup_lockfile(
+    mut lock: Map<String, Value>,
+    cache: &Option<HashMap<String, String>>,
+) -> anyhow::Result<Option<Map<String, Value>>> {
+    let mut fixed = false;
+
+    match lock
+        .get("lockfileVersion")
+        .ok_or_else(|| anyhow!("couldn't get lockfile version"))?
+        .as_i64()
+        .ok_or_else(|| anyhow!("lockfile version isn't an int"))?
+    {
+        1 => fixup_v1_deps(
+            lock.get_mut("dependencies")
+                .unwrap()
+                .as_object_mut()
+                .unwrap(),
+            cache,
+            &mut fixed,
+        ),
+        2 | 3 => {
+            for package in lock
+                .get_mut("packages")
+                .ok_or_else(|| anyhow!("couldn't get packages"))?
+                .as_object_mut()
+                .ok_or_else(|| anyhow!("packages isn't a map"))?
+                .values_mut()
+            {
+                if let Some(Value::String(resolved)) = package.get("resolved") {
+                    if let Some(Value::String(integrity)) = package.get("integrity") {
+                        if resolved.starts_with("git+ssh://") {
+                            fixed = true;
+
+                            package
+                                .as_object_mut()
+                                .ok_or_else(|| anyhow!("package isn't a map"))?
+                                .remove("integrity");
+                        } else if let Some(cache_hashes) = cache {
+                            let cache_hash = cache_hashes
+                                .get(resolved)
+                                .expect("dependency should have a hash");
+
+                            if integrity != cache_hash {
+                                fixed = true;
+
+                                *package
+                                    .as_object_mut()
+                                    .ok_or_else(|| anyhow!("package isn't a map"))?
+                                    .get_mut("integrity")
+                                    .unwrap() = Value::String(cache_hash.clone());
+                            }
+                        }
+                    }
+                }
+            }
+
+            if fixed {
+                lock.remove("dependencies");
+            }
+        }
+        v => bail!("unsupported lockfile version {v}"),
+    }
+
+    if fixed {
+        Ok(Some(lock))
+    } else {
+        Ok(None)
+    }
+}
+
+// Recursive helper to fixup v1 lockfile deps
+fn fixup_v1_deps(
+    dependencies: &mut Map<String, Value>,
+    cache: &Option<HashMap<String, String>>,
+    fixed: &mut bool,
+) {
+    for dep in dependencies.values_mut() {
+        if let Some(Value::String(resolved)) = dep
+            .as_object()
+            .expect("v1 dep must be object")
+            .get("resolved")
+        {
+            if let Some(Value::String(integrity)) = dep
+                .as_object()
+                .expect("v1 dep must be object")
+                .get("integrity")
+            {
+                if resolved.starts_with("git+ssh://") {
+                    *fixed = true;
+
+                    dep.as_object_mut()
+                        .expect("v1 dep must be object")
+                        .remove("integrity");
+                } else if let Some(cache_hashes) = cache {
+                    let cache_hash = cache_hashes
+                        .get(resolved)
+                        .expect("dependency should have a hash");
+
+                    if integrity != cache_hash {
+                        *fixed = true;
+
+                        *dep.as_object_mut()
+                            .expect("v1 dep must be object")
+                            .get_mut("integrity")
+                            .unwrap() = Value::String(cache_hash.clone());
+                    }
+                }
+            }
+        }
+
+        if let Some(Value::Object(more_deps)) = dep.as_object_mut().unwrap().get_mut("dependencies")
+        {
+            fixup_v1_deps(more_deps, cache, fixed);
+        }
+    }
+}
+
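+// Read back every index entry written by `Cache::put` into the prefetched `npmDeps` output
+// and build a map from resolved URL to the integrity hash stored for it.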
+fn map_cache() -> anyhow::Result<HashMap<Url, String>> {
+    let mut hashes = HashMap::new();
+
+    let content_path = Path::new(&env::var_os("npmDeps").unwrap()).join("_cacache/index-v5");
+
+    for entry in WalkDir::new(content_path) {
+        let entry = entry?;
+
+        if entry.file_type().is_file() {
+            let content = fs::read_to_string(entry.path())?;
+            let key: Key = serde_json::from_str(content.split_ascii_whitespace().nth(1).unwrap())?;
+
+            hashes.insert(key.metadata.url, key.integrity);
+        }
+    }
+
+    Ok(hashes)
+}
+
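+// Supported invocations:
+//   <prog> <package-lock.json> [out-dir]  -- prefetch all dependencies into a cacache at out-dir
+//                                            (or into a tempdir, printing its `nix hash path`)
+//   <prog> --fixup-lockfile <lockfile>    -- normalize `integrity` fields in place (see `fixup_lockfile`)
+//   <prog> --map-cache                    -- write a resolved-URL-to-integrity map to $CACHE_MAP_PATH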
+fn main() -> anyhow::Result<()> {
+    env_logger::init();
+
+    let args = env::args().collect::<Vec<_>>();
+
+    if args.len() < 2 {
+        println!("usage: {} <path/to/package-lock.json>", args[0]);
+        println!();
+        println!("Prefetches npm dependencies for usage by fetchNpmDeps.");
+
+        process::exit(1);
+    }
+
+    if let Ok(jobs) = env::var("NIX_BUILD_CORES") {
+        if !jobs.is_empty() {
+            rayon::ThreadPoolBuilder::new()
+                .num_threads(
+                    jobs.parse()
+                        .expect("NIX_BUILD_CORES must be a whole number"),
+                )
+                .build_global()
+                .unwrap();
+        }
+    }
+
+    if args[1] == "--fixup-lockfile" {
+        let lock = serde_json::from_str(&fs::read_to_string(&args[2])?)?;
+
+        let cache = cache_map_path()
+            .map(|map_path| Ok::<_, anyhow::Error>(serde_json::from_slice(&fs::read(map_path)?)?))
+            .transpose()?;
+
+        if let Some(fixed) = fixup_lockfile(lock, &cache)? {
+            println!("Fixing lockfile");
+
+            fs::write(&args[2], serde_json::to_string(&fixed)?)?;
+        }
+
+        return Ok(());
+    } else if args[1] == "--map-cache" {
+        let map = map_cache()?;
+
+        fs::write(
+            cache_map_path().expect("CACHE_MAP_PATH environment variable must be set"),
+            serde_json::to_string(&map)?,
+        )?;
+
+        return Ok(());
+    }
+
+    let lock_content = fs::read_to_string(&args[1])?;
+
+    let out_tempdir;
+
+    let (out, print_hash) = if let Some(path) = args.get(2) {
+        (Path::new(path), false)
+    } else {
+        out_tempdir = tempdir()?;
+
+        (out_tempdir.path(), true)
+    };
+
+    let packages = parse::lockfile(
+        &lock_content,
+        env::var("FORCE_GIT_DEPS").is_ok(),
+        env::var("FORCE_EMPTY_CACHE").is_ok(),
+    )?;
+
+    let cache = Cache::new(out.join("_cacache"));
+    cache.init()?;
+
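+    // Download every package in parallel and store it in the cache under the same key format
+    // npm's make-fetch-happen uses for its request cache.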
+    packages.into_par_iter().try_for_each(|package| {
+        eprintln!("{}", package.name);
+
+        let tarball = package
+            .tarball()
+            .map_err(|e| anyhow!("couldn't fetch {} at {}: {e:?}", package.name, package.url))?;
+        let integrity = package.integrity().map(ToString::to_string);
+
+        cache
+            .put(
+                format!("make-fetch-happen:request-cache:{}", package.url),
+                package.url,
+                &tarball,
+                integrity,
+            )
+            .map_err(|e| anyhow!("couldn't insert cache entry for {}: {e:?}", package.name))?;
+
+        Ok::<_, anyhow::Error>(())
+    })?;
+
+    fs::write(out.join("package-lock.json"), lock_content)?;
+
+    if print_hash {
+        Command::new("nix")
+            .args(["--experimental-features", "nix-command", "hash", "path"])
+            .arg(out.as_os_str())
+            .status()?;
+    }
+
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use std::collections::HashMap;
+
+    use super::fixup_lockfile;
+    use serde_json::json;
+
+    #[test]
+    fn lockfile_fixup() -> anyhow::Result<()> {
+        let input = json!({
+            "lockfileVersion": 2,
+            "name": "foo",
+            "packages": {
+                "": {
+
+                },
+                "foo": {
+                    "resolved": "https://github.com/NixOS/nixpkgs",
+                    "integrity": "sha1-aaa"
+                },
+                "bar": {
+                    "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
+                    "integrity": "sha512-aaa"
+                },
+                "foo-bad": {
+                    "resolved": "foo",
+                    "integrity": "sha1-foo"
+                },
+                "foo-good": {
+                    "resolved": "foo",
+                    "integrity": "sha512-foo"
+                },
+            }
+        });
+
+        let expected = json!({
+            "lockfileVersion": 2,
+            "name": "foo",
+            "packages": {
+                "": {
+
+                },
+                "foo": {
+                    "resolved": "https://github.com/NixOS/nixpkgs",
+                    "integrity": ""
+                },
+                "bar": {
+                    "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
+                },
+                "foo-bad": {
+                    "resolved": "foo",
+                    "integrity": "sha512-foo"
+                },
+                "foo-good": {
+                    "resolved": "foo",
+                    "integrity": "sha512-foo"
+                },
+            }
+        });
+
+        let mut hashes = HashMap::new();
+
+        hashes.insert(
+            String::from("https://github.com/NixOS/nixpkgs"),
+            String::new(),
+        );
+
+        hashes.insert(
+            String::from("git+ssh://git@github.com/NixOS/nixpkgs.git"),
+            String::new(),
+        );
+
+        hashes.insert(String::from("foo"), String::from("sha512-foo"));
+
+        assert_eq!(
+            fixup_lockfile(input.as_object().unwrap().clone(), &Some(hashes))?,
+            Some(expected.as_object().unwrap().clone())
+        );
+
+        Ok(())
+    }
+
+    #[test]
+    fn lockfile_v1_fixup() -> anyhow::Result<()> {
+        let input = json!({
+            "lockfileVersion": 1,
+            "name": "foo",
+            "dependencies": {
+                "foo": {
+                    "resolved": "https://github.com/NixOS/nixpkgs",
+                    "integrity": "sha512-aaa"
+                },
+                "foo-good": {
+                    "resolved": "foo",
+                    "integrity": "sha512-foo"
+                },
+                "bar": {
+                    "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
+                    "integrity": "sha512-bbb",
+                    "dependencies": {
+                        "foo-bad": {
+                            "resolved": "foo",
+                            "integrity": "sha1-foo"
+                        },
+                    },
+                },
+            }
+        });
+
+        let expected = json!({
+            "lockfileVersion": 1,
+            "name": "foo",
+            "dependencies": {
+                "foo": {
+                    "resolved": "https://github.com/NixOS/nixpkgs",
+                    "integrity": ""
+                },
+                "foo-good": {
+                    "resolved": "foo",
+                    "integrity": "sha512-foo"
+                },
+                "bar": {
+                    "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
+                    "dependencies": {
+                        "foo-bad": {
+                            "resolved": "foo",
+                            "integrity": "sha512-foo"
+                        },
+                    },
+                },
+            }
+        });
+
+        let mut hashes = HashMap::new();
+
+        hashes.insert(
+            String::from("https://github.com/NixOS/nixpkgs"),
+            String::new(),
+        );
+
+        hashes.insert(
+            String::from("git+ssh://git@github.com/NixOS/nixpkgs.git"),
+            String::new(),
+        );
+
+        hashes.insert(String::from("foo"), String::from("sha512-foo"));
+
+        assert_eq!(
+            fixup_lockfile(input.as_object().unwrap().clone(), &Some(hashes))?,
+            Some(expected.as_object().unwrap().clone())
+        );
+
+        Ok(())
+    }
+}
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs
new file mode 100644
index 000000000000..c6e77153a0b8
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs
@@ -0,0 +1,370 @@
+use anyhow::{anyhow, bail, Context};
+use rayon::slice::ParallelSliceMut;
+use serde::{
+    de::{self, Visitor},
+    Deserialize, Deserializer,
+};
+use std::{
+    cmp::Ordering,
+    collections::{HashMap, HashSet},
+    fmt,
+};
+use url::Url;
+
+pub(super) fn packages(content: &str) -> anyhow::Result<Vec<Package>> {
+    let lockfile: Lockfile = serde_json::from_str(content)?;
+
+    let mut packages = match lockfile.version {
+        1 => {
+            let initial_url = get_initial_url()?;
+
+            to_new_packages(lockfile.dependencies.unwrap_or_default(), &initial_url)?
+        }
+        2 | 3 => lockfile
+            .packages
+            .unwrap_or_default()
+            .into_iter()
+            .filter(|(n, p)| !n.is_empty() && matches!(p.resolved, Some(UrlOrString::Url(_))))
+            .map(|(n, p)| Package { name: Some(n), ..p })
+            .collect(),
+        _ => bail!(
+            "We don't support lockfile version {}, please file an issue.",
+            lockfile.version
+        ),
+    };
+
+    packages.par_sort_by(|x, y| {
+        x.resolved
+            .partial_cmp(&y.resolved)
+            .expect("resolved should be comparable")
+            .then(
+                // v1 lockfiles can contain multiple references to the same version of a package, with
+                // different integrity values (e.g. a SHA-1 and a SHA-512 in one, but just a SHA-512 in another)
+                y.integrity
+                    .partial_cmp(&x.integrity)
+                    .expect("integrity should be comparable"),
+            )
+    });
+
+    packages.dedup_by(|x, y| x.resolved == y.resolved);
+
+    Ok(packages)
+}
+
+#[derive(Deserialize)]
+struct Lockfile {
+    #[serde(rename = "lockfileVersion")]
+    version: u8,
+    dependencies: Option<HashMap<String, OldPackage>>,
+    packages: Option<HashMap<String, Package>>,
+}
+
+#[derive(Deserialize)]
+struct OldPackage {
+    version: UrlOrString,
+    #[serde(default)]
+    bundled: bool,
+    resolved: Option<UrlOrString>,
+    integrity: Option<HashCollection>,
+    dependencies: Option<HashMap<String, OldPackage>>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Eq)]
+pub(super) struct Package {
+    #[serde(default)]
+    pub(super) name: Option<String>,
+    pub(super) resolved: Option<UrlOrString>,
+    pub(super) integrity: Option<HashCollection>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
+#[serde(untagged)]
+pub(super) enum UrlOrString {
+    Url(Url),
+    String(String),
+}
+
+impl fmt::Display for UrlOrString {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            UrlOrString::Url(url) => url.fmt(f),
+            UrlOrString::String(string) => string.fmt(f),
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct HashCollection(HashSet<Hash>);
+
+impl HashCollection {
+    pub fn from_str(s: impl AsRef<str>) -> anyhow::Result<HashCollection> {
+        let hashes = s
+            .as_ref()
+            .split_ascii_whitespace()
+            .map(Hash::new)
+            .collect::<anyhow::Result<_>>()?;
+
+        Ok(HashCollection(hashes))
+    }
+
+    pub fn into_best(self) -> Option<Hash> {
+        self.0.into_iter().max()
+    }
+}
+
+impl PartialOrd for HashCollection {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        let lhs = self.0.iter().max()?;
+        let rhs = other.0.iter().max()?;
+
+        lhs.partial_cmp(rhs)
+    }
+}
+
+impl<'de> Deserialize<'de> for HashCollection {
+    fn deserialize<D>(deserializer: D) -> Result<HashCollection, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        deserializer.deserialize_string(HashCollectionVisitor)
+    }
+}
+
+struct HashCollectionVisitor;
+
+impl<'de> Visitor<'de> for HashCollectionVisitor {
+    type Value = HashCollection;
+
+    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        formatter.write_str("a single SRI hash or a collection of them (separated by spaces)")
+    }
+
+    fn visit_str<E>(self, value: &str) -> Result<HashCollection, E>
+    where
+        E: de::Error,
+    {
+        HashCollection::from_str(value).map_err(E::custom)
+    }
+}
+
+#[derive(Clone, Debug, Deserialize, PartialEq, Eq, Hash)]
+pub struct Hash(String);
+
+// Hash algorithms, in ascending preference.
+const ALGOS: &[&str] = &["sha1", "sha512"];
+
+impl Hash {
+    fn new(s: impl AsRef<str>) -> anyhow::Result<Hash> {
+        let algo = s
+            .as_ref()
+            .split_once('-')
+            .ok_or_else(|| anyhow!("expected SRI hash, got {:?}", s.as_ref()))?
+            .0;
+
+        if ALGOS.iter().any(|&a| algo == a) {
+            Ok(Hash(s.as_ref().to_string()))
+        } else {
+            Err(anyhow!("unknown hash algorithm {algo:?}"))
+        }
+    }
+
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
+}
+
+impl fmt::Display for Hash {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.as_str().fmt(f)
+    }
+}
+
+#[allow(clippy::incorrect_partial_ord_impl_on_ord_type)]
+impl PartialOrd for Hash {
+    fn partial_cmp(&self, other: &Hash) -> Option<Ordering> {
+        let lhs = self.0.split_once('-')?.0;
+        let rhs = other.0.split_once('-')?.0;
+
+        ALGOS
+            .iter()
+            .position(|&s| lhs == s)?
+            .partial_cmp(&ALGOS.iter().position(|&s| rhs == s)?)
+    }
+}
+
+impl Ord for Hash {
+    fn cmp(&self, other: &Hash) -> Ordering {
+        self.partial_cmp(other).unwrap()
+    }
+}
+
+#[allow(clippy::case_sensitive_file_extension_comparisons)]
+fn to_new_packages(
+    old_packages: HashMap<String, OldPackage>,
+    initial_url: &Url,
+) -> anyhow::Result<Vec<Package>> {
+    let mut new = Vec::new();
+
+    for (name, mut package) in old_packages {
+        // In some cases, a bundled dependency happens to have the same version as a non-bundled one, causing
+        // the bundled one without a URL to override the entry for the non-bundled instance, which prevents the
+        // dependency from being downloaded.
+        if package.bundled {
+            continue;
+        }
+
+        if let UrlOrString::Url(v) = &package.version {
+            if v.scheme() == "npm" {
+                if let Some(UrlOrString::Url(ref url)) = &package.resolved {
+                    package.version = UrlOrString::Url(url.clone());
+                }
+            } else {
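+                // Rewrite hosted-provider shorthands (e.g. "github:user/repo#rev") into full
+                // "git+ssh://git@<host>/user/repo.git#rev" URLs, using `initial_url` as the template.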
+                for (scheme, host) in [
+                    ("github", "github.com"),
+                    ("bitbucket", "bitbucket.org"),
+                    ("gitlab", "gitlab.com"),
+                ] {
+                    if v.scheme() == scheme {
+                        package.version = {
+                            let mut new_url = initial_url.clone();
+
+                            new_url.set_host(Some(host))?;
+
+                            if v.path().ends_with(".git") {
+                                new_url.set_path(v.path());
+                            } else {
+                                new_url.set_path(&format!("{}.git", v.path()));
+                            }
+
+                            new_url.set_fragment(v.fragment());
+
+                            UrlOrString::Url(new_url)
+                        };
+
+                        break;
+                    }
+                }
+            }
+        }
+
+        new.push(Package {
+            name: Some(name),
+            resolved: if matches!(package.version, UrlOrString::Url(_)) {
+                Some(package.version)
+            } else {
+                package.resolved
+            },
+            integrity: package.integrity,
+        });
+
+        if let Some(dependencies) = package.dependencies {
+            new.append(&mut to_new_packages(dependencies, initial_url)?);
+        }
+    }
+
+    Ok(new)
+}
+
+fn get_initial_url() -> anyhow::Result<Url> {
+    Url::parse("git+ssh://git@a.b").context("initial url should be valid")
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{
+        get_initial_url, packages, to_new_packages, Hash, HashCollection, OldPackage, Package,
+        UrlOrString,
+    };
+    use std::{
+        cmp::Ordering,
+        collections::{HashMap, HashSet},
+    };
+    use url::Url;
+
+    #[test]
+    fn git_shorthand_v1() -> anyhow::Result<()> {
+        let old = {
+            let mut o = HashMap::new();
+            o.insert(
+                String::from("sqlite3"),
+                OldPackage {
+                    version: UrlOrString::Url(
+                        Url::parse(
+                            "github:mapbox/node-sqlite3#593c9d498be2510d286349134537e3bf89401c4a",
+                        )
+                        .unwrap(),
+                    ),
+                    bundled: false,
+                    resolved: None,
+                    integrity: None,
+                    dependencies: None,
+                },
+            );
+            o
+        };
+
+        let initial_url = get_initial_url()?;
+
+        let new = to_new_packages(old, &initial_url)?;
+
+        assert_eq!(new.len(), 1, "new packages map should contain 1 value");
+        assert_eq!(new[0], Package {
+            name: Some(String::from("sqlite3")),
+            resolved: Some(UrlOrString::Url(Url::parse("git+ssh://git@github.com/mapbox/node-sqlite3.git#593c9d498be2510d286349134537e3bf89401c4a").unwrap())),
+            integrity: None
+        });
+
+        Ok(())
+    }
+
+    #[test]
+    fn hash_preference() {
+        assert_eq!(
+            Hash(String::from("sha1-foo")).partial_cmp(&Hash(String::from("sha512-foo"))),
+            Some(Ordering::Less)
+        );
+
+        assert_eq!(
+            HashCollection({
+                let mut set = HashSet::new();
+                set.insert(Hash(String::from("sha512-foo")));
+                set.insert(Hash(String::from("sha1-bar")));
+                set
+            })
+            .into_best(),
+            Some(Hash(String::from("sha512-foo")))
+        );
+    }
+
+    #[test]
+    fn parse_lockfile_correctly() {
+        let packages = packages(
+            r#"{
+                "name": "node-ddr",
+                "version": "1.0.0",
+                "lockfileVersion": 1,
+                "requires": true,
+                "dependencies": {
+                    "string-width-cjs": {
+                        "version": "npm:string-width@4.2.3",
+                        "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+                        "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+                        "requires": {
+                            "emoji-regex": "^8.0.0",
+                            "is-fullwidth-code-point": "^3.0.0",
+                            "strip-ansi": "^6.0.1"
+                        }
+                    }
+                }
+            }"#).unwrap();
+
+        assert_eq!(packages.len(), 1);
+        assert_eq!(
+            packages[0].resolved,
+            Some(UrlOrString::Url(
+                Url::parse("https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz")
+                    .unwrap()
+            ))
+        );
+    }
+}
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs
new file mode 100644
index 000000000000..0bca33f03915
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs
@@ -0,0 +1,353 @@
+use anyhow::{anyhow, bail, Context};
+use lock::UrlOrString;
+use log::{debug, info};
+use rayon::prelude::*;
+use serde_json::{Map, Value};
+use std::{
+    fs,
+    io::Write,
+    process::{Command, Stdio},
+};
+use tempfile::{tempdir, TempDir};
+use url::Url;
+
+use crate::util;
+
+pub mod lock;
+
+pub fn lockfile(
+    content: &str,
+    force_git_deps: bool,
+    force_empty_cache: bool,
+) -> anyhow::Result<Vec<Package>> {
+    debug!("parsing lockfile with contents:\n{content}");
+
+    let mut packages = lock::packages(content)
+        .context("failed to extract packages from lockfile")?
+        .into_par_iter()
+        .map(|p| {
+            let n = p.name.clone().unwrap();
+
+            Package::from_lock(p).with_context(|| format!("failed to parse data for {n}"))
+        })
+        .collect::<anyhow::Result<Vec<_>>>()?;
+
+    if packages.is_empty() && !force_empty_cache {
+        bail!("No cacheable dependencies were found. Please inspect the upstream `package-lock.json` file and ensure that remote dependencies have `resolved` URLs and `integrity` hashes. If the lockfile is missing this data, attempt to get upstream to fix it via a tool like <https://github.com/jeslie0/npm-lockfile-fix>. If generating an empty cache is intentional and you would like to do it anyways, set `forceEmptyCache = true`.");
+    }
+
+    let mut new = Vec::new();
+
+    for pkg in packages
+        .iter()
+        .filter(|p| matches!(p.specifics, Specifics::Git { .. }))
+    {
+        let dir = match &pkg.specifics {
+            Specifics::Git { workdir } => workdir,
+            Specifics::Registry { .. } => unimplemented!(),
+        };
+
+        let path = dir.path().join("package");
+
+        info!("recursively parsing lockfile for {} at {path:?}", pkg.name);
+
+        let lockfile_contents = fs::read_to_string(path.join("package-lock.json"));
+
+        let package_json_path = path.join("package.json");
+        let mut package_json: Map<String, Value> =
+            serde_json::from_str(&fs::read_to_string(package_json_path)?)?;
+
+        if let Some(scripts) = package_json
+            .get_mut("scripts")
+            .and_then(Value::as_object_mut)
+        {
+            // https://github.com/npm/pacote/blob/272edc1bac06991fc5f95d06342334bbacfbaa4b/lib/git.js#L166-L172
+            for typ in [
+                "postinstall",
+                "build",
+                "preinstall",
+                "install",
+                "prepack",
+                "prepare",
+            ] {
+                if scripts.contains_key(typ) && lockfile_contents.is_err() && !force_git_deps {
+                    bail!("Git dependency {} contains install scripts, but has no lockfile, which is something that will probably break. Open an issue if you can't feasibly patch this dependency out, and we'll come up with a workaround.\nIf you'd like to attempt to try to use this dependency anyways, set `forceGitDeps = true`.", pkg.name);
+                }
+            }
+        }
+
+        if let Ok(lockfile_contents) = lockfile_contents {
+            new.append(&mut lockfile(
+                &lockfile_contents,
+                force_git_deps,
+                // force_empty_cache is turned on here since recursively parsed lockfiles should be
+                // allowed to have an empty cache without erroring by default
+                true,
+            )?);
+        }
+    }
+
+    packages.append(&mut new);
+
+    packages.par_sort_by(|x, y| {
+        x.url
+            .partial_cmp(&y.url)
+            .expect("resolved should be comparable")
+    });
+
+    packages.dedup_by(|x, y| x.url == y.url);
+
+    Ok(packages)
+}
+
+#[derive(Debug)]
+pub struct Package {
+    pub name: String,
+    pub url: Url,
+    specifics: Specifics,
+}
+
+#[derive(Debug)]
+enum Specifics {
+    Registry { integrity: lock::Hash },
+    Git { workdir: TempDir },
+}
+
+impl Package {
+    fn from_lock(pkg: lock::Package) -> anyhow::Result<Package> {
+        let mut resolved = match pkg
+            .resolved
+            .expect("at this point, packages should have URLs")
+        {
+            UrlOrString::Url(u) => u,
+            UrlOrString::String(_) => panic!("at this point, all packages should have URLs"),
+        };
+
+        let specifics = match get_hosted_git_url(&resolved)? {
+            Some(hosted) => {
+                let body = util::get_url_body_with_retry(&hosted)?;
+
+                let workdir = tempdir()?;
+
+                let tar_path = workdir.path().join("package");
+
+                fs::create_dir(&tar_path)?;
+
+                let mut cmd = Command::new("tar")
+                    .args(["--extract", "--gzip", "--strip-components=1", "-C"])
+                    .arg(&tar_path)
+                    .stdin(Stdio::piped())
+                    .spawn()?;
+
+                cmd.stdin.take().unwrap().write_all(&body)?;
+
+                let exit = cmd.wait()?;
+
+                if !exit.success() {
+                    bail!(
+                        "failed to extract tarball for {}: tar exited with status code {}",
+                        pkg.name.unwrap(),
+                        exit.code().unwrap()
+                    );
+                }
+
+                resolved = hosted;
+
+                Specifics::Git { workdir }
+            }
+            None => Specifics::Registry {
+                integrity: pkg
+                    .integrity
+                    .expect("non-git dependencies should have associated integrity")
+                    .into_best()
+                    .expect("non-git dependencies should have non-empty associated integrity"),
+            },
+        };
+
+        Ok(Package {
+            name: pkg.name.unwrap(),
+            url: resolved,
+            specifics,
+        })
+    }
+
+    pub fn tarball(&self) -> anyhow::Result<Vec<u8>> {
+        match &self.specifics {
+            Specifics::Registry { .. } => Ok(util::get_url_body_with_retry(&self.url)?),
+            Specifics::Git { workdir } => Ok(Command::new("tar")
+                .args([
+                    "--sort=name",
+                    "--mtime=@0",
+                    "--owner=0",
+                    "--group=0",
+                    "--numeric-owner",
+                    "--format=gnu",
+                    "-I",
+                    "gzip -n -9",
+                    "--create",
+                    "-C",
+                ])
+                .arg(workdir.path())
+                .arg("package")
+                .output()?
+                .stdout),
+        }
+    }
+
+    pub fn integrity(&self) -> Option<&lock::Hash> {
+        match &self.specifics {
+            Specifics::Registry { integrity } => Some(integrity),
+            Specifics::Git { .. } => None,
+        }
+    }
+}
+
+#[allow(clippy::case_sensitive_file_extension_comparisons)]
+fn get_hosted_git_url(url: &Url) -> anyhow::Result<Option<Url>> {
+    if ["git", "git+ssh", "git+https", "ssh"].contains(&url.scheme()) {
+        let mut s = url
+            .path_segments()
+            .ok_or_else(|| anyhow!("bad URL: {url}"))?;
+
+        let mut get_url = || match url.host_str()? {
+            "github.com" => {
+                let user = s.next()?;
+                let mut project = s.next()?;
+                let typ = s.next();
+                let mut commit = s.next();
+
+                if typ.is_none() {
+                    commit = url.fragment();
+                } else if typ.is_some() && typ != Some("tree") {
+                    return None;
+                }
+
+                if project.ends_with(".git") {
+                    project = project.strip_suffix(".git")?;
+                }
+
+                let commit = commit.unwrap();
+
+                Some(
+                    Url::parse(&format!(
+                        "https://codeload.github.com/{user}/{project}/tar.gz/{commit}"
+                    ))
+                    .ok()?,
+                )
+            }
+            "bitbucket.org" => {
+                let user = s.next()?;
+                let mut project = s.next()?;
+                let aux = s.next();
+
+                if aux == Some("get") {
+                    return None;
+                }
+
+                if project.ends_with(".git") {
+                    project = project.strip_suffix(".git")?;
+                }
+
+                let commit = url.fragment()?;
+
+                Some(
+                    Url::parse(&format!(
+                        "https://bitbucket.org/{user}/{project}/get/{commit}.tar.gz"
+                    ))
+                    .ok()?,
+                )
+            }
+            "gitlab.com" => {
+                /* let path = &url.path()[1..];
+
+                if path.contains("/~/") || path.contains("/archive.tar.gz") {
+                    return None;
+                }
+
+                let user = s.next()?;
+                let mut project = s.next()?;
+
+                if project.ends_with(".git") {
+                    project = project.strip_suffix(".git")?;
+                }
+
+                let commit = url.fragment()?;
+
+                Some(
+                    Url::parse(&format!(
+                    "https://gitlab.com/{user}/{project}/repository/archive.tar.gz?ref={commit}"
+                ))
+                    .ok()?,
+                ) */
+
+                // lmao: https://github.com/npm/hosted-git-info/pull/109
+                None
+            }
+            "git.sr.ht" => {
+                let user = s.next()?;
+                let mut project = s.next()?;
+                let aux = s.next();
+
+                if aux == Some("archive") {
+                    return None;
+                }
+
+                if project.ends_with(".git") {
+                    project = project.strip_suffix(".git")?;
+                }
+
+                let commit = url.fragment()?;
+
+                Some(
+                    Url::parse(&format!(
+                        "https://git.sr.ht/{user}/{project}/archive/{commit}.tar.gz"
+                    ))
+                    .ok()?,
+                )
+            }
+            _ => None,
+        };
+
+        match get_url() {
+            Some(u) => Ok(Some(u)),
+            None => Err(anyhow!("This lockfile contains either a Git dependency with an unsupported host or a malformed URL: {url}"))
+        }
+    } else {
+        Ok(None)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::get_hosted_git_url;
+    use url::Url;
+
+    #[test]
+    fn hosted_git_urls() {
+        for (input, expected) in [
+            (
+                "git+ssh://git@github.com/castlabs/electron-releases.git#fc5f78d046e8d7cdeb66345a2633c383ab41f525",
+                Some("https://codeload.github.com/castlabs/electron-releases/tar.gz/fc5f78d046e8d7cdeb66345a2633c383ab41f525"),
+            ),
+            (
+                "git+ssh://bitbucket.org/foo/bar#branch",
+                Some("https://bitbucket.org/foo/bar/get/branch.tar.gz")
+            ),
+            (
+                "git+ssh://git.sr.ht/~foo/bar#branch",
+                Some("https://git.sr.ht/~foo/bar/archive/branch.tar.gz")
+            ),
+        ] {
+            assert_eq!(
+                get_hosted_git_url(&Url::parse(input).unwrap()).unwrap(),
+                expected.map(|u| Url::parse(u).unwrap())
+            );
+        }
+
+        assert!(
+            get_hosted_git_url(&Url::parse("ssh://git@gitlab.com/foo/bar.git#fix/bug").unwrap())
+                .is_err(),
+            "GitLab URLs should be marked as invalid (lol)"
+        );
+    }
+}
diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/util.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/util.rs
new file mode 100644
index 000000000000..7dd928fdc43f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/util.rs
@@ -0,0 +1,66 @@
+use backoff::{retry, ExponentialBackoff};
+use isahc::{
+    config::{CaCertificate, Configurable, RedirectPolicy, SslOption},
+    Body, Request, RequestExt,
+};
+use serde_json::{Map, Value};
+use std::{env, io::Read, path::Path};
+use url::Url;
+
+pub fn get_url(url: &Url) -> Result<Body, isahc::Error> {
+    let mut request = Request::get(url.as_str()).redirect_policy(RedirectPolicy::Limit(10));
+
+    // Respect SSL_CERT_FILE if environment variable exists
+    if let Ok(ssl_cert_file) = env::var("SSL_CERT_FILE") {
+        if Path::new(&ssl_cert_file).exists() {
+            // When file exists, use it. NIX_SSL_CERT_FILE will still override.
+            request = request.ssl_ca_certificate(CaCertificate::file(ssl_cert_file));
+        } else if env::var("outputHash").is_ok() {
+            // When file does not exist, assume we are downloading in a FOD and
+            // therefore do not need to check certificates, since the output is
+            // already hashed.
+            request = request.ssl_options(SslOption::DANGER_ACCEPT_INVALID_CERTS);
+        }
+    }
+
+    // Respect NIX_NPM_TOKENS environment variable, which should be a JSON mapping in the shape of:
+    // `{ "registry.example.com": "example-registry-bearer-token", ... }`
+    if let Some(host) = url.host_str() {
+        if let Ok(npm_tokens) = env::var("NIX_NPM_TOKENS") {
+            if let Ok(tokens) = serde_json::from_str::<Map<String, Value>>(&npm_tokens) {
+                if let Some(token) = tokens.get(host).and_then(serde_json::Value::as_str) {
+                    request = request.header("Authorization", format!("Bearer {token}"));
+                }
+            }
+        }
+    }
+
+    Ok(request.body(())?.send()?.into_body())
+}
+
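+// Fetch the full response body for `url`, retrying transient network and timeout errors with
+// exponential backoff; any other error is treated as permanent and returned immediately.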
+pub fn get_url_body_with_retry(url: &Url) -> Result<Vec<u8>, isahc::Error> {
+    retry(ExponentialBackoff::default(), || {
+        get_url(url)
+            .and_then(|mut body| {
+                let mut buf = Vec::new();
+
+                body.read_to_end(&mut buf)?;
+
+                Ok(buf)
+            })
+            .map_err(|err| {
+                if err.is_network() || err.is_timeout() {
+                    backoff::Error::transient(err)
+                } else {
+                    backoff::Error::permanent(err)
+                }
+            })
+    })
+    .map_err(|backoff_err| match backoff_err {
+        backoff::Error::Permanent(err)
+        | backoff::Error::Transient {
+            err,
+            retry_after: _,
+        } => err,
+    })
+}
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/common.js b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/common.js
new file mode 100644
index 000000000000..8e0d1b0e470b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/common.js
@@ -0,0 +1,17 @@
+const path = require('path')
+
+// This has to match the logic in pkgs/development/tools/yarn2nix-moretea/yarn2nix/lib/urlToName.js
+// so that fixup_yarn_lock produces the same paths
+const urlToName = url => {
+	const isCodeloadGitTarballUrl = url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')
+
+	if (url.startsWith('git+') || isCodeloadGitTarballUrl) {
+		return path.basename(url)
+	} else {
+		return url
+			.replace(/https:\/\/(.)*(.com)\//g, '') // prevents having long directory names
+			.replace(/[@/%:-]/g, '_') // replace @, /, %, : and - characters with underscores
+	}
+}
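+
+// Illustrative examples of the mapping above:
+//   urlToName('https://registry.yarnpkg.com/lit-html/-/lit-html-1.4.1.tgz')
+//     -> 'lit_html___lit_html_1.4.1.tgz'
+//   urlToName('https://codeload.github.com/caolan/async/tar.gz/fc9ba651341af5ab974aade6b1640e345912be83')
+//     -> 'fc9ba651341af5ab974aade6b1640e345912be83'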
+
+module.exports = { urlToName };
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/default.nix b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/default.nix
new file mode 100644
index 000000000000..e837f7457d17
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/default.nix
@@ -0,0 +1,85 @@
+{ stdenv, lib, makeWrapper, coreutils, nix-prefetch-git, fetchurl, nodejs-slim, prefetch-yarn-deps, cacert, callPackage, nix }:
+
+let
+  yarnpkg-lockfile-tar = fetchurl {
+    url = "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz";
+    hash = "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==";
+  };
+
+  tests = callPackage ./tests {};
+
+in {
+  prefetch-yarn-deps = stdenv.mkDerivation {
+    name = "prefetch-yarn-deps";
+
+    dontUnpack = true;
+
+    nativeBuildInputs = [ makeWrapper ];
+    buildInputs = [ coreutils nix-prefetch-git nodejs-slim nix ];
+
+    buildPhase = ''
+      runHook preBuild
+
+      mkdir libexec
+      tar --strip-components=1 -xf ${yarnpkg-lockfile-tar} package/index.js
+      mv index.js libexec/yarnpkg-lockfile.js
+      cp ${./.}/*.js libexec/
+      patchShebangs libexec
+
+      runHook postBuild
+    '';
+
+    installPhase = ''
+      runHook preInstall
+
+      mkdir -p $out/bin
+      cp -r libexec $out
+      makeWrapper $out/libexec/index.js $out/bin/prefetch-yarn-deps \
+        --prefix PATH : ${lib.makeBinPath [ coreutils nix-prefetch-git nix ]}
+      makeWrapper $out/libexec/fixup.js $out/bin/fixup-yarn-lock
+
+      runHook postInstall
+    '';
+
+    passthru = { inherit tests; };
+  };
+
+  fetchYarnDeps = let
+    f = {
+      name ? "offline",
+      src ? null,
+      hash ? "",
+      sha256 ? "",
+      ...
+    }@args: let
+      hash_ =
+        if hash != "" then { outputHashAlgo = null; outputHash = hash; }
+        else if sha256 != "" then { outputHashAlgo = "sha256"; outputHash = sha256; }
+        else { outputHashAlgo = "sha256"; outputHash = lib.fakeSha256; };
+    in stdenv.mkDerivation ({
+      inherit name;
+
+      dontUnpack = src == null;
+      dontInstall = true;
+
+      nativeBuildInputs = [ prefetch-yarn-deps cacert ];
+      GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";
+      NODE_EXTRA_CA_CERTS = "${cacert}/etc/ssl/certs/ca-bundle.crt";
+
+      buildPhase = ''
+        runHook preBuild
+
+        yarnLock=''${yarnLock:=$PWD/yarn.lock}
+        mkdir -p $out
+        (cd $out; prefetch-yarn-deps --verbose --builder $yarnLock)
+
+        runHook postBuild
+      '';
+
+      outputHashMode = "recursive";
+    } // hash_ // (removeAttrs args ["src" "name" "hash" "sha256"]));
+
+  in lib.setFunctionArgs f (lib.functionArgs f) // {
+    inherit tests;
+  };
+}
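+# Typical usage of fetchYarnDeps (an illustrative sketch; the hash is a placeholder to fill in
+# after the first build attempt):
+#
+#   offlineCache = fetchYarnDeps {
+#     yarnLock = src + "/yarn.lock";
+#     hash = "sha256-...";
+#   };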
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/fixup.js b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/fixup.js
new file mode 100755
index 000000000000..732e569aba7b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/fixup.js
@@ -0,0 +1,76 @@
+#!/usr/bin/env node
+'use strict'
+
+const fs = require('fs')
+const process = require('process')
+const lockfile = require('./yarnpkg-lockfile.js')
+const { urlToName } = require('./common.js')
+
+const fixupYarnLock = async (lockContents, verbose) => {
+	const lockData = lockfile.parse(lockContents)
+
+	const fixedData = Object.fromEntries(
+		Object.entries(lockData.object)
+		.map(([dep, pkg]) => {
+			const [ url, hash ] = pkg.resolved.split("#", 2)
+
+			if (hash || url.startsWith("https://codeload.github.com")) {
+				if (verbose) console.log(`Removing integrity for git dependency ${dep}`)
+				delete pkg.integrity
+			}
+
+			if (verbose) console.log(`Rewriting URL ${url} for dependency ${dep}`)
+			pkg.resolved = urlToName(url)
+			if (hash)
+				pkg.resolved += `#${hash}`
+
+			return [dep, pkg]
+		})
+	)
+
+	if (verbose) console.log('Done')
+
+	return fixedData
+}
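+
+// Illustrative example of the rewrite above: a lockfile entry with
+//   resolved "https://registry.yarnpkg.com/lit-html/-/lit-html-1.4.1.tgz#0c6f3ee4ad4eb610a49831787f0478ad8e9ae5e0"
+// becomes
+//   resolved "lit_html___lit_html_1.4.1.tgz#0c6f3ee4ad4eb610a49831787f0478ad8e9ae5e0"
+// so that it points at the file name produced by urlToName in the offline cache.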
+
+const showUsage = async () => {
+	process.stderr.write(`
+syntax: fixup-yarn-lock [path to yarn.lock] [options]
+
+Options:
+  -h --help         Show this help
+  -v --verbose      Verbose output
+`)
+	process.exit(1)
+}
+
+const main = async () => {
+	const args = process.argv.slice(2)
+	let next, lockFile, verbose
+	while (next = args.shift()) {
+		if (next == '--verbose' || next == '-v') {
+			verbose = true
+		} else if (next == '--help' || next == '-h') {
+			showUsage()
+		} else if (!lockFile) {
+			lockFile = next
+		} else {
+			showUsage()
+		}
+	}
+	let lockContents
+	try {
+		lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
+	} catch {
+		showUsage()
+	}
+
+	const fixedData = await fixupYarnLock(lockContents, verbose)
+	await fs.promises.writeFile(lockFile || 'yarn.lock', lockfile.stringify(fixedData))
+}
+
+main()
+	.catch(e => {
+		console.error(e)
+		process.exit(1)
+	})
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/index.js b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/index.js
new file mode 100755
index 000000000000..e60fdeb54330
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/index.js
@@ -0,0 +1,206 @@
+#!/usr/bin/env node
+'use strict'
+
+const fs = require('fs')
+const crypto = require('crypto')
+const process = require('process')
+const https = require('https')
+const child_process = require('child_process')
+const path = require('path')
+const lockfile = require('./yarnpkg-lockfile.js')
+const { promisify } = require('util')
+const url = require('url')
+const { urlToName } = require('./common.js')
+
+const execFile = promisify(child_process.execFile)
+
+const exec = async (...args) => {
+	const res = await execFile(...args)
+	if (res.error) throw new Error(res.stderr)
+	return res
+}
+
+const downloadFileHttps = (fileName, url, expectedHash, hashType = 'sha1') => {
+	return new Promise((resolve, reject) => {
+		const get = (url, redirects = 0) => https.get(url, (res) => {
+			if(redirects > 10) {
+				reject(new Error('Too many redirects!'));
+				return;
+			}
+			if(res.statusCode === 301 || res.statusCode === 302) {
+				return get(res.headers.location, redirects + 1)
+			}
+			const file = fs.createWriteStream(fileName)
+			const hash = crypto.createHash(hashType)
+			res.pipe(file)
+			res.pipe(hash).setEncoding('hex')
+			res.on('end', () => {
+				file.close()
+				const h = hash.read()
+				if (expectedHash === undefined){
+					console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against. Downloaded file had hash ${h}.`);
+				} else if (h != expectedHash) return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h}`))
+				resolve()
+			})
+			res.on('error', e => reject(e))
+		})
+		get(url)
+	})
+}
+
+const downloadGit = async (fileName, url, rev) => {
+	await exec('nix-prefetch-git', [
+		'--out', fileName + '.tmp',
+		'--url', url,
+		'--rev', rev,
+		'--builder'
+	])
+
+	await exec('tar', [
+		// hopefully make it reproducible across runs and systems
+		'--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1',
+
+		// Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79
+		'--mode', 'u+w',
+
+		'-C', fileName + '.tmp',
+		'-cf', fileName, '.'
+	])
+
+	await exec('rm', [ '-rf', fileName + '.tmp', ])
+}
+
+const isGitUrl = pattern => {
+	// https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47
+	const GIT_HOSTS = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org']
+	const GIT_PATTERN_MATCHERS = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/]
+
+	for (const matcher of GIT_PATTERN_MATCHERS) if (matcher.test(pattern)) return true
+
+	const {hostname, path} = url.parse(pattern)
+	if (hostname && path && GIT_HOSTS.indexOf(hostname) >= 0
+		// only if the dependency points at a git repo itself,
+		// e.g. facebook/flow, and not at a file inside a git repo like facebook/flow/archive/v1.0.0.tar.gz
+		&& path.split('/').filter(p => !!p).length === 2
+	) return true
+
+	return false
+}
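+
+// Illustrative examples: 'git+https://github.com/caolan/async' and 'https://github.com/facebook/flow'
+// are treated as git URLs, while 'https://github.com/foo/bar/archive/v1.0.0.tar.gz' is not.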
+
+const downloadPkg = (pkg, verbose) => {
+	const fileMarker = '@file:'
+	const split = pkg.key.split(fileMarker)
+	if (split.length == 2) {
+		console.info(`ignoring lockfile entry "${split[0]}" which points at path "${split[1]}"`)
+		return
+	} else if (split.length > 2) {
+		throw new Error(`The lockfile entry key "${pkg.key}" contains "${fileMarker}" more than once. Processing is not implemented.`)
+	}
+
+	if (pkg.resolved === undefined) {
+		throw new Error(`The lockfile entry with key "${pkg.key}" cannot be downloaded because it is missing the "resolved" attribute, which should contain the URL to download from. The lockfile might be invalid.`)
+	}
+
+	const [ url, hash ] = pkg.resolved.split('#')
+	if (verbose) console.log('downloading ' + url)
+	const fileName = urlToName(url)
+	if (url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')) {
+		const s = url.split('/')
+		return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1])
+	} else if (url.startsWith('https://github.com/') && url.endsWith('.tar.gz')) {
+		const s = url.split('/')
+		return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1].replace(/\.tar\.gz$/, ''))
+	} else if (isGitUrl(url)) {
+		return downloadGit(fileName, url.replace(/^git\+/, ''), hash)
+	} else if (url.startsWith('https://')) {
+		if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) {
+			const [ type, checksum ] = pkg.integrity.split('-')
+			return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), type)
+		}
+		return downloadFileHttps(fileName, url, hash)
+	} else if (url.startsWith('file:')) {
+		console.warn(`ignoring unsupported file:path url "${url}"`)
+	} else {
+		throw new Error('don\'t know how to download "' + url + '"')
+	}
+}
+
+const performParallel = tasks => {
+	const worker = async () => {
+		while (tasks.length > 0) await tasks.shift()()
+	}
+
+	const workers = []
+	for (let i = 0; i < 4; i++) {
+		workers.push(worker())
+	}
+
+	return Promise.all(workers)
+}
+
+const prefetchYarnDeps = async (lockContents, verbose) => {
+	const lockData = lockfile.parse(lockContents)
+	await performParallel(
+		Object.entries(lockData.object)
+		.map(([key, value]) => () => downloadPkg({ key, ...value }, verbose))
+	)
+	await fs.promises.writeFile('yarn.lock', lockContents)
+	if (verbose) console.log('Done')
+}
+
+const showUsage = async () => {
+	process.stderr.write(`
+syntax: prefetch-yarn-deps [path to yarn.lock] [options]
+
+Options:
+  -h --help         Show this help
+  -v --verbose      Verbose output
+  --builder         Only perform the download to current directory, then exit
+`)
+	process.exit(1)
+}
+
+const main = async () => {
+	const args = process.argv.slice(2)
+	let next, lockFile, verbose, isBuilder
+	while (next = args.shift()) {
+		if (next == '--builder') {
+			isBuilder = true
+		} else if (next == '--verbose' || next == '-v') {
+			verbose = true
+		} else if (next == '--help' || next == '-h') {
+			showUsage()
+		} else if (!lockFile) {
+			lockFile = next
+		} else {
+			showUsage()
+		}
+	}
+	let lockContents
+	try {
+		lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
+	} catch {
+		showUsage()
+	}
+
+	if (isBuilder) {
+		await prefetchYarnDeps(lockContents, verbose)
+	} else {
+		const { stdout: tmpDir } = await exec('mktemp', [ '-d' ])
+
+		try {
+			process.chdir(tmpDir.trim())
+			await prefetchYarnDeps(lockContents, verbose)
+			const { stdout: hash } = await exec('nix-hash', [ '--type', 'sha256', '--base32', tmpDir.trim() ])
+			console.log(hash)
+		} finally {
+			await exec('rm', [ '-rf', tmpDir.trim() ])
+		}
+	}
+}
+
+main()
+	.catch(e => {
+		console.error(e)
+		process.exit(1)
+	})
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/default.nix b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/default.nix
new file mode 100644
index 000000000000..8057d05ba72c
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/default.nix
@@ -0,0 +1,24 @@
+{ testers, fetchYarnDeps, ... }:
+
+{
+  file = testers.invalidateFetcherByDrvHash fetchYarnDeps {
+    yarnLock = ./file.lock;
+    sha256 = "sha256-BPuyQVCbdpFL/iRhmarwWAmWO2NodlVCOY9JU+4pfa4=";
+  };
+  simple = testers.invalidateFetcherByDrvHash fetchYarnDeps {
+    yarnLock = ./simple.lock;
+    sha256 = "sha256-FRrt8BixleILmFB2ZV8RgPNLqgS+dlH5nWoPgeaaNQ8=";
+  };
+  gitDep = testers.invalidateFetcherByDrvHash fetchYarnDeps {
+    yarnLock = ./git.lock;
+    sha256 = "sha256-f90IiEzHDiBdswWewRBHcJfqqpPipaMg8N0DVLq2e8Q=";
+  };
+  githubDep = testers.invalidateFetcherByDrvHash fetchYarnDeps {
+    yarnLock = ./github.lock;
+    sha256 = "sha256-DIKrhDKoqm7tHZmcuh9eK9VTqp6BxeW0zqDUpY4F57A=";
+  };
+  gitUrlDep = testers.invalidateFetcherByDrvHash fetchYarnDeps {
+    yarnLock = ./giturl.lock;
+    sha256 = "sha256-VPnyqN6lePQZGXwR7VhbFnP7/0/LB621RZwT1F+KzVQ=";
+  };
+}
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/file.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/file.lock
new file mode 100644
index 000000000000..4881d83a7de9
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/file.lock
@@ -0,0 +1,9 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"@org/somepack@file:vendor/orgpacks/somepack/assets":
+  version "1.0.0"
+
+"otherpack@file:vendor/otherpack":
+  version "1.0.0"
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/git.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/git.lock
new file mode 100644
index 000000000000..9eda5b2c409d
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/git.lock
@@ -0,0 +1,7 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"async@git+https://github.com/caolan/async":
+  version "3.2.1"
+  resolved "git+https://github.com/caolan/async#fc9ba651341af5ab974aade6b1640e345912be83"
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/github.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/github.lock
new file mode 100644
index 000000000000..057e043a5390
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/github.lock
@@ -0,0 +1,7 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"async@github:caolan/async":
+  version "3.2.1"
+  resolved "https://codeload.github.com/caolan/async/tar.gz/fc9ba651341af5ab974aade6b1640e345912be83"
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/giturl.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/giturl.lock
new file mode 100644
index 000000000000..154030a7e358
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/giturl.lock
@@ -0,0 +1,11 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"autocomplete-atom-api@https://codeload.github.com/atom/autocomplete-atom-api/legacy.tar.gz/refs/tags/v0.10.7":
+  version "0.10.7"
+  resolved "https://codeload.github.com/atom/autocomplete-atom-api/legacy.tar.gz/refs/tags/v0.10.7#c9d51fa721d543ccfc1b2189101155e81db6b97d"
+
+"find-and-replace@https://github.com/atom-community/find-and-replace/archive/refs/tags/v0.220.1.tar.gz":
+  version "0.220.1"
+  resolved "https://github.com/atom-community/find-and-replace/archive/refs/tags/v0.220.1.tar.gz#d7a0f56511e38ee72a89895a795bbbcab4a1a405"
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/simple.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/simple.lock
new file mode 100644
index 000000000000..db2f4b2be4b7
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/simple.lock
@@ -0,0 +1,8 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+lit-html@1:
+  version "1.4.1"
+  resolved "https://registry.yarnpkg.com/lit-html/-/lit-html-1.4.1.tgz#0c6f3ee4ad4eb610a49831787f0478ad8e9ae5e0"
+  integrity sha512-B9btcSgPYb1q4oSOb/PrOT6Z/H+r6xuNzfH4lFli/AWhYwdtrgQkQWBbIc6mdnf6E2IL3gDXdkkqNktpU0OZQA==
diff --git a/nixpkgs/pkgs/build-support/node/import-npm-lock/default.nix b/nixpkgs/pkgs/build-support/node/import-npm-lock/default.nix
new file mode 100644
index 000000000000..d530b8ee30ff
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/import-npm-lock/default.nix
@@ -0,0 +1,134 @@
+{ lib
+, fetchurl
+, stdenv
+, callPackages
+, runCommand
+}:
+
+let
+  inherit (builtins) match elemAt toJSON removeAttrs;
+  inherit (lib) importJSON mapAttrs;
+
+  matchGitHubReference = match "github(.com)?:.+";
+  getName = package: package.name or "unknown";
+  getVersion = package: package.version or "0.0.0";
+
+  # Fetch a module from package-lock.json -> packages
+  fetchModule =
+    { module
+    , npmRoot ? null
+    }: (
+      if module ? "resolved" then
+        (
+          let
+            # Parse scheme from URL
+            mUrl = match "(.+)://(.+)" module.resolved;
+            scheme = elemAt mUrl 0;
+          in
+          (
+            if mUrl == null then
+              (
+                assert npmRoot != null; {
+                  outPath = npmRoot + "/${module.resolved}";
+                }
+              )
+            else if (scheme == "http" || scheme == "https") then
+              (
+                fetchurl {
+                  url = module.resolved;
+                  hash = module.integrity;
+                }
+              )
+            else if lib.hasPrefix "git" module.resolved then
+              (
+                builtins.fetchGit {
+                  url = module.resolved;
+                }
+              )
+            else throw "Unsupported URL scheme: ${scheme}"
+          )
+        )
+      else null
+    );
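+
+  # For illustration only (hypothetical `resolved` values, not taken from any
+  # real lockfile), the dispatch above works out to roughly:
+  #   "https://registry.npmjs.org/foo/-/foo-1.0.0.tgz" -> fetchurl, pinned by `integrity`
+  #   "git+https://github.com/owner/foo.git#<rev>"     -> builtins.fetchGit
+  #   "vendor/foo" (no "://" scheme)                    -> a path under npmRoot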
+
+  # Manage node_modules outside of the store with hooks
+  hooks = callPackages ./hooks { };
+
+in
+{
+  importNpmLock =
+    { npmRoot ? null
+    , package ? importJSON (npmRoot + "/package.json")
+    , packageLock ? importJSON (npmRoot + "/package-lock.json")
+    , pname ? getName package
+    , version ? getVersion package
+    }:
+    let
+      mapLockDependencies =
+        mapAttrs
+          (name: version: (
+            # Substitute the constraint with the resolved version of the dependency
+            # from the top level of package-lock.json.
+            if (
+              # If the version is `latest`
+              version == "latest"
+              ||
+              # or if it is a GitHub reference
+              matchGitHubReference version != null
+            ) then packageLock'.packages.${"node_modules/${name}"}.version
+            # Otherwise keep the regular version constraint as-is.
+            else version
+          ));
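+
+      # For example (hypothetical lock entry): a constraint such as `"foo" = "github:owner/foo"`
+      # is replaced by the concrete version recorded at packages."node_modules/foo".version
+      # in the patched lock below.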
+
+      packageLock' = packageLock // {
+        packages =
+          mapAttrs
+            (_: module:
+              let
+                src = fetchModule {
+                  inherit module npmRoot;
+                };
+              in
+              (removeAttrs module [
+                "link"
+                "funding"
+              ]) // lib.optionalAttrs (src != null) {
+                resolved = "file:${src}";
+              } // lib.optionalAttrs (module ? dependencies) {
+                dependencies = mapLockDependencies module.dependencies;
+              } // lib.optionalAttrs (module ? optionalDependencies) {
+                optionalDependencies = mapLockDependencies module.optionalDependencies;
+              })
+            packageLock.packages;
+      };
+
+      mapPackageDependencies = mapAttrs (name: _: packageLock'.packages.${"node_modules/${name}"}.resolved);
+
+      # Substitute dependency references in package.json with Nix store paths
+      packageJSON' = package // lib.optionalAttrs (package ? dependencies) {
+        dependencies = mapPackageDependencies package.dependencies;
+      } // lib.optionalAttrs (package ? devDependencies) {
+        devDependencies = mapPackageDependencies package.devDependencies;
+      };
+
+    in
+    runCommand "${pname}-${version}-sources"
+      {
+        inherit pname version;
+
+        passAsFile = [ "package" "packageLock" ];
+
+        package = toJSON packageJSON';
+        packageLock = toJSON packageLock';
+      } ''
+      mkdir $out
+      cp "$packagePath" $out/package.json
+      cp "$packageLockPath" $out/package-lock.json
+    '';
+
+  inherit hooks;
+  inherit (hooks) npmConfigHook;
+
+  __functor = self: self.importNpmLock;
+}
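Taken together, `importNpmLock` evaluates `package-lock.json` at Nix evaluation time, fetches every `resolved` entry into the store, and emits a small derivation containing the patched `package.json`/`package-lock.json`; the `npmConfigHook` it exposes then installs from that patched metadata. A minimal usage sketch (the `buildNpmPackage` caller and the package name below are assumptions for illustration, not defined in this file):

    { buildNpmPackage, importNpmLock }:

    buildNpmPackage {
      pname = "my-app";   # hypothetical package
      version = "1.0.0";
      src = ./.;

      # No npmDepsHash is needed here: dependencies come straight from package-lock.json.
      npmDeps = importNpmLock { npmRoot = ./.; };
      npmConfigHook = importNpmLock.npmConfigHook;
    }

Because the hook restores the original package.json/package-lock.json after installing, the store-path-rewritten copies do not end up in the runtime closure.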
diff --git a/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/canonicalize-symlinks.js b/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/canonicalize-symlinks.js
new file mode 100644
index 000000000000..81cd2593c5b2
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/canonicalize-symlinks.js
@@ -0,0 +1,52 @@
+#!/usr/bin/env node
+const fs = require("fs");
+const path = require("path");
+
+// When npm installs files that have been rewritten to the Nix store,
+// it writes their symlinks relative to the build directory.
+//
+// This makes relocating node_modules tricky when the links refer into the store.
+// This script walks node_modules and canonicalizes such symlinks to absolute store paths.
+
+async function canonicalize(storePrefix, root) {
+  console.log(storePrefix, root);
+  const entries = await fs.promises.readdir(root);
+  const paths = entries.map((entry) => path.join(root, entry));
+
+  const stats = await Promise.all(
+    paths.map(async (p) => {
+      return {
+        path: p,
+        stat: await fs.promises.lstat(p),
+      };
+    })
+  );
+
+  const symlinks = stats.filter((stat) => stat.stat.isSymbolicLink());
+  const dirs = stats.filter((stat) => stat.stat.isDirectory());
+
+  // Canonicalize symlinks to their real path
+  await Promise.all(
+    symlinks.map(async (stat) => {
+      const target = await fs.promises.realpath(stat.path);
+      if (target.startsWith(storePrefix)) {
+        await fs.promises.unlink(stat.path);
+        await fs.promises.symlink(target, stat.path);
+      }
+    })
+  );
+
+  // Recurse into directories
+  await Promise.all(dirs.map((dir) => canonicalize(storePrefix, dir.path)));
+}
+
+async function main() {
+  const args = process.argv.slice(2);
+  const storePrefix = args[0];
+
+  if (fs.existsSync("node_modules")) {
+    await canonicalize(storePrefix, "node_modules");
+  }
+}
+
+main();
diff --git a/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/default.nix b/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/default.nix
new file mode 100644
index 000000000000..5990371def91
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/default.nix
@@ -0,0 +1,13 @@
+{ callPackage, lib, makeSetupHook, srcOnly, nodejs }:
+{
+  npmConfigHook = makeSetupHook
+    {
+      name = "npm-config-hook";
+      substitutions = {
+        nodeSrc = srcOnly nodejs;
+        nodeGyp = "${nodejs}/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js";
+        canonicalizeSymlinksScript = ./canonicalize-symlinks.js;
+        storePrefix = builtins.storeDir;
+      };
+    } ./npm-config-hook.sh;
+}
diff --git a/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/npm-config-hook.sh b/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/npm-config-hook.sh
new file mode 100644
index 000000000000..35c3a2061d4b
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/node/import-npm-lock/hooks/npm-config-hook.sh
@@ -0,0 +1,70 @@
+# shellcheck shell=bash
+
+npmConfigHook() {
+    echo "Executing npmConfigHook"
+
+    if [ -n "${npmRoot-}" ]; then
+        pushd "$npmRoot"
+    fi
+
+    if [ -z "${npmDeps-}" ]; then
+        echo "Error: 'npmDeps' should be set when using npmConfigHook."
+        exit 1
+    fi
+
+    echo "Configuring npm"
+
+    export HOME="$TMPDIR"
+    export npm_config_nodedir="@nodeSrc@"
+    export npm_config_node_gyp="@nodeGyp@"
+    npm config set offline true
+    npm config set progress false
+    npm config set fund false
+
+    echo "Installing patched package.json/package-lock.json"
+
+    # Save the original package.json/package-lock.json so they can be restored later.
+    # The patched ones contain store paths we don't want in the runtime closure.
+    mv package.json .package.json.orig
+    if test -f package-lock.json; then # Not all packages have package-lock.json.
+        mv package-lock.json .package-lock.json.orig
+    fi
+    cp --no-preserve=mode "${npmDeps}/package.json" package.json
+    cp --no-preserve=mode "${npmDeps}/package-lock.json" package-lock.json
+
+    echo "Installing dependencies"
+
+    if ! npm install --ignore-scripts $npmInstallFlags "${npmInstallFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"; then
+        echo
+        echo "ERROR: npm failed to install dependencies"
+        echo
+        echo "Here are a few things you can try, depending on the error:"
+        echo '1. Set `npmFlags = [ "--legacy-peer-deps" ]`'
+        echo
+
+        exit 1
+    fi
+
+    patchShebangs node_modules
+
+    npm rebuild $npmRebuildFlags "${npmRebuildFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"
+
+    patchShebangs node_modules
+
+    # Rewrite relative symlinks that resolve into the Nix store to absolute store paths.
+    node @canonicalizeSymlinksScript@ @storePrefix@
+
+    # Restore the pre-patched package.json/package-lock.json to keep store paths out of the closure.
+    mv .package.json.orig package.json
+    if test -f ".package-lock.json.orig"; then
+        mv .package-lock.json.orig package-lock.json
+    fi
+
+    if [ -n "${npmRoot-}" ]; then
+        popd
+    fi
+
+    echo "Finished npmConfigHook"
+}
+
+postConfigureHooks+=(npmConfigHook)
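For reference, a rough sketch of how the hook above might be wired directly into `stdenv.mkDerivation` (normally `buildNpmPackage` handles this wiring; the package name, build script, and output layout below are assumptions):

    { stdenv, nodejs, importNpmLock }:

    stdenv.mkDerivation {
      pname = "my-app";   # hypothetical
      version = "1.0.0";
      src = ./.;

      # npm itself, plus the setup hook that registers npmConfigHook
      # as a postConfigure hook.
      nativeBuildInputs = [ nodejs importNpmLock.npmConfigHook ];

      # Read by npmConfigHook: the derivation holding the patched
      # package.json/package-lock.json produced by importNpmLock.
      npmDeps = importNpmLock { npmRoot = ./.; };

      buildPhase = ''
        runHook preBuild
        npm run build       # assumes a "build" script in package.json
        runHook postBuild
      '';

      installPhase = ''
        runHook preInstall
        cp -r dist $out     # hypothetical output layout
        runHook postInstall
      '';
    }

The hook itself only needs `npmDeps` (and optionally `npmRoot` when package.json is not at the source root), plus the `npmFlags`/`npmInstallFlags` variables it expands in the `npm install` call above.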