Diffstat (limited to 'nixpkgs/pkgs/build-support')
476 files changed, 44647 insertions, 0 deletions
diff --git a/nixpkgs/pkgs/build-support/add-opengl-runpath/default.nix b/nixpkgs/pkgs/build-support/add-opengl-runpath/default.nix new file mode 100644 index 000000000000..5cab0937e074 --- /dev/null +++ b/nixpkgs/pkgs/build-support/add-opengl-runpath/default.nix @@ -0,0 +1,12 @@ +{ lib, stdenv }: + +stdenv.mkDerivation { + name = "add-opengl-runpath"; + + driverLink = "/run/opengl-driver" + lib.optionalString stdenv.isi686 "-32"; + + buildCommand = '' + mkdir -p $out/nix-support + substituteAll ${./setup-hook.sh} $out/nix-support/setup-hook + ''; +} diff --git a/nixpkgs/pkgs/build-support/add-opengl-runpath/setup-hook.sh b/nixpkgs/pkgs/build-support/add-opengl-runpath/setup-hook.sh new file mode 100644 index 000000000000..e556e7ead2a7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/add-opengl-runpath/setup-hook.sh @@ -0,0 +1,29 @@ +# Set RUNPATH so that driver libraries in /run/opengl-driver(-32)/lib can be found. +# This is needed to not rely on LD_LIBRARY_PATH which does not work with setuid +# executables. Fixes https://github.com/NixOS/nixpkgs/issues/22760. It must be run +# in postFixup because RUNPATH stripping in fixup would undo it. Note that patchelf +# actually sets RUNPATH not RPATH, which applies only to dependencies of the binary +# it set on (including for dlopen), so the RUNPATH must indeed be set on these +# libraries and would not work if set only on executables. +addOpenGLRunpath() { + local forceRpath= + + while [ $# -gt 0 ]; do + case "$1" in + --) shift; break;; + --force-rpath) shift; forceRpath=1;; + --*) + echo "addOpenGLRunpath: ERROR: Invalid command line" \ + "argument: $1" >&2 + return 1;; + *) break;; + esac + done + + for file in "$@"; do + if ! isELF "$file"; then continue; fi + local origRpath="$(patchelf --print-rpath "$file")" + patchelf --set-rpath "@driverLink@/lib:$origRpath" ${forceRpath:+--force-rpath} "$file" + done +} + diff --git a/nixpkgs/pkgs/build-support/agda/default.nix b/nixpkgs/pkgs/build-support/agda/default.nix new file mode 100644 index 000000000000..63adf2a27651 --- /dev/null +++ b/nixpkgs/pkgs/build-support/agda/default.nix @@ -0,0 +1,106 @@ +# Builder for Agda packages. + +{ stdenv, lib, self, Agda, runCommand, makeWrapper, writeText, ghcWithPackages, nixosTests }: + +with lib.strings; + +let + withPackages' = { + pkgs, + ghc ? 
ghcWithPackages (p: with p; [ ieee754 ]) + }: let + pkgs' = if builtins.isList pkgs then pkgs else pkgs self; + library-file = writeText "libraries" '' + ${(concatMapStringsSep "\n" (p: "${p}/${p.libraryFile}") pkgs')} + ''; + pname = "agdaWithPackages"; + version = Agda.version; + in runCommand "${pname}-${version}" { + inherit pname version; + nativeBuildInputs = [ makeWrapper ]; + passthru = { + unwrapped = Agda; + inherit withPackages; + tests = { + inherit (nixosTests) agda; + allPackages = withPackages (lib.filter self.lib.isUnbrokenAgdaPackage (lib.attrValues self)); + }; + }; + inherit (Agda) meta; + } '' + mkdir -p $out/bin + makeWrapper ${Agda}/bin/agda $out/bin/agda \ + --add-flags "--with-compiler=${ghc}/bin/ghc" \ + --add-flags "--library-file=${library-file}" \ + --add-flags "--local-interfaces" + ln -s ${Agda}/bin/agda-mode $out/bin/agda-mode + ''; # Local interfaces has been added for now: See https://github.com/agda/agda/issues/4526 + + withPackages = arg: if builtins.isAttrs arg then withPackages' arg else withPackages' { pkgs = arg; }; + + extensions = [ + "agda" + "agda-lib" + "agdai" + "lagda" + "lagda.md" + "lagda.org" + "lagda.rst" + "lagda.tex" + ]; + + defaults = + { pname + , meta + , buildInputs ? [] + , everythingFile ? "./Everything.agda" + , includePaths ? [] + , libraryName ? pname + , libraryFile ? "${libraryName}.agda-lib" + , buildPhase ? null + , installPhase ? null + , extraExtensions ? [] + , ... + }: let + agdaWithArgs = withPackages (builtins.filter (p: p ? isAgdaDerivation) buildInputs); + includePathArgs = concatMapStrings (path: "-i" + path + " ") (includePaths ++ [(dirOf everythingFile)]); + in + { + inherit libraryName libraryFile; + + isAgdaDerivation = true; + + buildInputs = buildInputs ++ [ agdaWithArgs ]; + + buildPhase = if buildPhase != null then buildPhase else '' + runHook preBuild + agda ${includePathArgs} ${everythingFile} + runHook postBuild + ''; + + installPhase = if installPhase != null then installPhase else '' + runHook preInstall + mkdir -p $out + find -not \( -path ${everythingFile} -or -path ${lib.interfaceFile everythingFile} \) -and \( ${concatMapStringsSep " -or " (p: "-name '*.${p}'") (extensions ++ extraExtensions)} \) -exec cp -p --parents -t "$out" {} + + runHook postInstall + ''; + + # As documented at https://github.com/NixOS/nixpkgs/issues/172752, + # we need to set LC_ALL to an UTF-8-supporting locale. However, on + # darwin, it seems that there is no standard such locale; luckily, + # the referenced issue doesn't seem to surface on darwin. Hence let's + # set this only on non-darwin. + LC_ALL = lib.optionalString (!stdenv.isDarwin) "C.UTF-8"; + + meta = if meta.broken or false then meta // { hydraPlatforms = lib.platforms.none; } else meta; + + # Retrieve all packages from the finished package set that have the current package as a dependency and build them + passthru.tests = with builtins; + lib.filterAttrs (name: pkg: self.lib.isUnbrokenAgdaPackage pkg && elem pname (map (pkg: pkg.pname) pkg.buildInputs)) self; + }; +in +{ + mkDerivation = args: stdenv.mkDerivation (args // defaults args); + + inherit withPackages withPackages'; +} diff --git a/nixpkgs/pkgs/build-support/agda/lib.nix b/nixpkgs/pkgs/build-support/agda/lib.nix new file mode 100644 index 000000000000..80a0974192bb --- /dev/null +++ b/nixpkgs/pkgs/build-support/agda/lib.nix @@ -0,0 +1,15 @@ +{ lib }: +{ + /* Returns the Agda interface file to a given Agda file. 
+ * + * Examples: + * interfaceFile "Everything.agda" == "Everything.agdai" + * interfaceFile "src/Everything.lagda.tex" == "src/Everything.agdai" + */ + interfaceFile = agdaFile: lib.head (builtins.match ''(.*\.)l?agda(\.(md|org|rst|tex))?'' agdaFile) + "agdai"; + + /* Takes an arbitrary derivation and says whether it is an agda library package + * that is not marked as broken. + */ + isUnbrokenAgdaPackage = pkg: pkg.isAgdaDerivation or false && !pkg.meta.broken; +} diff --git a/nixpkgs/pkgs/build-support/alternatives/blas/default.nix b/nixpkgs/pkgs/build-support/alternatives/blas/default.nix new file mode 100644 index 000000000000..fec2d0526bb3 --- /dev/null +++ b/nixpkgs/pkgs/build-support/alternatives/blas/default.nix @@ -0,0 +1,143 @@ +{ lib, stdenv +, lapack-reference, openblas +, isILP64 ? false +, blasProvider ? openblas }: + +let + blasFortranSymbols = [ + "caxpy" "ccopy" "cdotc" "cdotu" "cgbmv" "cgemm" "cgemv" "cgerc" "cgeru" + "chbmv" "chemm" "chemv" "cher" "cher2" "cher2k" "cherk" "chpmv" "chpr" + "chpr2" "crotg" "cscal" "csrot" "csscal" "cswap" "csymm" "csyr2k" "csyrk" + "ctbmv" "ctbsv" "ctpmv" "ctpsv" "ctrmm" "ctrmv" "ctrsm" "ctrsv" "dasum" + "daxpy" "dcabs1" "dcopy" "ddot" "dgbmv" "dgemm" "dgemv" "dger" "dnrm2" + "drot" "drotg" "drotm" "drotmg" "dsbmv" "dscal" "dsdot" "dspmv" "dspr" + "dspr2" "dswap" "dsymm" "dsymv" "dsyr" "dsyr2" "dsyr2k" "dsyrk" "dtbmv" + "dtbsv" "dtpmv" "dtpsv" "dtrmm" "dtrmv" "dtrsm" "dtrsv" "dzasum" "dznrm2" + "icamax" "idamax" "isamax" "izamax" "lsame" "sasum" "saxpy" "scabs1" + "scasum" "scnrm2" "scopy" "sdot" "sdsdot" "sgbmv" "sgemm" "sgemv" + "sger" "snrm2" "srot" "srotg" "srotm" "srotmg" "ssbmv" "sscal" "sspmv" + "sspr" "sspr2" "sswap" "ssymm" "ssymv" "ssyr" "ssyr2" "ssyr2k" "ssyrk" + "stbmv" "stbsv" "stpmv" "stpsv" "strmm" "strmv" "strsm" "strsv" "xerbla" + "xerbla_array" "zaxpy" "zcopy" "zdotc" "zdotu" "zdrot" "zdscal" "zgbmv" + "zgemm" "zgemv" "zgerc" "zgeru" "zhbmv" "zhemm" "zhemv" "zher" "zher2" + "zher2k" "zherk" "zhpmv" "zhpr" "zhpr2" "zrotg" "zscal" "zswap" "zsymm" + "zsyr2k" "zsyrk" "ztbmv" "ztbsv" "ztpmv" "ztpsv" "ztrmm" "ztrmv" "ztrsm" + "ztrsv" + ]; + + version = "3"; + canonicalExtension = if stdenv.hostPlatform.isLinux + then "${stdenv.hostPlatform.extensions.sharedLibrary}.${version}" + else stdenv.hostPlatform.extensions.sharedLibrary; + + + blasImplementation = lib.getName blasProvider; + blasProvider' = if blasImplementation == "mkl" + then blasProvider + else blasProvider.override { blas64 = isILP64; }; + +in + +assert isILP64 -> blasImplementation == "mkl" || blasProvider'.blas64; + +stdenv.mkDerivation { + pname = "blas"; + inherit version; + + outputs = [ "out" "dev" ]; + + meta = (blasProvider'.meta or {}) // { + description = "${lib.getName blasProvider} with just the BLAS C and FORTRAN ABI"; + }; + + passthru = { + inherit isILP64; + provider = blasProvider'; + implementation = blasImplementation; + }; + + dontBuild = true; + dontConfigure = true; + unpackPhase = "src=$PWD"; + + dontPatchELF = true; + + installPhase = ('' + mkdir -p $out/lib $dev/include $dev/lib/pkgconfig + + libblas="${lib.getLib blasProvider'}/lib/libblas${canonicalExtension}" + + if ! [ -e "$libblas" ]; then + echo "$libblas does not exist, ${blasProvider'.name} does not provide libblas." 
+ exit 1 + fi + + $NM -an "$libblas" | cut -f3 -d' ' > symbols + for symbol in ${toString blasFortranSymbols}; do + grep -q "^$symbol_$" symbols || { echo "$symbol" was not found in "$libblas"; exit 1; } + done + + cp -L "$libblas" $out/lib/libblas${canonicalExtension} + chmod +w $out/lib/libblas${canonicalExtension} + +'' + (if stdenv.hostPlatform.parsed.kernel.execFormat.name == "elf" then '' + patchelf --set-soname libblas${canonicalExtension} $out/lib/libblas${canonicalExtension} + patchelf --set-rpath "$(patchelf --print-rpath $out/lib/libblas${canonicalExtension}):${lib.getLib blasProvider'}/lib" $out/lib/libblas${canonicalExtension} +'' else lib.optionalString (stdenv.hostPlatform.isDarwin) '' + install_name_tool \ + -id $out/lib/libblas${canonicalExtension} \ + -add_rpath ${lib.getLib blasProvider'}/lib \ + $out/lib/libblas${canonicalExtension} +'') + '' + + if [ "$out/lib/libblas${canonicalExtension}" != "$out/lib/libblas${stdenv.hostPlatform.extensions.sharedLibrary}" ]; then + ln -s $out/lib/libblas${canonicalExtension} "$out/lib/libblas${stdenv.hostPlatform.extensions.sharedLibrary}" + fi + + cat <<EOF > $dev/lib/pkgconfig/blas.pc +Name: blas +Version: ${version} +Description: BLAS FORTRAN implementation +Libs: -L$out/lib -lblas +Cflags: -I$dev/include +EOF + + libcblas="${lib.getLib blasProvider'}/lib/libcblas${canonicalExtension}" + + if ! [ -e "$libcblas" ]; then + echo "$libcblas does not exist, ${blasProvider'.name} does not provide libcblas." + exit 1 + fi + + cp -L "$libcblas" $out/lib/libcblas${canonicalExtension} + chmod +w $out/lib/libcblas${canonicalExtension} + +'' + (if stdenv.hostPlatform.parsed.kernel.execFormat.name == "elf" then '' + patchelf --set-soname libcblas${canonicalExtension} $out/lib/libcblas${canonicalExtension} + patchelf --set-rpath "$(patchelf --print-rpath $out/lib/libcblas${canonicalExtension}):${lib.getLib blasProvider'}/lib" $out/lib/libcblas${canonicalExtension} +'' else lib.optionalString stdenv.hostPlatform.isDarwin '' + install_name_tool \ + -id $out/lib/libcblas${canonicalExtension} \ + -add_rpath ${lib.getLib blasProvider'}/lib \ + $out/lib/libcblas${canonicalExtension} +'') + '' + if [ "$out/lib/libcblas${canonicalExtension}" != "$out/lib/libcblas${stdenv.hostPlatform.extensions.sharedLibrary}" ]; then + ln -s $out/lib/libcblas${canonicalExtension} "$out/lib/libcblas${stdenv.hostPlatform.extensions.sharedLibrary}" + fi + + cp ${lib.getDev lapack-reference}/include/cblas{,_mangling}.h $dev/include + + cat <<EOF > $dev/lib/pkgconfig/cblas.pc +Name: cblas +Version: ${version} +Description: BLAS C implementation +Cflags: -I$dev/include +Libs: -L$out/lib -lcblas +EOF +'' + lib.optionalString (blasImplementation == "mkl") '' + mkdir -p $out/nix-support + echo 'export MKL_INTERFACE_LAYER=${lib.optionalString isILP64 "I"}LP64,GNU' > $out/nix-support/setup-hook + ln -s $out/lib/libblas${canonicalExtension} $out/lib/libmkl_rt${stdenv.hostPlatform.extensions.sharedLibrary} + ln -sf ${blasProvider'}/include/* $dev/include +''); +} diff --git a/nixpkgs/pkgs/build-support/alternatives/lapack/default.nix b/nixpkgs/pkgs/build-support/alternatives/lapack/default.nix new file mode 100644 index 000000000000..cbc7bf25c797 --- /dev/null +++ b/nixpkgs/pkgs/build-support/alternatives/lapack/default.nix @@ -0,0 +1,113 @@ +{ lib, stdenv +, lapack-reference, openblas +, isILP64 ? false +, lapackProvider ? 
openblas }: + +let + + version = "3"; + canonicalExtension = if stdenv.hostPlatform.isLinux + then "${stdenv.hostPlatform.extensions.sharedLibrary}.${version}" + else stdenv.hostPlatform.extensions.sharedLibrary; + + lapackImplementation = lib.getName lapackProvider; + lapackProvider' = if lapackImplementation == "mkl" + then lapackProvider + else lapackProvider.override { blas64 = isILP64; }; + +in + +assert isILP64 -> lapackImplementation == "mkl" || lapackProvider'.blas64; + +stdenv.mkDerivation { + pname = "lapack"; + inherit version; + + outputs = [ "out" "dev" ]; + + meta = (lapackProvider'.meta or {}) // { + description = "${lib.getName lapackProvider'} with just the LAPACK C and FORTRAN ABI"; + }; + + passthru = { + inherit isILP64; + provider = lapackProvider'; + implementation = lapackImplementation; + }; + + # TODO: drop this forced rebuild, as it was needed just once. + rebuild_salt = if stdenv.isDarwin && stdenv.isx86_64 then "J4AQ" else null; + + dontBuild = true; + dontConfigure = true; + unpackPhase = "src=$PWD"; + + dontPatchELF = true; + + installPhase = ('' + mkdir -p $out/lib $dev/include $dev/lib/pkgconfig + + liblapack="${lib.getLib lapackProvider'}/lib/liblapack${canonicalExtension}" + + if ! [ -e "$liblapack" ]; then + echo "$liblapack does not exist, ${lapackProvider'.name} does not provide liblapack." + exit 1 + fi + + cp -L "$liblapack" $out/lib/liblapack${canonicalExtension} + chmod +w $out/lib/liblapack${canonicalExtension} + +'' + (lib.optionalString (stdenv.hostPlatform.parsed.kernel.execFormat.name == "elf") '' + patchelf --set-soname liblapack${canonicalExtension} $out/lib/liblapack${canonicalExtension} + patchelf --set-rpath "$(patchelf --print-rpath $out/lib/liblapack${canonicalExtension}):${lapackProvider'}/lib" $out/lib/liblapack${canonicalExtension} +'') + '' + + if [ "$out/lib/liblapack${canonicalExtension}" != "$out/lib/liblapack${stdenv.hostPlatform.extensions.sharedLibrary}" ]; then + ln -s $out/lib/liblapack${canonicalExtension} "$out/lib/liblapack${stdenv.hostPlatform.extensions.sharedLibrary}" + fi + + install -D ${lib.getDev lapack-reference}/include/lapack.h $dev/include/lapack.h + + cat <<EOF > $dev/lib/pkgconfig/lapack.pc +Name: lapack +Version: ${version} +Description: LAPACK FORTRAN implementation +Cflags: -I$dev/include +Libs: -L$out/lib -llapack +EOF + + liblapacke="${lib.getLib lapackProvider'}/lib/liblapacke${canonicalExtension}" + + if ! [ -e "$liblapacke" ]; then + echo "$liblapacke does not exist, ${lapackProvider'.name} does not provide liblapacke." 
+ exit 1 + fi + + cp -L "$liblapacke" $out/lib/liblapacke${canonicalExtension} + chmod +w $out/lib/liblapacke${canonicalExtension} + +'' + (lib.optionalString (stdenv.hostPlatform.parsed.kernel.execFormat.name == "elf") '' + patchelf --set-soname liblapacke${canonicalExtension} $out/lib/liblapacke${canonicalExtension} + patchelf --set-rpath "$(patchelf --print-rpath $out/lib/liblapacke${canonicalExtension}):${lib.getLib lapackProvider'}/lib" $out/lib/liblapacke${canonicalExtension} +'') + '' + + if [ -f "$out/lib/liblapacke.so.3" ]; then + ln -s $out/lib/liblapacke.so.3 $out/lib/liblapacke.so + fi + + cp ${lib.getDev lapack-reference}/include/lapacke{,_mangling,_config,_utils}.h $dev/include + + cat <<EOF > $dev/lib/pkgconfig/lapacke.pc +Name: lapacke +Version: ${version} +Description: LAPACK C implementation +Cflags: -I$dev/include +Libs: -L$out/lib -llapacke +EOF +'' + lib.optionalString (lapackImplementation == "mkl") '' + mkdir -p $out/nix-support + echo 'export MKL_INTERFACE_LAYER=${lib.optionalString isILP64 "I"}LP64,GNU' > $out/nix-support/setup-hook + ln -s $out/lib/liblapack${canonicalExtension} $out/lib/libmkl_rt${stdenv.hostPlatform.extensions.sharedLibrary} + ln -sf ${lapackProvider'}/include/* $dev/include +''); +} diff --git a/nixpkgs/pkgs/build-support/appimage/appimage-exec.sh b/nixpkgs/pkgs/build-support/appimage/appimage-exec.sh new file mode 100755 index 000000000000..b91d13c4a235 --- /dev/null +++ b/nixpkgs/pkgs/build-support/appimage/appimage-exec.sh @@ -0,0 +1,145 @@ +#!@shell@ +# shellcheck shell=bash + +if [ -n "$DEBUG" ] ; then + set -x +fi + +PATH="@path@:$PATH" +apprun_opt=true +OWD=$(readlink -f .) +# can be read by appimages: https://docs.appimage.org/packaging-guide/environment-variables.html +export OWD + +# src : AppImage +# dest : let's unpack() create the directory +unpack() { + local src="$1" + local out="$2" + + # https://github.com/AppImage/libappimage/blob/ca8d4b53bed5cbc0f3d0398e30806e0d3adeaaab/src/libappimage/utils/MagicBytesChecker.cpp#L45-L63 + local appimageSignature; + appimageSignature="$(LC_ALL=C readelf -h "$src" | awk 'NR==2{print $10$11;}')" + local appimageType; + appimageType="$(LC_ALL=C readelf -h "$src" | awk 'NR==2{print $12;}')" + + # check AppImage signature + if [ "$appimageSignature" != "4149" ]; then + echo "Not an AppImage file" + exit + fi + + case "$appimageType" in + "01") + echo "Uncompress $(basename "$src") of type $appimageType" + mkdir "$out" + pv "$src" | bsdtar -x -C "$out" -f - + ;; + + "02") + # This method avoid issues with non executable appimages, + # non-native packer, packer patching and squashfs-root destination prefix. + + # multiarch offset one-liner using same method as AppImage + # see https://gist.github.com/probonopd/a490ba3401b5ef7b881d5e603fa20c93 + offset=$(LC_ALL=C readelf -h "$src" | awk 'NR==13{e_shoff=$5} NR==18{e_shentsize=$5} NR==19{e_shnum=$5} END{print e_shoff+e_shentsize*e_shnum}') + echo "Uncompress $(basename "$src") of type $appimageType @ offset $offset" + unsquashfs -q -d "$out" -o "$offset" "$src" + chmod go-w "$out" + ;; + + # "03") + # get ready, https://github.com/TheAssassin/type3-runtime + + *) + echo Unsupported AppImage Type: "$appimageType" + exit + ;; + esac + echo "$(basename "$src") is now installed in $out" +} + +apprun() { + + SHA256=$(sha256sum "$APPIMAGE" | awk '{print $1}') + export APPDIR="${XDG_CACHE_HOME:-$HOME/.cache}/appimage-run/$SHA256" + + #compatibility + if [ -x "$APPDIR/squashfs-root" ]; then APPDIR="$APPDIR/squashfs-root"; fi + + if [ ! 
-x "$APPDIR" ]; then + mkdir -p "$(dirname "$APPDIR")" + unpack "$APPIMAGE" "$APPDIR" + else echo "$(basename "$APPIMAGE")" installed in "$APPDIR" + fi + + export PATH="$PATH:$PWD/usr/bin" +} + +wrap() { + + # quite same in appimageTools + export APPIMAGE_SILENT_INSTALL=1 + + if [ -n "$APPIMAGE_DEBUG_EXEC" ]; then + cd "$APPDIR" || true + exec "$APPIMAGE_DEBUG_EXEC" + fi + + exec "$APPDIR/AppRun" "$@" +} + +usage() { + cat <<EOF +Usage: appimage-run [appimage-run options] <AppImage> [AppImage options] + +-h show this message +-d debug mode +-x <directory> : extract appimage in the directory then exit. +-w <directory> : run uncompressed appimage directory (used in appimageTools) + +[AppImage options]: Options are passed on to the appimage. +If you want to execute a custom command in the appimage's environment, set the APPIMAGE_DEBUG_EXEC environment variable. + +EOF + exit 1 +} + +while getopts "x:w:dh" option; do + case "${option}" in + d) set -x + ;; + x) # eXtract + unpack_opt=true + APPDIR=${OPTARG} + ;; + w) # WrapAppImage + export APPDIR=${OPTARG} + wrap_opt=true + ;; + h) usage + ;; + *) usage + ;; + esac +done +shift "$((OPTIND-1))" + +if [ -n "$wrap_opt" ] && [ -d "$APPDIR" ]; then + wrap "$@" + exit +else + APPIMAGE="$(realpath "$1")" || usage + shift +fi + +if [ -n "$unpack_opt" ] && [ -f "$APPIMAGE" ]; then + unpack "$APPIMAGE" "$APPDIR" + exit +fi + +if [ -n "$apprun_opt" ] && [ -f "$APPIMAGE" ]; then + apprun + wrap "$@" + exit +fi diff --git a/nixpkgs/pkgs/build-support/appimage/default.nix b/nixpkgs/pkgs/build-support/appimage/default.nix new file mode 100644 index 000000000000..7b9bb239402b --- /dev/null +++ b/nixpkgs/pkgs/build-support/appimage/default.nix @@ -0,0 +1,213 @@ +{ lib +, bash +, binutils-unwrapped +, coreutils +, gawk +, libarchive +, pv +, squashfsTools +, buildFHSEnv +, pkgs +}: + +rec { + appimage-exec = pkgs.substituteAll { + src = ./appimage-exec.sh; + isExecutable = true; + dir = "bin"; + path = lib.makeBinPath [ + bash + binutils-unwrapped + coreutils + gawk + libarchive + pv + squashfsTools + ]; + }; + + extract = args@{ name ? "${args.pname}-${args.version}", postExtract ? "", src, ... }: pkgs.runCommand "${name}-extracted" { + buildInputs = [ appimage-exec ]; + } '' + appimage-exec.sh -x $out ${src} + ${postExtract} + ''; + + # for compatibility, deprecated + extractType1 = extract; + extractType2 = extract; + wrapType1 = wrapType2; + + wrapAppImage = args@{ + name ? "${args.pname}-${args.version}", + src, + extraPkgs, + meta ? {}, + ... + }: buildFHSEnv + (defaultFhsEnvArgs // { + inherit name; + + targetPkgs = pkgs: [ appimage-exec ] + ++ defaultFhsEnvArgs.targetPkgs pkgs ++ extraPkgs pkgs; + + runScript = "appimage-exec.sh -w ${src} --"; + + meta = { + sourceProvenance = with lib.sourceTypes; [ binaryNativeCode ]; + } // meta; + } // (removeAttrs args ([ "pname" "version" ] ++ (builtins.attrNames (builtins.functionArgs wrapAppImage))))); + + wrapType2 = args@{ name ? "${args.pname}-${args.version}", src, extraPkgs ? pkgs: [ ], ... 
}: wrapAppImage + (args // { + inherit name extraPkgs; + src = extract { inherit name src; }; + + # passthru src to make nix-update work + # hack to keep the origin position (unsafeGetAttrPos) + passthru = lib.pipe args [ + lib.attrNames + (lib.remove "src") + (removeAttrs args) + ] // args.passthru or { }; + }); + + defaultFhsEnvArgs = { + name = "appimage-env"; + + # Most of the packages were taken from the Steam chroot + targetPkgs = pkgs: with pkgs; [ + gtk3 + bashInteractive + gnome.zenity + xorg.xrandr + which + perl + xdg-utils + iana-etc + krb5 + gsettings-desktop-schemas + hicolor-icon-theme # dont show a gtk warning about hicolor not being installed + ]; + + # list of libraries expected in an appimage environment: + # https://github.com/AppImage/pkg2appimage/blob/master/excludelist + multiPkgs = pkgs: with pkgs; [ + desktop-file-utils + xorg.libXcomposite + xorg.libXtst + xorg.libXrandr + xorg.libXext + xorg.libX11 + xorg.libXfixes + libGL + + gst_all_1.gstreamer + gst_all_1.gst-plugins-ugly + gst_all_1.gst-plugins-base + libdrm + xorg.xkeyboardconfig + xorg.libpciaccess + + glib + gtk2 + bzip2 + zlib + gdk-pixbuf + + xorg.libXinerama + xorg.libXdamage + xorg.libXcursor + xorg.libXrender + xorg.libXScrnSaver + xorg.libXxf86vm + xorg.libXi + xorg.libSM + xorg.libICE + freetype + curlWithGnuTls + nspr + nss + fontconfig + cairo + pango + expat + dbus + cups + libcap + SDL2 + libusb1 + udev + dbus-glib + atk + at-spi2-atk + libudev0-shim + + xorg.libXt + xorg.libXmu + xorg.libxcb + xorg.xcbutil + xorg.xcbutilwm + xorg.xcbutilimage + xorg.xcbutilkeysyms + xorg.xcbutilrenderutil + libGLU + libuuid + libogg + libvorbis + SDL + SDL2_image + glew110 + openssl + libidn + tbb + wayland + mesa + libxkbcommon + vulkan-loader + + flac + freeglut + libjpeg + libpng12 + libpulseaudio + libsamplerate + libmikmod + libtheora + libtiff + pixman + speex + SDL_image + SDL_ttf + SDL_mixer + SDL2_ttf + SDL2_mixer + libappindicator-gtk2 + libcaca + libcanberra + libgcrypt + libvpx + librsvg + xorg.libXft + libvdpau + alsa-lib + + harfbuzz + e2fsprogs + libgpg-error + keyutils.lib + libjack2 + fribidi + p11-kit + + gmp + + # libraries not on the upstream include list, but nevertheless expected + # by at least one appimage + libtool.lib # for Synfigstudio + xorg.libxshmfence # for apple-music-electron + at-spi2-core + pciutils # for FreeCAD + ]; + }; +} diff --git a/nixpkgs/pkgs/build-support/binary-cache/default.nix b/nixpkgs/pkgs/build-support/binary-cache/default.nix new file mode 100644 index 000000000000..27f9ad962899 --- /dev/null +++ b/nixpkgs/pkgs/build-support/binary-cache/default.nix @@ -0,0 +1,40 @@ +{ lib, stdenv, buildPackages }: + +# This function is for creating a flat-file binary cache, i.e. the kind created by +# nix copy --to file:///some/path and usable as a substituter (with the file:// prefix). + +# For example, in the Nixpkgs repo: +# nix-build -E 'with import ./. {}; mkBinaryCache { rootPaths = [hello]; }' + +{ name ? "binary-cache" +, rootPaths +}: + +stdenv.mkDerivation { + inherit name; + + __structuredAttrs = true; + + exportReferencesGraph.closure = rootPaths; + + preferLocalBuild = true; + + PATH = lib.makeBinPath (with buildPackages; [ coreutils jq python3 nix xz ]); + + builder = builtins.toFile "builder" '' + . 
.attrs.sh + + export out=''${outputs[out]} + + mkdir $out + mkdir $out/nar + + python ${./make-binary-cache.py} + + # These directories must exist, or Nix might try to create them in LocalBinaryCacheStore::init(), + # which fails if mounted read-only + mkdir $out/realisations + mkdir $out/debuginfo + mkdir $out/log + ''; +} diff --git a/nixpkgs/pkgs/build-support/binary-cache/make-binary-cache.py b/nixpkgs/pkgs/build-support/binary-cache/make-binary-cache.py new file mode 100644 index 000000000000..16dd8a7e96bc --- /dev/null +++ b/nixpkgs/pkgs/build-support/binary-cache/make-binary-cache.py @@ -0,0 +1,43 @@ + +import json +import os +import subprocess + +with open(".attrs.json", "r") as f: + closures = json.load(f)["closure"] + +os.chdir(os.environ["out"]) + +nixPrefix = os.environ["NIX_STORE"] # Usually /nix/store + +with open("nix-cache-info", "w") as f: + f.write("StoreDir: " + nixPrefix + "\n") + +def dropPrefix(path): + return path[len(nixPrefix + "/"):] + +for item in closures: + narInfoHash = dropPrefix(item["path"]).split("-")[0] + + xzFile = "nar/" + narInfoHash + ".nar.xz" + with open(xzFile, "w") as f: + subprocess.run("nix-store --dump %s | xz -c" % item["path"], stdout=f, shell=True) + + fileHash = subprocess.run(["nix-hash", "--base32", "--type", "sha256", item["path"]], capture_output=True).stdout.decode().strip() + fileSize = os.path.getsize(xzFile) + + # Rename the .nar.xz file to its own hash to match "nix copy" behavior + finalXzFile = "nar/" + fileHash + ".nar.xz" + os.rename(xzFile, finalXzFile) + + with open(narInfoHash + ".narinfo", "w") as f: + f.writelines((x + "\n" for x in [ + "StorePath: " + item["path"], + "URL: " + finalXzFile, + "Compression: xz", + "FileHash: sha256:" + fileHash, + "FileSize: " + str(fileSize), + "NarHash: " + item["narHash"], + "NarSize: " + str(item["narSize"]), + "References: " + " ".join(dropPrefix(ref) for ref in item["references"]), + ])) diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/add-darwin-ldflags-before.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/add-darwin-ldflags-before.sh new file mode 100644 index 000000000000..75d9484846a8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/add-darwin-ldflags-before.sh @@ -0,0 +1,81 @@ +# Unconditionally adding in platform version flags will result in warnings that +# will be treated as errors by some packages. Add any missing flags here. + +# There are two things to be configured: the "platform version" (oldest +# supported version of macos, ios, etc), and the "sdk version". +# +# The modern way of configuring these is to use: +# -platform_version $platform $platform_version $sdk_version" +# +# The old way is still supported, and uses flags like: +# -${platform}_version_min $platform_version +# -sdk_version $sdk_version +# +# If both styles are specified ld will combine them. If multiple versions are +# specified for the same platform, ld will emit an error. +# +# The following adds flags for whichever properties have not already been +# provided. + +havePlatformVersionFlag= +haveDarwinSDKVersion= +haveDarwinPlatformVersion= + +# Roles will set by add-flags.sh, but add-flags.sh can be skipped when the +# cc-wrapper has added the linker flags. Both the cc-wrapper and the binutils +# wrapper mangle the same variable (MACOSX_DEPLOYMENT_TARGET), so if roles are +# empty due to being run through the cc-wrapper then the mangle here is a no-op +# and we still do the right thing. +# +# To be robust, make sure we always have the correct set of roles. 
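# Illustrative sketch only (substitution values are hypothetical): with
# @darwinPlatform@ = macos, @darwinMinVersion@ = 10.12 and @darwinSdkVersion@ = 11.0,
# a link invocation that carries no version flags at all gains
#   -platform_version macos 10.12 11.0
# whereas an invocation that already passes -macosx_version_min 10.13 is only
# completed with the missing -sdk_version 11.0, never overridden.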
+accumulateRoles + +mangleVarSingle @darwinMinVersionVariable@ ${role_suffixes[@]+"${role_suffixes[@]}"} + +n=0 +nParams=${#params[@]} +while (( n < nParams )); do + p=${params[n]} + case "$p" in + # the current platform + -@darwinPlatform@_version_min) + haveDarwinPlatformVersion=1 + ;; + + # legacy aliases + -macosx_version_min|-iphoneos_version_min|-iosmac_version_min|-uikitformac_version_min) + haveDarwinPlatformVersion=1 + ;; + + -sdk_version) + haveDarwinSDKVersion=1 + ;; + + -platform_version) + havePlatformVersionFlag=1 + + # If clang can't determine the sdk version it will pass 0.0.0. This + # has runtime effects so we override this to use the known sdk + # version. + if [ "${params[n+3]-}" = 0.0.0 ]; then + params[n+3]=@darwinSdkVersion@ + fi + ;; + esac + n=$((n + 1)) +done + +# If the caller has set -platform_version, trust they're doing the right thing. +# This will be the typical case for clang in nixpkgs. +if [ ! "$havePlatformVersionFlag" ]; then + if [ ! "$haveDarwinSDKVersion" ] && [ ! "$haveDarwinPlatformVersion" ]; then + # Nothing provided. Use the modern "-platform_version" to set both. + extraBefore+=(-platform_version @darwinPlatform@ "${@darwinMinVersionVariable@_@suffixSalt@:-@darwinMinVersion@}" @darwinSdkVersion@) + elif [ ! "$haveDarwinSDKVersion" ]; then + # Add missing sdk version + extraBefore+=(-sdk_version @darwinSdkVersion@) + elif [ ! "$haveDarwinPlatformVersion" ]; then + # Add missing platform version + extraBefore+=(-@darwinPlatform@_version_min "${@darwinMinVersionVariable@_@suffixSalt@:-@darwinMinVersion@}") + fi +fi diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/add-flags.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/add-flags.sh new file mode 100644 index 000000000000..3b94daba65d7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/add-flags.sh @@ -0,0 +1,37 @@ +# See cc-wrapper for comments. +var_templates_list=( + NIX_IGNORE_LD_THROUGH_GCC + NIX_LDFLAGS + NIX_LDFLAGS_BEFORE + NIX_DYNAMIC_LINKER + NIX_LDFLAGS_AFTER + NIX_LDFLAGS_HARDEN + NIX_HARDENING_ENABLE +) +var_templates_bool=( + NIX_SET_BUILD_ID + NIX_DONT_SET_RPATH +) + +accumulateRoles + +for var in "${var_templates_list[@]}"; do + mangleVarList "$var" ${role_suffixes[@]+"${role_suffixes[@]}"} +done +for var in "${var_templates_bool[@]}"; do + mangleVarBool "$var" ${role_suffixes[@]+"${role_suffixes[@]}"} +done + +if [ -e @out@/nix-support/libc-ldflags ]; then + NIX_LDFLAGS_@suffixSalt@+=" $(< @out@/nix-support/libc-ldflags)" +fi + +if [ -z "$NIX_DYNAMIC_LINKER_@suffixSalt@" ] && [ -e @out@/nix-support/ld-set-dynamic-linker ]; then + NIX_DYNAMIC_LINKER_@suffixSalt@="$(< @out@/nix-support/dynamic-linker)" +fi + +if [ -e @out@/nix-support/libc-ldflags-before ]; then + NIX_LDFLAGS_BEFORE_@suffixSalt@="$(< @out@/nix-support/libc-ldflags-before) $NIX_LDFLAGS_BEFORE_@suffixSalt@" +fi + +export NIX_BINTOOLS_WRAPPER_FLAGS_SET_@suffixSalt@=1 diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/add-hardening.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/add-hardening.sh new file mode 100644 index 000000000000..db9553c3fc76 --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/add-hardening.sh @@ -0,0 +1,62 @@ +declare -a hardeningLDFlags=() + +declare -A hardeningEnableMap=() + +# Intentionally word-split in case 'NIX_HARDENING_ENABLE' is defined in Nix. The +# array expansion also prevents undefined variables from causing trouble with +# `set -u`. 
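# Rough example (the variable-name suffix is hypothetical): with
#   NIX_HARDENING_ENABLE_x86_64_unknown_linux_gnu="pie relro bindnow"
# the loop below produces hardeningEnableMap=([pie]=1 [relro]=1 [bindnow]=1);
# any flag listed in @hardening_unsupported_flags@ is then dropped again.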
+for flag in ${NIX_HARDENING_ENABLE_@suffixSalt@-}; do + hardeningEnableMap["$flag"]=1 +done + +# Remove unsupported flags. +for flag in @hardening_unsupported_flags@; do + unset -v "hardeningEnableMap[$flag]" +done + +if (( "${NIX_DEBUG:-0}" >= 1 )); then + declare -a allHardeningFlags=(pie relro bindnow) + declare -A hardeningDisableMap=() + + # Determine which flags were effectively disabled so we can report below. + for flag in "${allHardeningFlags[@]}"; do + if [[ -z "${hardeningEnableMap[$flag]-}" ]]; then + hardeningDisableMap[$flag]=1 + fi + done + + printf 'HARDENING: disabled flags:' >&2 + (( "${#hardeningDisableMap[@]}" )) && printf ' %q' "${!hardeningDisableMap[@]}" >&2 + echo >&2 + + if (( "${#hardeningEnableMap[@]}" )); then + echo 'HARDENING: Is active (not completely disabled with "all" flag)' >&2; + fi +fi + +for flag in "${!hardeningEnableMap[@]}"; do + case $flag in + pie) + if [[ ! (" ${params[*]} " =~ " -shared " \ + || " ${params[*]} " =~ " -static " \ + || " ${params[*]} " =~ " -r " \ + || " ${params[*]} " =~ " -Ur " \ + || " ${params[*]} " =~ " -i ") ]]; then + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling LDFlags -pie >&2; fi + hardeningLDFlags+=('-pie') + fi + ;; + relro) + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling relro >&2; fi + hardeningLDFlags+=('-z' 'relro') + ;; + bindnow) + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling bindnow >&2; fi + hardeningLDFlags+=('-z' 'now') + ;; + *) + # Ignore unsupported. Checked in Nix that at least *some* + # tool supports each flag. + ;; + esac +done diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-install_name_tool-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-install_name_tool-wrapper.sh new file mode 100755 index 000000000000..376a7abfe41c --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-install_name_tool-wrapper.sh @@ -0,0 +1,49 @@ +#! @shell@ +# shellcheck shell=bash + +set -eu -o pipefail +o posix +shopt -s nullglob + +if (( "${NIX_DEBUG:-0}" >= 7 )); then + set -x +fi + +source @signingUtils@ + +extraAfter=() +extraBefore=() +params=("$@") + +input= + +pprev= +prev= +for p in \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} +do + if [ "$pprev" != "-change" ] && [[ "$prev" != -* ]] && [[ "$p" != -* ]]; then + input="$p" + fi + pprev="$prev" + prev="$p" +done + +# Optionally print debug info. +if (( "${NIX_DEBUG:-0}" >= 1 )); then + # Old bash workaround, see above. + echo "extra flags before to @prog@:" >&2 + printf " %q\n" ${extraBefore+"${extraBefore[@]}"} >&2 + echo "original flags to @prog@:" >&2 + printf " %q\n" ${params+"${params[@]}"} >&2 + echo "extra flags after to @prog@:" >&2 + printf " %q\n" ${extraAfter+"${extraAfter[@]}"} >&2 +fi + +@prog@ \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} + +sign "$input" diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-strip-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-strip-wrapper.sh new file mode 100755 index 000000000000..a67699547a6f --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/darwin-strip-wrapper.sh @@ -0,0 +1,78 @@ +#! 
@shell@ +# shellcheck shell=bash + +set -eu -o pipefail +o posix +shopt -s nullglob + +if (( "${NIX_DEBUG:-0}" >= 7 )); then + set -x +fi + +source @signingUtils@ + +extraAfter=() +extraBefore=() +params=("$@") + +output= +inputs=() + +restAreFiles= +prev= +for p in \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} +do + if [ "$restAreFiles" ]; then + inputs+=("$p") + else + case "$prev" in + -s|-R|-d|-arch) + # Unrelated arguments with values + ;; + -o) + # Explicit output + output="$p" + ;; + *) + # Any other orgument either takes no value, or is a file. + if [[ "$p" != -* ]]; then + inputs+=("$p") + fi + ;; + esac + + if [ "$p" == - ]; then + restAreFiles=1 + fi + fi + + prev="$p" +done + +# Optionally print debug info. +if (( "${NIX_DEBUG:-0}" >= 1 )); then + # Old bash workaround, see above. + echo "extra flags before to @prog@:" >&2 + printf " %q\n" ${extraBefore+"${extraBefore[@]}"} >&2 + echo "original flags to @prog@:" >&2 + printf " %q\n" ${params+"${params[@]}"} >&2 + echo "extra flags after to @prog@:" >&2 + printf " %q\n" ${extraAfter+"${extraAfter[@]}"} >&2 +fi + +@prog@ \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} + +if [ "$output" ]; then + # Single explicit output + signIfRequired "$output" +else + # Multiple inputs, rewritten in place + for input in "${inputs[@]}"; do + signIfRequired "$input" + done +fi diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/default.nix b/nixpkgs/pkgs/build-support/bintools-wrapper/default.nix new file mode 100644 index 000000000000..ccd342eaa0d1 --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/default.nix @@ -0,0 +1,401 @@ +# The Nixpkgs CC is not directly usable, since it doesn't know where +# the C library and standard header files are. Therefore the compiler +# produced by that package cannot be installed directly in a user +# environment and used from the command line. So we use a wrapper +# script that sets up the right environment variables so that the +# compiler and the linker just "work". + +{ name ? "" +, lib +, stdenvNoCC +, bintools ? null, libc ? null, coreutils ? null, shell ? stdenvNoCC.shell, gnugrep ? null +, netbsd ? null, netbsdCross ? null +, sharedLibraryLoader ? + if libc == null then + null + else if stdenvNoCC.targetPlatform.isNetBSD then + if !(targetPackages ? netbsdCross) then + netbsd.ld_elf_so + else if libc != targetPackages.netbsdCross.headers then + targetPackages.netbsdCross.ld_elf_so + else + null + else + lib.getLib libc +, nativeTools, noLibc ? false, nativeLibc, nativePrefix ? "" +, propagateDoc ? bintools != null && bintools ? man +, extraPackages ? [], extraBuildCommands ? "" +, isGNU ? bintools.isGNU or false +, isLLVM ? bintools.isLLVM or false +, isCCTools ? bintools.isCCTools or false +, buildPackages ? {} +, targetPackages ? {} +, useMacosReexportHack ? false +, wrapGas ? false + +# Darwin code signing support utilities +, postLinkSignHook ? null, signingUtils ? null +}: + +with lib; + +assert nativeTools -> !propagateDoc && nativePrefix != ""; +assert !nativeTools -> + bintools != null && coreutils != null && gnugrep != null; +assert !(nativeLibc && noLibc); +assert (noLibc || nativeLibc) == (libc == null); + +let + stdenv = stdenvNoCC; + inherit (stdenv) hostPlatform targetPlatform; + + # Prefix for binaries. Customarily ends with a dash separator. + # + # TODO(@Ericson2314) Make unconditional, or optional but always true by + # default. 
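  # Illustrative example (platform names assumed, not taken from this file):
  # when hostPlatform is x86_64-linux and targetPlatform.config is
  # "aarch64-unknown-linux-gnu", targetPrefix becomes "aarch64-unknown-linux-gnu-"
  # and the wrapper installs e.g. bin/aarch64-unknown-linux-gnu-ld; for a
  # native build targetPrefix is the empty string.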
+ targetPrefix = lib.optionalString (targetPlatform != hostPlatform) + (targetPlatform.config + "-"); + + bintoolsVersion = lib.getVersion bintools; + bintoolsName = lib.removePrefix targetPrefix (lib.getName bintools); + + libc_bin = lib.optionalString (libc != null) (getBin libc); + libc_dev = lib.optionalString (libc != null) (getDev libc); + libc_lib = lib.optionalString (libc != null) (getLib libc); + bintools_bin = lib.optionalString (!nativeTools) (getBin bintools); + # The wrapper scripts use 'cat' and 'grep', so we may need coreutils. + coreutils_bin = lib.optionalString (!nativeTools) (getBin coreutils); + + # See description in cc-wrapper. + suffixSalt = replaceStrings ["-" "."] ["_" "_"] targetPlatform.config; + + # The dynamic linker has different names on different platforms. This is a + # shell glob that ought to match it. + dynamicLinker = + /**/ if sharedLibraryLoader == null then "" + else if targetPlatform.libc == "musl" then "${sharedLibraryLoader}/lib/ld-musl-*" + else if targetPlatform.libc == "uclibc" then "${sharedLibraryLoader}/lib/ld*-uClibc.so.1" + else if (targetPlatform.libc == "bionic" && targetPlatform.is32bit) then "/system/bin/linker" + else if (targetPlatform.libc == "bionic" && targetPlatform.is64bit) then "/system/bin/linker64" + else if targetPlatform.libc == "nblibc" then "${sharedLibraryLoader}/libexec/ld.elf_so" + else if targetPlatform.system == "i686-linux" then "${sharedLibraryLoader}/lib/ld-linux.so.2" + else if targetPlatform.system == "x86_64-linux" then "${sharedLibraryLoader}/lib/ld-linux-x86-64.so.2" + # ELFv1 (.1) or ELFv2 (.2) ABI + else if targetPlatform.isPower64 then "${sharedLibraryLoader}/lib/ld64.so.*" + # ARM with a wildcard, which can be "" or "-armhf". + else if (with targetPlatform; isAarch32 && isLinux) then "${sharedLibraryLoader}/lib/ld-linux*.so.3" + else if targetPlatform.system == "aarch64-linux" then "${sharedLibraryLoader}/lib/ld-linux-aarch64.so.1" + else if targetPlatform.system == "powerpc-linux" then "${sharedLibraryLoader}/lib/ld.so.1" + else if targetPlatform.isMips then "${sharedLibraryLoader}/lib/ld.so.1" + # `ld-linux-riscv{32,64}-<abi>.so.1` + else if targetPlatform.isRiscV then "${sharedLibraryLoader}/lib/ld-linux-riscv*.so.1" + else if targetPlatform.isLoongArch64 then "${sharedLibraryLoader}/lib/ld-linux-loongarch*.so.1" + else if targetPlatform.isDarwin then "/usr/lib/dyld" + else if targetPlatform.isFreeBSD then "/libexec/ld-elf.so.1" + else if lib.hasSuffix "pc-gnu" targetPlatform.config then "ld.so.1" + else ""; + + expand-response-params = + lib.optionalString (buildPackages ? stdenv && buildPackages.stdenv.hasCC && buildPackages.stdenv.cc != "/dev/null") + (import ../expand-response-params { inherit (buildPackages) stdenv; }); + +in + +stdenv.mkDerivation { + pname = targetPrefix + + (if name != "" then name else "${bintoolsName}-wrapper"); + version = lib.optionalString (bintools != null) bintoolsVersion; + + preferLocalBuild = true; + + outputs = [ "out" ] ++ optionals propagateDoc ([ "man" ] ++ optional (bintools ? 
info) "info"); + + passthru = { + inherit targetPrefix suffixSalt; + inherit bintools libc nativeTools nativeLibc nativePrefix isGNU isLLVM; + + emacsBufferSetup = pkgs: '' + ; We should handle propagation here too + (mapc + (lambda (arg) + (when (file-directory-p (concat arg "/lib")) + (setenv "NIX_LDFLAGS_${suffixSalt}" (concat (getenv "NIX_LDFLAGS_${suffixSalt}") " -L" arg "/lib"))) + (when (file-directory-p (concat arg "/lib64")) + (setenv "NIX_LDFLAGS_${suffixSalt}" (concat (getenv "NIX_LDFLAGS_${suffixSalt}") " -L" arg "/lib64")))) + '(${concatStringsSep " " (map (pkg: "\"${pkg}\"") pkgs)})) + ''; + }; + + dontBuild = true; + dontConfigure = true; + + enableParallelBuilding = true; + + unpackPhase = '' + src=$PWD + ''; + + installPhase = + '' + mkdir -p $out/bin $out/nix-support + + wrap() { + local dst="$1" + local wrapper="$2" + export prog="$3" + export use_response_file_by_default=${if isCCTools then "1" else "0"} + substituteAll "$wrapper" "$out/bin/$dst" + chmod +x "$out/bin/$dst" + } + '' + + + (if nativeTools then '' + echo ${nativePrefix} > $out/nix-support/orig-bintools + + ldPath="${nativePrefix}/bin" + '' else '' + echo $bintools_bin > $out/nix-support/orig-bintools + + ldPath="${bintools_bin}/bin" + '' + + # Solaris needs an additional ld wrapper. + + optionalString (targetPlatform.isSunOS && nativePrefix != "") '' + ldPath="${nativePrefix}/bin" + exec="$ldPath/${targetPrefix}ld" + wrap ld-solaris ${./ld-solaris-wrapper.sh} + '') + + # If we are asked to wrap `gas` and this bintools has it, + # then symlink it (`as` will be symlinked next). + # This is mainly for the wrapped gnat-bootstrap on x86-64 Darwin, + # as it must have both the GNU assembler from cctools (installed as `gas`) + # and the Clang integrated assembler (installed as `as`). + # See pkgs/os-specific/darwin/binutils/default.nix for details. + + lib.optionalString wrapGas '' + if [ -e $ldPath/${targetPrefix}gas ]; then + ln -s $ldPath/${targetPrefix}gas $out/bin/${targetPrefix}gas + fi + '' + + # Create symlinks for rest of the binaries. 
+ + '' + for binary in objdump objcopy size strings as ar nm gprof dwp c++filt addr2line \ + ranlib readelf elfedit dlltool dllwrap windmc windres; do + if [ -e $ldPath/${targetPrefix}''${binary} ]; then + ln -s $ldPath/${targetPrefix}''${binary} $out/bin/${targetPrefix}''${binary} + fi + done + + '' + (if !useMacosReexportHack then '' + if [ -e ''${ld:-$ldPath/${targetPrefix}ld} ]; then + wrap ${targetPrefix}ld ${./ld-wrapper.sh} ''${ld:-$ldPath/${targetPrefix}ld} + fi + '' else '' + ldInner="${targetPrefix}ld-reexport-delegate" + wrap "$ldInner" ${./macos-sierra-reexport-hack.bash} ''${ld:-$ldPath/${targetPrefix}ld} + wrap "${targetPrefix}ld" ${./ld-wrapper.sh} "$out/bin/$ldInner" + unset ldInner + '') + '' + + for variant in $ldPath/${targetPrefix}ld.*; do + basename=$(basename "$variant") + wrap $basename ${./ld-wrapper.sh} $variant + done + ''; + + strictDeps = true; + depsTargetTargetPropagated = extraPackages; + + setupHooks = [ + ../setup-hooks/role.bash + ./setup-hook.sh + ]; + + postFixup = + ## + ## General libc support + ## + optionalString (libc != null) ('' + touch "$out/nix-support/libc-ldflags" + echo "-L${libc_lib}${libc.libdir or "/lib"}" >> $out/nix-support/libc-ldflags + + echo "${libc_lib}" > $out/nix-support/orig-libc + echo "${libc_dev}" > $out/nix-support/orig-libc-dev + '' + + ## + ## Dynamic linker support + ## + + optionalString (sharedLibraryLoader != null) '' + if [[ -z ''${dynamicLinker+x} ]]; then + echo "Don't know the name of the dynamic linker for platform '${targetPlatform.config}', so guessing instead." >&2 + local dynamicLinker="${sharedLibraryLoader}/lib/ld*.so.?" + fi + '' + + # Expand globs to fill array of options + + '' + dynamicLinker=($dynamicLinker) + + case ''${#dynamicLinker[@]} in + 0) echo "No dynamic linker found for platform '${targetPlatform.config}'." >&2;; + 1) echo "Using dynamic linker: '$dynamicLinker'" >&2;; + *) echo "Multiple dynamic linkers found for platform '${targetPlatform.config}'." >&2;; + esac + + if [ -n "''${dynamicLinker-}" ]; then + echo $dynamicLinker > $out/nix-support/dynamic-linker + + ${if targetPlatform.isDarwin then '' + printf "export LD_DYLD_PATH=%q\n" "$dynamicLinker" >> $out/nix-support/setup-hook + '' else lib.optionalString (sharedLibraryLoader != null) '' + if [ -e ${sharedLibraryLoader}/lib/32/ld-linux.so.2 ]; then + echo ${sharedLibraryLoader}/lib/32/ld-linux.so.2 > $out/nix-support/dynamic-linker-m32 + fi + touch $out/nix-support/ld-set-dynamic-linker + ''} + fi + '') + + ## + ## User env support + ## + + # Propagate the underling unwrapped bintools so that if you + # install the wrapper, you get tools like objdump (same for any + # binaries of libc). + + optionalString (!nativeTools) '' + printWords ${bintools_bin} ${lib.optionalString (libc != null) libc_bin} > $out/nix-support/propagated-user-env-packages + '' + + ## + ## Man page and info support + ## + + optionalString propagateDoc ('' + ln -s ${bintools.man} $man + '' + optionalString (bintools ? 
info) '' + ln -s ${bintools.info} $info + '') + + ## + ## Hardening support + ## + + # some linkers on some platforms don't support specific -z flags + + '' + export hardening_unsupported_flags="" + if [[ "$($ldPath/${targetPrefix}ld -z now 2>&1 || true)" =~ un(recognized|known)\ option ]]; then + hardening_unsupported_flags+=" bindnow" + fi + if [[ "$($ldPath/${targetPrefix}ld -z relro 2>&1 || true)" =~ un(recognized|known)\ option ]]; then + hardening_unsupported_flags+=" relro" + fi + '' + + + optionalString hostPlatform.isCygwin '' + hardening_unsupported_flags+=" pic" + '' + + + optionalString targetPlatform.isAvr '' + hardening_unsupported_flags+=" relro bindnow" + '' + + + optionalString (libc != null && targetPlatform.isAvr) '' + for isa in avr5 avr3 avr4 avr6 avr25 avr31 avr35 avr51 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 tiny-stack; do + echo "-L${getLib libc}/avr/lib/$isa" >> $out/nix-support/libc-cflags + done + '' + + + optionalString stdenv.targetPlatform.isDarwin '' + echo "-arch ${targetPlatform.darwinArch}" >> $out/nix-support/libc-ldflags + '' + + ## + ## GNU specific extra strip flags + ## + + # TODO(@sternenseemann): make a generic strip wrapper? + + optionalString (bintools.isGNU or false) '' + wrap ${targetPrefix}strip ${./gnu-binutils-strip-wrapper.sh} \ + "${bintools_bin}/bin/${targetPrefix}strip" + '' + + ### + ### Remove LC_UUID + ### + + optionalString (stdenv.targetPlatform.isDarwin && !(bintools.isGNU or false)) '' + echo "-no_uuid" >> $out/nix-support/libc-ldflags-before + '' + + + '' + for flags in "$out/nix-support"/*flags*; do + substituteInPlace "$flags" --replace $'\n' ' ' + done + + substituteAll ${./add-flags.sh} $out/nix-support/add-flags.sh + substituteAll ${./add-hardening.sh} $out/nix-support/add-hardening.sh + substituteAll ${../wrapper-common/utils.bash} $out/nix-support/utils.bash + '' + + ### + ### Ensure consistent LC_VERSION_MIN_MACOSX + ### + + optionalString stdenv.targetPlatform.isDarwin ( + let + inherit (stdenv.targetPlatform) + darwinPlatform darwinSdkVersion + darwinMinVersion darwinMinVersionVariable; + in '' + export darwinPlatform=${darwinPlatform} + export darwinMinVersion=${darwinMinVersion} + export darwinSdkVersion=${darwinSdkVersion} + export darwinMinVersionVariable=${darwinMinVersionVariable} + substituteAll ${./add-darwin-ldflags-before.sh} $out/nix-support/add-local-ldflags-before.sh + '' + ) + + ## + ## Code signing on Apple Silicon + ## + + optionalString (targetPlatform.isDarwin && targetPlatform.isAarch64) '' + echo 'source ${postLinkSignHook}' >> $out/nix-support/post-link-hook + + export signingUtils=${signingUtils} + + wrap \ + ${targetPrefix}install_name_tool \ + ${./darwin-install_name_tool-wrapper.sh} \ + "${bintools_bin}/bin/${targetPrefix}install_name_tool" + + wrap \ + ${targetPrefix}strip ${./darwin-strip-wrapper.sh} \ + "${bintools_bin}/bin/${targetPrefix}strip" + '' + + ## + ## Extra custom steps + ## + + extraBuildCommands; + + env = { + # for substitution in utils.bash + expandResponseParams = "${expand-response-params}/bin/expand-response-params"; + shell = getBin shell + shell.shellPath or ""; + gnugrep_bin = lib.optionalString (!nativeTools) gnugrep; + wrapperName = "BINTOOLS_WRAPPER"; + inherit dynamicLinker targetPrefix suffixSalt coreutils_bin; + inherit bintools_bin libc_bin libc_dev libc_lib; + }; + + meta = + let bintools_ = lib.optionalAttrs (bintools != null) bintools; in + (lib.optionalAttrs (bintools_ ? 
meta) (removeAttrs bintools.meta ["priority"])) // + { description = + lib.attrByPath ["meta" "description"] "System binary utilities" bintools_ + + " (wrapper script)"; + priority = 10; + } // optionalAttrs useMacosReexportHack { + platforms = lib.platforms.darwin; + }; +} diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/gnu-binutils-strip-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/gnu-binutils-strip-wrapper.sh new file mode 100644 index 000000000000..5b5136e3d14c --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/gnu-binutils-strip-wrapper.sh @@ -0,0 +1,4 @@ +#! @shell@ +# shellcheck shell=bash + +exec @prog@ --enable-deterministic-archives "$@" diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/ld-solaris-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/ld-solaris-wrapper.sh new file mode 100644 index 000000000000..5d81e34a047f --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/ld-solaris-wrapper.sh @@ -0,0 +1,29 @@ +#!@shell@ +set -eu -o pipefail +shopt -s nullglob + +if (( "${NIX_DEBUG:-0}" >= 7 )); then + set -x +fi + +declare -a args=("$@") +# I've also tried adding -z direct and -z lazyload, but it gave too many problems with C++ exceptions :'( +# Also made sure libgcc would not be lazy-loaded, as suggested here: https://www.illumos.org/issues/2534#note-3 +# but still no success. +declare -a argsBefore=(-z ignore) argsAfter=() + +# This loop makes sure all -L arguments are before -l arguments, or ld may complain it cannot find a library. +# GNU binutils does not have this problem: +# http://stackoverflow.com/questions/5817269/does-the-order-of-l-and-l-options-in-the-gnu-linker-matter +while (( $# )); do + case "${args[$i]}" in + -L) argsBefore+=("$1" "$2"); shift ;; + -L?*) argsBefore+=("$1") ;; + *) argsAfter+=("$1") ;; + esac + shift +done + +# Trace: +set -x +exec "@ld@" "${argsBefore[@]}" "${argsAfter[@]}" diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/ld-wrapper.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/ld-wrapper.sh new file mode 100644 index 000000000000..dcbe8a4c2494 --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/ld-wrapper.sh @@ -0,0 +1,273 @@ +#! @shell@ +set -eu -o pipefail +o posix +shopt -s nullglob + +if (( "${NIX_DEBUG:-0}" >= 7 )); then + set -x +fi + +path_backup="$PATH" + +# phase separation makes this look useless +# shellcheck disable=SC2157 +if [ -n "@coreutils_bin@" ]; then + PATH="@coreutils_bin@/bin" +fi + +source @out@/nix-support/utils.bash + +if [ -z "${NIX_BINTOOLS_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then + source @out@/nix-support/add-flags.sh +fi + + +# Optionally filter out paths not refering to the store. +expandResponseParams "$@" + +# NIX_LINK_TYPE is set if ld has been called through our cc wrapper. We take +# advantage of this to avoid both recalculating it, and also repeating other +# processing cc wrapper has already done. 
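# Sketch of the two cases (file names hypothetical): a compiler-driver link
# such as `cc -o demo demo.o` reaches this wrapper with
# NIX_LINK_TYPE_@suffixSalt@ already exported, so it is reused as-is; a direct
# `ld -o demo demo.o` instead goes through checkLinkType, which inspects the
# parameters for flags such as -static or -static-pie and otherwise reports a
# dynamic link.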
+if [[ -n "${NIX_LINK_TYPE_@suffixSalt@:-}" ]]; then + linkType=$NIX_LINK_TYPE_@suffixSalt@ +else + linkType=$(checkLinkType "${params[@]}") +fi + +if [[ "${NIX_ENFORCE_PURITY:-}" = 1 && -n "${NIX_STORE:-}" + && ( -z "$NIX_IGNORE_LD_THROUGH_GCC_@suffixSalt@" || -z "${NIX_LINK_TYPE_@suffixSalt@:-}" ) ]]; then + rest=() + nParams=${#params[@]} + declare -i n=0 + + while (( "$n" < "$nParams" )); do + p=${params[n]} + p2=${params[n+1]:-} # handle `p` being last one + if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then + skip "${p:2}" + elif [ "$p" = -L ] && badPath "$p2"; then + n+=1; skip "$p2" + elif [ "$p" = -rpath ] && badPath "$p2"; then + n+=1; skip "$p2" + elif [ "$p" = -dynamic-linker ] && badPath "$p2"; then + n+=1; skip "$p2" + elif [ "$p" = -syslibroot ] && [ $p2 == // ]; then + # When gcc is built on darwin --with-build-sysroot=/ + # produces '-syslibroot //' linker flag. It's a no-op, + # which does not introduce impurities. + n+=1; skip "$p2" + elif [ "${p:0:10}" = /LIBPATH:/ ] && badPath "${p:9}"; then + reject "${p:9}" + # We need to not match LINK.EXE-style flags like + # /NOLOGO or /LIBPATH:/nix/store/foo + elif [[ $p =~ ^/[^:]*/ ]] && badPath "$p"; then + reject "$p" + elif [ "${p:0:9}" = --sysroot ]; then + # Our ld is not built with sysroot support (Can we fix that?) + : + else + rest+=("$p") + fi + n+=1 + done + # Old bash empty array hack + params=(${rest+"${rest[@]}"}) +fi + + +source @out@/nix-support/add-hardening.sh + +extraAfter=() +extraBefore=(${hardeningLDFlags[@]+"${hardeningLDFlags[@]}"}) + +if [ -z "${NIX_LINK_TYPE_@suffixSalt@:-}" ]; then + extraAfter+=($(filterRpathFlags "$linkType" $NIX_LDFLAGS_@suffixSalt@)) + extraBefore+=($(filterRpathFlags "$linkType" $NIX_LDFLAGS_BEFORE_@suffixSalt@)) + + # By adding dynamic linker to extraBefore we allow the users set their + # own dynamic linker as NIX_LD_FLAGS will override earlier set flags + if [[ "$linkType" == dynamic && -n "$NIX_DYNAMIC_LINKER_@suffixSalt@" ]]; then + extraBefore+=("-dynamic-linker" "$NIX_DYNAMIC_LINKER_@suffixSalt@") + fi +fi + +extraAfter+=($(filterRpathFlags "$linkType" $NIX_LDFLAGS_AFTER_@suffixSalt@)) + +# These flags *must not* be pulled up to -Wl, flags, so they can't go in +# add-flags.sh. They must always be set, so must not be disabled by +# NIX_LDFLAGS_SET. +if [ -e @out@/nix-support/add-local-ldflags-before.sh ]; then + source @out@/nix-support/add-local-ldflags-before.sh +fi + + +# Three tasks: +# +# 1. Find all -L... switches for rpath +# +# 2. Find relocatable flag for build id. +# +# 3. Choose 32-bit dynamic linker if needed +declare -a libDirs +declare -A libs +declare -i relocatable=0 link32=0 + +linkerOutput="a.out" + +if + [ "$NIX_DONT_SET_RPATH_@suffixSalt@" != 1 ] \ + || [ "$NIX_SET_BUILD_ID_@suffixSalt@" = 1 ] \ + || [ -e @out@/nix-support/dynamic-linker-m32 ] +then + prev= + # Old bash thinks empty arrays are undefined, ugh. + for p in \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} + do + case "$prev" in + -L) + libDirs+=("$p") + ;; + -l) + libs["lib${p}.so"]=1 + ;; + -m) + # Presumably only the last `-m` flag has any effect. + case "$p" in + elf_i386) link32=1;; + *) link32=0;; + esac + ;; + -dynamic-linker | -plugin) + # Ignore this argument, or it will match *.so and be added to rpath. + ;; + *) + case "$p" in + -L/*) + libDirs+=("${p:2}") + ;; + -l?*) + libs["lib${p:2}.so"]=1 + ;; + "${NIX_STORE:-}"/*.so | "${NIX_STORE:-}"/*.so.*) + # This is a direct reference to a shared library. 
+ libDirs+=("${p%/*}") + libs["${p##*/}"]=1 + ;; + -r | --relocatable | -i) + relocatable=1 + esac + ;; + esac + prev="$p" + done +fi + +# Determine linkerOutput +prev= +for p in \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} +do + case "$prev" in + -o) + # Informational for post-link-hook + linkerOutput="$p" + ;; + *) + ;; + esac + prev="$p" +done + +if [[ "$link32" == "1" && "$linkType" == dynamic && -e "@out@/nix-support/dynamic-linker-m32" ]]; then + # We have an alternate 32-bit linker and we're producing a 32-bit ELF, let's + # use it. + extraAfter+=( + '-dynamic-linker' + "$(< @out@/nix-support/dynamic-linker-m32)" + ) +fi + +# Add all used dynamic libraries to the rpath. +if [[ "$NIX_DONT_SET_RPATH_@suffixSalt@" != 1 && "$linkType" != static-pie ]]; then + # For each directory in the library search path (-L...), + # see if it contains a dynamic library used by a -l... flag. If + # so, add the directory to the rpath. + # It's important to add the rpath in the order of -L..., so + # the link time chosen objects will be those of runtime linking. + declare -A rpaths + for dir in ${libDirs+"${libDirs[@]}"}; do + if [[ "$dir" =~ [/.][/.] ]] && dir2=$(readlink -f "$dir"); then + dir="$dir2" + fi + if [ -n "${rpaths[$dir]:-}" ] || [[ "$dir" != "${NIX_STORE:-}"/* ]]; then + # If the path is not in the store, don't add it to the rpath. + # This typically happens for libraries in /tmp that are later + # copied to $out/lib. If not, we're screwed. + continue + fi + for path in "$dir"/*; do + file="${path##*/}" + if [ "${libs[$file]:-}" ]; then + # This library may have been provided by a previous directory, + # but if that library file is inside an output of the current + # derivation, it can be deleted after this compilation and + # should be found in a later directory, so we add all + # directories that contain any of the libraries to rpath. + rpaths["$dir"]=1 + extraAfter+=(-rpath "$dir") + break + fi + done + done + +fi + +# This is outside the DONT_SET_RPATH branch because it's more targeted and we +# usually want it (on Darwin) even if DONT_SET_RPATH is set. +if [ -n "${NIX_COREFOUNDATION_RPATH:-}" ]; then + extraAfter+=(-rpath $NIX_COREFOUNDATION_RPATH) +fi + +# Only add --build-id if this is a final link. FIXME: should build gcc +# with --enable-linker-build-id instead? +# +# Note: `lld` interprets `--build-id` to mean `--build-id=fast`; GNU ld defaults +# to SHA1. +if [ "$NIX_SET_BUILD_ID_@suffixSalt@" = 1 ] && ! (( "$relocatable" )); then + extraAfter+=(--build-id="${NIX_BUILD_ID_STYLE:-sha1}") +fi + + +# Optionally print debug info. +if (( "${NIX_DEBUG:-0}" >= 1 )); then + # Old bash workaround, see above. + echo "extra flags before to @prog@:" >&2 + printf " %q\n" ${extraBefore+"${extraBefore[@]}"} >&2 + echo "original flags to @prog@:" >&2 + printf " %q\n" ${params+"${params[@]}"} >&2 + echo "extra flags after to @prog@:" >&2 + printf " %q\n" ${extraAfter+"${extraAfter[@]}"} >&2 +fi + +PATH="$path_backup" +# Old bash workaround, see above. 
+ +if (( "${NIX_LD_USE_RESPONSE_FILE:-@use_response_file_by_default@}" >= 1 )); then + @prog@ @<(printf "%q\n" \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"}) +else + @prog@ \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} +fi + +if [ -e "@out@/nix-support/post-link-hook" ]; then + source @out@/nix-support/post-link-hook +fi diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/macos-sierra-reexport-hack.bash b/nixpkgs/pkgs/build-support/bintools-wrapper/macos-sierra-reexport-hack.bash new file mode 100644 index 000000000000..255071adf681 --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/macos-sierra-reexport-hack.bash @@ -0,0 +1,246 @@ +#! @shell@ + +set -eu -o pipefail + +# For cmd | while read; do ...; done +shopt -s lastpipe + +path_backup="$PATH" +if [ -n "@coreutils_bin@" ]; then + PATH="@coreutils_bin@/bin" +fi + +declare -ri recurThreshold=200 +declare -i overflowCount=0 + +declare -ar origArgs=("$@") + +# Throw away what we won't need +declare -a parentArgs=() + +while (( $# )); do + case "$1" in + -l) + echo "cctools LD does not support '-l foo'" >&2 + exit 1 + ;; + -lazy_library | -reexport_library | -upward_library | -weak_library) + overflowCount+=1 + shift 2 + ;; + -l* | *.so.* | *.dylib | -lazy-l* | -reexport-l* | -upward-l* | -weak-l*) + overflowCount+=1 + shift 1 + ;; + *.a | *.o) + shift 1 + ;; + -L | -F) + # Evidentally ld doesn't like using the child's RPATH, so it still + # needs these. + parentArgs+=("$1" "$2") + shift 2 + ;; + -L?* | -F?*) + parentArgs+=("$1") + shift 1 + ;; + -o) + outputName="$2" + parentArgs+=("$1" "$2") + shift 2 + ;; + -install_name | -dylib_install_name | -dynamic-linker | -plugin) + parentArgs+=("$1" "$2") + shift 2 + ;; + -rpath) + # Only an rpath to the child is needed, which we will add + shift 2 + ;; + *) + if [[ -f "$1" ]]; then + # Propabably a non-standard object file like Haskell's + # `.dyn_o`. Skip it like other inputs + : + else + parentArgs+=("$1") + fi + shift 1 + ;; + esac +done + + + +if (( "$overflowCount" <= "$recurThreshold" )); then + if [ -n "${NIX_DEBUG:-}" ]; then + echo "ld-wrapper: Only ${overflowCount} inputs counted while ${recurThreshold} is the ceiling, linking normally. " >&2 + fi + PATH="$path_backup" + exec @prog@ "${origArgs[@]}" +fi + + + +if [ -n "${NIX_DEBUG:-}" ]; then + echo "ld-wrapper: ${overflowCount} inputs counted when ${recurThreshold} is the ceiling, inspecting further. " >&2 +fi + +# Collect the normalized linker input +declare -a norm=() + +# Arguments are null-separated +@prog@ --dump-normalized-lib-args "${origArgs[@]}" | + while IFS= read -r -d '' input; do + norm+=("$input") + done + +declare -i leafCount=0 +declare lastLeaf='' +declare -a childrenInputs=() trailingInputs=() +while (( "${#norm[@]}" )); do + case "${norm[0]}" in + -lazy_library | -upward_library) + # TODO(@Ericson2314): Don't do that, but intersperse children + # between such args. 
+ echo "ld-wrapper: Warning: Potentially changing link order" >&2 + trailingInputs+=("${norm[0]}" "${norm[1]}") + norm=("${norm[@]:2}") + ;; + -reexport_library | -weak_library) + childrenInputs+=("${norm[0]}" "${norm[1]}") + if [[ "${norm[1]}" != "$lastLeaf" ]]; then + leafCount+=1 + lastLeaf="${norm[1]}" + fi + norm=("${norm[@]:2}") + ;; + *.so | *.dylib) + childrenInputs+=(-reexport_library "${norm[0]}") + if [[ "${norm[0]}" != "$lastLeaf" ]]; then + leafCount+=1 + lastLeaf="${norm[0]}" + fi + norm=("${norm[@]:1}") + ;; + *.o | *.a) + # Don't delegate object files or static libs + parentArgs+=("${norm[0]}") + norm=("${norm[@]:1}") + ;; + *) + if [[ -f "${norm[0]}" ]]; then + # Propabably a non-standard object file. We'll let it by. + parentArgs+=("${norm[0]}") + norm=("${norm[@]:1}") + else + echo "ld-wrapper: Internal Error: Invalid normalized argument" >&2 + exit 255 + fi + ;; + esac +done + + + +if (( "$leafCount" <= "$recurThreshold" )); then + if [ -n "${NIX_DEBUG:-}" ]; then + echo "ld-wrapper: Only ${leafCount} *dynamic* inputs counted while ${recurThreshold} is the ceiling, linking normally. " >&2 + fi + PATH="$path_backup" + exec @prog@ "${origArgs[@]}" +fi + + + +if [ -n "${NIX_DEBUG:-}" ]; then + echo "ld-wrapper: ${leafCount} *dynamic* inputs counted when ${recurThreshold} is the ceiling, delegating to children. " >&2 +fi + +declare -r outputNameLibless=$( \ + if [[ -z "${outputName:+isUndefined}" ]]; then + echo unnamed + return 0; + fi + baseName=$(basename ${outputName}) + if [[ "$baseName" = lib* ]]; then + baseName="${baseName:3}" + fi + echo "$baseName") + +declare -ra children=( + "$outputNameLibless-reexport-delegate-0" + "$outputNameLibless-reexport-delegate-1" +) + +mkdir -p "$out/lib" + +symbolBloatObject=$outputNameLibless-symbol-hack.o +if [[ ! -f $symbolBloatObject ]]; then + # `-Q` means use GNU Assembler rather than Clang, avoiding an awkward + # dependency cycle. 
+ printf '.private_extern _______child_hack_foo\nchild_hack_foo:\n' | + PATH="$PATH:@out@/bin" @targetPrefix@as -Q -- -o $symbolBloatObject +fi + +# Split inputs between children +declare -a child0Inputs=() child1Inputs=("${childrenInputs[@]}") +let "countFirstChild = $leafCount / 2" || true +lastLeaf='' +while (( "$countFirstChild" )); do + case "${child1Inputs[0]}" in + -reexport_library | -weak_library) + child0Inputs+=("${child1Inputs[0]}" "${child1Inputs[1]}") + if [[ "${child1Inputs[1]}" != "$lastLeaf" ]]; then + let countFirstChild-=1 || true + lastLeaf="${child1Inputs[1]}" + fi + child1Inputs=("${child1Inputs[@]:2}") + ;; + *.so | *.dylib) + child0Inputs+=(-reexport_library "${child1Inputs[0]}") + if [[ "${child1Inputs[0]}" != "$lastLeaf" ]]; then + let countFirstChild-=1 || true + lastLeaf="${child1Inputs[1]}" + fi + child1Inputs=("${child1Inputs[@]:2}") + ;; + *) + echo "ld-wrapper: Internal Error: Invalid delegated input" >&2 + exit -1 + ;; + esac +done + + +# First half of libs +@out@/bin/@targetPrefix@ld \ + -macosx_version_min $MACOSX_DEPLOYMENT_TARGET -arch x86_64 -dylib \ + -o "$out/lib/lib${children[0]}.dylib" \ + -install_name "$out/lib/lib${children[0]}.dylib" \ + "$symbolBloatObject" "${child0Inputs[@]}" "${trailingInputs[@]}" + +# Second half of libs +@out@/bin/@targetPrefix@ld \ + -macosx_version_min $MACOSX_DEPLOYMENT_TARGET -arch x86_64 -dylib \ + -o "$out/lib/lib${children[1]}.dylib" \ + -install_name "$out/lib/lib${children[1]}.dylib" \ + "$symbolBloatObject" "${child1Inputs[@]}" "${trailingInputs[@]}" + +parentArgs+=("-L$out/lib" -rpath "$out/lib") +if [[ $outputName != *reexport-delegate* ]]; then + parentArgs+=("-l${children[0]}" "-l${children[1]}") +else + parentArgs+=("-reexport-l${children[0]}" "-reexport-l${children[1]}") +fi + +parentArgs+=("${trailingInputs[@]}") + +if [ -n "${NIX_DEBUG:-}" ]; then + echo "flags using delegated children to @prog@:" >&2 + printf " %q\n" "${parentArgs[@]}" >&2 +fi + +PATH="$path_backup" +exec @prog@ "${parentArgs[@]}" diff --git a/nixpkgs/pkgs/build-support/bintools-wrapper/setup-hook.sh b/nixpkgs/pkgs/build-support/bintools-wrapper/setup-hook.sh new file mode 100644 index 000000000000..7e9547b96c25 --- /dev/null +++ b/nixpkgs/pkgs/build-support/bintools-wrapper/setup-hook.sh @@ -0,0 +1,72 @@ +# Binutils Wrapper hygiene +# +# See comments in cc-wrapper's setup hook. This works exactly the same way. + +# Skip setup hook if we're neither a build-time dep, nor, temporarily, doing a +# native compile. +# +# TODO(@Ericson2314): No native exception +[[ -z ${strictDeps-} ]] || (( "$hostOffset" < 0 )) || return 0 + +bintoolsWrapper_addLDVars () { + # See ../setup-hooks/role.bash + local role_post + getHostRoleEnvHook + + if [[ -d "$1/lib64" && ! -L "$1/lib64" ]]; then + export NIX_LDFLAGS${role_post}+=" -L$1/lib64" + fi + + if [[ -d "$1/lib" ]]; then + # Don't add the /lib directory if it actually doesn't contain any libraries. For instance, + # Python and Haskell packages often only have directories like $out/lib/ghc-8.4.3/ or + # $out/lib/python3.6/, so having them in LDFLAGS just makes the linker search unnecessary + # directories and bloats the size of the environment variable space. 
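+    # For example (package layouts invented for illustration): a Python package
+    # whose only lib content is $1/lib/python3.6/site-packages matches nothing
+    # under lib/lib*, so no -L flag is added for it, whereas a zlib output with
+    # $1/lib/libz.so does match and "-L$1/lib" is appended below.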
+ local -a glob=( $1/lib/lib* ) + if [ "${#glob[*]}" -gt 0 ]; then + export NIX_LDFLAGS${role_post}+=" -L$1/lib" + fi + fi +} + +# See ../setup-hooks/role.bash +getTargetRole +getTargetRoleWrapper + +addEnvHooks "$targetOffset" bintoolsWrapper_addLDVars + +# shellcheck disable=SC2157 +if [ -n "@bintools_bin@" ]; then + addToSearchPath _PATH @bintools_bin@/bin +fi + +# shellcheck disable=SC2157 +if [ -n "@libc_bin@" ]; then + addToSearchPath _PATH @libc_bin@/bin +fi + +# shellcheck disable=SC2157 +if [ -n "@coreutils_bin@" ]; then + addToSearchPath _PATH @coreutils_bin@/bin +fi + +# Export tool environment variables so various build systems use the right ones. + +export NIX_BINTOOLS${role_post}=@out@ + +for cmd in \ + ar as ld nm objcopy objdump readelf ranlib strip strings size windres +do + if + PATH=$_PATH type -p "@targetPrefix@${cmd}" > /dev/null + then + export "${cmd^^}${role_post}=@targetPrefix@${cmd}"; + fi +done + +# If unset, assume the default hardening flags. +: ${NIX_HARDENING_ENABLE="fortify stackprotector pic strictoverflow format relro bindnow"} +export NIX_HARDENING_ENABLE + +# No local scope in sourced file +unset -v role_post cmd upper_case diff --git a/nixpkgs/pkgs/build-support/build-bazel-package/default.nix b/nixpkgs/pkgs/build-support/build-bazel-package/default.nix new file mode 100644 index 000000000000..3ffff74f70e2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-bazel-package/default.nix @@ -0,0 +1,294 @@ +{ stdenv +, cacert +, lib +, writeCBin +}: + +args@{ + name ? "${args.pname}-${args.version}" +, bazel +, bazelFlags ? [] +, bazelBuildFlags ? [] +, bazelTestFlags ? [] +, bazelRunFlags ? [] +, runTargetFlags ? [] +, bazelFetchFlags ? [] +, bazelTargets ? [] +, bazelTestTargets ? [] +, bazelRunTarget ? null +, buildAttrs +, fetchAttrs + + # Newer versions of Bazel are moving away from built-in rules_cc and instead + # allow fetching it as an external dependency in a WORKSPACE file[1]. If + # removed in the fixed-output fetch phase, building will fail to download it. + # This can be seen e.g. in #73097 + # + # This option allows configuring the removal of rules_cc in cases where a + # project depends on it via an external dependency. + # + # [1]: https://github.com/bazelbuild/rules_cc +, removeRulesCC ? true +, removeLocalConfigCc ? true +, removeLocal ? true + + # Use build --nobuild instead of fetch. This allows fetching the dependencies + # required for the build as configured, rather than fetching all the dependencies + # which may not work in some situations (e.g. Java code which ends up relying on + # Debian-specific /usr/share/java paths, but doesn't in the configured build). +, fetchConfigured ? true + + # Don’t add Bazel --copt and --linkopt from NIX_CFLAGS_COMPILE / + # NIX_LDFLAGS. This is necessary when using a custom toolchain which + # Bazel wants all headers / libraries to come from, like when using + # CROSSTOOL. Weirdly, we can still get the flags through the wrapped + # compiler. +, dontAddBazelOpts ? false +, ... +}: + +let + fArgs = removeAttrs args [ "buildAttrs" "fetchAttrs" "removeRulesCC" ] // { + inherit + name + bazelFlags + bazelBuildFlags + bazelTestFlags + bazelRunFlags + runTargetFlags + bazelFetchFlags + bazelTargets + bazelTestTargets + bazelRunTarget + dontAddBazelOpts + ; + }; + fBuildAttrs = fArgs // buildAttrs; + fFetchAttrs = fArgs // removeAttrs fetchAttrs [ "sha256" ]; + bazelCmd = { cmd, additionalFlags, targets, targetRunFlags ? 
[ ] }: + lib.optionalString (targets != [ ]) '' + # See footnote called [USER and BAZEL_USE_CPP_ONLY_TOOLCHAIN variables] + BAZEL_USE_CPP_ONLY_TOOLCHAIN=1 \ + USER=homeless-shelter \ + bazel \ + --batch \ + --output_base="$bazelOut" \ + --output_user_root="$bazelUserRoot" \ + ${cmd} \ + --curses=no \ + "''${copts[@]}" \ + "''${host_copts[@]}" \ + "''${linkopts[@]}" \ + "''${host_linkopts[@]}" \ + $bazelFlags \ + ${lib.strings.concatStringsSep " " additionalFlags} \ + ${lib.strings.concatStringsSep " " targets} \ + ${lib.optionalString (targetRunFlags != []) " -- " + lib.strings.concatStringsSep " " targetRunFlags} + ''; + # we need this to chmod dangling symlinks on darwin, gnu coreutils refuses to do so: + # chmod: cannot operate on dangling symlink '$symlink' + chmodder = writeCBin "chmodder" '' + #include <stdio.h> + #include <stdlib.h> + #include <sys/types.h> + #include <sys/stat.h> + #include <errno.h> + #include <string.h> + + int main(int argc, char** argv) { + mode_t mode = S_IRWXU | S_IRWXG | S_IRWXO; + if (argc != 2) { + fprintf(stderr, "usage: chmodder file"); + exit(EXIT_FAILURE); + } + if (lchmod(argv[1], mode) != 0) { + fprintf(stderr, "failed to lchmod '%s': %s", argv[0], strerror(errno)); + exit(EXIT_FAILURE); + } + } + ''; +in +stdenv.mkDerivation (fBuildAttrs // { + + deps = stdenv.mkDerivation (fFetchAttrs // { + name = "${name}-deps.tar.gz"; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ fFetchAttrs.impureEnvVars or []; + + nativeBuildInputs = fFetchAttrs.nativeBuildInputs or [] ++ [ bazel ]; + + preHook = fFetchAttrs.preHook or "" + '' + export bazelOut="$(echo ''${NIX_BUILD_TOP}/output | sed -e 's,//,/,g')" + export bazelUserRoot="$(echo ''${NIX_BUILD_TOP}/tmp | sed -e 's,//,/,g')" + export HOME="$NIX_BUILD_TOP" + export USER="nix" + # This is needed for git_repository with https remotes + export GIT_SSL_CAINFO="${cacert}/etc/ssl/certs/ca-bundle.crt" + # This is needed for Bazel fetchers that are themselves programs (e.g. 
+ # rules_go using the go toolchain) + export SSL_CERT_FILE="${cacert}/etc/ssl/certs/ca-bundle.crt" + ''; + + buildPhase = fFetchAttrs.buildPhase or '' + runHook preBuild + + ${ + bazelCmd { + cmd = if fetchConfigured then "build --nobuild" else "fetch"; + additionalFlags = [ + # We disable multithreading for the fetching phase since it can lead to timeouts with many dependencies/threads: + # https://github.com/bazelbuild/bazel/issues/6502 + "--loading_phase_threads=1" + "$bazelFetchFlags" + ] ++ (if fetchConfigured then ["--jobs" "$NIX_BUILD_CORES"] else []); + targets = fFetchAttrs.bazelTargets ++ fFetchAttrs.bazelTestTargets; + } + } + + runHook postBuild + ''; + + installPhase = fFetchAttrs.installPhase or ('' + runHook preInstall + + # Remove all built in external workspaces, Bazel will recreate them when building + rm -rf $bazelOut/external/{bazel_tools,\@bazel_tools.marker} + ${lib.optionalString removeRulesCC "rm -rf $bazelOut/external/{rules_cc,\\@rules_cc.marker}"} + rm -rf $bazelOut/external/{embedded_jdk,\@embedded_jdk.marker} + ${lib.optionalString removeLocalConfigCc "rm -rf $bazelOut/external/{local_config_cc,\\@local_config_cc.marker}"} + ${lib.optionalString removeLocal "rm -rf $bazelOut/external/{local_*,\\@local_*.marker}"} + + # Clear markers + find $bazelOut/external -name '@*\.marker' -exec sh -c 'echo > {}' \; + + # Remove all vcs files + rm -rf $(find $bazelOut/external -type d -name .git) + rm -rf $(find $bazelOut/external -type d -name .svn) + rm -rf $(find $bazelOut/external -type d -name .hg) + + # Removing top-level symlinks along with their markers. + # This is needed because they sometimes point to temporary paths (?). + # For example, in Tensorflow-gpu build: + # platforms -> NIX_BUILD_TOP/tmp/install/35282f5123611afa742331368e9ae529/_embedded_binaries/platforms + find $bazelOut/external -maxdepth 1 -type l | while read symlink; do + name="$(basename "$symlink")" + rm "$symlink" + test -f "$bazelOut/external/@$name.marker" && rm "$bazelOut/external/@$name.marker" || true + done + + # Patching symlinks to remove build directory reference + find $bazelOut/external -type l | while read symlink; do + new_target="$(readlink "$symlink" | sed "s,$NIX_BUILD_TOP,NIX_BUILD_TOP,")" + rm "$symlink" + ln -sf "$new_target" "$symlink" + '' + lib.optionalString stdenv.isDarwin '' + # on linux symlink permissions cannot be modified, so we modify those on darwin to match the linux ones + ${chmodder}/bin/chmodder "$symlink" + '' + '' + done + + echo '${bazel.name}' > $bazelOut/external/.nix-bazel-version + + (cd $bazelOut/ && tar czf $out --sort=name --mtime='@1' --owner=0 --group=0 --numeric-owner external/) + + runHook postInstall + ''); + + dontFixup = true; + allowedRequisites = []; + + outputHashAlgo = "sha256"; + outputHash = fetchAttrs.sha256; + }); + + nativeBuildInputs = fBuildAttrs.nativeBuildInputs or [] ++ [ (bazel.override { enableNixHacks = true; }) ]; + + preHook = fBuildAttrs.preHook or "" + '' + export bazelOut="$NIX_BUILD_TOP/output" + export bazelUserRoot="$NIX_BUILD_TOP/tmp" + export HOME="$NIX_BUILD_TOP" + ''; + + preConfigure = '' + mkdir -p "$bazelOut" + + (cd $bazelOut && tar xfz $deps) + + test "${bazel.name}" = "$(<$bazelOut/external/.nix-bazel-version)" || { + echo "fixed output derivation was built for a different bazel version" >&2 + echo " got: $(<$bazelOut/external/.nix-bazel-version)" >&2 + echo "expected: ${bazel.name}" >&2 + exit 1 + } + + chmod -R +w $bazelOut + find $bazelOut -type l | while read symlink; do + if [[ $(readlink "$symlink") == 
*NIX_BUILD_TOP* ]]; then + ln -sf $(readlink "$symlink" | sed "s,NIX_BUILD_TOP,$NIX_BUILD_TOP,") "$symlink" + fi + done + '' + fBuildAttrs.preConfigure or ""; + + buildPhase = fBuildAttrs.buildPhase or '' + runHook preBuild + + # Bazel sandboxes the execution of the tools it invokes, so even though we are + # calling the correct nix wrappers, the values of the environment variables + # the wrappers are expecting will not be set. So instead of relying on the + # wrappers picking them up, pass them in explicitly via `--copt`, `--linkopt` + # and related flags. + + copts=() + host_copts=() + linkopts=() + host_linkopts=() + if [ -z "''${dontAddBazelOpts:-}" ]; then + for flag in $NIX_CFLAGS_COMPILE; do + copts+=( "--copt=$flag" ) + host_copts+=( "--host_copt=$flag" ) + done + for flag in $NIX_CXXSTDLIB_COMPILE; do + copts+=( "--copt=$flag" ) + host_copts+=( "--host_copt=$flag" ) + done + for flag in $NIX_LDFLAGS; do + linkopts+=( "--linkopt=-Wl,$flag" ) + host_linkopts+=( "--host_linkopt=-Wl,$flag" ) + done + fi + + ${ + bazelCmd { + cmd = "test"; + additionalFlags = + ["--test_output=errors"] ++ fBuildAttrs.bazelTestFlags ++ ["--jobs" "$NIX_BUILD_CORES"]; + targets = fBuildAttrs.bazelTestTargets; + } + } + ${ + bazelCmd { + cmd = "build"; + additionalFlags = fBuildAttrs.bazelBuildFlags ++ ["--jobs" "$NIX_BUILD_CORES"]; + targets = fBuildAttrs.bazelTargets; + } + } + ${ + bazelCmd { + cmd = "run"; + additionalFlags = fBuildAttrs.bazelRunFlags ++ [ "--jobs" "$NIX_BUILD_CORES" ]; + # Bazel run only accepts a single target, but `bazelCmd` expects `targets` to be a list. + targets = lib.optionals (fBuildAttrs.bazelRunTarget != null) [ fBuildAttrs.bazelRunTarget ]; + targetRunFlags = fBuildAttrs.runTargetFlags; + } + } + runHook postBuild + ''; +}) + +# [USER and BAZEL_USE_CPP_ONLY_TOOLCHAIN variables]: +# Bazel computes the default value of output_user_root before parsing the +# flag. The computation of the default value involves getting the $USER +# from the environment. Code here : +# https://github.com/bazelbuild/bazel/blob/9323c57607d37f9c949b60e293b573584906da46/src/main/cpp/startup_options.cc#L123-L124 +# +# On macOS Bazel will use the system installed Xcode or CLT toolchain instead of the one in the PATH unless we pass BAZEL_USE_CPP_ONLY_TOOLCHAIN. diff --git a/nixpkgs/pkgs/build-support/build-fhsenv-bubblewrap/buildFHSEnv.nix b/nixpkgs/pkgs/build-support/build-fhsenv-bubblewrap/buildFHSEnv.nix new file mode 100644 index 000000000000..b5e03164ac26 --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-fhsenv-bubblewrap/buildFHSEnv.nix @@ -0,0 +1,263 @@ +{ lib +, stdenv +, runCommandLocal +, buildEnv +, writeText +, writeShellScriptBin +, pkgs +, pkgsi686Linux +}: + +{ name ? null +, profile ? "" +, targetPkgs ? pkgs: [] +, multiPkgs ? pkgs: [] +, multiArch ? false # Whether to include 32bit packages +, extraBuildCommands ? "" +, extraBuildCommandsMulti ? "" +, extraOutputsToInstall ? [] +} @ args: + +# HOWTO: +# All packages (most likely programs) returned from targetPkgs will only be +# installed once--matching the host's architecture (64bit on x86_64 and 32bit on +# x86). +# +# Packages (most likely libraries) returned from multiPkgs are installed +# once on x86 systems and twice on x86_64 systems. +# On x86 they are merged with packages from targetPkgs. +# On x86_64 they are added to targetPkgs and in addition their 32bit +# versions are also installed. 
The final directory structure looks as +# follows: +# /lib32 will include 32bit libraries from multiPkgs +# /lib64 will include 64bit libraries from multiPkgs and targetPkgs +# /lib will link to /lib32 + +let + inherit (stdenv) is64bit; + + # "use of glibc_multi is only supported on x86_64-linux" + isMultiBuild = multiArch && stdenv.system == "x86_64-linux"; + isTargetBuild = !isMultiBuild; + + # list of packages (usually programs) which are only be installed for the + # host's architecture + targetPaths = targetPkgs pkgs ++ (if multiPkgs == null then [] else multiPkgs pkgs); + + # list of packages which are installed for both x86 and x86_64 on x86_64 + # systems + multiPaths = multiPkgs pkgsi686Linux; + + # base packages of the chroot + # these match the host's architecture, glibc_multi is used for multilib + # builds. glibcLocales must be before glibc or glibc_multi as otherwiese + # the wrong LOCALE_ARCHIVE will be used where only C.UTF-8 is available. + baseTargetPaths = with pkgs; [ + glibcLocales + (if isMultiBuild then glibc_multi else glibc) + (toString gcc.cc.lib) + bashInteractiveFHS + coreutils + less + shadow + su + gawk + diffutils + findutils + gnused + gnugrep + gnutar + gzip + bzip2 + xz + ]; + baseMultiPaths = with pkgsi686Linux; [ + (toString gcc.cc.lib) + ]; + + ldconfig = writeShellScriptBin "ldconfig" '' + # due to a glibc bug, 64-bit ldconfig complains about patchelf'd 32-bit libraries, so we're using 32-bit ldconfig + exec ${if stdenv.system == "x86_64-linux" then pkgsi686Linux.glibc.bin else pkgs.glibc.bin}/bin/ldconfig -f /etc/ld.so.conf -C /etc/ld.so.cache "$@" + ''; + + etcProfile = writeText "profile" '' + export PS1='${name}-chrootenv:\u@\h:\w\$ ' + export LOCALE_ARCHIVE='/usr/lib/locale/locale-archive' + export LD_LIBRARY_PATH="/run/opengl-driver/lib:/run/opengl-driver-32/lib''${LD_LIBRARY_PATH:+:}$LD_LIBRARY_PATH" + export PATH="/run/wrappers/bin:/usr/bin:/usr/sbin:$PATH" + export TZDIR='/etc/zoneinfo' + + # XDG_DATA_DIRS is used by pressure-vessel (steam proton) and vulkan loaders to find the corresponding icd + export XDG_DATA_DIRS=$XDG_DATA_DIRS''${XDG_DATA_DIRS:+:}/run/opengl-driver/share:/run/opengl-driver-32/share + + # Following XDG spec [1], XDG_DATA_DIRS should default to "/usr/local/share:/usr/share". + # In nix, it is commonly set without containing these values, so we add them as fallback. 
+ # + # [1] <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html> + case ":$XDG_DATA_DIRS:" in + *:/usr/local/share:*) ;; + *) export XDG_DATA_DIRS="$XDG_DATA_DIRS''${XDG_DATA_DIRS:+:}/usr/local/share" ;; + esac + case ":$XDG_DATA_DIRS:" in + *:/usr/share:*) ;; + *) export XDG_DATA_DIRS="$XDG_DATA_DIRS''${XDG_DATA_DIRS:+:}/usr/share" ;; + esac + + # Force compilers and other tools to look in default search paths + unset NIX_ENFORCE_PURITY + export NIX_BINTOOLS_WRAPPER_TARGET_HOST_${stdenv.cc.suffixSalt}=1 + export NIX_CC_WRAPPER_TARGET_HOST_${stdenv.cc.suffixSalt}=1 + export NIX_CFLAGS_COMPILE='-idirafter /usr/include' + export NIX_CFLAGS_LINK='-L/usr/lib -L/usr/lib32' + export NIX_LDFLAGS='-L/usr/lib -L/usr/lib32' + export PKG_CONFIG_PATH=/usr/lib/pkgconfig + export ACLOCAL_PATH=/usr/share/aclocal + + ${profile} + ''; + + # Compose /etc for the chroot environment + etcPkg = runCommandLocal "${name}-chrootenv-etc" { } '' + mkdir -p $out/etc + pushd $out/etc + + # environment variables + ln -s ${etcProfile} profile + + # symlink /etc/mtab -> /proc/mounts (compat for old userspace progs) + ln -s /proc/mounts mtab + ''; + + # Composes a /usr-like directory structure + staticUsrProfileTarget = buildEnv { + name = "${name}-usr-target"; + # ldconfig wrapper must come first so it overrides the original ldconfig + paths = [ etcPkg ldconfig ] ++ baseTargetPaths ++ targetPaths; + extraOutputsToInstall = [ "out" "lib" "bin" ] ++ extraOutputsToInstall; + ignoreCollisions = true; + postBuild = '' + if [[ -d $out/share/gsettings-schemas/ ]]; then + # Recreate the standard schemas directory if its a symlink to make it writable + if [[ -L $out/share/glib-2.0 ]]; then + target=$(readlink $out/share/glib-2.0) + rm $out/share/glib-2.0 + mkdir $out/share/glib-2.0 + ln -fs $target/* $out/share/glib-2.0 + fi + + if [[ -L $out/share/glib-2.0/schemas ]]; then + target=$(readlink $out/share/glib-2.0/schemas) + rm $out/share/glib-2.0/schemas + mkdir $out/share/glib-2.0/schemas + ln -fs $target/* $out/share/glib-2.0/schemas + fi + + mkdir -p $out/share/glib-2.0/schemas + + for d in $out/share/gsettings-schemas/*; do + # Force symlink, in case there are duplicates + ln -fs $d/glib-2.0/schemas/*.xml $out/share/glib-2.0/schemas + ln -fs $d/glib-2.0/schemas/*.gschema.override $out/share/glib-2.0/schemas + done + + # and compile them + ${pkgs.glib.dev}/bin/glib-compile-schemas $out/share/glib-2.0/schemas + fi + ''; + }; + + staticUsrProfileMulti = buildEnv { + name = "${name}-usr-multi"; + paths = baseMultiPaths ++ multiPaths; + extraOutputsToInstall = [ "out" "lib" ] ++ extraOutputsToInstall; + ignoreCollisions = true; + }; + + # setup library paths only for the targeted architecture + setupLibDirsTarget = '' + # link content of targetPaths + cp -rsHf ${staticUsrProfileTarget}/lib lib + ln -s lib lib${if is64bit then "64" else "32"} + ''; + + # setup /lib, /lib32 and /lib64 + setupLibDirsMulti = '' + mkdir -m0755 lib32 + mkdir -m0755 lib64 + ln -s lib64 lib + + # copy glibc stuff + cp -rsHf ${staticUsrProfileTarget}/lib/32/* lib32/ + chmod u+w -R lib32/ + + # copy content of multiPaths (32bit libs) + if [ -d ${staticUsrProfileMulti}/lib ]; then + cp -rsHf ${staticUsrProfileMulti}/lib/* lib32/ + chmod u+w -R lib32/ + fi + + # copy content of targetPaths (64bit libs) + cp -rsHf ${staticUsrProfileTarget}/lib/* lib64/ + chmod u+w -R lib64/ + + # symlink 32-bit ld-linux.so + ln -Ls ${staticUsrProfileTarget}/lib/32/ld-linux.so.2 lib/ + ''; + + setupLibDirs = if isTargetBuild + then setupLibDirsTarget 
+ else setupLibDirsMulti; + + # the target profile is the actual profile that will be used for the chroot + setupTargetProfile = '' + mkdir -m0755 usr + pushd usr + + ${setupLibDirs} + + '' + lib.optionalString isMultiBuild '' + if [ -d "${staticUsrProfileMulti}/share" ]; then + cp -rLf ${staticUsrProfileMulti}/share share + fi + '' + '' + if [ -d "${staticUsrProfileTarget}/share" ]; then + if [ -d share ]; then + chmod -R 755 share + cp -rLTf ${staticUsrProfileTarget}/share share + else + cp -rsHf ${staticUsrProfileTarget}/share share + fi + fi + for i in bin sbin include; do + if [ -d "${staticUsrProfileTarget}/$i" ]; then + cp -rsHf "${staticUsrProfileTarget}/$i" "$i" + fi + done + cd .. + + for i in var etc opt; do + if [ -d "${staticUsrProfileTarget}/$i" ]; then + cp -rsHf "${staticUsrProfileTarget}/$i" "$i" + fi + done + for i in usr/{bin,sbin,lib,lib32,lib64}; do + if [ -d "$i" ]; then + ln -s "$i" + fi + done + + popd + ''; + +in runCommandLocal "${name}-fhs" { + passthru = { + inherit args baseTargetPaths targetPaths baseMultiPaths multiPaths ldconfig; + }; +} '' + mkdir -p $out + pushd $out + + ${setupTargetProfile} + ${extraBuildCommands} + ${lib.optionalString isMultiBuild extraBuildCommandsMulti} +'' diff --git a/nixpkgs/pkgs/build-support/build-fhsenv-bubblewrap/default.nix b/nixpkgs/pkgs/build-support/build-fhsenv-bubblewrap/default.nix new file mode 100644 index 000000000000..3500e5e9216f --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-fhsenv-bubblewrap/default.nix @@ -0,0 +1,245 @@ +{ lib +, stdenv +, callPackage +, runCommandLocal +, writeShellScript +, glibc +, pkgsi686Linux +, coreutils +, bubblewrap +}: + +{ name ? null +, pname ? null +, version ? null +, runScript ? "bash" +, extraInstallCommands ? "" +, meta ? {} +, passthru ? {} +, extraBwrapArgs ? [] +, unshareUser ? false +, unshareIpc ? false +, unsharePid ? false +, unshareNet ? false +, unshareUts ? false +, unshareCgroup ? false +, dieWithParent ? true +, ... +} @ args: + +assert (pname != null || version != null) -> (name == null && pname != null); # You must declare either a name or pname + version (preferred). + +with builtins; +let + pname = if args ? 
name && args.name != null then args.name else args.pname; + versionStr = lib.optionalString (version != null) ("-" + version); + name = pname + versionStr; + + buildFHSEnv = callPackage ./buildFHSEnv.nix { }; + + fhsenv = buildFHSEnv (removeAttrs (args // { inherit name; }) [ + "runScript" "extraInstallCommands" "meta" "passthru" "extraBwrapArgs" "dieWithParent" + "unshareUser" "unshareCgroup" "unshareUts" "unshareNet" "unsharePid" "unshareIpc" + "pname" "version" + ]); + + etcBindEntries = let + files = [ + # NixOS Compatibility + "static" + "nix" # mainly for nixUnstable users, but also for access to nix/netrc + # Shells + "shells" + "bashrc" + "zshenv" + "zshrc" + "zinputrc" + "zprofile" + # Users, Groups, NSS + "passwd" + "group" + "shadow" + "hosts" + "resolv.conf" + "nsswitch.conf" + # User profiles + "profiles" + # Sudo & Su + "login.defs" + "sudoers" + "sudoers.d" + # Time + "localtime" + "zoneinfo" + # Other Core Stuff + "machine-id" + "os-release" + # PAM + "pam.d" + # Fonts + "fonts" + # ALSA + "alsa" + "asound.conf" + # SSL + "ssl/certs" + "ca-certificates" + "pki" + ]; + in map (path: "/etc/${path}") files; + + # Create this on the fly instead of linking from /nix + # The container might have to modify it and re-run ldconfig if there are + # issues running some binary with LD_LIBRARY_PATH + createLdConfCache = '' + cat > /etc/ld.so.conf <<EOF + /lib + /lib/x86_64-linux-gnu + /lib64 + /usr/lib + /usr/lib/x86_64-linux-gnu + /usr/lib64 + /lib/i386-linux-gnu + /lib32 + /usr/lib/i386-linux-gnu + /usr/lib32 + /run/opengl-driver/lib + /run/opengl-driver-32/lib + EOF + ldconfig &> /dev/null + ''; + init = run: writeShellScript "${name}-init" '' + source /etc/profile + ${createLdConfCache} + exec ${run} "$@" + ''; + + indentLines = str: lib.concatLines (map (s: " " + s) (filter (s: s != "") (lib.splitString "\n" str))); + bwrapCmd = { initArgs ? "" }: '' + ignored=(/nix /dev /proc /etc) + ro_mounts=() + symlinks=() + etc_ignored=() + for i in ${fhsenv}/*; do + path="/''${i##*/}" + if [[ $path == '/etc' ]]; then + : + elif [[ -L $i ]]; then + symlinks+=(--symlink "$(${coreutils}/bin/readlink "$i")" "$path") + ignored+=("$path") + else + ro_mounts+=(--ro-bind "$i" "$path") + ignored+=("$path") + fi + done + + if [[ -d ${fhsenv}/etc ]]; then + for i in ${fhsenv}/etc/*; do + path="/''${i##*/}" + # NOTE: we're binding /etc/fonts and /etc/ssl/certs from the host so we + # don't want to override it with a path from the FHS environment. + if [[ $path == '/fonts' || $path == '/ssl' ]]; then + continue + fi + ro_mounts+=(--ro-bind "$i" "/etc$path") + etc_ignored+=("/etc$path") + done + fi + + for i in ${lib.escapeShellArgs etcBindEntries}; do + if [[ "''${etc_ignored[@]}" =~ "$i" ]]; then + continue + fi + if [[ -L $i ]]; then + symlinks+=(--symlink "$(${coreutils}/bin/readlink "$i")" "$i") + else + ro_mounts+=(--ro-bind-try "$i" "$i") + fi + done + + declare -a auto_mounts + # loop through all directories in the root + for dir in /*; do + # if it is a directory and it is not ignored + if [[ -d "$dir" ]] && [[ ! "''${ignored[@]}" =~ "$dir" ]]; then + # add it to the mount list + auto_mounts+=(--bind "$dir" "$dir") + fi + done + + declare -a x11_args + # Always mount a tmpfs on /tmp/.X11-unix + # Rationale: https://github.com/flatpak/flatpak/blob/be2de97e862e5ca223da40a895e54e7bf24dbfb9/common/flatpak-run.c#L277 + x11_args+=(--tmpfs /tmp/.X11-unix) + + # Try to guess X socket path. This doesn't cover _everything_, but it covers some things. 
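+    # For example, with DISPLAY=:0 the branch below derives display number 0 and
+    # ro-binds /tmp/.X11-unix/X0 into the sandbox (if it exists); a TCP display
+    # such as localhost:10.0 does not match ":*" and is left untouched.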
+ if [[ "$DISPLAY" == :* ]]; then + display_nr=''${DISPLAY#?} + local_socket=/tmp/.X11-unix/X$display_nr + x11_args+=(--ro-bind-try "$local_socket" "$local_socket") + fi + + cmd=( + ${bubblewrap}/bin/bwrap + --dev-bind /dev /dev + --proc /proc + --chdir "$(pwd)" + ${lib.optionalString unshareUser "--unshare-user"} + ${lib.optionalString unshareIpc "--unshare-ipc"} + ${lib.optionalString unsharePid "--unshare-pid"} + ${lib.optionalString unshareNet "--unshare-net"} + ${lib.optionalString unshareUts "--unshare-uts"} + ${lib.optionalString unshareCgroup "--unshare-cgroup"} + ${lib.optionalString dieWithParent "--die-with-parent"} + --ro-bind /nix /nix + # Our glibc will look for the cache in its own path in `/nix/store`. + # As such, we need a cache to exist there, because pressure-vessel + # depends on the existence of an ld cache. However, adding one + # globally proved to be a bad idea (see #100655), the solution we + # settled on being mounting one via bwrap. + # Also, the cache needs to go to both 32 and 64 bit glibcs, for games + # of both architectures to work. + --tmpfs ${glibc}/etc \ + --symlink /etc/ld.so.conf ${glibc}/etc/ld.so.conf \ + --symlink /etc/ld.so.cache ${glibc}/etc/ld.so.cache \ + --ro-bind ${glibc}/etc/rpc ${glibc}/etc/rpc \ + --remount-ro ${glibc}/etc \ + '' + lib.optionalString (stdenv.isx86_64 && stdenv.isLinux) (indentLines '' + --tmpfs ${pkgsi686Linux.glibc}/etc \ + --symlink /etc/ld.so.conf ${pkgsi686Linux.glibc}/etc/ld.so.conf \ + --symlink /etc/ld.so.cache ${pkgsi686Linux.glibc}/etc/ld.so.cache \ + --ro-bind ${pkgsi686Linux.glibc}/etc/rpc ${pkgsi686Linux.glibc}/etc/rpc \ + --remount-ro ${pkgsi686Linux.glibc}/etc \ + '') + '' + "''${ro_mounts[@]}" + "''${symlinks[@]}" + "''${auto_mounts[@]}" + "''${x11_args[@]}" + ${concatStringsSep "\n " extraBwrapArgs} + ${init runScript} ${initArgs} + ) + exec "''${cmd[@]}" + ''; + + bin = writeShellScript "${name}-bwrap" (bwrapCmd { initArgs = ''"$@"''; }); +in runCommandLocal name { + inherit pname version; + inherit meta; + + passthru = passthru // { + env = runCommandLocal "${name}-shell-env" { + shellHook = bwrapCmd {}; + } '' + echo >&2 "" + echo >&2 "*** User chroot 'env' attributes are intended for interactive nix-shell sessions, not for building! 
***" + echo >&2 "" + exit 1 + ''; + inherit args fhsenv; + }; +} '' + mkdir -p $out/bin + ln -s ${bin} $out/bin/${pname} + + ${extraInstallCommands} +'' diff --git a/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/default.nix b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/default.nix new file mode 100644 index 000000000000..32ac43d41556 --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/default.nix @@ -0,0 +1,16 @@ +{ lib, stdenv, meson, ninja, pkg-config, glib }: + +stdenv.mkDerivation { + name = "chrootenv"; + src = ./src; + + nativeBuildInputs = [ meson ninja pkg-config ]; + buildInputs = [ glib ]; + + meta = with lib; { + description = "Setup mount/user namespace for FHS emulation"; + license = licenses.mit; + maintainers = with maintainers; [ yana ]; + platforms = platforms.linux; + }; +} diff --git a/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/chrootenv.c b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/chrootenv.c new file mode 100644 index 000000000000..c109d7297e17 --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/chrootenv.c @@ -0,0 +1,169 @@ +#define _GNU_SOURCE + +#include <glib.h> +#include <glib/gstdio.h> + +#include <errno.h> +#include <sched.h> +#include <unistd.h> + +#include <sys/mount.h> +#include <sys/stat.h> +#include <sys/types.h> +#include <sys/wait.h> +#include <sys/syscall.h> + +#define fail(s, err) g_error("%s: %s: %s", __func__, s, g_strerror(err)) +#define fail_if(expr) \ + if (expr) \ + fail(#expr, errno); + +const gchar *bind_blacklist[] = {"bin", "etc", "host", "real-host", "usr", "lib", "lib64", "lib32", "sbin", "opt", NULL}; + +int pivot_root(const char *new_root, const char *put_old) { + return syscall(SYS_pivot_root, new_root, put_old); +} + +void mount_tmpfs(const gchar *target) { + fail_if(mount("none", target, "tmpfs", 0, NULL)); +} + +void bind_mount(const gchar *source, const gchar *target) { + fail_if(g_mkdir(target, 0755)); + fail_if(mount(source, target, NULL, MS_BIND | MS_REC, NULL)); +} + +const gchar *create_tmpdir() { + gchar *prefix = + g_build_filename(g_get_tmp_dir(), "chrootenvXXXXXX", NULL); + fail_if(!g_mkdtemp_full(prefix, 0755)); + return prefix; +} + +void pivot_host(const gchar *guest) { + g_autofree gchar *point = g_build_filename(guest, "host", NULL); + fail_if(g_mkdir(point, 0755)); + fail_if(pivot_root(guest, point)); +} + +void bind_mount_item(const gchar *host, const gchar *guest, const gchar *name) { + g_autofree gchar *source = g_build_filename(host, name, NULL); + g_autofree gchar *target = g_build_filename(guest, name, NULL); + + if (G_LIKELY(g_file_test(source, G_FILE_TEST_IS_DIR))) + bind_mount(source, target); +} + +void bind(const gchar *host, const gchar *guest) { + mount_tmpfs(guest); + + pivot_host(guest); + + g_autofree gchar *host_dir = g_build_filename("/host", host, NULL); + + g_autoptr(GError) err = NULL; + g_autoptr(GDir) dir = g_dir_open(host_dir, 0, &err); + + if (err != NULL) + fail("g_dir_open", errno); + + const gchar *item; + + while ((item = g_dir_read_name(dir))) + if (!g_strv_contains(bind_blacklist, item)) + bind_mount_item(host_dir, "/", item); +} + +void spit(const char *path, char *fmt, ...) 
{ + va_list args; + va_start(args, fmt); + + FILE *f = g_fopen(path, "w"); + + if (f == NULL) + fail("g_fopen", errno); + + g_vfprintf(f, fmt, args); + fclose(f); +} + +int main(gint argc, gchar **argv) { + const gchar *self = *argv++; + + if (argc < 2) { + g_message("%s command [arguments...]", self); + return 1; + } + + g_autofree const gchar *prefix = create_tmpdir(); + + pid_t cpid = fork(); + + if (cpid < 0) + fail("fork", errno); + + else if (cpid == 0) { + uid_t uid = getuid(); + gid_t gid = getgid(); + + int namespaces = CLONE_NEWNS; + if (uid != 0) { + namespaces |= CLONE_NEWUSER; + } + if (unshare(namespaces) < 0) { + int unshare_errno = errno; + + g_message("Requires Linux version >= 3.19 built with CONFIG_USER_NS"); + if (g_file_test("/proc/sys/kernel/unprivileged_userns_clone", + G_FILE_TEST_EXISTS)) + g_message("Run: sudo sysctl -w kernel.unprivileged_userns_clone=1"); + + fail("unshare", unshare_errno); + } + + // hide all mounts we do from the parent + fail_if(mount(0, "/", 0, MS_SLAVE | MS_REC, 0)); + + if (uid != 0) { + spit("/proc/self/setgroups", "deny"); + spit("/proc/self/uid_map", "%d %d 1", uid, uid); + spit("/proc/self/gid_map", "%d %d 1", gid, gid); + } + + // If there is a /host directory, assume this is nested chrootenv and use it as host instead. + gboolean nested_host = g_file_test("/host", G_FILE_TEST_EXISTS | G_FILE_TEST_IS_DIR); + g_autofree const gchar *host = nested_host ? "/host" : "/"; + + bind(host, prefix); + + // Replace /host by an actual (inner) /host. + if (nested_host) { + fail_if(g_mkdir("/real-host", 0755)); + fail_if(mount("/host/host", "/real-host", NULL, MS_BIND | MS_REC, NULL)); + // For some reason umount("/host") returns EBUSY even immediately after + // pivot_root. We detach it at least to keep `/proc/mounts` from blowing + // up in nested cases. + fail_if(umount2("/host", MNT_DETACH)); + fail_if(mount("/real-host", "/host", NULL, MS_MOVE, NULL)); + fail_if(rmdir("/real-host")); + } + + fail_if(chdir("/")); + fail_if(execvp(*argv, argv)); + } + + else { + int status; + + fail_if(waitpid(cpid, &status, 0) != cpid); + fail_if(rmdir(prefix)); + + if (WIFEXITED(status)) + return WEXITSTATUS(status); + + else if (WIFSIGNALED(status)) + kill(getpid(), WTERMSIG(status)); + + return 1; + } +} diff --git a/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/meson.build b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/meson.build new file mode 100644 index 000000000000..6d0770a0dc4a --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/meson.build @@ -0,0 +1,5 @@ +project('chrootenv', 'c') + +glib = dependency('glib-2.0') + +executable('chrootenv', 'chrootenv.c', dependencies: [glib], install: true) diff --git a/nixpkgs/pkgs/build-support/build-fhsenv-chroot/default.nix b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/default.nix new file mode 100644 index 000000000000..6f0adfb4e08c --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/default.nix @@ -0,0 +1,53 @@ +{ lib, callPackage, runCommandLocal, writeScript, stdenv, coreutils }: + +let buildFHSEnv = callPackage ./env.nix { }; in + +args@{ name, version ? null, runScript ? "bash", extraInstallCommands ? "", meta ? {}, passthru ? {}, ... }: + +let + env = buildFHSEnv (removeAttrs args [ "version" "runScript" "extraInstallCommands" "meta" "passthru" ]); + + chrootenv = callPackage ./chrootenv {}; + + init = run: writeScript "${name}-init" '' + #! 
${stdenv.shell} + for i in ${env}/* /host/*; do + path="/''${i##*/}" + [ -e "$path" ] || ${coreutils}/bin/ln -s "$i" "$path" + done + + [ -d "$1" ] && [ -r "$1" ] && cd "$1" + shift + + source /etc/profile + exec ${run} "$@" + ''; + + versionStr = lib.optionalString (version != null) ("-" + version); + + nameAndVersion = name + versionStr; + +in runCommandLocal nameAndVersion { + inherit meta; + + passthru = passthru // { + env = runCommandLocal "${name}-shell-env" { + shellHook = '' + exec ${chrootenv}/bin/chrootenv ${init runScript} "$(pwd)" + ''; + } '' + echo >&2 "" + echo >&2 "*** User chroot 'env' attributes are intended for interactive nix-shell sessions, not for building! ***" + echo >&2 "" + exit 1 + ''; + }; +} '' + mkdir -p $out/bin + cat <<EOF >$out/bin/${name} + #! ${stdenv.shell} + exec ${chrootenv}/bin/chrootenv ${init runScript} "\$(pwd)" "\$@" + EOF + chmod +x $out/bin/${name} + ${extraInstallCommands} +'' diff --git a/nixpkgs/pkgs/build-support/build-fhsenv-chroot/env.nix b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/env.nix new file mode 100644 index 000000000000..a1a26472373f --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-fhsenv-chroot/env.nix @@ -0,0 +1,258 @@ +{ stdenv, lib, buildEnv, writeText, pkgs, pkgsi686Linux }: + +{ name +, profile ? "" +, targetPkgs ? pkgs: [] +, multiPkgs ? pkgs: [] +, extraBuildCommands ? "" +, extraBuildCommandsMulti ? "" +, extraOutputsToInstall ? [] +}: + +# HOWTO: +# All packages (most likely programs) returned from targetPkgs will only be +# installed once--matching the host's architecture (64bit on x86_64 and 32bit on +# x86). +# +# Packages (most likely libraries) returned from multiPkgs are installed +# once on x86 systems and twice on x86_64 systems. +# On x86 they are merged with packages from targetPkgs. +# On x86_64 they are added to targetPkgs and in addition their 32bit +# versions are also installed. The final directory structure looks as +# follows: +# /lib32 will include 32bit libraries from multiPkgs +# /lib64 will include 64bit libraries from multiPkgs and targetPkgs +# /lib will link to /lib32 + +let + is64Bit = stdenv.hostPlatform.parsed.cpu.bits == 64; + # multi-lib glibc is only supported on x86_64 + isMultiBuild = multiPkgs != null && stdenv.hostPlatform.system == "x86_64-linux"; + isTargetBuild = !isMultiBuild; + + # list of packages (usually programs) which are only be installed for the + # host's architecture + targetPaths = targetPkgs pkgs ++ (if multiPkgs == null then [] else multiPkgs pkgs); + + # list of packages which are installed for both x86 and x86_64 on x86_64 + # systems + multiPaths = multiPkgs pkgsi686Linux; + + # base packages of the chroot + # these match the host's architecture, glibc_multi is used for multilib + # builds. glibcLocales must be before glibc or glibc_multi as otherwiese + # the wrong LOCALE_ARCHIVE will be used where only C.UTF-8 is available. 
+ basePkgs = with pkgs; + [ glibcLocales + (if isMultiBuild then glibc_multi else glibc) + (toString gcc.cc.lib) bashInteractiveFHS coreutils less shadow su + gawk diffutils findutils gnused gnugrep + gnutar gzip bzip2 xz + ]; + baseMultiPkgs = with pkgsi686Linux; + [ (toString gcc.cc.lib) + ]; + + etcProfile = writeText "profile" '' + export PS1='${name}-chrootenv:\u@\h:\w\$ ' + export LOCALE_ARCHIVE='/usr/lib/locale/locale-archive' + export LD_LIBRARY_PATH="/run/opengl-driver/lib:/run/opengl-driver-32/lib:/usr/lib:/usr/lib32''${LD_LIBRARY_PATH:+:}$LD_LIBRARY_PATH" + export PATH="/run/wrappers/bin:/usr/bin:/usr/sbin:$PATH" + export TZDIR='/etc/zoneinfo' + + # XDG_DATA_DIRS is used by pressure-vessel (steam proton) and vulkan loaders to find the corresponding icd + export XDG_DATA_DIRS=$XDG_DATA_DIRS''${XDG_DATA_DIRS:+:}/run/opengl-driver/share:/run/opengl-driver-32/share + + # Following XDG spec [1], XDG_DATA_DIRS should default to "/usr/local/share:/usr/share". + # In nix, it is commonly set without containing these values, so we add them as fallback. + # + # [1] <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html> + case ":$XDG_DATA_DIRS:" in + *:/usr/local/share:*) ;; + *) export XDG_DATA_DIRS="$XDG_DATA_DIRS''${XDG_DATA_DIRS:+:}/usr/local/share" ;; + esac + case ":$XDG_DATA_DIRS:" in + *:/usr/share:*) ;; + *) export XDG_DATA_DIRS="$XDG_DATA_DIRS''${XDG_DATA_DIRS:+:}/usr/share" ;; + esac + + # Force compilers and other tools to look in default search paths + unset NIX_ENFORCE_PURITY + export NIX_BINTOOLS_WRAPPER_TARGET_HOST_${stdenv.cc.suffixSalt}=1 + export NIX_CC_WRAPPER_TARGET_HOST_${stdenv.cc.suffixSalt}=1 + export NIX_CFLAGS_COMPILE='-idirafter /usr/include' + export NIX_CFLAGS_LINK='-L/usr/lib -L/usr/lib32' + export NIX_LDFLAGS='-L/usr/lib -L/usr/lib32' + export PKG_CONFIG_PATH=/usr/lib/pkgconfig + export ACLOCAL_PATH=/usr/share/aclocal + + ${profile} + ''; + + # Compose /etc for the chroot environment + etcPkg = stdenv.mkDerivation { + name = "${name}-chrootenv-etc"; + buildCommand = '' + mkdir -p $out/etc + cd $out/etc + + # environment variables + ln -s ${etcProfile} profile + + # compatibility with NixOS + ln -s /host/etc/static static + + # symlink nix config + ln -s /host/etc/nix nix + + # symlink some NSS stuff + ln -s /host/etc/passwd passwd + ln -s /host/etc/group group + ln -s /host/etc/shadow shadow + ln -s /host/etc/hosts hosts + ln -s /host/etc/resolv.conf resolv.conf + ln -s /host/etc/nsswitch.conf nsswitch.conf + + # symlink user profiles + ln -s /host/etc/profiles profiles + + # symlink sudo and su stuff + ln -s /host/etc/login.defs login.defs + ln -s /host/etc/sudoers sudoers + ln -s /host/etc/sudoers.d sudoers.d + + # symlink other core stuff + ln -s /host/etc/localtime localtime + ln -s /host/etc/zoneinfo zoneinfo + ln -s /host/etc/machine-id machine-id + ln -s /host/etc/os-release os-release + + # symlink PAM stuff + ln -s /host/etc/pam.d pam.d + + # symlink fonts stuff + ln -s /host/etc/fonts fonts + + # symlink ALSA stuff + ln -s /host/etc/asound.conf asound.conf + + # symlink SSL certs + mkdir -p ssl + ln -s /host/etc/ssl/certs ssl/certs + + # symlink /etc/mtab -> /proc/mounts (compat for old userspace progs) + ln -s /proc/mounts mtab + ''; + }; + + # Composes a /usr-like directory structure + staticUsrProfileTarget = buildEnv { + name = "${name}-usr-target"; + paths = [ etcPkg ] ++ basePkgs ++ targetPaths; + extraOutputsToInstall = [ "out" "lib" "bin" ] ++ extraOutputsToInstall; + ignoreCollisions = true; + postBuild = '' + if [[ 
-d $out/share/gsettings-schemas/ ]]; then + # Recreate the standard schemas directory if its a symlink to make it writable + if [[ -L $out/share/glib-2.0 ]]; then + target=$(readlink $out/share/glib-2.0) + rm $out/share/glib-2.0 + mkdir $out/share/glib-2.0 + ln -fs $target/* $out/share/glib-2.0 + fi + + if [[ -L $out/share/glib-2.0/schemas ]]; then + target=$(readlink $out/share/glib-2.0/schemas) + rm $out/share/glib-2.0/schemas + mkdir $out/share/glib-2.0/schemas + ln -fs $target/* $out/share/glib-2.0/schemas + fi + + mkdir -p $out/share/glib-2.0/schemas + + for d in $out/share/gsettings-schemas/*; do + # Force symlink, in case there are duplicates + ln -fs $d/glib-2.0/schemas/*.xml $out/share/glib-2.0/schemas + ln -fs $d/glib-2.0/schemas/*.gschema.override $out/share/glib-2.0/schemas + done + + # and compile them + ${pkgs.glib.dev}/bin/glib-compile-schemas $out/share/glib-2.0/schemas + fi + ''; + }; + + staticUsrProfileMulti = buildEnv { + name = "${name}-usr-multi"; + paths = baseMultiPkgs ++ multiPaths; + extraOutputsToInstall = [ "out" "lib" ] ++ extraOutputsToInstall; + ignoreCollisions = true; + }; + + # setup library paths only for the targeted architecture + setupLibDirs_target = '' + # link content of targetPaths + cp -rsHf ${staticUsrProfileTarget}/lib lib + ln -s lib lib${if is64Bit then "64" else "32"} + ''; + + # setup /lib, /lib32 and /lib64 + setupLibDirs_multi = '' + mkdir -m0755 lib32 + mkdir -m0755 lib64 + ln -s lib64 lib + + # copy glibc stuff + cp -rsHf ${staticUsrProfileTarget}/lib/32/* lib32/ && chmod u+w -R lib32/ + + # copy content of multiPaths (32bit libs) + [ -d ${staticUsrProfileMulti}/lib ] && cp -rsHf ${staticUsrProfileMulti}/lib/* lib32/ && chmod u+w -R lib32/ + + # copy content of targetPaths (64bit libs) + cp -rsHf ${staticUsrProfileTarget}/lib/* lib64/ && chmod u+w -R lib64/ + + # symlink 32-bit ld-linux.so + ln -Ls ${staticUsrProfileTarget}/lib/32/ld-linux.so.2 lib/ + ''; + + setupLibDirs = if isTargetBuild then setupLibDirs_target + else setupLibDirs_multi; + + # the target profile is the actual profile that will be used for the chroot + setupTargetProfile = '' + mkdir -m0755 usr + cd usr + ${setupLibDirs} + for i in bin sbin share include; do + if [ -d "${staticUsrProfileTarget}/$i" ]; then + cp -rsHf "${staticUsrProfileTarget}/$i" "$i" + fi + done + cd .. + + for i in var etc opt; do + if [ -d "${staticUsrProfileTarget}/$i" ]; then + cp -rsHf "${staticUsrProfileTarget}/$i" "$i" + fi + done + for i in usr/{bin,sbin,lib,lib32,lib64}; do + if [ -d "$i" ]; then + ln -s "$i" + fi + done + ''; + +in stdenv.mkDerivation { + name = "${name}-fhs"; + buildCommand = '' + mkdir -p $out + cd $out + ${setupTargetProfile} + cd $out + ${extraBuildCommands} + cd $out + ${lib.optionalString isMultiBuild extraBuildCommandsMulti} + ''; + preferLocalBuild = true; + allowSubstitutes = false; +} diff --git a/nixpkgs/pkgs/build-support/build-graalvm-native-image/default.nix b/nixpkgs/pkgs/build-support/build-graalvm-native-image/default.nix new file mode 100644 index 000000000000..e25a71405089 --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-graalvm-native-image/default.nix @@ -0,0 +1,76 @@ +{ lib +, stdenv +, glibcLocales + # The GraalVM derivation to use +, graalvmDrv +, name ? "${args.pname}-${args.version}" +, executable ? args.pname + # JAR used as input for GraalVM derivation, defaults to src +, jar ? args.src +, dontUnpack ? (jar == args.src) + # Default native-image arguments. You probably don't want to set this, + # except in special cases. 
In most cases, use extraNativeBuildArgs instead +, nativeImageBuildArgs ? [ + (lib.optionalString stdenv.isDarwin "-H:-CheckToolchain") + "-H:Name=${executable}" + "-march=compatibility" + "--verbose" + ] + # Extra arguments to be passed to the native-image +, extraNativeImageBuildArgs ? [ ] + # XMX size of GraalVM during build +, graalvmXmx ? "-J-Xmx6g" + # Locale to be used by GraalVM compiler +, LC_ALL ? "en_US.UTF-8" +, meta ? { } +, ... +} @ args: + +let + extraArgs = builtins.removeAttrs args [ + "lib" + "stdenv" + "glibcLocales" + "jar" + "dontUnpack" + "LC_ALL" + "meta" + "buildPhase" + "nativeBuildInputs" + "installPhase" + ]; +in +stdenv.mkDerivation ({ + inherit dontUnpack LC_ALL jar; + + nativeBuildInputs = (args.nativeBuildInputs or [ ]) ++ [ graalvmDrv glibcLocales ]; + + nativeImageBuildArgs = nativeImageBuildArgs ++ extraNativeImageBuildArgs ++ [ graalvmXmx ]; + + buildPhase = args.buildPhase or '' + runHook preBuild + + native-image -jar "$jar" ''${nativeImageBuildArgs[@]} + + runHook postBuild + ''; + + installPhase = args.installPhase or '' + runHook preInstall + + install -Dm755 ${executable} -t $out/bin + + runHook postInstall + ''; + + disallowedReferences = [ graalvmDrv ]; + + passthru = { inherit graalvmDrv; }; + + meta = { + # default to graalvm's platforms + platforms = graalvmDrv.meta.platforms; + # default to executable name + mainProgram = executable; + } // meta; +} // extraArgs) diff --git a/nixpkgs/pkgs/build-support/build-maven.nix b/nixpkgs/pkgs/build-support/build-maven.nix new file mode 100644 index 000000000000..7ac8afdde225 --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-maven.nix @@ -0,0 +1,88 @@ +{ stdenv, maven, runCommand, writeText, fetchurl, lib, requireFile, linkFarm }: +# Takes an info file generated by mvn2nix +# (https://github.com/NixOS/mvn2nix-maven-plugin) and builds the maven +# project with it. +# +# repo: A local maven repository with the project's dependencies. +# +# settings: A settings.xml to pass to maven to use the repo. +# +# build: A simple build derivation that uses mvn compile and package to build +# the project. +# +# @example +# project = pkgs.buildMaven ./project-info.json +infoFile: +let + info = lib.importJSON infoFile; + + dependencies = lib.flatten (map (dep: + let + inherit (dep) sha1 groupId artifactId version metadata repository-id; + versionDir = dep.unresolved-version or version; + authenticated = dep.authenticated or false; + url = dep.url or ""; + + fetch = if (url != "") then + ((if authenticated then requireFile else fetchurl) { + inherit url sha1; + }) + else + ""; + + fetchMetadata = (if authenticated then requireFile else fetchurl) { + inherit (metadata) url sha1; + }; + + layout = "${ + builtins.replaceStrings [ "." ] [ "/" ] groupId + }/${artifactId}/${versionDir}"; + in lib.optional (url != "") { + layout = "${layout}/${fetch.name}"; + drv = fetch; + } ++ lib.optionals (dep ? 
metadata) ([{ + layout = "${layout}/maven-metadata-${repository-id}.xml"; + drv = fetchMetadata; + }] ++ lib.optional (fetch != "") { + layout = "${layout}/${ + builtins.replaceStrings [ version ] [ dep.unresolved-version ] + fetch.name + }"; + drv = fetch; + })) info.dependencies); + + repo = linkFarm "maven-repository" (lib.forEach dependencies (dependency: { + name = dependency.layout; + path = dependency.drv; + })); + + settings = writeText "settings.xml" '' + <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 + http://maven.apache.org/xsd/settings-1.0.0.xsd"> + <localRepository>${repo}</localRepository> + </settings> + ''; + + src = dirOf infoFile; +in { + inherit repo settings info; + + build = stdenv.mkDerivation { + name = "${info.project.artifactId}-${info.project.version}.jar"; + + src = builtins.filterSource (path: type: + (toString path) != (toString (src + "/target")) && (toString path) + != (toString (src + "/.git"))) src; + + buildInputs = [ maven ]; + + buildPhase = "mvn --offline --settings ${settings} compile"; + + installPhase = '' + mvn --offline --settings ${settings} package + mv target/*.jar $out + ''; + }; +} diff --git a/nixpkgs/pkgs/build-support/build-setupcfg/default.nix b/nixpkgs/pkgs/build-support/build-setupcfg/default.nix new file mode 100644 index 000000000000..5737989249af --- /dev/null +++ b/nixpkgs/pkgs/build-support/build-setupcfg/default.nix @@ -0,0 +1,26 @@ +# Build a python package from info made available by setupcfg2nix. +# +# * src: The source of the package. +# * info: The package information generated by setupcfg2nix. +# * meta: Standard nixpkgs metadata. +# * application: Whether this package is a python library or an +# application which happens to be written in python. +# * doCheck: Whether to run the test suites. +lib: pythonPackages: +{ src, info, meta ? {}, application ? false, doCheck ? true}: let + build = if application + then pythonPackages.buildPythonApplication + else pythonPackages.buildPythonPackage; +in build { + inherit (info) pname version; + + inherit src meta doCheck; + + nativeBuildInputs = map (p: pythonPackages.${p}) ( + (info.setup_requires or []) ++ + (lib.optionals doCheck (info.tests_require or [])) + ); + + propagatedBuildInputs = map (p: pythonPackages.${p}) + (info.install_requires or []); +} diff --git a/nixpkgs/pkgs/build-support/buildenv/builder.pl b/nixpkgs/pkgs/build-support/buildenv/builder.pl new file mode 100755 index 000000000000..975e76df05c0 --- /dev/null +++ b/nixpkgs/pkgs/build-support/buildenv/builder.pl @@ -0,0 +1,283 @@ +#! @perl@ -w + +use strict; +use Cwd 'abs_path'; +use IO::Handle; +use File::Path; +use File::Basename; +use File::Compare; +use JSON::PP; + +STDOUT->autoflush(1); + +$SIG{__WARN__} = sub { warn "warning: ", @_ }; +$SIG{__DIE__} = sub { die "error: ", @_ }; + +my $out = $ENV{"out"}; +my $extraPrefix = $ENV{"extraPrefix"}; + +my @pathsToLink = split ' ', $ENV{"pathsToLink"}; + +sub isInPathsToLink { + my $path = shift; + $path = "/" if $path eq ""; + foreach my $elem (@pathsToLink) { + return 1 if + $elem eq "/" || + (substr($path, 0, length($elem)) eq $elem + && (($path eq $elem) || (substr($path, length($elem), 1) eq "/"))); + } + return 0; +} + +# Returns whether a path in one of the linked packages may contain +# files in one of the elements of pathsToLink. 
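+# For example, with pathsToLink containing just "/share": hasPathsToLink("")
+# and hasPathsToLink("/share") are true (those directories may still lead to
+# linkable files), while hasPathsToLink("/bin") is false. Contrast
+# isInPathsToLink above, which instead reports whether a path such as
+# "/share/doc/foo" lies inside one of the pathsToLink entries.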
+sub hasPathsToLink { + my $path = shift; + foreach my $elem (@pathsToLink) { + return 1 if + $path eq "" || + (substr($elem, 0, length($path)) eq $path + && (($path eq $elem) || (substr($elem, length($path), 1) eq "/"))); + } + return 0; +} + +# Similar to `lib.isStorePath` +sub isStorePath { + my $path = shift; + my $storePath = "@storeDir@"; + + return substr($path, 0, 1) eq "/" && dirname($path) eq $storePath; +} + +# For each activated package, determine what symlinks to create. + +my %symlinks; + +# Add all pathsToLink and all parent directories. +# +# For "/a/b/c" that will include +# [ "", "/a", "/a/b", "/a/b/c" ] +# +# That ensures the whole directory tree needed by pathsToLink is +# created as directories and not symlinks. +$symlinks{""} = ["", 0]; +for my $p (@pathsToLink) { + my @parts = split '/', $p; + + my $cur = ""; + for my $x (@parts) { + $cur = $cur . "/$x"; + $cur = "" if $cur eq "/"; + $symlinks{$cur} = ["", 0]; + } +} + +sub findFiles; + +sub findFilesInDir { + my ($relName, $target, $ignoreCollisions, $checkCollisionContents, $priority) = @_; + + opendir DIR, "$target" or die "cannot open `$target': $!"; + my @names = readdir DIR or die; + closedir DIR; + + foreach my $name (@names) { + next if $name eq "." || $name eq ".."; + findFiles("$relName/$name", "$target/$name", $name, $ignoreCollisions, $checkCollisionContents, $priority); + } +} + +sub checkCollision { + my ($path1, $path2) = @_; + + if (! -e $path1 || ! -e $path2) { + return 0; + } + + my $stat1 = (stat($path1))[2]; + my $stat2 = (stat($path2))[2]; + + if ($stat1 != $stat2) { + warn "different permissions in `$path1' and `$path2': " + . sprintf("%04o", $stat1 & 07777) . " <-> " + . sprintf("%04o", $stat2 & 07777); + return 0; + } + + return compare($path1, $path2) == 0; +} + +sub prependDangling { + my $path = shift; + return (-l $path && ! -e $path ? "dangling symlink " : "") . "`$path'"; +} + +sub findFiles { + my ($relName, $target, $baseName, $ignoreCollisions, $checkCollisionContents, $priority) = @_; + + # The store path must not be a file + if (-f $target && isStorePath $target) { + die "The store path $target is a file and can't be merged into an environment using pkgs.buildEnv!"; + } + + # Urgh, hacky... + return if + $relName eq "/propagated-build-inputs" || + $relName eq "/nix-support" || + $relName =~ /info\/dir$/ || + ( $relName =~ /^\/share\/mime\// && !( $relName =~ /^\/share\/mime\/packages/ ) ) || + $baseName eq "perllocal.pod" || + $baseName eq "log" || + ! (hasPathsToLink($relName) || isInPathsToLink($relName)); + + my ($oldTarget, $oldPriority) = @{$symlinks{$relName} // [undef, undef]}; + + # If target doesn't exist, create it. If it already exists as a + # symlink to a file (not a directory) in a lower-priority package, + # overwrite it. + if (!defined $oldTarget || ($priority < $oldPriority && ($oldTarget ne "" && ! -d $oldTarget))) { + # If target is a dangling symlink, emit a warning. + if (-l $target && ! -e $target) { + my $link = readlink $target; + warn "creating dangling symlink `$out$extraPrefix/$relName' -> `$target' -> `$link'\n"; + } + $symlinks{$relName} = [$target, $priority]; + return; + } + + # If target already exists and both targets resolves to the same path, skip + if ( + defined $oldTarget && $oldTarget ne "" && + defined abs_path($target) && defined abs_path($oldTarget) && + abs_path($target) eq abs_path($oldTarget) + ) { + # Prefer the target that is not a symlink, if any + if (-l $oldTarget && ! 
-l $target) { + $symlinks{$relName} = [$target, $priority]; + } + return; + } + + # If target already exists as a symlink to a file (not a + # directory) in a higher-priority package, skip. + if (defined $oldTarget && $priority > $oldPriority && $oldTarget ne "" && ! -d $oldTarget) { + return; + } + + # If target is supposed to be a directory but it isn't, die with an error message + # instead of attempting to recurse into it, only to fail then. + # This happens e.g. when pathsToLink contains a non-directory path. + if ($oldTarget eq "" && ! -d $target) { + die "not a directory: `$target'\n"; + } + + unless (-d $target && ($oldTarget eq "" || -d $oldTarget)) { + # Prepend "dangling symlink" to paths if applicable. + my $targetRef = prependDangling($target); + my $oldTargetRef = prependDangling($oldTarget); + + if ($ignoreCollisions) { + warn "collision between $targetRef and $oldTargetRef\n" if $ignoreCollisions == 1; + return; + } elsif ($checkCollisionContents && checkCollision($oldTarget, $target)) { + return; + } else { + die "collision between $targetRef and $oldTargetRef\n"; + } + } + + findFilesInDir($relName, $oldTarget, $ignoreCollisions, $checkCollisionContents, $oldPriority) unless $oldTarget eq ""; + findFilesInDir($relName, $target, $ignoreCollisions, $checkCollisionContents, $priority); + + $symlinks{$relName} = ["", $priority]; # denotes directory +} + + +my %done; +my %postponed; + +sub addPkg { + my ($pkgDir, $ignoreCollisions, $checkCollisionContents, $priority) = @_; + + return if (defined $done{$pkgDir}); + $done{$pkgDir} = 1; + + findFiles("", $pkgDir, "", $ignoreCollisions, $checkCollisionContents, $priority); + + my $propagatedFN = "$pkgDir/nix-support/propagated-user-env-packages"; + if (-e $propagatedFN) { + open PROP, "<$propagatedFN" or die; + my $propagated = <PROP>; + close PROP; + my @propagated = split ' ', $propagated; + foreach my $p (@propagated) { + $postponed{$p} = 1 unless defined $done{$p}; + } + } +} + +# Read packages list. +my $pkgs; + +if (exists $ENV{"pkgsPath"}) { + open FILE, $ENV{"pkgsPath"}; + $pkgs = <FILE>; + close FILE; +} else { + $pkgs = $ENV{"pkgs"} +} + +# Symlink to the packages that have been installed explicitly by the +# user. +for my $pkg (@{decode_json $pkgs}) { + for my $path (@{$pkg->{paths}}) { + addPkg($path, + $ENV{"ignoreCollisions"} eq "1", + $ENV{"checkCollisionContents"} eq "1", + $pkg->{priority}) + if -e $path; + } +} + + +# Symlink to the packages that have been "propagated" by packages +# installed by the user (i.e., package X declares that it wants Y +# installed as well). We do these later because they have a lower +# priority in case of collisions. +my $priorityCounter = 1000; # don't care about collisions +while (scalar(keys %postponed) > 0) { + my @pkgDirs = keys %postponed; + %postponed = (); + foreach my $pkgDir (sort @pkgDirs) { + addPkg($pkgDir, 2, $ENV{"checkCollisionContents"} eq "1", $priorityCounter++); + } +} + + +# Create the symlinks. +my $nrLinks = 0; +foreach my $relName (sort keys %symlinks) { + my ($target, $priority) = @{$symlinks{$relName}}; + my $abs = "$out" . "$extraPrefix" . 
"/$relName"; + next unless isInPathsToLink $relName; + if ($target eq "") { + #print "creating directory $relName\n"; + mkpath $abs or die "cannot create directory `$abs': $!"; + } else { + #print "creating symlink $relName to $target\n"; + symlink $target, $abs || + die "error creating link `$abs': $!"; + $nrLinks++; + } +} + + +print STDERR "created $nrLinks symlinks in user environment\n"; + + +my $manifest = $ENV{"manifest"}; +if ($manifest) { + symlink($manifest, "$out/manifest") or die "cannot create manifest"; +} diff --git a/nixpkgs/pkgs/build-support/buildenv/default.nix b/nixpkgs/pkgs/build-support/buildenv/default.nix new file mode 100644 index 000000000000..786a2ad5da02 --- /dev/null +++ b/nixpkgs/pkgs/build-support/buildenv/default.nix @@ -0,0 +1,81 @@ +# buildEnv creates a tree of symlinks to the specified paths. This is +# a fork of the hardcoded buildEnv in the Nix distribution. + +{ buildPackages, runCommand, lib, substituteAll }: + +lib.makeOverridable +({ name + +, # The manifest file (if any). A symlink $out/manifest will be + # created to it. + manifest ? "" + +, # The paths to symlink. + paths + +, # Whether to ignore collisions or abort. + ignoreCollisions ? false + +, # If there is a collision, check whether the contents and permissions match + # and only if not, throw a collision error. + checkCollisionContents ? true + +, # The paths (relative to each element of `paths') that we want to + # symlink (e.g., ["/bin"]). Any file not inside any of the + # directories in the list is not symlinked. + pathsToLink ? ["/"] + +, # The package outputs to include. By default, only the default + # output is included. + extraOutputsToInstall ? [] + +, # Root the result in directory "$out${extraPrefix}", e.g. "/share". + extraPrefix ? "" + +, # Shell commands to run after building the symlink tree. + postBuild ? "" + +# Additional inputs +, nativeBuildInputs ? [] # Handy e.g. if using makeWrapper in `postBuild`. +, buildInputs ? [] + +, passthru ? {} +, meta ? {} +}: + +let + builder = substituteAll { + src = ./builder.pl; + inherit (builtins) storeDir; + }; +in + +runCommand name + rec { + inherit manifest ignoreCollisions checkCollisionContents passthru + meta pathsToLink extraPrefix postBuild + nativeBuildInputs buildInputs; + pkgs = builtins.toJSON (map (drv: { + paths = + # First add the usual output(s): respect if user has chosen explicitly, + # and otherwise use `meta.outputsToInstall`. The attribute is guaranteed + # to exist in mkDerivation-created cases. The other cases (e.g. runCommand) + # aren't expected to have multiple outputs. + (if (! drv ? outputSpecified || ! drv.outputSpecified) + && drv.meta.outputsToInstall or null != null + then map (outName: drv.${outName}) drv.meta.outputsToInstall + else [ drv ]) + # Add any extra outputs specified by the caller of `buildEnv`. 
+ ++ lib.filter (p: p!=null) + (builtins.map (outName: drv.${outName} or null) extraOutputsToInstall); + priority = drv.meta.priority or 5; + }) paths); + preferLocalBuild = true; + allowSubstitutes = false; + # XXX: The size is somewhat arbitrary + passAsFile = if builtins.stringLength pkgs >= 128*1024 then [ "pkgs" ] else [ ]; + } + '' + ${buildPackages.perl}/bin/perl -w ${builder} + eval "$postBuild" + '') diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/add-clang-cc-cflags-before.sh b/nixpkgs/pkgs/build-support/cc-wrapper/add-clang-cc-cflags-before.sh new file mode 100644 index 000000000000..f943b8504683 --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/add-clang-cc-cflags-before.sh @@ -0,0 +1,11 @@ +needsTarget=true + +for p in "${params[@]}"; do + case "$p" in + -target | --target=*) needsTarget=false ;; + esac +done + +if $needsTarget; then + extraBefore+=(-target @defaultTarget@) +fi diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/add-flags.sh b/nixpkgs/pkgs/build-support/cc-wrapper/add-flags.sh new file mode 100644 index 000000000000..c59118d6c09e --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/add-flags.sh @@ -0,0 +1,87 @@ +# N.B. It may be a surprise that the derivation-specific variables are exported, +# since this is just sourced by the wrapped binaries---the end consumers. This +# is because one wrapper binary may invoke another (e.g. cc invoking ld). In +# that case, it is cheaper/better to not repeat this step and let the forked +# wrapped binary just inherit the work of the forker's wrapper script. + +var_templates_list=( + NIX_CFLAGS_COMPILE + NIX_CFLAGS_COMPILE_BEFORE + NIX_CFLAGS_LINK + NIX_CXXSTDLIB_COMPILE + NIX_CXXSTDLIB_LINK + NIX_GNATFLAGS_COMPILE +) +var_templates_bool=( + NIX_ENFORCE_NO_NATIVE +) + +accumulateRoles + +# We need to mangle names for hygiene, but also take parameters/overrides +# from the environment. +for var in "${var_templates_list[@]}"; do + mangleVarList "$var" ${role_suffixes[@]+"${role_suffixes[@]}"} +done +for var in "${var_templates_bool[@]}"; do + mangleVarBool "$var" ${role_suffixes[@]+"${role_suffixes[@]}"} +done + +# `-B@out@/bin' forces cc to use ld-wrapper.sh when calling ld. +NIX_CFLAGS_COMPILE_@suffixSalt@="-B@out@/bin/ $NIX_CFLAGS_COMPILE_@suffixSalt@" + +# Export and assign separately in order that a failing $(..) will fail +# the script. + +# Currently bootstrap-tools does not split glibc, and gcc files into +# separate directories. As a workaround we want resulting cflags to be +# ordered as: crt1-cflags libc-cflags cc-cflags. Otherwise we mix crt/libc.so +# from different libc as seen in +# https://github.com/NixOS/nixpkgs/issues/158042 +# +# Note that below has reverse ordering as we prepend flags one-by-one. +# Once bootstrap-tools is split into different directories we can stop +# relying on flag ordering below. 
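+# Sketch of the resulting value after the prepends below (hypothetical file contents):
+#   NIX_CFLAGS_COMPILE_<salt> = "<libc-crt1-cflags> <libc-cflags> <cc-cflags> -B@out@/bin/ <previous value>"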
+ +if [ -e @out@/nix-support/cc-cflags ]; then + NIX_CFLAGS_COMPILE_@suffixSalt@="$(< @out@/nix-support/cc-cflags) $NIX_CFLAGS_COMPILE_@suffixSalt@" +fi + +if [[ "$cInclude" = 1 ]] && [ -e @out@/nix-support/libc-cflags ]; then + NIX_CFLAGS_COMPILE_@suffixSalt@="$(< @out@/nix-support/libc-cflags) $NIX_CFLAGS_COMPILE_@suffixSalt@" +fi + +if [ -e @out@/nix-support/libc-crt1-cflags ]; then + NIX_CFLAGS_COMPILE_@suffixSalt@="$(< @out@/nix-support/libc-crt1-cflags) $NIX_CFLAGS_COMPILE_@suffixSalt@" +fi + +if [ -e @out@/nix-support/libcxx-cxxflags ]; then + NIX_CXXSTDLIB_COMPILE_@suffixSalt@+=" $(< @out@/nix-support/libcxx-cxxflags)" +fi + +if [ -e @out@/nix-support/libcxx-ldflags ]; then + NIX_CXXSTDLIB_LINK_@suffixSalt@+=" $(< @out@/nix-support/libcxx-ldflags)" +fi + +if [ -e @out@/nix-support/gnat-cflags ]; then + NIX_GNATFLAGS_COMPILE_@suffixSalt@="$(< @out@/nix-support/gnat-cflags) $NIX_GNATFLAGS_COMPILE_@suffixSalt@" +fi + +if [ -e @out@/nix-support/cc-ldflags ]; then + NIX_LDFLAGS_@suffixSalt@+=" $(< @out@/nix-support/cc-ldflags)" +fi + +if [ -e @out@/nix-support/cc-cflags-before ]; then + NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@="$(< @out@/nix-support/cc-cflags-before) $NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@" +fi + +# Only add darwin min version flag if a default darwin min version is set, +# which is a signal that we're targetting darwin. +if [ "@darwinMinVersion@" ]; then + mangleVarSingle @darwinMinVersionVariable@ ${role_suffixes[@]+"${role_suffixes[@]}"} + + NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@="-m@darwinPlatformForCC@-version-min=${@darwinMinVersionVariable@_@suffixSalt@:-@darwinMinVersion@} $NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@" +fi + +# That way forked processes will not extend these environment variables again. +export NIX_CC_WRAPPER_FLAGS_SET_@suffixSalt@=1 diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/add-gnat-extra-flags.sh b/nixpkgs/pkgs/build-support/cc-wrapper/add-gnat-extra-flags.sh new file mode 100644 index 000000000000..ceff1e4a4c4a --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/add-gnat-extra-flags.sh @@ -0,0 +1,23 @@ +# See add-flags.sh in cc-wrapper for comments. +var_templates_list=( + NIX_GNATMAKE_CARGS +) + +accumulateRoles + +for var in "${var_templates_list[@]}"; do + mangleVarList "$var" ${role_suffixes[@]+"${role_suffixes[@]}"} +done + +# `-B@out@/bin' forces cc to use wrapped as instead of the system one. +NIX_GNATMAKE_CARGS_@suffixSalt@="$NIX_GNATMAKE_CARGS_@suffixSalt@ -B@out@/bin/" + +# Only add darwin min version flag if a default darwin min version is set, +# which is a signal that we're targetting darwin. +if [ "@darwinMinVersion@" ]; then + mangleVarSingle @darwinMinVersionVariable@ ${role_suffixes[@]+"${role_suffixes[@]}"} + + NIX_GNATMAKE_CARGS_@suffixSalt@="-m@darwinPlatformForCC@-version-min=${@darwinMinVersionVariable@_@suffixSalt@:-@darwinMinVersion@} $NIX_GNATMAKE_CARGS_@suffixSalt@" +fi + +export NIX_GNAT_WRAPPER_EXTRA_FLAGS_SET_@suffixSalt@=1 diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/add-hardening.sh b/nixpkgs/pkgs/build-support/cc-wrapper/add-hardening.sh new file mode 100644 index 000000000000..8d02b4e5124d --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/add-hardening.sh @@ -0,0 +1,111 @@ +declare -a hardeningCFlagsAfter=() +declare -a hardeningCFlagsBefore=() + +declare -A hardeningEnableMap=() + +# Intentionally word-split in case 'NIX_HARDENING_ENABLE' is defined in Nix. The +# array expansion also prevents undefined variables from causing trouble with +# `set -u`. 
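+# For example (illustrative), a derivation that sets
+#   NIX_HARDENING_ENABLE = "fortify stackprotector pie";
+# word-splits into three keys of hardeningEnableMap in the loop below.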
+for flag in ${NIX_HARDENING_ENABLE_@suffixSalt@-}; do + hardeningEnableMap["$flag"]=1 +done + +# Remove unsupported flags. +for flag in @hardening_unsupported_flags@; do + unset -v "hardeningEnableMap[$flag]" + # fortify being unsupported implies fortify3 is unsupported + if [[ "$flag" = 'fortify' ]] ; then + unset -v "hardeningEnableMap['fortify3']" + fi +done + +# make fortify and fortify3 mutually exclusive +if [[ -n "${hardeningEnableMap[fortify3]-}" ]]; then + unset -v "hardeningEnableMap['fortify']" +fi + +if (( "${NIX_DEBUG:-0}" >= 1 )); then + declare -a allHardeningFlags=(fortify stackprotector pie pic strictoverflow format) + declare -A hardeningDisableMap=() + + # Determine which flags were effectively disabled so we can report below. + for flag in "${allHardeningFlags[@]}"; do + if [[ -z "${hardeningEnableMap[$flag]-}" ]]; then + hardeningDisableMap["$flag"]=1 + fi + done + + printf 'HARDENING: disabled flags:' >&2 + (( "${#hardeningDisableMap[@]}" )) && printf ' %q' "${!hardeningDisableMap[@]}" >&2 + echo >&2 + + if (( "${#hardeningEnableMap[@]}" )); then + echo 'HARDENING: Is active (not completely disabled with "all" flag)' >&2; + fi +fi + +for flag in "${!hardeningEnableMap[@]}"; do + case $flag in + fortify | fortify3) + # Use -U_FORTIFY_SOURCE to avoid warnings on toolchains that explicitly + # set -D_FORTIFY_SOURCE=0 (like 'clang -fsanitize=address'). + hardeningCFlagsBefore+=('-O2' '-U_FORTIFY_SOURCE') + # Unset any _FORTIFY_SOURCE values the command-line may have set before + # enforcing our own value, avoiding (potentially fatal) redefinition + # warnings + hardeningCFlagsAfter+=('-U_FORTIFY_SOURCE') + case $flag in + fortify) + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling fortify >&2; fi + hardeningCFlagsAfter+=('-D_FORTIFY_SOURCE=2') + ;; + fortify3) + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling fortify3 >&2; fi + hardeningCFlagsAfter+=('-D_FORTIFY_SOURCE=3') + ;; + *) + # Ignore unsupported. + ;; + esac + ;; + stackprotector) + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling stackprotector >&2; fi + hardeningCFlagsBefore+=('-fstack-protector-strong' '--param' 'ssp-buffer-size=4') + ;; + pie) + # NB: we do not use `+=` here, because PIE flags must occur before any PIC flags + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling CFlags -fPIE >&2; fi + hardeningCFlagsBefore=('-fPIE' "${hardeningCFlagsBefore[@]}") + if [[ ! (" ${params[*]} " =~ " -shared " || " ${params[*]} " =~ " -static ") ]]; then + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling LDFlags -pie >&2; fi + hardeningCFlagsBefore=('-pie' "${hardeningCFlagsBefore[@]}") + fi + ;; + pic) + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling pic >&2; fi + hardeningCFlagsBefore+=('-fPIC') + ;; + strictoverflow) + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling strictoverflow >&2; fi + if (( @isClang@ )); then + # In Clang, -fno-strict-overflow only serves to set -fwrapv and is + # reported as an unused CLI argument if -fwrapv or -fno-wrapv is set + # explicitly, so we side step that by doing the conversion here. 
+ # + # See: https://github.com/llvm/llvm-project/blob/llvmorg-16.0.6/clang/lib/Driver/ToolChains/Clang.cpp#L6315 + # + hardeningCFlagsBefore+=('-fwrapv') + else + hardeningCFlagsBefore+=('-fno-strict-overflow') + fi + ;; + format) + if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling format >&2; fi + hardeningCFlagsBefore+=('-Wformat' '-Wformat-security' '-Werror=format-security') + ;; + *) + # Ignore unsupported. Checked in Nix that at least *some* + # tool supports each flag. + ;; + esac +done diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/cc-wrapper.sh b/nixpkgs/pkgs/build-support/cc-wrapper/cc-wrapper.sh new file mode 100644 index 000000000000..6c43f8cbfa6d --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/cc-wrapper.sh @@ -0,0 +1,261 @@ +#! @shell@ +set -eu -o pipefail +o posix +shopt -s nullglob + +if (( "${NIX_DEBUG:-0}" >= 7 )); then + set -x +fi + +path_backup="$PATH" + +# That @-vars are substituted separately from bash evaluation makes +# shellcheck think this, and others like it, are useless conditionals. +# shellcheck disable=SC2157 +if [[ -n "@coreutils_bin@" && -n "@gnugrep_bin@" ]]; then + PATH="@coreutils_bin@/bin:@gnugrep_bin@/bin" +fi + +source @out@/nix-support/utils.bash + + +# Parse command line options and set several variables. +# For instance, figure out if linker flags should be passed. +# GCC prints annoying warnings when they are not needed. +dontLink=0 +nonFlagArgs=0 +cc1=0 +# shellcheck disable=SC2193 +[[ "@prog@" = *++ ]] && isCxx=1 || isCxx=0 +cxxInclude=1 +cxxLibrary=1 +cInclude=1 + +expandResponseParams "$@" + +declare -ag positionalArgs=() +declare -i n=0 +nParams=${#params[@]} +while (( "$n" < "$nParams" )); do + p=${params[n]} + p2=${params[n+1]:-} # handle `p` being last one + n+=1 + + case "$p" in + -[cSEM] | -MM) dontLink=1 ;; + -cc1) cc1=1 ;; + -nostdinc) cInclude=0 cxxInclude=0 ;; + -nostdinc++) cxxInclude=0 ;; + -nostdlib) cxxLibrary=0 ;; + -x*-header) dontLink=1 ;; # both `-x c-header` and `-xc-header` are accepted by clang + -xc++*) isCxx=1 ;; # both `-xc++` and `-x c++` are accepted by clang + -x) + case "$p2" in + *-header) dontLink=1 ;; + c++*) isCxx=1 ;; + esac + ;; + --) # Everything else is positional args! + # See: https://github.com/llvm/llvm-project/commit/ed1d07282cc9d8e4c25d585e03e5c8a1b6f63a74 + + # Any positional arg (i.e. any argument after `--`) will be + # interpreted as a "non flag" arg: + if [[ -v "params[$n]" ]]; then nonFlagArgs=1; fi + + positionalArgs=("${params[@]:$n}") + params=("${params[@]:0:$((n - 1))}") + break; + ;; + -?*) ;; + *) nonFlagArgs=1 ;; # Includes a solitary dash (`-`) which signifies standard input; it is not a flag + esac +done + +# If we pass a flag like -Wl, then gcc will call the linker unless it +# can figure out that it has to do something else (e.g., because of a +# "-c" flag). So if no non-flag arguments are given, don't pass any +# linker flags. This catches cases like "gcc" (should just print +# "gcc: no input files") and "gcc -v" (should print the version). +if [ "$nonFlagArgs" = 0 ]; then + dontLink=1 +fi + +# Optionally filter out paths not refering to the store. 
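+# Sketch of the effect (hypothetical paths): with NIX_ENFORCE_PURITY=1, an
+# invocation like `gcc -I/usr/include -I$NIX_STORE/...-zlib-dev/include foo.c`
+# roughly keeps the store include path and drops -I/usr/include.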
+if [[ "${NIX_ENFORCE_PURITY:-}" = 1 && -n "$NIX_STORE" ]]; then + kept=() + nParams=${#params[@]} + declare -i n=0 + while (( "$n" < "$nParams" )); do + p=${params[n]} + p2=${params[n+1]:-} # handle `p` being last one + n+=1 + + skipNext=false + path="" + case "$p" in + -[IL]/*) path=${p:2} ;; + -[IL] | -isystem) path=$p2 skipNext=true ;; + esac + + if [[ -n $path ]] && badPath "$path"; then + skip "$path" + $skipNext && n+=1 + continue + fi + + kept+=("$p") + done + # Old bash empty array hack + params=(${kept+"${kept[@]}"}) +fi + +# Flirting with a layer violation here. +if [ -z "${NIX_BINTOOLS_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then + source @bintools@/nix-support/add-flags.sh +fi + +# Put this one second so libc ldflags take priority. +if [ -z "${NIX_CC_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then + source @out@/nix-support/add-flags.sh +fi + +# Clear march/mtune=native -- they bring impurity. +if [ "$NIX_ENFORCE_NO_NATIVE_@suffixSalt@" = 1 ]; then + kept=() + # Old bash empty array hack + for p in ${params+"${params[@]}"}; do + if [[ "$p" = -m*=native ]]; then + skip "$p" + else + kept+=("$p") + fi + done + # Old bash empty array hack + params=(${kept+"${kept[@]}"}) +fi + +if [[ "$isCxx" = 1 ]]; then + if [[ "$cxxInclude" = 1 ]]; then + # + # The motivation for this comment is to explain the reason for appending + # the C++ stdlib to NIX_CFLAGS_COMPILE, which I initially thought should + # change and later realized it shouldn't in: + # + # https://github.com/NixOS/nixpkgs/pull/185569#issuecomment-1234959249 + # + # NIX_CFLAGS_COMPILE contains dependencies added using "-isystem", and + # NIX_CXXSTDLIB_COMPILE adds the C++ stdlib using "-isystem". Appending + # NIX_CXXSTDLIB_COMPILE to NIX_CLAGS_COMPILE emulates this part of the + # include lookup order from GCC/Clang: + # + # > 4. Directories specified with -isystem options are scanned in + # > left-to-right order. + # > 5. Standard system directories are scanned. + # > 6. Directories specified with -idirafter options are scanned + # > in left-to-right order. + # + # NIX_CXX_STDLIB_COMPILE acts as the "standard system directories" that + # are otherwise missing from CC in nixpkgs, so should be added last. + # + # This means that the C standard library should never be present inside + # NIX_CFLAGS_COMPILE, because it MUST come after the C++ stdlib. It is + # added automatically by cc-wrapper later using "-idirafter". + # + NIX_CFLAGS_COMPILE_@suffixSalt@+=" $NIX_CXXSTDLIB_COMPILE_@suffixSalt@" + fi + if [[ "$cxxLibrary" = 1 ]]; then + NIX_CFLAGS_LINK_@suffixSalt@+=" $NIX_CXXSTDLIB_LINK_@suffixSalt@" + fi +fi + +source @out@/nix-support/add-hardening.sh + +# Add the flags for the C compiler proper. +extraAfter=(${hardeningCFlagsAfter[@]+"${hardeningCFlagsAfter[@]}"} $NIX_CFLAGS_COMPILE_@suffixSalt@) +extraBefore=(${hardeningCFlagsBefore[@]+"${hardeningCFlagsBefore[@]}"} $NIX_CFLAGS_COMPILE_BEFORE_@suffixSalt@) + +if [ "$dontLink" != 1 ]; then + linkType=$(checkLinkType $NIX_LDFLAGS_BEFORE_@suffixSalt@ "${params[@]}" ${NIX_CFLAGS_LINK_@suffixSalt@:-} $NIX_LDFLAGS_@suffixSalt@) + + # Add the flags that should only be passed to the compiler when + # linking. + extraAfter+=($(filterRpathFlags "$linkType" $NIX_CFLAGS_LINK_@suffixSalt@)) + + # Add the flags that should be passed to the linker (and prevent + # `ld-wrapper' from adding NIX_LDFLAGS_@suffixSalt@ again). 
+ for i in $(filterRpathFlags "$linkType" $NIX_LDFLAGS_BEFORE_@suffixSalt@); do + extraBefore+=("-Wl,$i") + done + if [[ "$linkType" == dynamic && -n "$NIX_DYNAMIC_LINKER_@suffixSalt@" ]]; then + extraBefore+=("-Wl,-dynamic-linker=$NIX_DYNAMIC_LINKER_@suffixSalt@") + fi + for i in $(filterRpathFlags "$linkType" $NIX_LDFLAGS_@suffixSalt@); do + if [ "${i:0:3}" = -L/ ]; then + extraAfter+=("$i") + else + extraAfter+=("-Wl,$i") + fi + done + export NIX_LINK_TYPE_@suffixSalt@=$linkType +fi + +if [[ -e @out@/nix-support/add-local-cc-cflags-before.sh ]]; then + source @out@/nix-support/add-local-cc-cflags-before.sh +fi + +# As a very special hack, if the arguments are just `-v', then don't +# add anything. This is to prevent `gcc -v' (which normally prints +# out the version number and returns exit code 0) from printing out +# `No input files specified' and returning exit code 1. +if [ "$*" = -v ]; then + extraAfter=() + extraBefore=() +fi + +# clang's -cc1 mode is not compatible with most options +# that we would pass. Rather than trying to pass only +# options that would work, let's just remove all of them. +if [ "$cc1" = 1 ]; then + extraAfter=() + extraBefore=() +fi + +# Finally, if we got any positional args, append them to `extraAfter` +# now: +if [[ "${#positionalArgs[@]}" -gt 0 ]]; then + extraAfter+=(-- "${positionalArgs[@]}") +fi + +# Optionally print debug info. +if (( "${NIX_DEBUG:-0}" >= 1 )); then + # Old bash workaround, see ld-wrapper for explanation. + echo "extra flags before to @prog@:" >&2 + printf " %q\n" ${extraBefore+"${extraBefore[@]}"} >&2 + echo "original flags to @prog@:" >&2 + printf " %q\n" ${params+"${params[@]}"} >&2 + echo "extra flags after to @prog@:" >&2 + printf " %q\n" ${extraAfter+"${extraAfter[@]}"} >&2 +fi + +PATH="$path_backup" +# Old bash workaround, see above. + +# if a cc-wrapper-hook exists, run it. +if [[ -e @out@/nix-support/cc-wrapper-hook ]]; then + compiler=@prog@ + source @out@/nix-support/cc-wrapper-hook +fi + +if (( "${NIX_CC_USE_RESPONSE_FILE:-@use_response_file_by_default@}" >= 1 )); then + responseFile=$(mktemp --tmpdir cc-params.XXXXXX) + trap 'rm -f -- "$responseFile"' EXIT + printf "%q\n" \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} > "$responseFile" + @prog@ "@$responseFile" +else + exec @prog@ \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} +fi diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/default.nix b/nixpkgs/pkgs/build-support/cc-wrapper/default.nix new file mode 100644 index 000000000000..8ac11436c5f7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/default.nix @@ -0,0 +1,726 @@ +# The Nixpkgs CC is not directly usable, since it doesn't know where +# the C library and standard header files are. Therefore the compiler +# produced by that package cannot be installed directly in a user +# environment and used from the command line. So we use a wrapper +# script that sets up the right environment variables so that the +# compiler and the linker just "work". + +{ name ? "" +, lib +, stdenvNoCC +, cc ? null, libc ? null, bintools, coreutils ? null, shell ? stdenvNoCC.shell +, zlib ? null +, nativeTools, noLibc ? false, nativeLibc, nativePrefix ? "" +, propagateDoc ? cc != null && cc ? man +, extraTools ? [], extraPackages ? [], extraBuildCommands ? "" +, nixSupport ? {} +, isGNU ? false, isClang ? cc.isClang or false, isCcache ? cc.isCcache or false, gnugrep ? null +, buildPackages ? {} +, libcxx ? 
null + +# Whether or not to add `-B` and `-L` to `nix-support/cc-{c,ld}flags` +, useCcForLibs ? + + # Always add these flags for Clang, because in order to compile (most + # software) it needs libraries that are shipped and compiled with gcc. + if isClang then true + + # Never add these flags for a build!=host cross-compiler or a host!=target + # ("cross-built-native") compiler; currently nixpkgs has a special build + # path for these (`crossStageStatic`). Hopefully at some point that build + # path will be merged with this one and this conditional will be removed. + else if (with stdenvNoCC; buildPlatform != hostPlatform || hostPlatform != targetPlatform) then false + + # Never add these flags when wrapping the bootstrapFiles' compiler; it has a + # /usr/-like layout with everything smashed into a single outpath, so it has + # no trouble finding its own libraries. + else if (cc.passthru.isFromBootstrapFiles or false) then false + + # Add these flags when wrapping `xgcc` (the first compiler that nixpkgs builds) + else if (cc.passthru.isXgcc or false) then true + + # Add these flags when wrapping `stdenv.cc` + else if (cc.stdenv.cc.cc.passthru.isXgcc or false) then true + + # Do not add these flags in any other situation. This is `false` mainly to + # prevent these flags from being added when wrapping *old* versions of gcc + # (e.g. `gcc6Stdenv`), since they will cause the old gcc to get `-B` and + # `-L` flags pointing at the new gcc's libstdc++ headers. Example failure: + # https://hydra.nixos.org/build/213125495 + else false + +# the derivation at which the `-B` and `-L` flags added by `useCcForLibs` will point +, gccForLibs ? if useCcForLibs then cc else null +, fortify-headers ? null +, includeFortifyHeaders ? null +}: + +with lib; + +assert nativeTools -> !propagateDoc && nativePrefix != ""; +assert !nativeTools -> + cc != null && coreutils != null && gnugrep != null; +assert !(nativeLibc && noLibc); +assert (noLibc || nativeLibc) == (libc == null); + +let + stdenv = stdenvNoCC; + inherit (stdenv) hostPlatform targetPlatform; + + includeFortifyHeaders' = if includeFortifyHeaders != null + then includeFortifyHeaders + else (targetPlatform.libc == "musl" && isGNU); + + # Prefix for binaries. Customarily ends with a dash separator. + # + # TODO(@Ericson2314) Make unconditional, or optional but always true by + # default. + targetPrefix = lib.optionalString (targetPlatform != hostPlatform) + (targetPlatform.config + "-"); + + ccVersion = lib.getVersion cc; + ccName = lib.removePrefix targetPrefix (lib.getName cc); + + libc_bin = optionalString (libc != null) (getBin libc); + libc_dev = optionalString (libc != null) (getDev libc); + libc_lib = optionalString (libc != null) (getLib libc); + cc_solib = getLib cc + + optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}"; + + # The wrapper scripts use 'cat' and 'grep', so we may need coreutils. + coreutils_bin = optionalString (!nativeTools) (getBin coreutils); + + # The "suffix salt" is a arbitrary string added in the end of env vars + # defined by cc-wrapper's hooks so that multiple cc-wrappers can be used + # without interfering. For the moment, it is defined as the target triple, + # adjusted to be a valid bash identifier. This should be considered an + # unstable implementation detail, however. 
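+  # For example, targetPlatform.config = "x86_64-unknown-linux-gnu" yields the
+  # salt "x86_64_unknown_linux_gnu", so the wrapped tools read variables such as
+  # NIX_CFLAGS_COMPILE_x86_64_unknown_linux_gnu.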
+ suffixSalt = replaceStrings ["-" "."] ["_" "_"] targetPlatform.config; + + expand-response-params = + lib.optionalString ((buildPackages.stdenv.hasCC or false) && buildPackages.stdenv.cc != "/dev/null") (import ../expand-response-params { inherit (buildPackages) stdenv; }); + + useGccForLibs = useCcForLibs + && libcxx == null + && !stdenv.targetPlatform.isDarwin + && !(stdenv.targetPlatform.useLLVM or false) + && !(stdenv.targetPlatform.useAndroidPrebuilt or false) + && !(stdenv.targetPlatform.isiOS or false) + && gccForLibs != null; + gccForLibs_solib = getLib gccForLibs + + optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}"; + + # The following two functions, `isGccArchSupported` and + # `isGccTuneSupported`, only handle those situations where a flag + # (`-march` or `-mtune`) is accepted by one compiler but rejected + # by another, and both compilers are relevant to nixpkgs. We are + # not trying to maintain a complete list of all flags accepted by + # all versions of all compilers ever in nixpkgs. + # + # The two main cases of interest are: + # + # - One compiler is gcc and the other is clang + # - One compiler is pkgs.gcc and the other is bootstrap-files.gcc + # -- older compilers (for example bootstrap's GCC 5) fail with + # -march=too-modern-cpu + + isGccArchSupported = arch: + if targetPlatform.isPower then false else # powerpc does not allow -march= + if isGNU then + { # Generic + x86-64-v2 = versionAtLeast ccVersion "11.0"; + x86-64-v3 = versionAtLeast ccVersion "11.0"; + x86-64-v4 = versionAtLeast ccVersion "11.0"; + + # Intel + skylake = versionAtLeast ccVersion "6.0"; + skylake-avx512 = versionAtLeast ccVersion "6.0"; + cannonlake = versionAtLeast ccVersion "8.0"; + icelake-client = versionAtLeast ccVersion "8.0"; + icelake-server = versionAtLeast ccVersion "8.0"; + cascadelake = versionAtLeast ccVersion "9.0"; + cooperlake = versionAtLeast ccVersion "10.0"; + tigerlake = versionAtLeast ccVersion "10.0"; + knm = versionAtLeast ccVersion "8.0"; + alderlake = versionAtLeast ccVersion "12.0"; + + # AMD + znver1 = versionAtLeast ccVersion "6.0"; + znver2 = versionAtLeast ccVersion "9.0"; + znver3 = versionAtLeast ccVersion "11.0"; + znver4 = versionAtLeast ccVersion "13.0"; + }.${arch} or true + else if isClang then + { #Generic + x86-64-v2 = versionAtLeast ccVersion "12.0"; + x86-64-v3 = versionAtLeast ccVersion "12.0"; + x86-64-v4 = versionAtLeast ccVersion "12.0"; + + # Intel + cannonlake = versionAtLeast ccVersion "5.0"; + icelake-client = versionAtLeast ccVersion "7.0"; + icelake-server = versionAtLeast ccVersion "7.0"; + knm = versionAtLeast ccVersion "7.0"; + alderlake = versionAtLeast ccVersion "16.0"; + + # AMD + znver1 = versionAtLeast ccVersion "4.0"; + znver2 = versionAtLeast ccVersion "9.0"; + znver3 = versionAtLeast ccVersion "12.0"; + znver4 = versionAtLeast ccVersion "16.0"; + }.${arch} or true + else + false; + + isGccTuneSupported = tune: + # for x86 -mtune= takes the same values as -march, plus two more: + if targetPlatform.isx86 then + { + generic = true; + intel = true; + }.${tune} or (isGccArchSupported tune) + # on arm64, the -mtune= values are specific processors + else if targetPlatform.isAarch64 then + (if isGNU then + { + cortex-a53 = versionAtLeast ccVersion "4.8"; # gcc 8c075f + cortex-a72 = versionAtLeast ccVersion "5.1"; # gcc d8f70d + "cortex-a72.cortex-a53" = versionAtLeast ccVersion "5.1"; # gcc d8f70d + }.${tune} or false + else if isClang then + { + cortex-a53 = versionAtLeast ccVersion "3.9"; # llvm dfc5d1 + }.${tune} 
or false + else false) + else if targetPlatform.isPower then + # powerpc does not support -march + true + else if targetPlatform.isMips then + # for mips -mtune= takes the same values as -march + isGccArchSupported tune + else + false; + + # Clang does not support as many `-mtune=` values as gcc does; + # this function will return the best possible approximation of the + # provided `-mtune=` value, or `null` if none exists. + # + # Note: this function can make use of ccVersion; for example, `if + # versionOlder ccVersion "12" then ...` + findBestTuneApproximation = tune: + let guess = if isClang + then { + # clang does not tune for big.LITTLE chips + "cortex-a72.cortex-a53" = "cortex-a72"; + }.${tune} or tune + else tune; + in if isGccTuneSupported guess + then guess + else null; + + darwinPlatformForCC = optionalString stdenv.targetPlatform.isDarwin ( + if (targetPlatform.darwinPlatform == "macos" && isGNU) then "macosx" + else targetPlatform.darwinPlatform + ); + + darwinMinVersion = optionalString stdenv.targetPlatform.isDarwin ( + stdenv.targetPlatform.darwinMinVersion + ); + + darwinMinVersionVariable = optionalString stdenv.targetPlatform.isDarwin + stdenv.targetPlatform.darwinMinVersionVariable; +in + +assert includeFortifyHeaders' -> fortify-headers != null; + +# Ensure bintools matches +assert libc_bin == bintools.libc_bin; +assert libc_dev == bintools.libc_dev; +assert libc_lib == bintools.libc_lib; +assert nativeTools == bintools.nativeTools; +assert nativeLibc == bintools.nativeLibc; +assert nativePrefix == bintools.nativePrefix; + +stdenv.mkDerivation { + pname = targetPrefix + + (if name != "" then name else "${ccName}-wrapper"); + version = optionalString (cc != null) ccVersion; + + preferLocalBuild = true; + + outputs = [ "out" ] ++ optionals propagateDoc [ "man" "info" ]; + + passthru = { + inherit targetPrefix suffixSalt; + # "cc" is the generic name for a C compiler, but there is no one for package + # providing the linker and related tools. The two we use now are GNU + # Binutils, and Apple's "cctools"; "bintools" as an attempt to find an + # unused middle-ground name that evokes both. + inherit bintools; + inherit cc libc libcxx nativeTools nativeLibc nativePrefix isGNU isClang; + + emacsBufferSetup = pkgs: '' + ; We should handle propagation here too + (mapc + (lambda (arg) + (when (file-directory-p (concat arg "/include")) + (setenv "NIX_CFLAGS_COMPILE_${suffixSalt}" (concat (getenv "NIX_CFLAGS_COMPILE_${suffixSalt}") " -isystem " arg "/include")))) + '(${concatStringsSep " " (map (pkg: "\"${pkg}\"") pkgs)})) + ''; + + inherit expand-response-params; + + inherit nixSupport; + }; + + dontBuild = true; + dontConfigure = true; + enableParallelBuilding = true; + + unpackPhase = '' + src=$PWD + ''; + + wrapper = ./cc-wrapper.sh; + + installPhase = + '' + mkdir -p $out/bin $out/nix-support + + wrap() { + local dst="$1" + local wrapper="$2" + export prog="$3" + export use_response_file_by_default=${if isClang && !isCcache then "1" else "0"} + substituteAll "$wrapper" "$out/bin/$dst" + chmod +x "$out/bin/$dst" + } + '' + + + (if nativeTools then '' + echo ${if targetPlatform.isDarwin then cc else nativePrefix} > $out/nix-support/orig-cc + + ccPath="${if targetPlatform.isDarwin then cc else nativePrefix}/bin" + '' else '' + echo $cc > $out/nix-support/orig-cc + + ccPath="${cc}/bin" + '') + + # Create symlinks to everything in the bintools wrapper. 
+ + '' + for bbin in $bintools/bin/*; do + mkdir -p "$out/bin" + ln -s "$bbin" "$out/bin/$(basename $bbin)" + done + '' + + # We export environment variables pointing to the wrapped nonstandard + # cmds, lest some lousy configure script use those to guess compiler + # version. + + '' + export named_cc=${targetPrefix}cc + export named_cxx=${targetPrefix}c++ + + if [ -e $ccPath/${targetPrefix}gcc ]; then + wrap ${targetPrefix}gcc $wrapper $ccPath/${targetPrefix}gcc + ln -s ${targetPrefix}gcc $out/bin/${targetPrefix}cc + export named_cc=${targetPrefix}gcc + export named_cxx=${targetPrefix}g++ + elif [ -e $ccPath/clang ]; then + wrap ${targetPrefix}clang $wrapper $ccPath/clang + ln -s ${targetPrefix}clang $out/bin/${targetPrefix}cc + export named_cc=${targetPrefix}clang + export named_cxx=${targetPrefix}clang++ + fi + + if [ -e $ccPath/${targetPrefix}g++ ]; then + wrap ${targetPrefix}g++ $wrapper $ccPath/${targetPrefix}g++ + ln -s ${targetPrefix}g++ $out/bin/${targetPrefix}c++ + elif [ -e $ccPath/clang++ ]; then + wrap ${targetPrefix}clang++ $wrapper $ccPath/clang++ + ln -s ${targetPrefix}clang++ $out/bin/${targetPrefix}c++ + fi + + if [ -e $ccPath/${targetPrefix}cpp ]; then + wrap ${targetPrefix}cpp $wrapper $ccPath/${targetPrefix}cpp + elif [ -e $ccPath/cpp ]; then + wrap ${targetPrefix}cpp $wrapper $ccPath/cpp + fi + '' + + # No need to wrap gnat, gnatkr, gnatname or gnatprep; we can just symlink them in + + optionalString cc.langAda or false '' + for cmd in gnatbind gnatchop gnatclean gnatlink gnatls gnatmake; do + wrap ${targetPrefix}$cmd ${./gnat-wrapper.sh} $ccPath/${targetPrefix}$cmd + done + + for cmd in gnat gnatkr gnatname gnatprep; do + ln -s $ccPath/${targetPrefix}$cmd $out/bin/${targetPrefix}$cmd + done + + # this symlink points to the unwrapped gnat's output "out". It is used by + # our custom gprconfig compiler description to find GNAT's ada runtime. See + # ../../development/tools/build-managers/gprbuild/{boot.nix, nixpkgs-gnat.xml} + ln -sf ${cc} $out/nix-support/gprconfig-gnat-unwrapped + '' + + + optionalString cc.langD or false '' + wrap ${targetPrefix}gdc $wrapper $ccPath/${targetPrefix}gdc + '' + + + optionalString cc.langFortran or false '' + wrap ${targetPrefix}gfortran $wrapper $ccPath/${targetPrefix}gfortran + ln -sv ${targetPrefix}gfortran $out/bin/${targetPrefix}g77 + ln -sv ${targetPrefix}gfortran $out/bin/${targetPrefix}f77 + export named_fc=${targetPrefix}gfortran + '' + + + optionalString cc.langJava or false '' + wrap ${targetPrefix}gcj $wrapper $ccPath/${targetPrefix}gcj + '' + + + optionalString cc.langGo or false '' + wrap ${targetPrefix}gccgo $wrapper $ccPath/${targetPrefix}gccgo + wrap ${targetPrefix}go ${./go-wrapper.sh} $ccPath/${targetPrefix}go + ''; + + strictDeps = true; + propagatedBuildInputs = [ bintools ] ++ extraTools ++ optionals cc.langD or cc.langJava or false [ zlib ]; + depsTargetTargetPropagated = optional (libcxx != null) libcxx ++ extraPackages; + + setupHooks = [ + ../setup-hooks/role.bash + ] ++ lib.optional (cc.langC or true) ./setup-hook.sh + ++ lib.optional (cc.langFortran or false) ./fortran-hook.sh + ++ lib.optional (targetPlatform.isWindows) (stdenv.mkDerivation { + name = "win-dll-hook.sh"; + dontUnpack = true; + installPhase = '' + echo addToSearchPath "LINK_DLL_FOLDERS" "${cc_solib}/lib" > $out + echo addToSearchPath "LINK_DLL_FOLDERS" "${cc_solib}/lib64" >> $out + echo addToSearchPath "LINK_DLL_FOLDERS" "${cc_solib}/lib32" >> $out + ''; + }); + + postFixup = + # Ensure flags files exists, as some other programs cat them. 
(That these + # are considered an exposed interface is a bit dubious, but fine for now.) + '' + touch "$out/nix-support/cc-cflags" + touch "$out/nix-support/cc-ldflags" + '' + + # Backwards compatibility for packages expecting this file, e.g. with + # `$NIX_CC/nix-support/dynamic-linker`. + # + # TODO(@Ericson2314): Remove this after stable release and force + # everyone to refer to bintools-wrapper directly. + + '' + if [[ -f "$bintools/nix-support/dynamic-linker" ]]; then + ln -s "$bintools/nix-support/dynamic-linker" "$out/nix-support" + fi + if [[ -f "$bintools/nix-support/dynamic-linker-m32" ]]; then + ln -s "$bintools/nix-support/dynamic-linker-m32" "$out/nix-support" + fi + '' + + ## + ## GCC libs for non-GCC support + ## + + optionalString (useGccForLibs && isClang) '' + + echo "-B${gccForLibs}/lib/gcc/${targetPlatform.config}/${gccForLibs.version}" >> $out/nix-support/cc-cflags + '' + + optionalString useGccForLibs '' + echo "-L${gccForLibs}/lib/gcc/${targetPlatform.config}/${gccForLibs.version}" >> $out/nix-support/cc-ldflags + echo "-L${gccForLibs_solib}/lib" >> $out/nix-support/cc-ldflags + '' + + # TODO We would like to connect this to `useGccForLibs`, but we cannot yet + # because `libcxxStdenv` on linux still needs this. Maybe someday we'll + # always set `useLLVM` on Darwin, and maybe also break down `useLLVM` into + # fine-grained use flags (libgcc vs compiler-rt, ld.lld vs legacy, libc++ + # vs libstdc++, etc.) since Darwin isn't `useLLVM` on all counts. (See + # https://clang.llvm.org/docs/Toolchain.html for all the axes one might + # break `useLLVM` into.) + + optionalString (isClang + && targetPlatform.isLinux + && !(stdenv.targetPlatform.useAndroidPrebuilt or false) + && !(stdenv.targetPlatform.useLLVM or false) + && gccForLibs != null) ('' + echo "--gcc-toolchain=${gccForLibs}" >> $out/nix-support/cc-cflags + + # Pull in 'cc.out' target to get 'libstdc++fs.a'. It should be in + # 'cc.lib'. But it's a gcc package bug. + # TODO(trofi): remove once gcc is fixed to move libraries to .lib output. + echo "-L${gccForLibs}/${optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}"}/lib" >> $out/nix-support/cc-ldflags + '' + # this ensures that when clang passes -lgcc_s to lld (as it does + # when building e.g. firefox), lld is able to find libgcc_s.so + + concatMapStrings (libgcc: '' + echo "-L${libgcc}/lib" >> $out/nix-support/cc-ldflags + '') (lib.toList (gccForLibs.libgcc or []))) + + ## + ## General libc support + ## + + # The "-B${libc_lib}/lib/" flag is a quick hack to force gcc to link + # against the crt1.o from our own glibc, rather than the one in + # /usr/lib. (This is only an issue when using an `impure' + # compiler/linker, i.e., one that searches /usr/lib and so on.) + # + # Unfortunately, setting -B appears to override the default search + # path. Thus, the gcc-specific "../includes-fixed" directory is + # now longer searched and glibc's <limits.h> header fails to + # compile, because it uses "#include_next <limits.h>" to find the + # limits.h file in ../includes-fixed. To remedy the problem, + # another -idirafter is necessary to add that directory again. 
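+  # Rough shape of the generated flag files (hypothetical store paths):
+  #   libc-crt1-cflags: -B/nix/store/...-glibc/lib/
+  #   libc-cflags:      -idirafter /nix/store/...-glibc-dev/include (plus GCC's include-fixed dirs on GNU)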
+ + optionalString (libc != null) ('' + touch "$out/nix-support/libc-cflags" + touch "$out/nix-support/libc-ldflags" + echo "-B${libc_lib}${libc.libdir or "/lib/"}" >> $out/nix-support/libc-crt1-cflags + '' + optionalString (!(cc.langD or false)) '' + echo "-idirafter ${libc_dev}${libc.incdir or "/include"}" >> $out/nix-support/libc-cflags + '' + optionalString (isGNU && (!(cc.langD or false))) '' + for dir in "${cc}"/lib/gcc/*/*/include-fixed; do + echo '-idirafter' ''${dir} >> $out/nix-support/libc-cflags + done + '' + '' + + echo "${libc_lib}" > $out/nix-support/orig-libc + echo "${libc_dev}" > $out/nix-support/orig-libc-dev + '' + # fortify-headers is a set of wrapper headers that augment libc + # and use #include_next to pass through to libc's true + # implementations, so must appear before them in search order. + # in theory a correctly placed -idirafter could be used, but in + # practice the compiler may have been built with a --with-headers + # like option that forces the libc headers before all -idirafter, + # hence -isystem here. + + optionalString includeFortifyHeaders' '' + echo "-isystem ${fortify-headers}/include" >> $out/nix-support/libc-cflags + '') + + ## + ## General libc++ support + ## + + # We have a libc++ directly, we have one via "smuggled" GCC, or we have one + # bundled with the C compiler because it is GCC + + optionalString (libcxx != null || (useGccForLibs && gccForLibs.langCC or false) || (isGNU && cc.langCC or false)) '' + touch "$out/nix-support/libcxx-cxxflags" + touch "$out/nix-support/libcxx-ldflags" + '' + # Adding -isystem flags should be done only for clang; gcc + # already knows how to find its own libstdc++, and adding + # additional -isystem flags will confuse gfortran (see + # https://github.com/NixOS/nixpkgs/pull/209870#issuecomment-1500550903) + + optionalString (libcxx == null && isClang && (useGccForLibs && gccForLibs.langCC or false)) '' + for dir in ${gccForLibs}${lib.optionalString (hostPlatform != targetPlatform) "/${targetPlatform.config}"}/include/c++/*; do + echo "-isystem $dir" >> $out/nix-support/libcxx-cxxflags + done + for dir in ${gccForLibs}${lib.optionalString (hostPlatform != targetPlatform) "/${targetPlatform.config}"}/include/c++/*/${targetPlatform.config}; do + echo "-isystem $dir" >> $out/nix-support/libcxx-cxxflags + done + '' + + optionalString (libcxx.isLLVM or false) '' + echo "-isystem ${lib.getDev libcxx}/include/c++/v1" >> $out/nix-support/libcxx-cxxflags + echo "-isystem ${lib.getDev libcxx.cxxabi}/include/c++/v1" >> $out/nix-support/libcxx-cxxflags + echo "-stdlib=libc++" >> $out/nix-support/libcxx-ldflags + echo "-l${libcxx.cxxabi.libName}" >> $out/nix-support/libcxx-ldflags + '' + + ## + ## Initial CFLAGS + ## + + # GCC shows ${cc_solib}/lib in `gcc -print-search-dirs', but not + # ${cc_solib}/lib64 (even though it does actually search there...).. + # This confuses libtool. So add it to the compiler tool search + # path explicitly. + + optionalString (!nativeTools) '' + if [ -e "${cc_solib}/lib64" -a ! 
-L "${cc_solib}/lib64" ]; then + ccLDFlags+=" -L${cc_solib}/lib64" + ccCFlags+=" -B${cc_solib}/lib64" + fi + ccLDFlags+=" -L${cc_solib}/lib" + ccCFlags+=" -B${cc_solib}/lib" + + '' + optionalString cc.langAda or false '' + touch "$out/nix-support/gnat-cflags" + touch "$out/nix-support/gnat-ldflags" + basePath=$(echo $cc/lib/*/*/*) + ccCFlags+=" -B$basePath -I$basePath/adainclude" + gnatCFlags="-I$basePath/adainclude -I$basePath/adalib" + + echo "$gnatCFlags" >> $out/nix-support/gnat-cflags + '' + '' + echo "$ccLDFlags" >> $out/nix-support/cc-ldflags + echo "$ccCFlags" >> $out/nix-support/cc-cflags + '' + optionalString (targetPlatform.isDarwin && (libcxx != null) && (cc.isClang or false)) '' + echo " -L${lib.getLib libcxx}/lib" >> $out/nix-support/cc-ldflags + '' + + ## + ## Man page and info support + ## + + optionalString propagateDoc '' + ln -s ${cc.man} $man + ln -s ${cc.info} $info + '' + optionalString (cc.langD or cc.langJava or false) '' + echo "-B${zlib}${zlib.libdir or "/lib/"}" >> $out/nix-support/libc-cflags + '' + + ## + ## Hardening support + ## + + '' + export hardening_unsupported_flags="${builtins.concatStringsSep " " (cc.hardeningUnsupportedFlags or [])}" + '' + + # Machine flags. These are necessary to support + + # TODO: We should make a way to support miscellaneous machine + # flags and other gcc flags as well. + + # Always add -march based on cpu in triple. Sometimes there is a + # discrepency (x86_64 vs. x86-64), so we provide an "arch" arg in + # that case. + # TODO: aarch64-darwin has mcpu incompatible with gcc + + optionalString ((targetPlatform ? gcc.arch) && (isClang || !(stdenv.isDarwin && stdenv.isAarch64)) && + isGccArchSupported targetPlatform.gcc.arch) '' + echo "-march=${targetPlatform.gcc.arch}" >> $out/nix-support/cc-cflags-before + '' + + # -mcpu is not very useful, except on PowerPC where it is used + # instead of march. On all other platforms you should use mtune + # and march instead. + # TODO: aarch64-darwin has mcpu incompatible with gcc + + optionalString ((targetPlatform ? gcc.cpu) && (isClang || !(stdenv.isDarwin && stdenv.isAarch64))) '' + echo "-mcpu=${targetPlatform.gcc.cpu}" >> $out/nix-support/cc-cflags-before + '' + + # -mfloat-abi only matters on arm32 but we set it here + # unconditionally just in case. If the abi specifically sets hard + # vs. soft floats we use it here. + + optionalString (targetPlatform ? gcc.float-abi) '' + echo "-mfloat-abi=${targetPlatform.gcc.float-abi}" >> $out/nix-support/cc-cflags-before + '' + + optionalString (targetPlatform ? gcc.fpu) '' + echo "-mfpu=${targetPlatform.gcc.fpu}" >> $out/nix-support/cc-cflags-before + '' + + optionalString (targetPlatform ? gcc.mode) '' + echo "-mmode=${targetPlatform.gcc.mode}" >> $out/nix-support/cc-cflags-before + '' + + optionalString (targetPlatform ? gcc.thumb) '' + echo "-m${if targetPlatform.gcc.thumb then "thumb" else "arm"}" >> $out/nix-support/cc-cflags-before + '' + + (let tune = if targetPlatform ? 
gcc.tune + then findBestTuneApproximation targetPlatform.gcc.tune + else null; + in optionalString (tune != null) '' + echo "-mtune=${tune}" >> $out/nix-support/cc-cflags-before + '') + + # TODO: categorize these and figure out a better place for them + + optionalString targetPlatform.isWindows '' + hardening_unsupported_flags+=" pic" + '' + optionalString targetPlatform.isMinGW '' + hardening_unsupported_flags+=" stackprotector fortify" + '' + optionalString targetPlatform.isAvr '' + hardening_unsupported_flags+=" stackprotector pic" + '' + optionalString (targetPlatform.libc == "newlib" || targetPlatform.libc == "newlib-nano") '' + hardening_unsupported_flags+=" stackprotector fortify pie pic" + '' + optionalString (targetPlatform.libc == "musl" && targetPlatform.isx86_32) '' + hardening_unsupported_flags+=" stackprotector" + '' + optionalString targetPlatform.isNetBSD '' + hardening_unsupported_flags+=" stackprotector fortify" + '' + optionalString cc.langAda or false '' + hardening_unsupported_flags+=" format stackprotector strictoverflow" + '' + optionalString cc.langD or false '' + hardening_unsupported_flags+=" format" + '' + optionalString cc.langFortran or false '' + hardening_unsupported_flags+=" format" + '' + optionalString targetPlatform.isWasm '' + hardening_unsupported_flags+=" stackprotector fortify pie pic" + '' + optionalString targetPlatform.isMicroBlaze '' + hardening_unsupported_flags+=" stackprotector" + '' + + + optionalString (libc != null && targetPlatform.isAvr) '' + for isa in avr5 avr3 avr4 avr6 avr25 avr31 avr35 avr51 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 tiny-stack; do + echo "-B${getLib libc}/avr/lib/$isa" >> $out/nix-support/libc-crt1-cflags + done + '' + + + optionalString stdenv.targetPlatform.isDarwin '' + echo "-arch ${targetPlatform.darwinArch}" >> $out/nix-support/cc-cflags + '' + + + optionalString targetPlatform.isAndroid '' + echo "-D__ANDROID_API__=${targetPlatform.sdkVer}" >> $out/nix-support/cc-cflags + '' + + # There are a few tools (to name one libstdcxx5) which do not work + # well with multi line flags, so make the flags single line again + + '' + for flags in "$out/nix-support"/*flags*; do + substituteInPlace "$flags" --replace $'\n' ' ' + done + + substituteAll ${./add-flags.sh} $out/nix-support/add-flags.sh + substituteAll ${./add-hardening.sh} $out/nix-support/add-hardening.sh + substituteAll ${../wrapper-common/utils.bash} $out/nix-support/utils.bash + '' + + + optionalString cc.langAda or false '' + substituteAll ${./add-gnat-extra-flags.sh} $out/nix-support/add-gnat-extra-flags.sh + '' + + ## + ## General Clang support + ## Needs to go after ^ because the for loop eats \n and makes this file an invalid script + ## + + optionalString isClang '' + export defaultTarget=${targetPlatform.config} + substituteAll ${./add-clang-cc-cflags-before.sh} $out/nix-support/add-local-cc-cflags-before.sh + '' + + ## + ## Extra custom steps + ## + + extraBuildCommands + + lib.strings.concatStringsSep "; " + (lib.attrsets.mapAttrsToList + (name: value: "echo ${toString value} >> $out/nix-support/${name}") + nixSupport); + + + env = { + inherit isClang; + + # for substitution in utils.bash + expandResponseParams = "${expand-response-params}/bin/expand-response-params"; + shell = getBin shell + shell.shellPath or ""; + gnugrep_bin = optionalString (!nativeTools) gnugrep; + # stdenv.cc.cc should not be null and we have nothing better for now. + # if the native impure bootstrap is gotten rid of this can become `inherit cc;` again. 
+ cc = optionalString (!nativeTools) cc; + wrapperName = "CC_WRAPPER"; + inherit suffixSalt coreutils_bin bintools; + inherit libc_bin libc_dev libc_lib; + inherit darwinPlatformForCC darwinMinVersion darwinMinVersionVariable; + }; + + meta = + let cc_ = lib.optionalAttrs (cc != null) cc; in + (lib.optionalAttrs (cc_ ? meta) (removeAttrs cc.meta ["priority"])) // + { description = + lib.attrByPath ["meta" "description"] "System C compiler" cc_ + + " (wrapper script)"; + priority = 10; + mainProgram = if name != "" then name else ccName; + }; +} diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/fortran-hook.sh b/nixpkgs/pkgs/build-support/cc-wrapper/fortran-hook.sh new file mode 100644 index 000000000000..d72f314c01ce --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/fortran-hook.sh @@ -0,0 +1,11 @@ +getTargetRole +getTargetRoleWrapper + +export FC${role_post}=@named_fc@ + +# If unset, assume the default hardening flags. +# These are different for fortran. +: ${NIX_HARDENING_ENABLE="stackprotector pic strictoverflow relro bindnow"} +export NIX_HARDENING_ENABLE + +unset -v role_post diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/gnat-wrapper.sh b/nixpkgs/pkgs/build-support/cc-wrapper/gnat-wrapper.sh new file mode 100644 index 000000000000..e75eb3eb1ebf --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/gnat-wrapper.sh @@ -0,0 +1,182 @@ +#! @shell@ +set -eu -o pipefail +o posix +shopt -s nullglob + +if (( "${NIX_DEBUG:-0}" >= 7 )); then + set -x +fi + +path_backup="$PATH" + +# That @-vars are substituted separately from bash evaluation makes +# shellcheck think this, and others like it, are useless conditionals. +# shellcheck disable=SC2157 +if [[ -n "@coreutils_bin@" && -n "@gnugrep_bin@" ]]; then + PATH="@coreutils_bin@/bin:@gnugrep_bin@/bin" +fi + +cInclude=0 + +source @out@/nix-support/utils.bash + +# Flirting with a layer violation here. +if [ -z "${NIX_BINTOOLS_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then + source @bintools@/nix-support/add-flags.sh +fi + +# Put this one second so libc ldflags take priority. +if [ -z "${NIX_CC_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then + source @out@/nix-support/add-flags.sh +fi + +if [ -z "${NIX_GNAT_WRAPPER_EXTRA_FLAGS_SET_@suffixSalt@:-}" ]; then + source @out@/nix-support/add-gnat-extra-flags.sh +fi + +# Parse command line options and set several variables. +# For instance, figure out if linker flags should be passed. +# GCC prints annoying warnings when they are not needed. +dontLink=0 +nonFlagArgs=0 +# shellcheck disable=SC2193 + +expandResponseParams "$@" +declare -i n=0 +nParams=${#params[@]} +while (( "$n" < "$nParams" )); do + p=${params[n]} + p2=${params[n+1]:-} # handle `p` being last one + if [ "$p" = -c ]; then + dontLink=1 + elif [ "$p" = -S ]; then + dontLink=1 + elif [ "$p" = -E ]; then + dontLink=1 + elif [ "$p" = -E ]; then + dontLink=1 + elif [ "$p" = -M ]; then + dontLink=1 + elif [ "$p" = -MM ]; then + dontLink=1 + elif [[ "$p" = -x && "$p2" = *-header ]]; then + dontLink=1 + elif [[ "$p" != -?* ]]; then + # A dash alone signifies standard input; it is not a flag + nonFlagArgs=1 + fi + n+=1 +done + +# If we pass a flag like -Wl, then gcc will call the linker unless it +# can figure out that it has to do something else (e.g., because of a +# "-c" flag). So if no non-flag arguments are given, don't pass any +# linker flags. This catches cases like "gcc" (should just print +# "gcc: no input files") and "gcc -v" (should print the version). 
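+# E.g. a bare `gnatmake -v` passes only flags, so nonFlagArgs stays 0 and
+# dontLink is forced to 1 here (illustrative).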
+if [ "$nonFlagArgs" = 0 ]; then + dontLink=1 +fi + +# Optionally filter out paths not refering to the store. +if [[ "${NIX_ENFORCE_PURITY:-}" = 1 && -n "$NIX_STORE" ]]; then + rest=() + nParams=${#params[@]} + declare -i n=0 + while (( "$n" < "$nParams" )); do + p=${params[n]} + p2=${params[n+1]:-} # handle `p` being last one + if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then + skip "${p:2}" + elif [ "$p" = -L ] && badPath "$p2"; then + n+=1; skip "$p2" + elif [ "${p:0:3}" = -I/ ] && badPath "${p:2}"; then + skip "${p:2}" + elif [ "$p" = -I ] && badPath "$p2"; then + n+=1; skip "$p2" + elif [ "${p:0:4}" = -aI/ ] && badPath "${p:3}"; then + skip "${p:3}" + elif [ "$p" = -aI ] && badPath "$p2"; then + n+=1; skip "$p2" + elif [ "${p:0:4}" = -aO/ ] && badPath "${p:3}"; then + skip "${p:3}" + elif [ "$p" = -aO ] && badPath "$p2"; then + n+=1; skip "$p2" + elif [ "$p" = -isystem ] && badPath "$p2"; then + n+=1; skip "$p2" + else + rest+=("$p") + fi + n+=1 + done + # Old bash empty array hack + params=(${rest+"${rest[@]}"}) +fi + + +# Clear march/mtune=native -- they bring impurity. +if [ "$NIX_ENFORCE_NO_NATIVE_@suffixSalt@" = 1 ]; then + rest=() + # Old bash empty array hack + for p in ${params+"${params[@]}"}; do + if [[ "$p" = -m*=native ]]; then + skip "$p" + else + rest+=("$p") + fi + done + # Old bash empty array hack + params=(${rest+"${rest[@]}"}) +fi + +case "$(basename $0)x" in + "gnatbindx") + extraBefore=() + extraAfter=($NIX_GNATFLAGS_COMPILE_@suffixSalt@) + ;; + "gnatchopx") + extraBefore=("--GCC=@out@/bin/gcc") + extraAfter=() + ;; + "gnatcleanx") + extraBefore=($NIX_GNATFLAGS_COMPILE_@suffixSalt@) + extraAfter=() + ;; + "gnatlinkx") + extraBefore=() + extraAfter=("--GCC=@out@/bin/gcc") + ;; + "gnatlsx") + extraBefore=() + extraAfter=($NIX_GNATFLAGS_COMPILE_@suffixSalt@) + ;; + "gnatmakex") + extraBefore=("--GNATBIND=@out@/bin/gnatbind" "--GNATLINK=@out@/bin/gnatlink") + extraAfter=($NIX_GNATFLAGS_COMPILE_@suffixSalt@ -cargs $NIX_GNATMAKE_CARGS_@suffixSalt@) + ;; +esac + +# As a very special hack, if the arguments are just `-v', then don't +# add anything. This is to prevent `gcc -v' (which normally prints +# out the version number and returns exit code 0) from printing out +# `No input files specified' and returning exit code 1. +if [ "$*" = -v ]; then + extraAfter=() + extraBefore=() +fi + +# Optionally print debug info. +if (( "${NIX_DEBUG:-0}" >= 1 )); then + # Old bash workaround, see ld-wrapper for explanation. + echo "extra flags before to @prog@:" >&2 + printf " %q\n" ${extraBefore+"${extraBefore[@]}"} >&2 + echo "original flags to @prog@:" >&2 + printf " %q\n" ${params+"${params[@]}"} >&2 + echo "extra flags after to @prog@:" >&2 + printf " %q\n" ${extraAfter+"${extraAfter[@]}"} >&2 +fi + +PATH="$path_backup" +# Old bash workaround, see above. +exec @prog@ \ + ${extraBefore+"${extraBefore[@]}"} \ + ${params+"${params[@]}"} \ + ${extraAfter+"${extraAfter[@]}"} diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/go-wrapper.sh b/nixpkgs/pkgs/build-support/cc-wrapper/go-wrapper.sh new file mode 100644 index 000000000000..5dfc5d2b73fc --- /dev/null +++ b/nixpkgs/pkgs/build-support/cc-wrapper/go-wrapper.sh @@ -0,0 +1,11 @@ +#! 
@shell@
+set -eu -o pipefail +o posix
+shopt -s nullglob
+
+if (( "${NIX_DEBUG:-0}" >= 7 )); then
+ set -x
+fi
+
+export GCCGO="@out@/bin/gccgo"
+
+exec @prog@ "$@"
diff --git a/nixpkgs/pkgs/build-support/cc-wrapper/setup-hook.sh b/nixpkgs/pkgs/build-support/cc-wrapper/setup-hook.sh
new file mode 100644
index 000000000000..9326d76e2a8f
--- /dev/null
+++ b/nixpkgs/pkgs/build-support/cc-wrapper/setup-hook.sh
@@ -0,0 +1,118 @@
+# CC Wrapper hygiene
+#
+# For at least cross compilation, we need to depend on multiple cc-wrappers at
+# once---specifically up to one per sort of dependency. This follows from having
+# different tools targeting different platforms, and different flags for those
+# tools. For example:
+#
+# # Flags for compiling (whether or not linking) C code for the...
+# NIX_CFLAGS_COMPILE_FOR_BUILD # ...build platform
+# NIX_CFLAGS_COMPILE # ...host platform
+# NIX_CFLAGS_COMPILE_FOR_TARGET # ...target platform
+#
+# Notice that these platforms are the 3 *relative* to the package using
+# cc-wrapper, not absolute like `x86_64-pc-linux-gnu`.
+#
+# The simplest solution would be to have separate cc-wrappers per (3 intended
+# use-cases * n absolute concrete platforms). For the use-case axis, we would
+# @-splice in 'BUILD_' '' 'TARGET_' to use the right environment variables when
+# building the cc-wrapper, and likewise prefix the binaries' names so they didn't
+# clobber each other on the PATH. But the need for 3x cc-wrappers, along with
+# non-standard name prefixes, is annoying and liable to break packages' build
+# systems.
+#
+# Instead, we opt to have just one cc-wrapper per absolute platform. Matching
+# convention, the binaries' names can just be prefixed with their target
+# platform. On the other hand, that means packages will depend on not just
+# multiple cc-wrappers, but the exact same cc-wrapper derivation multiple ways.
+# That means the exact same cc-wrapper derivation must be able to avoid
+# conflicting with itself, despite the fact that `setup-hook.sh`, the `addCvars`
+# function, and `add-flags.sh` are all communicating with each other with
+# environment variables. Yuck.
+#
+# The basic strategy is:
+#
+# - Everyone exclusively *adds information* to relative-platform-specific
+# environment variables, like `NIX_CFLAGS_COMPILE_FOR_TARGET`, to communicate
+# with the wrapped binaries.
+#
+# - The wrapped binaries will exclusively *read* cc-wrapper-derivation-specific
+# environment variables distinguished with `suffixSalt`, like
+# `NIX_CFLAGS_COMPILE_@suffixSalt@`.
+#
+# - `add-flags`, beyond its old task of reading extra flags stuck inside the
+# cc-wrapper derivation, will convert the relative-platform-specific
+# variables to cc-wrapper-derivation-specific variables. This conversion is
+# the only time all but one of the cc-wrapper-derivation-specific variables
+# are set.
+#
+# This ensures the flow of information is exclusive from
+# relative-platform-specific variables to cc-wrapper-derivation-specific
+# variables. This allows us to support the general case of a many-to-many relation
+# between relative platforms and cc-wrapper derivations.
+#
+# For more details, read the individual files where the mechanisms used to
+# accomplish this will be individually documented.
+
+# Skip setup hook if we're neither a build-time dep, nor, temporarily, doing a
+# native compile.
+#
+# TODO(@Ericson2314): No native exception
+[[ -z ${strictDeps-} ]] || (( "$hostOffset" < 0 )) || return 0
+
+# It's fine that any other cc-wrapper will redefine this.
Bash functions close +# over no state, and there's no @-substitutions within, so any redefined +# function is guaranteed to be exactly the same. +ccWrapper_addCVars () { + # See ../setup-hooks/role.bash + local role_post + getHostRoleEnvHook + + if [ -d "$1/include" ]; then + export NIX_CFLAGS_COMPILE${role_post}+=" -isystem $1/include" + fi + + if [ -d "$1/Library/Frameworks" ]; then + export NIX_CFLAGS_COMPILE${role_post}+=" -iframework $1/Library/Frameworks" + fi +} + +# See ../setup-hooks/role.bash +getTargetRole +getTargetRoleWrapper + +# We use the `targetOffset` to choose the right env hook to accumulate the right +# sort of deps (those with that offset). +addEnvHooks "$targetOffset" ccWrapper_addCVars + +# Note 1: these come *after* $out in the PATH (see setup.sh). +# Note 2: phase separation makes this look useless to shellcheck. + +# shellcheck disable=SC2157 +if [ -n "@cc@" ]; then + addToSearchPath _PATH @cc@/bin +fi + +# shellcheck disable=SC2157 +if [ -n "@libc_bin@" ]; then + addToSearchPath _PATH @libc_bin@/bin +fi + +# shellcheck disable=SC2157 +if [ -n "@coreutils_bin@" ]; then + addToSearchPath _PATH @coreutils_bin@/bin +fi + +# Export tool environment variables so various build systems use the right ones. + +export NIX_CC${role_post}=@out@ + +export CC${role_post}=@named_cc@ +export CXX${role_post}=@named_cxx@ + +# If unset, assume the default hardening flags. +: ${NIX_HARDENING_ENABLE="fortify fortify3 stackprotector pic strictoverflow format relro bindnow"} +export NIX_HARDENING_ENABLE + +# No local scope in sourced file +unset -v role_post diff --git a/nixpkgs/pkgs/build-support/closure-info.nix b/nixpkgs/pkgs/build-support/closure-info.nix new file mode 100644 index 000000000000..6b3ff6fd62b0 --- /dev/null +++ b/nixpkgs/pkgs/build-support/closure-info.nix @@ -0,0 +1,36 @@ +# This derivation builds two files containing information about the +# closure of 'rootPaths': $out/store-paths contains the paths in the +# closure, and $out/registration contains a file suitable for use with +# "nix-store --load-db" and "nix-store --register-validity +# --hash-given". + +{ stdenv, buildPackages }: + +{ rootPaths }: + +assert builtins.langVersion >= 5; + +stdenv.mkDerivation { + name = "closure-info"; + + __structuredAttrs = true; + + exportReferencesGraph.closure = rootPaths; + + preferLocalBuild = true; + + PATH = "${buildPackages.coreutils}/bin:${buildPackages.jq}/bin"; + + builder = builtins.toFile "builder" + '' + . .attrs.sh + + out=''${outputs[out]} + + mkdir $out + + jq -r ".closure | map(.narSize) | add" < .attrs.json > $out/total-nar-size + jq -r '.closure | map([.path, .narHash, .narSize, "", (.references | length)] + .references) | add | map("\(.)\n") | add' < .attrs.json | head -n -1 > $out/registration + jq -r .closure[].path < .attrs.json > $out/store-paths + ''; +} diff --git a/nixpkgs/pkgs/build-support/coq/default.nix b/nixpkgs/pkgs/build-support/coq/default.nix new file mode 100644 index 000000000000..eb045ddf6865 --- /dev/null +++ b/nixpkgs/pkgs/build-support/coq/default.nix @@ -0,0 +1,132 @@ +{ lib, stdenv, coqPackages, coq, which, fetchzip }@args: +let lib = import ./extra-lib.nix {inherit (args) lib;}; in +with builtins; with lib; +let + isGitHubDomain = d: match "^github.*" d != null; + isGitLabDomain = d: match "^gitlab.*" d != null; +in +{ pname, + version ? null, + fetcher ? null, + owner ? "coq-community", + domain ? "github.com", + repo ? pname, + defaultVersion ? null, + releaseRev ? (v: v), + displayVersion ? {}, + release ? {}, + buildInputs ? 
[], + nativeBuildInputs ? [], + extraBuildInputs ? [], + extraNativeBuildInputs ? [], + overrideBuildInputs ? [], + overrideNativeBuildInputs ? [], + namePrefix ? [ "coq" ], + enableParallelBuilding ? true, + extraInstallFlags ? [], + setCOQBIN ? true, + mlPlugin ? false, + useMelquiondRemake ? null, + dropAttrs ? [], + keepAttrs ? [], + dropDerivationAttrs ? [], + useDuneifVersion ? (x: false), + useDune ? false, + opam-name ? (concatStringsSep "-" (namePrefix ++ [ pname ])), + ... +}@args: +let + args-to-remove = foldl (flip remove) ([ + "version" "fetcher" "repo" "owner" "domain" "releaseRev" + "displayVersion" "defaultVersion" "useMelquiondRemake" + "release" + "buildInputs" "nativeBuildInputs" + "extraBuildInputs" "extraNativeBuildInputs" + "overrideBuildInputs" "overrideNativeBuildInputs" + "namePrefix" + "meta" "useDuneifVersion" "useDune" "opam-name" + "extraInstallFlags" "setCOQBIN" "mlPlugin" + "dropAttrs" "dropDerivationAttrs" "keepAttrs" ] ++ dropAttrs) keepAttrs; + fetch = import ../coq/meta-fetch/default.nix + { inherit lib stdenv fetchzip; } ({ + inherit release releaseRev; + location = { inherit domain owner repo; }; + } // optionalAttrs (args?fetcher) {inherit fetcher;}); + fetched = fetch (if version != null then version else defaultVersion); + display-pkg = n: sep: v: + let d = displayVersion.${n} or (if sep == "" then ".." else true); in + n + optionalString (v != "" && v != null) (switch d [ + { case = true; out = sep + v; } + { case = "."; out = sep + versions.major v; } + { case = ".."; out = sep + versions.majorMinor v; } + { case = "..."; out = sep + versions.majorMinorPatch v; } + { case = isFunction; out = optionalString (d v != "") (sep + d v); } + { case = isString; out = optionalString (d != "") (sep + d); } + ] "") + optionalString (v == null) "-broken"; + append-version = p: n: p + display-pkg n "" coqPackages.${n}.version + "-"; + prefix-name = foldl append-version "" namePrefix; + useDune = args.useDune or (useDuneifVersion fetched.version); + coqlib-flags = switch coq.coq-version [ + { case = v: versions.isLe "8.6" v && v != "dev" ; + out = [ "COQLIB=$(out)/lib/coq/${coq.coq-version}/" ]; } + ] [ "COQLIBINSTALL=$(out)/lib/coq/${coq.coq-version}/user-contrib" + "COQPLUGININSTALL=$(OCAMLFIND_DESTDIR)" ]; + docdir-flags = switch coq.coq-version [ + { case = v: versions.isLe "8.6" v && v != "dev"; + out = [ "DOCDIR=$(out)/share/coq/${coq.coq-version}/" ]; } + ] [ "COQDOCINSTALL=$(out)/share/coq/${coq.coq-version}/user-contrib" ]; +in + +stdenv.mkDerivation (removeAttrs ({ + + name = prefix-name + (display-pkg pname "-" fetched.version); + + inherit (fetched) version src; + + nativeBuildInputs = args.overrideNativeBuildInputs + or ([ which ] + ++ optional useDune coq.ocamlPackages.dune_3 + ++ optionals (useDune || mlPlugin) [ coq.ocamlPackages.ocaml coq.ocamlPackages.findlib ] + ++ (args.nativeBuildInputs or []) ++ extraNativeBuildInputs); + buildInputs = args.overrideBuildInputs + or ([ coq ] ++ (args.buildInputs or []) ++ extraBuildInputs); + inherit enableParallelBuilding; + + meta = ({ platforms = coq.meta.platforms; } // + (switch domain [{ + case = pred.union isGitHubDomain isGitLabDomain; + out = { homepage = "https://${domain}/${owner}/${repo}"; }; + }] {}) // + optionalAttrs (fetched.broken or false) { coqFilter = true; broken = true; }) // + (args.meta or {}) ; + +} +// (optionalAttrs setCOQBIN { COQBIN = "${coq}/bin/"; }) +// (optionalAttrs (!args?installPhase && !args?useMelquiondRemake) { + installFlags = + coqlib-flags ++ docdir-flags ++ + 
extraInstallFlags; +}) +// (optionalAttrs useDune { + buildPhase = '' + runHook preBuild + dune build -p ${opam-name} ''${enableParallelBuilding:+-j $NIX_BUILD_CORES} + runHook postBuild + ''; + installPhase = '' + runHook preInstall + dune install ${opam-name} --prefix=$out + mv $out/lib/coq $out/lib/TEMPORARY + mkdir $out/lib/coq/ + mv $out/lib/TEMPORARY $out/lib/coq/${coq.coq-version} + runHook postInstall + ''; +}) +// (optionalAttrs (args?useMelquiondRemake) rec { + COQUSERCONTRIB = "$out/lib/coq/${coq.coq-version}/user-contrib"; + preConfigurePhases = "autoconf"; + configureFlags = [ "--libdir=${COQUSERCONTRIB}/${useMelquiondRemake.logpath or ""}" ]; + buildPhase = "./remake -j$NIX_BUILD_CORES"; + installPhase = "./remake install"; +}) +// (removeAttrs args args-to-remove)) dropDerivationAttrs) diff --git a/nixpkgs/pkgs/build-support/coq/extra-lib.nix b/nixpkgs/pkgs/build-support/coq/extra-lib.nix new file mode 100644 index 000000000000..3c226b4920b6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/coq/extra-lib.nix @@ -0,0 +1,193 @@ +{ lib }: +with builtins; with lib; recursiveUpdate lib (rec { + + versions = + let + truncate = n: v: concatStringsSep "." (take n (splitVersion v)); + opTruncate = op: v0: v: let n = length (splitVersion v0); in + op (truncate n v) (truncate n v0); + in rec { + + /* Get string of the first n parts of a version string. + + Example: + - truncate 2 "1.2.3-stuff" + => "1.2" + + - truncate 4 "1.2.3-stuff" + => "1.2.3.stuff" + */ + + inherit truncate; + + /* Get string of the first three parts (major, minor and patch) + of a version string. + + Example: + majorMinorPatch "1.2.3-stuff" + => "1.2.3" + */ + majorMinorPatch = truncate 3; + + /* Version comparison predicates, + - isGe v0 v <-> v is greater or equal than v0 [*] + - isLe v0 v <-> v is lesser or equal than v0 [*] + - isGt v0 v <-> v is strictly greater than v0 [*] + - isLt v0 v <-> v is strictly lesser than v0 [*] + - isEq v0 v <-> v is equal to v0 [*] + - range low high v <-> v is between low and high [**] + + [*] truncating v to the same number of digits as v0 + [**] truncating v to low for the lower bound and high for the upper bound + + Examples: + - isGe "8.10" "8.10.1" + => true + - isLe "8.10" "8.10.1" + => true + - isGt "8.10" "8.10.1" + => false + - isGt "8.10.0" "8.10.1" + => true + - isEq "8.10" "8.10.1" + => true + - range "8.10" "8.11" "8.11.1" + => true + - range "8.10" "8.11+" "8.11.0" + => false + - range "8.10" "8.11+" "8.11+beta1" + => false + + */ + isGe = opTruncate versionAtLeast; + isGt = opTruncate (flip versionOlder); + isLe = opTruncate (flip versionAtLeast); + isLt = opTruncate versionOlder; + isEq = opTruncate pred.equal; + range = low: high: pred.inter (versions.isGe low) (versions.isLe high); + }; + + /* Returns a list of list, splitting it using a predicate. + This is analoguous to builtins.split sep list, + with a predicate as a separator and a list instead of a string. + + Type: splitList :: (a -> bool) -> [a] -> [[a]] + + Example: + splitList (x: x == "x") [ "y" "x" "z" "t" ] + => [ [ "y" ] "x" [ "z" "t" ] ] + */ + splitList = pred: l: # put in file lists + let loop = (vv: v: l: if l == [] then vv ++ [v] + else let hd = head l; tl = tail l; in + if pred hd then loop (vv ++ [ v hd ]) [] tl else loop vv (v ++ [hd]) tl); + in loop [] [] l; + + pred = { + /* Predicate intersection, union, and complement */ + inter = p: q: x: p x && q x; + union = p: q: x: p x || q x; + compl = p: x: ! 
p x; + true = p: true; + false = p: false; + + /* predicate "being equal to y" */ + equal = y: x: x == y; + }; + + /* Emulate a "switch - case" construct, + instead of relying on `if then else if ...` */ + /* Usage: + ```nix + switch-if [ + if-clause-1 + .. + if-clause-k + ] default-out + ``` + where a if-clause has the form `{ cond = b; out = r; }` + the first branch such as `b` is true */ + + switch-if = c: d: (findFirst (getAttr "cond") {} c).out or d; + + /* Usage: + ```nix + switch x [ + simple-clause-1 + .. + simple-clause-k + ] default-out + ``` + where a simple-clause has the form `{ case = p; out = r; }` + the first branch such as `p x` is true + or + ```nix + switch [ x1 .. xn ] [ + complex-clause-1 + .. + complex-clause-k + ] default-out + ``` + where a complex-clause is either a simple-clause + or has the form { cases = [ p1 .. pn ]; out = r; } + in which case the first branch such as all `pi x` are true + + if the variables p are not functions, + they are converted to a equal p + if out is missing the default-out is taken */ + + switch = var: clauses: default: with pred; let + compare = f: if isFunction f then f else equal f; + combine = cl: var: + if cl?case then compare cl.case var + else all (equal true) (zipListsWith compare cl.cases var); in + switch-if (map (cl: { cond = combine cl var; inherit (cl) out; }) clauses) default; + + /* Override arguments to mkCoqDerivation for a Coq library. + + This function allows you to easily override arguments to mkCoqDerivation, + even when they are not exposed by the Coq library directly. + + Type: overrideCoqDerivation :: AttrSet -> CoqLibraryDerivation -> CoqLibraryDerivation + + Example: + + ```nix + coqPackages.lib.overrideCoqDerivation + { + defaultVersion = "9999"; + release."9999".sha256 = "1lq8x86vd3vqqh2yq6hvyagpnhfq5wmk5pg2z0xq7b7dbbbhyfkw"; + } + coqPackages.QuickChick; + ``` + + This example overrides the `defaultVersion` and `release` arguments that + are passed to `mkCoqDerivation` in the QuickChick derivation. + + Note that there is a difference between using `.override` on a Coq + library vs this `overrideCoqDerivation` function. `.override` allows you + to modify arguments to the derivation itself, for instance by passing + different versions of dependencies: + + ```nix + coqPackages.QuickChick.override { ssreflect = my-cool-ssreflect; } + ``` + + whereas `overrideCoqDerivation` allows you to override arguments to the + call to `mkCoqDerivation` in the Coq library. + + Note that all Coq libraries in Nixpkgs have a `version` argument for + easily using a different version. So if all you want to do is use a + different version, and the derivation for the Coq library already has + support for the version you want, you likely only need to update the + `version` argument on the library derivation. This is done with + `.override`: + + ```nix + coqPackages.QuickChick.override { version = "1.4.0"; } + ``` + */ + overrideCoqDerivation = f: drv: (drv.override (args: { + mkCoqDerivation = drv_: (args.mkCoqDerivation drv_).override f; + })); +}) diff --git a/nixpkgs/pkgs/build-support/coq/meta-fetch/default.nix b/nixpkgs/pkgs/build-support/coq/meta-fetch/default.nix new file mode 100644 index 000000000000..82c29fb760b7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/coq/meta-fetch/default.nix @@ -0,0 +1,69 @@ +{ lib, stdenv, fetchzip }@args: +let lib' = lib; in +let lib = import ../extra-lib.nix {lib = lib';}; in +with builtins; with lib; +let + default-fetcher = {domain ? "github.com", owner ? "", repo, rev, name ? 
"source", sha256 ? null, ...}@args: + let ext = if args?sha256 then "zip" else "tar.gz"; + fmt = if args?sha256 then "zip" else "tarball"; + pr = match "^#(.*)$" rev; + url = switch-if [ + { cond = pr == null && (match "^github.*" domain) != null; + out = "https://${domain}/${owner}/${repo}/archive/${rev}.${ext}"; } + { cond = pr != null && (match "^github.*" domain) != null; + out = "https://api.${domain}/repos/${owner}/${repo}/${fmt}/pull/${head pr}/head"; } + { cond = pr == null && (match "^gitlab.*" domain) != null; + out = "https://${domain}/${owner}/${repo}/-/archive/${rev}/${repo}-${rev}.${ext}"; } + { cond = (match "(www.)?mpi-sws.org" domain) != null; + out = "https://www.mpi-sws.org/~${owner}/${repo}/download/${repo}-${rev}.${ext}";} + ] (throw "meta-fetch: no fetcher found for domain ${domain} on ${rev}"); + fetch = x: if args?sha256 then fetchzip (x // { inherit sha256; }) else fetchTarball x; + in fetch { inherit url ; }; +in +{ + fetcher ? default-fetcher, + location, + release ? {}, + releaseRev ? (v: v), +}: +let isVersion = x: isString x && match "^/.*" x == null && release?${x}; + shortVersion = x: if (isString x && match "^/.*" x == null) + then findFirst (v: versions.majorMinor v == x) null + (sort versionAtLeast (attrNames release)) + else null; + isShortVersion = x: shortVersion x != null; + isPathString = x: isString x && match "^/.*" x != null && pathExists x; in +arg: +switch arg [ + { case = isNull; out = { version = "broken"; src = ""; broken = true; }; } + { case = isPathString; out = { version = "dev"; src = arg; }; } + { case = pred.union isVersion isShortVersion; + out = let v = if isVersion arg then arg else shortVersion arg; in + let + given-sha256 = release.${v}.sha256 or ""; + sha256 = if given-sha256 == "" then lib.fakeSha256 else given-sha256; + rv = release.${v} // { inherit sha256; }; in + { + version = rv.version or v; + src = rv.src or fetcher (location // { rev = releaseRev v; } // rv); + }; + } + { case = isString; + out = let + splitted = filter isString (split ":" arg); + rev = last splitted; + has-owner = length splitted > 1; + version = "dev"; in { + inherit version; + src = fetcher (location // { inherit rev; } // + (optionalAttrs has-owner { owner = head splitted; })); + }; } + { case = isAttrs; + out = { + version = arg.version or "dev"; + src = (arg.fetcher or fetcher) (location // (arg.location or {})); }; } + { case = isPath; + out = { + version = "dev" ; + src = builtins.path {path = arg; name = location.name or "source";}; }; } +] (throw "not a valid source description") diff --git a/nixpkgs/pkgs/build-support/dart/build-dart-application/default.nix b/nixpkgs/pkgs/build-support/dart/build-dart-application/default.nix new file mode 100644 index 000000000000..76328e5645f6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dart/build-dart-application/default.nix @@ -0,0 +1,123 @@ +{ lib, stdenv, callPackage, fetchDartDeps, runCommand, writeText, dartHooks, makeWrapper, dart, cacert, nodejs, darwin, jq }: + +{ sdkSetupScript ? "" +, pubGetScript ? "dart pub get" + + # Output type to produce. Can be any kind supported by dart + # https://dart.dev/tools/dart-compile#types-of-output + # If using jit, you might want to pass some arguments to `dartJitFlags` +, dartOutputType ? "exe" +, dartCompileCommand ? "dart compile" +, dartCompileFlags ? [ ] + # These come at the end of the command, useful to pass flags to the jit run +, dartJitFlags ? [ ] + + # Attrset of entry point files to build and install. 
+ # Where key is the final binary path and value is the source file path + # e.g. { "bin/foo" = "bin/main.dart"; } + # Set to null to read executables from pubspec.yaml +, dartEntryPoints ? null + # Used when wrapping aot, jit, kernel, and js builds. + # Set to null to disable wrapping. +, dartRuntimeCommand ? if dartOutputType == "aot-snapshot" then "${dart}/bin/dartaotruntime" + else if (dartOutputType == "jit-snapshot" || dartOutputType == "kernel") then "${dart}/bin/dart" + else if dartOutputType == "js" then "${nodejs}/bin/node" + else null + +, runtimeDependencies ? [ ] +, extraWrapProgramArgs ? "" +, customPackageOverrides ? { } +, autoDepsList ? false +, depsListFile ? null +, pubspecLockFile ? null +, vendorHash ? "" +, ... +}@args: + +let + dartDeps = (fetchDartDeps.override { + dart = runCommand "dart-fod" { nativeBuildInputs = [ makeWrapper ]; } '' + mkdir -p "$out/bin" + makeWrapper "${dart}/bin/dart" "$out/bin/dart" \ + --add-flags "--root-certs-file=${cacert}/etc/ssl/certs/ca-bundle.crt" + ''; + }) { + buildDrvArgs = args; + inherit sdkSetupScript pubGetScript vendorHash pubspecLockFile; + }; + inherit (dartHooks.override { inherit dart; }) dartConfigHook dartBuildHook dartInstallHook dartFixupHook; + + baseDerivation = stdenv.mkDerivation (finalAttrs: args // { + inherit sdkSetupScript pubGetScript dartCompileCommand dartOutputType + dartRuntimeCommand dartCompileFlags dartJitFlags runtimeDependencies; + + dartEntryPoints = + if (dartEntryPoints != null) + then writeText "entrypoints.json" (builtins.toJSON dartEntryPoints) + else null; + + runtimeDependencyLibraryPath = lib.makeLibraryPath finalAttrs.runtimeDependencies; + + nativeBuildInputs = (args.nativeBuildInputs or [ ]) ++ [ + dart + dartDeps + dartConfigHook + dartBuildHook + dartInstallHook + dartFixupHook + makeWrapper + jq + ] ++ lib.optionals stdenv.isDarwin [ + darwin.sigtool + ]; + + preUnpack = '' + ${lib.optionalString (!autoDepsList) '' + if ! { [ '${lib.boolToString (depsListFile != null)}' = 'true' ] ${lib.optionalString (depsListFile != null) "&& cmp -s <(jq -Sc . '${depsListFile}') <(jq -Sc . '${finalAttrs.passthru.dartDeps.depsListFile}')"}; }; then + echo 1>&2 -e '\nThe dependency list file was either not given or differs from the expected result.' \ + '\nPlease choose one of the following solutions:' \ + '\n - Duplicate the following file and pass it to the depsListFile argument.' \ + '\n ${finalAttrs.passthru.dartDeps.depsListFile}' \ + '\n - Set autoDepsList to true (not supported by Hydra or permitted in Nixpkgs)'. + exit 1 + fi + ''} + ${args.preUnpack or ""} + ''; + + # When stripping, it seems some ELF information is lost and the dart VM cli + # runs instead of the expected program. Don't strip if it's an exe output. + dontStrip = args.dontStrip or (dartOutputType == "exe"); + + passthru = { inherit dartDeps; } // (args.passthru or { }); + + meta = (args.meta or { }) // { platforms = args.meta.platforms or dart.meta.platforms; }; + }); + + packageOverrideRepository = (callPackage ../../../development/compilers/dart/package-overrides { }) // customPackageOverrides; + productPackages = builtins.filter (package: package.kind != "dev") + (if autoDepsList + then lib.importJSON dartDeps.depsListFile + else + if depsListFile == null + then [ ] + else lib.importJSON depsListFile); +in +assert !(builtins.isString dartOutputType && dartOutputType != "") -> +throw "dartOutputType must be a non-empty string"; +builtins.foldl' + (prev: package: + if packageOverrideRepository ? 
${package.name} + then + prev.overrideAttrs + (packageOverrideRepository.${package.name} { + inherit (package) + name + version + kind + source + dependencies; + }) + else prev) + baseDerivation + productPackages diff --git a/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-build-hook.sh b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-build-hook.sh new file mode 100644 index 000000000000..23ebfbd6e66e --- /dev/null +++ b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-build-hook.sh @@ -0,0 +1,34 @@ +# shellcheck shell=bash + +# Outputs line-separated "${dest}\t${source}" +_getDartEntryPoints() { + if [ -n "$dartEntryPoints" ]; then + @jq@ -r '(to_entries | map(.key + "\t" + .value) | join("\n"))' "$dartEntryPoints" + else + # The pubspec executables section follows the pattern: + # <output-bin-name>: [source-file-name] + # Where source-file-name defaults to output-bin-name if omited + @yq@ -r '(.executables | to_entries | map("bin/" + .key + "\t" + "bin/" + (.value // .key) + ".dart") | join("\n"))' pubspec.yaml + fi +} + +dartBuildHook() { + echo "Executing dartBuildHook" + + runHook preBuild + + while IFS=$'\t' read -ra target; do + dest="${target[0]}" + src="${target[1]}" + eval "$dartCompileCommand" "$dartOutputType" \ + -o "$dest" "${dartCompileFlags[@]}" "$src" "${dartJitFlags[@]}" + done < <(_getDartEntryPoints) + + runHook postBuild + + echo "Finished dartBuildHook" +} + +if [ -z "${dontDartBuild-}" ] && [ -z "${buildPhase-}" ]; then + buildPhase=dartBuildHook +fi diff --git a/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-config-hook.sh b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-config-hook.sh new file mode 100644 index 000000000000..f22d7d2ce64d --- /dev/null +++ b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-config-hook.sh @@ -0,0 +1,15 @@ +# shellcheck shell=bash + +dartConfigHook() { + echo "Executing dartConfigHook" + + echo "Setting up SDK" + eval "$sdkSetupScript" + + echo "Installing dependencies" + eval doPubGet "$pubGetScript" --offline + + echo "Finished dartConfigHook" +} + +postConfigureHooks+=(dartConfigHook) diff --git a/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-fixup-hook.sh b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-fixup-hook.sh new file mode 100644 index 000000000000..c5a9bedd0665 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-fixup-hook.sh @@ -0,0 +1,32 @@ +# shellcheck shell=bash + +dartFixupHook() { + echo "Executing dartFixupHook" + + declare -a wrapProgramArgs + + # Add runtime library dependencies to the LD_LIBRARY_PATH. + # For some reason, the RUNPATH of the executable is not used to load dynamic libraries in dart:ffi with DynamicLibrary.open(). + # + # This could alternatively be fixed with patchelf --add-needed, but this would cause all the libraries to be opened immediately, + # which is not what application authors expect. + echo "$runtimeDependencyLibraryPath" + if [[ ! -z "$runtimeDependencyLibraryPath" ]]; then + wrapProgramArgs+=(--suffix LD_LIBRARY_PATH : \"$runtimeDependencyLibraryPath\") + fi + + if [[ ! -z "$extraWrapProgramArgs" ]]; then + wrapProgramArgs+=("$extraWrapProgramArgs") + fi + + if [ ${#wrapProgramArgs[@]} -ne 0 ]; then + for f in "$out"/bin/*; do + echo "Wrapping $f..." 
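+            # eval is needed here so the quoting embedded in wrapProgramArgs above (e.g. around the runtime library path) is honoured when the command is re-parsed.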
+ eval "wrapProgram \"$f\" ${wrapProgramArgs[@]}" + done + fi + + echo "Finished dartFixupHook" +} + +postFixupHooks+=(dartFixupHook) diff --git a/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-install-hook.sh b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-install-hook.sh new file mode 100644 index 000000000000..1906bcfbca4c --- /dev/null +++ b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/dart-install-hook.sh @@ -0,0 +1,29 @@ +# shellcheck shell=bash + +dartInstallHook() { + echo "Executing dartInstallHook" + + runHook preInstall + + mkdir -p "$out" + + while IFS=$'\t' read -ra target; do + dest="${target[0]}" + # Wrap with runtime command, if it's defined + if [ -n "$dartRuntimeCommand" ]; then + install -D "$dest" "$out/share/$dest" + makeWrapper "$dartRuntimeCommand" "$out/$dest" \ + --add-flags "$out/share/$dest" + else + install -Dm755 "$dest" "$out/$dest" + fi + done < <(_getDartEntryPoints) + + runHook postInstall + + echo "Finished dartInstallHook" +} + +if [ -z "${dontDartInstall-}" ] && [ -z "${installPhase-}" ]; then + installPhase=dartInstallHook +fi diff --git a/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/default.nix b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/default.nix new file mode 100644 index 000000000000..134989426d96 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dart/build-dart-application/hooks/default.nix @@ -0,0 +1,18 @@ +{ lib, makeSetupHook, dart, yq, jq }: + +{ + dartConfigHook = makeSetupHook { + name = "dart-config-hook"; + } ./dart-config-hook.sh; + dartBuildHook = makeSetupHook { + name = "dart-build-hook"; + substitutions.yq = "${yq}/bin/yq"; + substitutions.jq = "${jq}/bin/jq"; + } ./dart-build-hook.sh; + dartInstallHook = makeSetupHook { + name = "dart-install-hook"; + } ./dart-install-hook.sh; + dartFixupHook = makeSetupHook { + name = "dart-fixup-hook"; + } ./dart-fixup-hook.sh; +} diff --git a/nixpkgs/pkgs/build-support/dart/fetch-dart-deps/default.nix b/nixpkgs/pkgs/build-support/dart/fetch-dart-deps/default.nix new file mode 100644 index 000000000000..51052cae18f4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dart/fetch-dart-deps/default.nix @@ -0,0 +1,203 @@ +{ stdenvNoCC +, lib +, makeSetupHook +, writeShellScriptBin +, dart +, git +, cacert +, jq +}: + +{ + # The output hash of the dependencies for this project. + vendorHash ? "" + # Commands to run once before using Dart or pub. +, sdkSetupScript ? "" + # Commands to run to populate the pub cache. +, pubGetScript ? "dart pub get" + # A path to a pubspec.lock file to use instead of the one in the source directory. +, pubspecLockFile ? null + # Arguments used in the derivation that builds the Dart package. + # Passing these is recommended to ensure that the same steps are made to prepare the sources in both this + # derivation and the one that builds the Dart package. +, buildDrvArgs ? { } +, ... +}@args: + +# This is a fixed-output derivation and setup hook that can be used to fetch dependencies for Dart projects. +# It is designed to be placed in the nativeBuildInputs of a derivation that builds a Dart package. +# Providing the buildDrvArgs argument is highly recommended. +let + buildDrvInheritArgNames = [ + "name" + "pname" + "version" + "src" + "sourceRoot" + "setSourceRoot" + "preUnpack" + "unpackPhase" + "unpackCmd" + "postUnpack" + "prePatch" + "patchPhase" + "patches" + "patchFlags" + "postPatch" + ]; + + buildDrvInheritArgs = builtins.foldl' + (attrs: arg: + if buildDrvArgs ? 
${arg} + then attrs // { ${arg} = buildDrvArgs.${arg}; } + else attrs) + { } + buildDrvInheritArgNames; + + drvArgs = buildDrvInheritArgs // (removeAttrs args [ "buildDrvArgs" ]); + name = (if drvArgs ? name then drvArgs.name else "${drvArgs.pname}-${drvArgs.version}"); + + deps = + stdenvNoCC.mkDerivation ({ + name = "${name}-dart-deps"; + + nativeBuildInputs = [ + dart + git + ]; + + # avoid pub phase + dontBuild = true; + + configurePhase = '' + # Configure the package cache + export PUB_CACHE="$out/cache/.pub-cache" + mkdir -p "$PUB_CACHE" + + ${sdkSetupScript} + ''; + + installPhase = '' + _pub_get() { + ${pubGetScript} + } + + # so we can use lock, diff yaml + mkdir -p "$out/pubspec" + cp "pubspec.yaml" "$out/pubspec" + ${lib.optionalString (pubspecLockFile != null) "install -m644 ${pubspecLockFile} pubspec.lock"} + if ! cp "pubspec.lock" "$out/pubspec"; then + echo 1>&2 -e '\nThe pubspec.lock file is missing. This is a requirement for reproducible builds.' \ + '\nThe following steps should be taken to fix this issue:' \ + '\n 1. If you are building an application, contact the developer(s).' \ + '\n The pubspec.lock file should be provided with the source code.' \ + '\n https://dart.dev/guides/libraries/private-files#pubspeclock' \ + '\n 2. An attempt to generate and print a compressed pubspec.lock file will be made now.' \ + '\n It is compressed with gzip and base64 encoded.' \ + '\n Paste it to a file and extract it with `base64 -d pubspec.lock.in | gzip -d > pubspec.lock`.' \ + '\n Provide the path to the pubspec.lock file in the pubspecLockFile argument.' \ + '\n This must be updated whenever the application is updated.' \ + '\n' + _pub_get + echo "" + gzip --to-stdout --best pubspec.lock | base64 1>&2 + echo 1>&2 -e '\nA gzipped pubspec.lock file has been printed. Please see the informational message above.' + exit 1 + fi + + _pub_get + + # nuke nondeterminism + + # Remove Git directories in the Git package cache - these are rarely used by Pub, + # which instead maintains a corresponsing mirror and clones cached packages through it. + # + # An exception is made to keep .git/pub-packages files, which are important. + # https://github.com/dart-lang/pub/blob/c890afa1d65b340fa59308172029680c2f8b0fc6/lib/src/source/git.dart#L621 + if [ -d "$PUB_CACHE"/git ]; then + find "$PUB_CACHE"/git -maxdepth 4 -path "*/.git/*" ! -name "pub-packages" -prune -exec rm -rf {} + + fi + + # Remove continuously updated package metadata caches + rm -rf "$PUB_CACHE"/hosted/*/.cache # Not pinned by pubspec.lock + rm -rf "$PUB_CACHE"/git/cache/*/* # Recreate this on the other end. See: https://github.com/dart-lang/pub/blob/c890afa1d65b340fa59308172029680c2f8b0fc6/lib/src/source/git.dart#L531 + + # Miscelaneous transient package cache files + rm -f "$PUB_CACHE"/README.md # May change with different Dart versions + rm -rf "$PUB_CACHE"/_temp # https://github.com/dart-lang/pub/blob/c890afa1d65b340fa59308172029680c2f8b0fc6/lib/src/system_cache.dart#L131 + rm -rf "$PUB_CACHE"/log # https://github.com/dart-lang/pub/blob/c890afa1d65b340fa59308172029680c2f8b0fc6/lib/src/command.dart#L348 + ''; + + GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt"; + SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt"; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ [ + "GIT_PROXY_COMMAND" + "NIX_GIT_SSL_CAINFO" + "SOCKS_SERVER" + ]; + + # Patching shebangs introduces input references to this fixed-output derivation. + # This triggers a bug in Nix, causing the output path to change unexpectedly. 
+ # https://github.com/NixOS/nix/issues/6660 + dontPatchShebangs = true; + + # The following operations are not generally useful for this derivation. + # If a package does contain some native components used at build time, + # please file an issue. + dontStrip = true; + dontMoveSbin = true; + dontPatchELF = true; + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = if vendorHash != "" then vendorHash else lib.fakeSha256; + } // (removeAttrs drvArgs [ "name" "pname" ])); + + depsListDrv = stdenvNoCC.mkDerivation ({ + name = "${name}-dart-deps-list.json"; + nativeBuildInputs = [ hook dart jq ]; + + configurePhase = '' + runHook preConfigure + doPubGet dart pub get --offline + runHook postConfigure + ''; + + buildPhase = '' + runHook preBuild + dart pub deps --json | jq .packages > $out + runHook postBuild + ''; + + dontInstall = true; + } // (removeAttrs buildDrvInheritArgs [ "name" "pname" ])); + + # As of Dart 3.0.0, Pub checks the revision of cached Git-sourced packages. + # Git must be wrapped to return a positive result, as the real .git directory is wiped + # to produce a deteministic dependency derivation output. + # https://github.com/dart-lang/pub/pull/3791/files#diff-1639c4669c428c26e68cfebd5039a33f87ba568795f2c058c303ca8528f62b77R631 + gitSourceWrapper = writeShellScriptBin "git" '' + args=("$@") + if [[ "''${args[0]}" == "rev-list" && "''${args[1]}" == "--max-count=1" ]]; then + revision="''${args[''${#args[@]}-1]}" + echo "$revision" + else + ${git}/bin/git "''${args[@]}" + fi + ''; + + hook = (makeSetupHook { + # The setup hook should not be part of the fixed-output derivation. + # Updates to the hook script should not change vendor hashes, and it won't + # work at all anyway due to https://github.com/NixOS/nix/issues/6660. + name = "${name}-dart-deps-setup-hook"; + substitutions = { inherit gitSourceWrapper deps; }; + propagatedBuildInputs = [ dart git ]; + passthru = { + files = deps.outPath; + depsListFile = depsListDrv.outPath; + }; + }) ./setup-hook.sh; +in +hook diff --git a/nixpkgs/pkgs/build-support/dart/fetch-dart-deps/setup-hook.sh b/nixpkgs/pkgs/build-support/dart/fetch-dart-deps/setup-hook.sh new file mode 100644 index 000000000000..689e0e8c5b5f --- /dev/null +++ b/nixpkgs/pkgs/build-support/dart/fetch-dart-deps/setup-hook.sh @@ -0,0 +1,46 @@ +preConfigureHooks+=(_setupPubCache) + +_setupPubCache() { + deps="@deps@" + + # Configure the package cache. + export PUB_CACHE="$(mktemp -d)" + mkdir -p "$PUB_CACHE" + + if [ -d "$deps/cache/.pub-cache/git" ]; then + # Link the Git package cache. + mkdir -p "$PUB_CACHE/git" + ln -s "$deps/cache/.pub-cache/git"/* "$PUB_CACHE/git" + + # Recreate the internal Git cache subdirectory. + # See: https://github.com/dart-lang/pub/blob/c890afa1d65b340fa59308172029680c2f8b0fc6/lib/src/source/git.dart#L339) + # Blank repositories are created instead of attempting to match the cache mirrors to checkouts. + # This is not an issue, as pub does not need the mirrors in the Flutter build process. + rm "$PUB_CACHE/git/cache" && mkdir "$PUB_CACHE/git/cache" + for mirror in $(ls -A "$deps/cache/.pub-cache/git/cache"); do + git --git-dir="$PUB_CACHE/git/cache/$mirror" init --bare --quiet + done + fi + + # Link the remaining package cache directories. + # At this point, any subdirectories that must be writable must have been taken care of. 
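+  # `comm -23` lists the entries that exist in the dependency cache but not yet in $PUB_CACHE (i.e. everything except the git directory handled above); each of them is symlinked in.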
+ for file in $(comm -23 <(ls -A "$deps/cache/.pub-cache") <(ls -A "$PUB_CACHE")); do + ln -s "$deps/cache/.pub-cache/$file" "$PUB_CACHE/$file" + done + + # ensure we're using a lockfile for the right package version + if [ ! -e pubspec.lock ]; then + cp -v "$deps/pubspec/pubspec.lock" . + # Sometimes the pubspec.lock will get opened in write mode, even when offline. + chmod u+w pubspec.lock + elif ! { diff -u pubspec.lock "$deps/pubspec/pubspec.lock" && diff -u pubspec.yaml "$deps/pubspec/pubspec.yaml"; }; then + echo 1>&2 -e 'The pubspec.lock or pubspec.yaml of the project derivation differs from the one in the dependency derivation.' \ + '\nYou most likely forgot to update the vendorHash while updating the sources.' + exit 1 + fi +} + +# Performs the given pub get command with an appropriate environment. +doPubGet() { + PATH="@gitSourceWrapper@/bin:$PATH" "$@" +} diff --git a/nixpkgs/pkgs/build-support/deterministic-uname/default.nix b/nixpkgs/pkgs/build-support/deterministic-uname/default.nix new file mode 100644 index 000000000000..164136c937b9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/deterministic-uname/default.nix @@ -0,0 +1,54 @@ +# expr and script based on our lsb_release +{ stdenv +, lib +, substituteAll +, coreutils +, getopt +, modDirVersion ? "" +}: + +substituteAll { + name = "uname"; + + src = ./deterministic-uname.sh; + + dir = "bin"; + isExecutable = true; + + inherit coreutils getopt; + + uSystem = if stdenv.buildPlatform.uname.system != null then stdenv.buildPlatform.uname.system else "unknown"; + inherit (stdenv.buildPlatform.uname) processor; + + # uname -o + # maybe add to lib/systems/default.nix uname attrset + # https://github.com/coreutils/coreutils/blob/7fc84d1c0f6b35231b0b4577b70aaa26bf548a7c/src/uname.c#L373-L374 + # https://stackoverflow.com/questions/61711186/where-does-host-operating-system-in-uname-c-comes-from + # https://github.com/coreutils/gnulib/blob/master/m4/host-os.m4 + operatingSystem = + if stdenv.buildPlatform.isLinux + then "GNU/Linux" + else if stdenv.buildPlatform.isDarwin + then "Darwin" # darwin isn't in host-os.m4 so where does this come from? + else "unknown"; + + # in os-specific/linux module packages + # --replace '$(shell uname -r)' "${kernel.modDirVersion}" \ + # is a common thing to do. + modDirVersion = if modDirVersion != "" then modDirVersion else "unknown"; + + meta = with lib; { + description = "Print certain system information (hardcoded with lib/system values)"; + longDescription = '' + This package provides a replacement for `uname` whose output depends only + on `stdenv.buildPlatform`. It is meant to be used from within derivations. + Many packages' build processes run `uname` at compile time and embed its + output into the result of the build. Since `uname` calls into the kernel, + and the Nix sandbox currently does not intercept these calls, builds made + on different kernels will produce different results. + ''; + license = [ licenses.mit ]; + maintainers = with maintainers; [ artturin ]; + platforms = platforms.all; + }; +} diff --git a/nixpkgs/pkgs/build-support/deterministic-uname/deterministic-uname.sh b/nixpkgs/pkgs/build-support/deterministic-uname/deterministic-uname.sh new file mode 100644 index 000000000000..31772aeee3cc --- /dev/null +++ b/nixpkgs/pkgs/build-support/deterministic-uname/deterministic-uname.sh @@ -0,0 +1,174 @@ +#! @shell@ + +set -o errexit +set -o nounset + +show_help() { + @coreutils@/bin/cat << EOF +Usage: uname [OPTION]... +Print certain system information. With no OPTION, same as -s. 
+ + -a, --all print all information, in the following order, + except omit -p and -i if unknown: + -s, --kernel-name print the kernel name + -n, --nodename print the network node hostname + -r, --kernel-release print the kernel release + -v, --kernel-version print the kernel version + -m, --machine print the machine hardware name + -p, --processor print the processor type (non-portable) + -i, --hardware-platform print the hardware platform (non-portable) + -o, --operating-system print the operating system + --help display this help and exit + --version output version information and exit +EOF + exit 0 +} + +# Potential command-line options. +version=0 +all=0 + + +kernel_name=0 +nodename=0 +kernel_release=0 +kernel_version=0 +machine=0 +processor=0 +hardware_platform=0 +operating_system=0 + +# With no OPTION, same as -s. +if [[ $# -eq 0 ]]; then + kernel_name=1 +fi + +@getopt@/bin/getopt --test > /dev/null && rc=$? || rc=$? +if [[ $rc -ne 4 ]]; then + # This shouldn't happen. + echo "Warning: Enhanced getopt not supported, please open an issue in nixpkgs." >&2 +else + # Define all short and long options. + SHORT=hvsnrvmpioa + LONG=help,version,kernel-name,nodename,kernel-release,kernel-version,machine,processor,hardware-platform,operating-system,all + + # Parse all options. + PARSED=`@getopt@/bin/getopt --options $SHORT --longoptions $LONG --name "$0" -- "$@"` + + eval set -- "$PARSED" +fi + +# Process each argument, and set the appropriate flag if we recognize it. +while [[ $# -ge 1 ]]; do + case "$1" in + --version) + version=1 + ;; + -s|--kernel-name) + kernel_name=1 + ;; + -n|--nodename) + nodename=1 + ;; + -r|--kernel-release) + kernel_release=1 + ;; + -v|--kernel-version) + kernel_version=1 + ;; + -m|--machine) + machine=1 + ;; + -p|--processor) + processor=1 + ;; + -i|--hardware-platform) + hardware_platform=1 + ;; + -o|--operating-system) + operating_system=1 + ;; + -a|--all) + all=1 + ;; + --help) + show_help + ;; + --) + shift + break + ;; + *) + echo "uname: unrecognized option '$1'" + echo "Type 'uname --help' for a list of available options." + exit 1 + ;; + esac + shift +done + + +KERNEL_NAME_VAL=@uSystem@ +NODENAME_VAL=nixpkgs +KERNEL_RELEASE_VAL=@modDirVersion@ +# #1-NixOS SMP PREEMPT_DYNAMIC Wed Dec 14 10:41:06 UTC 2022 +KERNEL_VERSION_VAL="#1-NixOS Tue Jan 1 00:00:00 UTC 1980" +MACHINE_VAL=@processor@ +PROCESSOR_VAL=unknown +HARDWARE_PLATFORM_VAL=unknown +OPERATING_SYSTEM_VAL=@operatingSystem@ + + +if [[ "$version" = "1" ]]; then + # in case some script greps for GNU coreutils. + echo "uname (GNU coreutils) 9.1" + echo "Nixpkgs deterministic-uname" + exit +fi + +# output of the real uname from GNU coreutils +# Darwin: +# Darwin *nodename* 22.1.0 Darwin Kernel Version 22.1.0: Sun Oct 9 20:14:30 PDT 2022; root:xnu-8792.41.9~2/RELEASE_ARM64_T8103 arm64 arm Darwin +# NixOS: +# Linux *nodename* 6.0.13 #1-NixOS SMP PREEMPT_DYNAMIC Wed Dec 14 10:41:06 UTC 2022 x86_64 GNU/Linux +output=() +if [[ "$all" = "1" ]]; then + output+=("$KERNEL_NAME_VAL" "$NODENAME_VAL" "$KERNEL_RELEASE_VAL" "$KERNEL_VERSION_VAL" "$MACHINE_VAL") + # in help: except omit -p and -i if unknown. 
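+    # Both values are hardcoded to "unknown" above, so -p and -i are always omitted from the -a output here.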
+ # output+=($PROCESSOR_VAL $HARDWARE_PLATFORM_VAL) + output+=("$OPERATING_SYSTEM_VAL") +fi + +if [[ "$kernel_name" = "1" ]]; then + output+=("$KERNEL_NAME_VAL") +fi + +if [[ "$nodename" = "1" ]]; then + output+=("$NODENAME_VAL") +fi + +if [[ "$kernel_release" = "1" ]]; then + output+=("$KERNEL_RELEASE_VAL") +fi + +if [[ "$kernel_version" = "1" ]]; then + output+=("$KERNEL_VERSION_VAL") +fi + +if [[ "$machine" = "1" ]]; then + output+=("$MACHINE_VAL") +fi + +if [[ "$processor" = "1" ]]; then + output+=("$PROCESSOR_VAL") +fi + +if [[ "$hardware_platform" = "1" ]]; then + output+=("$HARDWARE_PLATFORM_VAL") +fi + +if [[ "$operating_system" = "1" ]]; then + output+=("$OPERATING_SYSTEM_VAL") +fi + +echo "${output[@]}" diff --git a/nixpkgs/pkgs/build-support/dhall/directory-to-nix.nix b/nixpkgs/pkgs/build-support/dhall/directory-to-nix.nix new file mode 100644 index 000000000000..d751e19df3fc --- /dev/null +++ b/nixpkgs/pkgs/build-support/dhall/directory-to-nix.nix @@ -0,0 +1,25 @@ +{ dhallPackages, dhallPackageToNix}: + +# `dhallDirectoryToNix is a utility function to take a directory of Dhall files +# and read them in as a Nix expression. +# +# This function is similar to `dhallToNix`, but takes a Nixpkgs Dhall package +# as input instead of raw Dhall code. +# +# Note that this uses "import from derivation" (IFD), meaning that Nix will +# perform a build during the evaluation phase if you use this +# `dhallDirectoryToNix` utility. It is not possible to use +# `dhallDirectoryToNix` in Nixpkgs, since the Nixpkgs Hydra doesn't allow IFD. + +{ src +, # The file to import, relative to the src root directory + file ? "package.dhall" +}@args: + +let + generatedPkg = dhallPackages.generateDhallDirectoryPackage args; + + builtPkg = dhallPackages.callPackage generatedPkg { }; + +in + dhallPackageToNix builtPkg diff --git a/nixpkgs/pkgs/build-support/dhall/package-to-nix.nix b/nixpkgs/pkgs/build-support/dhall/package-to-nix.nix new file mode 100644 index 000000000000..301501ad49df --- /dev/null +++ b/nixpkgs/pkgs/build-support/dhall/package-to-nix.nix @@ -0,0 +1,36 @@ + +# `dhallPackageToNix` is a utility function to take a Nixpkgs Dhall package +# (created with a function like `dhallPackages.buildDhallDirectoryPackage`) +# and read it in as a Nix expression. +# +# This function is similar to `dhallToNix`, but takes a Nixpkgs Dhall package +# as input instead of raw Dhall code. +# +# Note that this uses "import from derivation" (IFD), meaning that Nix will +# perform a build during the evaluation phase if you use this +# `dhallPackageToNix` utility. It is not possible to use `dhallPackageToNix` +# in Nixpkgs, since the Nixpkgs Hydra doesn't allow IFD. + +{ stdenv, dhall-nix }: + +dhallPackage: + let + drv = stdenv.mkDerivation { + name = "dhall-compiled-package.nix"; + + buildCommand = '' + # Dhall requires that the cache is writable, even if it is never written to. + # We copy the cache from the input package to the current directory and + # set the cache as writable. 
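+        # (Background assumption, not from the original comment: Dhall looks up its semantic import cache under $XDG_CACHE_HOME/dhall, hence the XDG_CACHE_HOME export below.)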
+ cp -r "${dhallPackage}/.cache" ./ + export XDG_CACHE_HOME=$PWD/.cache + chmod -R +w ./.cache + + dhall-to-nix <<< "${dhallPackage}/binary.dhall" > $out + ''; + + nativeBuildInputs = [ dhall-nix ]; + }; + + in + import drv diff --git a/nixpkgs/pkgs/build-support/dhall/to-nix.nix b/nixpkgs/pkgs/build-support/dhall/to-nix.nix new file mode 100644 index 000000000000..96cc16e16f36 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dhall/to-nix.nix @@ -0,0 +1,38 @@ +/* `dhallToNix` is a utility function to convert expressions in the Dhall + configuration language to their corresponding Nix expressions. + + Example: + dhallToNix "{ foo = 1, bar = True }" + => { foo = 1; bar = true; } + dhallToNix "λ(x : Bool) → x == False" + => x : x == false + dhallToNix "λ(x : Bool) → x == False" false + => true + + See https://hackage.haskell.org/package/dhall-nix/docs/Dhall-Nix.html for + a longer tutorial + + Note that this uses "import from derivation", meaning that Nix will perform + a build during the evaluation phase if you use this `dhallToNix` utility +*/ +{ stdenv, dhall-nix, writeText }: + +let + dhallToNix = code : + let + file = writeText "dhall-expression" code; + + drv = stdenv.mkDerivation { + name = "dhall-compiled.nix"; + + buildCommand = '' + dhall-to-nix <<< "${file}" > $out + ''; + + buildInputs = [ dhall-nix ]; + }; + + in + import drv; +in + dhallToNix diff --git a/nixpkgs/pkgs/build-support/docker/default.nix b/nixpkgs/pkgs/build-support/docker/default.nix new file mode 100644 index 000000000000..70fd3635b745 --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/default.nix @@ -0,0 +1,1237 @@ +{ bashInteractive +, buildPackages +, cacert +, callPackage +, closureInfo +, coreutils +, e2fsprogs +, proot +, fakeNss +, fakeroot +, go +, jq +, jshon +, lib +, makeWrapper +, moreutils +, nix +, nixosTests +, pigz +, rsync +, runCommand +, runtimeShell +, shadow +, skopeo +, storeDir ? builtins.storeDir +, substituteAll +, symlinkJoin +, tarsum +, util-linux +, vmTools +, writeReferencesToFile +, writeScript +, writeShellScriptBin +, writeText +, writeTextDir +, writePython3 +}: + +let + inherit (lib) + optionals + optionalString + ; + + inherit (lib) + escapeShellArgs + toList + ; + + mkDbExtraCommand = contents: + let + contentsList = if builtins.isList contents then contents else [ contents ]; + in + '' + echo "Generating the nix database..." + echo "Warning: only the database of the deepest Nix layer is loaded." + echo " If you want to use nix commands in the container, it would" + echo " be better to only have one layer that contains a nix store." + + export NIX_REMOTE=local?root=$PWD + # A user is required by nix + # https://github.com/NixOS/nix/blob/9348f9291e5d9e4ba3c4347ea1b235640f54fd79/src/libutil/util.cc#L478 + export USER=nobody + ${buildPackages.nix}/bin/nix-store --load-db < ${closureInfo {rootPaths = contentsList;}}/registration + + mkdir -p nix/var/nix/gcroots/docker/ + for i in ${lib.concatStringsSep " " contentsList}; do + ln -s $i nix/var/nix/gcroots/docker/$(basename $i) + done; + ''; + + # The OCI Image specification recommends that configurations use values listed + # in the Go Language document for GOARCH. + # Reference: https://github.com/opencontainers/image-spec/blob/master/config.md#properties + # For the mapping from Nixpkgs system parameters to GOARCH, we can reuse the + # mapping from the go package. 
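+  # For example, this evaluates to "amd64" on x86_64-linux and "arm64" on aarch64-linux.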
+ defaultArchitecture = go.GOARCH; + +in +rec { + examples = callPackage ./examples.nix { + inherit buildImage buildLayeredImage fakeNss pullImage shadowSetup buildImageWithNixDb streamNixShellImage; + }; + + tests = { + inherit (nixosTests) + docker-tools + docker-tools-overlay + # requires remote builder + # docker-tools-cross + ; + }; + + pullImage = + let + fixName = name: builtins.replaceStrings [ "/" ":" ] [ "-" "-" ] name; + in + { imageName + # To find the digest of an image, you can use skopeo: + # see doc/functions.xml + , imageDigest + , sha256 + , os ? "linux" + , # Image architecture, defaults to the architecture of the `hostPlatform` when unset + arch ? defaultArchitecture + # This is used to set name to the pulled image + , finalImageName ? imageName + # This used to set a tag to the pulled image + , finalImageTag ? "latest" + # This is used to disable TLS certificate verification, allowing access to http registries on (hopefully) trusted networks + , tlsVerify ? true + + , name ? fixName "docker-image-${finalImageName}-${finalImageTag}.tar" + }: + + runCommand name + { + inherit imageDigest; + imageName = finalImageName; + imageTag = finalImageTag; + impureEnvVars = lib.fetchers.proxyImpureEnvVars; + outputHashMode = "flat"; + outputHashAlgo = "sha256"; + outputHash = sha256; + + nativeBuildInputs = [ skopeo ]; + SSL_CERT_FILE = "${cacert.out}/etc/ssl/certs/ca-bundle.crt"; + + sourceURL = "docker://${imageName}@${imageDigest}"; + destNameTag = "${finalImageName}:${finalImageTag}"; + } '' + skopeo \ + --insecure-policy \ + --tmpdir=$TMPDIR \ + --override-os ${os} \ + --override-arch ${arch} \ + copy \ + --src-tls-verify=${lib.boolToString tlsVerify} \ + "$sourceURL" "docker-archive://$out:$destNameTag" \ + | cat # pipe through cat to force-disable progress bar + ''; + + # We need to sum layer.tar, not a directory, hence tarsum instead of nix-hash. + # And we cannot untar it, because then we cannot preserve permissions etc. + inherit tarsum; # pkgs.dockerTools.tarsum + + # buildEnv creates symlinks to dirs, which is hard to edit inside the overlay VM + mergeDrvs = + { derivations + , onlyDeps ? false + }: + runCommand "merge-drvs" + { + inherit derivations onlyDeps; + } '' + if [[ -n "$onlyDeps" ]]; then + echo $derivations > $out + exit 0 + fi + + mkdir $out + for derivation in $derivations; do + echo "Merging $derivation..." + if [[ -d "$derivation" ]]; then + # If it's a directory, copy all of its contents into $out. + cp -drf --preserve=mode -f $derivation/* $out/ + else + # Otherwise treat the derivation as a tarball and extract it + # into $out. + tar -C $out -xpf $drv || true + fi + done + ''; + + # Helper for setting up the base files for managing users and + # groups, only if such files don't exist already. It is suitable for + # being used in a runAsRoot script. + shadowSetup = '' + export PATH=${shadow}/bin:$PATH + mkdir -p /etc/pam.d + if [[ ! -f /etc/passwd ]]; then + echo "root:x:0:0::/root:${runtimeShell}" > /etc/passwd + echo "root:!x:::::::" > /etc/shadow + fi + if [[ ! -f /etc/group ]]; then + echo "root:x:0:" > /etc/group + echo "root:x::" > /etc/gshadow + fi + if [[ ! -f /etc/pam.d/other ]]; then + cat > /etc/pam.d/other <<EOF + account sufficient pam_unix.so + auth sufficient pam_rootok.so + password requisite pam_unix.so nullok yescrypt + session required pam_unix.so + EOF + fi + if [[ ! -f /etc/login.defs ]]; then + touch /etc/login.defs + fi + ''; + + # Run commands in a virtual machine. + runWithOverlay = + { name + , fromImage ? 
null + , fromImageName ? null + , fromImageTag ? null + , diskSize ? 1024 + , buildVMMemorySize ? 512 + , preMount ? "" + , postMount ? "" + , postUmount ? "" + }: + vmTools.runInLinuxVM ( + runCommand name + { + preVM = vmTools.createEmptyImage { + size = diskSize; + fullName = "docker-run-disk"; + destination = "./image"; + }; + inherit fromImage fromImageName fromImageTag; + memSize = buildVMMemorySize; + + nativeBuildInputs = [ util-linux e2fsprogs jshon rsync jq ]; + } '' + mkdir disk + mkfs /dev/${vmTools.hd} + mount /dev/${vmTools.hd} disk + cd disk + + function dedup() { + declare -A seen + while read ln; do + if [[ -z "''${seen["$ln"]:-}" ]]; then + echo "$ln"; seen["$ln"]=1 + fi + done + } + + if [[ -n "$fromImage" ]]; then + echo "Unpacking base image..." + mkdir image + tar -C image -xpf "$fromImage" + + if [[ -n "$fromImageName" ]] && [[ -n "$fromImageTag" ]]; then + parentID="$( + cat "image/manifest.json" | + jq -r '.[] | select(.RepoTags | contains([$desiredTag])) | rtrimstr(".json")' \ + --arg desiredTag "$fromImageName:$fromImageTag" + )" + else + echo "From-image name or tag wasn't set. Reading the first ID." + parentID="$(cat "image/manifest.json" | jq -r '.[0].Config | rtrimstr(".json")')" + fi + + # In case of repeated layers, unpack only the last occurrence of each + cat ./image/manifest.json | jq -r '.[0].Layers | .[]' | tac | dedup | tac > layer-list + else + touch layer-list + fi + + # Unpack all of the parent layers into the image. + lowerdir="" + extractionID=0 + for layerTar in $(cat layer-list); do + echo "Unpacking layer $layerTar" + extractionID=$((extractionID + 1)) + + mkdir -p image/$extractionID/layer + tar -C image/$extractionID/layer -xpf image/$layerTar + rm image/$layerTar + + find image/$extractionID/layer -name ".wh.*" -exec bash -c 'name="$(basename {}|sed "s/^.wh.//")"; mknod "$(dirname {})/$name" c 0 0; rm {}' \; + + # Get the next lower directory and continue the loop. + lowerdir=image/$extractionID/layer''${lowerdir:+:}$lowerdir + done + + mkdir work + mkdir layer + mkdir mnt + + ${lib.optionalString (preMount != "") '' + # Execute pre-mount steps + echo "Executing pre-mount steps..." + ${preMount} + ''} + + if [ -n "$lowerdir" ]; then + mount -t overlay overlay -olowerdir=$lowerdir,workdir=work,upperdir=layer mnt + else + mount --bind layer mnt + fi + + ${lib.optionalString (postMount != "") '' + # Execute post-mount steps + echo "Executing post-mount steps..." + ${postMount} + ''} + + umount mnt + + ( + cd layer + cmd='name="$(basename {})"; touch "$(dirname {})/.wh.$name"; rm "{}"' + find . -type c -exec bash -c "$cmd" \; + ) + + ${postUmount} + ''); + + exportImage = { name ? fromImage.name, fromImage, fromImageName ? null, fromImageTag ? null, diskSize ? 1024 }: + runWithOverlay { + inherit name fromImage fromImageName fromImageTag diskSize; + + postMount = '' + echo "Packing raw image..." + tar -C mnt --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" -cf $out/layer.tar . + ''; + + postUmount = '' + mv $out/layer.tar . + rm -rf $out + mv layer.tar $out + ''; + }; + + # Create an executable shell script which has the coreutils in its + # PATH. Since root scripts are executed in a blank environment, even + # things like `ls` or `echo` will be missing. + shellScript = name: text: + writeScript name '' + #!${runtimeShell} + set -e + export PATH=${coreutils}/bin:/bin + ${text} + ''; + + # Create a "layer" (set of files). + mkPureLayer = + { + # Name of the layer + name + , # JSON containing configuration and metadata for this layer. 
+ baseJson + , # Files to add to the layer. + copyToRoot ? null + , # When copying the contents into the image, preserve symlinks to + # directories (see `rsync -K`). Otherwise, transform those symlinks + # into directories. + keepContentsDirlinks ? false + , # Additional commands to run on the layer before it is tar'd up. + extraCommands ? "" + , uid ? 0 + , gid ? 0 + }: + runCommand "docker-layer-${name}" + { + inherit baseJson extraCommands; + contents = copyToRoot; + nativeBuildInputs = [ jshon rsync tarsum ]; + } + '' + mkdir layer + if [[ -n "$contents" ]]; then + echo "Adding contents..." + for item in $contents; do + echo "Adding $item" + rsync -a${if keepContentsDirlinks then "K" else "k"} --chown=0:0 $item/ layer/ + done + else + echo "No contents to add to layer." + fi + + chmod ug+w layer + + if [[ -n "$extraCommands" ]]; then + (cd layer; eval "$extraCommands") + fi + + # Tar up the layer and throw it into 'layer.tar'. + echo "Packing layer..." + mkdir $out + tarhash=$(tar -C layer --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=${toString uid} --group=${toString gid} -cf - . | tee -p $out/layer.tar | tarsum) + + # Add a 'checksum' field to the JSON, with the value set to the + # checksum of the tarball. + cat ${baseJson} | jshon -s "$tarhash" -i checksum > $out/json + + # Indicate to docker that we're using schema version 1.0. + echo -n "1.0" > $out/VERSION + + echo "Finished building layer '${name}'" + ''; + + # Make a "root" layer; required if we need to execute commands as a + # privileged user on the image. The commands themselves will be + # performed in a virtual machine sandbox. + mkRootLayer = + { + # Name of the image. + name + , # Script to run as root. Bash. + runAsRoot + , # Files to add to the layer. If null, an empty layer will be created. + # To add packages to /bin, use `buildEnv` or similar. + copyToRoot ? null + , # When copying the contents into the image, preserve symlinks to + # directories (see `rsync -K`). Otherwise, transform those symlinks + # into directories. + keepContentsDirlinks ? false + , # JSON containing configuration and metadata for this layer. + baseJson + , # Existing image onto which to append the new layer. + fromImage ? null + , # Name of the image we're appending onto. + fromImageName ? null + , # Tag of the image we're appending onto. + fromImageTag ? null + , # How much disk to allocate for the temporary virtual machine. + diskSize ? 1024 + , # How much memory to allocate for the temporary virtual machine. + buildVMMemorySize ? 512 + , # Commands (bash) to run on the layer; these do not require sudo. + extraCommands ? "" + }: + # Generate an executable script from the `runAsRoot` text. + let + runAsRootScript = shellScript "run-as-root.sh" runAsRoot; + extraCommandsScript = shellScript "extra-commands.sh" extraCommands; + in + runWithOverlay { + name = "docker-layer-${name}"; + + inherit fromImage fromImageName fromImageTag diskSize buildVMMemorySize; + + preMount = lib.optionalString (copyToRoot != null && copyToRoot != [ ]) '' + echo "Adding contents..." + for item in ${escapeShellArgs (map (c: "${c}") (toList copyToRoot))}; do + echo "Adding $item..." + rsync -a${if keepContentsDirlinks then "K" else "k"} --chown=0:0 $item/ layer/ + done + + chmod ug+w layer + ''; + + postMount = '' + mkdir -p mnt/{dev,proc,sys,tmp} mnt${storeDir} + + # Mount /dev, /sys and the nix store as shared folders. 
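+ # The recursive bind mounts below make the host's /dev, /sys and Nix store
+ # visible inside the chroot, so the runAsRoot script (itself a store path)
+ # and anything it invokes can be executed from within mnt.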
+ mount --rbind /dev mnt/dev + mount --rbind /sys mnt/sys + mount --rbind ${storeDir} mnt${storeDir} + + # Execute the run as root script. See 'man unshare' for + # details on what's going on here; basically this command + # means that the runAsRootScript will be executed in a nearly + # completely isolated environment. + # + # Ideally we would use --mount-proc=mnt/proc or similar, but this + # doesn't work. The workaround is to setup proc after unshare. + # See: https://github.com/karelzak/util-linux/issues/648 + unshare -imnpuf --mount-proc sh -c 'mount --rbind /proc mnt/proc && chroot mnt ${runAsRootScript}' + + # Unmount directories and remove them. + umount -R mnt/dev mnt/sys mnt${storeDir} + rmdir --ignore-fail-on-non-empty \ + mnt/dev mnt/proc mnt/sys mnt${storeDir} \ + mnt$(dirname ${storeDir}) + ''; + + postUmount = '' + (cd layer; ${extraCommandsScript}) + + echo "Packing layer..." + mkdir -p $out + tarhash=$(tar -C layer --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" -cf - . | + tee -p $out/layer.tar | + ${tarsum}/bin/tarsum) + + cat ${baseJson} | jshon -s "$tarhash" -i checksum > $out/json + # Indicate to docker that we're using schema version 1.0. + echo -n "1.0" > $out/VERSION + + echo "Finished building layer '${name}'" + ''; + }; + + buildLayeredImage = lib.makeOverridable ({ name, ... }@args: + let + stream = streamLayeredImage args; + in + runCommand "${baseNameOf name}.tar.gz" + { + inherit (stream) imageName; + passthru = { inherit (stream) imageTag; }; + nativeBuildInputs = [ pigz ]; + } "${stream} | pigz -nTR > $out" + ); + + # 1. extract the base image + # 2. create the layer + # 3. add layer deps to the layer itself, diffing with the base image + # 4. compute the layer id + # 5. put the layer in the image + # 6. repack the image + buildImage = lib.makeOverridable ( + args@{ + # Image name. + name + , # Image tag, when null then the nix output hash will be used. + tag ? null + , # Parent image, to append to. + fromImage ? null + , # Name of the parent image; will be read from the image otherwise. + fromImageName ? null + , # Tag of the parent image; will be read from the image otherwise. + fromImageTag ? null + , # Files to put on the image (a nix store path or list of paths). + copyToRoot ? null + , # When copying the contents into the image, preserve symlinks to + # directories (see `rsync -K`). Otherwise, transform those symlinks + # into directories. + keepContentsDirlinks ? false + , # Docker config; e.g. what command to run on the container. + config ? null + , # Image architecture, defaults to the architecture of the `hostPlatform` when unset + architecture ? defaultArchitecture + , # Optional bash script to run on the files prior to fixturizing the layer. + extraCommands ? "" + , uid ? 0 + , gid ? 0 + , # Optional bash script to run as root on the image when provisioning. + runAsRoot ? null + , # Size of the virtual machine disk to provision when building the image. + diskSize ? 1024 + , # Size of the virtual machine memory to provision when building the image. + buildVMMemorySize ? 512 + , # Time of creation of the image. + created ? "1970-01-01T00:00:01Z" + , # Deprecated. + contents ? null + , + }: + + let + checked = + lib.warnIf (contents != null) + "in docker image ${name}: The contents parameter is deprecated. Change to copyToRoot if the contents are designed to be copied to the root filesystem, such as when you use `buildEnv` or similar between contents and your packages. Use copyToRoot = buildEnv { ... 
}; or similar if you intend to add packages to /bin." + lib.throwIf (contents != null && copyToRoot != null) "in docker image ${name}: You can not specify both contents and copyToRoot." + ; + + rootContents = if copyToRoot == null then contents else copyToRoot; + + baseName = baseNameOf name; + + # Create a JSON blob of the configuration. Set the date to unix zero. + baseJson = + let + pure = writeText "${baseName}-config.json" (builtins.toJSON { + inherit created config architecture; + preferLocalBuild = true; + os = "linux"; + }); + impure = runCommand "${baseName}-config.json" + { + nativeBuildInputs = [ jq ]; + preferLocalBuild = true; + } + '' + jq ".created = \"$(TZ=utc date --iso-8601="seconds")\"" ${pure} > $out + ''; + in + if created == "now" then impure else pure; + + layer = + if runAsRoot == null + then + mkPureLayer + { + name = baseName; + inherit baseJson keepContentsDirlinks extraCommands uid gid; + copyToRoot = rootContents; + } else + mkRootLayer { + name = baseName; + inherit baseJson fromImage fromImageName fromImageTag + keepContentsDirlinks runAsRoot diskSize buildVMMemorySize + extraCommands; + copyToRoot = rootContents; + }; + result = runCommand "docker-image-${baseName}.tar.gz" + { + nativeBuildInputs = [ jshon pigz jq moreutils ]; + # Image name must be lowercase + imageName = lib.toLower name; + imageTag = lib.optionalString (tag != null) tag; + inherit fromImage baseJson; + layerClosure = writeReferencesToFile layer; + passthru.buildArgs = args; + passthru.layer = layer; + passthru.imageTag = + if tag != null + then tag + else + lib.head (lib.strings.splitString "-" (baseNameOf result.outPath)); + } '' + ${lib.optionalString (tag == null) '' + outName="$(basename "$out")" + outHash=$(echo "$outName" | cut -d - -f 1) + + imageTag=$outHash + ''} + + # Print tar contents: + # 1: Interpreted as relative to the root directory + # 2: With no trailing slashes on directories + # This is useful for ensuring that the output matches the + # values generated by the "find" command + ls_tar() { + for f in $(tar -tf $1 | xargs realpath -ms --relative-to=.); do + if [[ "$f" != "." ]]; then + echo "/$f" + fi + done + } + + mkdir image + touch baseFiles + baseEnvs='[]' + if [[ -n "$fromImage" ]]; then + echo "Unpacking base image..." + tar -C image -xpf "$fromImage" + + # Store the layers and the environment variables from the base image + cat ./image/manifest.json | jq -r '.[0].Layers | .[]' > layer-list + configName="$(cat ./image/manifest.json | jq -r '.[0].Config')" + baseEnvs="$(cat "./image/$configName" | jq '.config.Env // []')" + + # Extract the parentID from the manifest + if [[ -n "$fromImageName" ]] && [[ -n "$fromImageTag" ]]; then + parentID="$( + cat "image/manifest.json" | + jq -r '.[] | select(.RepoTags | contains([$desiredTag])) | rtrimstr(".json")' \ + --arg desiredTag "$fromImageName:$fromImageTag" + )" + else + echo "From-image name or tag wasn't set. Reading the first ID." + parentID="$(cat "image/manifest.json" | jq -r '.[0].Config | rtrimstr(".json")')" + fi + + # Otherwise do not import the base image configuration and manifest + chmod a+w image image/*.json + rm -f image/*.json + + for l in image/*/layer.tar; do + ls_tar $l >> baseFiles + done + else + touch layer-list + fi + + chmod -R ug+rw image + + mkdir temp + cp ${layer}/* temp/ + chmod ug+w temp/* + + for dep in $(cat $layerClosure); do + find $dep >> layerFiles + done + + echo "Adding layer..." + # Record the contents of the tarball with ls_tar. 
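+ # ls_tar normalizes tar members to absolute paths, e.g. "./bin/sh" is
+ # recorded as "/bin/sh", so these entries compare cleanly against the
+ # absolute paths that "find" writes into layerFiles below.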
+ ls_tar temp/layer.tar >> baseFiles + + # Append nix/store directory to the layer so that when the layer is loaded in the + # image /nix/store has read permissions for non-root users. + # nix/store is added only if the layer has /nix/store paths in it. + if [ $(wc -l < $layerClosure) -gt 1 ] && [ $(grep -c -e "^/nix/store$" baseFiles) -eq 0 ]; then + mkdir -p nix/store + chmod -R 555 nix + echo "./nix" >> layerFiles + echo "./nix/store" >> layerFiles + fi + + # Get the files in the new layer which were *not* present in + # the old layer, and record them as newFiles. + comm <(sort -n baseFiles|uniq) \ + <(sort -n layerFiles|uniq|grep -v ${layer}) -1 -3 > newFiles + # Append the new files to the layer. + tar -rpf temp/layer.tar --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" \ + --owner=0 --group=0 --no-recursion --verbatim-files-from --files-from newFiles + + echo "Adding meta..." + + # If we have a parentID, add it to the json metadata. + if [[ -n "$parentID" ]]; then + cat temp/json | jshon -s "$parentID" -i parent > tmpjson + mv tmpjson temp/json + fi + + # Take the sha256 sum of the generated json and use it as the layer ID. + # Compute the size and add it to the json under the 'Size' field. + layerID=$(sha256sum temp/json|cut -d ' ' -f 1) + size=$(stat --printf="%s" temp/layer.tar) + cat temp/json | jshon -s "$layerID" -i id -n $size -i Size > tmpjson + mv tmpjson temp/json + + # Use the temp folder we've been working on to create a new image. + mv temp image/$layerID + + # Add the new layer ID to the end of the layer list + ( + cat layer-list + # originally this used `sed -i "1i$layerID" layer-list`, but + # would fail if layer-list was completely empty. + echo "$layerID/layer.tar" + ) | sponge layer-list + + # Create image json and image manifest + imageJson=$(cat ${baseJson} | jq '.config.Env = $baseenv + .config.Env' --argjson baseenv "$baseEnvs") + imageJson=$(echo "$imageJson" | jq ". + {\"rootfs\": {\"diff_ids\": [], \"type\": \"layers\"}}") + manifestJson=$(jq -n "[{\"RepoTags\":[\"$imageName:$imageTag\"]}]") + + for layerTar in $(cat ./layer-list); do + layerChecksum=$(sha256sum image/$layerTar | cut -d ' ' -f1) + imageJson=$(echo "$imageJson" | jq ".history |= . + [{\"created\": \"$(jq -r .created ${baseJson})\"}]") + # diff_ids order is from the bottom-most to top-most layer + imageJson=$(echo "$imageJson" | jq ".rootfs.diff_ids |= . + [\"sha256:$layerChecksum\"]") + manifestJson=$(echo "$manifestJson" | jq ".[0].Layers |= . + [\"$layerTar\"]") + done + + imageJsonChecksum=$(echo "$imageJson" | sha256sum | cut -d ' ' -f1) + echo "$imageJson" > "image/$imageJsonChecksum.json" + manifestJson=$(echo "$manifestJson" | jq ".[0].Config = \"$imageJsonChecksum.json\"") + echo "$manifestJson" > image/manifest.json + + # Store the json under the name image/repositories. + jshon -n object \ + -n object -s "$layerID" -i "$imageTag" \ + -i "$imageName" > image/repositories + + # Make the image read-only. + chmod -R a-w image + + echo "Cooking the image..." + tar -C image --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=0 --group=0 --xform s:'^./':: -c . | pigz -nTR > $out + + echo "Finished." + ''; + + in + checked result + ); + + # Merge the tarballs of images built with buildImage into a single + # tarball that contains all images. Running `docker load` on the resulting + # tarball will load the images into the docker daemon. 
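+ # A minimal usage sketch (imageA/imageB are hypothetical attributes; see
+ # examples.nix for concrete ones):
+ #
+ #   merged = dockerTools.mergeImages [ imageA imageB ];
+ #   # nix-build -A ... && docker load < result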
+ mergeImages = images: runCommand "merge-docker-images" + { + inherit images; + nativeBuildInputs = [ pigz jq ]; + } '' + mkdir image inputs + # Extract images + repos=() + manifests=() + for item in $images; do + name=$(basename $item) + mkdir inputs/$name + tar -I pigz -xf $item -C inputs/$name + if [ -f inputs/$name/repositories ]; then + repos+=(inputs/$name/repositories) + fi + if [ -f inputs/$name/manifest.json ]; then + manifests+=(inputs/$name/manifest.json) + fi + done + # Copy all layers from input images to output image directory + cp -R --update=none inputs/*/* image/ + # Merge repositories objects and manifests + jq -s add "''${repos[@]}" > repositories + jq -s add "''${manifests[@]}" > manifest.json + # Replace output image repositories and manifest with merged versions + mv repositories image/repositories + mv manifest.json image/manifest.json + # Create tarball and gzip + tar -C image --hard-dereference --sort=name --mtime="@$SOURCE_DATE_EPOCH" --owner=0 --group=0 --xform s:'^./':: -c . | pigz -nTR > $out + ''; + + + # Provide a /etc/passwd and /etc/group that contain root and nobody. + # Useful when packaging binaries that insist on using nss to look up + # username/groups (like nginx). + # /bin/sh is fine to not exist, and provided by another shim. + inherit fakeNss; # alias + + # This provides a /usr/bin/env, for shell scripts using the + # "#!/usr/bin/env executable" shebang. + usrBinEnv = runCommand "usr-bin-env" { } '' + mkdir -p $out/usr/bin + ln -s ${coreutils}/bin/env $out/usr/bin + ''; + + # This provides /bin/sh, pointing to bashInteractive. + binSh = runCommand "bin-sh" { } '' + mkdir -p $out/bin + ln -s ${bashInteractive}/bin/bash $out/bin/sh + ''; + + # This provides the ca bundle in common locations + caCertificates = runCommand "ca-certificates" { } '' + mkdir -p $out/etc/ssl/certs $out/etc/pki/tls/certs + # Old NixOS compatibility. + ln -s ${cacert}/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs/ca-bundle.crt + # NixOS canonical location + Debian/Ubuntu/Arch/Gentoo compatibility. + ln -s ${cacert}/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs/ca-certificates.crt + # CentOS/Fedora compatibility. + ln -s ${cacert}/etc/ssl/certs/ca-bundle.crt $out/etc/pki/tls/certs/ca-bundle.crt + ''; + + # Build an image and populate its nix database with the provided + # contents. The main purpose is to be able to use nix commands in + # the container. + # Be careful since this doesn't work well with multilayer. + # TODO: add the dependencies of the config json. + buildImageWithNixDb = args@{ copyToRoot ? contents, contents ? null, extraCommands ? "", ... }: ( + buildImage (args // { + extraCommands = (mkDbExtraCommand copyToRoot) + extraCommands; + }) + ); + + # TODO: add the dependencies of the config json. + buildLayeredImageWithNixDb = args@{ contents ? null, extraCommands ? "", ... }: ( + buildLayeredImage (args // { + extraCommands = (mkDbExtraCommand contents) + extraCommands; + }) + ); + + streamLayeredImage = lib.makeOverridable ( + { + # Image Name + name + , # Image tag, the Nix's output hash will be used if null + tag ? null + , # Parent image, to append to. + fromImage ? null + , # Files to put on the image (a nix store path or list of paths). + contents ? [ ] + , # Docker config; e.g. what command to run on the container. + config ? { } + , # Image architecture, defaults to the architecture of the `hostPlatform` when unset + architecture ? defaultArchitecture + , # Time of creation of the image. 
Passing "now" will make the + # created date be the time of building. + created ? "1970-01-01T00:00:01Z" + , # Optional bash script to run on the files prior to fixturizing the layer. + extraCommands ? "" + , # Optional bash script to run inside fakeroot environment. + # Could be used for changing ownership of files in customisation layer. + fakeRootCommands ? "" + , # Whether to run fakeRootCommands in fakechroot as well, so that they + # appear to run inside the image, but have access to the normal Nix store. + # Perhaps this could be enabled on by default on pkgs.stdenv.buildPlatform.isLinux + enableFakechroot ? false + , # We pick 100 to ensure there is plenty of room for extension. I + # believe the actual maximum is 128. + maxLayers ? 100 + , # Whether to include store paths in the image. You generally want to leave + # this on, but tooling may disable this to insert the store paths more + # efficiently via other means, such as bind mounting the host store. + includeStorePaths ? true + , # Passthru arguments for the underlying derivation. + passthru ? {} + , + }: + assert + (lib.assertMsg (maxLayers > 1) + "the maxLayers argument of dockerTools.buildLayeredImage function must be greather than 1 (current value: ${toString maxLayers})"); + let + baseName = baseNameOf name; + + streamScript = writePython3 "stream" { } ./stream_layered_image.py; + baseJson = writeText "${baseName}-base.json" (builtins.toJSON { + inherit config architecture; + os = "linux"; + }); + + contentsList = if builtins.isList contents then contents else [ contents ]; + bind-paths = builtins.toString (builtins.map (path: "--bind=${path}:${path}!") [ + "/dev/" + "/proc/" + "/sys/" + "${builtins.storeDir}/" + "$out/layer.tar" + ]); + + # We store the customisation layer as a tarball, to make sure that + # things like permissions set on 'extraCommands' are not overridden + # by Nix. Then we precompute the sha256 for performance. + customisationLayer = symlinkJoin { + name = "${baseName}-customisation-layer"; + paths = contentsList; + inherit extraCommands fakeRootCommands; + nativeBuildInputs = [ + fakeroot + ] ++ optionals enableFakechroot [ + proot + ]; + postBuild = '' + mv $out old_out + (cd old_out; eval "$extraCommands" ) + + mkdir $out + ${optionalString enableFakechroot ''proot -r $PWD/old_out ${bind-paths} --pwd=/ ''}fakeroot bash -c ' + source $stdenv/setup + ${optionalString (!enableFakechroot) ''cd old_out''} + eval "$fakeRootCommands" + tar \ + --sort name \ + --numeric-owner --mtime "@$SOURCE_DATE_EPOCH" \ + --hard-dereference \ + -cf $out/layer.tar . + ' + + sha256sum $out/layer.tar \ + | cut -f 1 -d ' ' \ + > $out/checksum + ''; + }; + + closureRoots = lib.optionals includeStorePaths /* normally true */ ( + [ baseJson customisationLayer ] + ); + overallClosure = writeText "closure" (lib.concatStringsSep " " closureRoots); + + # These derivations are only created as implementation details of docker-tools, + # so they'll be excluded from the created images. 
+ unnecessaryDrvs = [ baseJson overallClosure customisationLayer ]; + + conf = runCommand "${baseName}-conf.json" + { + inherit fromImage maxLayers created; + imageName = lib.toLower name; + preferLocalBuild = true; + passthru.imageTag = + if tag != null + then tag + else + lib.head (lib.strings.splitString "-" (baseNameOf conf.outPath)); + paths = buildPackages.referencesByPopularity overallClosure; + nativeBuildInputs = [ jq ]; + } '' + ${if (tag == null) then '' + outName="$(basename "$out")" + outHash=$(echo "$outName" | cut -d - -f 1) + + imageTag=$outHash + '' else '' + imageTag="${tag}" + ''} + + # convert "created" to iso format + if [[ "$created" != "now" ]]; then + created="$(date -Iseconds -d "$created")" + fi + + paths() { + cat $paths ${lib.concatMapStringsSep " " + (path: "| (grep -v ${path} || true)") + unnecessaryDrvs} + } + + # Compute the number of layers that are already used by a potential + # 'fromImage' as well as the customization layer. Ensure that there is + # still at least one layer available to store the image contents. + usedLayers=0 + + # subtract number of base image layers + if [[ -n "$fromImage" ]]; then + (( usedLayers += $(tar -xOf "$fromImage" manifest.json | jq '.[0].Layers | length') )) + fi + + # one layer will be taken up by the customisation layer + (( usedLayers += 1 )) + + if ! (( $usedLayers < $maxLayers )); then + echo >&2 "Error: usedLayers $usedLayers layers to store 'fromImage' and" \ + "'extraCommands', but only maxLayers=$maxLayers were" \ + "allowed. At least 1 layer is required to store contents." + exit 1 + fi + availableLayers=$(( maxLayers - usedLayers )) + + # Create $maxLayers worth of Docker Layers, one layer per store path + # unless there are more paths than $maxLayers. In that case, create + # $maxLayers-1 for the most popular layers, and smush the remainaing + # store paths in to one final layer. + # + # The following code is fiddly w.r.t. ensuring every layer is + # created, and that no paths are missed. If you change the + # following lines, double-check that your code behaves properly + # when the number of layers equals: + # maxLayers-1, maxLayers, and maxLayers+1, 0 + paths | + jq -sR ' + rtrimstr("\n") | split("\n") + | (.[:$maxLayers-1] | map([.])) + [ .[$maxLayers-1:] ] + | map(select(length > 0)) + ' \ + --argjson maxLayers "$availableLayers" > store_layers.json + + # The index on $store_layers is necessary because the --slurpfile + # automatically reads the file as an array. + cat ${baseJson} | jq ' + . + { + "store_dir": $store_dir, + "from_image": $from_image, + "store_layers": $store_layers[0], + "customisation_layer", $customisation_layer, + "repo_tag": $repo_tag, + "created": $created + } + ' --arg store_dir "${storeDir}" \ + --argjson from_image ${if fromImage == null then "null" else "'\"${fromImage}\"'"} \ + --slurpfile store_layers store_layers.json \ + --arg customisation_layer ${customisationLayer} \ + --arg repo_tag "$imageName:$imageTag" \ + --arg created "$created" | + tee $out + ''; + + result = runCommand "stream-${baseName}" + { + inherit (conf) imageName; + preferLocalBuild = true; + passthru = passthru // { + inherit (conf) imageTag; + + # Distinguish tarballs and exes at the Nix level so functions that + # take images can know in advance how the image is supposed to be used. 
+ isExe = true; + }; + nativeBuildInputs = [ makeWrapper ]; + } '' + makeWrapper ${streamScript} $out --add-flags ${conf} + ''; + in + result + ); + + # This function streams a docker image that behaves like a nix-shell for a derivation + streamNixShellImage = + { # The derivation whose environment this docker image should be based on + drv + , # Image Name + name ? drv.name + "-env" + , # Image tag, the Nix's output hash will be used if null + tag ? null + , # User id to run the container as. Defaults to 1000, because many + # binaries don't like to be run as root + uid ? 1000 + , # Group id to run the container as, see also uid + gid ? 1000 + , # The home directory of the user + homeDirectory ? "/build" + , # The path to the bash binary to use as the shell. See `NIX_BUILD_SHELL` in `man nix-shell` + shell ? bashInteractive + "/bin/bash" + , # Run this command in the environment of the derivation, in an interactive shell. See `--command` in `man nix-shell` + command ? null + , # Same as `command`, but runs the command in a non-interactive shell instead. See `--run` in `man nix-shell` + run ? null + }: + assert lib.assertMsg (! (drv.drvAttrs.__structuredAttrs or false)) + "streamNixShellImage: Does not work with the derivation ${drv.name} because it uses __structuredAttrs"; + assert lib.assertMsg (command == null || run == null) + "streamNixShellImage: Can't specify both command and run"; + let + + # A binary that calls the command to build the derivation + builder = writeShellScriptBin "buildDerivation" '' + exec ${lib.escapeShellArg (stringValue drv.drvAttrs.builder)} ${lib.escapeShellArgs (map stringValue drv.drvAttrs.args)} + ''; + + staticPath = "${dirOf shell}:${lib.makeBinPath [ builder ]}"; + + # https://github.com/NixOS/nix/blob/2.8.0/src/nix-build/nix-build.cc#L493-L526 + rcfile = writeText "nix-shell-rc" '' + unset PATH + dontAddDisableDepTrack=1 + # TODO: https://github.com/NixOS/nix/blob/2.8.0/src/nix-build/nix-build.cc#L506 + [ -e $stdenv/setup ] && source $stdenv/setup + PATH=${staticPath}:"$PATH" + SHELL=${lib.escapeShellArg shell} + BASH=${lib.escapeShellArg shell} + set +e + [ -n "$PS1" -a -z "$NIX_SHELL_PRESERVE_PROMPT" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] ' + if [ "$(type -t runHook)" = function ]; then + runHook shellHook + fi + unset NIX_ENFORCE_PURITY + shopt -u nullglob + shopt -s execfail + ${optionalString (command != null || run != null) '' + ${optionalString (command != null) command} + ${optionalString (run != null) run} + exit + ''} + ''; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/globals.hh#L464-L465 + sandboxBuildDir = "/build"; + + # This function closely mirrors what this Nix code does: + # https://github.com/NixOS/nix/blob/2.8.0/src/libexpr/primops.cc#L1102 + # https://github.com/NixOS/nix/blob/2.8.0/src/libexpr/eval.cc#L1981-L2036 + stringValue = value: + # We can't just use `toString` on all derivation attributes because that + # would not put path literals in the closure. 
So we explicitly copy + # those into the store here + if builtins.typeOf value == "path" then "${value}" + else if builtins.typeOf value == "list" then toString (map stringValue value) + else toString value; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L992-L1004 + drvEnv = lib.mapAttrs' (name: value: + let str = stringValue value; + in if lib.elem name (drv.drvAttrs.passAsFile or []) + then lib.nameValuePair "${name}Path" (writeText "pass-as-text-${name}" str) + else lib.nameValuePair name str + ) drv.drvAttrs // + # A mapping from output name to the nix store path where they should end up + # https://github.com/NixOS/nix/blob/2.8.0/src/libexpr/primops.cc#L1253 + lib.genAttrs drv.outputs (output: builtins.unsafeDiscardStringContext drv.${output}.outPath); + + # Environment variables set in the image + envVars = { + + # Root certificates for internet access + SSL_CERT_FILE = "${cacert}/etc/ssl/certs/ca-bundle.crt"; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1027-L1030 + # PATH = "/path-not-set"; + # Allows calling bash and `buildDerivation` as the Cmd + PATH = staticPath; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1032-L1038 + HOME = homeDirectory; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1040-L1044 + NIX_STORE = storeDir; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1046-L1047 + # TODO: Make configurable? + NIX_BUILD_CORES = "1"; + + } // drvEnv // { + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1008-L1010 + NIX_BUILD_TOP = sandboxBuildDir; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1012-L1013 + TMPDIR = sandboxBuildDir; + TEMPDIR = sandboxBuildDir; + TMP = sandboxBuildDir; + TEMP = sandboxBuildDir; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1015-L1019 + PWD = sandboxBuildDir; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1071-L1074 + # We don't set it here because the output here isn't handled in any special way + # NIX_LOG_FD = "2"; + + # https://github.com/NixOS/nix/blob/2.8.0/src/libstore/build/local-derivation-goal.cc#L1076-L1077 + TERM = "xterm-256color"; + }; + + + in streamLayeredImage { + inherit name tag; + contents = [ + binSh + usrBinEnv + (fakeNss.override { + # Allows programs to look up the build user's home directory + # https://github.com/NixOS/nix/blob/ffe155abd36366a870482625543f9bf924a58281/src/libstore/build/local-derivation-goal.cc#L906-L910 + # Slightly differs however: We use the passed-in homeDirectory instead of sandboxBuildDir. + # We're doing this because it's arguably a bug in Nix that sandboxBuildDir is used here: https://github.com/NixOS/nix/issues/6379 + extraPasswdLines = [ + "nixbld:x:${toString uid}:${toString gid}:Build user:${homeDirectory}:/noshell" + ]; + extraGroupLines = [ + "nixbld:!:${toString gid}:" + ]; + }) + ]; + + fakeRootCommands = '' + # Effectively a single-user installation of Nix, giving the user full + # control over the Nix store. 
Needed for building the derivation this + # shell is for, but also in case one wants to use Nix inside the + # image + mkdir -p ./nix/{store,var/nix} ./etc/nix + chown -R ${toString uid}:${toString gid} ./nix ./etc/nix + + # Gives the user control over the build directory + mkdir -p .${sandboxBuildDir} + chown -R ${toString uid}:${toString gid} .${sandboxBuildDir} + ''; + + # Run this image as the given uid/gid + config.User = "${toString uid}:${toString gid}"; + config.Cmd = + # https://github.com/NixOS/nix/blob/2.8.0/src/nix-build/nix-build.cc#L185-L186 + # https://github.com/NixOS/nix/blob/2.8.0/src/nix-build/nix-build.cc#L534-L536 + if run == null + then [ shell "--rcfile" rcfile ] + else [ shell rcfile ]; + config.WorkingDir = sandboxBuildDir; + config.Env = lib.mapAttrsToList (name: value: "${name}=${value}") envVars; + }; + + # Wrapper around streamNixShellImage to build an image from the result + buildNixShellImage = { drv, ... }@args: + let + stream = streamNixShellImage args; + in + runCommand "${drv.name}-env.tar.gz" + { + inherit (stream) imageName; + passthru = { inherit (stream) imageTag; }; + nativeBuildInputs = [ pigz ]; + } "${stream} | pigz -nTR > $out"; +} diff --git a/nixpkgs/pkgs/build-support/docker/detjson.py b/nixpkgs/pkgs/build-support/docker/detjson.py new file mode 100644 index 000000000000..fe82cbea11bb --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/detjson.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Deterministic layer json: https://github.com/docker/hub-feedback/issues/488 + +import sys +reload(sys) +sys.setdefaultencoding('UTF8') +import json + +# If any of the keys below are equal to a certain value +# then we can delete it because it's the default value +SAFEDELS = { + "Size": 0, + "config": { + "ExposedPorts": None, + "MacAddress": "", + "NetworkDisabled": False, + "PortSpecs": None, + "VolumeDriver": "" + } +} +SAFEDELS["container_config"] = SAFEDELS["config"] + +def makedet(j, safedels): + for k,v in safedels.items(): + if k not in j: + continue + if type(v) == dict: + makedet(j[k], v) + elif j[k] == v: + del j[k] + +def main(): + j = json.load(sys.stdin) + makedet(j, SAFEDELS) + json.dump(j, sys.stdout, sort_keys=True) + +if __name__ == '__main__': + main() diff --git a/nixpkgs/pkgs/build-support/docker/examples.nix b/nixpkgs/pkgs/build-support/docker/examples.nix new file mode 100644 index 000000000000..5784e650dc2e --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/examples.nix @@ -0,0 +1,848 @@ +# Examples of using the docker tools to build packages. +# +# This file defines several docker images. In order to use an image, +# build its derivation with `nix-build`, and then load the result with +# `docker load`. For example: +# +# $ nix-build '<nixpkgs>' -A dockerTools.examples.redis +# $ docker load < result + +{ pkgs, buildImage, buildLayeredImage, fakeNss, pullImage, shadowSetup, buildImageWithNixDb, pkgsCross, streamNixShellImage }: + +let + nixosLib = import ../../../nixos/lib { + # Experimental features need testing too, but there's no point in warning + # about it, so we enable the feature flag. + featureFlags.minimalModules = {}; + }; + evalMinimalConfig = module: nixosLib.evalModules { modules = [ module ]; }; + +in + +rec { + # 1. basic example + bash = buildImage { + name = "bash"; + tag = "latest"; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + paths = [ pkgs.bashInteractive ]; + pathsToLink = [ "/bin" ]; + }; + }; + + # 2. 
service example, layered on another image + redis = buildImage { + name = "redis"; + tag = "latest"; + + # for example's sake, we can layer redis on top of bash or debian + fromImage = bash; + # fromImage = debian; + + copyToRoot = pkgs.buildEnv { + name = "image-root"; + paths = [ pkgs.redis ]; + pathsToLink = [ "/bin" ]; + }; + + runAsRoot = '' + mkdir -p /data + ''; + + config = { + Cmd = [ "/bin/redis-server" ]; + WorkingDir = "/data"; + Volumes = { + "/data" = {}; + }; + }; + }; + + # 3. another service example + nginx = let + nginxPort = "80"; + nginxConf = pkgs.writeText "nginx.conf" '' + user nobody nobody; + daemon off; + error_log /dev/stdout info; + pid /dev/null; + events {} + http { + access_log /dev/stdout; + server { + listen ${nginxPort}; + index index.html; + location / { + root ${nginxWebRoot}; + } + } + } + ''; + nginxWebRoot = pkgs.writeTextDir "index.html" '' + <html><body><h1>Hello from NGINX</h1></body></html> + ''; + in + buildLayeredImage { + name = "nginx-container"; + tag = "latest"; + contents = [ + fakeNss + pkgs.nginx + ]; + + extraCommands = '' + mkdir -p tmp/nginx_client_body + + # nginx still tries to read this directory even if error_log + # directive is specifying another file :/ + mkdir -p var/log/nginx + ''; + + config = { + Cmd = [ "nginx" "-c" nginxConf ]; + ExposedPorts = { + "${nginxPort}/tcp" = {}; + }; + }; + }; + + # 4. example of pulling an image. could be used as a base for other images + nixFromDockerHub = pullImage { + imageName = "nixos/nix"; + imageDigest = "sha256:85299d86263a3059cf19f419f9d286cc9f06d3c13146a8ebbb21b3437f598357"; + sha256 = "19fw0n3wmddahzr20mhdqv6jkjn1kanh6n2mrr08ai53dr8ph5n7"; + finalImageTag = "2.2.1"; + finalImageName = "nix"; + }; + # Same example, but re-fetches every time the fetcher implementation changes. + # NOTE: Only use this for testing, or you'd be wasting a lot of time, network and space. + testNixFromDockerHub = pkgs.testers.invalidateFetcherByDrvHash pullImage { + imageName = "nixos/nix"; + imageDigest = "sha256:85299d86263a3059cf19f419f9d286cc9f06d3c13146a8ebbb21b3437f598357"; + sha256 = "19fw0n3wmddahzr20mhdqv6jkjn1kanh6n2mrr08ai53dr8ph5n7"; + finalImageTag = "2.2.1"; + finalImageName = "nix"; + }; + + # 5. example of multiple contents, emacs and vi happily coexisting + editors = buildImage { + name = "editors"; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ + pkgs.coreutils + pkgs.bash + pkgs.emacs + pkgs.vim + pkgs.nano + ]; + }; + }; + + # 6. nix example to play with the container nix store + # docker run -it --rm nix nix-store -qR $(nix-build '<nixpkgs>' -A nix) + nix = buildImageWithNixDb { + name = "nix"; + tag = "latest"; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ + # nix-store uses cat program to display results as specified by + # the image env variable NIX_PAGER. + pkgs.coreutils + pkgs.nix + pkgs.bash + ]; + }; + config = { + Env = [ + "NIX_PAGER=cat" + # A user is required by nix + # https://github.com/NixOS/nix/blob/9348f9291e5d9e4ba3c4347ea1b235640f54fd79/src/libutil/util.cc#L478 + "USER=nobody" + ]; + }; + }; + + # 7. example of adding something on top of an image pull by our + # dockerTools chain. + onTopOfPulledImage = buildImage { + name = "onTopOfPulledImage"; + tag = "latest"; + fromImage = nixFromDockerHub; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ pkgs.hello ]; + }; + }; + + # 8. 
regression test for erroneous use of eval and string expansion. + # See issue #34779 and PR #40947 for details. + runAsRootExtraCommands = pkgs.dockerTools.buildImage { + name = "runAsRootExtraCommands"; + tag = "latest"; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ pkgs.coreutils ]; + }; + # The parens here are to create problematic bash to embed and eval. In case + # this is *embedded* into the script (with nix expansion) the initial quotes + # will close the string and the following parens are unexpected + runAsRoot = ''echo "(runAsRoot)" > runAsRoot''; + extraCommands = ''echo "(extraCommand)" > extraCommands''; + }; + + # 9. Ensure that setting created to now results in a date which + # isn't the epoch + 1 + unstableDate = pkgs.dockerTools.buildImage { + name = "unstable-date"; + tag = "latest"; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ pkgs.coreutils ]; + }; + created = "now"; + }; + + # 10. Create a layered image + layered-image = pkgs.dockerTools.buildLayeredImage { + name = "layered-image"; + tag = "latest"; + extraCommands = ''echo "(extraCommand)" > extraCommands''; + config.Cmd = [ "${pkgs.hello}/bin/hello" ]; + contents = [ pkgs.hello pkgs.bash pkgs.coreutils ]; + }; + + # 11. Create an image on top of a layered image + layered-on-top = pkgs.dockerTools.buildImage { + name = "layered-on-top"; + tag = "latest"; + fromImage = layered-image; + extraCommands = '' + mkdir ./example-output + chmod 777 ./example-output + ''; + config = { + Env = [ "PATH=${pkgs.coreutils}/bin/" ]; + WorkingDir = "/example-output"; + Cmd = [ + "${pkgs.bash}/bin/bash" "-c" "echo hello > foo; cat foo" + ]; + }; + }; + + # 12 Create a layered image on top of a layered image + layered-on-top-layered = pkgs.dockerTools.buildLayeredImage { + name = "layered-on-top-layered"; + tag = "latest"; + fromImage = layered-image; + extraCommands = '' + mkdir ./example-output + chmod 777 ./example-output + ''; + config = { + Env = [ "PATH=${pkgs.coreutils}/bin/" ]; + WorkingDir = "/example-output"; + Cmd = [ + "${pkgs.bash}/bin/bash" "-c" "echo hello > foo; cat foo" + ]; + }; + }; + + # 13. example of running something as root on top of a parent image + # Regression test related to PR #52109 + runAsRootParentImage = buildImage { + name = "runAsRootParentImage"; + tag = "latest"; + runAsRoot = "touch /example-file"; + fromImage = bash; + }; + + # 14. example of 3 layers images This image is used to verify the + # order of layers is correct. + # It allows to validate + # - the layer of parent are below + # - the order of parent layer is preserved at image build time + # (this is why there are 3 images) + layersOrder = let + l1 = pkgs.dockerTools.buildImage { + name = "l1"; + tag = "latest"; + extraCommands = '' + mkdir -p tmp + echo layer1 > tmp/layer1 + echo layer1 > tmp/layer2 + echo layer1 > tmp/layer3 + ''; + }; + l2 = pkgs.dockerTools.buildImage { + name = "l2"; + fromImage = l1; + tag = "latest"; + extraCommands = '' + mkdir -p tmp + echo layer2 > tmp/layer2 + echo layer2 > tmp/layer3 + ''; + }; + in pkgs.dockerTools.buildImage { + name = "l3"; + fromImage = l2; + tag = "latest"; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ pkgs.coreutils ]; + }; + extraCommands = '' + mkdir -p tmp + echo layer3 > tmp/layer3 + ''; + }; + + # 15. Environment variable inheritance. + # Child image should inherit parents environment variables, + # optionally overriding them. 
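+ # One way to inspect the merged environment (illustrative commands; the
+ # child image is defined below as "child:latest"):
+ #   nix-build '<nixpkgs>' -A dockerTools.examples.environmentVariables
+ #   docker load < result
+ #   docker run --rm child:latest env   # expect FROM_PARENT, FROM_CHILD, LAST_LAYER=child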
+ environmentVariablesParent = pkgs.dockerTools.buildImage { + name = "parent"; + tag = "latest"; + config = { + Env = [ + "FROM_PARENT=true" + "LAST_LAYER=parent" + ]; + }; + }; + + environmentVariables = pkgs.dockerTools.buildImage { + name = "child"; + fromImage = environmentVariablesParent; + tag = "latest"; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ pkgs.coreutils ]; + }; + config = { + Env = [ + "FROM_CHILD=true" + "LAST_LAYER=child" + ]; + }; + }; + + environmentVariablesLayered = pkgs.dockerTools.buildLayeredImage { + name = "child"; + fromImage = environmentVariablesParent; + tag = "latest"; + contents = [ pkgs.coreutils ]; + config = { + Env = [ + "FROM_CHILD=true" + "LAST_LAYER=child" + ]; + }; + }; + + # 16. Create another layered image, for comparing layers with image 10. + another-layered-image = pkgs.dockerTools.buildLayeredImage { + name = "another-layered-image"; + tag = "latest"; + config.Cmd = [ "${pkgs.hello}/bin/hello" ]; + }; + + # 17. Create a layered image with only 2 layers + two-layered-image = pkgs.dockerTools.buildLayeredImage { + name = "two-layered-image"; + tag = "latest"; + config.Cmd = [ "${pkgs.hello}/bin/hello" ]; + contents = [ pkgs.bash pkgs.hello ]; + maxLayers = 2; + }; + + # 18. Create a layered image with more packages than max layers. + # coreutils and hello are part of the same layer + bulk-layer = pkgs.dockerTools.buildLayeredImage { + name = "bulk-layer"; + tag = "latest"; + contents = with pkgs; [ + coreutils hello + ]; + maxLayers = 2; + }; + + # 19. Create a layered image with a base image and more packages than max + # layers. coreutils and hello are part of the same layer + layered-bulk-layer = pkgs.dockerTools.buildLayeredImage { + name = "layered-bulk-layer"; + tag = "latest"; + fromImage = two-layered-image; + contents = with pkgs; [ + coreutils hello + ]; + maxLayers = 4; + }; + + # 20. Create a "layered" image without nix store layers. This is not + # recommended, but can be useful for base images in rare cases. + no-store-paths = pkgs.dockerTools.buildLayeredImage { + name = "no-store-paths"; + tag = "latest"; + extraCommands = '' + # This removes sharing of busybox and is not recommended. We do this + # to make the example suitable as a test case with working binaries. + cp -r ${pkgs.pkgsStatic.busybox}/* . + + # This is a "build" dependency that will not appear in the image + ${pkgs.hello}/bin/hello + ''; + }; + + nixLayered = pkgs.dockerTools.buildLayeredImageWithNixDb { + name = "nix-layered"; + tag = "latest"; + contents = [ + # nix-store uses cat program to display results as specified by + # the image env variable NIX_PAGER. + pkgs.coreutils + pkgs.nix + pkgs.bash + ]; + config = { + Env = [ + "NIX_PAGER=cat" + # A user is required by nix + # https://github.com/NixOS/nix/blob/9348f9291e5d9e4ba3c4347ea1b235640f54fd79/src/libutil/util.cc#L478 + "USER=nobody" + ]; + }; + }; + + # 21. Support files in the store on buildLayeredImage + # See: https://github.com/NixOS/nixpkgs/pull/91084#issuecomment-653496223 + filesInStore = pkgs.dockerTools.buildLayeredImageWithNixDb { + name = "file-in-store"; + tag = "latest"; + contents = [ + pkgs.coreutils + pkgs.nix + (pkgs.writeScriptBin "myscript" '' + #!${pkgs.runtimeShell} + cat ${pkgs.writeText "somefile" "some data"} + '') + ]; + config = { + Cmd = [ "myscript" ]; + # For some reason 'nix-store --verify' requires this environment variable + Env = [ "USER=root" ]; + }; + }; + + # 22. 
Ensure that setting created to now results in a date which + # isn't the epoch + 1 for layered images. + unstableDateLayered = pkgs.dockerTools.buildLayeredImage { + name = "unstable-date-layered"; + tag = "latest"; + contents = [ pkgs.coreutils ]; + created = "now"; + }; + + # 23. Ensure that layers are unpacked in the correct order before the + # runAsRoot script is executed. + layersUnpackOrder = + let + layerOnTopOf = parent: layerName: + pkgs.dockerTools.buildImage { + name = "layers-unpack-order-${layerName}"; + tag = "latest"; + fromImage = parent; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ pkgs.coreutils ]; + }; + runAsRoot = '' + #!${pkgs.runtimeShell} + echo -n "${layerName}" >> /layer-order + ''; + }; + # When executing the runAsRoot script when building layer C, if layer B is + # not unpacked on top of layer A, the contents of /layer-order will not be + # "ABC". + layerA = layerOnTopOf null "a"; + layerB = layerOnTopOf layerA "b"; + layerC = layerOnTopOf layerB "c"; + in layerC; + + # buildImage without explicit tag + bashNoTag = pkgs.dockerTools.buildImage { + name = "bash-no-tag"; + # Not recommended. Use `buildEnv` between copy and packages to avoid file duplication. + copyToRoot = pkgs.bashInteractive; + }; + + # buildLayeredImage without explicit tag + bashNoTagLayered = pkgs.dockerTools.buildLayeredImage { + name = "bash-no-tag-layered"; + contents = pkgs.bashInteractive; + }; + + # buildImage without explicit tag + bashNoTagStreamLayered = pkgs.dockerTools.streamLayeredImage { + name = "bash-no-tag-stream-layered"; + contents = pkgs.bashInteractive; + }; + + # buildLayeredImage with non-root user + bashLayeredWithUser = + let + nonRootShadowSetup = { user, uid, gid ? uid }: with pkgs; [ + ( + writeTextDir "etc/shadow" '' + root:!x::::::: + ${user}:!::::::: + '' + ) + ( + writeTextDir "etc/passwd" '' + root:x:0:0::/root:${runtimeShell} + ${user}:x:${toString uid}:${toString gid}::/home/${user}: + '' + ) + ( + writeTextDir "etc/group" '' + root:x:0: + ${user}:x:${toString gid}: + '' + ) + ( + writeTextDir "etc/gshadow" '' + root:x:: + ${user}:x:: + '' + ) + ]; + in + pkgs.dockerTools.buildLayeredImage { + name = "bash-layered-with-user"; + tag = "latest"; + contents = [ pkgs.bash pkgs.coreutils ] ++ nonRootShadowSetup { uid = 999; user = "somebody"; }; + }; + + # basic example, with cross compilation + cross = let + # Cross compile for x86_64 if on aarch64 + crossPkgs = + if pkgs.stdenv.hostPlatform.system == "aarch64-linux" then pkgsCross.gnu64 + else pkgsCross.aarch64-multiplatform; + in crossPkgs.dockerTools.buildImage { + name = "hello-cross"; + tag = "latest"; + copyToRoot = pkgs.buildEnv { + name = "image-root"; + pathsToLink = [ "/bin" ]; + paths = [ crossPkgs.hello ]; + }; + }; + + # layered image where a store path is itself a symlink + layeredStoreSymlink = + let + target = pkgs.writeTextDir "dir/target" "Content doesn't matter."; + symlink = pkgs.runCommand "symlink" {} "ln -s ${target} $out"; + in + pkgs.dockerTools.buildLayeredImage { + name = "layeredstoresymlink"; + tag = "latest"; + contents = [ pkgs.bash symlink ]; + } // { passthru = { inherit symlink; }; }; + + # image with registry/ prefix + prefixedImage = pkgs.dockerTools.buildImage { + name = "registry-1.docker.io/image"; + tag = "latest"; + config.Cmd = [ "${pkgs.hello}/bin/hello" ]; + }; + + # layered image with registry/ prefix + prefixedLayeredImage = pkgs.dockerTools.buildLayeredImage { + name = "registry-1.docker.io/layered-image"; + tag = 
"latest"; + config.Cmd = [ "${pkgs.hello}/bin/hello" ]; + }; + + # layered image with files owned by a user other than root + layeredImageWithFakeRootCommands = pkgs.dockerTools.buildLayeredImage { + name = "layered-image-with-fake-root-commands"; + tag = "latest"; + contents = [ + pkgs.pkgsStatic.busybox + ]; + fakeRootCommands = '' + mkdir -p ./home/alice + chown 1000 ./home/alice + ln -s ${pkgs.hello.overrideAttrs (o: { + # A unique `hello` to make sure that it isn't included via another mechanism by accident. + configureFlags = o.configureFlags or [] ++ [ " --program-prefix=layeredImageWithFakeRootCommands-" ]; + doCheck = false; + })} ./hello + ''; + }; + + # tarball consisting of both bash and redis images + mergedBashAndRedis = pkgs.dockerTools.mergeImages [ + bash + redis + ]; + + # tarball consisting of bash (without tag) and redis images + mergedBashNoTagAndRedis = pkgs.dockerTools.mergeImages [ + bashNoTag + redis + ]; + + # tarball consisting of bash and layered image with different owner of the + # /home/alice directory + mergedBashFakeRoot = pkgs.dockerTools.mergeImages [ + bash + layeredImageWithFakeRootCommands + ]; + + helloOnRoot = pkgs.dockerTools.streamLayeredImage { + name = "hello"; + tag = "latest"; + contents = [ + (pkgs.buildEnv { + name = "hello-root"; + paths = [ pkgs.hello ]; + }) + ]; + config.Cmd = [ "hello" ]; + }; + + helloOnRootNoStore = pkgs.dockerTools.streamLayeredImage { + name = "hello"; + tag = "latest"; + contents = [ + (pkgs.buildEnv { + name = "hello-root"; + paths = [ pkgs.hello ]; + }) + ]; + config.Cmd = [ "hello" ]; + includeStorePaths = false; + }; + + etc = + let + inherit (pkgs) lib; + nixosCore = (evalMinimalConfig ({ config, ... }: { + imports = [ + pkgs.pkgsModule + ../../../nixos/modules/system/etc/etc.nix + ]; + environment.etc."some-config-file" = { + text = '' + 127.0.0.1 localhost + ::1 localhost + ''; + # For executables: + # mode = "0755"; + }; + })); + in pkgs.dockerTools.streamLayeredImage { + name = "etc"; + tag = "latest"; + enableFakechroot = true; + fakeRootCommands = '' + mkdir -p /etc + ${nixosCore.config.system.build.etcActivationCommands} + ''; + config.Cmd = pkgs.writeScript "etc-cmd" '' + #!${pkgs.busybox}/bin/sh + ${pkgs.busybox}/bin/cat /etc/some-config-file + ''; + }; + + # Example export of the bash image + exportBash = pkgs.dockerTools.exportImage { fromImage = bash; }; + + imageViaFakeChroot = pkgs.dockerTools.streamLayeredImage { + name = "image-via-fake-chroot"; + tag = "latest"; + config.Cmd = [ "hello" ]; + enableFakechroot = true; + # Crucially, instead of a relative path, this creates /bin, which is + # intercepted by fakechroot. + # This functionality is not available on darwin as of 2021. + fakeRootCommands = '' + mkdir /bin + ln -s ${pkgs.hello}/bin/hello /bin/hello + ''; + }; + + build-image-with-path = buildImage { + name = "build-image-with-path"; + tag = "latest"; + # Not recommended. Use `buildEnv` between copy and packages to avoid file duplication. + copyToRoot = [ pkgs.bashInteractive ./test-dummy ]; + }; + + layered-image-with-path = pkgs.dockerTools.streamLayeredImage { + name = "layered-image-with-path"; + tag = "latest"; + contents = [ pkgs.bashInteractive ./test-dummy ]; + }; + + build-image-with-architecture = buildImage { + name = "build-image-with-architecture"; + tag = "latest"; + architecture = "arm64"; + # Not recommended. Use `buildEnv` between copy and packages to avoid file duplication. 
+ copyToRoot = [ pkgs.bashInteractive ./test-dummy ]; + }; + + layered-image-with-architecture = pkgs.dockerTools.streamLayeredImage { + name = "layered-image-with-architecture"; + tag = "latest"; + architecture = "arm64"; + contents = [ pkgs.bashInteractive ./test-dummy ]; + }; + + # ensure that caCertificates builds + image-with-certs = buildImage { + name = "image-with-certs"; + tag = "latest"; + + copyToRoot = pkgs.buildEnv { + name = "image-with-certs-root"; + paths = [ + pkgs.coreutils + pkgs.dockerTools.caCertificates + ]; + }; + + config = { + }; + }; + + nix-shell-basic = streamNixShellImage { + name = "nix-shell-basic"; + tag = "latest"; + drv = pkgs.hello; + }; + + nix-shell-hook = streamNixShellImage { + name = "nix-shell-hook"; + tag = "latest"; + drv = pkgs.mkShell { + shellHook = '' + echo "This is the shell hook!" + exit + ''; + }; + }; + + nix-shell-inputs = streamNixShellImage { + name = "nix-shell-inputs"; + tag = "latest"; + drv = pkgs.mkShell { + nativeBuildInputs = [ + pkgs.hello + ]; + }; + command = '' + hello + ''; + }; + + nix-shell-pass-as-file = streamNixShellImage { + name = "nix-shell-pass-as-file"; + tag = "latest"; + drv = pkgs.mkShell { + str = "this is a string"; + passAsFile = [ "str" ]; + }; + command = '' + cat "$strPath" + ''; + }; + + nix-shell-run = streamNixShellImage { + name = "nix-shell-run"; + tag = "latest"; + drv = pkgs.mkShell {}; + run = '' + case "$-" in + *i*) echo This shell is interactive ;; + *) echo This shell is not interactive ;; + esac + ''; + }; + + nix-shell-command = streamNixShellImage { + name = "nix-shell-command"; + tag = "latest"; + drv = pkgs.mkShell {}; + command = '' + case "$-" in + *i*) echo This shell is interactive ;; + *) echo This shell is not interactive ;; + esac + ''; + }; + + nix-shell-writable-home = streamNixShellImage { + name = "nix-shell-writable-home"; + tag = "latest"; + drv = pkgs.mkShell {}; + run = '' + if [[ "$HOME" != "$(eval "echo ~$(whoami)")" ]]; then + echo "\$HOME ($HOME) is not the same as ~\$(whoami) ($(eval "echo ~$(whoami)"))" + exit 1 + fi + + if ! touch $HOME/test-file; then + echo "home directory is not writable" + exit 1 + fi + echo "home directory is writable" + ''; + }; + + nix-shell-nonexistent-home = streamNixShellImage { + name = "nix-shell-nonexistent-home"; + tag = "latest"; + drv = pkgs.mkShell {}; + homeDirectory = "/homeless-shelter"; + run = '' + if [[ "$HOME" != "$(eval "echo ~$(whoami)")" ]]; then + echo "\$HOME ($HOME) is not the same as ~\$(whoami) ($(eval "echo ~$(whoami)"))" + exit 1 + fi + + if -e $HOME; then + echo "home directory exists" + exit 1 + fi + echo "home directory doesn't exist" + ''; + }; + + nix-shell-build-derivation = streamNixShellImage { + name = "nix-shell-build-derivation"; + tag = "latest"; + drv = pkgs.hello; + run = '' + buildDerivation + $out/bin/hello + ''; + }; + +} diff --git a/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker b/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker new file mode 100755 index 000000000000..f551d37cda96 --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker @@ -0,0 +1,173 @@ +#! 
/usr/bin/env bash + +set -e -o pipefail + +os= +arch= +imageName= +imageTag= +imageDigest= +finalImageName= +finalImageTag= +hashType=$NIX_HASH_ALGO +hashFormat=$hashFormat +format=nix + +usage(){ + echo >&2 "syntax: nix-prefetch-docker [options] [IMAGE_NAME [IMAGE_TAG|IMAGE_DIGEST]] + +Options: + --os os OS to fetch image for + --arch linux Arch to fetch image for + --image-name name Name of the image to fetch + --image-tag tag Image tag + --image-digest digest Image digest + --final-image-name name Desired name of the image + --final-image-tag tag Desired image tag + --json Output result in json format instead of nix + --quiet Only print the final result +" + exit 1 +} + +get_image_digest(){ + local imageName=$1 + local imageTag=$2 + + if test -z "$imageTag"; then + imageTag="latest" + fi + + skopeo --override-os "${os}" --override-arch "${arch}" --insecure-policy --tmpdir=$TMPDIR inspect "docker://$imageName:$imageTag" | jq '.Digest' -r +} + +get_name() { + local imageName=$1 + local imageTag=$2 + + echo "docker-image-$(echo "$imageName:$imageTag" | tr '/:' '-').tar" +} + +argi=0 +argfun="" +for arg; do + if test -z "$argfun"; then + case $arg in + --os) argfun=set_os;; + --arch) argfun=set_arch;; + --image-name) argfun=set_imageName;; + --image-tag) argfun=set_imageTag;; + --image-digest) argfun=set_imageDigest;; + --final-image-name) argfun=set_finalImageName;; + --final-image-tag) argfun=set_finalImageTag;; + --quiet) QUIET=true;; + --json) format=json;; + --help) usage; exit;; + *) + : $((++argi)) + case $argi in + 1) imageName=$arg;; + 2) [[ $arg == *"sha256"* ]] && imageDigest=$arg || imageTag=$arg;; + *) exit 1;; + esac + ;; + esac + else + case $argfun in + set_*) + var=${argfun#set_} + eval $var=$arg + ;; + esac + argfun="" + fi +done + +if test -z "$imageName"; then + usage +fi + +if test -z "$os"; then + os=linux +fi + +if test -z "$arch"; then + arch=amd64 +fi + +if test -z "$hashType"; then + hashType=sha256 +fi + +if test -z "$hashFormat"; then + hashFormat=base32 +fi + +if test -z "$finalImageName"; then + finalImageName="$imageName" +fi + +if test -z "$finalImageTag"; then + if test -z "$imageTag"; then + finalImageTag="latest" + else + finalImageTag="$imageTag" + fi +fi + +if test -z "$imageDigest"; then + imageDigest=$(get_image_digest $imageName $imageTag) +fi + +sourceUrl="docker://$imageName@$imageDigest" + +tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/skopeo-copy-tmp-XXXXXXXX")" +trap "rm -rf \"$tmpPath\"" EXIT + +tmpFile="$tmpPath/$(get_name $finalImageName $finalImageTag)" + +if test -z "$QUIET"; then + skopeo --insecure-policy --tmpdir=$TMPDIR --override-os ${os} --override-arch ${arch} copy "$sourceUrl" "docker-archive://$tmpFile:$finalImageName:$finalImageTag" >&2 +else + skopeo --insecure-policy --tmpdir=$TMPDIR --override-os ${os} --override-arch ${arch} copy "$sourceUrl" "docker-archive://$tmpFile:$finalImageName:$finalImageTag" > /dev/null +fi + +# Compute the hash. +imageHash=$(nix-hash --flat --type $hashType --base32 "$tmpFile") + +# Add the downloaded file to Nix store. 
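+# Note: the flat hash computed by nix-hash above is the same hash that the
+# nix-store --add-fixed call below uses for this file, so the sha256 printed
+# in the result is the hash of the image tarball that lands in the store.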
+finalPath=$(nix-store --add-fixed "$hashType" "$tmpFile") + +if test -z "$QUIET"; then + echo "-> ImageName: $imageName" >&2 + echo "-> ImageDigest: $imageDigest" >&2 + echo "-> FinalImageName: $finalImageName" >&2 + echo "-> FinalImageTag: $finalImageTag" >&2 + echo "-> ImagePath: $finalPath" >&2 + echo "-> ImageHash: $imageHash" >&2 +fi + +if [ "$format" == "nix" ]; then +cat <<EOF +{ + imageName = "$imageName"; + imageDigest = "$imageDigest"; + sha256 = "$imageHash"; + finalImageName = "$finalImageName"; + finalImageTag = "$finalImageTag"; +} +EOF + +else + +cat <<EOF +{ + "imageName": "$imageName", + "imageDigest": "$imageDigest", + "sha256": "$imageHash", + "finalImageName": "$finalImageName", + "finalImageTag": "$finalImageTag" +} +EOF + +fi diff --git a/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker.nix b/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker.nix new file mode 100644 index 000000000000..61e917461ed9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/nix-prefetch-docker.nix @@ -0,0 +1,24 @@ +{ lib, stdenv, makeWrapper, nix, skopeo, jq }: + +stdenv.mkDerivation { + name = "nix-prefetch-docker"; + + nativeBuildInputs = [ makeWrapper ]; + + dontUnpack = true; + + installPhase = '' + install -vD ${./nix-prefetch-docker} $out/bin/$name; + wrapProgram $out/bin/$name \ + --prefix PATH : ${lib.makeBinPath [ nix skopeo jq ]} \ + --set HOME /homeless-shelter + ''; + + preferLocalBuild = true; + + meta = with lib; { + description = "Script used to obtain source hashes for dockerTools.pullImage"; + maintainers = with maintainers; [ offline ]; + platforms = platforms.unix; + }; +} diff --git a/nixpkgs/pkgs/build-support/docker/stream_layered_image.py b/nixpkgs/pkgs/build-support/docker/stream_layered_image.py new file mode 100644 index 000000000000..d7c63eb43a78 --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/stream_layered_image.py @@ -0,0 +1,391 @@ +""" +This script generates a Docker image from a set of store paths. Uses +Docker Image Specification v1.2 as reference [1]. + +It expects a JSON file with the following properties and writes the +image as an uncompressed tarball to stdout: + +* "architecture", "config", "os", "created", "repo_tag" correspond to + the fields with the same name on the image spec [2]. +* "created" can be "now". +* "created" is also used as mtime for files added to the image. +* "store_layers" is a list of layers in ascending order, where each + layer is the list of store paths to include in that layer. + +The main challenge for this script to create the final image in a +streaming fashion, without dumping any intermediate data to disk +for performance. + +A docker image has each layer contents archived as separate tarballs, +and they later all get enveloped into a single big tarball in a +content addressed fashion. However, because how "tar" format works, +we have to know about the name (which includes the checksum in our +case) and the size of the tarball before we can start adding it to the +outer tarball. We achieve that by creating the layer tarballs twice; +on the first iteration we calculate the file size and the checksum, +and on the second one we actually stream the contents. 'add_layer_dir' +function does all this. 
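+
+Roughly, for each layer (see 'add_layer_dir' below for the real implementation):
+
+    checksummer = ExtractChecksum()
+    archive_paths_to(checksummer, paths, mtime)  # pass 1: compute size + sha256 only
+    checksum, size = checksummer.extract()
+    # pass 2: emit a TarInfo named "<checksum>/layer.tar" with that size, then
+    # stream the same archive again, this time into the outer tarball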
+ +[1]: https://github.com/moby/moby/blob/master/image/spec/v1.2.md +[2]: https://github.com/moby/moby/blob/4fb59c20a4fb54f944fe170d0ff1d00eb4a24d6f/image/spec/v1.2.md#image-json-field-descriptions +""" # noqa: E501 + + +import io +import os +import re +import sys +import json +import hashlib +import pathlib +import tarfile +import itertools +import threading +from datetime import datetime, timezone +from collections import namedtuple + + +def archive_paths_to(obj, paths, mtime): + """ + Writes the given store paths as a tar file to the given stream. + + obj: Stream to write to. Should have a 'write' method. + paths: List of store paths. + """ + + # gettarinfo makes the paths relative, this makes them + # absolute again + def append_root(ti): + ti.name = "/" + ti.name + return ti + + def apply_filters(ti): + ti.mtime = mtime + ti.uid = 0 + ti.gid = 0 + ti.uname = "root" + ti.gname = "root" + return ti + + def nix_root(ti): + ti.mode = 0o0555 # r-xr-xr-x + return ti + + def dir(path): + ti = tarfile.TarInfo(path) + ti.type = tarfile.DIRTYPE + return ti + + with tarfile.open(fileobj=obj, mode="w|") as tar: + # To be consistent with the docker utilities, we need to have + # these directories first when building layer tarballs. + tar.addfile(apply_filters(nix_root(dir("/nix")))) + tar.addfile(apply_filters(nix_root(dir("/nix/store")))) + + for path in paths: + path = pathlib.Path(path) + if path.is_symlink(): + files = [path] + else: + files = itertools.chain([path], path.rglob("*")) + + for filename in sorted(files): + ti = append_root(tar.gettarinfo(filename)) + + # copy hardlinks as regular files + if ti.islnk(): + ti.type = tarfile.REGTYPE + ti.linkname = "" + ti.size = filename.stat().st_size + + ti = apply_filters(ti) + if ti.isfile(): + with open(filename, "rb") as f: + tar.addfile(ti, f) + else: + tar.addfile(ti) + + +class ExtractChecksum: + """ + A writable stream which only calculates the final file size and + sha256sum, while discarding the actual contents. + """ + + def __init__(self): + self._digest = hashlib.sha256() + self._size = 0 + + def write(self, data): + self._digest.update(data) + self._size += len(data) + + def extract(self): + """ + Returns: Hex-encoded sha256sum and size as a tuple. + """ + return (self._digest.hexdigest(), self._size) + + +FromImage = namedtuple("FromImage", ["tar", "manifest_json", "image_json"]) +# Some metadata for a layer +LayerInfo = namedtuple("LayerInfo", ["size", "checksum", "path", "paths"]) + + +def load_from_image(from_image_str): + """ + Loads the given base image, if any. + + from_image_str: Path to the base image archive. + + Returns: A 'FromImage' object with references to the loaded base image, + or 'None' if no base image was provided. + """ + if from_image_str is None: + return None + + base_tar = tarfile.open(from_image_str) + + manifest_json_tarinfo = base_tar.getmember("manifest.json") + with base_tar.extractfile(manifest_json_tarinfo) as f: + manifest_json = json.load(f) + + image_json_tarinfo = base_tar.getmember(manifest_json[0]["Config"]) + with base_tar.extractfile(image_json_tarinfo) as f: + image_json = json.load(f) + + return FromImage(base_tar, manifest_json, image_json) + + +def add_base_layers(tar, from_image): + """ + Adds the layers from the given base image to the final image. + + tar: 'tarfile.TarFile' object for new layers to be added to. + from_image: 'FromImage' object with references to the loaded base image. 
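+
+    Yields: one 'LayerInfo' per base layer copied into the new image;
+    main() extends its layer list with these.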
+ """ + if from_image is None: + print("No 'fromImage' provided", file=sys.stderr) + return [] + + layers = from_image.manifest_json[0]["Layers"] + checksums = from_image.image_json["rootfs"]["diff_ids"] + layers_checksums = zip(layers, checksums) + + for num, (layer, checksum) in enumerate(layers_checksums, start=1): + layer_tarinfo = from_image.tar.getmember(layer) + checksum = re.sub(r"^sha256:", "", checksum) + + tar.addfile(layer_tarinfo, from_image.tar.extractfile(layer_tarinfo)) + path = layer_tarinfo.path + size = layer_tarinfo.size + + print("Adding base layer", num, "from", path, file=sys.stderr) + yield LayerInfo(size=size, checksum=checksum, path=path, paths=[path]) + + from_image.tar.close() + + +def overlay_base_config(from_image, final_config): + """ + Overlays the final image 'config' JSON on top of selected defaults from the + base image 'config' JSON. + + from_image: 'FromImage' object with references to the loaded base image. + final_config: 'dict' object of the final image 'config' JSON. + """ + if from_image is None: + return final_config + + base_config = from_image.image_json["config"] + + # Preserve environment from base image + final_env = base_config.get("Env", []) + final_config.get("Env", []) + if final_env: + # Resolve duplicates (last one wins) and format back as list + resolved_env = {entry.split("=", 1)[0]: entry for entry in final_env} + final_config["Env"] = list(resolved_env.values()) + return final_config + + +def add_layer_dir(tar, paths, store_dir, mtime): + """ + Appends given store paths to a TarFile object as a new layer. + + tar: 'tarfile.TarFile' object for the new layer to be added to. + paths: List of store paths. + store_dir: the root directory of the nix store + mtime: 'mtime' of the added files and the layer tarball. + Should be an integer representing a POSIX time. + + Returns: A 'LayerInfo' object containing some metadata of + the layer added. + """ + + invalid_paths = [i for i in paths if not i.startswith(store_dir)] + assert len(invalid_paths) == 0, \ + f"Expecting absolute paths from {store_dir}, but got: {invalid_paths}" + + # First, calculate the tarball checksum and the size. + extract_checksum = ExtractChecksum() + archive_paths_to( + extract_checksum, + paths, + mtime=mtime, + ) + (checksum, size) = extract_checksum.extract() + + path = f"{checksum}/layer.tar" + layer_tarinfo = tarfile.TarInfo(path) + layer_tarinfo.size = size + layer_tarinfo.mtime = mtime + + # Then actually stream the contents to the outer tarball. + read_fd, write_fd = os.pipe() + with open(read_fd, "rb") as read, open(write_fd, "wb") as write: + def producer(): + archive_paths_to( + write, + paths, + mtime=mtime, + ) + write.close() + + # Closing the write end of the fifo also closes the read end, + # so we don't need to wait until this thread is finished. + # + # Any exception from the thread will get printed by the default + # exception handler, and the 'addfile' call will fail since it + # won't be able to read required amount of bytes. + threading.Thread(target=producer).start() + tar.addfile(layer_tarinfo, read) + + return LayerInfo(size=size, checksum=checksum, path=path, paths=paths) + + +def add_customisation_layer(target_tar, customisation_layer, mtime): + """ + Adds the customisation layer as a new layer. This is layer is structured + differently; given store path has the 'layer.tar' and corresponding + sha256sum ready. + + tar: 'tarfile.TarFile' object for the new layer to be added to. + customisation_layer: Path containing the layer archive. 
+ mtime: 'mtime' of the added layer tarball. + """ + + checksum_path = os.path.join(customisation_layer, "checksum") + with open(checksum_path) as f: + checksum = f.read().strip() + assert len(checksum) == 64, f"Invalid sha256 at ${checksum_path}." + + layer_path = os.path.join(customisation_layer, "layer.tar") + + path = f"{checksum}/layer.tar" + tarinfo = target_tar.gettarinfo(layer_path) + tarinfo.name = path + tarinfo.mtime = mtime + + with open(layer_path, "rb") as f: + target_tar.addfile(tarinfo, f) + + return LayerInfo( + size=None, + checksum=checksum, + path=path, + paths=[customisation_layer] + ) + + +def add_bytes(tar, path, content, mtime): + """ + Adds a file to the tarball with given path and contents. + + tar: 'tarfile.TarFile' object. + path: Path of the file as a string. + content: Contents of the file. + mtime: 'mtime' of the file. Should be an integer representing a POSIX time. + """ + assert type(content) is bytes + + ti = tarfile.TarInfo(path) + ti.size = len(content) + ti.mtime = mtime + tar.addfile(ti, io.BytesIO(content)) + + +def main(): + with open(sys.argv[1], "r") as f: + conf = json.load(f) + + created = ( + datetime.now(tz=timezone.utc) + if conf["created"] == "now" + else datetime.fromisoformat(conf["created"]) + ) + mtime = int(created.timestamp()) + store_dir = conf["store_dir"] + + from_image = load_from_image(conf["from_image"]) + + with tarfile.open(mode="w|", fileobj=sys.stdout.buffer) as tar: + layers = [] + layers.extend(add_base_layers(tar, from_image)) + + start = len(layers) + 1 + for num, store_layer in enumerate(conf["store_layers"], start=start): + print("Creating layer", num, "from paths:", store_layer, + file=sys.stderr) + info = add_layer_dir(tar, store_layer, store_dir, mtime=mtime) + layers.append(info) + + print("Creating layer", len(layers) + 1, "with customisation...", + file=sys.stderr) + layers.append( + add_customisation_layer( + tar, + conf["customisation_layer"], + mtime=mtime + ) + ) + + print("Adding manifests...", file=sys.stderr) + + image_json = { + "created": datetime.isoformat(created), + "architecture": conf["architecture"], + "os": "linux", + "config": overlay_base_config(from_image, conf["config"]), + "rootfs": { + "diff_ids": [f"sha256:{layer.checksum}" for layer in layers], + "type": "layers", + }, + "history": [ + { + "created": datetime.isoformat(created), + "comment": f"store paths: {layer.paths}" + } + for layer in layers + ], + } + + image_json = json.dumps(image_json, indent=4).encode("utf-8") + image_json_checksum = hashlib.sha256(image_json).hexdigest() + image_json_path = f"{image_json_checksum}.json" + add_bytes(tar, image_json_path, image_json, mtime=mtime) + + manifest_json = [ + { + "Config": image_json_path, + "RepoTags": [conf["repo_tag"]], + "Layers": [layer.path for layer in layers], + } + ] + manifest_json = json.dumps(manifest_json, indent=4).encode("utf-8") + add_bytes(tar, "manifest.json", manifest_json, mtime=mtime) + + print("Done.", file=sys.stderr) + + +if __name__ == "__main__": + main() diff --git a/nixpkgs/pkgs/build-support/docker/tarsum.go b/nixpkgs/pkgs/build-support/docker/tarsum.go new file mode 100644 index 000000000000..f91a90bdbdab --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/tarsum.go @@ -0,0 +1,24 @@ +package main + +import ( + "fmt" + "io" + "io/ioutil" + "os" + "github.com/docker/docker/pkg/tarsum" +) + +func main() { + ts, err := tarsum.NewTarSum(os.Stdin, true, tarsum.Version1) + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + if _, err = io.Copy(ioutil.Discard, 
ts); err != nil { + fmt.Println(err) + os.Exit(1) + } + + fmt.Println(ts.Sum(nil)) +} diff --git a/nixpkgs/pkgs/build-support/docker/tarsum.nix b/nixpkgs/pkgs/build-support/docker/tarsum.nix new file mode 100644 index 000000000000..734c6b3d5aeb --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/tarsum.nix @@ -0,0 +1,42 @@ +{ stdenv, go, docker, nixosTests }: + +stdenv.mkDerivation { + name = "tarsum"; + + nativeBuildInputs = [ go ]; + disallowedReferences = [ go ]; + + dontUnpack = true; + + CGO_ENABLED = 0; + GOFLAGS = "-trimpath"; + GO111MODULE = "off"; + + buildPhase = '' + runHook preBuild + mkdir tarsum + cd tarsum + cp ${./tarsum.go} tarsum.go + export GOPATH=$(pwd) + export GOCACHE="$TMPDIR/go-cache" + mkdir -p src/github.com/docker/docker/pkg + ln -sT ${docker.moby-src}/pkg/tarsum src/github.com/docker/docker/pkg/tarsum + go build + runHook postBuild + ''; + + installPhase = '' + runHook preInstall + mkdir -p $out/bin + cp tarsum $out/bin/ + runHook postInstall + ''; + + passthru = { + tests = { + dockerTools = nixosTests.docker-tools; + }; + }; + + meta.platforms = go.meta.platforms; +} diff --git a/nixpkgs/pkgs/build-support/docker/test-dummy/hello.txt b/nixpkgs/pkgs/build-support/docker/test-dummy/hello.txt new file mode 100644 index 000000000000..495cc9fa8f9c --- /dev/null +++ b/nixpkgs/pkgs/build-support/docker/test-dummy/hello.txt @@ -0,0 +1 @@ +Hello there! diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-global-tool/default.nix b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-global-tool/default.nix new file mode 100644 index 000000000000..16cf029ca345 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-global-tool/default.nix @@ -0,0 +1,48 @@ +{ buildDotnetModule, emptyDirectory, mkNugetDeps, dotnet-sdk }: + +{ pname +, version + # Name of the nuget package to install, if different from pname +, nugetName ? pname + # Hash of the nuget package to install, will be given on first build +, nugetSha256 ? "" + # Additional nuget deps needed by the tool package +, nugetDeps ? (_: []) + # Executables to wrap into `$out/bin`, same as in `buildDotnetModule`, but with + # a default of `pname` instead of null, to avoid auto-wrapping everything +, executables ? pname + # The dotnet runtime to use, dotnet tools need a full SDK to function +, dotnet-runtime ? dotnet-sdk +, ... 
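+  # A minimal invocation might look like this (placeholder values; the real
+  # nugetSha256 is reported by the first, failing build):
+  #   buildDotnetGlobalTool {
+  #     pname = "some-tool";  # also the default nuget package name and wrapped executable
+  #     version = "1.0.0";
+  #     nugetSha256 = "";
+  #   }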
+} @ args: + +buildDotnetModule (args // { + inherit pname version dotnet-runtime executables; + + src = emptyDirectory; + + nugetDeps = mkNugetDeps { + name = pname; + nugetDeps = { fetchNuGet }: [ + (fetchNuGet { pname = nugetName; inherit version; sha256 = nugetSha256; }) + ] ++ (nugetDeps fetchNuGet); + }; + + projectFile = ""; + + useDotnetFromEnv = true; + + dontBuild = true; + + installPhase = '' + runHook preInstall + + dotnet tool install --tool-path $out/lib/${pname} ${nugetName} + + # remove files that contain nix store paths to temp nuget sources we made + find $out -name 'project.assets.json' -delete + find $out -name '.nupkg.metadata' -delete + + runHook postInstall + ''; +}) diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/default.nix b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/default.nix new file mode 100644 index 000000000000..686d89f8c11c --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/default.nix @@ -0,0 +1,308 @@ +{ lib +, stdenvNoCC +, callPackage +, writeShellScript +, srcOnly +, linkFarmFromDrvs +, symlinkJoin +, makeWrapper +, dotnetCorePackages +, mkNugetSource +, mkNugetDeps +, nuget-to-nix +, cacert +, coreutils +, runtimeShellPackage +}: + +{ name ? "${args.pname}-${args.version}" +, pname ? name +, enableParallelBuilding ? true +, doCheck ? false + # Flags to pass to `makeWrapper`. This is done to avoid double wrapping. +, makeWrapperArgs ? [ ] + + # Flags to pass to `dotnet restore`. +, dotnetRestoreFlags ? [ ] + # Flags to pass to `dotnet build`. +, dotnetBuildFlags ? [ ] + # Flags to pass to `dotnet test`, if running tests is enabled. +, dotnetTestFlags ? [ ] + # Flags to pass to `dotnet install`. +, dotnetInstallFlags ? [ ] + # Flags to pass to `dotnet pack`. +, dotnetPackFlags ? [ ] + # Flags to pass to dotnet in all phases. +, dotnetFlags ? [ ] + + # The path to publish the project to. When unset, the directory "$out/lib/$pname" is used. +, installPath ? null + # The binaries that should get installed to `$out/bin`, relative to `$out/lib/$pname/`. These get wrapped accordingly. + # Unfortunately, dotnet has no method for doing this automatically. + # If unset, all executables in the projects root will get installed. This may cause bloat! +, executables ? null + # Packs a project as a `nupkg`, and installs it to `$out/share`. If set to `true`, the derivation can be used as a dependency for another dotnet project by adding it to `projectReferences`. +, packNupkg ? false + # The packages project file, which contains instructions on how to compile it. This can be an array of multiple project files as well. +, projectFile ? null + # The NuGet dependency file. This locks all NuGet dependency versions, as otherwise they cannot be deterministically fetched. + # This can be generated by running the `passthru.fetch-deps` script. +, nugetDeps ? null + # A list of derivations containing nupkg packages for local project references. + # Referenced derivations can be built with `buildDotnetModule` with `packNupkg=true` flag. + # Since we are sharing them as nugets they must be added to csproj/fsproj files as `PackageReference` as well. + # For example, your project has a local dependency: + # <ProjectReference Include="../foo/bar.fsproj" /> + # To enable discovery through `projectReferences` you would need to add a line: + # <ProjectReference Include="../foo/bar.fsproj" /> + # <PackageReference Include="bar" Version="*" Condition=" '$(ContinuousIntegrationBuild)'=='true' "/> +, projectReferences ? 
[ ] + # Libraries that need to be available at runtime should be passed through this. + # These get wrapped into `LD_LIBRARY_PATH`. +, runtimeDeps ? [ ] + # The dotnet runtime ID. If null, fetch-deps will gather dependencies for all + # platforms in meta.platforms which are supported by the sdk. +, runtimeId ? null + + # Tests to disable. This gets passed to `dotnet test --filter "FullyQualifiedName!={}"`, to ensure compatibility with all frameworks. + # See https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#filter-option-details for more details. +, disabledTests ? [ ] + # The project file to run unit tests against. This is usually referenced in the regular project file, but sometimes it needs to be manually set. + # It gets restored and build, but not installed. You may need to regenerate your nuget lockfile after setting this. +, testProjectFile ? "" + + # The type of build to perform. This is passed to `dotnet` with the `--configuration` flag. Possible values are `Release`, `Debug`, etc. +, buildType ? "Release" + # If set to true, builds the application as a self-contained - removing the runtime dependency on dotnet +, selfContainedBuild ? false + # Whether to use an alternative wrapper, that executes the application DLL using the dotnet runtime from the user environment. `dotnet-runtime` is provided as a default in case no .NET is installed + # This is useful for .NET tools and applications that may need to run under different .NET runtimes +, useDotnetFromEnv ? false + # Whether to explicitly enable UseAppHost when building. This is redundant if useDotnetFromEnv is enabledz +, useAppHost ? true + # The dotnet SDK to use. +, dotnet-sdk ? dotnetCorePackages.sdk_6_0 + # The dotnet runtime to use. +, dotnet-runtime ? dotnetCorePackages.runtime_6_0 + # The dotnet SDK to run tests against. This can differentiate from the SDK compiled against. +, dotnet-test-sdk ? dotnet-sdk +, ... +} @ args: + +let + platforms = + if args ? meta.platforms + then lib.intersectLists args.meta.platforms dotnet-sdk.meta.platforms + else dotnet-sdk.meta.platforms; + + inherit (callPackage ./hooks { + inherit dotnet-sdk dotnet-test-sdk disabledTests nuget-source dotnet-runtime runtimeDeps buildType; + runtimeId = + if runtimeId != null + then runtimeId + else dotnetCorePackages.systemToDotnetRid stdenvNoCC.targetPlatform.system; + }) dotnetConfigureHook dotnetBuildHook dotnetCheckHook dotnetInstallHook dotnetFixupHook; + + localDeps = + if (projectReferences != [ ]) + then linkFarmFromDrvs "${name}-project-references" projectReferences + else null; + + _nugetDeps = + if (nugetDeps != null) then + if lib.isDerivation nugetDeps + then nugetDeps + else mkNugetDeps { + inherit name; + nugetDeps = import nugetDeps; + sourceFile = nugetDeps; + } + else throw "Defining the `nugetDeps` attribute is required, as to lock the NuGet dependencies. This file can be generated by running the `passthru.fetch-deps` script."; + + # contains the actual package dependencies + dependenciesSource = mkNugetSource { + name = "${name}-dependencies-source"; + description = "A Nuget source with the dependencies for ${name}"; + deps = [ _nugetDeps ] ++ lib.optional (localDeps != null) localDeps; + }; + + # this contains all the nuget packages that are implicitly referenced by the dotnet + # build system. 
having them as separate deps allows us to avoid having to regenerate + # a packages dependencies when the dotnet-sdk version changes + sdkDeps = lib.lists.flatten [ dotnet-sdk.packages ]; + + sdkSource = let + version = dotnet-sdk.version or (lib.concatStringsSep "-" dotnet-sdk.versions); + in mkNugetSource { + name = "dotnet-sdk-${version}-source"; + deps = sdkDeps; + }; + + nuget-source = symlinkJoin { + name = "${name}-nuget-source"; + paths = [ dependenciesSource sdkSource ]; + }; + + nugetDepsFile = _nugetDeps.sourceFile; +in +stdenvNoCC.mkDerivation (args // { + nativeBuildInputs = args.nativeBuildInputs or [ ] ++ [ + dotnetConfigureHook + dotnetBuildHook + dotnetCheckHook + dotnetInstallHook + dotnetFixupHook + + cacert + makeWrapper + dotnet-sdk + ]; + + makeWrapperArgs = args.makeWrapperArgs or [ ] ++ [ + "--prefix LD_LIBRARY_PATH : ${dotnet-sdk.icu}/lib" + ]; + + # Stripping breaks the executable + dontStrip = args.dontStrip or true; + + # gappsWrapperArgs gets included when wrapping for dotnet, as to avoid double wrapping + dontWrapGApps = args.dontWrapGApps or true; + + inherit selfContainedBuild useAppHost useDotnetFromEnv; + + passthru = { + inherit nuget-source; + } // lib.optionalAttrs (nugetDepsFile != null) { + fetch-deps = + let + flags = dotnetFlags ++ dotnetRestoreFlags; + runtimeIds = + if runtimeId != null + then [ runtimeId ] + else map (system: dotnetCorePackages.systemToDotnetRid system) platforms; + defaultDepsFile = + # Wire in the nugetDeps file such that running the script with no args + # runs it agains the correct deps file by default. + # Note that toString is necessary here as it results in the path at + # eval time (i.e. to the file in your local Nixpkgs checkout) rather + # than the Nix store path of the path after it's been imported. + if lib.isPath nugetDepsFile && !lib.hasPrefix "${builtins.storeDir}/" (toString nugetDepsFile) + then toString nugetDepsFile + else ''$(mktemp -t "${pname}-deps-XXXXXX.nix")''; + in + writeShellScript "fetch-${pname}-deps" '' + set -euo pipefail + + export PATH="${lib.makeBinPath [ coreutils runtimeShellPackage dotnet-sdk (nuget-to-nix.override { inherit dotnet-sdk; }) ]}" + + for arg in "$@"; do + case "$arg" in + --keep-sources|-k) + keepSources=1 + shift + ;; + --help|-h) + echo "usage: $0 [--keep-sources] [--help] <output path>" + echo " <output path> The path to write the lockfile to. A temporary file is used if this is not set" + echo " --keep-sources Dont remove temporary directories upon exit, useful for debugging" + echo " --help Show this help message" + exit + ;; + esac + done + + if [[ ''${TMPDIR:-} == /run/user/* ]]; then + # /run/user is usually a tmpfs in RAM, which may be too small + # to store all downloaded dotnet packages + unset TMPDIR + fi + + export tmp=$(mktemp -td "deps-${pname}-XXXXXX") + HOME=$tmp/home + + exitTrap() { + test -n "''${ranTrap-}" && return + ranTrap=1 + + if test -n "''${keepSources-}"; then + echo -e "Path to the source: $tmp/src\nPath to the fake home: $tmp/home" + else + rm -rf "$tmp" + fi + + # Since mktemp is used this will be empty if the script didnt succesfully complete + if ! 
test -s "$depsFile"; then + rm -rf "$depsFile" + fi + } + + trap exitTrap EXIT INT TERM + + dotnetRestore() { + local -r project="''${1-}" + local -r rid="$2" + + dotnet restore ''${project-} \ + -p:ContinuousIntegrationBuild=true \ + -p:Deterministic=true \ + --packages "$tmp/nuget_pkgs" \ + --runtime "$rid" \ + --no-cache \ + --force \ + ${lib.optionalString (!enableParallelBuilding) "--disable-parallel"} \ + ${lib.optionalString (flags != []) (toString flags)} + } + + declare -a projectFiles=( ${toString (lib.toList projectFile)} ) + declare -a testProjectFiles=( ${toString (lib.toList testProjectFile)} ) + + export DOTNET_NOLOGO=1 + export DOTNET_CLI_TELEMETRY_OPTOUT=1 + + depsFile=$(realpath "''${1:-${defaultDepsFile}}") + echo Will write lockfile to "$depsFile" + mkdir -p "$tmp/nuget_pkgs" + + storeSrc="${srcOnly args}" + src=$tmp/src + cp -rT "$storeSrc" "$src" + chmod -R +w "$src" + + cd "$src" + echo "Restoring project..." + + ${dotnet-sdk}/bin/dotnet tool restore + cp -r $HOME/.nuget/packages/* $tmp/nuget_pkgs || true + + for rid in "${lib.concatStringsSep "\" \"" runtimeIds}"; do + (( ''${#projectFiles[@]} == 0 )) && dotnetRestore "" "$rid" + + for project in ''${projectFiles[@]-} ''${testProjectFiles[@]-}; do + dotnetRestore "$project" "$rid" + done + done + # Second copy, makes sure packages restored by ie. paket are included + cp -r $HOME/.nuget/packages/* $tmp/nuget_pkgs || true + + echo "Succesfully restored project" + + echo "Writing lockfile..." + + excluded_sources="${lib.concatStringsSep " " sdkDeps}" + for excluded_source in ''${excluded_sources[@]}; do + ls "$excluded_source" >> "$tmp/excluded_list" + done + tmpFile="$tmp"/deps.nix + echo -e "# This file was automatically generated by passthru.fetch-deps.\n# Please dont edit it manually, your changes might get overwritten!\n" > "$tmpFile" + nuget-to-nix "$tmp/nuget_pkgs" "$tmp/excluded_list" >> "$tmpFile" + mv "$tmpFile" "$depsFile" + echo "Succesfully wrote lockfile to $depsFile" + ''; + } // args.passthru or { }; + + meta = (args.meta or { }) // { inherit platforms; }; +} + # ICU tries to unconditionally load files from /usr/share/icu on Darwin, which makes builds fail + # in the sandbox, so disable ICU on Darwin. This, as far as I know, shouldn't cause any built packages + # to behave differently, just the dotnet build tool. 
+ // lib.optionalAttrs stdenvNoCC.isDarwin { DOTNET_SYSTEM_GLOBALIZATION_INVARIANT = 1; }) diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/default.nix b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/default.nix new file mode 100644 index 000000000000..7012ff36a4a5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/default.nix @@ -0,0 +1,89 @@ +{ lib +, stdenv +, which +, coreutils +, zlib +, openssl +, callPackage +, makeSetupHook +, makeWrapper +, dotnet-sdk +, dotnet-test-sdk +, disabledTests +, nuget-source +, dotnet-runtime +, runtimeDeps +, buildType +, runtimeId +}: +assert (builtins.isString runtimeId); + +let + libraryPath = lib.makeLibraryPath runtimeDeps; +in +{ + dotnetConfigureHook = callPackage ({ }: + makeSetupHook { + name = "dotnet-configure-hook"; + propagatedBuildInputs = [ dotnet-sdk nuget-source ]; + substitutions = { + nugetSource = nuget-source; + dynamicLinker = "${stdenv.cc}/nix-support/dynamic-linker"; + libPath = lib.makeLibraryPath [ + stdenv.cc.cc.lib + stdenv.cc.libc + dotnet-sdk.passthru.icu + zlib + openssl + ]; + inherit runtimeId; + }; + } ./dotnet-configure-hook.sh) { }; + + dotnetBuildHook = callPackage ({ }: + makeSetupHook { + name = "dotnet-build-hook"; + propagatedBuildInputs = [ dotnet-sdk ]; + substitutions = { + inherit buildType runtimeId; + }; + } ./dotnet-build-hook.sh) { }; + + dotnetCheckHook = callPackage ({ }: + makeSetupHook { + name = "dotnet-check-hook"; + propagatedBuildInputs = [ dotnet-test-sdk ]; + substitutions = { + inherit buildType runtimeId libraryPath; + disabledTests = lib.optionalString (disabledTests != []) + (let + escapedNames = lib.lists.map (n: lib.replaceStrings [","] ["%2C"] n) disabledTests; + filters = lib.lists.map (n: "FullyQualifiedName!=${n}") escapedNames; + in + "${lib.concatStringsSep "&" filters}"); + }; + } ./dotnet-check-hook.sh) { }; + + dotnetInstallHook = callPackage ({ }: + makeSetupHook { + name = "dotnet-install-hook"; + propagatedBuildInputs = [ dotnet-sdk ]; + substitutions = { + inherit buildType runtimeId; + }; + } ./dotnet-install-hook.sh) { }; + + dotnetFixupHook = callPackage ({ }: + makeSetupHook { + name = "dotnet-fixup-hook"; + propagatedBuildInputs = [ dotnet-runtime ]; + substitutions = { + dotnetRuntime = dotnet-runtime; + runtimeDeps = libraryPath; + shell = stdenv.shell; + which = "${which}/bin/which"; + dirname = "${coreutils}/bin/dirname"; + realpath = "${coreutils}/bin/realpath"; + }; + } ./dotnet-fixup-hook.sh) { }; +} diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-build-hook.sh b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-build-hook.sh new file mode 100644 index 000000000000..e9567b64cf2c --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-build-hook.sh @@ -0,0 +1,65 @@ +# inherit arguments from derivation +dotnetBuildFlags=( ${dotnetBuildFlags[@]-} ) + +dotnetBuildHook() { + echo "Executing dotnetBuildHook" + + runHook preBuild + + if [ "${enableParallelBuilding-}" ]; then + local -r maxCpuFlag="$NIX_BUILD_CORES" + local -r parallelBuildFlag="true" + else + local -r maxCpuFlag="1" + local -r parallelBuildFlag="false" + fi + + if [ "${selfContainedBuild-}" ]; then + dotnetBuildFlags+=("-p:SelfContained=true") + else + dotnetBuildFlags+=("-p:SelfContained=false") + fi + + if [ "${useAppHost-}" ]; then + dotnetBuildFlags+=("-p:UseAppHost=true") + fi + + if [ "${version-}" ]; then + local -r versionFlag="-p:Version=${version-}" 
+ fi + + dotnetBuild() { + local -r project="${1-}" + + runtimeIdFlags=() + if [[ "$project" == *.csproj ]] || [ "${selfContainedBuild-}" ]; then + runtimeIdFlags+=("--runtime @runtimeId@") + fi + + env dotnet build ${project-} \ + -maxcpucount:$maxCpuFlag \ + -p:BuildInParallel=$parallelBuildFlag \ + -p:ContinuousIntegrationBuild=true \ + -p:Deterministic=true \ + --configuration "@buildType@" \ + --no-restore \ + ${versionFlag-} \ + ${runtimeIdFlags[@]} \ + ${dotnetBuildFlags[@]} \ + ${dotnetFlags[@]} + } + + (( "${#projectFile[@]}" == 0 )) && dotnetBuild + + for project in ${projectFile[@]} ${testProjectFile[@]-}; do + dotnetBuild "$project" + done + + runHook postBuild + + echo "Finished dotnetBuildHook" +} + +if [[ -z "${dontDotnetBuild-}" && -z "${buildPhase-}" ]]; then + buildPhase=dotnetBuildHook +fi diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-check-hook.sh b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-check-hook.sh new file mode 100644 index 000000000000..507721ef9818 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-check-hook.sh @@ -0,0 +1,46 @@ +# inherit arguments from derivation +dotnetTestFlags=( ${dotnetTestFlags[@]-} ) + +dotnetCheckHook() { + echo "Executing dotnetCheckHook" + + runHook preCheck + + if [ "${disabledTests-}" ]; then + local -r disabledTestsFlag="--filter @disabledTests@" + fi + + if [ "${enableParallelBuilding-}" ]; then + local -r maxCpuFlag="$NIX_BUILD_CORES" + else + local -r maxCpuFlag="1" + fi + + for project in ${testProjectFile[@]-${projectFile[@]}}; do + runtimeIdFlags=() + if [[ "$project" == *.csproj ]]; then + runtimeIdFlags=("--runtime @runtimeId@") + fi + + env "LD_LIBRARY_PATH=@libraryPath@" \ + dotnet test "$project" \ + -maxcpucount:$maxCpuFlag \ + -p:ContinuousIntegrationBuild=true \ + -p:Deterministic=true \ + --configuration "@buildType@" \ + --no-build \ + --logger "console;verbosity=normal" \ + ${disabledTestsFlag-} \ + ${runtimeIdFlags[@]} \ + "${dotnetTestFlags[@]}" \ + "${dotnetFlags[@]}" + done + + runHook postCheck + + echo "Finished dotnetCheckHook" +} + +if [[ -z "${dontDotnetCheck-}" && -z "${checkPhase-}" ]]; then + checkPhase=dotnetCheckHook +fi diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-configure-hook.sh b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-configure-hook.sh new file mode 100644 index 000000000000..c046fc3c306b --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-configure-hook.sh @@ -0,0 +1,82 @@ +declare -a projectFile testProjectFile + +# Inherit arguments from derivation +dotnetFlags=( ${dotnetFlags[@]-} ) +dotnetRestoreFlags=( ${dotnetRestoreFlags[@]-} ) + +dotnetConfigureHook() { + echo "Executing dotnetConfigureHook" + + runHook preConfigure + + if [ -z "${enableParallelBuilding-}" ]; then + local -r parallelFlag="--disable-parallel" + fi + + dotnetRestore() { + local -r project="${1-}" + env dotnet restore ${project-} \ + -p:ContinuousIntegrationBuild=true \ + -p:Deterministic=true \ + --runtime "@runtimeId@" \ + --source "@nugetSource@/lib" \ + ${parallelFlag-} \ + ${dotnetRestoreFlags[@]} \ + ${dotnetFlags[@]} + } + + # Generate a NuGet.config file to make sure everything, + # including things like <Sdk /> dependencies, is restored from the proper source +cat <<EOF > "./NuGet.config" +<?xml version="1.0" encoding="utf-8"?> +<configuration> + <packageSources> + <clear /> + <add key="nugetSource" value="@nugetSource@/lib" 
/> + </packageSources> +</configuration> +EOF + + # Patch paket.dependencies and paket.lock (if found) to use the proper source. This ensures + # paket restore works correctly + # We use + instead of / in sed to avoid problems with slashes + find -name paket.dependencies -exec sed -i 's+source .*+source @nugetSource@/lib+g' {} \; + find -name paket.lock -exec sed -i 's+remote:.*+remote: @nugetSource@/lib+g' {} \; + + env dotnet tool restore --add-source "@nugetSource@/lib" + + (( "${#projectFile[@]}" == 0 )) && dotnetRestore + + for project in ${projectFile[@]} ${testProjectFile[@]-}; do + dotnetRestore "$project" + done + + echo "Fixing up native binaries..." + # Find all native binaries and nuget libraries, and fix them up, + # by setting the proper interpreter and rpath to some commonly used libraries + for binary in $(find "$HOME/.nuget/packages/" -type f -executable); do + if patchelf --print-interpreter "$binary" >/dev/null 2>/dev/null; then + echo "Found binary: $binary, fixing it up..." + patchelf --set-interpreter "$(cat "@dynamicLinker@")" "$binary" + + # This makes sure that if the binary requires some specific runtime dependencies, it can find it. + # This fixes dotnet-built binaries like crossgen2 + patchelf \ + --add-needed libicui18n.so \ + --add-needed libicuuc.so \ + --add-needed libz.so \ + --add-needed libssl.so \ + "$binary" + + patchelf --set-rpath "@libPath@" "$binary" + fi + done + + runHook postConfigure + + echo "Finished dotnetConfigureHook" +} + +if [[ -z "${dontDotnetConfigure-}" && -z "${configurePhase-}" ]]; then + configurePhase=dotnetConfigureHook +fi diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-fixup-hook.sh b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-fixup-hook.sh new file mode 100644 index 000000000000..4fec939bed33 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-fixup-hook.sh @@ -0,0 +1,56 @@ +# Inherit arguments from the derivation +declare -a derivationMakeWrapperArgs="( ${makeWrapperArgs-} )" +makeWrapperArgs=( "${derivationMakeWrapperArgs[@]}" ) + +# First argument is the executable you want to wrap, +# the second is the destination for the wrapper. +wrapDotnetProgram() { + local dotnetRootFlags=() + + if [ ! "${selfContainedBuild-}" ]; then + if [ "${useDotnetFromEnv-}" ]; then + # if dotnet CLI is available, set DOTNET_ROOT based on it. Otherwise set to default .NET runtime + dotnetRootFlags+=("--run" 'command -v dotnet &>/dev/null && export DOTNET_ROOT="$(@dirname@ "$(@realpath@ "$(@which@ dotnet)")")" || export DOTNET_ROOT="@dotnetRuntime@"') + dotnetRootFlags+=("--suffix" "PATH" ":" "@dotnetRuntime@/bin") + else + dotnetRootFlags+=("--set" "DOTNET_ROOT" "@dotnetRuntime@") + dotnetRootFlags+=("--prefix" "PATH" ":" "@dotnetRuntime@/bin") + fi + fi + + makeWrapper "$1" "$2" \ + --suffix "LD_LIBRARY_PATH" : "@runtimeDeps@" \ + "${dotnetRootFlags[@]}" \ + "${gappsWrapperArgs[@]}" \ + "${makeWrapperArgs[@]}" + + echo "installed wrapper to "$2"" +} + +dotnetFixupHook() { + echo "Executing dotnetFixupPhase" + + if [ "${executables-}" ]; then + for executable in ${executables[@]}; do + path="$out/lib/$pname/$executable" + + if test -x "$path"; then + wrapDotnetProgram "$path" "$out/bin/$(basename "$executable")" + else + echo "Specified binary \"$executable\" is either not an executable or does not exist!" 
+ echo "Looked in $path" + exit 1 + fi + done + else + while IFS= read -d '' executable; do + wrapDotnetProgram "$executable" "$out/bin/$(basename "$executable")" \; + done < <(find "$out/lib/$pname" ! -name "*.dll" -executable -type f -print0) + fi + + echo "Finished dotnetFixupPhase" +} + +if [[ -z "${dontDotnetFixup-}" ]]; then + preFixupPhases+=" dotnetFixupHook" +fi diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-install-hook.sh b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-install-hook.sh new file mode 100644 index 000000000000..3f2a89c41404 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-module/hooks/dotnet-install-hook.sh @@ -0,0 +1,79 @@ +# inherit arguments from derivation +dotnetInstallFlags=( ${dotnetInstallFlags[@]-} ) + +dotnetInstallHook() { + echo "Executing dotnetInstallHook" + + runHook preInstall + + if [ "${selfContainedBuild-}" ]; then + dotnetInstallFlags+=("--self-contained") + else + dotnetInstallFlags+=("--no-self-contained") + # https://learn.microsoft.com/en-us/dotnet/core/deploying/trimming/trim-self-contained + # Trimming is only available for self-contained build, so force disable it here + dotnetInstallFlags+=("-p:PublishTrimmed=false") + fi + + if [ "${useAppHost-}" ]; then + dotnetInstallFlags+=("-p:UseAppHost=true") + fi + + dotnetPublish() { + local -r project="${1-}" + + runtimeIdFlags=() + if [[ "$project" == *.csproj ]] || [ "${selfContainedBuild-}" ]; then + runtimeIdFlags+=("--runtime @runtimeId@") + fi + + env dotnet publish ${project-} \ + -p:ContinuousIntegrationBuild=true \ + -p:Deterministic=true \ + --output "$out/lib/${pname}" \ + --configuration "@buildType@" \ + --no-build \ + ${runtimeIdFlags[@]} \ + ${dotnetInstallFlags[@]} \ + ${dotnetFlags[@]} + } + + dotnetPack() { + local -r project="${1-}" + env dotnet pack ${project-} \ + -p:ContinuousIntegrationBuild=true \ + -p:Deterministic=true \ + --output "$out/share" \ + --configuration "@buildType@" \ + --no-build \ + --runtime "@runtimeId@" \ + ${dotnetPackFlags[@]} \ + ${dotnetFlags[@]} + } + + if (( "${#projectFile[@]}" == 0 )); then + dotnetPublish + else + for project in ${projectFile[@]}; do + dotnetPublish "$project" + done + fi + + if [[ "${packNupkg-}" ]]; then + if (( "${#projectFile[@]}" == 0 )); then + dotnetPack + else + for project in ${projectFile[@]}; do + dotnetPack "$project" + done + fi + fi + + runHook postInstall + + echo "Finished dotnetInstallHook" +} + +if [[ -z "${dontDotnetInstall-}" && -z "${installPhase-}" ]]; then + installPhase=dotnetInstallHook +fi diff --git a/nixpkgs/pkgs/build-support/dotnet/build-dotnet-package/default.nix b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-package/default.nix new file mode 100644 index 000000000000..14446ef05e72 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/build-dotnet-package/default.nix @@ -0,0 +1,116 @@ +{ stdenv, lib, makeWrapper, pkg-config, mono, dotnetbuildhelpers }: + +attrsOrig @ +{ pname +, version +, nativeBuildInputs ? [] +, xBuildFiles ? [ ] +, xBuildFlags ? [ "/p:Configuration=Release" ] +, outputFiles ? [ "bin/Release/*" ] +, dllFiles ? [ "*.dll" ] +, exeFiles ? [ "*.exe" ] +# Additional arguments to pass to the makeWrapper function, which wraps +# generated binaries. +, makeWrapperArgs ? [ ] +, ... 
}: + let + arrayToShell = (a: toString (map (lib.escape (lib.stringToCharacters "\\ ';$`()|<>\t") ) a)); + + attrs = { + inherit pname version; + + nativeBuildInputs = [ + pkg-config + makeWrapper + dotnetbuildhelpers + mono + ] ++ nativeBuildInputs; + + configurePhase = '' + runHook preConfigure + + [ -z "''${dontPlacateNuget-}" ] && placate-nuget.sh + [ -z "''${dontPlacatePaket-}" ] && placate-paket.sh + [ -z "''${dontPatchFSharpTargets-}" ] && patch-fsharp-targets.sh + + runHook postConfigure + ''; + + buildPhase = '' + runHook preBuild + + echo Building dotNET packages... + + # Probably needs to be moved to fsharp + if pkg-config FSharp.Core + then + export FSharpTargetsPath="$(dirname $(pkg-config FSharp.Core --variable=Libraries))/Microsoft.FSharp.Targets" + fi + + ran="" + for xBuildFile in ${arrayToShell xBuildFiles} ''${xBuildFilesExtra} + do + ran="yes" + xbuild ${arrayToShell xBuildFlags} ''${xBuildFlagsArray} $xBuildFile + done + + [ -z "$ran" ] && xbuild ${arrayToShell xBuildFlags} ''${xBuildFlagsArray} + + runHook postBuild + ''; + + dontStrip = true; + + installPhase = '' + runHook preInstall + + target="$out/lib/dotnet/${pname}" + mkdir -p "$target" + + cp -rv ${arrayToShell outputFiles} "''${outputFilesArray[@]}" "$target" + + if [ -z "''${dontRemoveDuplicatedDlls-}" ] + then + pushd "$out" + remove-duplicated-dlls.sh + popd + fi + + set -f + for dllPattern in ${arrayToShell dllFiles} ''${dllFilesArray[@]} + do + set +f + for dll in "$target"/$dllPattern + do + [ -f "$dll" ] || continue + if pkg-config $(basename -s .dll "$dll") + then + echo "$dll already exported by a buildInputs, not re-exporting" + else + create-pkg-config-for-dll.sh "$out/lib/pkgconfig" "$dll" + fi + done + done + + set -f + for exePattern in ${arrayToShell exeFiles} ''${exeFilesArray[@]} + do + set +f + for exe in "$target"/$exePattern + do + [ -f "$exe" ] || continue + mkdir -p "$out"/bin + commandName="$(basename -s .exe "$(echo "$exe" | tr "[A-Z]" "[a-z]")")" + makeWrapper \ + "${mono}/bin/mono" \ + "$out"/bin/"$commandName" \ + --add-flags "\"$exe\"" \ + ''${makeWrapperArgs} + done + done + + runHook postInstall + ''; + }; + in + stdenv.mkDerivation (attrs // (builtins.removeAttrs attrsOrig [ "nativeBuildInputs" ] )) diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/create-pkg-config-for-dll.sh b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/create-pkg-config-for-dll.sh new file mode 100644 index 000000000000..379141704523 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/create-pkg-config-for-dll.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +targetDir="$1" +dllFullPath="$2" + +dllVersion="$(monodis --assembly "$dllFullPath" | grep ^Version: | cut -f 2 -d : | xargs)" +[ -z "$dllVersion" ] && echo "Defaulting dllVersion to 0.0.0" && dllVersion="0.0.0" +dllFileName="$(basename $dllFullPath)" +dllRootName="$(basename -s .dll $dllFileName)" +targetPcFile="$targetDir"/"$dllRootName".pc + +mkdir -p "$targetDir" + +cat > $targetPcFile << EOF +Libraries=$dllFullPath + +Name: $dllRootName +Description: $dllRootName +Version: $dllVersion +Libs: -r:$dllFileName +EOF + +echo "Created $targetPcFile" diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/default.nix b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/default.nix new file mode 100644 index 000000000000..4348832ac04c --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/default.nix @@ -0,0 +1,18 @@ +{ runCommand, mono, pkg-config }: + runCommand + "dotnetbuildhelpers" + 
{ preferLocalBuild = true; } + '' + target="$out/bin" + mkdir -p "$target" + + for script in ${./create-pkg-config-for-dll.sh} ${./patch-fsharp-targets.sh} ${./remove-duplicated-dlls.sh} ${./placate-nuget.sh} ${./placate-paket.sh} + do + scriptName="$(basename "$script" | cut -f 2- -d -)" + cp -v "$script" "$target"/"$scriptName" + chmod 755 "$target"/"$scriptName" + patchShebangs "$target"/"$scriptName" + substituteInPlace "$target"/"$scriptName" --replace pkg-config ${pkg-config}/bin/${pkg-config.targetPrefix}pkg-config + substituteInPlace "$target"/"$scriptName" --replace monodis ${mono}/bin/monodis + done + '' diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/patch-fsharp-targets.sh b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/patch-fsharp-targets.sh new file mode 100755 index 000000000000..3f81cc73e801 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/patch-fsharp-targets.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +# Some project files look for F# targets in $(FSharpTargetsPath) +# so it's a good idea to add something like this to your ~/.bash_profile: + +# export FSharpTargetsPath=$(dirname $(which fsharpc))/../lib/mono/4.0/Microsoft.FSharp.Targets + +# In build scripts, you would add somehting like this: + +# export FSharpTargetsPath="${fsharp}/lib/mono/4.0/Microsoft.FSharp.Targets" + +# However, some project files look for F# targets in the main Mono directory. When that happens +# patch the project files using this script so they will look in $(FSharpTargetsPath) instead. + +echo "Patching F# targets in fsproj files..." + +find -iname \*.fsproj -print -exec \ + sed --in-place=.bak \ + -e 's,<FSharpTargetsPath>\([^<]*\)</FSharpTargetsPath>,<FSharpTargetsPath Condition="Exists('\'\\1\'')">\1</FSharpTargetsPath>,'g \ + {} \; diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/placate-nuget.sh b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/placate-nuget.sh new file mode 100644 index 000000000000..8a7f36522a3d --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/placate-nuget.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +echo Placating Nuget in nuget.targets +find -iname nuget.targets -print -exec sed --in-place=bak -e 's,mono --runtime[^<]*,true NUGET PLACATED BY buildDotnetPackage,g' {} \; + +echo Just to be sure, replacing Nuget executables by empty files. +find . -iname nuget.exe \! -size 0 -exec mv -v {} {}.bak \; -exec touch {} \; diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/placate-paket.sh b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/placate-paket.sh new file mode 100644 index 000000000000..0dbf1eecbad8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/placate-paket.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +echo Placating Paket in paket.targets +find -iname paket.targets -print -exec sed --in-place=bak -e 's,mono --runtime[^<]*,true PAKET PLACATED BY buildDotnetPackage,g' {} \; + +echo Just to be sure, replacing Paket executables by empty files. +find . -iname paket\*.exe \! 
-size 0 -exec mv -v {} {}.bak \; -exec touch {} \; diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/remove-duplicated-dlls.sh b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/remove-duplicated-dlls.sh new file mode 100644 index 000000000000..d8d29912c8fa --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetbuildhelpers/remove-duplicated-dlls.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +IFS=" +" + +for dll in $(find -iname \*.dll) +do + baseName="$(basename "$dll" | sed "s/.dll$//i")" + if pkg-config "$baseName" + then + candidateDll="$(pkg-config "$baseName" --variable=Libraries)" + + if diff "$dll" "$candidateDll" >/dev/null + then + echo "$dll is identical to $candidateDll. Substituting..." + rm -vf "$dll" + ln -sv "$candidateDll" "$dll" + else + echo "$dll and $candidateDll share the same name but have different contents, leaving alone." + fi + fi +done diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper.sln b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper.sln new file mode 100644 index 000000000000..7e4c934c9bed --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper.sln @@ -0,0 +1,20 @@ + +Microsoft Visual Studio Solution File, Format Version 11.00 +# Visual Studio 2010 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Wrapper", "Wrapper\Wrapper.csproj", "{D01B3597-E85E-42F4-940A-EF5AE712942F}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|x86 = Debug|x86 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D01B3597-E85E-42F4-940A-EF5AE712942F}.Debug|x86.ActiveCfg = Debug|x86 + {D01B3597-E85E-42F4-940A-EF5AE712942F}.Debug|x86.Build.0 = Debug|x86 + {D01B3597-E85E-42F4-940A-EF5AE712942F}.Release|x86.ActiveCfg = Release|x86 + {D01B3597-E85E-42F4-940A-EF5AE712942F}.Release|x86.Build.0 = Release|x86 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Properties/AssemblyInfo.cs b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Properties/AssemblyInfo.cs new file mode 100644 index 000000000000..633d23c05ff2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Wrapper")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("Philips Healthcare")] +[assembly: AssemblyProduct("Wrapper")] +[assembly: AssemblyCopyright("Copyright © Philips Healthcare 2011")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("2045ce22-78c7-4cd6-ad0a-9367f8a49738")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Wrapper.cs.in b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Wrapper.cs.in new file mode 100644 index 000000000000..abad090ebcbf --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Wrapper.cs.in @@ -0,0 +1,66 @@ +using System; +using System.Reflection; +using System.IO; + +namespace @NAMESPACE@Wrapper +{ + class @MAINCLASSNAME@Wrapper + { + private String[] AssemblySearchPaths = { @ASSEMBLYSEARCHPATH@ }; + + private String ExePath = @"@EXEPATH@"; + + private String MainClassName = "@NAMESPACE@.@MAINCLASSNAME@"; + + private Assembly exeAssembly; + + public @MAINCLASSNAME@Wrapper(string[] args) + { + // Attach the resolve event handler to the AppDomain so that missing library assemblies will be searched + AppDomain currentDomain = AppDomain.CurrentDomain; + currentDomain.AssemblyResolve += new ResolveEventHandler(MyResolveEventHandler); + + // Dynamically load the executable assembly + exeAssembly = Assembly.LoadFrom(ExePath); + + // Lookup the main class + Type mainClass = exeAssembly.GetType(MainClassName); + + // Lookup the main method + MethodInfo mainMethod = mainClass.GetMethod("Main"); + + // Invoke the main method + mainMethod.Invoke(this, new Object[] {args}); + } + + static void Main(string[] args) + { + new @MAINCLASSNAME@Wrapper(args); + } + + private Assembly MyResolveEventHandler(object sender, ResolveEventArgs args) + { + // This handler is called only when the common language runtime tries to bind to the assembly and fails. + + Assembly MyAssembly; + String assemblyPath = ""; + String requestedAssemblyName = args.Name.Substring(0, args.Name.IndexOf(",")); + + // Search for the right path of the library assembly + foreach (String currentAssemblyPath in AssemblySearchPaths) + { + assemblyPath = currentAssemblyPath + "/" + requestedAssemblyName + ".dll"; + + if (File.Exists(assemblyPath)) + break; + } + + // Load the assembly from the specified path. + MyAssembly = Assembly.LoadFrom(assemblyPath); + + // Return the loaded assembly. 
+ return MyAssembly; + } + + } +} diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Wrapper.csproj.in b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Wrapper.csproj.in new file mode 100644 index 000000000000..a991bcb6933a --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/Wrapper/Wrapper/Wrapper.csproj.in @@ -0,0 +1,57 @@ +<?xml version="1.0" encoding="utf-8"?> +<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> + <PropertyGroup> + <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration> + <Platform Condition=" '$(Platform)' == '' ">x86</Platform> + <ProductVersion>8.0.30703</ProductVersion> + <SchemaVersion>2.0</SchemaVersion> + <ProjectGuid>{D01B3597-E85E-42F4-940A-EF5AE712942F}</ProjectGuid> + <OutputType>Exe</OutputType> + <AppDesignerFolder>Properties</AppDesignerFolder> + <RootNamespace>@ROOTNAMESPACE@</RootNamespace> + <AssemblyName>@ASSEMBLYNAME@</AssemblyName> + <TargetFrameworkVersion>v4.0</TargetFrameworkVersion> + <TargetFrameworkProfile>Client</TargetFrameworkProfile> + <FileAlignment>512</FileAlignment> + </PropertyGroup> + <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x86' "> + <PlatformTarget>x86</PlatformTarget> + <DebugSymbols>true</DebugSymbols> + <DebugType>full</DebugType> + <Optimize>false</Optimize> + <OutputPath>bin\Debug\</OutputPath> + <DefineConstants>DEBUG;TRACE</DefineConstants> + <ErrorReport>prompt</ErrorReport> + <WarningLevel>4</WarningLevel> + </PropertyGroup> + <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|x86' "> + <PlatformTarget>x86</PlatformTarget> + <DebugType>pdbonly</DebugType> + <Optimize>true</Optimize> + <OutputPath>bin\Release\</OutputPath> + <DefineConstants>TRACE</DefineConstants> + <ErrorReport>prompt</ErrorReport> + <WarningLevel>4</WarningLevel> + </PropertyGroup> + <ItemGroup> + <Reference Include="System" /> + <Reference Include="System.Core" /> + <Reference Include="System.Xml.Linq" /> + <Reference Include="System.Data.DataSetExtensions" /> + <Reference Include="Microsoft.CSharp" /> + <Reference Include="System.Data" /> + <Reference Include="System.Xml" /> + </ItemGroup> + <ItemGroup> + <Compile Include="Wrapper.cs" /> + <Compile Include="Properties\AssemblyInfo.cs" /> + </ItemGroup> + <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" /> + <!-- To modify your build process, add your task inside one of the targets below and uncomment it. + Other similar extension points exist, see Microsoft.Common.targets. + <Target Name="BeforeBuild"> + </Target> + <Target Name="AfterBuild"> + </Target> + --> +</Project> \ No newline at end of file diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetenv/build-solution.nix b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/build-solution.nix new file mode 100644 index 000000000000..b3372b942177 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/build-solution.nix @@ -0,0 +1,85 @@ +{ lib, stdenv, dotnetfx }: +{ name +, src +, baseDir ? "." +, slnFile +, targets ? "ReBuild" +, verbosity ? "detailed" +, options ? "/p:Configuration=Debug;Platform=Win32" +, assemblyInputs ? [] +, preBuild ? "" +, modifyPublicMain ? false +, mainClassFile ? 
null +}: + +assert modifyPublicMain -> mainClassFile != null; + +stdenv.mkDerivation { + inherit name src; + + buildInputs = [ dotnetfx ]; + + preConfigure = '' + cd ${baseDir} + ''; + + preBuild = '' + ${lib.optionalString modifyPublicMain '' + sed -i -e "s|static void Main|public static void Main|" ${mainClassFile} + ''} + ${preBuild} + ''; + + installPhase = '' + addDeps() + { + if [ -f $1/nix-support/dotnet-assemblies ] + then + for i in $(cat $1/nix-support/dotnet-assemblies) + do + windowsPath=$(cygpath --windows $i) + assemblySearchPaths="$assemblySearchPaths;$windowsPath" + + addDeps $i + done + fi + } + + for i in ${toString assemblyInputs} + do + windowsPath=$(cygpath --windows $i) + echo "Using assembly path: $windowsPath" + + if [ "$assemblySearchPaths" = "" ] + then + assemblySearchPaths="$windowsPath" + else + assemblySearchPaths="$assemblySearchPaths;$windowsPath" + fi + + addDeps $i + done + + echo "Assembly search paths are: $assemblySearchPaths" + + if [ "$assemblySearchPaths" != "" ] + then + echo "Using assembly search paths args: $assemblySearchPathsArg" + export AssemblySearchPaths=$assemblySearchPaths + fi + + mkdir -p $out + MSBuild.exe ${toString slnFile} /nologo /t:${targets} /p:IntermediateOutputPath=$(cygpath --windows $out)\\ /p:OutputPath=$(cygpath --windows $out)\\ /verbosity:${verbosity} ${options} + + # Because .NET assemblies store strings as UTF-16 internally, we cannot detect + # hashes. Therefore a text files containing the proper paths is created + # We can also use this file the propagate transitive dependencies. + + mkdir -p $out/nix-support + + for i in ${toString assemblyInputs} + do + echo $i >> $out/nix-support/dotnet-assemblies + done + ''; +} diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetenv/default.nix b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/default.nix new file mode 100644 index 000000000000..3015db42b07b --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/default.nix @@ -0,0 +1,17 @@ +{ lib, stdenv, dotnetfx }: + +let dotnetenv = +{ + buildSolution = import ./build-solution.nix { + inherit lib stdenv; + dotnetfx = dotnetfx.pkg; + }; + + buildWrapper = import ./wrapper.nix { + inherit dotnetenv; + }; + + inherit (dotnetfx) assembly20Path wcfPath referenceAssembly30Path referenceAssembly35Path; +}; +in +dotnetenv diff --git a/nixpkgs/pkgs/build-support/dotnet/dotnetenv/wrapper.nix b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/wrapper.nix new file mode 100644 index 000000000000..423303c3084a --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/dotnetenv/wrapper.nix @@ -0,0 +1,64 @@ +{dotnetenv}: + +{ name +, src +, baseDir ? "." +, slnFile +, targets ? "ReBuild" +, verbosity ? "detailed" +, options ? "/p:Configuration=Debug;Platform=Win32" +, assemblyInputs ? [] +, preBuild ? "" +, namespace +, mainClassName +, mainClassFile +, modifyPublicMain ? 
true +}: + +let + application = dotnetenv.buildSolution { + inherit name src baseDir slnFile targets verbosity; + inherit options assemblyInputs preBuild; + inherit modifyPublicMain mainClassFile; + }; +in +dotnetenv.buildSolution { + name = "${name}-wrapper"; + src = ./Wrapper; + slnFile = "Wrapper.sln"; + assemblyInputs = [ application ]; + preBuild = '' + addRuntimeDeps() + { + if [ -f $1/nix-support/dotnet-assemblies ] + then + for i in $(cat $1/nix-support/dotnet-assemblies) + do + windowsPath=$(cygpath --windows $i | sed 's|\\|\\\\|g') + assemblySearchArray="$assemblySearchArray @\"$windowsPath\"" + + addRuntimeDeps $i + done + fi + } + + export exePath=$(cygpath --windows $(find ${application} -name \*.exe) | sed 's|\\|\\\\|g') + + # Generate assemblySearchPaths string array contents + for path in ${toString assemblyInputs} + do + assemblySearchArray="$assemblySearchArray @\"$(cygpath --windows $path | sed 's|\\|\\\\|g')\", " + addRuntimeDeps $path + done + + sed -e "s|@ROOTNAMESPACE@|${namespace}Wrapper|" \ + -e "s|@ASSEMBLYNAME@|${namespace}|" \ + Wrapper/Wrapper.csproj.in > Wrapper/Wrapper.csproj + + sed -e "s|@NAMESPACE@|${namespace}|g" \ + -e "s|@MAINCLASSNAME@|${mainClassName}|g" \ + -e "s|@EXEPATH@|$exePath|g" \ + -e "s|@ASSEMBLYSEARCHPATH@|$assemblySearchArray|" \ + Wrapper/Wrapper.cs.in > Wrapper/Wrapper.cs + ''; +} diff --git a/nixpkgs/pkgs/build-support/dotnet/fetchnuget/default.nix b/nixpkgs/pkgs/build-support/dotnet/fetchnuget/default.nix new file mode 100644 index 000000000000..061da746cead --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/fetchnuget/default.nix @@ -0,0 +1,43 @@ +{ fetchurl, buildDotnetPackage, unzip }: + +attrs @ +{ pname +, version +, url ? "https://www.nuget.org/api/v2/package/${pname}/${version}" +, sha256 ? "" +, md5 ? "" +, ... +}: +if md5 != "" then + throw "fetchnuget does not support md5 anymore, please use sha256" +else + buildDotnetPackage ({ + src = fetchurl { + inherit url sha256; + name = "${pname}.${version}.zip"; + }; + + sourceRoot = "."; + + nativeBuildInputs = [ unzip ]; + + dontBuild = true; + + preInstall = '' + function traverseRename () { + for e in * + do + t="$(echo "$e" | sed -e "s/%20/\ /g" -e "s/%2B/+/g")" + [ "$t" != "$e" ] && mv -vn "$e" "$t" + if [ -d "$t" ] + then + cd "$t" + traverseRename + cd .. + fi + done + } + + traverseRename + ''; + } // attrs) diff --git a/nixpkgs/pkgs/build-support/dotnet/make-nuget-deps/default.nix b/nixpkgs/pkgs/build-support/dotnet/make-nuget-deps/default.nix new file mode 100644 index 000000000000..8281976df626 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/make-nuget-deps/default.nix @@ -0,0 +1,12 @@ +{ linkFarmFromDrvs, fetchurl }: +{ name, nugetDeps, sourceFile ? null }: +linkFarmFromDrvs "${name}-nuget-deps" (nugetDeps { + fetchNuGet = { pname, version, sha256 + , url ? "https://www.nuget.org/api/v2/package/${pname}/${version}" }: + fetchurl { + name = "${pname}.${version}.nupkg"; + inherit url sha256; + }; +}) // { + inherit sourceFile; +} diff --git a/nixpkgs/pkgs/build-support/dotnet/make-nuget-source/default.nix b/nixpkgs/pkgs/build-support/dotnet/make-nuget-source/default.nix new file mode 100644 index 000000000000..a23a143ab246 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/make-nuget-source/default.nix @@ -0,0 +1,34 @@ +{ lib, python3, stdenvNoCC }: + +{ name +, description ? "" +, deps ? 
[] +}: + +let + nuget-source = stdenvNoCC.mkDerivation rec { + inherit name; + + meta.description = description; + nativeBuildInputs = [ python3 ]; + + buildCommand = '' + mkdir -p $out/{lib,share} + + # use -L to follow symbolic links. When `projectReferences` is used in + # buildDotnetModule, one of the deps will be a symlink farm. + find -L ${lib.concatStringsSep " " deps} -type f -name '*.nupkg' -exec \ + cp --no-clobber '{}' $out/lib ';' + + # Generates a list of all licenses' spdx ids, if available. + # Note that this currently ignores any license provided in plain text (e.g. "LICENSE.txt") + python ${./extract-licenses-from-nupkgs.py} $out/lib > $out/share/licenses + ''; + } // { # We need data from `$out` for `meta`, so we have to use overrides as to not hit infinite recursion. + meta.licence = let + depLicenses = lib.splitString "\n" (builtins.readFile "${nuget-source}/share/licenses"); + in (lib.flatten (lib.forEach depLicenses (spdx: + lib.optionals (spdx != "") (lib.getLicenseFromSpdxId spdx) + ))); + }; +in nuget-source diff --git a/nixpkgs/pkgs/build-support/dotnet/make-nuget-source/extract-licenses-from-nupkgs.py b/nixpkgs/pkgs/build-support/dotnet/make-nuget-source/extract-licenses-from-nupkgs.py new file mode 100644 index 000000000000..22564b0bb2bc --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/make-nuget-source/extract-licenses-from-nupkgs.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +""" +Opens each .nupkg file in a directory, and extracts the SPDX license identifiers +from them if they exist. The SPDX license identifier is stored in the +'<license type="expression">...</license>' tag in the .nuspec file. +All found license identifiers will be printed to stdout. +""" + +from glob import glob +from pathlib import Path +import sys +import xml.etree.ElementTree as ET +import zipfile + +all_licenses = set() + +if len(sys.argv) < 2: + print(f"Usage: {sys.argv[0]} DIRECTORY") + sys.exit(1) + +nupkg_dir = Path(sys.argv[1]) +for nupkg_name in glob("*.nupkg", root_dir=nupkg_dir): + with zipfile.ZipFile(nupkg_dir / nupkg_name) as nupkg: + for nuspec_name in [name for name in nupkg.namelist() if name.endswith(".nuspec")]: + with nupkg.open(nuspec_name) as nuspec_stream: + nuspec = ET.parse(nuspec_stream) + licenses = nuspec.findall(".//{*}license[@type='expression']") + all_licenses.update([license.text for license in licenses]) + +print("\n".join(sorted(all_licenses))) diff --git a/nixpkgs/pkgs/build-support/dotnet/nuget-to-nix/default.nix b/nixpkgs/pkgs/build-support/dotnet/nuget-to-nix/default.nix new file mode 100644 index 000000000000..3fdda4ac68d3 --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/nuget-to-nix/default.nix @@ -0,0 +1,35 @@ +{ lib +, runCommandLocal +, runtimeShell +, substituteAll +, nix +, coreutils +, jq +, yq +, curl +, gnugrep +, gawk +, dotnet-sdk +}: + +runCommandLocal "nuget-to-nix" { + script = substituteAll { + src = ./nuget-to-nix.sh; + inherit runtimeShell; + + binPath = lib.makeBinPath [ + nix + coreutils + jq + yq + curl + gnugrep + gawk + dotnet-sdk + ]; + }; + + meta.description = "Convert a nuget packages directory to a lockfile for buildDotnetModule"; +} '' + install -Dm755 $script $out/bin/nuget-to-nix +'' diff --git a/nixpkgs/pkgs/build-support/dotnet/nuget-to-nix/nuget-to-nix.sh b/nixpkgs/pkgs/build-support/dotnet/nuget-to-nix/nuget-to-nix.sh new file mode 100755 index 000000000000..86bc4482088b --- /dev/null +++ b/nixpkgs/pkgs/build-support/dotnet/nuget-to-nix/nuget-to-nix.sh @@ -0,0 +1,86 @@ +#!@runtimeShell@ + +set -euo pipefail 
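+# Illustrative usage (the ./packages directory below is hypothetical): after a
+# restore such as `dotnet restore --packages ./packages`, a lockfile for
+# buildDotnetModule can be generated with
+#
+#   nuget-to-nix ./packages > deps.nix
+#
+# The generated deps.nix is a function of fetchNuGet that returns the list of
+# package fetches.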
+shopt -s nullglob + +export PATH="@binPath@" +# used for glob ordering of package names +export LC_ALL=C + +if [ $# -eq 0 ]; then + >&2 echo "Usage: $0 <packages directory> [path to a file with a list of excluded packages] > deps.nix" + exit 1 +fi + +pkgs=$1 +tmp=$(realpath "$(mktemp -td nuget-to-nix.XXXXXX)") +trap 'rm -r "$tmp"' EXIT + +excluded_list=$(realpath "${2:-/dev/null}") + +export DOTNET_NOLOGO=1 +export DOTNET_CLI_TELEMETRY_OPTOUT=1 + +mapfile -t sources < <(dotnet nuget list source --format short | awk '/^E / { print $2 }') + +declare -A base_addresses + +for index in "${sources[@]}"; do + base_addresses[$index]=$( + curl --compressed --netrc -fsL "$index" | \ + jq -r '.resources[] | select(."@type" == "PackageBaseAddress/3.0.0")."@id"') +done + +echo "{ fetchNuGet }: [" + +cd "$pkgs" +for package in *; do + cd "$package" + for version in *; do + id=$(xq -r .package.metadata.id "$version/$package".nuspec) + + if grep -qxF "$id.$version.nupkg" "$excluded_list"; then + continue + fi + + used_source="$(jq -r '.source' "$version"/.nupkg.metadata)" + for source in "${sources[@]}"; do + url="${base_addresses[$source]}$package/$version/$package.$version.nupkg" + if [[ "$source" == "$used_source" ]]; then + sha256="$(nix-hash --type sha256 --flat --base32 "$version/$package.$version".nupkg)" + found=true + break + else + if sha256=$(nix-prefetch-url "$url" 2>"$tmp"/error); then + # If multiple remote sources are enabled, nuget will try them all + # concurrently and use the one that responds first. We always use the + # first source that has the package. + echo "$package $version is available at $url, but was restored from $used_source" 1>&2 + found=true + break + else + if ! grep -q 'HTTP error 404' "$tmp/error"; then + cat "$tmp/error" 1>&2 + exit 1 + fi + fi + fi + done + + if ! ${found-false}; then + echo "couldn't find $package $version" >&2 + exit 1 + fi + + if [[ "$source" != https://api.nuget.org/v3/index.json ]]; then + echo " (fetchNuGet { pname = \"$id\"; version = \"$version\"; sha256 = \"$sha256\"; url = \"$url\"; })" + else + echo " (fetchNuGet { pname = \"$id\"; version = \"$version\"; sha256 = \"$sha256\"; })" + fi + done + cd .. +done + +cat << EOL +] +EOL diff --git a/nixpkgs/pkgs/build-support/emacs/buffer.nix b/nixpkgs/pkgs/build-support/emacs/buffer.nix new file mode 100644 index 000000000000..48a7996916e8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/buffer.nix @@ -0,0 +1,77 @@ +# Functions to build elisp files to locally configure emcas buffers. 
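+# The `withPackages` function below takes a list of packages and builds a
+# dir-locals.el that puts their bin/ directories on PATH, exec-path and
+# eshell-path-env for buffers that load it. A minimal sketch, assuming this
+# file is exposed to callers as `emacsBuffer`:
+#
+#   emacsBuffer.withPackages [ pkgs.ghc pkgs.cabal-install ]
+#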
+# See https://github.com/shlevy/nix-buffer + +{ lib, writeText, inherit-local }: + +rec { + withPackages = pkgs': let + pkgs = builtins.filter (x: x != null) pkgs'; + extras = map (x: x.emacsBufferSetup pkgs) (builtins.filter (builtins.hasAttr "emacsBufferSetup") pkgs); + in writeText "dir-locals.el" '' + (require 'inherit-local "${inherit-local}/share/emacs/site-lisp/elpa/inherit-local-${inherit-local.version}/inherit-local.elc") + + ; Only set up nixpkgs buffer handling when we have some buffers active + (defvar nixpkgs--buffer-count 0) + (when (eq nixpkgs--buffer-count 0) + (make-variable-buffer-local 'nixpkgs--is-nixpkgs-buffer) + ; When generating a new temporary buffer (one whose name starts with a space), do inherit-local inheritance and make it a nixpkgs buffer + (defun nixpkgs--around-generate (orig name &optional ibh) + (if (and nixpkgs--is-nixpkgs-buffer (eq (aref name 0) ?\s)) + (let ((buf (funcall orig name ibh))) + (progn + (inherit-local-inherit-child buf) + (with-current-buffer buf + (setq nixpkgs--buffer-count (1+ nixpkgs--buffer-count)) + (add-hook 'kill-buffer-hook 'nixpkgs--decrement-buffer-count nil t))) + buf) + (funcall orig name ibh))) + (advice-add 'generate-new-buffer :around #'nixpkgs--around-generate) + ; When we have no more nixpkgs buffers, tear down the buffer handling + (defun nixpkgs--decrement-buffer-count () + (setq nixpkgs--buffer-count (1- nixpkgs--buffer-count)) + (when (eq nixpkgs--buffer-count 0) + (advice-remove 'generate-new-buffer #'nixpkgs--around-generate) + (fmakunbound 'nixpkgs--around-generate) + (fmakunbound 'nixpkgs--decrement-buffer-count)))) + (setq nixpkgs--buffer-count (1+ nixpkgs--buffer-count)) + (add-hook 'kill-buffer-hook 'nixpkgs--decrement-buffer-count nil t) + + ; Add packages to PATH and exec-path + (make-local-variable 'process-environment) + (put 'process-environment 'permanent-local t) + (inherit-local 'process-environment) + ; setenv modifies in place, so copy the environment first + (setq process-environment (copy-tree process-environment)) + (setenv "PATH" (concat "${lib.makeSearchPath "bin" pkgs}:" (getenv "PATH"))) + (inherit-local-permanent exec-path (append '(${builtins.concatStringsSep " " (map (p: "\"${p}/bin\"") pkgs)}) exec-path)) + + (inherit-local-permanent eshell-path-env (concat "${lib.makeSearchPath "bin" pkgs}:" (if (boundp 'eshell-path-env) eshell-path-env (getenv "PATH")))) + + (setq nixpkgs--is-nixpkgs-buffer t) + (inherit-local 'nixpkgs--is-nixpkgs-buffer) + + ${lib.concatStringsSep "\n" extras} + ''; + # nix-buffer function for a project with a bunch of haskell packages + # in one directory + haskellMonoRepo = { project-root # The monorepo root + , haskellPackages # The composed haskell packages set that contains all of the packages + }: { root }: + let # The haskell paths. + haskell-paths = lib.filesystem.haskellPathsInDir project-root; + # Find the haskell package that the 'root' is in, if any. 
+ haskell-path-parent = + let filtered = builtins.filter (name: + lib.hasPrefix (toString (project-root + "/${name}")) (toString root) + ) (builtins.attrNames haskell-paths); + in + if filtered == [] then null else builtins.head filtered; + # We're in the directory of a haskell package + is-haskell-package = haskell-path-parent != null; + haskell-package = haskellPackages.${haskell-path-parent}; + # GHC environment with all needed deps for the haskell package + haskell-package-env = + builtins.head haskell-package.env.nativeBuildInputs; + in + lib.optionalAttrs is-haskell-package (withPackages [ haskell-package-env ]); +} diff --git a/nixpkgs/pkgs/build-support/emacs/elpa.nix b/nixpkgs/pkgs/build-support/emacs/elpa.nix new file mode 100644 index 000000000000..f7027dc499d8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/elpa.nix @@ -0,0 +1,41 @@ +# builder for Emacs packages built for packages.el + +{ lib, stdenv, emacs, texinfo, writeText, gcc }: + +with lib; + +{ pname +, version +, src +, meta ? {} +, ... +}@args: + +let + + defaultMeta = { + homepage = args.src.meta.homepage or "https://elpa.gnu.org/packages/${pname}.html"; + }; + +in + +import ./generic.nix { inherit lib stdenv emacs texinfo writeText gcc; } ({ + + dontUnpack = true; + + installPhase = '' + runHook preInstall + + emacs --batch -Q -l ${./elpa2nix.el} \ + -f elpa2nix-install-package \ + "$src" "$out/share/emacs/site-lisp/elpa" + + runHook postInstall + ''; + + meta = defaultMeta // meta; +} + +// removeAttrs args [ "files" "fileSpecs" + "meta" + ]) diff --git a/nixpkgs/pkgs/build-support/emacs/elpa2nix.el b/nixpkgs/pkgs/build-support/emacs/elpa2nix.el new file mode 100644 index 000000000000..64587c0fad1a --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/elpa2nix.el @@ -0,0 +1,33 @@ +(require 'package) +(package-initialize) + +(defun elpa2nix-install-package () + (if (not noninteractive) + (error "`elpa2nix-install-package' is to be used only with -batch")) + (pcase command-line-args-left + (`(,archive ,elpa) + (progn (setq package-user-dir elpa) + (elpa2nix-install-file archive))))) + +(defun elpa2nix-install-from-buffer () + "Install a package from the current buffer." + (let ((pkg-desc (if (derived-mode-p 'tar-mode) + (package-tar-file-info) + (package-buffer-info)))) + ;; Install the package itself. + (package-unpack pkg-desc) + pkg-desc)) + +(defun elpa2nix-install-file (file) + "Install a package from a file. +The file can either be a tar file or an Emacs Lisp file." 
+ (let ((is-tar (string-match "\\.tar\\'" file))) + (with-temp-buffer + (if is-tar + (insert-file-contents-literally file) + (insert-file-contents file)) + (when is-tar (tar-mode)) + (elpa2nix-install-from-buffer)))) + +;; Allow installing package tarfiles larger than 10MB +(setq large-file-warning-threshold nil) diff --git a/nixpkgs/pkgs/build-support/emacs/emacs-funcs.sh b/nixpkgs/pkgs/build-support/emacs/emacs-funcs.sh new file mode 100644 index 000000000000..e1e6a3b62208 --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/emacs-funcs.sh @@ -0,0 +1,34 @@ +addToEmacsLoadPath() { + local lispDir="$1" + if [[ -d $lispDir && ${EMACSLOADPATH-} != *"$lispDir":* ]] ; then + # It turns out, that the trailing : is actually required + # see https://www.gnu.org/software/emacs/manual/html_node/elisp/Library-Search.html + export EMACSLOADPATH="$lispDir:${EMACSLOADPATH-}" + fi +} + +addToEmacsNativeLoadPath() { + local nativeDir="$1" + if [[ -d $nativeDir && ${EMACSNATIVELOADPATH-} != *"$nativeDir":* ]]; then + export EMACSNATIVELOADPATH="$nativeDir:${EMACSNATIVELOADPATH-}" + fi +} + +addEmacsVars () { + addToEmacsLoadPath "$1/share/emacs/site-lisp" + + if [ -n "${addEmacsNativeLoadPath:-}" ]; then + addToEmacsNativeLoadPath "$1/share/emacs/native-lisp" + fi + + # Add sub paths to the Emacs load path if it is a directory + # containing .el files. This is necessary to build some packages, + # e.g., using trivialBuild. + for lispDir in \ + "$1/share/emacs/site-lisp/"* \ + "$1/share/emacs/site-lisp/elpa/"*; do + if [[ -d $lispDir && "$(echo "$lispDir"/*.el)" ]] ; then + addToEmacsLoadPath "$lispDir" + fi + done +} diff --git a/nixpkgs/pkgs/build-support/emacs/generic.nix b/nixpkgs/pkgs/build-support/emacs/generic.nix new file mode 100644 index 000000000000..291f45d513b7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/generic.nix @@ -0,0 +1,86 @@ +# generic builder for Emacs packages + +{ lib, stdenv, emacs, texinfo, writeText, gcc, ... }: + +{ pname +, version +, buildInputs ? [] +, packageRequires ? [] +, meta ? {} +, ... +}@args: + +let + defaultMeta = { + broken = false; + platforms = emacs.meta.platforms; + } // lib.optionalAttrs ((args.src.meta.homepage or "") != "") { + homepage = args.src.meta.homepage; + }; +in + +stdenv.mkDerivation (finalAttrs: ({ + name = "emacs-${pname}-${finalAttrs.version}"; + + unpackCmd = '' + case "$curSrc" in + *.el) + # keep original source filename without the hash + local filename=$(basename "$curSrc") + filename="''${filename:33}" + cp $curSrc $filename + chmod +w $filename + sourceRoot="." + ;; + *) + _defaultUnpack "$curSrc" + ;; + esac + ''; + + buildInputs = [emacs texinfo] ++ packageRequires ++ buildInputs; + propagatedBuildInputs = packageRequires; + propagatedUserEnvPkgs = packageRequires; + + setupHook = writeText "setup-hook.sh" '' + source ${./emacs-funcs.sh} + + if [[ ! -v emacsHookDone ]]; then + emacsHookDone=1 + + # If this is for a wrapper derivation, emacs and the dependencies are all + # run-time dependencies. If this is for precompiling packages into bytecode, + # emacs is a compile-time dependency of the package. 
+ addEnvHooks "$hostOffset" addEmacsVars + addEnvHooks "$targetOffset" addEmacsVars + fi + ''; + + doCheck = false; + + meta = defaultMeta // meta; +} + +// lib.optionalAttrs (emacs.withNativeCompilation or false) { + + LIBRARY_PATH = "${lib.getLib stdenv.cc.libc}/lib"; + + nativeBuildInputs = [ gcc ]; + + addEmacsNativeLoadPath = true; + + postInstall = '' + # Besides adding the output directory to the native load path, make sure + # the current package's elisp files are in the load path, otherwise + # (require 'file-b) from file-a.el in the same package will fail. + mkdir -p $out/share/emacs/native-lisp + source ${./emacs-funcs.sh} + addEmacsVars "$out" + + find $out/share/emacs -type f -name '*.el' -print0 \ + | xargs -0 -I {} -n 1 -P $NIX_BUILD_CORES sh -c \ + "emacs --batch --eval '(setq large-file-warning-threshold nil)' -f batch-native-compile {} || true" + ''; +} + +// removeAttrs args [ "buildInputs" "packageRequires" "meta" ])) diff --git a/nixpkgs/pkgs/build-support/emacs/melpa.nix b/nixpkgs/pkgs/build-support/emacs/melpa.nix new file mode 100644 index 000000000000..85bc8aa37b3a --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/melpa.nix @@ -0,0 +1,110 @@ +# builder for Emacs packages built for packages.el +# using MELPA package-build.el + +{ lib, stdenv, fetchFromGitHub, emacs, texinfo, writeText, gcc }: + +with lib; + +{ /* + pname: Nix package name without special symbols and without version or + "emacs-" prefix. + */ + pname + /* + ename: Original Emacs package name, possibly containing special symbols. + */ +, ename ? null +, version +, recipe +, meta ? {} +, ... +}@args: + +let + + defaultMeta = { + homepage = args.src.meta.homepage or "https://melpa.org/#/${pname}"; + }; + +in + +import ./generic.nix { inherit lib stdenv emacs texinfo writeText gcc; } ({ + + ename = + if ename == null + then pname + else ename; + + packageBuild = stdenv.mkDerivation { + name = "package-build"; + src = fetchFromGitHub { + owner = "melpa"; + repo = "package-build"; + rev = "c3c535e93d9dc92acd21ebc4b15016b5c3b90e7d"; + sha256 = "17z0wbqdd6fspbj43yq8biff6wfggk74xgnaf1xx6ynsp1i74is5"; + }; + + patches = [ ./package-build-dont-use-mtime.patch ]; + + dontConfigure = true; + dontBuild = true; + + installPhase = " + mkdir -p $out + cp -r * $out + "; + }; + + elpa2nix = ./elpa2nix.el; + melpa2nix = ./melpa2nix.el; + + preUnpack = '' + mkdir -p "$NIX_BUILD_TOP/recipes" + if [ -n "$recipe" ]; then + cp "$recipe" "$NIX_BUILD_TOP/recipes/$ename" + fi + + ln -s "$packageBuild" "$NIX_BUILD_TOP/package-build" + + mkdir -p "$NIX_BUILD_TOP/packages" + ''; + + postUnpack = '' + mkdir -p "$NIX_BUILD_TOP/working" + ln -s "$NIX_BUILD_TOP/$sourceRoot" "$NIX_BUILD_TOP/working/$ename" + ''; + + buildPhase = '' + runHook preBuild + + cd "$NIX_BUILD_TOP" + + emacs --batch -Q \ + -L "$NIX_BUILD_TOP/package-build" \ + -l "$melpa2nix" \ + -f melpa2nix-build-package \ + $ename $version $commit + + runHook postBuild + ''; + + installPhase = '' + runHook preInstall + + archive="$NIX_BUILD_TOP/packages/$ename-$version.el" + if [ ! 
-f "$archive" ]; then + archive="$NIX_BUILD_TOP/packages/$ename-$version.tar" + fi + + emacs --batch -Q \ + -l "$elpa2nix" \ + -f elpa2nix-install-package \ + "$archive" "$out/share/emacs/site-lisp/elpa" + + runHook postInstall + ''; + + meta = defaultMeta // meta; +} + +// removeAttrs args [ "meta" ]) diff --git a/nixpkgs/pkgs/build-support/emacs/melpa2nix.el b/nixpkgs/pkgs/build-support/emacs/melpa2nix.el new file mode 100644 index 000000000000..72667dea652c --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/melpa2nix.el @@ -0,0 +1,32 @@ +(require 'package) +(package-initialize) + +(require 'package-recipe) +(require 'package-build) + +(setq package-build-working-dir (expand-file-name "working/")) +(setq package-build-archive-dir (expand-file-name "packages/")) +(setq package-build-recipes-dir (expand-file-name "recipes/")) + +;; Allow installing package tarfiles larger than 10MB +(setq large-file-warning-threshold nil) + +(defun melpa2nix-build-package-1 (rcp version commit) + (let ((source-dir (package-recipe--working-tree rcp))) + (unwind-protect + (let ((files (package-build-expand-files-spec rcp t))) + (cond + ((= (length files) 1) + (package-build--build-single-file-package + rcp version commit files source-dir)) + ((> (length files) 1) + (package-build--build-multi-file-package + rcp version commit files source-dir)) + (t (error "Unable to find files matching recipe patterns"))))))) + +(defun melpa2nix-build-package () + (if (not noninteractive) + (error "`melpa2nix-build-package' is to be used only with -batch")) + (pcase command-line-args-left + (`(,package ,version ,commit) + (melpa2nix-build-package-1 (package-recipe-lookup package) version commit)))) diff --git a/nixpkgs/pkgs/build-support/emacs/mk-wrapper-subdirs.el b/nixpkgs/pkgs/build-support/emacs/mk-wrapper-subdirs.el new file mode 100644 index 000000000000..7d30400a5c65 --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/mk-wrapper-subdirs.el @@ -0,0 +1,6 @@ +(defmacro mk-subdirs-expr (path) + `(setq load-path + (delete-dups (append '(,path) + ',(let ((default-directory path)) + (normal-top-level-add-subdirs-to-load-path)) + load-path)))) diff --git a/nixpkgs/pkgs/build-support/emacs/package-build-dont-use-mtime.patch b/nixpkgs/pkgs/build-support/emacs/package-build-dont-use-mtime.patch new file mode 100644 index 000000000000..fe94de57a300 --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/package-build-dont-use-mtime.patch @@ -0,0 +1,40 @@ +diff --git a/package-build.el b/package-build.el +index e572045..9eb0f82 100644 +--- a/package-build.el ++++ b/package-build.el +@@ -415,7 +415,7 @@ (defun package-build--write-pkg-file (desc dir) + (princ ";; Local Variables:\n;; no-byte-compile: t\n;; End:\n" + (current-buffer))))) + +-(defun package-build--create-tar (name version directory mtime) ++(defun package-build--create-tar (name version directory) + "Create a tar file containing the contents of VERSION of package NAME. + DIRECTORY is a temporary directory that contains the directory + that is put in the tarball. MTIME is used as the modification +@@ -434,7 +434,7 @@ (defun package-build--create-tar (name version directory mtime) + ;; prevent a reproducable tarball as described at + ;; https://reproducible-builds.org/docs/archives. 
+ "--sort=name" +- (format "--mtime=@%d" mtime) ++ "--mtime=@0" + "--owner=0" "--group=0" "--numeric-owner" + "--pax-option=exthdr.name=%d/PaxHeaders/%f,delete=atime,delete=ctime")) + (when (and package-build-verbose noninteractive) +@@ -848,12 +848,11 @@ (defun package-build--build-multi-file-package (rcp version commit files source- + (package-build--desc-from-library + name version commit files 'tar) + (error "%s[-pkg].el matching package name is missing" +- name)))) +- (mtime (package-build--get-commit-time rcp commit))) ++ name))))) + (package-build--copy-package-files files source-dir target) + (package-build--write-pkg-file desc target) + (package-build--generate-info-files files source-dir target) +- (package-build--create-tar name version tmp-dir mtime) ++ (package-build--create-tar name version tmp-dir) + (package-build--write-pkg-readme name files source-dir) + (package-build--write-archive-entry desc)) + (delete-directory tmp-dir t nil)))) +-- +2.37.2 + diff --git a/nixpkgs/pkgs/build-support/emacs/trivial.nix b/nixpkgs/pkgs/build-support/emacs/trivial.nix new file mode 100644 index 000000000000..abe4d761c6b5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/trivial.nix @@ -0,0 +1,30 @@ +# trivial builder for Emacs packages + +{ callPackage, lib, ... }@envargs: + +with lib; + +args: + +callPackage ./generic.nix envargs ({ + buildPhase = '' + runHook preBuild + + emacs -L . --batch -f batch-byte-compile *.el + + runHook postBuild + ''; + + installPhase = '' + runHook preInstall + + LISPDIR=$out/share/emacs/site-lisp + install -d $LISPDIR + install *.el *.elc $LISPDIR + emacs --batch -l package --eval "(package-generate-autoloads \"${args.pname}\" \"$LISPDIR\")" + + runHook postInstall + ''; +} + +// args) diff --git a/nixpkgs/pkgs/build-support/emacs/wrapper.nix b/nixpkgs/pkgs/build-support/emacs/wrapper.nix new file mode 100644 index 000000000000..6c1383c53304 --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/wrapper.nix @@ -0,0 +1,236 @@ +/* + +# Usage + +`emacs.pkgs.withPackages` takes a single argument: a function from a package +set to a list of packages (the packages that will be available in +Emacs). For example, +``` +emacs.pkgs.withPackages (epkgs: [ epkgs.evil epkgs.magit ]) +``` +All the packages in the list should come from the provided package +set. It is possible to add any package to the list, but the provided +set is guaranteed to have consistent dependencies and be built with +the correct version of Emacs. + +# Overriding + +`emacs.pkgs.withPackages` inherits the package set which contains it, so the +correct way to override the provided package set is to override the +set which contains `emacs.pkgs.withPackages`. 
For example, to override +`emacs.pkgs.emacs.pkgs.withPackages`, +``` +let customEmacsPackages = + emacs.pkgs.overrideScope (self: super: { + # use a custom version of emacs + emacs = ...; + # use the unstable MELPA version of magit + magit = self.melpaPackages.magit; + }); +in customEmacsPackages.withPackages (epkgs: [ epkgs.evil epkgs.magit ]) +``` + +*/ + +{ lib, lndir, makeBinaryWrapper, runCommand, gcc }: +self: +let + inherit (self) emacs; + withNativeCompilation = emacs.withNativeCompilation or emacs.nativeComp or false; + withTreeSitter = emacs.withTreeSitter or emacs.treeSitter or false; +in +packagesFun: # packages explicitly requested by the user +let + explicitRequires = + if lib.isFunction packagesFun + then packagesFun self + else packagesFun; +in +runCommand + (lib.appendToName "with-packages" emacs).name + { + inherit emacs explicitRequires; + nativeBuildInputs = [ emacs lndir makeBinaryWrapper ]; + + preferLocalBuild = true; + allowSubstitutes = false; + + # Store all paths we want to add to emacs here, so that we only need to add + # one path to the load lists + deps = runCommand "emacs-packages-deps" + ({ + inherit explicitRequires lndir emacs; + nativeBuildInputs = lib.optional withNativeCompilation gcc; + } // lib.optionalAttrs withNativeCompilation { + inherit (emacs) LIBRARY_PATH; + }) + '' + findInputsOld() { + local pkg="$1"; shift + local var="$1"; shift + local propagatedBuildInputsFiles=("$@") + + # TODO(@Ericson2314): Restore using associative array once Darwin + # nix-shell doesn't use impure bash. This should replace the O(n) + # case with an O(1) hash map lookup, assuming bash is implemented + # well :D. + local varSlice="$var[*]" + # ''${..-} to hack around old bash empty array problem + case "''${!varSlice-}" in + *" $pkg "*) return 0 ;; + esac + unset -v varSlice + + eval "$var"'+=("$pkg")' + + if ! 
[ -e "$pkg" ]; then + echo "build input $pkg does not exist" >&2 + exit 1 + fi + + local file + for file in "''${propagatedBuildInputsFiles[@]}"; do + file="$pkg/nix-support/$file" + [[ -f "$file" ]] || continue + + local pkgNext + for pkgNext in $(< "$file"); do + findInputsOld "$pkgNext" "$var" "''${propagatedBuildInputsFiles[@]}" + done + done + } + mkdir -p $out/bin + mkdir -p $out/share/emacs/site-lisp + ${lib.optionalString withNativeCompilation '' + mkdir -p $out/share/emacs/native-lisp + ''} + ${lib.optionalString withTreeSitter '' + mkdir -p $out/lib + ''} + + local requires + for pkg in $explicitRequires; do + findInputsOld $pkg requires propagated-user-env-packages + done + # requires now holds all requested packages and their transitive dependencies + + linkPath() { + local pkg=$1 + local origin_path=$2 + local dest_path=$3 + + # Add the path to the search path list, but only if it exists + if [[ -d "$pkg/$origin_path" ]]; then + $lndir/bin/lndir -silent "$pkg/$origin_path" "$out/$dest_path" + fi + } + + linkEmacsPackage() { + linkPath "$1" "bin" "bin" + linkPath "$1" "share/emacs/site-lisp" "share/emacs/site-lisp" + ${lib.optionalString withNativeCompilation '' + linkPath "$1" "share/emacs/native-lisp" "share/emacs/native-lisp" + ''} + ${lib.optionalString withTreeSitter '' + linkPath "$1" "lib" "lib" + ''} + } + + # Iterate over the array of inputs (avoiding nix's own interpolation) + for pkg in "''${requires[@]}"; do + linkEmacsPackage $pkg + done + + siteStart="$out/share/emacs/site-lisp/site-start.el" + siteStartByteCompiled="$siteStart"c + subdirs="$out/share/emacs/site-lisp/subdirs.el" + subdirsByteCompiled="$subdirs"c + + # A dependency may have brought the original siteStart or subdirs, delete + # it and create our own + # Begin the new site-start.el by loading the original, which sets some + # NixOS-specific paths. Paths are searched in the reverse of the order + # they are specified in, so user and system profile paths are searched last. + # + # NOTE: Avoid displaying messages early at startup by binding + # inhibit-message to t. This would prevent the Emacs GUI from showing up + # prematurely. The messages would still be logged to the *Messages* + # buffer. + rm -f $siteStart $siteStartByteCompiled $subdirs $subdirsByteCompiled + cat >"$siteStart" <<EOF + (let ((inhibit-message t)) + (load "$emacs/share/emacs/site-lisp/site-start")) + ;; "$out/share/emacs/site-lisp" is added to load-path in wrapper.sh + ;; "$out/share/emacs/native-lisp" is added to native-comp-eln-load-path in wrapper.sh + (add-to-list 'exec-path "$out/bin") + ${lib.optionalString withTreeSitter '' + (add-to-list 'treesit-extra-load-path "$out/lib/") + ''} + EOF + + # Generate a subdirs.el that statically adds all subdirectories to load-path. + $emacs/bin/emacs \ + --batch \ + --load ${./mk-wrapper-subdirs.el} \ + --eval "(prin1 (macroexpand-1 '(mk-subdirs-expr \"$out/share/emacs/site-lisp\")))" \ + > "$subdirs" + + # Byte-compiling improves start-up time only slightly, but costs nothing. + $emacs/bin/emacs --batch -f batch-byte-compile "$siteStart" "$subdirs" + + ${lib.optionalString withNativeCompilation '' + $emacs/bin/emacs --batch \ + --eval "(add-to-list 'native-comp-eln-load-path \"$out/share/emacs/native-lisp/\")" \ + -f batch-native-compile "$siteStart" "$subdirs" + ''} + ''; + + inherit (emacs) meta; + } + '' + mkdir -p "$out/bin" + + # Wrap emacs and friends so they find our site-start.el before the original. 
+ for prog in $emacs/bin/*; do # */ + local progname=$(basename "$prog") + rm -f "$out/bin/$progname" + + substitute ${./wrapper.sh} $out/bin/$progname \ + --subst-var-by bash ${emacs.stdenv.shell} \ + --subst-var-by wrapperSiteLisp "$deps/share/emacs/site-lisp" \ + --subst-var-by wrapperSiteLispNative "$deps/share/emacs/native-lisp" \ + --subst-var prog + chmod +x $out/bin/$progname + # Create a “NOP” binary wrapper for the pure sake of it becoming a + # non-shebang, actual binary. See the makeBinaryWrapper docs for rationale + # (summary: it allows you to use emacs as a shebang itself on Darwin, + # e.g. #!$ {emacs}/bin/emacs --script) + wrapProgramBinary $out/bin/$progname + done + + # Wrap MacOS app + # this has to pick up resources and metadata + # to recognize it as an "app" + if [ -d "$emacs/Applications/Emacs.app" ]; then + mkdir -p $out/Applications/Emacs.app/Contents/MacOS + cp -r $emacs/Applications/Emacs.app/Contents/Info.plist \ + $emacs/Applications/Emacs.app/Contents/PkgInfo \ + $emacs/Applications/Emacs.app/Contents/Resources \ + $out/Applications/Emacs.app/Contents + + + substitute ${./wrapper.sh} $out/Applications/Emacs.app/Contents/MacOS/Emacs \ + --subst-var-by bash ${emacs.stdenv.shell} \ + --subst-var-by wrapperSiteLisp "$deps/share/emacs/site-lisp" \ + --subst-var-by wrapperSiteLispNative "$deps/share/emacs/native-lisp" \ + --subst-var-by prog "$emacs/Applications/Emacs.app/Contents/MacOS/Emacs" + chmod +x $out/Applications/Emacs.app/Contents/MacOS/Emacs + wrapProgramBinary $out/Applications/Emacs.app/Contents/MacOS/Emacs + fi + + mkdir -p $out/share + # Link icons and desktop files into place + for dir in applications icons info man; do + ln -s $emacs/share/$dir $out/share/$dir + done + '' diff --git a/nixpkgs/pkgs/build-support/emacs/wrapper.sh b/nixpkgs/pkgs/build-support/emacs/wrapper.sh new file mode 100644 index 000000000000..44762bd4582b --- /dev/null +++ b/nixpkgs/pkgs/build-support/emacs/wrapper.sh @@ -0,0 +1,53 @@ +#!@bash@ + +IFS=: + +newLoadPath=() +newNativeLoadPath=() +addedNewLoadPath= +addedNewNativeLoadPath= + +if [[ -n $EMACSLOADPATH ]] +then + while read -rd: entry + do + if [[ -z $entry && -z $addedNewLoadPath ]] + then + newLoadPath+=(@wrapperSiteLisp@) + addedNewLoadPath=1 + fi + newLoadPath+=("$entry") + done <<< "$EMACSLOADPATH:" +else + newLoadPath+=(@wrapperSiteLisp@) + newLoadPath+=("") +fi + +# NOTE: Even though we treat EMACSNATIVELOADPATH like EMACSLOADPATH in +# this wrapper, empty elements in EMACSNATIVELOADPATH have no special +# meaning for Emacs. Only non-empty elements in EMACSNATIVELOADPATH +# will be prepended to native-comp-eln-load-path. 
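+# (In EMACSLOADPATH, by contrast, an empty element stands for the default
+# load-path, which is why the code above splices the wrapper site-lisp in just
+# before the first empty element, or appends a trailing empty element when the
+# variable was unset.)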
+# https://git.savannah.gnu.org/cgit/emacs.git/tree/lisp/startup.el?id=3685387e609753293c4518be75e77c659c3b2d8d#n599 +if [[ -n $EMACSNATIVELOADPATH ]] +then + while read -rd: entry + do + if [[ -z $entry && -z $addedNewNativeLoadPath ]] + then + newNativeLoadPath+=(@wrapperSiteLispNative@) + addedNewNativeLoadPath=1 + fi + newNativeLoadPath+=("$entry") + done <<< "$EMACSNATIVELOADPATH:" +else + newNativeLoadPath+=(@wrapperSiteLispNative@) + newNativeLoadPath+=("") +fi + +export EMACSLOADPATH="${newLoadPath[*]}" +export emacsWithPackages_siteLisp=@wrapperSiteLisp@ + +export EMACSNATIVELOADPATH="${newNativeLoadPath[*]}" +export emacsWithPackages_siteLispNative=@wrapperSiteLispNative@ + +exec @prog@ "$@" diff --git a/nixpkgs/pkgs/build-support/expand-response-params/default.nix b/nixpkgs/pkgs/build-support/expand-response-params/default.nix new file mode 100644 index 000000000000..7ce15e98c8d9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/expand-response-params/default.nix @@ -0,0 +1,28 @@ +{ stdenv }: + +# A "response file" is a sequence of arguments that is passed via a +# file, rather than via argv[]. + +# For more information see: +# https://gcc.gnu.org/wiki/Response_Files +# https://www.intel.com/content/www/us/en/docs/dpcpp-cpp-compiler/developer-guide-reference/2023-0/use-response-files.html + +stdenv.mkDerivation { + name = "expand-response-params"; + src = ./expand-response-params.c; + strictDeps = true; + enableParallelBuilding = true; + # Work around "stdenv-darwin-boot-2 is not allowed to refer to path + # /nix/store/...-expand-response-params.c" + unpackPhase = '' + cp "$src" expand-response-params.c + src=$PWD + ''; + buildPhase = '' + NIX_CC_USE_RESPONSE_FILE=0 "$CC" -std=c99 -O3 -o "expand-response-params" expand-response-params.c + ''; + installPhase = '' + mkdir -p $prefix/bin + mv expand-response-params $prefix/bin/ + ''; +} diff --git a/nixpkgs/pkgs/build-support/expand-response-params/expand-response-params.c b/nixpkgs/pkgs/build-support/expand-response-params/expand-response-params.c new file mode 100644 index 000000000000..05b9c62b1e8d --- /dev/null +++ b/nixpkgs/pkgs/build-support/expand-response-params/expand-response-params.c @@ -0,0 +1,84 @@ +#include <assert.h> +#include <ctype.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + +typedef struct { char *data; size_t len, cap; } String; + +void resize(String *s, size_t len) { + s->len = len; + if (s->cap < s->len) { + s->cap = s->len * 2; + s->data = (char *)realloc(s->data, s->cap); + assert(s->data); + } +} + +void append(String *s, const char *data, size_t len) { + resize(s, s->len + len); + memcpy(s->data + s->len - len, data, len); +} + +typedef enum { space = 0, other = 1, backslash = 2, apostrophe = 3, quotation_mark = 4 } CharClass; +typedef enum { outside, unq, unq_esc, sq, sq_esc, dq, dq_esc } State; + +// current State -> CharClass -> next State +const State transitions[][5] = { + [outside] = {outside, unq, unq_esc, sq, dq}, + [unq] = {outside, unq, unq_esc, sq, dq}, + [unq_esc] = {unq, unq, unq, unq, unq}, + [sq] = {sq, sq, sq_esc, unq, sq}, + [sq_esc] = {sq, sq, sq, sq, sq}, + [dq] = {dq, dq, dq_esc, dq, unq}, + [dq_esc] = {dq, dq, dq, dq, dq}, +}; + +CharClass charClass(int c) { + return c == '\\' ? backslash : c == '\'' ? apostrophe : c == '"' ? quotation_mark : + isspace(c) ? space : other; +} + +// expandArg writes NULL-terminated expansions of `arg', a NULL-terminated +// string, to stdout. If arg does not begin with `@' or does not refer to a +// file, it is written as is. 
Otherwise the contents of the file are +// recursively expanded. On unexpected EOF in malformed response files an +// incomplete final argument is written, even if it is empty, to parse like GCC. +void expandArg(String *arg) { + FILE *f; + if (arg->data[0] != '@' || !(f = fopen(&arg->data[1], "r"))) { + fwrite(arg->data, 1, arg->len, stdout); + return; + } + + resize(arg, 0); + State cur = outside; + int c; + do { + c = fgetc(f); + State next = transitions[cur][charClass(c)]; + if ((cur == unq && next == outside) || (cur != outside && c == EOF)) { + append(arg, "", 1); + expandArg(arg); + resize(arg, 0); + } else if (cur == unq_esc || cur == sq_esc || cur == dq_esc || + (cur == outside ? next == unq : cur == next)) { + char s = c; + append(arg, &s, 1); + } + cur = next; + } while (c != EOF); + + fclose(f); +} + +int main(int argc, char **argv) { + String arg = { 0 }; + while (*++argv) { + resize(&arg, 0); + append(&arg, *argv, strlen(*argv) + 1); + expandArg(&arg); + } + free(arg.data); + return EXIT_SUCCESS; +} diff --git a/nixpkgs/pkgs/build-support/fake-nss/default.nix b/nixpkgs/pkgs/build-support/fake-nss/default.nix new file mode 100644 index 000000000000..7d85ec5fc0a5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fake-nss/default.nix @@ -0,0 +1,24 @@ +# Provide a /etc/passwd and /etc/group that contain root and nobody. +# Useful when packaging binaries that insist on using nss to look up +# username/groups (like nginx). +# /bin/sh is fine to not exist, and provided by another shim. +{ lib, symlinkJoin, writeTextDir, runCommand, extraPasswdLines ? [], extraGroupLines ? [] }: +symlinkJoin { + name = "fake-nss"; + paths = [ + (writeTextDir "etc/passwd" '' + root:x:0:0:root user:/var/empty:/bin/sh + ${lib.concatStrings (map (line: line + "\n") extraPasswdLines)}nobody:x:65534:65534:nobody:/var/empty:/bin/sh + '') + (writeTextDir "etc/group" '' + root:x:0: + ${lib.concatStrings (map (line: line + "\n") extraGroupLines)}nobody:x:65534: + '') + (writeTextDir "etc/nsswitch.conf" '' + hosts: files dns + '') + (runCommand "var-empty" { } '' + mkdir -p $out/var/empty + '') + ]; +} diff --git a/nixpkgs/pkgs/build-support/fetch9front/default.nix b/nixpkgs/pkgs/build-support/fetch9front/default.nix new file mode 100644 index 000000000000..677fee1decd7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetch9front/default.nix @@ -0,0 +1,36 @@ +{ fetchgit, fetchzip, lib }: + +lib.makeOverridable ( + { owner + , repo + , rev + , domain ? "git.9front.org" + , name ? "source" + , leaveDotGit ? false + , deepClone ? false + , ... # For hash agility + } @ args: + + let + passthruAttrs = removeAttrs args [ "domain" "owner" "repo" "rev" "leaveDotGit" "deepClone" ]; + + useFetchGit = leaveDotGit || deepClone; + fetcher = if useFetchGit then fetchgit else fetchzip; + + gitRepoUrl = "git://${domain}/${owner}/${repo}"; + + fetcherArgs = (if useFetchGit then { + # git9 does not support shallow fetches + inherit rev leaveDotGit; + url = gitRepoUrl; + } else { + url = "https://${domain}/${owner}/${repo}/${rev}/snap.tar.gz"; + + passthru = { + inherit gitRepoUrl; + }; + }) // passthruAttrs // { inherit name; }; + in + + fetcher fetcherArgs // { inherit rev; } +) diff --git a/nixpkgs/pkgs/build-support/fetchbitbucket/default.nix b/nixpkgs/pkgs/build-support/fetchbitbucket/default.nix new file mode 100644 index 000000000000..2f9103f2bb3e --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchbitbucket/default.nix @@ -0,0 +1,11 @@ +{ fetchzip, lib }: + +lib.makeOverridable ( +{ owner, repo, rev, name ? "source" +, ... 
# For hash agility +}@args: fetchzip ({ + inherit name; + url = "https://bitbucket.org/${owner}/${repo}/get/${rev}.tar.gz"; + meta.homepage = "https://bitbucket.org/${owner}/${repo}/"; +} // removeAttrs args [ "owner" "repo" "rev" ]) // { inherit rev; } +) diff --git a/nixpkgs/pkgs/build-support/fetchbittorrent/default.nix b/nixpkgs/pkgs/build-support/fetchbittorrent/default.nix new file mode 100644 index 000000000000..916c7e7d8c94 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchbittorrent/default.nix @@ -0,0 +1,60 @@ +{ lib, runCommand, transmission_noSystemd, rqbit, writeShellScript, formats, cacert, rsync }: +let + urlRegexp = ''.*xt=urn:bt[im]h:([^&]{64}|[^&]{40}).*''; +in +{ url +, name ? + if (builtins.match urlRegexp url) == null then + "bittorrent" + else + "bittorrent-" + builtins.head (builtins.match urlRegexp url) +, config ? if (backend == "transmission") then { } else throw "json config for configuring fetchFromBitorrent only works with the transmission backend" +, hash +, backend ? "transmission" +, recursiveHash ? true +, postFetch ? "" +, postUnpack ? "" +}: +let + afterSuccess = writeShellScript "fetch-bittorrent-done.sh" '' + ${postUnpack} + # Flatten the directory, so that only the torrent contents are in $out, not + # the folder name + shopt -s dotglob + mv -v $downloadedDirectory/*/* $out + rm -v -rf $downloadedDirectory + unset downloadedDirectory + ${postFetch} + kill $PPID + ''; + jsonConfig = (formats.json {}).generate "jsonConfig" config; +in +runCommand name { + nativeBuildInputs = [ cacert ] ++ (if (backend == "transmission" ) then [ transmission_noSystemd ] else if (backend == "rqbit") then [ rqbit ] else throw "rqbit or transmission are the only available backends for fetchbittorrent"); + outputHashAlgo = if hash != "" then null else "sha256"; + outputHash = hash; + outputHashMode = if recursiveHash then "recursive" else "flat"; + + # url will be written to the derivation, meaning it can be parsed and utilized + # by external tools, such as tools that may want to seed fetchBittorrent calls + # in nixpkgs + inherit url; +} +(if (backend == "transmission") then '' + export HOME=$TMP + export downloadedDirectory=$out/downloadedDirectory + mkdir -p $downloadedDirectory + mkdir -p $HOME/.config/transmission + cp ${jsonConfig} $HOME/.config/transmission/settings.json + function handleChild { + # This detects failures and logs the contents of the transmission fetch + find $out + exit 0 + } + trap handleChild CHLD + transmission-cli --port $(shuf -n 1 -i 49152-65535) --portmap --finish ${afterSuccess} --download-dir $downloadedDirectory --config-dir "$HOME"/.config/transmission "$url" +'' else +'' + export HOME=$TMP + rqbit --disable-dht-persistence --http-api-listen-addr "127.0.0.1:$(shuf -n 1 -i 49152-65535)" download -o $out --exit-on-finish "$url" +'') diff --git a/nixpkgs/pkgs/build-support/fetchbittorrent/tests.nix b/nixpkgs/pkgs/build-support/fetchbittorrent/tests.nix new file mode 100644 index 000000000000..fce1b3933a72 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchbittorrent/tests.nix @@ -0,0 +1,25 @@ +{ testers, fetchFromBittorrent, ... 
}: + +{ + http-link = testers.invalidateFetcherByDrvHash fetchFromBittorrent { + url = "https://webtorrent.io/torrents/wired-cd.torrent"; + hash = "sha256-OCsC22WuanqoN6lPv5wDT5ZxPcEHDpZ1EgXGvz1SDYo="; + backend = "transmission"; + }; + magnet-link = testers.invalidateFetcherByDrvHash fetchFromBittorrent { + url = "magnet:?xt=urn:btih:a88fda5954e89178c372716a6a78b8180ed4dad3&dn=The+WIRED+CD+-+Rip.+Sample.+Mash.+Share&tr=udp%3A%2F%2Fexplodie.org%3A6969&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.empire-js.us%3A1337&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com&ws=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2F&xs=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2Fwired-cd.torrent"; + hash = "sha256-OCsC22WuanqoN6lPv5wDT5ZxPcEHDpZ1EgXGvz1SDYo="; + backend = "transmission"; + }; + http-link-rqbit = testers.invalidateFetcherByDrvHash fetchFromBittorrent { + url = "https://webtorrent.io/torrents/wired-cd.torrent"; + hash = "sha256-OCsC22WuanqoN6lPv5wDT5ZxPcEHDpZ1EgXGvz1SDYo="; + backend = "rqbit"; + }; + magnet-link-rqbit = testers.invalidateFetcherByDrvHash fetchFromBittorrent { + url = "magnet:?xt=urn:btih:a88fda5954e89178c372716a6a78b8180ed4dad3&dn=The+WIRED+CD+-+Rip.+Sample.+Mash.+Share&tr=udp%3A%2F%2Fexplodie.org%3A6969&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.empire-js.us%3A1337&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com&ws=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2F&xs=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2Fwired-cd.torrent"; + hash = "sha256-OCsC22WuanqoN6lPv5wDT5ZxPcEHDpZ1EgXGvz1SDYo="; + backend = "rqbit"; + }; +} + diff --git a/nixpkgs/pkgs/build-support/fetchbower/default.nix b/nixpkgs/pkgs/build-support/fetchbower/default.nix new file mode 100644 index 000000000000..fd971d431df8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchbower/default.nix @@ -0,0 +1,28 @@ +{ stdenvNoCC, lib, bower2nix, cacert }: +let + bowerVersion = version: + let + components = lib.splitString "#" version; + hash = lib.last components; + ver = if builtins.length components == 1 then (cleanName version) else hash; + in ver; + + cleanName = name: lib.replaceStrings ["/" ":"] ["-" "-"] name; + + fetchbower = name: version: target: outputHash: stdenvNoCC.mkDerivation { + name = "${cleanName name}-${bowerVersion version}"; + buildCommand = '' + fetch-bower --quiet --out=$PWD/out "${name}" "${target}" "${version}" + # In some cases, the result of fetchBower is different depending + # on the output directory (e.g. if the bower package contains + # symlinks). So use a local output directory before copying to + # $out. 
+ cp -R out $out + ''; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + inherit outputHash; + nativeBuildInputs = [ bower2nix cacert ]; + }; + +in fetchbower diff --git a/nixpkgs/pkgs/build-support/fetchbzr/builder.sh b/nixpkgs/pkgs/build-support/fetchbzr/builder.sh new file mode 100644 index 000000000000..991864719a07 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchbzr/builder.sh @@ -0,0 +1,8 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source "$stdenv/setup" + +echo "exporting \`$url' (revision $rev) into \`$out'" + +# Perform a lightweight checkout so that we don't end up importing +# all the repository's history. +XDG_CACHE_HOME="$TMPDIR" BRZ_LOG=/dev/null bzr -Ossl.cert_reqs=none export -r "$rev" --format=dir "$out" "$url" diff --git a/nixpkgs/pkgs/build-support/fetchbzr/default.nix b/nixpkgs/pkgs/build-support/fetchbzr/default.nix new file mode 100644 index 000000000000..b7db9e9274da --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchbzr/default.nix @@ -0,0 +1,15 @@ +{ stdenvNoCC, breezy }: +{ url, rev, sha256 }: + +stdenvNoCC.mkDerivation { + name = "bzr-export"; + + builder = ./builder.sh; + nativeBuildInputs = [ breezy ]; + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sha256; + + inherit url rev; +} diff --git a/nixpkgs/pkgs/build-support/fetchbzr/nix-prefetch-bzr b/nixpkgs/pkgs/build-support/fetchbzr/nix-prefetch-bzr new file mode 100755 index 000000000000..dbe8a7ef8013 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchbzr/nix-prefetch-bzr @@ -0,0 +1,74 @@ +#! /bin/sh -e + +url=$1 +rev=$2 +expHash=$3 + +hashType=$NIX_HASH_ALGO +if test -z "$hashType"; then + hashType=sha256 +fi +if test -z "$hashFormat"; then + hashFormat=--base32 +fi + +if test -z "$url"; then + echo "syntax: nix-prefetch-bzr URL [REVISION [EXPECTED-HASH]]" >&2 + exit 1 +fi + +revarg="-r $rev" +test -n "$rev" || revarg="" + +repoName=$(echo $url | sed ' + s,.*/\([^/]\+\)/trunk/*$,\1,;t + s,.*/\([^/]\+\)/branches/\([^/]\+\)/*$,\1-\2,;t + s,.*/\([^/]\+\)/tags/\([^/]\+\)/*$,\1-\2,;t + s,.*/\([^/]\+\)/*$,\1,;t +') +dstFile="bzr-export" + +# If the hash was given, a file with that hash may already be in the +# store. +if test -n "$expHash"; then + finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" $dstFile) + if ! nix-store --check-validity "$finalPath" 2> /dev/null; then + finalPath= + fi + hash=$expHash +fi + + +# If we don't know the hash or a path with that hash doesn't exist, +# download the file and add it to the store. +if test -z "$finalPath"; then + tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/bzr-checkout-tmp-XXXXXXXX")" + trap "rm -rf \"$tmpPath\"" EXIT + + tmpFile="$tmpPath/$dstFile" + + # Perform the checkout. + bzr -Ossl.cert_reqs=none export $revarg --format=dir "$tmpFile" "$url" + + echo "bzr revision is $(bzr revno $revarg "$url")" + + # Compute the hash. + hash=$(nix-hash --type $hashType $hashFormat $tmpFile) + if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi + + # Add the downloaded file to the Nix store. + finalPath=$(nix-store --add-fixed --recursive "$hashType" $tmpFile) + + if test -n "$expHash" -a "$expHash" != "$hash"; then + echo "hash mismatch for URL \`$url'" + exit 1 + fi +fi + +if ! 
test -n "$QUIET"; then echo "path is $finalPath" >&2; fi + +echo $hash + +if test -n "$PRINT_PATH"; then + echo $finalPath +fi diff --git a/nixpkgs/pkgs/build-support/fetchcvs/builder.sh b/nixpkgs/pkgs/build-support/fetchcvs/builder.sh new file mode 100644 index 000000000000..4b49e9676ec0 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchcvs/builder.sh @@ -0,0 +1,26 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup + +(echo "#!$SHELL"; \ + echo 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "$@"') > ssh +chmod +x ssh +export CVS_RSH=$PWD/ssh + +# creating the export drictory and checking out there only to be able to +# move the content without the root directory into $out ... +# cvs -f -d "$url" export $tag -d "$out" "$module" +# should work (but didn't - got no response on #cvs) +# See als man Page for those options + +mkdir -p export +if [ -n "$tag" ]; then + tag="-r $tag" +else + if [ -n "$date" ]; then + tag="-D $date" + else + tag="-D NOW" + fi +fi +(cd export && cvs -f -z0 -d "$cvsRoot" export $tag "$module") +mv export/* $out diff --git a/nixpkgs/pkgs/build-support/fetchcvs/default.nix b/nixpkgs/pkgs/build-support/fetchcvs/default.nix new file mode 100644 index 000000000000..43a10c3aa552 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchcvs/default.nix @@ -0,0 +1,22 @@ +# example tags: +# date="2007-20-10"; (get the last version before given date) +# tag="<tagname>" (get version by tag name) +# If you don't specify neither one date="NOW" will be used (get latest) + +{stdenvNoCC, cvs, openssh, lib}: + +lib.makeOverridable ( +{cvsRoot, module, tag ? null, date ? null, sha256}: + +stdenvNoCC.mkDerivation { + name = "cvs-export"; + builder = ./builder.sh; + nativeBuildInputs = [cvs openssh]; + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sha256; + + inherit cvsRoot module sha256 tag date; +} +) diff --git a/nixpkgs/pkgs/build-support/fetchcvs/nix-prefetch-cvs b/nixpkgs/pkgs/build-support/fetchcvs/nix-prefetch-cvs new file mode 100755 index 000000000000..b6a169f8b531 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchcvs/nix-prefetch-cvs @@ -0,0 +1,80 @@ +#! /bin/sh -e + +cvsRoot=$1 +module=$2 +tag=$3 +expHash=$4 + +hashType=$NIX_HASH_ALGO +if test -z "$hashType"; then + hashType=sha256 +fi + +if test -z "$cvsRoot"; then + echo "syntax: nix-prefetch-cvs CVSROOT MODULE [TAG [HASH]]" >&2 + exit 1 +elif test -z "$module"; then + echo "syntax: nix-prefetch-cvs CVSROOT MODULE [TAG [HASH]]" >&2 + exit 1 +fi + + +mkTempDir() { + tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/nix-prefetch-cvs-XXXXXXXX")" + trap removeTempDir EXIT +} + +removeTempDir() { + rm -rf "$tmpPath" +} + + +# If the hash was given, a file with that hash may already be in the +# store. +if test -n "$expHash"; then + finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" cvs-export) + if ! nix-store --check-validity "$finalPath" 2> /dev/null; then + finalPath= + fi + hash=$expHash +fi + + +# If we don't know the hash or a path with that hash doesn't exist, +# download the file and add it to the store. +if test -z "$finalPath"; then + + mkTempDir + tmpFile=$tmpPath/cvs-export + #mkdir $tmpPath + + # Perform the checkout. + if test -z "$tag"; then + args=(-D "now") + elif test "$USE_DATE" = "1"; then + args=(-D "$tag") + else + args=(-r "$tag") + fi + (cd "$tmpPath" && cvs -f -z0 -d $cvsRoot export "${args[*]}" -d cvs-export $module >&2) + + # Compute the hash. + hash=$(nix-hash --type $hashType $hashFormat $tmpFile) + if ! 
test -n "$QUIET"; then echo "hash is $hash" >&2; fi + + # Add the downloaded file to the Nix store. + finalPath=$(nix-store --add-fixed --recursive "$hashType" $tmpFile) + + if test -n "$expHash" -a "$expHash" != "$hash"; then + echo "hash mismatch for CVS root \`$cvsRoot'" + exit 1 + fi +fi + +if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi + +echo $hash + +if test -n "$PRINT_PATH"; then + echo $finalPath +fi diff --git a/nixpkgs/pkgs/build-support/fetchdarcs/builder.sh b/nixpkgs/pkgs/build-support/fetchdarcs/builder.sh new file mode 100644 index 000000000000..75b9066dba6f --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdarcs/builder.sh @@ -0,0 +1,22 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup + +tagtext="" +tagflags="" +# Darcs hashes are sha1 (120 bits, 40-character hex) +if [[ "$rev" =~ [a-fA-F0-9]{40} ]]; then + tagtext="(hash $rev)" + tagflags="--to-hash=$rev" +elif test -n "$rev"; then + tagtext="(tag $rev)" + tagflags="--tag=$rev" +elif test -n "$context"; then + tagtext="(context)" + tagflags="--context=$context" +fi + +echo "getting $url $partial ${tagtext} into $out" + +darcs get --lazy $tagflags "$url" "$out" +# remove metadata, because it can change +rm -rf "$out/_darcs" diff --git a/nixpkgs/pkgs/build-support/fetchdarcs/default.nix b/nixpkgs/pkgs/build-support/fetchdarcs/default.nix new file mode 100644 index 000000000000..6073efec2815 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdarcs/default.nix @@ -0,0 +1,21 @@ +{stdenvNoCC, darcs, cacert, lib}: + +lib.makeOverridable ( +{ url +, rev ? null +, context ? null +, sha256 ? "" +, name ? "fetchdarcs" +}: + +stdenvNoCC.mkDerivation { + builder = ./builder.sh; + nativeBuildInputs = [cacert darcs]; + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sha256; + + inherit url rev context name; +} +) diff --git a/nixpkgs/pkgs/build-support/fetchdebianpatch/default.nix b/nixpkgs/pkgs/build-support/fetchdebianpatch/default.nix new file mode 100644 index 000000000000..8d8076bd59d6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdebianpatch/default.nix @@ -0,0 +1,19 @@ +{ lib, fetchpatch }: + +lib.makeOverridable ( + { pname, version, debianRevision ? null, area ? "main", + patch, name ? patch, hash }: + let + inherit (lib.strings) hasPrefix substring; + prefix = + substring 0 (if hasPrefix "lib" pname then 4 else 1) pname; + versionString = + if debianRevision == null then version + else "${version}-${debianRevision}"; + in fetchpatch { + inherit name hash; + url = + "https://sources.debian.org/data/${area}/${prefix}/" + + "${pname}/${versionString}/debian/patches/${patch}"; + } +) diff --git a/nixpkgs/pkgs/build-support/fetchdebianpatch/tests.nix b/nixpkgs/pkgs/build-support/fetchdebianpatch/tests.nix new file mode 100644 index 000000000000..58f3b395d1fc --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdebianpatch/tests.nix @@ -0,0 +1,19 @@ +{ testers, fetchDebianPatch, ... 
}: + +{ + simple = testers.invalidateFetcherByDrvHash fetchDebianPatch { + pname = "pysimplesoap"; + version = "1.16.2"; + debianRevision = "5"; + patch = "Add-quotes-to-SOAPAction-header-in-SoapClient"; + hash = "sha256-xA8Wnrpr31H8wy3zHSNfezFNjUJt1HbSXn3qUMzeKc0="; + }; + + libPackage = testers.invalidateFetcherByDrvHash fetchDebianPatch { + pname = "libfile-pid-perl"; + version = "1.01"; + debianRevision = "2"; + patch = "missing-pidfile"; + hash = "sha256-VBsIYyCnjcZLYQ2Uq2MKPK3kF2wiMKvnq0m727DoavM="; + }; +} diff --git a/nixpkgs/pkgs/build-support/fetchdocker/credentials.nix b/nixpkgs/pkgs/build-support/fetchdocker/credentials.nix new file mode 100644 index 000000000000..f8a229ccb6bb --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdocker/credentials.nix @@ -0,0 +1,39 @@ +{ lib }: +# We provide three paths to get the credentials into the builder's +# environment: +# +# 1. Via impureEnvVars. This method is difficult for multi-user Nix +# installations (but works very well for single-user Nix +# installations!) because it requires setting the environment +# variables on the nix-daemon which is either complicated or unsafe +# (i.e: configuring via Nix means the secrets will be persisted +# into the store) +# +# 2. If the DOCKER_CREDENTIALS key with a path to a credentials file +# is added to the NIX_PATH (usually via the '-I ' argument to most +# Nix tools) then an attempt will be made to read credentials from +# it. The semantics are simple, the file should contain two lines +# for the username and password based authentication: +# +# $ cat ./credentials-file.txt +# DOCKER_USER=myusername +# DOCKER_PASS=mypassword +# +# ... and a single line for the token based authentication: +# +# $ cat ./credentials-file.txt +# DOCKER_TOKEN=mytoken +# +# 3. A credential file at /etc/nix-docker-credentials.txt with the +# same format as the file described in #2 can also be used to +# communicate credentials to the builder. This is necessary for +# situations (like Hydra) where you cannot customize the NIX_PATH +# given to the nix-build invocation to provide it with the +# DOCKER_CREDENTIALS path +let + pathParts = + (builtins.filter + ({prefix, path}: "DOCKER_CREDENTIALS" == prefix) + builtins.nixPath); +in + lib.optionalString (pathParts != []) ((builtins.head pathParts).path) diff --git a/nixpkgs/pkgs/build-support/fetchdocker/default.nix b/nixpkgs/pkgs/build-support/fetchdocker/default.nix new file mode 100644 index 000000000000..ef6132bfe3a5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdocker/default.nix @@ -0,0 +1,61 @@ +{ stdenv, lib, coreutils, bash, gnutar, writeText }: +let + stripScheme = + builtins.replaceStrings [ "https://" "http://" ] [ "" "" ]; + stripNixStore = + s: lib.removePrefix "${builtins.storeDir}/" s; +in +{ name +, registry ? "https://registry-1.docker.io/v2/" +, repository ? "library" +, imageName +, tag +, imageLayers +, imageConfig +, image ? "${stripScheme registry}/${repository}/${imageName}:${tag}" +}: + +# Make sure there are *no* slashes in the repository or container +# names since we use these to make the output derivation name for the +# nix-store path. +assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters repository); +assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters imageName); + +let + # Abuse paths to collapse possible double slashes + repoTag0 = builtins.toString (/. 
+ "/${stripScheme registry}/${repository}/${imageName}"); + repoTag1 = lib.removePrefix "/" repoTag0; + + layers = builtins.map stripNixStore imageLayers; + + manifest = + writeText "manifest.json" (builtins.toJSON [ + { Config = stripNixStore imageConfig; + Layers = layers; + RepoTags = [ "${repoTag1}:${tag}" ]; + }]); + + repositories = + writeText "repositories" (builtins.toJSON { + ${repoTag1} = { + ${tag} = lib.last layers; + }; + }); + + imageFileStorePaths = + writeText "imageFileStorePaths.txt" + (lib.concatStringsSep "\n" ((lib.unique imageLayers) ++ [imageConfig])); +in +stdenv.mkDerivation { + builder = ./fetchdocker-builder.sh; + buildInputs = [ coreutils ]; + preferLocalBuild = true; + + inherit name imageName repository tag; + inherit bash gnutar manifest repositories; + inherit imageFileStorePaths; + + passthru = { + inherit image; + }; +} diff --git a/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerConfig.nix b/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerConfig.nix new file mode 100644 index 000000000000..9fd813bfa575 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerConfig.nix @@ -0,0 +1,13 @@ +pkgargs@{ stdenv, lib, haskellPackages, writeText, gawk }: +let + generic-fetcher = + import ./generic-fetcher.nix pkgargs; +in + +args@{ repository ? "library", imageName, tag, ... }: + +generic-fetcher ({ + fetcher = "hocker-config"; + name = "${repository}_${imageName}_${tag}-config.json"; + tag = "unused"; +} // args) diff --git a/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerLayer.nix b/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerLayer.nix new file mode 100644 index 000000000000..869ba637429c --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdocker/fetchDockerLayer.nix @@ -0,0 +1,13 @@ +pkgargs@{ stdenv, lib, haskellPackages, writeText, gawk }: +let + generic-fetcher = + import ./generic-fetcher.nix pkgargs; +in + +args@{ layerDigest, ... }: + +generic-fetcher ({ + fetcher = "hocker-layer"; + name = "docker-layer-${layerDigest}.tar.gz"; + tag = "unused"; +} // args) diff --git a/nixpkgs/pkgs/build-support/fetchdocker/fetchdocker-builder.sh b/nixpkgs/pkgs/build-support/fetchdocker/fetchdocker-builder.sh new file mode 100644 index 000000000000..4eb70f672d48 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdocker/fetchdocker-builder.sh @@ -0,0 +1,28 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source "${stdenv}/setup" +echo "exporting ${repository}/${imageName} (tag: ${tag}) into ${out}" +mkdir -p "${out}" + +cat <<EOF > "${out}/compositeImage.sh" +#! ${bash}/bin/bash +# +# Create a tar archive of a docker image's layers, docker image config +# json, manifest.json, and repositories json; this streams directly to +# stdout and is intended to be used in concert with docker load, i.e: +# +# ${out}/compositeImage.sh | docker load + +# The first character follow the 's' command for sed becomes the +# delimiter sed will use; this makes the transformation regex easy to +# read. We feed tar a file listing the files we want in the archive, +# because the paths are absolute and docker load wants them flattened in +# the archive, we need to transform all of the paths going in by +# stripping everything *including* the last solidus so that we end up +# with the basename of the path. 
+${gnutar}/bin/tar \ + --transform='s=.*/==' \ + --transform="s=.*-manifest.json=manifest.json=" \ + --transform="s=.*-repositories=repositories=" \ + -c "${manifest}" "${repositories}" -T "${imageFileStorePaths}" +EOF +chmod +x "${out}/compositeImage.sh" diff --git a/nixpkgs/pkgs/build-support/fetchdocker/generic-fetcher.nix b/nixpkgs/pkgs/build-support/fetchdocker/generic-fetcher.nix new file mode 100644 index 000000000000..95b193490a82 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchdocker/generic-fetcher.nix @@ -0,0 +1,93 @@ +{ stdenv, lib, haskellPackages, writeText, gawk }: +let + awk = "${gawk}/bin/awk"; + dockerCredentialsFile = import ./credentials.nix { inherit lib; }; +in +{ fetcher +, name + , registry ? "https://registry-1.docker.io/v2/" + , repository ? "library" + , imageName + , sha256 + , tag ? "" + , layerDigest ? "" +}: + +# There must be no slashes in the repository or container names since +# we use these to make the output derivation name for the nix store +# path +assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters repository); +assert null == lib.findFirst (c: "/"==c) null (lib.stringToCharacters imageName); + +# Only allow hocker-config and hocker-layer as fetchers for now +assert (builtins.elem fetcher ["hocker-config" "hocker-layer"]); + +# If layerDigest is non-empty then it must not have a 'sha256:' prefix! +assert + (if layerDigest != "" + then !lib.hasPrefix "sha256:" layerDigest + else true); + +let + layerDigestFlag = + lib.optionalString (layerDigest != "") "--layer ${layerDigest}"; +in +stdenv.mkDerivation { + inherit name; + builder = writeText "${fetcher}-builder.sh" '' + source "$stdenv/setup" + echo "${fetcher} exporting to $out" + + declare -A creds + + # This is a hack for Hydra since we have no way of adding values + # to the NIX_PATH for Hydra jobsets!! + staticCredentialsFile="/etc/nix-docker-credentials.txt" + if [ ! 
-f "$dockerCredentialsFile" -a -f "$staticCredentialsFile" ]; then + echo "credentials file not set, falling back on static credentials file at: $staticCredentialsFile" + dockerCredentialsFile=$staticCredentialsFile + fi + + if [ -f "$dockerCredentialsFile" ]; then + echo "using credentials from $dockerCredentialsFile" + + CREDSFILE=$(cat "$dockerCredentialsFile") + creds[token]=$(${awk} -F'=' '/DOCKER_TOKEN/ {print $2}' <<< "$CREDSFILE" | head -n1) + + # Prefer DOCKER_TOKEN over the username and password + # authentication method + if [ -z "''${creds[token]}" ]; then + creds[user]=$(${awk} -F'=' '/DOCKER_USER/ {print $2}' <<< "$CREDSFILE" | head -n1) + creds[pass]=$(${awk} -F'=' '/DOCKER_PASS/ {print $2}' <<< "$CREDSFILE" | head -n1) + fi + fi + + # These variables will be filled in first by the impureEnvVars, if + # those variables are empty then they will default to the + # credentials that may have been read in from the 'DOCKER_CREDENTIALS' + DOCKER_USER="''${DOCKER_USER:-''${creds[user]}}" + DOCKER_PASS="''${DOCKER_PASS:-''${creds[pass]}}" + DOCKER_TOKEN="''${DOCKER_TOKEN:-''${creds[token]}}" + + ${fetcher} --out="$out" \ + ''${registry:+--registry "$registry"} \ + ''${DOCKER_USER:+--username "$DOCKER_USER"} \ + ''${DOCKER_PASS:+--password "$DOCKER_PASS"} \ + ''${DOCKER_TOKEN:+--token "$DOCKER_TOKEN"} \ + ${layerDigestFlag} \ + "${repository}/${imageName}" \ + "${tag}" + ''; + + buildInputs = [ haskellPackages.hocker ]; + + outputHashAlgo = "sha256"; + outputHashMode = "flat"; + outputHash = sha256; + + preferLocalBuild = true; + + impureEnvVars = [ "DOCKER_USER" "DOCKER_PASS" "DOCKER_TOKEN" ]; + + inherit registry dockerCredentialsFile; +} diff --git a/nixpkgs/pkgs/build-support/fetchfirefoxaddon/default.nix b/nixpkgs/pkgs/build-support/fetchfirefoxaddon/default.nix new file mode 100644 index 000000000000..e07a6a1a79dd --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchfirefoxaddon/default.nix @@ -0,0 +1,57 @@ +{ stdenv +, fetchurl +, jq +, strip-nondeterminism +, unzip +, writeScript +, zip +}: + +{ name +, url ? null +, sha1 ? "" +, sha256 ? "" +, sha512 ? "" +, fixedExtid ? null +, hash ? "" +, src ? "" +}: + +let + extid = if fixedExtid == null then "nixos@${name}" else fixedExtid; + source = if url == null then src else + fetchurl { + url = url; + inherit sha1 sha256 sha512 hash; + }; +in +stdenv.mkDerivation { + inherit name; + + passthru = { + inherit extid; + }; + + builder = writeScript "xpibuilder" '' + source $stdenv/setup + + echo "firefox addon $name into $out" + + UUID="${extid}" + mkdir -p "$out/$UUID" + unzip -q ${source} -d "$out/$UUID" + NEW_MANIFEST=$(jq '. + {"applications": { "gecko": { "id": "${extid}" }}, "browser_specific_settings":{"gecko":{"id": "${extid}"}}}' "$out/$UUID/manifest.json") + echo "$NEW_MANIFEST" > "$out/$UUID/manifest.json" + cd "$out/$UUID" + zip -r -q -FS "$out/$UUID.xpi" * + strip-nondeterminism "$out/$UUID.xpi" + rm -r "$out/$UUID" + ''; + + nativeBuildInputs = [ + jq + strip-nondeterminism + unzip + zip + ]; +} diff --git a/nixpkgs/pkgs/build-support/fetchfirefoxaddon/tests.nix b/nixpkgs/pkgs/build-support/fetchfirefoxaddon/tests.nix new file mode 100644 index 000000000000..a29f65c542ad --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchfirefoxaddon/tests.nix @@ -0,0 +1,21 @@ +{ testers, fetchFirefoxAddon, fetchurl, ... 
}: + +{ + simple = testers.invalidateFetcherByDrvHash fetchFirefoxAddon { + name = "image-search-options"; + # Chosen because it's only 147KB + url = "https://addons.mozilla.org/firefox/downloads/file/3059971/image_search_options-3.0.12-fx.xpi"; + sha256 = "sha256-H73YWX/DKxvhEwKpWOo7orAQ7c/rQywpljeyxYxv0Gg="; + }; + overridden-source = + let + image-search-options = fetchurl { + url = "https://addons.mozilla.org/firefox/downloads/file/3059971/image_search_options-3.0.12-fx.xpi"; + sha256 = "sha256-H73YWX/DKxvhEwKpWOo7orAQ7c/rQywpljeyxYxv0Gg="; + }; + in + testers.invalidateFetcherByDrvHash fetchFirefoxAddon { + name = "image-search-options"; + src = image-search-options; + }; +} diff --git a/nixpkgs/pkgs/build-support/fetchfossil/builder.sh b/nixpkgs/pkgs/build-support/fetchfossil/builder.sh new file mode 100644 index 000000000000..36b758ab574e --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchfossil/builder.sh @@ -0,0 +1,21 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup +echo "Cloning Fossil $url [$rev] into $out" + +# Fossil, bless its adorable little heart, wants to write global configuration +# to $HOME/.fossil. AFAICT, there is no way to disable this functionality. +# Instead, we'll let it write to the build directory. +export HOME=$(pwd) + +# We must explicitly set the admin user for the clone to something reasonable. +fossil clone -A nobody "$url" fossil-clone.fossil + +mkdir fossil-clone +WORKDIR=$(pwd) +mkdir $out +pushd $out +fossil open "$WORKDIR/fossil-clone.fossil" "$rev" +popd + +# Just nuke the checkout file. +rm $out/.fslckout diff --git a/nixpkgs/pkgs/build-support/fetchfossil/default.nix b/nixpkgs/pkgs/build-support/fetchfossil/default.nix new file mode 100644 index 000000000000..7866c403ec42 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchfossil/default.nix @@ -0,0 +1,20 @@ +{stdenv, lib, fossil, cacert}: + +{name ? null, url, rev, sha256}: + +stdenv.mkDerivation { + name = "fossil-archive" + (lib.optionalString (name != null) "-${name}"); + builder = ./builder.sh; + nativeBuildInputs = [fossil cacert]; + + # Envvar docs are hard to find.

A link for the future: + # https://www.fossil-scm.org/index.html/doc/trunk/www/env-opts.md + impureEnvVars = [ "http_proxy" ]; + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sha256; + + inherit url rev; + preferLocalBuild = true; +} diff --git a/nixpkgs/pkgs/build-support/fetchgit/builder.sh b/nixpkgs/pkgs/build-support/fetchgit/builder.sh new file mode 100644 index 000000000000..77f6381b09ab --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgit/builder.sh @@ -0,0 +1,20 @@ +# tested so far with: +# - no revision specified and remote has a HEAD which is used +# - revision specified and remote has a HEAD +# - revision specified and remote without HEAD +# +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup + +echo "exporting $url (rev $rev) into $out" + +$SHELL $fetcher --builder --url "$url" --out "$out" --rev "$rev" \ + ${leaveDotGit:+--leave-dotGit} \ + ${fetchLFS:+--fetch-lfs} \ + ${deepClone:+--deepClone} \ + ${fetchSubmodules:+--fetch-submodules} \ + ${sparseCheckout:+--sparse-checkout "$sparseCheckout"} \ + ${nonConeMode:+--non-cone-mode} \ + ${branchName:+--branch-name "$branchName"} + +runHook postFetch diff --git a/nixpkgs/pkgs/build-support/fetchgit/default.nix b/nixpkgs/pkgs/build-support/fetchgit/default.nix new file mode 100644 index 000000000000..1d06ce44a91e --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgit/default.nix @@ -0,0 +1,109 @@ +{lib, stdenvNoCC, git, git-lfs, cacert}: let + urlToName = url: rev: let + inherit (lib) removeSuffix splitString last; + base = last (splitString ":" (baseNameOf (removeSuffix "/" url))); + + matched = builtins.match "(.*)\\.git" base; + + short = builtins.substring 0 7 rev; + + appendShort = lib.optionalString ((builtins.match "[a-f0-9]*" rev) != null) "-${short}"; + in "${if matched == null then base else builtins.head matched}${appendShort}"; +in +lib.makeOverridable ( +{ url, rev ? "HEAD", sha256 ? "", hash ? "", leaveDotGit ? deepClone +, fetchSubmodules ? true, deepClone ? false +, branchName ? null +, sparseCheckout ? [] +, nonConeMode ? false +, name ? urlToName url rev +, # Shell code executed after the file has been fetched + # successfully. This can do things like check or transform the file. + postFetch ? "" +, preferLocalBuild ? true +, fetchLFS ? false +, # Shell code to build a netrc file for BASIC auth + netrcPhase ? null +, # Impure env vars (https://nixos.org/nix/manual/#sec-advanced-attributes) + # needed for netrcPhase + netrcImpureEnvVars ? [] +, meta ? {} +, allowedRequisites ? null +}: + +/* NOTE: + fetchgit has one problem: git fetch only works for refs. + This is because fetching arbitrary (maybe dangling) commits creates garbage collection risks + and checking whether a commit belongs to a ref is expensive. This may + change in the future when some caching is added to git (?) + Usually refs are either tags (refs/tags/*) or branches (refs/heads/*) + Cloning branches will make the hash check fail when there is an update. + But not all patches we want can be accessed by tags. + + The workaround is getting the last n commits so that it's likely that they + still contain the hash we want. + + for now : increase depth iteratively (TODO) + + real fix: ask git folks to add a + git fetch $HASH contained in $BRANCH + facility because checking that $HASH is contained in $BRANCH is less + expensive than fetching --depth $N. + Even if git folks implemented this feature soon it may take years until + server admins start using the new version? 
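+
+  As a concrete illustration (the values below simply mirror the ones used
+  in ./tests.nix and are not special in any way), pinning an exact commit
+  looks like:
+
+    fetchgit {
+      url = "https://github.com/NixOS/nix";
+      rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a";
+      sha256 = "sha256-7DszvbCNTjpzGRmpIVAWXk20P0/XTrWZ79KSOGLrUWY=";
+    }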
+*/ + +assert deepClone -> leaveDotGit; +assert nonConeMode -> (sparseCheckout != []); + +if hash != "" && sha256 != "" then + throw "Only one of sha256 or hash can be set" +else if builtins.isString sparseCheckout then + # Changed to throw on 2023-06-04 + throw "Please provide directories/patterns for sparse checkout as a list of strings. Passing a (multi-line) string is not supported any more." +else +stdenvNoCC.mkDerivation { + inherit name; + builder = ./builder.sh; + fetcher = ./nix-prefetch-git; + + nativeBuildInputs = [ git ] + ++ lib.optionals fetchLFS [ git-lfs ]; + + outputHashAlgo = if hash != "" then null else "sha256"; + outputHashMode = "recursive"; + outputHash = if hash != "" then + hash + else if sha256 != "" then + sha256 + else + lib.fakeSha256; + + # git-sparse-checkout(1) says: + # > When the --stdin option is provided, the directories or patterns are read + # > from standard in as a newline-delimited list instead of from the arguments. + sparseCheckout = builtins.concatStringsSep "\n" sparseCheckout; + + inherit url rev leaveDotGit fetchLFS fetchSubmodules deepClone branchName nonConeMode postFetch; + + postHook = if netrcPhase == null then null else '' + ${netrcPhase} + # required that git uses the netrc file + mv {,.}netrc + export HOME=$PWD + ''; + + GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt"; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ netrcImpureEnvVars ++ [ + "GIT_PROXY_COMMAND" "NIX_GIT_SSL_CAINFO" "SOCKS_SERVER" + ]; + + + inherit preferLocalBuild meta allowedRequisites; + + passthru = { + gitRepoUrl = url; + }; +} +) diff --git a/nixpkgs/pkgs/build-support/fetchgit/deterministic-git b/nixpkgs/pkgs/build-support/fetchgit/deterministic-git new file mode 100755 index 000000000000..67f585559116 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgit/deterministic-git @@ -0,0 +1,45 @@ +#!/bin/sh + +# some git commands print to stdout, which would contaminate our JSON output +clean_git(){ + git "$@" >&2 +} + +# Remove all remote branches, remove tags not reachable from HEAD, do a full +# repack and then garbage collect unreferenced objects. +make_deterministic_repo(){ + local repo="$1" + + # run in sub-shell to not touch current working directory + ( + cd "$repo" + # Remove files that contain timestamps or otherwise have non-deterministic + # properties. + rm -rf .git/logs/ .git/hooks/ .git/index .git/FETCH_HEAD .git/ORIG_HEAD \ + .git/refs/remotes/origin/HEAD .git/config + + # Remove all remote branches. + git branch -r | while read -r branch; do + clean_git branch -rD "$branch" + done + + # Remove tags not reachable from HEAD. If we're exactly on a tag, don't + # delete it. + maybe_tag=$(git tag --points-at HEAD) + git tag --contains HEAD | while read -r tag; do + if [ "$tag" != "$maybe_tag" ]; then + clean_git tag -d "$tag" + fi + done + + # Do a full repack. Must run single-threaded, or else we lose determinism. + clean_git config pack.threads 1 + clean_git repack -A -d -f + rm -f .git/config + + # Garbage collect unreferenced objects. + # Note: --keep-largest-pack prevents non-deterministic ordering of packs + # listed in .git/objects/info/packs by only using a single pack + clean_git gc --prune=all --keep-largest-pack + ) +} diff --git a/nixpkgs/pkgs/build-support/fetchgit/nix-prefetch-git b/nixpkgs/pkgs/build-support/fetchgit/nix-prefetch-git new file mode 100755 index 000000000000..feb6c9e30535 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgit/nix-prefetch-git @@ -0,0 +1,479 @@ +#! 
/usr/bin/env bash + +set -e -o pipefail + +url= +rev= +expHash= +hashType=$NIX_HASH_ALGO +deepClone=$NIX_PREFETCH_GIT_DEEP_CLONE +leaveDotGit=$NIX_PREFETCH_GIT_LEAVE_DOT_GIT +fetchSubmodules= +fetchLFS= +builder= +branchName=$NIX_PREFETCH_GIT_BRANCH_NAME + +# ENV params +out=${out:-} +http_proxy=${http_proxy:-} + +# allow overwriting cacert's ca-bundle.crt with a custom one +# this can be done by setting NIX_GIT_SSL_CAINFO and NIX_SSL_CERT_FILE environment variables for the nix-daemon +GIT_SSL_CAINFO=${NIX_GIT_SSL_CAINFO:-$GIT_SSL_CAINFO} + +# populated by clone_user_rev() +fullRev= +humanReadableRev= +commitDate= +commitDateStrict8601= + +if test -n "$deepClone"; then + deepClone=true +else + deepClone= +fi + +if test "$leaveDotGit" != 1; then + leaveDotGit= +else + leaveDotGit=true +fi + +usage(){ + echo >&2 "syntax: nix-prefetch-git [options] [URL [REVISION [EXPECTED-HASH]]] + +Options: + --out path Path where the output would be stored. + --url url Any url understood by 'git clone'. + --rev ref Any sha1 or references (such as refs/heads/master) + --hash h Expected hash. + --branch-name Branch name to check out into + --sparse-checkout Only fetch and checkout part of the repository. + --non-cone-mode Use non-cone mode for sparse checkouts. + --deepClone Clone the entire repository. + --no-deepClone Make a shallow clone of just the required ref. + --leave-dotGit Keep the .git directories. + --fetch-lfs Fetch git Large File Storage (LFS) files. + --fetch-submodules Fetch submodules. + --builder Clone as fetchgit does, but url, rev, and out option are mandatory. + --quiet Only print the final json summary. +" + exit 1 +} + +# some git commands print to stdout, which would contaminate our JSON output +clean_git(){ + git "$@" >&2 +} + +argi=0 +argfun="" +for arg; do + if test -z "$argfun"; then + case $arg in + --out) argfun=set_out;; + --url) argfun=set_url;; + --rev) argfun=set_rev;; + --hash) argfun=set_hashType;; + --branch-name) argfun=set_branchName;; + --deepClone) deepClone=true;; + --sparse-checkout) argfun=set_sparseCheckout;; + --non-cone-mode) nonConeMode=true;; + --quiet) QUIET=true;; + --no-deepClone) deepClone=;; + --leave-dotGit) leaveDotGit=true;; + --fetch-lfs) fetchLFS=true;; + --fetch-submodules) fetchSubmodules=true;; + --builder) builder=true;; + -h|--help) usage; exit;; + *) + : $((++argi)) + case $argi in + 1) url=$arg;; + 2) rev=$arg;; + 3) expHash=$arg;; + *) exit 1;; + esac + ;; + esac + else + case $argfun in + set_*) + var=${argfun#set_} + eval "$var=$(printf %q "$arg")" + ;; + esac + argfun="" + fi +done + +if test -z "$url"; then + usage +fi + + +init_remote(){ + local url=$1 + clean_git init --initial-branch=master + clean_git remote add origin "$url" + if [ -n "$sparseCheckout" ]; then + git config remote.origin.partialclonefilter "blob:none" + echo "$sparseCheckout" | git sparse-checkout set --stdin ${nonConeMode:+--no-cone} + fi + ( [ -n "$http_proxy" ] && clean_git config http.proxy "$http_proxy" ) || true +} + +# Return the reference of an hash if it exists on the remote repository. +ref_from_hash(){ + local hash=$1 + git ls-remote origin | sed -n "\,$hash\t, { s,\(.*\)\t\(.*\),\2,; p; q}" +} + +# Return the hash of a reference if it exists on the remote repository. +hash_from_ref(){ + local ref=$1 + git ls-remote origin | sed -n "\,\t$ref, { s,\(.*\)\t\(.*\),\1,; p; q}" +} + +# Returns a name based on the url and reference +# +# This function needs to be in sync with nix's fetchgit implementation +# of urlToName() to re-use the same nix store paths. 
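+#
+# For example (the revision is the one used in ./tests.nix):
+#   url_to_name "https://github.com/NixOS/nix" "9d9dbe6ed05854e03811c361a3380e09183f4f4a"
+# prints "nix-9d9dbe6", while a symbolic ref such as "refs/heads/master"
+# leaves the name as just "nix".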
+url_to_name(){ + local url=$1 + local ref=$2 + local base + base=$(basename "$url" .git | cut -d: -f2) + + if [[ $ref =~ ^[a-z0-9]+$ ]]; then + echo "$base-${ref:0:7}" + else + echo "$base" + fi +} + +# Fetch and checkout the right sha1 +checkout_hash(){ + local hash="$1" + local ref="$2" + + if test -z "$hash"; then + hash=$(hash_from_ref "$ref") + fi + + [[ -z "$deepClone" ]] && \ + clean_git fetch ${builder:+--progress} --depth=1 origin "$hash" || \ + clean_git fetch -t ${builder:+--progress} origin || return 1 + + local object_type=$(git cat-file -t "$hash") + if [[ "$object_type" == "commit" ]]; then + clean_git checkout -b "$branchName" "$hash" || return 1 + elif [[ "$object_type" == "tree" ]]; then + clean_git config user.email "nix-prefetch-git@localhost" + clean_git config user.name "nix-prefetch-git" + local commit_id=$(git commit-tree "$hash" -m "Commit created from tree hash $hash") + clean_git checkout -b "$branchName" "$commit_id" || return 1 + else + echo "Unrecognized git object type: $object_type" + return 1 + fi +} + +# Fetch only a branch/tag and checkout it. +checkout_ref(){ + local hash="$1" + local ref="$2" + + if [[ -n "$deepClone" ]]; then + # The caller explicitly asked for a deep clone. Deep clones + # allow "git describe" and similar tools to work. See + # https://marc.info/?l=nix-dev&m=139641582514772 + # for a discussion. + return 1 + fi + + if test -z "$ref"; then + ref=$(ref_from_hash "$hash") + fi + + if test -n "$ref"; then + # --depth option is ignored on http repository. + clean_git fetch ${builder:+--progress} --depth 1 origin +"$ref" || return 1 + clean_git checkout -b "$branchName" FETCH_HEAD || return 1 + else + return 1 + fi +} + +# Update submodules +init_submodules(){ + clean_git submodule update --init --recursive -j ${NIX_BUILD_CORES:-1} +} + +clone(){ + local top=$PWD + local dir="$1" + local url="$2" + local hash="$3" + local ref="$4" + + cd "$dir" + + # Initialize the repository. + init_remote "$url" + + # Download data from the repository. + checkout_ref "$hash" "$ref" || + checkout_hash "$hash" "$ref" || ( + echo 1>&2 "Unable to checkout $hash$ref from $url." + exit 1 + ) + + # Checkout linked sources. + if test -n "$fetchSubmodules"; then + init_submodules + fi + + if [ -z "$builder" ] && [ -f .topdeps ]; then + if tg help &>/dev/null; then + echo "populating TopGit branches..." + tg remote --populate origin + else + echo "WARNING: would populate TopGit branches but TopGit is not available" >&2 + echo "WARNING: install TopGit to fix the problem" >&2 + fi + fi + + cd "$top" +} + +# Remove all remote branches, remove tags not reachable from HEAD, do a full +# repack and then garbage collect unreferenced objects. +make_deterministic_repo(){ + local repo="$1" + + # run in sub-shell to not touch current working directory + ( + cd "$repo" + # Remove files that contain timestamps or otherwise have non-deterministic + # properties. + if [ -f .git ]; then + local dotgit_content=$(<.git) + local dotgit_dir="${dotgit_content#gitdir: }" + else + local dotgit_dir=".git" + fi + pushd "$dotgit_dir" + rm -rf logs/ hooks/ index FETCH_HEAD ORIG_HEAD refs/remotes/origin/HEAD config + popd + # Remove all remote branches. + git branch -r | while read -r branch; do + clean_git branch -rD "$branch" + done + + # Remove tags not reachable from HEAD. If we're exactly on a tag, don't + # delete it. 
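+        # (For instance, if HEAD happens to carry the tag v1.0, then
+        # `git tag --points-at HEAD` prints "v1.0" and that tag survives;
+        # every other tag is deleted below. The tag name is illustrative.)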
+ maybe_tag=$(git tag --points-at HEAD) + git tag --contains HEAD | while read -r tag; do + if [ "$tag" != "$maybe_tag" ]; then + clean_git tag -d "$tag" + fi + done + + # Do a full repack. Must run single-threaded, or else we lose determinism. + clean_git config pack.threads 1 + clean_git repack -A -d -f + rm -f "$dotgit_dir/config" + + # Garbage collect unreferenced objects. + # Note: --keep-largest-pack prevents non-deterministic ordering of packs + # listed in .git/objects/info/packs by only using a single pack + clean_git gc --prune=all --keep-largest-pack + ) +} + + +clone_user_rev() { + local dir="$1" + local url="$2" + local rev="${3:-HEAD}" + + if [ -n "$fetchLFS" ]; then + clean_git lfs install + fi + + # Perform the checkout. + case "$rev" in + HEAD|refs/*) + clone "$dir" "$url" "" "$rev" 1>&2;; + *) + if test -z "$(echo "$rev" | tr -d 0123456789abcdef)"; then + clone "$dir" "$url" "$rev" "" 1>&2 + else + # if revision is not hexadecimal it might be a tag + clone "$dir" "$url" "" "refs/tags/$rev" 1>&2 + fi;; + esac + + pushd "$dir" >/dev/null + fullRev=$( (git rev-parse "$rev" 2>/dev/null || git rev-parse "refs/heads/$branchName") | tail -n1) + humanReadableRev=$(git describe "$fullRev" 2> /dev/null || git describe --tags "$fullRev" 2> /dev/null || echo -- none --) + commitDate=$(git show -1 --no-patch --pretty=%ci "$fullRev") + commitDateStrict8601=$(git show -1 --no-patch --pretty=%cI "$fullRev") + popd >/dev/null + + # Allow doing additional processing before .git removal + eval "$NIX_PREFETCH_GIT_CHECKOUT_HOOK" + if test -z "$leaveDotGit"; then + echo "removing \`.git'..." >&2 + find "$dir" -name .git -print0 | xargs -0 rm -rf + else + find "$dir" -name .git | while read -r gitdir; do + make_deterministic_repo "$(readlink -f "$(dirname "$gitdir")")" + done + fi +} + +exit_handlers=() + +run_exit_handlers() { + exit_status=$? + for handler in "${exit_handlers[@]}"; do + eval "$handler $exit_status" + done +} + +trap run_exit_handlers EXIT + +quiet_exit_handler() { + exec 2>&3 3>&- + if [ $1 -ne 0 ]; then + cat "$errfile" >&2 + fi + rm -f "$errfile" +} + +quiet_mode() { + errfile="$(mktemp "${TMPDIR:-/tmp}/git-checkout-err-XXXXXXXX")" + exit_handlers+=(quiet_exit_handler) + exec 3>&2 2>"$errfile" +} + +json_escape() { + local s="$1" + s="${s//\\/\\\\}" # \ + s="${s//\"/\\\"}" # " + s="${s//^H/\\\b}" # \b (backspace) + s="${s//^L/\\\f}" # \f (form feed) + s="${s// +/\\\n}" # \n (newline) + s="${s//^M/\\\r}" # \r (carriage return) + s="${s// /\\t}" # \t (tab) + echo "$s" +} + +print_results() { + hash="$1" + if ! 
test -n "$QUIET"; then + echo "" >&2 + echo "git revision is $fullRev" >&2 + if test -n "$finalPath"; then + echo "path is $finalPath" >&2 + fi + echo "git human-readable version is $humanReadableRev" >&2 + echo "Commit date is $commitDate" >&2 + if test -n "$hash"; then + echo "hash is $hash" >&2 + fi + fi + if test -n "$hash"; then + cat <<EOF +{ + "url": "$(json_escape "$url")", + "rev": "$(json_escape "$fullRev")", + "date": "$(json_escape "$commitDateStrict8601")", + "path": "$(json_escape "$finalPath")", + "$(json_escape "$hashType")": "$(json_escape "$hash")", + "hash": "$(nix-hash --to-sri --type $hashType $hash)", + "fetchLFS": $([[ -n "$fetchLFS" ]] && echo true || echo false), + "fetchSubmodules": $([[ -n "$fetchSubmodules" ]] && echo true || echo false), + "deepClone": $([[ -n "$deepClone" ]] && echo true || echo false), + "leaveDotGit": $([[ -n "$leaveDotGit" ]] && echo true || echo false) +} +EOF + fi +} + +remove_tmpPath() { + rm -rf "$tmpPath" +} + +remove_tmpHomePath() { + rm -rf "$tmpHomePath" +} + +if test -n "$QUIET"; then + quiet_mode +fi + +if test -z "$branchName"; then + branchName=fetchgit +fi + +tmpHomePath="$(mktemp -d "${TMPDIR:-/tmp}/nix-prefetch-git-tmp-home-XXXXXXXXXX")" +exit_handlers+=(remove_tmpHomePath) +HOME="$tmpHomePath" +ln -s "$NIX_BUILD_TOP/.netrc" "$HOME/" +unset XDG_CONFIG_HOME +export GIT_CONFIG_NOSYSTEM=1 + +if test -n "$builder"; then + test -n "$out" -a -n "$url" -a -n "$rev" || usage + mkdir -p "$out" + clone_user_rev "$out" "$url" "$rev" +else + if test -z "$hashType"; then + hashType=sha256 + fi + + # If the hash was given, a file with that hash may already be in the + # store. + if test -n "$expHash"; then + finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" "$(url_to_name "$url" "$rev")") + if ! nix-store --check-validity "$finalPath" 2> /dev/null; then + finalPath= + fi + hash=$expHash + fi + + # If we don't know the hash or a path with that hash doesn't exist, + # download the file and add it to the store. + if test -z "$finalPath"; then + + tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/git-checkout-tmp-XXXXXXXX")" + exit_handlers+=(remove_tmpPath) + + tmpFile="$tmpPath/$(url_to_name "$url" "$rev")" + mkdir -p "$tmpFile" + + # Perform the checkout. + clone_user_rev "$tmpFile" "$url" "$rev" + + # Compute the hash. + hash=$(nix-hash --type $hashType --base32 "$tmpFile") + + # Add the downloaded file to the Nix store. + finalPath=$(nix-store --add-fixed --recursive "$hashType" "$tmpFile") + + if test -n "$expHash" -a "$expHash" != "$hash"; then + echo "hash mismatch for URL \`$url'. Got \`$hash'; expected \`$expHash'." >&2 + exit 1 + fi + fi + + print_results "$hash" + + if test -n "$PRINT_PATH"; then + echo "$finalPath" + fi +fi diff --git a/nixpkgs/pkgs/build-support/fetchgit/tests.nix b/nixpkgs/pkgs/build-support/fetchgit/tests.nix new file mode 100644 index 000000000000..a18be65327b5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgit/tests.nix @@ -0,0 +1,33 @@ +{ testers, fetchgit, ... 
}: + +{ + simple = testers.invalidateFetcherByDrvHash fetchgit { + name = "nix-source"; + url = "https://github.com/NixOS/nix"; + rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a"; + sha256 = "sha256-7DszvbCNTjpzGRmpIVAWXk20P0/XTrWZ79KSOGLrUWY="; + }; + + sparseCheckout = testers.invalidateFetcherByDrvHash fetchgit { + name = "nix-source"; + url = "https://github.com/NixOS/nix"; + rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a"; + sparseCheckout = [ + "src" + "tests" + ]; + sha256 = "sha256-g1PHGTWgAcd/+sXHo1o6AjVWCvC6HiocOfMbMh873LQ="; + }; + + sparseCheckoutNonConeMode = testers.invalidateFetcherByDrvHash fetchgit { + name = "nix-source"; + url = "https://github.com/NixOS/nix"; + rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a"; + sparseCheckout = [ + "src" + "tests" + ]; + nonConeMode = true; + sha256 = "sha256-FknO6C/PSnMPfhUqObD4vsW4PhkwdmPa9blNzcNvJQ4="; + }; +} diff --git a/nixpkgs/pkgs/build-support/fetchgitea/default.nix b/nixpkgs/pkgs/build-support/fetchgitea/default.nix new file mode 100644 index 000000000000..513ceba861cb --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgitea/default.nix @@ -0,0 +1,9 @@ +# Gitea's URLs are compatible with GitHub + +{ lib, fetchFromGitHub }: + +lib.makeOverridable ( +{ domain, ... }@args: + +fetchFromGitHub ((removeAttrs args [ "domain" ]) // { githubBase = domain; }) +) diff --git a/nixpkgs/pkgs/build-support/fetchgithub/default.nix b/nixpkgs/pkgs/build-support/fetchgithub/default.nix new file mode 100644 index 000000000000..a2498700b545 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgithub/default.nix @@ -0,0 +1,64 @@ +{ lib, fetchgit, fetchzip }: + +lib.makeOverridable ( +{ owner, repo, rev, name ? "source" +, fetchSubmodules ? false, leaveDotGit ? null +, deepClone ? false, private ? false, forceFetchGit ? false +, sparseCheckout ? [] +, githubBase ? "github.com", varPrefix ? null +, meta ? { } +, ... # For hash agility +}@args: + +let + + position = (if args.meta.description or null != null + then builtins.unsafeGetAttrPos "description" args.meta + else builtins.unsafeGetAttrPos "rev" args + ); + baseUrl = "https://${githubBase}/${owner}/${repo}"; + newMeta = meta // { + homepage = meta.homepage or baseUrl; + + # to indicate where derivation originates, similar to make-derivation.nix's mkDerivation + position = "${position.file}:${toString position.line}"; + }; + passthruAttrs = removeAttrs args [ "owner" "repo" "rev" "fetchSubmodules" "forceFetchGit" "private" "githubBase" "varPrefix" ]; + varBase = "NIX${lib.optionalString (varPrefix != null) "_${varPrefix}"}_GITHUB_PRIVATE_"; + useFetchGit = fetchSubmodules || (leaveDotGit == true) || deepClone || forceFetchGit || (sparseCheckout != []); + # We prefer fetchzip in cases we don't need submodules as the hash + # is more stable in that case. + fetcher = if useFetchGit then fetchgit else fetchzip; + privateAttrs = lib.optionalAttrs private { + netrcPhase = '' + if [ -z "''$${varBase}USERNAME" -o -z "''$${varBase}PASSWORD" ]; then + echo "Error: Private fetchFromGitHub requires the nix building process (nix-daemon in multi user mode) to have the ${varBase}USERNAME and ${varBase}PASSWORD env vars set." 
>&2 + exit 1 + fi + cat > netrc <<EOF + machine ${githubBase} + login ''$${varBase}USERNAME + password ''$${varBase}PASSWORD + EOF + ''; + netrcImpureEnvVars = [ "${varBase}USERNAME" "${varBase}PASSWORD" ]; + }; + + gitRepoUrl = "${baseUrl}.git"; + + fetcherArgs = (if useFetchGit + then { + inherit rev deepClone fetchSubmodules sparseCheckout; url = gitRepoUrl; + } // lib.optionalAttrs (leaveDotGit != null) { inherit leaveDotGit; } + else { + url = "${baseUrl}/archive/${rev}.tar.gz"; + + passthru = { + inherit gitRepoUrl; + }; + } + ) // privateAttrs // passthruAttrs // { inherit name; }; +in + +fetcher fetcherArgs // { meta = newMeta; inherit rev owner repo; } +) diff --git a/nixpkgs/pkgs/build-support/fetchgitiles/default.nix b/nixpkgs/pkgs/build-support/fetchgitiles/default.nix new file mode 100644 index 000000000000..be81c6e8a4c2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgitiles/default.nix @@ -0,0 +1,12 @@ +{ fetchzip, lib }: + +lib.makeOverridable ( +{ url, rev, name ? "source", ... } @ args: + +fetchzip ({ + inherit name; + url = "${url}/+archive/${rev}.tar.gz"; + stripRoot = false; + meta.homepage = url; +} // removeAttrs args [ "url" "rev" ]) // { inherit rev; } +) diff --git a/nixpkgs/pkgs/build-support/fetchgitlab/default.nix b/nixpkgs/pkgs/build-support/fetchgitlab/default.nix new file mode 100644 index 000000000000..146845e06a71 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgitlab/default.nix @@ -0,0 +1,34 @@ +{ fetchgit, fetchzip, lib }: + +lib.makeOverridable ( +# gitlab example +{ owner, repo, rev, protocol ? "https", domain ? "gitlab.com", name ? "source", group ? null +, fetchSubmodules ? false, leaveDotGit ? false, deepClone ? false +, ... # For hash agility +} @ args: + +let + slug = lib.concatStringsSep "/" ((lib.optional (group != null) group) ++ [ owner repo ]); + escapedSlug = lib.replaceStrings [ "." "/" ] [ "%2E" "%2F" ] slug; + escapedRev = lib.replaceStrings [ "+" "%" "/" ] [ "%2B" "%25" "%2F" ] rev; + passthruAttrs = removeAttrs args [ "protocol" "domain" "owner" "group" "repo" "rev" "fetchSubmodules" "leaveDotGit" "deepClone" ]; + + useFetchGit = deepClone || fetchSubmodules || leaveDotGit; + fetcher = if useFetchGit then fetchgit else fetchzip; + + gitRepoUrl = "${protocol}://${domain}/${slug}.git"; + + fetcherArgs = (if useFetchGit then { + inherit rev deepClone fetchSubmodules leaveDotGit; + url = gitRepoUrl; + } else { + url = "${protocol}://${domain}/api/v4/projects/${escapedSlug}/repository/archive.tar.gz?sha=${escapedRev}"; + + passthru = { + inherit gitRepoUrl; + }; + }) // passthruAttrs // { inherit name; }; +in + +fetcher fetcherArgs // { meta.homepage = "${protocol}://${domain}/${slug}/"; inherit rev; } +) diff --git a/nixpkgs/pkgs/build-support/fetchgitlocal/default.nix b/nixpkgs/pkgs/build-support/fetchgitlocal/default.nix new file mode 100644 index 000000000000..02a77b25ae06 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgitlocal/default.nix @@ -0,0 +1,44 @@ +{ runCommand, git, lib }: + +lib.makeOverridable ( +src: + +let + srcStr = toString src; + + # Adds the current directory (respecting ignored files) to the git store, and returns the hash + gitHashFile = runCommand "put-in-git" { + nativeBuildInputs = [ git ]; + dummy = builtins.currentTime; # impure, do every time + preferLocalBuild = true; + } '' + cd ${srcStr} + DOT_GIT=$(git rev-parse --resolve-git-dir .git) # path to repo + + cp $DOT_GIT/index $DOT_GIT/index-user # backup index + git reset # reset index + git add . 
# add current directory + + # hash of current directory + # remove trailing newline + git rev-parse $(git write-tree) \ + | tr -d '\n' > $out + + mv $DOT_GIT/index-user $DOT_GIT/index # restore index + ''; + + gitHash = builtins.readFile gitHashFile; # cache against git hash + + nixPath = runCommand "put-in-nix" { + nativeBuildInputs = [ git ]; + preferLocalBuild = true; + } '' + mkdir $out + + # dump tar of *current directory* at given revision + git -C ${srcStr} archive --format=tar ${gitHash} \ + | tar xf - -C $out + ''; + +in nixPath +) diff --git a/nixpkgs/pkgs/build-support/fetchgx/default.nix b/nixpkgs/pkgs/build-support/fetchgx/default.nix new file mode 100644 index 000000000000..93f60c0a9cac --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchgx/default.nix @@ -0,0 +1,30 @@ +{ stdenvNoCC, gx, gx-go, go, cacert }: + +{ name, src, sha256 }: + +stdenvNoCC.mkDerivation { + name = "${name}-gxdeps"; + inherit src; + + nativeBuildInputs = [ cacert go gx gx-go ]; + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sha256; + + dontConfigure = true; + doCheck = false; + doInstallCheck = false; + + buildPhase = '' + export GOPATH=$(pwd)/vendor + mkdir -p vendor + gx install + ''; + + installPhase = '' + mv vendor $out + ''; + + preferLocalBuild = true; +} diff --git a/nixpkgs/pkgs/build-support/fetchhg/builder.sh b/nixpkgs/pkgs/build-support/fetchhg/builder.sh new file mode 100644 index 000000000000..1ce294757713 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchhg/builder.sh @@ -0,0 +1,8 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup +echo "getting $url${rev:+ ($rev)} into $out" + +hg clone --insecure "$url" hg-clone + +hg archive -q$subrepoClause -y ${rev:+-r "$rev"} --cwd hg-clone $out +rm -f $out/.hg_archival.txt diff --git a/nixpkgs/pkgs/build-support/fetchhg/default.nix b/nixpkgs/pkgs/build-support/fetchhg/default.nix new file mode 100644 index 000000000000..6af886bf0934 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchhg/default.nix @@ -0,0 +1,34 @@ +{ lib, stdenvNoCC, mercurial }: +{ name ? null +, url +, rev ? null +, sha256 ? null +, hash ? null +, fetchSubrepos ? false +, preferLocalBuild ? true }: + +if hash != null && sha256 != null then + throw "Only one of sha256 or hash can be set" +else +# TODO: statically check if mercurial as the https support if the url starts woth https. +stdenvNoCC.mkDerivation { + name = "hg-archive" + (lib.optionalString (name != null) "-${name}"); + builder = ./builder.sh; + nativeBuildInputs = [mercurial]; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars; + + subrepoClause = lib.optionalString fetchSubrepos "S"; + + outputHashAlgo = if hash != null then null else "sha256"; + outputHashMode = "recursive"; + outputHash = if hash != null then + hash + else if sha256 != null then + sha256 + else + lib.fakeSha256; + + inherit url rev; + inherit preferLocalBuild; +} diff --git a/nixpkgs/pkgs/build-support/fetchhg/nix-prefetch-hg b/nixpkgs/pkgs/build-support/fetchhg/nix-prefetch-hg new file mode 100755 index 000000000000..94c6b1ec6945 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchhg/nix-prefetch-hg @@ -0,0 +1,83 @@ +#! 
/usr/bin/env bash +set -e + +url=$1 +rev=$2 +expHash=$3 + +hashType="${NIX_HASH_ALGO:-sha256}" +hashFormat=${hashFormat:-"--base32"} +rev="${rev:-tip}" + +LOG() { + echo "$@" >&2 +} + +die() { + LOG "$@" + exit 1 +} + +if [[ -z "$url" || "$url" == "--help" ]]; then + die "Usage: nix-prefetch-hg URL [rev [EXPECTED-HASH]]" +fi + +if [[ "${fetchSubrepos:-0}" == 1 ]]; then + subrepoClause=S +else + subrepoClause= +fi + +# If the hash was given, a file with that hash may already be in the +# store. +if [[ -n "$expHash" ]]; then + finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" hg-archive) + if ! nix-store --check-validity "$finalPath" 2> /dev/null; then + finalPath= + fi + hash="$expHash" +fi + + +# If we don't know the hash or a path with that hash doesn't exist, +# download the file and add it to the store. +if [[ -z "$finalPath" ]]; then + + tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/hg-checkout-tmp-XXXXXXXX")" + cleanup() { x=$?; rm -rf "$tmpPath"; exit $x; }; trap cleanup EXIT + + tmpArchive="$tmpPath/hg-archive" + + # Perform the checkout. + if [[ "$url" != /* ]]; then + tmpClone="$tmpPath/hg-clone" + hg clone -q -y -U "$url" "$tmpClone" >&2 + else + tmpClone=$url + fi + hg archive -q$subrepoClause -y -r "$rev" --cwd "$tmpClone" "$tmpArchive" + rm -f "$tmpArchive/.hg_archival.txt" + + LOG "hg revision is $(cd "$tmpClone"; hg id -r "$rev" -i)" + + # Compute the hash. + hash=$(nix-hash --type "$hashType" "$hashFormat" "$tmpArchive") + if [[ -z "$QUIET" ]]; then LOG "hash is $hash"; fi + + # Add the downloaded file to the Nix store. + finalPath=$(nix-store --add-fixed --recursive "$hashType" "$tmpArchive") + + if [[ -n "$expHash" && "$expHash" != "$hash" ]]; then + die "ERROR: hash mismatch for URL \`$url'" + fi + + +fi + +if [[ -z "$QUIET" ]]; then LOG "path is $finalPath"; fi + +echo "$hash" + +if [[ -n "$PRINT_PATH" ]]; then + echo "$finalPath" +fi diff --git a/nixpkgs/pkgs/build-support/fetchipfs/builder.sh b/nixpkgs/pkgs/build-support/fetchipfs/builder.sh new file mode 100644 index 000000000000..ca77962b5384 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchipfs/builder.sh @@ -0,0 +1,88 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup + +# Curl flags to handle redirects, not use EPSV, handle cookies for +# servers to need them during redirects, and work on SSL without a +# certificate (this isn't a security problem because we check the +# cryptographic hash of the output anyway). 
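+# Extra flags can be threaded through from the Nix side via the curlOpts
+# argument of fetchipfs (for example, curlOpts = "--verbose"; the flag is
+# illustrative), which ends up in $curlOpts below.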
+ +set -o noglob + +curl="curl \ + --location \ + --max-redirs 20 \ + --retry 2 \ + --disable-epsv \ + --cookie-jar cookies \ + --insecure \ + --speed-time 5 \ + -# \ + --fail \ + $curlOpts \ + $NIX_CURL_FLAGS" + +finish() { + runHook postFetch + set +o noglob + exit 0 +} + +ipfs_add() { + if curl --retry 0 --head --silent "localhost:5001" > /dev/null; then + echo "[0m[01;36m=IPFS=[0m add $ipfs" + tar --owner=root --group=root -cWf "source.tar" $(echo *) + res=$(curl -# -F "file=@source.tar" "localhost:5001/api/v0/tar/add" | sed 's/.*"Hash":"\(.*\)".*/\1/') + if [ $ipfs != $res ]; then + echo "\`ipfs tar add' results in $res when $ipfs is expected" + exit 1 + fi + rm "source.tar" + fi +} + +echo + +mkdir download +cd download + +if curl --retry 0 --head --silent "localhost:5001" > /dev/null; then + curlexit=18; + echo "[0m[01;36m=IPFS=[0m get $ipfs" + # if we get error code 18, resume partial download + while [ $curlexit -eq 18 ]; do + # keep this inside an if statement, since on failure it doesn't abort the script + if $curl -C - "http://localhost:5001/api/v0/tar/cat?arg=$ipfs" --output "$ipfs.tar"; then + unpackFile "$ipfs.tar" + rm "$ipfs.tar" + set +o noglob + mv $(echo *) "$out" + finish + else + curlexit=$?; + fi + done +fi + +if test -n "$url"; then + curlexit=18; + echo "Downloading $url" + while [ $curlexit -eq 18 ]; do + # keep this inside an if statement, since on failure it doesn't abort the script + if $curl "$url" -O; then + set +o noglob + tmpfile=$(echo *) + unpackFile $tmpfile + rm $tmpfile + ipfs_add + mv $(echo *) "$out" + finish + else + curlexit=$?; + fi + done +fi + +echo "[01;31merror:[0m cannot download $ipfs from ipfs or the given url" +echo +set +o noglob +exit 1 diff --git a/nixpkgs/pkgs/build-support/fetchipfs/default.nix b/nixpkgs/pkgs/build-support/fetchipfs/default.nix new file mode 100644 index 000000000000..0cbb094d6003 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchipfs/default.nix @@ -0,0 +1,50 @@ +{ stdenv +, curl +}: + +{ ipfs +, url ? "" +, curlOpts ? "" +, outputHash ? "" +, outputHashAlgo ? "" +, md5 ? "" +, sha1 ? "" +, sha256 ? "" +, sha512 ? "" +, meta ? {} +, port ? "8080" +, postFetch ? "" +, preferLocalBuild ? true +}: + +let + + hasHash = (outputHash != "" && outputHashAlgo != "") + || md5 != "" || sha1 != "" || sha256 != "" || sha512 != ""; + +in + +if (!hasHash) then throw "Specify sha for fetchipfs fixed-output derivation" else stdenv.mkDerivation { + name = ipfs; + builder = ./builder.sh; + nativeBuildInputs = [ curl ]; + + # New-style output content requirements. + outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else + if sha512 != "" then "sha512" else if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5"; + outputHash = if outputHash != "" then outputHash else + if sha512 != "" then sha512 else if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5; + + outputHashMode = "recursive"; + + inherit curlOpts + postFetch + ipfs + url + port + meta; + + # Doing the download on a remote machine just duplicates network + # traffic, so don't do that. 
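+  # A hypothetical call of this fetcher (the CID and hash below are
+  # placeholders, not real values):
+  #
+  #   fetchipfs {
+  #     ipfs = "QmXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
+  #     sha256 = "0000000000000000000000000000000000000000000000000000";
+  #   }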
+ inherit preferLocalBuild; +} diff --git a/nixpkgs/pkgs/build-support/fetchmavenartifact/default.nix b/nixpkgs/pkgs/build-support/fetchmavenartifact/default.nix new file mode 100644 index 000000000000..0f3cd4e64dd6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchmavenartifact/default.nix @@ -0,0 +1,73 @@ +# Adaptation of the MIT-licensed work on `sbt2nix` done by Charles O'Farrell + +{ lib, fetchurl, stdenv }: +let + defaultRepos = [ + "https://repo1.maven.org/maven2" + "https://oss.sonatype.org/content/repositories/releases" + "https://oss.sonatype.org/content/repositories/public" + "https://repo.typesafe.com/typesafe/releases" + ]; +in + +args@ +{ # Example: "org.apache.httpcomponents" + groupId +, # Example: "httpclient" + artifactId +, # Example: "4.3.6" + version +, # Example: "jdk11" + classifier ? null +, # List of maven repositories from where to fetch the artifact. + # Example: [ http://oss.sonatype.org/content/repositories/public ]. + repos ? defaultRepos + # The `url` and `urls` parameters, if specified should point to the JAR + # file and will take precedence over the `repos` parameter. Only one of `url` + # and `urls` can be specified, not both. +, url ? "" +, urls ? [] +, # The rest of the arguments are just forwarded to `fetchurl`. + ... +}: + +# only one of url and urls can be specified at a time. +assert (url == "") || (urls == []); +# if repos is empty, then url or urls must be specified. +assert (repos != []) || (url != "") || (urls != []); + +let + pname = (lib.replaceStrings [ "." ] [ "_" ] groupId) + "_" + (lib.replaceStrings [ "." ] [ "_" ] artifactId); + suffix = lib.optionalString (classifier != null) "-${classifier}"; + filename = "${artifactId}-${version}${suffix}.jar"; + mkJarUrl = repoUrl: + lib.concatStringsSep "/" [ + (lib.removeSuffix "/" repoUrl) + (lib.replaceStrings ["."] ["/"] groupId) + artifactId + version + filename + ]; + urls_ = + if url != "" then [url] + else if urls != [] then urls + else map mkJarUrl repos; + jar = + fetchurl ( + builtins.removeAttrs args [ "groupId" "artifactId" "version" "classifier" "repos" "url" ] + // { urls = urls_; name = "${pname}-${version}.jar"; } + ); +in + stdenv.mkDerivation { + inherit pname version; + dontUnpack = true; + # By moving the jar to $out/share/java we make it discoverable by java + # packages packages that mention this derivation in their buildInputs. + installPhase = '' + mkdir -p $out/share/java + ln -s ${jar} $out/share/java/${filename} + ''; + # We also add a `jar` attribute that can be used to easily obtain the path + # to the downloaded jar file. + passthru.jar = jar; + } diff --git a/nixpkgs/pkgs/build-support/fetchmtn/builder.sh b/nixpkgs/pkgs/build-support/fetchmtn/builder.sh new file mode 100644 index 000000000000..1aabd7949ee1 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchmtn/builder.sh @@ -0,0 +1,46 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup + +set -x + +if ! 
[ -f "$cacheDB" ]; then + echo "Creating cache DB $cacheDB" + mtn --db "$cacheDB" db init +fi + +echo "getting revision $selector"; + +done=; +for source in $dbs; do + if mtn pull --db "$cacheDB" "$source" "${branch}"; then + revision="$(mtn --db "$cacheDB" au toposort $(mtn --db "$cacheDB" au select "$selector") | tail -1)"; + if [ -n "$revision" ]; then + if mtn --db "$cacheDB" au get_revision "$revision"; then + echo "found revision $revision" + done=1; + else + echo "revision $revision does not exist"; + fi + else + echo "selector $selector does not match any revision"; + fi + else + echo "pulling branch $branch wasn't successful"; + fi; + if test -n "$done"; then + break; + fi; +done; + +echo "checking out the revision $revision"; + +if test -n "$done"; then + mtn checkout --db "$cacheDB" -r "$revision" "$out" -b "${branch}" +else + echo "Needed revision still not found. Exiting"; + exit 1; +fi; + +echo "clearing _MTN in the output" + +rm -rf "$out/_MTN" diff --git a/nixpkgs/pkgs/build-support/fetchmtn/default.nix b/nixpkgs/pkgs/build-support/fetchmtn/default.nix new file mode 100644 index 000000000000..4aa134242aa7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchmtn/default.nix @@ -0,0 +1,25 @@ +# You can specify some extra mirrors and a cache DB via options +{lib, stdenvNoCC, monotone, defaultDBMirrors ? [], cacheDB ? "./mtn-checkout.db"}: +# dbs is a list of strings +# each is an url for sync + +# selector is mtn selector, like h:org.example.branch +# +{name ? "mtn-checkout", dbs ? [], sha256 +, selector ? "h:" + branch, branch}: + +stdenvNoCC.mkDerivation { + builder = ./builder.sh; + nativeBuildInputs = [monotone]; + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sha256; + + dbs = defaultDBMirrors ++ dbs; + inherit branch cacheDB name selector; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars; + +} + diff --git a/nixpkgs/pkgs/build-support/fetchnextcloudapp/default.nix b/nixpkgs/pkgs/build-support/fetchnextcloudapp/default.nix new file mode 100644 index 000000000000..0ec64bbe1df6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchnextcloudapp/default.nix @@ -0,0 +1,35 @@ +{ stdenv, fetchzip, applyPatches, lib, ... }: +{ url +, sha256 +, appName ? null +, appVersion ? null +, license +, patches ? [ ] +, description ? null +, homepage ? null +}: +applyPatches ({ + inherit patches; + src = fetchzip { + inherit url sha256; + postFetch = '' + pushd $out &>/dev/null + if [ ! -f ./appinfo/info.xml ]; then + echo "appinfo/info.xml doesn't exist in $out, aborting!" + exit 1 + fi + popd &>/dev/null + ''; + meta = { + license = lib.licenses.${license}; + longDescription = description; + inherit homepage; + } // lib.optionalAttrs (description != null) { + longDescription = description; + } // lib.optionalAttrs (homepage != null) { + inherit homepage; + }; + }; +} // lib.optionalAttrs (appName != null && appVersion != null) { + name = "nextcloud-app-${appName}-${appVersion}"; +}) diff --git a/nixpkgs/pkgs/build-support/fetchpatch/default.nix b/nixpkgs/pkgs/build-support/fetchpatch/default.nix new file mode 100644 index 000000000000..a3ca6685147d --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchpatch/default.nix @@ -0,0 +1,93 @@ +# This function downloads and normalizes a patch/diff file. +# This is primarily useful for dynamically generated patches, +# such as GitHub's or cgit's, where the non-significant content parts +# often change with updating of git or cgit. +# stripLen acts as the -p parameter when applying a patch. 
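# Illustrative usage sketch (not part of the original file): the URL and
# hash below are placeholders, but the attribute names correspond to the
# parameters documented above.
#
#   patches = [
#     (fetchpatch {
#       url = "https://github.com/some-owner/some-repo/commit/0123456789abcdef.patch";  # hypothetical
#       hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";                   # placeholder
#       stripLen = 1;                      # acts like patch's -p, as noted above
#       extraPrefix = "third_party/foo/";  # re-root the patch inside the source tree
#     })
#   ];
#
# Because the fetched patch is normalized, a regenerated GitHub/cgit patch
# keeps the same output hash as long as its significant content is
# unchanged.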
+ +{ lib, fetchurl, patchutils }: + +{ relative ? null +, stripLen ? 0 +, decode ? "cat" # custom command to decode patch e.g. base64 -d +, extraPrefix ? null +, excludes ? [] +, includes ? [] +, revert ? false +, postFetch ? "" +, ... +}@args: +let + args' = if relative != null then { + stripLen = 1 + lib.length (lib.splitString "/" relative) + stripLen; + extraPrefix = lib.optionalString (extraPrefix != null) extraPrefix; + } else { + inherit stripLen extraPrefix; + }; +in let + inherit (args') stripLen extraPrefix; +in +lib.throwIfNot (excludes == [] || includes == []) + "fetchpatch: cannot use excludes and includes simultaneously" +fetchurl ({ + postFetch = '' + tmpfile="$TMPDIR/patch" + + if [ ! -s "$out" ]; then + echo "error: Fetched patch file '$out' is empty!" 1>&2 + exit 1 + fi + + set +e + ${decode} < "$out" > "$tmpfile" + if [ $? -ne 0 ] || [ ! -s "$tmpfile" ]; then + echo 'Failed to decode patch with command "'${lib.escapeShellArg decode}'"' >&2 + echo 'Fetched file was (limited to 128 bytes):' >&2 + od -A x -t x1z -v -N 128 "$out" >&2 + exit 1 + fi + set -e + mv "$tmpfile" "$out" + + "${patchutils}/bin/lsdiff" \ + ${lib.optionalString (relative != null) "-p1 -i ${lib.escapeShellArg relative}/'*'"} \ + "$out" \ + | sort -u | sed -e 's/[*?]/\\&/g' \ + | xargs -I{} \ + "${patchutils}/bin/filterdiff" \ + --include={} \ + --strip=${toString stripLen} \ + ${lib.optionalString (extraPrefix != null) '' + --addoldprefix=a/${lib.escapeShellArg extraPrefix} \ + --addnewprefix=b/${lib.escapeShellArg extraPrefix} \ + ''} \ + --clean "$out" > "$tmpfile" + + if [ ! -s "$tmpfile" ]; then + echo "error: Normalized patch '$tmpfile' is empty (while the fetched file was not)!" 1>&2 + echo "Did you maybe fetch a HTML representation of a patch instead of a raw patch?" 1>&2 + echo "Fetched file was:" 1>&2 + cat "$out" 1>&2 + exit 1 + fi + + ${patchutils}/bin/filterdiff \ + -p1 \ + ${builtins.toString (builtins.map (x: "-x ${lib.escapeShellArg x}") excludes)} \ + ${builtins.toString (builtins.map (x: "-i ${lib.escapeShellArg x}") includes)} \ + "$tmpfile" > "$out" + + if [ ! -s "$out" ]; then + echo "error: Filtered patch '$out' is empty (while the original patch file was not)!" 1>&2 + echo "Check your includes and excludes." 1>&2 + echo "Normalized patch file was:" 1>&2 + cat "$tmpfile" 1>&2 + exit 1 + fi + '' + lib.optionalString revert '' + ${patchutils}/bin/interdiff "$out" /dev/null > "$tmpfile" + mv "$tmpfile" "$out" + '' + postFetch; +} // builtins.removeAttrs args [ + "relative" "stripLen" "decode" "extraPrefix" "excludes" "includes" "revert" + "postFetch" +]) diff --git a/nixpkgs/pkgs/build-support/fetchpatch/tests.nix b/nixpkgs/pkgs/build-support/fetchpatch/tests.nix new file mode 100644 index 000000000000..0a27f1bc70e7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchpatch/tests.nix @@ -0,0 +1,35 @@ +{ testers, fetchpatch, ... 
}: + +let + isFetchpatch2 = fetchpatch.version == 2; +in + +{ + simple = testers.invalidateFetcherByDrvHash fetchpatch { + url = "https://github.com/facebook/zstd/pull/2724/commits/e1f85dbca3a0ed5ef06c8396912a0914db8dea6a.patch"; + sha256 = if isFetchpatch2 then "sha256-01BrkHLye4KOdqCw3tv7AJzIF6578pl2fl270TJFTmw=" else "sha256-PuYAqnJWAE+L9bsroOnnBGJhERW8LHrGSLtIEkKU9vg="; + }; + + relative = testers.invalidateFetcherByDrvHash fetchpatch { + url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch"; + relative = "include"; + sha256 = if isFetchpatch2 then "sha256-1TtmuKeNIl/Yp+sfzBMR8Ue78tPIgjqGgjasa5IN52o=" else "sha256-KlmIbixcds6GyKYt1fx5BxDIrU7msrgDdYo9Va/KJR4="; + }; + + full = testers.invalidateFetcherByDrvHash fetchpatch { + url = "https://github.com/boostorg/math/commit/7d482f6ebc356e6ec455ccb5f51a23971bf6ce5b.patch"; + relative = "test"; + stripLen = 1; + extraPrefix = "foo/bar/"; + excludes = [ "foo/bar/bernoulli_no_atomic_mp.cpp" ]; + revert = true; + sha256 = if isFetchpatch2 then "sha256-+UKmEbr2rIAweCav/hR/7d4ZrYV84ht/domTrHtm8sM=" else "sha256-+UKmEbr2rIAweCav/hR/7d4ZrYV84ht/domTrHtm8sM="; + }; + + decode = testers.invalidateFetcherByDrvHash fetchpatch { + name = "gcc.patch"; + url = "https://chromium.googlesource.com/aosp/platform/external/libchrome/+/f37ae3b1a873d74182a2ac31d96742ead9c1f523^!?format=TEXT"; + decode = "base64 -d"; + sha256 = if isFetchpatch2 then "sha256-oMvPlmzE51ArI+EvFxONXkqmNee39106/O1ikG0Bdso=" else "sha256-SJHk8XrutqAyoIdORlhCpBCN626P+uzed7mjKz5eQYY="; + }; +} diff --git a/nixpkgs/pkgs/build-support/fetchpijul/default.nix b/nixpkgs/pkgs/build-support/fetchpijul/default.nix new file mode 100644 index 000000000000..ca7e1a7926e8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchpijul/default.nix @@ -0,0 +1,56 @@ +{ lib, stdenvNoCC, pijul }: + +lib.makeOverridable ( +{ url +, hash ? "" +, change ? null +, state ? null +, channel ? "main" +, name ? "fetchpijul" +, # TODO: Changes in pijul are unordered so there's many ways to end up with the same repository state. + # This makes leaveDotPijul unfeasible to implement until pijul CLI implements + # a way of reordering changes to sort them in a consistent and deterministic manner. + # leaveDotPijul ? false +}: +if change != null && state != null then + throw "Only one of 'change' or 'state' can be set" +else + stdenvNoCC.mkDerivation { + inherit name; + nativeBuildInputs = [ pijul ]; + + dontUnpack = true; + dontConfigure = true; + dontBuild = true; + + installPhase = '' + runHook preInstall + + pijul clone \ + ''${change:+--change "$change"} \ + ''${state:+--state "$state"} \ + --channel "$channel" \ + "$url" \ + "$out" + + runHook postInstall + ''; + + fixupPhase = '' + runHook preFixup + + rm -rf "$out/.pijul" + + runHook postFixup + ''; + + outputHashAlgo = if hash != "" then null else "sha256"; + outputHashMode = "recursive"; + outputHash = if hash != "" then + hash + else + lib.fakeSha256; + + inherit url change state channel; + } +) diff --git a/nixpkgs/pkgs/build-support/fetchpypi/default.nix b/nixpkgs/pkgs/build-support/fetchpypi/default.nix new file mode 100644 index 000000000000..ebd277cd2bdf --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchpypi/default.nix @@ -0,0 +1,28 @@ +# `fetchPypi` function for fetching artifacts from PyPI. +{ fetchurl +, makeOverridable +}: + +let + computeUrl = {format ? "setuptools", ... } @attrs: let + computeWheelUrl = {pname, version, dist ? "py2.py3", python ? "py2.py3", abi ? "none", platform ? "any"}: + # Fetch a wheel. 
By default we fetch an universal wheel. + # See https://www.python.org/dev/peps/pep-0427/#file-name-convention for details regarding the optional arguments. + "https://files.pythonhosted.org/packages/${dist}/${builtins.substring 0 1 pname}/${pname}/${pname}-${version}-${python}-${abi}-${platform}.whl"; + + computeSourceUrl = {pname, version, extension ? "tar.gz"}: + # Fetch a source tarball. + "mirror://pypi/${builtins.substring 0 1 pname}/${pname}/${pname}-${version}.${extension}"; + + compute = (if format == "wheel" then computeWheelUrl + else if format == "setuptools" then computeSourceUrl + else throw "Unsupported format ${format}"); + + in compute (builtins.removeAttrs attrs ["format"]); + +in makeOverridable( {format ? "setuptools", sha256 ? "", hash ? "", ... } @attrs: + let + url = computeUrl (builtins.removeAttrs attrs ["sha256" "hash"]) ; + in fetchurl { + inherit url sha256 hash; + }) diff --git a/nixpkgs/pkgs/build-support/fetchrepoorcz/default.nix b/nixpkgs/pkgs/build-support/fetchrepoorcz/default.nix new file mode 100644 index 000000000000..3ac7cace0dcf --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchrepoorcz/default.nix @@ -0,0 +1,10 @@ +{ fetchzip }: + +# gitweb example, snapshot support is optional in gitweb +{ repo, rev, name ? "source" +, ... # For hash agility +}@args: fetchzip ({ + inherit name; + url = "https://repo.or.cz/${repo}.git/snapshot/${rev}.tar.gz"; + meta.homepage = "https://repo.or.cz/${repo}.git/"; +} // removeAttrs args [ "repo" "rev" ]) // { inherit rev; } diff --git a/nixpkgs/pkgs/build-support/fetchrepoproject/default.nix b/nixpkgs/pkgs/build-support/fetchrepoproject/default.nix new file mode 100644 index 000000000000..78b8caeb8091 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchrepoproject/default.nix @@ -0,0 +1,78 @@ +{ lib, stdenvNoCC, gitRepo, cacert, copyPathsToStore }: + +{ name, manifest, rev ? "HEAD", sha256 +# Optional parameters: +, repoRepoURL ? "", repoRepoRev ? "", referenceDir ? "", manifestName ? "" +, localManifests ? [], createMirror ? false, useArchive ? false +}: + +assert repoRepoRev != "" -> repoRepoURL != ""; +assert createMirror -> !useArchive; + +with lib; + +let + extraRepoInitFlags = [ + (optionalString (repoRepoURL != "") "--repo-url=${repoRepoURL}") + (optionalString (repoRepoRev != "") "--repo-branch=${repoRepoRev}") + (optionalString (referenceDir != "") "--reference=${referenceDir}") + (optionalString (manifestName != "") "--manifest-name=${manifestName}") + ]; + + repoInitFlags = [ + "--manifest-url=${manifest}" + "--manifest-branch=${rev}" + "--depth=1" + (optionalString createMirror "--mirror") + (optionalString useArchive "--archive") + ] ++ extraRepoInitFlags; + + local_manifests = copyPathsToStore localManifests; + +in stdenvNoCC.mkDerivation { + inherit name; + + inherit cacert manifest rev repoRepoURL repoRepoRev referenceDir; # TODO + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sha256; + + preferLocalBuild = true; + enableParallelBuilding = true; + + impureEnvVars = fetchers.proxyImpureEnvVars ++ [ + "GIT_PROXY_COMMAND" "SOCKS_SERVER" + ]; + + nativeBuildInputs = [ gitRepo cacert ]; + + GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt"; + + buildCommand = '' + # Path must be absolute (e.g. 
for GnuPG: ~/.repoconfig/gnupg/pubring.kbx) + export HOME="$(pwd)" + + mkdir $out + cd $out + + mkdir .repo + ${optionalString (local_manifests != []) '' + mkdir .repo/local_manifests + for local_manifest in ${concatMapStringsSep " " toString local_manifests}; do + cp $local_manifest .repo/local_manifests/$(stripHash $local_manifest) + done + ''} + + repo init ${concatStringsSep " " repoInitFlags} + repo sync --jobs=$NIX_BUILD_CORES --current-branch + + # TODO: The git-index files (and probably the files in .repo as well) have + # different contents each time and will therefore change the final hash + # (i.e. creating a mirror probably won't work). + ${optionalString (!createMirror) '' + rm -rf .repo + find -type d -name '.git' -prune -exec rm -rf {} + + ''} + ''; +} diff --git a/nixpkgs/pkgs/build-support/fetchs3/default.nix b/nixpkgs/pkgs/build-support/fetchs3/default.nix new file mode 100644 index 000000000000..acad0749b663 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchs3/default.nix @@ -0,0 +1,36 @@ +{ lib, runCommand, awscli }: + +{ s3url +, name ? builtins.baseNameOf s3url +, sha256 +, region ? "us-east-1" +, credentials ? null # Default to looking at local EC2 metadata service +, recursiveHash ? false +, postFetch ? null +}: + +let + mkCredentials = { access_key_id, secret_access_key, session_token ? null }: { + AWS_ACCESS_KEY_ID = access_key_id; + AWS_SECRET_ACCESS_KEY = secret_access_key; + AWS_SESSION_TOKEN = session_token; + }; + + credentialAttrs = lib.optionalAttrs (credentials != null) (mkCredentials credentials); +in runCommand name ({ + nativeBuildInputs = [ awscli ]; + + outputHashAlgo = "sha256"; + outputHash = sha256; + outputHashMode = if recursiveHash then "recursive" else "flat"; + + preferLocalBuild = true; + + AWS_DEFAULT_REGION = region; +} // credentialAttrs) (if postFetch != null then '' + downloadedFile="$(mktemp)" + aws s3 cp ${s3url} $downloadedFile + ${postFetch} +'' else '' + aws s3 cp ${s3url} $out +'') diff --git a/nixpkgs/pkgs/build-support/fetchsavannah/default.nix b/nixpkgs/pkgs/build-support/fetchsavannah/default.nix new file mode 100644 index 000000000000..e75e25fc1e70 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsavannah/default.nix @@ -0,0 +1,12 @@ +{ fetchzip, lib }: + +lib.makeOverridable ( +# cgit example, snapshot support is optional in cgit +{ repo, rev, name ? "source" +, ... # For hash agility +}@args: fetchzip ({ + inherit name; + url = "https://git.savannah.gnu.org/cgit/${repo}.git/snapshot/${repo}-${rev}.tar.gz"; + meta.homepage = "https://git.savannah.gnu.org/cgit/${repo}.git/"; +} // removeAttrs args [ "repo" "rev" ]) // { inherit rev; } +) diff --git a/nixpkgs/pkgs/build-support/fetchsourcehut/default.nix b/nixpkgs/pkgs/build-support/fetchsourcehut/default.nix new file mode 100644 index 000000000000..ed6e85bd639b --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsourcehut/default.nix @@ -0,0 +1,56 @@ +{ fetchgit, fetchhg, fetchzip, lib }: + +lib.makeOverridable ( +{ owner +, repo, rev +, domain ? "sr.ht" +, vc ? "git" +, name ? "source" +, fetchSubmodules ? false +, ... 
# For hash agility +} @ args: + +with lib; + +assert (lib.assertOneOf "vc" vc [ "hg" "git" ]); + +let + urlFor = resource: "https://${resource}.${domain}/${owner}/${repo}"; + baseUrl = urlFor vc; + baseArgs = { + inherit name; + } // removeAttrs args [ + "owner" "repo" "rev" "domain" "vc" "name" "fetchSubmodules" + ]; + vcArgs = baseArgs // { + inherit rev; + url = baseUrl; + }; + fetcher = if fetchSubmodules then vc else "zip"; + cases = { + git = { + fetch = fetchgit; + arguments = vcArgs // { fetchSubmodules = true; }; + }; + hg = { + fetch = fetchhg; + arguments = vcArgs // { fetchSubrepos = true; }; + }; + zip = { + fetch = fetchzip; + arguments = baseArgs // { + url = "${baseUrl}/archive/${rev}.tar.gz"; + postFetch = optionalString (vc == "hg") '' + rm -f "$out/.hg_archival.txt" + ''; # impure file; see #12002 + passthru = { + gitRepoUrl = urlFor "git"; + }; + }; + }; + }; +in cases.${fetcher}.fetch cases.${fetcher}.arguments // { + inherit rev; + meta.homepage = "${baseUrl}"; +} +) diff --git a/nixpkgs/pkgs/build-support/fetchsvn/builder.sh b/nixpkgs/pkgs/build-support/fetchsvn/builder.sh new file mode 100644 index 000000000000..aa4d049aba43 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsvn/builder.sh @@ -0,0 +1,24 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup + +echo "exporting $url (r$rev) into $out" + +if test -n "$http_proxy"; then + # Configure proxy + mkdir .subversion + proxy="${http_proxy#*://}" + + echo '[global]' > .subversion/servers + echo "http-proxy-host = ${proxy%:*}" >> .subversion/servers + echo "http-proxy-port = ${proxy##*:}" >> .subversion/servers + + export HOME="$PWD" +fi; + +if test -z "$LC_ALL"; then + export LC_ALL="en_US.UTF-8" +fi; + +svn export --trust-server-cert --non-interactive \ + ${ignoreExternals:+--ignore-externals} ${ignoreKeywords:+--ignore-keywords} \ + -r "$rev" "$url" "$out" diff --git a/nixpkgs/pkgs/build-support/fetchsvn/default.nix b/nixpkgs/pkgs/build-support/fetchsvn/default.nix new file mode 100644 index 000000000000..41752eb55a7a --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsvn/default.nix @@ -0,0 +1,59 @@ +{ lib, stdenvNoCC, buildPackages +, subversion, glibcLocales, sshSupport ? true, openssh ? null +}: + +{ url, rev ? "HEAD", sha256 ? "", hash ? "" +, ignoreExternals ? false, ignoreKeywords ? false, name ? null +, preferLocalBuild ? 
true +}: + +assert sshSupport -> openssh != null; + +let + repoName = with lib; + let + fst = head; + snd = l: head (tail l); + trd = l: head (tail (tail l)); + path_ = + (p: if head p == "" then tail p else p) # ~ drop final slash if any + (reverseList (splitString "/" url)); + path = [ (removeSuffix "/" (head path_)) ] ++ (tail path_); + in + # ../repo/trunk -> repo + if fst path == "trunk" then snd path + # ../repo/branches/branch -> repo-branch + else if snd path == "branches" then "${trd path}-${fst path}" + # ../repo/tags/tag -> repo-tag + else if snd path == "tags" then "${trd path}-${fst path}" + # ../repo (no trunk) -> repo + else fst path; + + name_ = if name == null then "${repoName}-r${toString rev}" else name; +in + +if hash != "" && sha256 != "" then + throw "Only one of sha256 or hash can be set" +else +stdenvNoCC.mkDerivation { + name = name_; + builder = ./builder.sh; + nativeBuildInputs = [ subversion glibcLocales ] + ++ lib.optional sshSupport openssh; + + SVN_SSH = if sshSupport then "${buildPackages.openssh}/bin/ssh" else null; + + outputHashAlgo = if hash != "" then null else "sha256"; + outputHashMode = "recursive"; + outputHash = if hash != "" then + hash + else if sha256 != "" then + sha256 + else + lib.fakeSha256; + + inherit url rev ignoreExternals ignoreKeywords; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars; + inherit preferLocalBuild; +} diff --git a/nixpkgs/pkgs/build-support/fetchsvn/nix-prefetch-svn b/nixpkgs/pkgs/build-support/fetchsvn/nix-prefetch-svn new file mode 100755 index 000000000000..03b9eb9a03df --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsvn/nix-prefetch-svn @@ -0,0 +1,79 @@ +#! /bin/sh -e + +url=$1 +rev=$2 +expHash=$3 + +hashType=$NIX_HASH_ALGO +if test -z "$hashType"; then + hashType=sha256 +fi +if test -z "$hashFormat"; then + hashFormat=--base32 +fi + +if test -z "$url"; then + echo "syntax: nix-prefetch-svn URL [REVISION [EXPECTED-HASH]]" >&2 + exit 1 +fi + +test -n "$rev" || rev="HEAD" + +repoName=$(echo $url | sed ' + s,.*/\([^/]\+\)/trunk/*$,\1,;t + s,.*/\([^/]\+\)/branches/\([^/]\+\)/*$,\1-\2,;t + s,.*/\([^/]\+\)/tags/\([^/]\+\)/*$,\1-\2,;t + s,.*/\([^/]\+\)/*$,\1,;t +') +dstFile=$repoName-r$rev + +# If the hash was given, a file with that hash may already be in the +# store. +if test -n "$expHash"; then + finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" $dstFile) + if ! nix-store --check-validity "$finalPath" 2> /dev/null; then + finalPath= + fi + hash=$expHash +fi + + +# If we don't know the hash or a path with that hash doesn't exist, +# download the file and add it to the store. +if test -z "$finalPath"; then + tmpPath="$(mktemp -d "${TMPDIR:-/tmp}/svn-checkout-tmp-XXXXXXXX")" + trap "rm -rf \"$tmpPath\"" EXIT + + tmpFile="$tmpPath/$dstFile" + + # Perform the checkout. + if test "$NIX_PREFETCH_SVN_LEAVE_DOT_SVN" != 1 + then + command="export" + else + command="checkout" + fi + + echo p | svn "$command" --quiet -r "$rev" "$url" "$tmpFile" >&2 + echo "svn revision is $(svn info -r "$rev" "$url" | grep "Revision: " | cut -d' ' -f2)" + + # Compute the hash. + hash=$(nix-hash --type $hashType $hashFormat $tmpFile) + if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi + + # Add the downloaded file to the Nix store. + finalPath=$(nix-store --add-fixed --recursive "$hashType" $tmpFile) + + if test -n "$expHash" -a "$expHash" != "$hash"; then + echo "hash mismatch for URL \`$url'" + exit 1 + fi +fi + +if ! 
test -n "$QUIET"; then echo "path is $finalPath" >&2; fi + +echo $hash + +if test -n "$PRINT_PATH"; then + echo $finalPath +fi diff --git a/nixpkgs/pkgs/build-support/fetchsvnrevision/default.nix b/nixpkgs/pkgs/build-support/fetchsvnrevision/default.nix new file mode 100644 index 000000000000..f2e2a11da8d5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsvnrevision/default.nix @@ -0,0 +1,10 @@ +runCommand: subversion: repository: + import (runCommand "head-revision" + { buildInputs = [ subversion ]; + dummy = builtins.currentTime; + } + '' + rev=$(echo p | svn ls -v --depth empty ${repository} |awk '{ print $1 }') + echo "[ \"$rev\" ]" > $out + echo Latest revision is $rev + '') diff --git a/nixpkgs/pkgs/build-support/fetchsvnssh/builder.sh b/nixpkgs/pkgs/build-support/fetchsvnssh/builder.sh new file mode 100644 index 000000000000..5782151524f7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsvnssh/builder.sh @@ -0,0 +1,14 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup + +echo "exporting $url (r$rev) into $out" + +if test "$sshSupport"; then + export SVN_SSH="$openssh/bin/ssh" +fi + +# Pipe the "p" character into Subversion to force it to accept the +# server's certificate. This is perfectly safe: we don't care +# whether the server is being spoofed --- only the cryptographic +# hash of the output matters. +expect -f $sshSubversion "$username" "$password" "$rev" "$url" $out diff --git a/nixpkgs/pkgs/build-support/fetchsvnssh/default.nix b/nixpkgs/pkgs/build-support/fetchsvnssh/default.nix new file mode 100644 index 000000000000..ef72de61fe21 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsvnssh/default.nix @@ -0,0 +1,17 @@ +{stdenvNoCC, subversion, sshSupport ? true, openssh ? null, expect}: +{username, password, url, rev ? "HEAD", sha256 ? ""}: + + +stdenvNoCC.mkDerivation { + name = "svn-export-ssh"; + builder = ./builder.sh; + nativeBuildInputs = [subversion expect]; + + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = sha256; + + sshSubversion = ./sshsubversion.exp; + + inherit username password url rev sshSupport openssh; +} diff --git a/nixpkgs/pkgs/build-support/fetchsvnssh/sshsubversion.exp b/nixpkgs/pkgs/build-support/fetchsvnssh/sshsubversion.exp new file mode 100755 index 000000000000..c00f39714e5b --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchsvnssh/sshsubversion.exp @@ -0,0 +1,22 @@ +#!/nix/var/nix/profiles/default/bin/expect -f + +# Set variables +set username [lindex $argv 0] +set password [lindex $argv 1] +set rev [lindex $argv 2] +set url [lindex $argv 3] +set out [lindex $argv 4] +set timeout -1 + +spawn svn export -r$rev svn+ssh://$username@$url $out +match_max 100000 + +expect "*continue connecting*" { send -- "yes\r"; expect "*?assword:*"; send -- "$password\r" } \ + "*?assword:*" { send -- "$password\r" } + +expect "*?assword:*" +send -- "$password\r" + +# Send blank line +send -- "\r" +expect eof diff --git a/nixpkgs/pkgs/build-support/fetchurl/boot.nix b/nixpkgs/pkgs/build-support/fetchurl/boot.nix new file mode 100644 index 000000000000..8f8c78b7a454 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchurl/boot.nix @@ -0,0 +1,25 @@ +let mirrors = import ./mirrors.nix; in + +{ system }: + +{ url ? builtins.head urls +, urls ? [] +, sha256 ? "" +, hash ? "" +, name ? 
baseNameOf (toString url) +}: + +# assert exactly one hash is set +assert hash != "" || sha256 != ""; +assert hash != "" -> sha256 == ""; + +import <nix/fetchurl.nix> { + inherit system hash sha256 name; + + url = + # Handle mirror:// URIs. Since <nix/fetchurl.nix> currently + # supports only one URI, use the first listed mirror. + let m = builtins.match "mirror://([a-z]+)/(.*)" url; in + if m == null then url + else builtins.head (mirrors.${builtins.elemAt m 0}) + (builtins.elemAt m 1); +} diff --git a/nixpkgs/pkgs/build-support/fetchurl/builder.sh b/nixpkgs/pkgs/build-support/fetchurl/builder.sh new file mode 100644 index 000000000000..e8eaba934858 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchurl/builder.sh @@ -0,0 +1,173 @@ +if [ -e .attrs.sh ]; then source .attrs.sh; fi +source $stdenv/setup + +source $mirrorsFile + +curlVersion=$(curl -V | head -1 | cut -d' ' -f2) + +# Curl flags to handle redirects, not use EPSV, handle cookies for +# servers to need them during redirects, and work on SSL without a +# certificate (this isn't a security problem because we check the +# cryptographic hash of the output anyway). +curl=( + curl + --location + --max-redirs 20 + --retry 3 + --disable-epsv + --cookie-jar cookies + --user-agent "curl/$curlVersion Nixpkgs/$nixpkgsVersion" +) + +if ! [ -f "$SSL_CERT_FILE" ]; then + curl+=(--insecure) +fi + +eval "curl+=($curlOptsList)" + +curl+=( + $curlOpts + $NIX_CURL_FLAGS +) + +downloadedFile="$out" +if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi + + +tryDownload() { + local url="$1" + echo + echo "trying $url" + local curlexit=18; + + success= + + # if we get error code 18, resume partial download + while [ $curlexit -eq 18 ]; do + # keep this inside an if statement, since on failure it doesn't abort the script + if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then + success=1 + break + else + curlexit=$?; + fi + done +} + + +finish() { + local skipPostFetch="$1" + + set +o noglob + + if [[ $executable == "1" ]]; then + chmod +x $downloadedFile + fi + + if [ -z "$skipPostFetch" ]; then + runHook postFetch + fi + + exit 0 +} + + +tryHashedMirrors() { + if test -n "$NIX_HASHED_MIRRORS"; then + hashedMirrors="$NIX_HASHED_MIRRORS" + fi + + for mirror in $hashedMirrors; do + url="$mirror/$outputHashAlgo/$outputHash" + if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \ + --fail --silent --show-error --head "$url" \ + --write-out "%{http_code}" --output /dev/null > code 2> log; then + tryDownload "$url" + + # We skip postFetch here, because hashed-mirrors are + # already content addressed. So if $outputHash is in the + # hashed-mirror, changes from ‘postFetch’ would already be + # made. So, running postFetch will end up applying the + # change /again/, which we don’t want. + if test -n "$success"; then finish skipPostFetch; fi + else + # Be quiet about 404 errors, which we interpret as the file + # not being present on this particular mirror. + if test "$(cat code)" != 404; then + echo "error checking the existence of $url:" + cat log + fi + fi + done +} + + +# URL list may contain ?. No glob expansion for that, please +set -o noglob + +urls2= +for url in $urls; do + if test "${url:0:9}" != "mirror://"; then + urls2="$urls2 $url" + else + url2="${url:9}"; echo "${url2/\// }" > split; read site fileName < split + #varName="mirror_$site" + varName="$site" # !!! 
danger of name clash, fix this + if test -z "${!varName}"; then + echo "warning: unknown mirror:// site \`$site'" + else + mirrors=${!varName} + + # Allow command-line override by setting NIX_MIRRORS_$site. + varName="NIX_MIRRORS_$site" + if test -n "${!varName}"; then mirrors="${!varName}"; fi + + for url3 in $mirrors; do + urls2="$urls2 $url3$fileName"; + done + fi + fi +done +urls="$urls2" + +# Restore globbing settings +set +o noglob + +if test -n "$showURLs"; then + echo "$urls" > $out + exit 0 +fi + +if test -n "$preferHashedMirrors"; then + tryHashedMirrors +fi + +# URL list may contain ?. No glob expansion for that, please +set -o noglob + +success= +for url in $urls; do + if [ -z "$postFetch" ]; then + case "$url" in + https://github.com/*/archive/*) + echo "warning: archives from GitHub revisions should use fetchFromGitHub" + ;; + https://gitlab.com/*/-/archive/*) + echo "warning: archives from GitLab revisions should use fetchFromGitLab" + ;; + esac + fi + tryDownload "$url" + if test -n "$success"; then finish; fi +done + +# Restore globbing settings +set +o noglob + +if test -z "$preferHashedMirrors"; then + tryHashedMirrors +fi + + +echo "error: cannot download $name from any mirror" +exit 1 diff --git a/nixpkgs/pkgs/build-support/fetchurl/default.nix b/nixpkgs/pkgs/build-support/fetchurl/default.nix new file mode 100644 index 000000000000..a9c2c7c46d14 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchurl/default.nix @@ -0,0 +1,190 @@ +{ lib, buildPackages ? { inherit stdenvNoCC; }, stdenvNoCC +, curl # Note that `curl' may be `null', in case of the native stdenvNoCC. +, cacert ? null }: + +let + + mirrors = import ./mirrors.nix; + + # Write the list of mirrors to a file that we can reuse between + # fetchurl instantiations, instead of passing the mirrors to + # fetchurl instantiations via environment variables. This makes the + # resulting store derivations (.drv files) much smaller, which in + # turn makes nix-env/nix-instantiate faster. + mirrorsFile = + buildPackages.stdenvNoCC.mkDerivation ({ + name = "mirrors-list"; + strictDeps = true; + builder = ./write-mirror-list.sh; + preferLocalBuild = true; + } // mirrors); + + # Names of the master sites that are mirrored (i.e., "sourceforge", + # "gnu", etc.). + sites = builtins.attrNames mirrors; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ [ + # This variable allows the user to pass additional options to curl + "NIX_CURL_FLAGS" + + # This variable allows the user to override hashedMirrors from the + # command-line. + "NIX_HASHED_MIRRORS" + + # This variable allows overriding the timeout for connecting to + # the hashed mirrors. + "NIX_CONNECT_TIMEOUT" + ] ++ (map (site: "NIX_MIRRORS_${site}") sites); + +in + +{ # URL to fetch. + url ? "" + +, # Alternatively, a list of URLs specifying alternative download + # locations. They are tried in order. + urls ? [] + +, # Additional curl options needed for the download to succeed. + # Warning: Each space (no matter the escaping) will start a new argument. + # If you wish to pass arguments with spaces, use `curlOptsList` + curlOpts ? "" + +, # Additional curl options needed for the download to succeed. + curlOptsList ? [] + +, # Name of the file. If empty, use the basename of `url' (or of the + # first element of `urls'). + name ? "" + + # for versioned downloads optionally take pname + version. +, pname ? "" +, version ? "" + +, # SRI hash. + hash ? "" + +, # Legacy ways of specifying the hash. + outputHash ? "" +, outputHashAlgo ? "" +, sha1 ? "" +, sha256 ? 
"" +, sha512 ? "" + +, recursiveHash ? false + +, # Shell code to build a netrc file for BASIC auth + netrcPhase ? null + +, # Impure env vars (https://nixos.org/nix/manual/#sec-advanced-attributes) + # needed for netrcPhase + netrcImpureEnvVars ? [] + +, # Shell code executed after the file has been fetched + # successfully. This can do things like check or transform the file. + postFetch ? "" + +, # Whether to download to a temporary path rather than $out. Useful + # in conjunction with postFetch. The location of the temporary file + # is communicated to postFetch via $downloadedFile. + downloadToTemp ? false + +, # If true, set executable bit on downloaded file + executable ? false + +, # If set, don't download the file, but write a list of all possible + # URLs (resulting from resolving mirror:// URLs) to $out. + showURLs ? false + +, # Meta information, if any. + meta ? {} + + # Passthru information, if any. +, passthru ? {} + # Doing the download on a remote machine just duplicates network + # traffic, so don't do that by default +, preferLocalBuild ? true + + # Additional packages needed as part of a fetch +, nativeBuildInputs ? [ ] +}: + +let + urls_ = + if urls != [] && url == "" then + (if lib.isList urls then urls + else throw "`urls` is not a list") + else if urls == [] && url != "" then + (if lib.isString url then [url] + else throw "`url` is not a string") + else throw "fetchurl requires either `url` or `urls` to be set"; + + hash_ = + if with lib.lists; length (filter (s: s != "") [ hash outputHash sha1 sha256 sha512 ]) > 1 + then throw "multiple hashes passed to fetchurl" else + + if hash != "" then { outputHashAlgo = null; outputHash = hash; } + else if outputHash != "" then + if outputHashAlgo != "" then { inherit outputHashAlgo outputHash; } + else throw "fetchurl was passed outputHash without outputHashAlgo" + else if sha512 != "" then { outputHashAlgo = "sha512"; outputHash = sha512; } + else if sha256 != "" then { outputHashAlgo = "sha256"; outputHash = sha256; } + else if sha1 != "" then { outputHashAlgo = "sha1"; outputHash = sha1; } + else if cacert != null then { outputHashAlgo = "sha256"; outputHash = ""; } + else throw "fetchurl requires a hash for fixed-output derivation: ${lib.concatStringsSep ", " urls_}"; +in + +assert (lib.isList curlOpts) -> lib.warn '' + fetchurl for ${toString (builtins.head urls_)}: curlOpts is a list (${lib.generators.toPretty { multiline = false; } curlOpts}), which is not supported anymore. + - If you wish to get the same effect as before, for elements with spaces (even if escaped) to expand to multiple curl arguments, use a string argument instead: + curlOpts = ${lib.strings.escapeNixString (toString curlOpts)}; + - If you wish for each list element to be passed as a separate curl argument, allowing arguments to contain spaces, use curlOptsList instead: + curlOptsList = [ ${lib.concatMapStringsSep " " lib.strings.escapeNixString curlOpts} ];'' true; + +stdenvNoCC.mkDerivation (( + if (pname != "" && version != "") then + { inherit pname version; } + else + { name = + if showURLs then "urls" + else if name != "" then name + else baseNameOf (toString (builtins.head urls_)); + } +) // { + builder = ./builder.sh; + + nativeBuildInputs = [ curl ] ++ nativeBuildInputs; + + urls = urls_; + + # If set, prefer the content-addressable mirrors + # (http://tarballs.nixos.org) over the original URLs. + preferHashedMirrors = true; + + # New-style output content requirements. 
+ inherit (hash_) outputHashAlgo outputHash; + + SSL_CERT_FILE = if (hash_.outputHash == "" || hash_.outputHash == lib.fakeSha256 || hash_.outputHash == lib.fakeSha512 || hash_.outputHash == lib.fakeHash) + then "${cacert}/etc/ssl/certs/ca-bundle.crt" + else "/no-cert-file.crt"; + + outputHashMode = if (recursiveHash || executable) then "recursive" else "flat"; + + inherit curlOpts; + curlOptsList = lib.escapeShellArgs curlOptsList; + inherit showURLs mirrorsFile postFetch downloadToTemp executable; + + impureEnvVars = impureEnvVars ++ netrcImpureEnvVars; + + nixpkgsVersion = lib.trivial.release; + + inherit preferLocalBuild; + + postHook = if netrcPhase == null then null else '' + ${netrcPhase} + curlOpts="$curlOpts --netrc-file $PWD/netrc" + ''; + + inherit meta; + passthru = { inherit url; } // passthru; +}) diff --git a/nixpkgs/pkgs/build-support/fetchurl/mirrors.nix b/nixpkgs/pkgs/build-support/fetchurl/mirrors.nix new file mode 100644 index 000000000000..2546b8e6dc99 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchurl/mirrors.nix @@ -0,0 +1,415 @@ +{ + + # Content-addressable Nix mirrors + hashedMirrors = [ + "https://tarballs.nixos.org" + ]; + + # Mirrors for mirror://site/filename URIs, where "site" is + # "sourceforge", "gnu", etc. + + # Alsa Project + alsa = [ + "https://www.alsa-project.org/files/pub/" + "ftp://ftp.alsa-project.org/pub/" + "http://alsa.cybermirror.org/" + "http://www.mirrorservice.org/sites/ftp.alsa-project.org/pub/" + ]; + + # Apache + apache = [ + "https://dlcdn.apache.org/" + "https://www-eu.apache.org/dist/" + "https://ftp.wayne.edu/apache/" + "https://www.apache.org/dist/" + "https://archive.apache.org/dist/" # fallback for old releases + "https://apache.cs.uu.nl/" + "https://apache.cs.utah.edu/" + "http://ftp.tudelft.nl/apache/" + "ftp://ftp.funet.fi/pub/mirrors/apache.org/" + ]; + + # Bioconductor mirrors (from https://bioconductor.org/about/mirrors/) + # The commented-out ones don't seem to allow direct package downloads; + # they serve error messages that result in hash mismatches instead + bioc = [ + # http://bioc.ism.ac.jp/ + # http://bioc.openanalytics.eu/ + # http://bioconductor.fmrp.usp.br/ + # http://mirror.aarnet.edu.au/pub/bioconductor/ + # http://watson.nci.nih.gov/bioc_mirror/ + "https://bioconductor.statistik.tu-dortmund.de/packages/" + "https://mirrors.ustc.edu.cn/bioc/" + "http://bioconductor.jp/packages/" + ]; + + # CRAN mirrors + cran = [ + "https://cran.r-project.org/src/contrib/" + ]; + + # BitlBee mirrors, see https://www.bitlbee.org/main.php/mirrors.html + bitlbee = [ + "https://get.bitlbee.org/" + "https://ftp.snt.utwente.nl/pub/software/bitlbee/" + "http://bitlbee.intergenia.de/" + ]; + + # GCC + gcc = [ + "https://mirror.koddos.net/gcc/" + "https://bigsearcher.com/mirrors/gcc/" + "ftp://ftp.nluug.nl/mirror/languages/gcc/" + "ftp://ftp.fu-berlin.de/unix/languages/gcc/" + "ftp://ftp.irisa.fr/pub/mirrors/gcc.gnu.org/gcc/" + "ftp://gcc.gnu.org/pub/gcc/" + ]; + + # GNOME + gnome = [ + # This one redirects to some mirror closeby, so it should be all you need + "https://download.gnome.org/" + + "https://fr2.rpmfind.net/linux/gnome.org/" + "https://ftp.acc.umu.se/pub/GNOME/" + "https://ftp.belnet.be/mirror/ftp.gnome.org/" + "ftp://ftp.cse.buffalo.edu/pub/Gnome/" + "ftp://ftp.nara.wide.ad.jp/pub/X11/GNOME/" + ]; + + # GNU (https://www.gnu.org/prep/ftp.html) + gnu = [ + # This one redirects to a (supposedly) nearby and (supposedly) up-to-date + # mirror + "https://ftpmirror.gnu.org/" + + "https://ftp.nluug.nl/pub/gnu/" + 
"https://mirrors.kernel.org/gnu/" + "https://mirror.ibcp.fr/pub/gnu/" + "https://mirror.dogado.de/gnu/" + "https://mirror.tochlab.net/pub/gnu/" + + # This one is the master repository, and thus it's always up-to-date + "https://ftp.gnu.org/pub/gnu/" + + "ftp://ftp.funet.fi/pub/mirrors/ftp.gnu.org/gnu/" + ]; + + # GnuPG + gnupg = [ + "https://gnupg.org/ftp/gcrypt/" + "https://mirrors.dotsrc.org/gcrypt/" + "https://ftp.heanet.ie/mirrors/ftp.gnupg.org/gcrypt/" + "https://www.mirrorservice.org/sites/ftp.gnupg.org/gcrypt/" + "http://www.ring.gr.jp/pub/net/" + ]; + + # IBiblio (former metalab/sunsite) + # Most of the time the expressions refer to the /pub/Linux/ subdirectory; + # however there are other useful files outside it + ibiblioPubLinux = [ + "https://www.ibiblio.org/pub/Linux/" + "ftp://ftp.ibiblio.org/pub/linux/" + "ftp://ftp.gwdg.de/pub/linux/metalab/" + "ftp://ftp.metalab.unc.edu/pub/linux/" + ]; + + # ImageMagick mirrors, see https://www.imagemagick.org/script/mirror.php + imagemagick = [ + "https://www.imagemagick.org/download/" + "https://mirror.checkdomain.de/imagemagick/" + "https://ftp.nluug.nl/ImageMagick/" + "https://ftp.sunet.se/mirror/imagemagick.org/ftp/" + "ftp://ftp.sunet.se/mirror/imagemagick.org/ftp/" # also contains older versions removed from most mirrors + ]; + + # Mirrors from https://download.kde.org/ls-lR.mirrorlist + kde = [ + "https://download.kde.org/download.php?url=" + "https://ftp.gwdg.de/pub/linux/kde/" + "https://mirrors.ocf.berkeley.edu/kde/" + "https://mirrors.mit.edu/kde/" + "https://mirrors.ustc.edu.cn/kde/" + "https://ftp.funet.fi/pub/mirrors/ftp.kde.org/pub/kde/" + ]; + + # kernel.org's /pub (/pub/{linux,software}) tree + kernel = [ + "https://cdn.kernel.org/pub/" + "http://linux-kernel.uio.no/pub/" + "ftp://ftp.funet.fi/pub/mirrors/ftp.kernel.org/pub/" + ]; + + # MySQL + mysql = [ + "https://cdn.mysql.com/Downloads/" + ]; + + # Maven Central + maven = [ + "https://repo1.maven.org/maven2/" + ]; + + # Mozilla projects + mozilla = [ + "https://download.cdn.mozilla.net/pub/mozilla.org/" + "https://archive.mozilla.org/pub/" + ]; + + # OSDN (formerly SourceForge.jp) + osdn = [ + "https://osdn.dl.osdn.jp/" + "https://osdn.mirror.constant.com/" + "https://mirrors.gigenet.com/OSDN/" + "https://osdn.dl.sourceforge.jp/" + "https://jaist.dl.sourceforge.jp/" + ]; + + # PostgreSQL + postgresql = [ + "https://ftp.postgresql.org/pub/" + ]; + + # Qt + qt = [ + "https://download.qt.io/" + ]; + + # Sage mirrors (https://www.sagemath.org/mirrors.html) + sageupstream = [ + # Africa (HTTPS) + "https://sagemath.mirror.ac.za/spkg/upstream/" + "https://mirror.ufs.ac.za/sagemath/spkg/upstream/" + + # America, North (HTTPS) + "https://mirrors.mit.edu/sage/spkg/upstream/" + "https://mirrors.xmission.com/sage/spkg/upstream/" + + # Asia (HTTPS) + "https://mirrors.tuna.tsinghua.edu.cn/sagemath/spkg/upstream/" + "https://mirrors.ustc.edu.cn/sagemath/spkg/upstream/" + "http://ftp.tsukuba.wide.ad.jp/software/sage/spkg/upstream/" + "https://ftp.yz.yamagata-u.ac.jp/pub/math/sage/spkg/upstream/" + "https://mirror.yandex.ru/mirrors/sage.math.washington.edu/spkg/upstream/" + + # Australia (HTTPS) + "https://mirror.aarnet.edu.au/pub/sage/spkg/upstream/" + + # Europe (HTTPS) + "https://sage.mirror.garr.it/mirrors/sage/spkg/upstream/" + "https://www-ftp.lip6.fr/pub/math/sagemath/spkg/upstream/" + + # Africa (non-HTTPS) + "ftp://ftp.sun.ac.za/pub/mirrors/www.sagemath.org/spkg/upstream/" + + # America, North (non-HTTPS) + "http://www.cecm.sfu.ca/sage/spkg/upstream/" + + # America, South 
(non-HTTPS) + "http://sagemath.c3sl.ufpr.br/spkg/upstream/" + "http://linorg.usp.br/sage/spkg/upstream" + + # Asia (non-HTTPS) + "http://ftp.kaist.ac.kr/sage/spkg/upstream/" + "http://ftp.riken.jp/sagemath/spkg/upstream/" + + # Europe (non-HTTPS) + "http://mirrors.fe.up.pt/pub/sage/spkg/upstream/" + "http://ftp.ntua.gr/pub/sagemath/spkg/upstream/" + ]; + + # SAMBA + samba = [ + "https://www.samba.org/ftp/" + "http://www.samba.org/ftp/" + ]; + + # GNU Savannah + savannah = [ + # Mirrors from https://download-mirror.savannah.gnu.org/releases/00_MIRRORS.html + "https://mirror.easyname.at/nongnu/" + "https://savannah.c3sl.ufpr.br/" + "https://mirror.csclub.uwaterloo.ca/nongnu/" + "https://mirror.cedia.org.ec/nongnu/" + "https://ftp.igh.cnrs.fr/pub/nongnu/" + "https://mirror6.layerjet.com/nongnu" + "https://mirror.netcologne.de/savannah/" + "https://ftp.cc.uoc.gr/mirrors/nongnu.org/" + "https://nongnu.uib.no/" + "https://ftp.acc.umu.se/mirror/gnu.org/savannah/" + "http://mirror2.klaus-uwe.me/nongnu/" + "http://mirrors.fe.up.pt/pub/nongnu/" + "http://ftp.twaren.net/Unix/NonGNU/" + "http://savannah-nongnu-org.ip-connect.vn.ua/" + "http://www.mirrorservice.org/sites/download.savannah.gnu.org/releases/" + "http://gnu.mirrors.pair.com/savannah/savannah/" + "ftp://mirror.easyname.at/nongnu/" + "ftp://mirror2.klaus-uwe.me/nongnu/" + "ftp://mirror.csclub.uwaterloo.ca/nongnu/" + "ftp://ftp.igh.cnrs.fr/pub/nongnu/" + "ftp://mirror.netcologne.de/savannah/" + "ftp://nongnu.uib.no/pub/nongnu/" + "ftp://mirrors.fe.up.pt/pub/nongnu/" + "ftp://ftp.twaren.net/Unix/NonGNU/" + "ftp://savannah-nongnu-org.ip-connect.vn.ua/mirror/savannah.nongnu.org/" + "ftp://ftp.mirrorservice.org/sites/download.savannah.gnu.org/releases/" + ]; + + # SourceForge + sourceforge = [ + "https://downloads.sourceforge.net/" + "https://prdownloads.sourceforge.net/" + "https://netcologne.dl.sourceforge.net/sourceforge/" + "https://versaweb.dl.sourceforge.net/sourceforge/" + "https://freefr.dl.sourceforge.net/sourceforge/" + "https://osdn.dl.sourceforge.net/sourceforge/" + ]; + + # Steam Runtime + steamrt = [ + "https://repo.steampowered.com/steamrt/" + "https://public.abbradar.moe/steamrt/" + ]; + + # TCSH shell + tcsh = [ + "https://astron.com/pub/tcsh/" + "https://astron.com/pub/tcsh/old/" + "http://ftp.funet.fi/pub/mirrors/ftp.astron.com/pub/tcsh/" + "http://ftp.funet.fi/pub/mirrors/ftp.astron.com/pub/tcsh/old/" + "ftp://ftp.astron.com/pub/tcsh/" + "ftp://ftp.astron.com/pub/tcsh/old/" + "ftp://ftp.funet.fi/pub/unix/shells/tcsh/" + "ftp://ftp.funet.fi/pub/unix/shells/tcsh/old/" + ]; + + # XFCE + xfce = [ + "https://archive.xfce.org/" + "https://mirror.netcologne.de/xfce/" + "https://archive.be.xfce.org/xfce/" + "https://archive.al-us.xfce.org/" + "http://archive.se.xfce.org/xfce/" + "http://mirror.perldude.de/archive.xfce.org/" + "http://archive.be2.xfce.org/" + "http://ftp.udc.es/xfce/" + ]; + + # X.org + xorg = [ + "https://xorg.freedesktop.org/releases/" + "https://ftp.x.org/archive/" + ]; + + ### Programming languages' package repos + + # Perl CPAN + cpan = [ + "https://cpan.metacpan.org/" + "https://cpan.perl.org/" + "https://mirrors.kernel.org/CPAN/" + "https://backpan.perl.org/" # for old releases + ]; + + # Haskell Hackage + hackage = [ + "https://hackage.haskell.org/package/" + ]; + + # Lua Rocks + luarocks = [ + "https://luarocks.org/" + "https://raw.githubusercontent.com/rocks-moonscript-org/moonrocks-mirror/master/" + "https://luafr.org/moonrocks/" + ]; + + # Python PyPI + pypi = [ + 
"https://files.pythonhosted.org/packages/source/" + # pypi.io is a more semantic link, but atm it’s referencing + # files.pythonhosted.org over two redirects + "https://pypi.io/packages/source/" + ]; + + # Python Test-PyPI + testpypi = [ + "https://test.pypi.io/packages/source/" + ]; + + ### Linux distros + + # CentOS + centos = [ + # For old releases + "https://vault.centos.org/" + "https://archive.kernel.org/centos-vault/" + "https://ftp.jaist.ac.jp/pub/Linux/CentOS-vault/" + "https://mirrors.aliyun.com/centos-vault/" + "https://mirror.chpc.utah.edu/pub/vault.centos.org/" + "https://mirror.math.princeton.edu/pub/centos-vault/" + "https://mirrors.tripadvisor.com/centos-vault/" + "http://mirror.centos.org/centos/" + ]; + + # Debian + debian = [ + "https://httpredir.debian.org/debian/" + "https://ftp.debian.org/debian/" + "https://mirrors.edge.kernel.org/debian/" + "ftp://ftp.de.debian.org/debian/" + "ftp://ftp.fr.debian.org/debian/" + "ftp://ftp.nl.debian.org/debian/" + "ftp://ftp.ru.debian.org/debian/" + "http://archive.debian.org/debian-archive/debian/" + "ftp://ftp.funet.fi/pub/mirrors/ftp.debian.org/debian/" + ]; + + # Fedora + # Please add only full mirrors that carry old Fedora distributions as well + # See: https://mirrors.fedoraproject.org/publiclist (but not all carry old content) + fedora = [ + "https://archives.fedoraproject.org/pub/fedora/" + "https://fedora.osuosl.org/" + "https://ftp.funet.fi/pub/mirrors/ftp.redhat.com/pub/fedora/" + "https://ftp.linux.cz/pub/linux/fedora/" + "https://archives.fedoraproject.org/pub/archive/fedora/" + "http://ftp.nluug.nl/pub/os/Linux/distr/fedora/" + "http://mirror.csclub.uwaterloo.ca/fedora/" + "http://mirror.1000mbps.com/fedora/" + ]; + + # Gentoo + gentoo = [ + "https://ftp.snt.utwente.nl/pub/os/linux/gentoo/" + "https://distfiles.gentoo.org/" + "https://mirrors.kernel.org/gentoo/" + ]; + + # openSUSE + opensuse = [ + "https://opensuse.hro.nl/opensuse/distribution/" + "https://ftp.funet.fi/pub/linux/mirrors/opensuse/distribution/" + "https://ftp.opensuse.org/pub/opensuse/distribution/" + "https://ftp5.gwdg.de/pub/opensuse/discontinued/distribution/" + "https://mirrors.edge.kernel.org/opensuse/distribution/" + "http://ftp.hosteurope.de/mirror/ftp.opensuse.org/discontinued/" + ]; + + # Ubuntu + ubuntu = [ + "https://nl.archive.ubuntu.com/ubuntu/" + "https://old-releases.ubuntu.com/ubuntu/" + "https://mirrors.edge.kernel.org/ubuntu/" + "http://de.archive.ubuntu.com/ubuntu/" + "http://archive.ubuntu.com/ubuntu/" + ]; + + # ... and other OSes in general + + # OpenBSD + openbsd = [ + "https://ftp.openbsd.org/pub/OpenBSD/" + "ftp://ftp.nluug.nl/pub/OpenBSD/" + "ftp://ftp-stud.fht-esslingen.de/pub/OpenBSD/" + ]; +} diff --git a/nixpkgs/pkgs/build-support/fetchurl/tests.nix b/nixpkgs/pkgs/build-support/fetchurl/tests.nix new file mode 100644 index 000000000000..e348d77db0bd --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchurl/tests.nix @@ -0,0 +1,13 @@ +{ testers, fetchurl, jq, moreutils, ... 
}: { + # Tests that we can send custom headers with spaces in them + header = + let headerValue = "Test '\" <- These are some quotes"; + in testers.invalidateFetcherByDrvHash fetchurl { + url = "https://httpbin.org/headers"; + sha256 = builtins.hashString "sha256" (headerValue + "\n"); + curlOptsList = [ "-H" "Hello: ${headerValue}" ]; + postFetch = '' + ${jq}/bin/jq -r '.headers.Hello' $out | ${moreutils}/bin/sponge $out + ''; + }; +} diff --git a/nixpkgs/pkgs/build-support/fetchurl/write-mirror-list.sh b/nixpkgs/pkgs/build-support/fetchurl/write-mirror-list.sh new file mode 100644 index 000000000000..2dabd2e722be --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchurl/write-mirror-list.sh @@ -0,0 +1,4 @@ +source $stdenv/setup + +# !!! this is kinda hacky. +set | grep -E '^[a-zA-Z]+=.*://' > $out diff --git a/nixpkgs/pkgs/build-support/fetchzip/default.nix b/nixpkgs/pkgs/build-support/fetchzip/default.nix new file mode 100644 index 000000000000..0446851d6409 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchzip/default.nix @@ -0,0 +1,77 @@ +# This function downloads and unpacks an archive file, such as a zip +# or tar file. This is primarily useful for dynamically generated +# archives, such as GitHub's /archive URLs, where the unpacked content +# of the zip file doesn't change, but the zip file itself may +# (e.g. due to minor changes in the compression algorithm, or changes +# in timestamps). + +{ lib, fetchurl, unzip, glibcLocalesUtf8 }: + +{ name ? "source" +, url ? "" +, urls ? [] +, nativeBuildInputs ? [] +, postFetch ? "" +, extraPostFetch ? "" + +# Optionally move the contents of the unpacked tree up one level. +, stripRoot ? true +# Allows to set the extension for the intermediate downloaded +# file. This can be used as a hint for the unpackCmdHooks to select +# an appropriate unpacking tool. +, extension ? null + +# the rest are given to fetchurl as is +, ... } @ args: + +assert (extraPostFetch != "") -> lib.warn "use 'postFetch' instead of 'extraPostFetch' with 'fetchzip' and 'fetchFromGitHub'." true; + +let + tmpFilename = + if extension != null + then "download.${extension}" + else baseNameOf (if url != "" then url else builtins.head urls); +in + +fetchurl ({ + inherit name; + recursiveHash = true; + + downloadToTemp = true; + + # Have to pull in glibcLocalesUtf8 for unzip in setup-hook.sh to handle + # UTF-8 aware locale: + # https://github.com/NixOS/nixpkgs/issues/176225#issuecomment-1146617263 + nativeBuildInputs = [ unzip glibcLocalesUtf8 ] ++ nativeBuildInputs; + + postFetch = + '' + unpackDir="$TMPDIR/unpack" + mkdir "$unpackDir" + cd "$unpackDir" + + renamed="$TMPDIR/${tmpFilename}" + mv "$downloadedFile" "$renamed" + unpackFile "$renamed" + chmod -R +w "$unpackDir" + '' + (if stripRoot then '' + if [ $(ls -A "$unpackDir" | wc -l) != 1 ]; then + echo "error: zip file must contain a single file or directory." + echo "hint: Pass stripRoot=false; to fetchzip to assume flat list of files." 
+ exit 1 + fi + fn=$(cd "$unpackDir" && ls -A) + if [ -f "$unpackDir/$fn" ]; then + mkdir $out + fi + mv "$unpackDir/$fn" "$out" + '' else '' + mv "$unpackDir" "$out" + '') + '' + ${postFetch} + ${extraPostFetch} + chmod 755 "$out" + ''; + # ^ Remove non-owner write permissions + # Fixes https://github.com/NixOS/nixpkgs/issues/38649 +} // removeAttrs args [ "stripRoot" "extraPostFetch" "postFetch" "extension" "nativeBuildInputs" ]) diff --git a/nixpkgs/pkgs/build-support/fetchzip/tests.nix b/nixpkgs/pkgs/build-support/fetchzip/tests.nix new file mode 100644 index 000000000000..13175d5ce921 --- /dev/null +++ b/nixpkgs/pkgs/build-support/fetchzip/tests.nix @@ -0,0 +1,25 @@ +{ testers, fetchzip, runCommand, ... }: + +let + url = "https://gist.github.com/glandium/01d54cefdb70561b5f6675e08f2990f2/archive/2f430f0c136a69b0886281d0c76708997d8878af.zip"; +in +{ + simple = testers.invalidateFetcherByDrvHash fetchzip { + inherit url; + sha256 = "sha256-0ecwgL8qUavSj1+WkaxpmRBmu7cvj53V5eXQV71fddU="; + }; + + postFetch = testers.invalidateFetcherByDrvHash fetchzip { + inherit url; + sha256 = "sha256-7sAOzKa+9vYx5XyndHxeY2ffWAjOsgCkXC9anK6cuV0="; + postFetch = "touch $out/filee"; + }; + + hiddenDir = testers.invalidateFetcherByDrvHash fetchzip { + url = "file://${runCommand "hiddendir.tar" {} '' + mkdir .foo + tar -cf $out .foo + ''}"; + sha256 = "sha256-pQpattmS9VmO3ZIQUFn66az8GSmB4IvYhTTCFn6SUmo="; + }; +} diff --git a/nixpkgs/pkgs/build-support/flutter/default.nix b/nixpkgs/pkgs/build-support/flutter/default.nix new file mode 100644 index 000000000000..3e136211655b --- /dev/null +++ b/nixpkgs/pkgs/build-support/flutter/default.nix @@ -0,0 +1,103 @@ +{ lib +, callPackage +, runCommand +, makeWrapper +, wrapGAppsHook +, fetchDartDeps +, buildDartApplication +, cacert +, glib +, flutter +}: + +# absolutely no mac support for now + +{ pubGetScript ? "flutter pub get" +, flutterBuildFlags ? [ ] +, extraWrapProgramArgs ? "" +, ... +}@args: + +(buildDartApplication.override { + dart = flutter; + fetchDartDeps = fetchDartDeps.override { dart = flutter; }; +}) (args // { + sdkSetupScript = '' + # Pub needs SSL certificates. Dart normally looks in a hardcoded path. + # https://github.com/dart-lang/sdk/blob/3.1.0/runtime/bin/security_context_linux.cc#L48 + # + # Dart does not respect SSL_CERT_FILE... + # https://github.com/dart-lang/sdk/issues/48506 + # ...and Flutter does not support --root-certs-file, so the path cannot be manually set. + # https://github.com/flutter/flutter/issues/56607 + # https://github.com/flutter/flutter/issues/113594 + # + # libredirect is of no use either, as Flutter does not pass any + # environment variables (including LD_PRELOAD) to the Pub process. + # + # Instead, Flutter is patched to allow the path to the Dart binary used for + # Pub commands to be overriden. 
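    # (Editorial illustration, not part of the original script: the wrapper
    # created just below behaves roughly like
    #   exec /nix/store/<hash>-dart/bin/dart --root-certs-file=/nix/store/<hash>-nss-cacert/etc/ssl/certs/ca-bundle.crt "$@"
    # so every Dart process Flutter spawns for pub commands trusts the usual
    # CA bundle; the store paths shown are placeholders.)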
+ export NIX_FLUTTER_PUB_DART="${runCommand "dart-with-certs" { nativeBuildInputs = [ makeWrapper ]; } '' + mkdir -p "$out/bin" + makeWrapper ${flutter.dart}/bin/dart "$out/bin/dart" \ + --add-flags "--root-certs-file=${cacert}/etc/ssl/certs/ca-bundle.crt" + ''}/bin/dart" + + export HOME="$NIX_BUILD_TOP" + flutter config --no-analytics &>/dev/null # mute first-run + flutter config --enable-linux-desktop >/dev/null + ''; + + nativeBuildInputs = (args.nativeBuildInputs or [ ]) ++ [ wrapGAppsHook ]; + buildInputs = (args.buildInputs or [ ]) ++ [ glib ]; + + dontDartBuild = true; + buildPhase = args.buildPhase or '' + runHook preBuild + + mkdir -p build/flutter_assets/fonts + + doPubGet flutter pub get --offline -v + flutter build linux -v --release --split-debug-info="$debug" ${builtins.concatStringsSep " " (map (flag: "\"${flag}\"") flutterBuildFlags)} + + runHook postBuild + ''; + + dontDartInstall = true; + installPhase = args.installPhase or '' + runHook preInstall + + built=build/linux/*/release/bundle + + mkdir -p $out/bin + mv $built $out/app + + for f in $(find $out/app -iname "*.desktop" -type f); do + install -D $f $out/share/applications/$(basename $f) + done + + for f in $(find $out/app -maxdepth 1 -type f); do + ln -s $f $out/bin/$(basename $f) + done + + # make *.so executable + find $out/app -iname "*.so" -type f -exec chmod +x {} + + + # remove stuff like /build/source/packages/ubuntu_desktop_installer/linux/flutter/ephemeral + for f in $(find $out/app -executable -type f); do + if patchelf --print-rpath "$f" | grep /build; then # this ignores static libs (e,g. libapp.so) also + echo "strip RPath of $f" + newrp=$(patchelf --print-rpath $f | sed -r "s|/build.*ephemeral:||g" | sed -r "s|/build.*profile:||g") + patchelf --set-rpath "$newrp" "$f" + fi + done + + runHook postInstall + ''; + + dontWrapGApps = true; + extraWrapProgramArgs = '' + ''${gappsWrapperArgs[@]} \ + ${extraWrapProgramArgs} + ''; +}) diff --git a/nixpkgs/pkgs/build-support/go/module.nix b/nixpkgs/pkgs/build-support/go/module.nix new file mode 100644 index 000000000000..3bd72cbea889 --- /dev/null +++ b/nixpkgs/pkgs/build-support/go/module.nix @@ -0,0 +1,308 @@ +let + buildGoPackage = + { go, cacert, git, lib, stdenv }: + + { name ? "${args'.pname}-${args'.version}" + , src + , nativeBuildInputs ? [ ] + , passthru ? { } + , patches ? [ ] + + # A function to override the goModules derivation + , overrideModAttrs ? (_oldAttrs: { }) + + # path to go.mod and go.sum directory + , modRoot ? "./" + + # vendorHash is the SRI hash of the vendored dependencies + # + # if vendorHash is null, then we won't fetch any dependencies and + # rely on the vendor folder within the source. + , vendorHash ? args'.vendorSha256 or (throw "buildGoModule: vendorHash is missing") + # Whether to delete the vendor folder supplied with the source. + , deleteVendor ? false + # Whether to fetch (go mod download) and proxy the vendor directory. + # This is useful if your code depends on c code and go mod tidy does not + # include the needed sources to build or if any dependency has case-insensitive + # conflicts which will produce platform dependant `vendorHash` checksums. + , proxyVendor ? false + + # We want parallel builds by default + , enableParallelBuilding ? true + + # Do not enable this without good reason + # IE: programs coupled with the compiler + , allowGoReference ? false + + , CGO_ENABLED ? go.CGO_ENABLED + + , meta ? { } + + # Not needed with buildGoModule + , goPackagePath ? 
"" + + # needed for buildFlags{,Array} warning + , buildFlags ? "" + , buildFlagsArray ? "" + + , ... + }@args': + + assert goPackagePath != "" -> throw "`goPackagePath` is not needed with `buildGoModule`"; + assert (args' ? vendorHash && args' ? vendorSha256) -> throw "both `vendorHash` and `vendorSha256` set. only one can be set."; + + let + args = removeAttrs args' [ "overrideModAttrs" "vendorSha256" "vendorHash" ]; + + GO111MODULE = "on"; + GOTOOLCHAIN = "local"; + + goModules = if (vendorHash == null) then "" else + (stdenv.mkDerivation { + name = "${name}-go-modules"; + + nativeBuildInputs = (args.nativeBuildInputs or [ ]) ++ [ go git cacert ]; + + inherit (args) src; + inherit (go) GOOS GOARCH; + inherit GO111MODULE GOTOOLCHAIN; + + # The following inheritence behavior is not trivial to expect, and some may + # argue it's not ideal. Changing it may break vendor hashes in Nixpkgs and + # out in the wild. In anycase, it's documented in: + # doc/languages-frameworks/go.section.md + prePatch = args.prePatch or ""; + patches = args.patches or [ ]; + patchFlags = args.patchFlags or [ ]; + postPatch = args.postPatch or ""; + preBuild = args.preBuild or ""; + postBuild = args.modPostBuild or ""; + sourceRoot = args.sourceRoot or ""; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ [ + "GIT_PROXY_COMMAND" + "SOCKS_SERVER" + "GOPROXY" + ]; + + configurePhase = args.modConfigurePhase or '' + runHook preConfigure + export GOCACHE=$TMPDIR/go-cache + export GOPATH="$TMPDIR/go" + cd "${modRoot}" + runHook postConfigure + ''; + + buildPhase = args.modBuildPhase or ('' + runHook preBuild + '' + lib.optionalString deleteVendor '' + if [ ! -d vendor ]; then + echo "vendor folder does not exist, 'deleteVendor' is not needed" + exit 10 + else + rm -rf vendor + fi + '' + '' + if [ -d vendor ]; then + echo "vendor folder exists, please set 'vendorHash = null;' in your expression" + exit 10 + fi + + ${if proxyVendor then '' + mkdir -p "''${GOPATH}/pkg/mod/cache/download" + go mod download + '' else '' + if (( "''${NIX_DEBUG:-0}" >= 1 )); then + goModVendorFlags+=(-v) + fi + go mod vendor "''${goModVendorFlags[@]}" + ''} + + mkdir -p vendor + + runHook postBuild + ''); + + installPhase = args.modInstallPhase or '' + runHook preInstall + + ${if proxyVendor then '' + rm -rf "''${GOPATH}/pkg/mod/cache/download/sumdb" + cp -r --reflink=auto "''${GOPATH}/pkg/mod/cache/download" $out + '' else '' + cp -r --reflink=auto vendor $out + ''} + + if ! [ "$(ls -A $out)" ]; then + echo "vendor folder is empty, please set 'vendorHash = null;' in your expression" + exit 10 + fi + + runHook postInstall + ''; + + dontFixup = true; + + outputHashMode = "recursive"; + outputHash = vendorHash; + outputHashAlgo = if args' ? 
vendorSha256 || vendorHash == "" then "sha256" else null; + }).overrideAttrs overrideModAttrs; + + package = stdenv.mkDerivation (args // { + nativeBuildInputs = [ go ] ++ nativeBuildInputs; + + inherit (go) GOOS GOARCH; + + GOFLAGS = lib.optionals (!proxyVendor) [ "-mod=vendor" ] ++ lib.optionals (!allowGoReference) [ "-trimpath" ]; + inherit CGO_ENABLED enableParallelBuilding GO111MODULE GOTOOLCHAIN; + + configurePhase = args.configurePhase or ('' + runHook preConfigure + + export GOCACHE=$TMPDIR/go-cache + export GOPATH="$TMPDIR/go" + export GOPROXY=off + export GOSUMDB=off + cd "$modRoot" + '' + lib.optionalString (vendorHash != null) '' + ${if proxyVendor then '' + export GOPROXY=file://${goModules} + '' else '' + rm -rf vendor + cp -r --reflink=auto ${goModules} vendor + ''} + '' + '' + + # currently pie is only enabled by default in pkgsMusl + # this will respect the `hardening{Disable,Enable}` flags if set + if [[ $NIX_HARDENING_ENABLE =~ "pie" ]]; then + export GOFLAGS="-buildmode=pie $GOFLAGS" + fi + + runHook postConfigure + ''); + + buildPhase = args.buildPhase or ('' + runHook preBuild + + exclude='\(/_\|examples\|Godeps\|testdata' + if [[ -n "$excludedPackages" ]]; then + IFS=' ' read -r -a excludedArr <<<$excludedPackages + printf -v excludedAlternates '%s\\|' "''${excludedArr[@]}" + excludedAlternates=''${excludedAlternates%\\|} # drop final \| added by printf + exclude+='\|'"$excludedAlternates" + fi + exclude+='\)' + + buildGoDir() { + local cmd="$1" dir="$2" + + . $TMPDIR/buildFlagsArray + + declare -a flags + flags+=($buildFlags "''${buildFlagsArray[@]}") + flags+=(''${tags:+-tags=''${tags// /,}}) + flags+=(''${ldflags:+-ldflags="$ldflags"}) + flags+=("-p" "$NIX_BUILD_CORES") + + if [ "$cmd" = "test" ]; then + flags+=(-vet=off) + flags+=($checkFlags) + fi + + local OUT + if ! OUT="$(go $cmd "''${flags[@]}" $dir 2>&1)"; then + if ! echo "$OUT" | grep -qE '(no( buildable| non-test)?|build constraints exclude all) Go (source )?files'; then + echo "$OUT" >&2 + return 1 + fi + fi + if [ -n "$OUT" ]; then + echo "$OUT" >&2 + fi + return 0 + } + + getGoDirs() { + local type; + type="$1" + if [ -n "$subPackages" ]; then + echo "$subPackages" | sed "s,\(^\| \),\1./,g" + else + find . -type f -name \*$type.go -exec dirname {} \; | grep -v "/vendor/" | sort --unique | grep -v "$exclude" + fi + } + + if (( "''${NIX_DEBUG:-0}" >= 1 )); then + buildFlagsArray+=(-x) + fi + + if [ ''${#buildFlagsArray[@]} -ne 0 ]; then + declare -p buildFlagsArray > $TMPDIR/buildFlagsArray + else + touch $TMPDIR/buildFlagsArray + fi + if [ -z "$enableParallelBuilding" ]; then + export NIX_BUILD_CORES=1 + fi + for pkg in $(getGoDirs ""); do + echo "Building subPackage $pkg" + buildGoDir install "$pkg" + done + '' + lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) '' + # normalize cross-compiled builds w.r.t. native builds + ( + dir=$GOPATH/bin/${go.GOOS}_${go.GOARCH} + if [[ -n "$(shopt -s nullglob; echo $dir/*)" ]]; then + mv $dir/* $dir/.. 
+ fi + if [[ -d $dir ]]; then + rmdir $dir + fi + ) + '' + '' + runHook postBuild + ''); + + doCheck = args.doCheck or true; + checkPhase = args.checkPhase or '' + runHook preCheck + # We do not set trimpath for tests, in case they reference test assets + export GOFLAGS=''${GOFLAGS//-trimpath/} + + for pkg in $(getGoDirs test); do + buildGoDir test "$pkg" + done + + runHook postCheck + ''; + + installPhase = args.installPhase or '' + runHook preInstall + + mkdir -p $out + dir="$GOPATH/bin" + [ -e "$dir" ] && cp -r $dir $out + + runHook postInstall + ''; + + strictDeps = true; + + disallowedReferences = lib.optional (!allowGoReference) go; + + passthru = passthru // { inherit go goModules vendorHash; } // { inherit (args') vendorSha256; }; + + meta = { + # Add default meta information + platforms = go.meta.platforms or lib.platforms.all; + } // meta; + }) // { + overrideGoAttrs = f: buildGoPackage (args' // (f args')); + }; + in + lib.warnIf (buildFlags != "" || buildFlagsArray != "") + "Use the `ldflags` and/or `tags` attributes instead of `buildFlags`/`buildFlagsArray`" + package; +in + buildGoPackage diff --git a/nixpkgs/pkgs/build-support/go/package.nix b/nixpkgs/pkgs/build-support/go/package.nix new file mode 100644 index 000000000000..7e099b76f0b7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/go/package.nix @@ -0,0 +1,283 @@ +{ go, govers, lib, fetchgit, fetchhg, fetchbzr, rsync +, fetchFromGitHub, stdenv }: + +{ buildInputs ? [] +, nativeBuildInputs ? [] +, passthru ? {} +, preFixup ? "" +, shellHook ? "" + +# We want parallel builds by default +, enableParallelBuilding ? true + +# Go import path of the package +, goPackagePath + +# Go package aliases +, goPackageAliases ? [ ] + +# Extra sources to include in the gopath +, extraSrcs ? [ ] + +# Extra gopaths containing src subfolder +# with sources to include in the gopath +, extraSrcPaths ? [ ] + +# go2nix dependency file +, goDeps ? null + +# Whether to delete the vendor folder supplied with the source. +, deleteVendor ? false + +, dontRenameImports ? false + +# Do not enable this without good reason +# IE: programs coupled with the compiler +, allowGoReference ? false + +, CGO_ENABLED ? go.CGO_ENABLED + +# needed for buildFlags{,Array} warning +, buildFlags ? "" +, buildFlagsArray ? "" + +, meta ? {}, ... 
} @ args: + +let + dep2src = goDep: + { + inherit (goDep) goPackagePath; + src = if goDep.fetch.type == "git" then + fetchgit { + inherit (goDep.fetch) url rev sha256; + } + else if goDep.fetch.type == "hg" then + fetchhg { + inherit (goDep.fetch) url rev sha256; + } + else if goDep.fetch.type == "bzr" then + fetchbzr { + inherit (goDep.fetch) url rev sha256; + } + else if goDep.fetch.type == "FromGitHub" then + fetchFromGitHub { + inherit (goDep.fetch) owner repo rev sha256; + } + else abort "Unrecognized package fetch type: ${goDep.fetch.type}"; + }; + + importGodeps = { depsFile }: + map dep2src (import depsFile); + + goPath = if goDeps != null then importGodeps { depsFile = goDeps; } ++ extraSrcs + else extraSrcs; + package = stdenv.mkDerivation ( + (builtins.removeAttrs args [ "goPackageAliases" "disabled" "extraSrcs"]) // { + + nativeBuildInputs = [ go ] + ++ (lib.optional (!dontRenameImports) govers) ++ nativeBuildInputs; + buildInputs = buildInputs; + + inherit (go) GOOS GOARCH GO386; + + GOHOSTARCH = go.GOHOSTARCH or null; + GOHOSTOS = go.GOHOSTOS or null; + + inherit CGO_ENABLED enableParallelBuilding; + + GO111MODULE = "off"; + GOTOOLCHAIN = "local"; + GOFLAGS = lib.optionals (!allowGoReference) [ "-trimpath" ]; + + GOARM = toString (lib.intersectLists [(stdenv.hostPlatform.parsed.cpu.version or "")] ["5" "6" "7"]); + + configurePhase = args.configurePhase or ('' + runHook preConfigure + + # Extract the source + cd "$NIX_BUILD_TOP" + mkdir -p "go/src/$(dirname "$goPackagePath")" + mv "$sourceRoot" "go/src/$goPackagePath" + + '' + lib.optionalString deleteVendor '' + if [ ! -d "go/src/$goPackagePath/vendor" ]; then + echo "vendor folder does not exist, 'deleteVendor' is not needed" + exit 10 + else + rm -rf "go/src/$goPackagePath/vendor" + fi + '' + lib.optionalString (goDeps != null) '' + if [ -d "go/src/$goPackagePath/vendor" ]; then + echo "vendor folder exists, 'goDeps' is not needed" + exit 10 + fi + '' + lib.flip lib.concatMapStrings goPath ({ src, goPackagePath }: '' + mkdir goPath + (cd goPath; unpackFile "${src}") + mkdir -p "go/src/$(dirname "${goPackagePath}")" + chmod -R u+w goPath/* + mv goPath/* "go/src/${goPackagePath}" + rmdir goPath + + '') + (lib.optionalString (extraSrcPaths != []) '' + ${rsync}/bin/rsync -a ${lib.concatMapStringsSep " " (p: "${p}/src") extraSrcPaths} go + + '') + '' + export GOPATH=$NIX_BUILD_TOP/go:$GOPATH + export GOCACHE=$TMPDIR/go-cache + + # currently pie is only enabled by default in pkgsMusl + # this will respect the `hardening{Disable,Enable}` flags if set + if [[ $NIX_HARDENING_ENABLE =~ "pie" ]]; then + export GOFLAGS="-buildmode=pie $GOFLAGS" + fi + + runHook postConfigure + ''); + + renameImports = args.renameImports or ( + let + inputsWithAliases = lib.filter (x: x ? 
goPackageAliases) + (buildInputs ++ (args.propagatedBuildInputs or [ ])); + rename = to: from: "echo Renaming '${from}' to '${to}'; govers -d -m ${from} ${to}"; + renames = p: lib.concatMapStringsSep "\n" (rename p.goPackagePath) p.goPackageAliases; + in lib.concatMapStringsSep "\n" renames inputsWithAliases); + + buildPhase = args.buildPhase or ('' + runHook preBuild + + runHook renameImports + + exclude='\(/_\|examples\|Godeps\|testdata' + if [[ -n "$excludedPackages" ]]; then + IFS=' ' read -r -a excludedArr <<<$excludedPackages + printf -v excludedAlternates '%s\\|' "''${excludedArr[@]}" + excludedAlternates=''${excludedAlternates%\\|} # drop final \| added by printf + exclude+='\|'"$excludedAlternates" + fi + exclude+='\)' + + buildGoDir() { + local cmd="$1" dir="$2" + + . $TMPDIR/buildFlagsArray + + declare -a flags + flags+=($buildFlags "''${buildFlagsArray[@]}") + flags+=(''${tags:+-tags=''${tags// /,}}) + flags+=(''${ldflags:+-ldflags="$ldflags"}) + flags+=("-p" "$NIX_BUILD_CORES") + + if [ "$cmd" = "test" ]; then + flags+=(-vet=off) + flags+=($checkFlags) + fi + + local OUT + if ! OUT="$(go $cmd "''${flags[@]}" $dir 2>&1)"; then + if ! echo "$OUT" | grep -qE '(no( buildable| non-test)?|build constraints exclude all) Go (source )?files'; then + echo "$OUT" >&2 + return 1 + fi + fi + if [ -n "$OUT" ]; then + echo "$OUT" >&2 + fi + return 0 + } + + getGoDirs() { + local type; + type="$1" + if [ -n "$subPackages" ]; then + echo "$subPackages" | sed "s,\(^\| \),\1$goPackagePath/,g" + else + pushd "$NIX_BUILD_TOP/go/src" >/dev/null + find "$goPackagePath" -type f -name \*$type.go -exec dirname {} \; | grep -v "/vendor/" | sort | uniq | grep -v "$exclude" + popd >/dev/null + fi + } + + if (( "''${NIX_DEBUG:-0}" >= 1 )); then + buildFlagsArray+=(-x) + fi + + if [ ''${#buildFlagsArray[@]} -ne 0 ]; then + declare -p buildFlagsArray > $TMPDIR/buildFlagsArray + else + touch $TMPDIR/buildFlagsArray + fi + if [ -z "$enableParallelBuilding" ]; then + export NIX_BUILD_CORES=1 + fi + for pkg in $(getGoDirs ""); do + echo "Building subPackage $pkg" + buildGoDir install "$pkg" + done + '' + lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) '' + # normalize cross-compiled builds w.r.t. native builds + ( + dir=$NIX_BUILD_TOP/go/bin/${go.GOOS}_${go.GOARCH} + if [[ -n "$(shopt -s nullglob; echo $dir/*)" ]]; then + mv $dir/* $dir/.. 
+ fi + if [[ -d $dir ]]; then + rmdir $dir + fi + ) + '' + '' + runHook postBuild + ''); + + doCheck = args.doCheck or false; + checkPhase = args.checkPhase or '' + runHook preCheck + # We do not set trimpath for tests, in case they reference test assets + export GOFLAGS=''${GOFLAGS//-trimpath/} + + for pkg in $(getGoDirs test); do + buildGoDir test "$pkg" + done + + runHook postCheck + ''; + + installPhase = args.installPhase or '' + runHook preInstall + + mkdir -p $out + dir="$NIX_BUILD_TOP/go/bin" + [ -e "$dir" ] && cp -r $dir $out + + runHook postInstall + ''; + + strictDeps = true; + + shellHook = '' + d=$(mktemp -d "--suffix=-$name") + '' + toString (map (dep: '' + mkdir -p "$d/src/$(dirname "${dep.goPackagePath}")" + ln -s "${dep.src}" "$d/src/${dep.goPackagePath}" + '' + ) goPath) + '' + export GOPATH=${lib.concatStringsSep ":" ( ["$d"] ++ ["$GOPATH"] ++ ["$PWD"] ++ extraSrcPaths)} + '' + shellHook; + + disallowedReferences = lib.optional (!allowGoReference) go + ++ lib.optional (!dontRenameImports) govers; + + passthru = passthru // + { inherit go; } // + lib.optionalAttrs (goPackageAliases != []) { inherit goPackageAliases; }; + + meta = { + # Add default meta information + homepage = "https://${goPackagePath}"; + platforms = go.meta.platforms or lib.platforms.all; + } // meta; + }); +in +lib.warnIf (buildFlags != "" || buildFlagsArray != "") + "Use the `ldflags` and/or `tags` attributes instead of `buildFlags`/`buildFlagsArray`" + package diff --git a/nixpkgs/pkgs/build-support/icon-conv-tools/bin/extractWinRscIconsToStdFreeDesktopDir.sh b/nixpkgs/pkgs/build-support/icon-conv-tools/bin/extractWinRscIconsToStdFreeDesktopDir.sh new file mode 100755 index 000000000000..994adbd91dae --- /dev/null +++ b/nixpkgs/pkgs/build-support/icon-conv-tools/bin/extractWinRscIconsToStdFreeDesktopDir.sh @@ -0,0 +1,74 @@ +#!/bin/sh + +# The file from which to extract *.ico files or a particular *.ico file. +# (e.g.: './KeePass.exe', './myLibrary.dll', './my/path/to/app.ico'). +# As you notived, the utility can extract icons from a windows executable or +# dll. +rscFile=$1 + +# A regexp that can extract the image size from the file name. Because we +# use 'icotool', this value should usually be set to something like +# '[^\.]+\.exe_[0-9]+_[0-9]+_[0-9]+_[0-9]+_([0-9]+x[0-9]+)x[0-9]+\.png'. +# A reg expression may be written at some point that relegate this to +# an implementation detail. +sizeRegex=$2 + +# A regexp replace expression that will be used with 'sizeRegex' to create +# a proper size directory (e.g.: '48x48'). Usually this is left to '\1'. +sizeReplaceExp=$3 + +# A regexp that can extract the name of the target image from the file name +# of the image (usually png) extracted from the *.ico file(s). A good +# default is '([^\.]+).+' which gets the basename without extension. +nameRegex=$4 + +# A regexp replace expression that will be used alongside 'nameRegex' to create +# a icon file name. Note that you usually put directly you icon name here +# without any extension (e.g.: 'my-app'). But in case you've got something +# fancy, it will usually be '\1'. +nameReplaceExp=$5 + +# The +# out=./myOut +out=$6 + +# An optional temp dir. +if [ "" != "$7" ]; then + tmp=$7 + isOwnerOfTmpDir=false +else + tmp=`mktemp -d` + isOwnerOfTmpDir=true +fi + +rm -rf $tmp/png $tmp/ico +mkdir -p $tmp/png $tmp/ico + +# Extract the ressource file's extension. 
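+# For example (illustrative): with rscFile='./KeePass.exe' the expression
+# below yields rscFileExt='exe'.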
+rscFileExt=`echo "$rscFile" | sed -re 's/.+\.(.+)$/\1/'` + +if [ "ico" = "$rscFileExt" ]; then + cp -p $rscFile $tmp/ico +else + wrestool -x --output=$tmp/ico -t14 $rscFile +fi + +icotool --icon -x --palette-size=0 -o $tmp/png $tmp/ico/*.ico + +mkdir -p $out + +for i in $tmp/png/*.png; do + fn=`basename "$i"` + size=$(echo $fn | sed -re 's/'${sizeRegex}'/'${sizeReplaceExp}'/') + name=$(echo $fn | sed -re 's/'${nameRegex}'/'${nameReplaceExp}'/') + targetDir=$out/share/icons/hicolor/$size/apps + targetFile=$targetDir/$name.png + mkdir -p $targetDir + mv $i $targetFile +done + +rm -rf "$tmp/png" "$tmp/ico" + +if $isOwnerOfTmpDir; then + rm -rf "$tmp" +fi diff --git a/nixpkgs/pkgs/build-support/icon-conv-tools/bin/icoFileToHiColorTheme b/nixpkgs/pkgs/build-support/icon-conv-tools/bin/icoFileToHiColorTheme new file mode 100755 index 000000000000..192f3bb54c29 --- /dev/null +++ b/nixpkgs/pkgs/build-support/icon-conv-tools/bin/icoFileToHiColorTheme @@ -0,0 +1,28 @@ +#!/bin/sh + +SCRIPT_DIR=`cd "$(dirname $0)" && pwd` + +# The '*.ico' file that needs to be converted (e.g.: "./my/path/to/file.ico"). +icoFile="$1" + +# The desired name of created icon files without extension. (e.g.: "my-app"). +targetIconName="$2" + +# The output directory where the free desktop hierarchy will be created. +# (e.g.: "./path/to/my/out" or usually in nix "$out"). Note that the +# whole directory hierarchy to the icon will be created in the specified +# output directory (e.g.: "$out/share/icons/hicolor/48x48/apps/my-app.png"). +out="$3" + +# An optional temp directory location (e.g.: ./tmp). If not specified +# a random '/tmp' directory will be created. +tmp="$4" + +$SCRIPT_DIR/extractWinRscIconsToStdFreeDesktopDir.sh \ + "$icoFile" \ + '[^\.]+_[0-9]+_([0-9]+x[0-9]+)x[0-9]+\.png' \ + '\1' \ + '([^\.]+).+' \ + "$targetIconName" \ + "$out" \ + "$tmp" diff --git a/nixpkgs/pkgs/build-support/icon-conv-tools/default.nix b/nixpkgs/pkgs/build-support/icon-conv-tools/default.nix new file mode 100644 index 000000000000..442f1f2235fa --- /dev/null +++ b/nixpkgs/pkgs/build-support/icon-conv-tools/default.nix @@ -0,0 +1,32 @@ +{ lib, stdenv, icoutils }: + +stdenv.mkDerivation { + pname = "icon-conv-tools"; + version = "0.0.0"; + + src = ./bin; + + buildInputs = [ icoutils ]; + + patchPhase = '' + substituteInPlace extractWinRscIconsToStdFreeDesktopDir.sh \ + --replace "icotool" "${icoutils}/bin/icotool" \ + --replace "wrestool" "${icoutils}/bin/wrestool" + ''; + + buildPhase = '' + mkdir -p "$out/bin" + cp -p * "$out/bin" + ''; + + installPhase = "true"; + + dontPatchELF = true; + dontStrip = true; + + meta = with lib; { + description = "Tools for icon conversion specific to nix package manager"; + maintainers = with maintainers; [ jraygauthier ]; + platforms = platforms.linux; + }; +} diff --git a/nixpkgs/pkgs/build-support/install-shell-files/default.nix b/nixpkgs/pkgs/build-support/install-shell-files/default.nix new file mode 100644 index 000000000000..d50661ddc65d --- /dev/null +++ b/nixpkgs/pkgs/build-support/install-shell-files/default.nix @@ -0,0 +1,12 @@ +{ makeSetupHook, tests }: + +# See the header comment in ../setup-hooks/install-shell-files.sh for example usage. 
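+#
+# As an illustrative sketch only (the package name, man page and completion
+# file paths below are hypothetical), a package might call the commands this
+# hook provides roughly like so:
+#
+#   nativeBuildInputs = [ installShellFiles ];
+#   postInstall = ''
+#     installManPage docs/foo.1
+#     installShellCompletion --cmd foo \
+#       --bash completions/foo.bash \
+#       --zsh completions/_foo
+#   '';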
+let + setupHook = makeSetupHook { name = "install-shell-files"; } ../setup-hooks/install-shell-files.sh; +in + +setupHook.overrideAttrs (oldAttrs: { + passthru = (oldAttrs.passthru or {}) // { + tests = tests.install-shell-files; + }; +}) diff --git a/nixpkgs/pkgs/build-support/java/canonicalize-jar.nix b/nixpkgs/pkgs/build-support/java/canonicalize-jar.nix new file mode 100644 index 000000000000..1edd9a6e0d20 --- /dev/null +++ b/nixpkgs/pkgs/build-support/java/canonicalize-jar.nix @@ -0,0 +1,9 @@ +{ substituteAll, unzip, zip }: + +substituteAll { + name = "canonicalize-jar"; + src = ./canonicalize-jar.sh; + + unzip = "${unzip}/bin/unzip"; + zip = "${zip}/bin/zip"; +} diff --git a/nixpkgs/pkgs/build-support/java/canonicalize-jar.sh b/nixpkgs/pkgs/build-support/java/canonicalize-jar.sh new file mode 100644 index 000000000000..af010bcd2b26 --- /dev/null +++ b/nixpkgs/pkgs/build-support/java/canonicalize-jar.sh @@ -0,0 +1,29 @@ +# Canonicalize the manifest & repack with deterministic timestamps. +canonicalizeJar() { + local input='' outer='' + input="$(realpath -sm -- "$1")" + outer="$(pwd)" + # -qq: even quieter + @unzip@ -qq "$input" -d "$input-tmp" + canonicalizeJarManifest "$input-tmp/META-INF/MANIFEST.MF" + # Sets all timestamps to Jan 1 1980, the earliest mtime zips support. + find -- "$input-tmp" -exec touch -t 198001010000.00 {} + + rm "$input" + pushd "$input-tmp" 2>/dev/null + # -q|--quiet, -r|--recurse-paths + # -o|--latest-time: canonicalizes overall archive mtime + # -X|--no-extra: don't store platform-specific extra file attribute fields + @zip@ -qroX "$outer/tmp-out.jar" . 2> /dev/null + popd 2>/dev/null + rm -rf "$input-tmp" + mv "$outer/tmp-out.jar" "$input" +} + +# See also the Java specification's JAR requirements: +# https://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Notes_on_Manifest_and_Signature_Files +canonicalizeJarManifest() { + local input='' + input="$(realpath -sm -- "$1")" + (head -n 1 "$input" && tail -n +2 "$input" | sort | grep -v '^\s*$') > "$input-tmp" + mv "$input-tmp" "$input" +} diff --git a/nixpkgs/pkgs/build-support/kernel/compress-firmware-xz.nix b/nixpkgs/pkgs/build-support/kernel/compress-firmware-xz.nix new file mode 100644 index 000000000000..cfb06a5c0f15 --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/compress-firmware-xz.nix @@ -0,0 +1,20 @@ +{ runCommand, lib }: + +firmware: + +let + args = lib.optionalAttrs (firmware ? 
meta) { inherit (firmware) meta; }; +in + +runCommand "${firmware.name}-xz" args '' + mkdir -p $out/lib + (cd ${firmware} && find lib/firmware -type d -print0) | + (cd $out && xargs -0 mkdir -v --) + (cd ${firmware} && find lib/firmware -type f -print0) | + (cd $out && xargs -0rtP "$NIX_BUILD_CORES" -n1 \ + sh -c 'xz -9c -T1 -C crc32 --lzma2=dict=2MiB "${firmware}/$1" > "$1.xz"' --) + (cd ${firmware} && find lib/firmware -type l) | while read link; do + target="$(readlink "${firmware}/$link")" + ln -vs -- "''${target/^${firmware}/$out}.xz" "$out/$link.xz" + done +'' diff --git a/nixpkgs/pkgs/build-support/kernel/initrd-compressor-meta.nix b/nixpkgs/pkgs/build-support/kernel/initrd-compressor-meta.nix new file mode 100644 index 000000000000..443e599a239e --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/initrd-compressor-meta.nix @@ -0,0 +1,53 @@ +rec { + cat = { + executable = pkgs: "cat"; + ubootName = "none"; + extension = ".cpio"; + }; + gzip = { + executable = pkgs: "${pkgs.gzip}/bin/gzip"; + defaultArgs = ["-9n"]; + ubootName = "gzip"; + extension = ".gz"; + }; + bzip2 = { + executable = pkgs: "${pkgs.bzip2}/bin/bzip2"; + ubootName = "bzip2"; + extension = ".bz2"; + }; + xz = { + executable = pkgs: "${pkgs.xz}/bin/xz"; + defaultArgs = ["--check=crc32" "--lzma2=dict=512KiB"]; + extension = ".xz"; + }; + lzma = { + executable = pkgs: "${pkgs.xz}/bin/lzma"; + defaultArgs = ["--check=crc32" "--lzma1=dict=512KiB"]; + ubootName = "lzma"; + extension = ".lzma"; + }; + lz4 = { + executable = pkgs: "${pkgs.lz4}/bin/lz4"; + defaultArgs = ["-l"]; + ubootName = "lz4"; + extension = ".lz4"; + }; + lzop = { + executable = pkgs: "${pkgs.lzop}/bin/lzop"; + ubootName = "lzo"; + extension = ".lzo"; + }; + zstd = { + executable = pkgs: "${pkgs.zstd}/bin/zstd"; + defaultArgs = ["-10"]; + ubootName = "zstd"; + extension = ".zst"; + }; + pigz = gzip // { + executable = pkgs: "${pkgs.pigz}/bin/pigz"; + }; + pixz = xz // { + executable = pkgs: "${pkgs.pixz}/bin/pixz"; + defaultArgs = []; + }; +} diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd-ng-tool.nix b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng-tool.nix new file mode 100644 index 000000000000..b1fbee92b32e --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng-tool.nix @@ -0,0 +1,17 @@ +{ rustPlatform, lib, makeWrapper, patchelf, glibc, binutils }: + +rustPlatform.buildRustPackage { + pname = "make-initrd-ng"; + version = "0.1.0"; + + src = ./make-initrd-ng; + cargoLock.lockFile = ./make-initrd-ng/Cargo.lock; + + passthru.updateScript = ./make-initrd-ng/update.sh; + + meta = { + description = "Tool for copying binaries and their dependencies"; + maintainers = with lib.maintainers; [ das_j elvishjerricco k900 lheckemann ]; + license = lib.licenses.mit; + }; +} diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd-ng.nix b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng.nix new file mode 100644 index 000000000000..f2f7aaa6d1b6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng.nix @@ -0,0 +1,97 @@ +let + # Some metadata on various compression programs, relevant to naming + # the initramfs file and, if applicable, generating a u-boot image + # from it. + compressors = import ./initrd-compressor-meta.nix; + # Get the basename of the actual compression program from the whole + # compression command, for the purpose of guessing the u-boot + # compression type and filename extension. 
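+  # For example (illustrative, with an abbreviated store path):
+  # compressorName "/nix/store/...-zstd/bin/zstd -19" evaluates to "zstd".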
+ compressorName = fullCommand: builtins.elemAt (builtins.match "([^ ]*/)?([^ ]+).*" fullCommand) 1; +in +{ stdenvNoCC, perl, cpio, ubootTools, lib, pkgsBuildHost, makeInitrdNGTool, binutils, runCommand +# Name of the derivation (not of the resulting file!) +, name ? "initrd" + +, strip ? true + +# Program used to compress the cpio archive; use "cat" for no compression. +# This can also be a function which takes a package set and returns the path to the compressor, +# such as `pkgs: "${pkgs.lzop}/bin/lzop"`. +, compressor ? "gzip" +, _compressorFunction ? + if lib.isFunction compressor then compressor + else if ! builtins.hasContext compressor && builtins.hasAttr compressor compressors then compressors.${compressor}.executable + else _: compressor +, _compressorExecutable ? _compressorFunction pkgsBuildHost +, _compressorName ? compressorName _compressorExecutable +, _compressorMeta ? compressors.${_compressorName} or {} + +# List of arguments to pass to the compressor program, or null to use its defaults +, compressorArgs ? null +, _compressorArgsReal ? if compressorArgs == null then _compressorMeta.defaultArgs or [] else compressorArgs + +# Filename extension to use for the compressed initramfs. This is +# included for clarity, but $out/initrd will always be a symlink to +# the final image. +# If this isn't guessed, you may want to complete the metadata above and send a PR :) +, extension ? _compressorMeta.extension or + (throw "Unrecognised compressor ${_compressorName}, please specify filename extension") + +# List of { object = path_or_derivation; symlink = "/path"; } +# The paths are copied into the initramfs in their nix store path +# form, then linked at the root according to `symlink`. +, contents + +# List of uncompressed cpio files to prepend to the initramfs. This +# can be used to add files in specified paths without them becoming +# symlinks to store paths. +, prepend ? [] + +# Whether to wrap the initramfs in a u-boot image. +, makeUInitrd ? stdenvNoCC.hostPlatform.linux-kernel.target == "uImage" + +# If generating a u-boot image, the architecture to use. The default +# guess may not align with u-boot's nomenclature correctly, so it can +# be overridden. +# See https://gitlab.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L81-106 for a list. +, uInitrdArch ? stdenvNoCC.hostPlatform.ubootArch + +# The name of the compression, as recognised by u-boot. +# See https://gitlab.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L195-204 for a list. +# If this isn't guessed, you may want to complete the metadata above and send a PR :) +, uInitrdCompression ? _compressorMeta.ubootName or + (throw "Unrecognised compressor ${_compressorName}, please specify uInitrdCompression") +}: runCommand name ({ + compress = "${_compressorExecutable} ${lib.escapeShellArgs _compressorArgsReal}"; + passthru = { + compressorExecutableFunction = _compressorFunction; + compressorArgs = _compressorArgsReal; + }; + + inherit extension makeUInitrd uInitrdArch prepend; + ${if makeUInitrd then "uInitrdCompression" else null} = uInitrdCompression; + + passAsFile = ["contents"]; + contents = lib.concatMapStringsSep "\n" ({ object, symlink, ... 
}: "${object}\n${lib.optionalString (symlink != null) symlink}") contents + "\n"; + + nativeBuildInputs = [makeInitrdNGTool cpio] ++ lib.optional makeUInitrd ubootTools ++ lib.optional strip binutils; + + STRIP = if strip then "${pkgsBuildHost.binutils.targetPrefix}strip" else null; +}) '' + mkdir -p ./root/var/empty + make-initrd-ng "$contentsPath" ./root + mkdir "$out" + (cd root && find * .[^.*] -exec touch -h -d '@1' '{}' +) + for PREP in $prepend; do + cat $PREP >> $out/initrd + done + (cd root && find . -print0 | sort -z | cpio -o -H newc -R +0:+0 --reproducible --null | eval -- $compress >> "$out/initrd") + + if [ -n "$makeUInitrd" ]; then + mkimage -A "$uInitrdArch" -O linux -T ramdisk -C "$uInitrdCompression" -d "$out/initrd" $out/initrd.img + # Compatibility symlink + ln -sf "initrd.img" "$out/initrd" + else + ln -s "initrd" "$out/initrd$extension" + fi +'' diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/Cargo.lock b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/Cargo.lock new file mode 100644 index 000000000000..8965b8793cca --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/Cargo.lock @@ -0,0 +1,111 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "eyre" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "goblin" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7666983ed0dd8d21a6f6576ee00053ca0926fb281a5522577a4dbd0f1b54143" +dependencies = [ + "log", + "plain", + "scroll", +] + +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "make-initrd-ng" +version = "0.1.0" +dependencies = [ + "eyre", + "goblin", +] + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "plain" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" + +[[package]] +name = "proc-macro2" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "scroll" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da" +dependencies = [ + "scroll_derive", +] + +[[package]] +name = "scroll_derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1db149f81d46d2deba7cd3c50772474707729550221e69588478ebf9ada425ae" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "syn" +version = "2.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/Cargo.toml b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/Cargo.toml new file mode 100644 index 000000000000..028833c12bb5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "make-initrd-ng" +version = "0.1.0" +authors = ["Will Fancher <elvishjerricco@gmail.com>"] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +eyre = "0.6.8" +goblin = "0.5.0" diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/README.md b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/README.md new file mode 100644 index 000000000000..d92b7eab7fe1 --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/README.md @@ -0,0 +1,82 @@ +# What is this for? + +NixOS's traditional initrd is generated by listing the paths that +should be included in initrd and copying the full runtime closure of +those paths into the archive. For most things, like almost any +executable, this involves copying the entirety of huge packages like +glibc, when only things like the shared library files are needed. To +solve this, NixOS does a variety of patchwork to edit the files being +copied in so they only refer to small, patched up paths. For instance, +executables and their shared library dependencies are copied into an +`extraUtils` derivation, and every ELF file is patched to refer to +files in that output. + +The problem with this is that it is often difficult to correctly patch +some things. For instance, systemd bakes the path to the `mount` +command into the binary, so patchelf is no help. Instead, it's very +often easier to simply copy the desired files to their original store +locations in initrd and not copy their entire runtime closure. This +does mean that it is the burden of the developer to ensure that all +necessary dependencies are copied in, as closures won't be +consulted. However, it is rare that full closures are actually +desirable, so in the traditional initrd, the developer was likely to +do manual work on patching the dependencies explicitly anyway. + +# How it works + +This program is similar to its inspiration (`find-libs` from the +traditional initrd), except that it also handles symlinks and +directories according to certain rules. As input, it receives a +sequence of pairs of paths. The first path is an object to copy into +initrd. The second path (if not empty) is the path to a symlink that +should be placed in the initrd, pointing to that object. How that +object is copied depends on its type. + +1. A regular file is copied directly to the same absolute path in the + initrd. + + - If it is *also* an ELF file, then all of its direct shared + library dependencies are also listed as objects to be copied. 
+ + - If an unwrapped file exists as `.[filename]-wrapped`, then it is + also listed as an object to be copied. + +2. A directory's direct children are listed as objects to be copied, + and a directory at the same absolute path in the initrd is created. + +3. A symlink's target is listed as an object to be copied. + +There are a couple of quirks to mention here. First, the term "object" +refers to the final file path that the developer intends to have +copied into initrd. This means any parent directory is not considered +an object just because its child was listed as an object in the +program input; instead those intermediate directories are simply +created in support of the target object. Second, shared libraries, +directory children, and symlink targets aren't immediately recursed, +because they simply get listed as objects themselves, and are +therefore traversed when they themselves are processed. Finally, +symlinks in the intermediate directories leading to an object are +preserved, meaning an input object `/a/symlink/b` will just result in +initrd containing `/a/symlink -> /target/b` and `/target/b`, even if +`/target` has other children. Preserving symlinks in this manner is +important for things like systemd. + +These rules automate the most important and obviously necessary +copying that needs to be done in most cases, allowing programs and +configuration files to go unpatched, while keeping the content of the +initrd to a minimum. + +# Why Rust? + +- A prototype of this logic was written in Bash, in an attempt to keep + with its `find-libs` ancestor, but that program was difficult to + write, and ended up taking several minutes to run. This program runs + in less than a second, and the code is substantially easier to work + with. + +- This will not require end users to install a rust toolchain to use + NixOS, as long as this tool is cached by Hydra. And if you're + bootstrapping NixOS from source, rustc is already required anyway. + +- Rust was favored over Python for its type system, and because if you + want to go fast, why not go *really fast*? 
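+
+# Example input (illustrative)
+
+As a rough sketch of the pair format described above (store paths are
+abbreviated and hypothetical), an input file containing
+
+    /nix/store/...-systemd/bin/systemd
+    /init
+
+copies the `systemd` binary into the initrd at its store path (along with
+its direct shared library dependencies, per rule 1) and creates the
+symlink `/init` pointing to it.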
diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/src/main.rs b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/src/main.rs new file mode 100644 index 000000000000..daa688976c6c --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/src/main.rs @@ -0,0 +1,268 @@ +use std::collections::{HashSet, VecDeque}; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fs; +use std::hash::Hash; +use std::io::{BufRead, BufReader}; +use std::iter::FromIterator; +use std::os::unix; +use std::path::{Component, Path, PathBuf}; +use std::process::Command; + +use eyre::Context; +use goblin::{elf::Elf, Object}; + +struct NonRepeatingQueue<T> { + queue: VecDeque<T>, + seen: HashSet<T>, +} + +impl<T> NonRepeatingQueue<T> { + fn new() -> NonRepeatingQueue<T> { + NonRepeatingQueue { + queue: VecDeque::new(), + seen: HashSet::new(), + } + } +} + +impl<T: Clone + Eq + Hash> NonRepeatingQueue<T> { + fn push_back(&mut self, value: T) -> bool { + if self.seen.contains(&value) { + false + } else { + self.seen.insert(value.clone()); + self.queue.push_back(value); + true + } + } + + fn pop_front(&mut self) -> Option<T> { + self.queue.pop_front() + } +} + +fn add_dependencies<P: AsRef<Path> + AsRef<OsStr>>( + source: P, + elf: Elf, + queue: &mut NonRepeatingQueue<Box<Path>>, +) { + if let Some(interp) = elf.interpreter { + queue.push_back(Box::from(Path::new(interp))); + } + + let rpaths = if elf.runpaths.len() > 0 { + elf.runpaths + } else if elf.rpaths.len() > 0 { + elf.rpaths + } else { + vec![] + }; + + let rpaths_as_path = rpaths + .into_iter() + .flat_map(|p| p.split(":")) + .map(|p| Box::<Path>::from(Path::new(p))) + .collect::<Vec<_>>(); + + for line in elf.libraries { + let mut found = false; + for path in &rpaths_as_path { + let lib = path.join(line); + if lib.exists() { + // No need to recurse. The queue will bring it back round. + queue.push_back(Box::from(lib.as_path())); + found = true; + break; + } + } + if !found { + // glibc makes it tricky to make this an error because + // none of the files have a useful rpath. + println!( + "Warning: Couldn't satisfy dependency {} for {:?}", + line, + OsStr::new(&source) + ); + } + } +} + +fn copy_file< + P: AsRef<Path> + AsRef<OsStr> + std::fmt::Debug, + S: AsRef<Path> + AsRef<OsStr> + std::fmt::Debug, +>( + source: P, + target: S, + queue: &mut NonRepeatingQueue<Box<Path>>, +) -> eyre::Result<()> { + fs::copy(&source, &target) + .wrap_err_with(|| format!("failed to copy {:?} to {:?}", source, target))?; + + let contents = + fs::read(&source).wrap_err_with(|| format!("failed to read from {:?}", source))?; + + if let Ok(Object::Elf(e)) = Object::parse(&contents) { + add_dependencies(source, e, queue); + + // Make file writable to strip it + let mut permissions = fs::metadata(&target) + .wrap_err_with(|| format!("failed to get metadata for {:?}", target))? + .permissions(); + permissions.set_readonly(false); + fs::set_permissions(&target, permissions) + .wrap_err_with(|| format!("failed to set readonly flag to false for {:?}", target))?; + + // Strip further than normal + if let Ok(strip) = env::var("STRIP") { + if !Command::new(strip) + .arg("--strip-all") + .arg(OsStr::new(&target)) + .output()? 
+ .status + .success() + { + println!("{:?} was not successfully stripped.", OsStr::new(&target)); + } + } + }; + + Ok(()) +} + +fn queue_dir<P: AsRef<Path> + std::fmt::Debug>( + source: P, + queue: &mut NonRepeatingQueue<Box<Path>>, +) -> eyre::Result<()> { + for entry in + fs::read_dir(&source).wrap_err_with(|| format!("failed to read dir {:?}", source))? + { + let entry = entry?; + // No need to recurse. The queue will bring us back round here on its own. + queue.push_back(Box::from(entry.path().as_path())); + } + + Ok(()) +} + +fn handle_path( + root: &Path, + p: &Path, + queue: &mut NonRepeatingQueue<Box<Path>>, +) -> eyre::Result<()> { + let mut source = PathBuf::new(); + let mut target = Path::new(root).to_path_buf(); + let mut iter = p.components().peekable(); + while let Some(comp) = iter.next() { + match comp { + Component::Prefix(_) => panic!("This tool is not meant for Windows"), + Component::RootDir => { + target.clear(); + target.push(root); + source.clear(); + source.push("/"); + } + Component::CurDir => {} + Component::ParentDir => { + // Don't over-pop the target if the path has too many ParentDirs + if source.pop() { + target.pop(); + } + } + Component::Normal(name) => { + target.push(name); + source.push(name); + let typ = fs::symlink_metadata(&source) + .wrap_err_with(|| format!("failed to get symlink metadata for {:?}", source))? + .file_type(); + if typ.is_file() && !target.exists() { + copy_file(&source, &target, queue)?; + + if let Some(filename) = source.file_name() { + source.set_file_name(OsString::from_iter([ + OsStr::new("."), + filename, + OsStr::new("-wrapped"), + ])); + + let wrapped_path = source.as_path(); + if wrapped_path.exists() { + queue.push_back(Box::from(wrapped_path)); + } + } + } else if typ.is_symlink() { + let link_target = fs::read_link(&source) + .wrap_err_with(|| format!("failed to resolve symlink of {:?}", source))?; + + // Create the link, then push its target to the queue + if !target.exists() && !target.is_symlink() { + unix::fs::symlink(&link_target, &target).wrap_err_with(|| { + format!("failed to symlink {:?} to {:?}", link_target, target) + })?; + } + source.pop(); + source.push(link_target); + while let Some(c) = iter.next() { + source.push(c); + } + let link_target_path = source.as_path(); + if link_target_path.exists() { + queue.push_back(Box::from(link_target_path)); + } + break; + } else if typ.is_dir() { + if !target.exists() { + fs::create_dir(&target) + .wrap_err_with(|| format!("failed to create dir {:?}", target))?; + } + + // Only recursively copy if the directory is the target object + if iter.peek().is_none() { + queue_dir(&source, queue) + .wrap_err_with(|| format!("failed to queue dir {:?}", source))?; + } + } + } + } + } + + Ok(()) +} + +fn main() -> eyre::Result<()> { + let args: Vec<String> = env::args().collect(); + let input = + fs::File::open(&args[1]).wrap_err_with(|| format!("failed to open file {:?}", &args[1]))?; + let output = &args[2]; + let out_path = Path::new(output); + + let mut queue = NonRepeatingQueue::<Box<Path>>::new(); + + let mut lines = BufReader::new(input).lines(); + while let Some(obj) = lines.next() { + // Lines should always come in pairs + let obj = obj?; + let sym = lines.next().unwrap()?; + + let obj_path = Path::new(&obj); + queue.push_back(Box::from(obj_path)); + if !sym.is_empty() { + println!("{} -> {}", &sym, &obj); + // We don't care about preserving symlink structure here + // nearly as much as for the actual objects. 
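+            // Illustrative example with hypothetical paths: obj = "/nix/store/...-systemd/bin/systemd"
+            // and sym = "/init" produce the symlink $out/init -> /nix/store/...-systemd/bin/systemd.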
+ let link_string = format!("{}/{}", output, sym); + let link_path = Path::new(&link_string); + let mut link_parent = link_path.to_path_buf(); + link_parent.pop(); + fs::create_dir_all(&link_parent) + .wrap_err_with(|| format!("failed to create directories to {:?}", link_parent))?; + unix::fs::symlink(obj_path, link_path) + .wrap_err_with(|| format!("failed to symlink {:?} to {:?}", obj_path, link_path))?; + } + } + while let Some(obj) = queue.pop_front() { + handle_path(out_path, &*obj, &mut queue)?; + } + + Ok(()) +} diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/update.sh b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/update.sh new file mode 100755 index 000000000000..ffc5ad3917f7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd-ng/update.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env nix-shell +#!nix-shell -p cargo -i bash +cd "$(dirname "$0")" +cargo update diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd.nix b/nixpkgs/pkgs/build-support/kernel/make-initrd.nix new file mode 100644 index 000000000000..9c27a142f4b6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd.nix @@ -0,0 +1,113 @@ +# Create an initramfs containing the closure of the specified +# file system objects. An initramfs is used during the initial +# stages of booting a Linux system. It is loaded by the boot loader +# along with the kernel image. It's supposed to contain everything +# (such as kernel modules) necessary to allow us to mount the root +# file system. Once the root file system is mounted, the `real' boot +# script can be called. +# +# An initramfs is a cpio archive, and may be compressed with a number +# of algorithms. +let + # Some metadata on various compression programs, relevant to naming + # the initramfs file and, if applicable, generating a u-boot image + # from it. + compressors = import ./initrd-compressor-meta.nix; + # Get the basename of the actual compression program from the whole + # compression command, for the purpose of guessing the u-boot + # compression type and filename extension. + compressorName = fullCommand: builtins.elemAt (builtins.match "([^ ]*/)?([^ ]+).*" fullCommand) 1; +in +{ stdenvNoCC, perl, libarchive, ubootTools, lib, pkgsBuildHost +# Name of the derivation (not of the resulting file!) +, name ? "initrd" + +# Program used to compress the cpio archive; use "cat" for no compression. +# This can also be a function which takes a package set and returns the path to the compressor, +# such as `pkgs: "${pkgs.lzop}/bin/lzop"`. +, compressor ? "gzip" +, _compressorFunction ? + if lib.isFunction compressor then compressor + else if ! builtins.hasContext compressor && builtins.hasAttr compressor compressors then compressors.${compressor}.executable + else _: compressor +, _compressorExecutable ? _compressorFunction pkgsBuildHost +, _compressorName ? compressorName _compressorExecutable +, _compressorMeta ? compressors.${_compressorName} or {} + +# List of arguments to pass to the compressor program, or null to use its defaults +, compressorArgs ? null +, _compressorArgsReal ? if compressorArgs == null then _compressorMeta.defaultArgs or [] else compressorArgs + +# Filename extension to use for the compressed initramfs. This is +# included for clarity, but $out/initrd will always be a symlink to +# the final image. +# If this isn't guessed, you may want to complete the metadata above and send a PR :) +, extension ? 
_compressorMeta.extension or + (throw "Unrecognised compressor ${_compressorName}, please specify filename extension") + +# List of { object = path_or_derivation; symlink = "/path"; } +# The paths are copied into the initramfs in their nix store path +# form, then linked at the root according to `symlink`. +, contents + +# List of uncompressed cpio files to prepend to the initramfs. This +# can be used to add files in specified paths without them becoming +# symlinks to store paths. +, prepend ? [] + +# Whether to wrap the initramfs in a u-boot image. +, makeUInitrd ? stdenvNoCC.hostPlatform.linux-kernel.target == "uImage" + +# If generating a u-boot image, the architecture to use. The default +# guess may not align with u-boot's nomenclature correctly, so it can +# be overridden. +# See https://gitlab.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L81-106 for a list. +, uInitrdArch ? stdenvNoCC.hostPlatform.linuxArch + +# The name of the compression, as recognised by u-boot. +# See https://gitlab.denx.de/u-boot/u-boot/-/blob/9bfb567e5f1bfe7de8eb41f8c6d00f49d2b9a426/common/image.c#L195-204 for a list. +# If this isn't guessed, you may want to complete the metadata above and send a PR :) +, uInitrdCompression ? _compressorMeta.ubootName or + (throw "Unrecognised compressor ${_compressorName}, please specify uInitrdCompression") +}: +let + # !!! Move this into a public lib function, it is probably useful for others + toValidStoreName = x: with builtins; + lib.concatStringsSep "-" (filter (x: !(isList x)) (split "[^a-zA-Z0-9_=.?-]+" x)); + +in stdenvNoCC.mkDerivation rec { + inherit name makeUInitrd extension uInitrdArch prepend; + + ${if makeUInitrd then "uInitrdCompression" else null} = uInitrdCompression; + + builder = ./make-initrd.sh; + + nativeBuildInputs = [ perl libarchive ] + ++ lib.optional makeUInitrd ubootTools; + + compress = "${_compressorExecutable} ${lib.escapeShellArgs _compressorArgsReal}"; + + # Pass the function through, for reuse in append-initrd-secrets. The + # function is used instead of the string, in order to support + # cross-compilation (append-initrd-secrets running on a different + # architecture than what the main initramfs is built on). + passthru = { + compressorExecutableFunction = _compressorFunction; + compressorArgs = _compressorArgsReal; + }; + + # !!! should use XML. + objects = map (x: x.object) contents; + symlinks = map (x: x.symlink) contents; + suffices = map (x: if x ? suffix then x.suffix else "none") contents; + + # For obtaining the closure of `contents'. + # Note: we don't use closureInfo yet, as that won't build with nix-1.x. + # See #36268. + exportReferencesGraph = + lib.zipListsWith + (x: i: [("closure-${toValidStoreName (baseNameOf x.symlink)}-${toString i}") x.object]) + contents + (lib.range 0 (lib.length contents - 1)); + pathsFromGraph = ./paths-from-graph.pl; +} diff --git a/nixpkgs/pkgs/build-support/kernel/make-initrd.sh b/nixpkgs/pkgs/build-support/kernel/make-initrd.sh new file mode 100644 index 000000000000..8f64114d54c3 --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/make-initrd.sh @@ -0,0 +1,51 @@ +source $stdenv/setup + +set -o pipefail + +objects=($objects) +symlinks=($symlinks) +suffices=($suffices) + +mkdir root + +# Needed for splash_helper, which gets run before init. 
+mkdir root/dev +mkdir root/sys +mkdir root/proc + + +for ((n = 0; n < ${#objects[*]}; n++)); do + object=${objects[$n]} + symlink=${symlinks[$n]} + suffix=${suffices[$n]} + if test "$suffix" = none; then suffix=; fi + + mkdir -p $(dirname root/$symlink) + ln -s $object$suffix root/$symlink +done + + +# Get the paths in the closure of `object'. +storePaths=$(perl $pathsFromGraph closure-*) + + +# Paths in cpio archives *must* be relative, otherwise the kernel +# won't unpack 'em. +(cd root && cp -prP --parents $storePaths .) + + +# Put the closure in a gzipped cpio archive. +mkdir -p $out +for PREP in $prepend; do + cat $PREP >> $out/initrd +done +(cd root && find * .[^.*] -exec touch -h -d '@1' '{}' +) +(cd root && find * .[^.*] -print0 | sort -z | bsdtar --uid 0 --gid 0 -cnf - -T - | bsdtar --null -cf - --format=newc @- | eval -- $compress >> "$out/initrd") + +if [ -n "$makeUInitrd" ]; then + mkimage -A "$uInitrdArch" -O linux -T ramdisk -C "$uInitrdCompression" -d "$out/initrd" $out/initrd.img + # Compatibility symlink + ln -sf "initrd.img" "$out/initrd" +else + ln -s "initrd" "$out/initrd$extension" +fi diff --git a/nixpkgs/pkgs/build-support/kernel/modules-closure.nix b/nixpkgs/pkgs/build-support/kernel/modules-closure.nix new file mode 100644 index 000000000000..d82e279799ba --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/modules-closure.nix @@ -0,0 +1,15 @@ +# Given a kernel build (with modules in $kernel/lib/modules/VERSION), +# produce a module tree in $out/lib/modules/VERSION that contains only +# the modules identified by `rootModules', plus their dependencies. +# Also generate an appropriate modules.dep. + +{ stdenvNoCC, kernel, firmware, nukeReferences, rootModules +, kmod, allowMissing ? false }: + +stdenvNoCC.mkDerivation { + name = kernel.name + "-shrunk"; + builder = ./modules-closure.sh; + nativeBuildInputs = [ nukeReferences kmod ]; + inherit kernel firmware rootModules allowMissing; + allowedReferences = ["out"]; +} diff --git a/nixpkgs/pkgs/build-support/kernel/modules-closure.sh b/nixpkgs/pkgs/build-support/kernel/modules-closure.sh new file mode 100644 index 000000000000..74bc490eb15c --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/modules-closure.sh @@ -0,0 +1,101 @@ +source $stdenv/setup + +# When no modules are built, the $out/lib/modules directory will not +# exist. Because the rest of the script assumes it does exist, we +# handle this special case first. +if ! test -d "$kernel/lib/modules"; then + if test -z "$rootModules" || test -n "$allowMissing"; then + mkdir -p "$out" + exit 0 + else + echo "Required modules: $rootModules" + echo "Can not derive a closure of kernel modules because no modules were provided." + exit 1 + fi +fi + +version=$(cd $kernel/lib/modules && ls -d *) + +echo "kernel version is $version" + +# Determine the dependencies of each root module. +mkdir -p $out/lib/modules/"$version" +touch closure +for module in $rootModules; do + echo "root module: $module" + modprobe --config no-config -d $kernel --set-version "$version" --show-depends "$module" \ + | while read cmd module args; do + case "$cmd" in + builtin) + touch found + echo "$module" >>closure + echo " builtin dependency: $module";; + insmod) + touch found + if ! 
test -e "$module"; then + echo " dependency not found: $module" + exit 1 + fi + target=$(echo "$module" | sed "s^$NIX_STORE.*/lib/modules/^$out/lib/modules/^") + if test -e "$target"; then + echo " dependency already copied: $module" + continue + fi + echo "$module" >>closure + echo " copying dependency: $module" + mkdir -p $(dirname $target) + cp "$module" "$target" + # If the kernel is compiled with coverage instrumentation, it + # contains the paths of the *.gcda coverage data output files + # (which it doesn't actually use...). Get rid of them to prevent + # the whole kernel from being included in the initrd. + nuke-refs "$target" + echo "$target" >> $out/insmod-list;; + *) + echo " unexpected modprobe output: $cmd $module" + exit 1;; + esac + done || test -n "$allowMissing" + if ! test -e found; then + echo " not found" + if test -z "$allowMissing"; then + exit 1 + fi + else + rm found + fi +done + +mkdir -p $out/lib/firmware +for module in $(cat closure); do + # for builtin modules, modinfo will reply with a wrong output looking like: + # $ modinfo -F firmware unix + # name: unix + # + # There is a pending attempt to fix this: + # https://github.com/NixOS/nixpkgs/pull/96153 + # https://lore.kernel.org/linux-modules/20200823215433.j5gc5rnsmahpf43v@blumerang/T/#u + # + # For now, the workaround is just to filter out the extraneous lines out + # of its output. + for i in $(modinfo -b $kernel --set-version "$version" -F firmware $module | grep -v '^name:'); do + mkdir -p "$out/lib/firmware/$(dirname "$i")" + echo "firmware for $module: $i" + for name in "$i" "$i.xz" ""; do + [ -z "$name" ] && echo "WARNING: missing firmware $i for module $module" + if cp "$firmware/lib/firmware/$name" "$out/lib/firmware/$name" 2>/dev/null; then + break + fi + done + done +done + +# copy module ordering hints for depmod +cp $kernel/lib/modules/"$version"/modules.order $out/lib/modules/"$version"/. +cp $kernel/lib/modules/"$version"/modules.builtin $out/lib/modules/"$version"/. + +depmod -b $out -a $version + +# remove original hints from final derivation +rm $out/lib/modules/"$version"/modules.order +rm $out/lib/modules/"$version"/modules.builtin diff --git a/nixpkgs/pkgs/build-support/kernel/paths-from-graph.pl b/nixpkgs/pkgs/build-support/kernel/paths-from-graph.pl new file mode 100644 index 000000000000..1465b73fddb6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/kernel/paths-from-graph.pl @@ -0,0 +1,68 @@ +# NOTE: this script is deprecated. Use closureInfo instead. + +# Parses a /nix/store/*-closure file and prints +# various information. +# By default, the nodes in the graph are printed to stdout. +# If printRegistration is set, then the graph is written +# as a registration file for a manifest is written +# in the `nix-store --load-db' format. + +use strict; +use File::Basename; + +my %storePaths; +my %refs; + +# Each argument on the command line is a graph file. +# The graph file contains line-triples and a variable +# number of references: +# <store-path> +# <deriver> +# <count> +# <ref-#1> +# ... 
+# <ref-#count> +foreach my $graph (@ARGV) { + open GRAPH, "<$graph" or die; + + while (<GRAPH>) { + chomp; + my $storePath = "$_"; + $storePaths{$storePath} = 1; + + my $deriver = <GRAPH>; chomp $deriver; + my $count = <GRAPH>; chomp $count; + + my @refs = (); + for (my $i = 0; $i < $count; ++$i) { + my $ref = <GRAPH>; chomp $ref; + push @refs, $ref; + } + $refs{$storePath} = \@refs; + + } + + close GRAPH; +} + + +if ($ENV{"printRegistration"} eq "1") { + # This is the format used by `nix-store --register-validity + # --hash-given' / `nix-store --load-db'. + foreach my $storePath (sort (keys %storePaths)) { + print "$storePath\n"; + print "0000000000000000000000000000000000000000000000000000000000000000\n"; # !!! fix + print "0\n"; # !!! fix + print "\n"; # don't care about preserving the deriver + print scalar(@{$refs{$storePath}}), "\n"; + foreach my $ref (@{$refs{$storePath}}) { + print "$ref\n"; + } + } +} + +else { + foreach my $storePath (sort (keys %storePaths)) { + print "$storePath\n"; + } +} diff --git a/nixpkgs/pkgs/build-support/libredirect/default.nix b/nixpkgs/pkgs/build-support/libredirect/default.nix new file mode 100644 index 000000000000..1ab4a0db827a --- /dev/null +++ b/nixpkgs/pkgs/build-support/libredirect/default.nix @@ -0,0 +1,122 @@ +{ lib, stdenv, bintools-unwrapped, llvmPackages, llvmPackages_13, coreutils }: + +let + # aarch64-darwin needs a clang that can build arm64e binaries, so make sure a version of LLVM + # is used that can do that, but prefer the stdenv one if it is new enough. + llvmPkgs = if (lib.versionAtLeast (lib.getVersion llvmPackages.clang) "13") + then llvmPackages + else llvmPackages_13; + in +if stdenv.hostPlatform.isStatic +then throw '' + libredirect is not available on static builds. + + Please fix your derivation to not depend on libredirect on static + builds, using something like following: + + nativeBuildInputs = + lib.optional (!stdenv.buildPlatform.isStatic) libredirect; + + and disable tests as necessary, although fixing tests to work without + libredirect is even better. + + libredirect uses LD_PRELOAD feature of dynamic loader and does not + work on static builds where dynamic loader is not used. + '' +else stdenv.mkDerivation rec { + pname = "libredirect"; + version = "0"; + + unpackPhase = '' + cp ${./libredirect.c} libredirect.c + cp ${./test.c} test.c + ''; + + outputs = ["out" "hook"]; + + libName = "libredirect" + stdenv.hostPlatform.extensions.sharedLibrary; + + buildPhase = '' + runHook preBuild + + ${if stdenv.isDarwin && stdenv.isAarch64 then '' + # We need the unwrapped binutils and clang: + # We also want to build a fat library with x86_64, arm64, arm64e in there. + # Because we use the unwrapped tools, we need to provide -isystem for headers + # and the library search directory for libdl. + # We can't build this on x86_64, because the libSystem we point to doesn't + # like arm64(e). 
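+      # A note on the invocation below: giving clang several -arch flags makes it
+      # emit a single universal (fat) Mach-O with one slice per architecture, so
+      # no separate lipo step is needed.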
+ PATH=${bintools-unwrapped}/bin:${llvmPkgs.clang-unwrapped}/bin:$PATH \ + clang -arch x86_64 -arch arm64 -arch arm64e \ + -isystem ${llvmPkgs.clang.libc}/include \ + -isystem ${llvmPkgs.libclang.lib}/lib/clang/*/include \ + -L${llvmPkgs.clang.libc}/lib \ + -Wl,-install_name,$libName \ + -Wall -std=c99 -O3 -fPIC libredirect.c \ + -shared -o "$libName" + '' else if stdenv.isDarwin then '' + $CC -Wall -std=c99 -O3 -fPIC libredirect.c \ + -Wl,-install_name,$out/lib/$libName \ + -shared -o "$libName" + '' else '' + $CC -Wall -std=c99 -O3 -fPIC libredirect.c \ + -shared -o "$libName" + ''} + + if [ -n "$doInstallCheck" ]; then + $CC -Wall -std=c99 \ + ${lib.optionalString (!stdenv.isDarwin) "-D_GNU_SOURCE"} \ + -O3 test.c -o test + fi + + runHook postBuild + ''; + + # We want to retain debugging info to be able to use GDB on libredirect.so + # to more easily investigate which function overrides are missing or why + # existing ones do not have the intended effect. + dontStrip = true; + + installPhase = '' + runHook preInstall + + install -vD "$libName" "$out/lib/$libName" + + '' + lib.optionalString (stdenv.isDarwin && stdenv.isAarch64) '' + # dylib will be rejected unless dylib rpath gets explictly set + install_name_tool \ + -change $libName $out/lib/$libName \ + $out/lib/$libName + '' + '' + # Provide a setup hook that injects our library into every process. + mkdir -p "$hook/nix-support" + cat <<SETUP_HOOK > "$hook/nix-support/setup-hook" + ${if stdenv.isDarwin then '' + export DYLD_INSERT_LIBRARIES="$out/lib/$libName" + '' else '' + export LD_PRELOAD="$out/lib/$libName" + ''} + SETUP_HOOK + + runHook postInstall + ''; + + doInstallCheck = true; + + installCheckPhase = '' + ( + source "$hook/nix-support/setup-hook" + NIX_REDIRECTS="/foo/bar/test=${coreutils}/bin/true:/bar/baz=$(mktemp -d)" ./test + ) + ''; + + meta = with lib; { + platforms = platforms.unix; + description = "An LD_PRELOAD library to intercept and rewrite the paths in glibc calls"; + longDescription = '' + libredirect is an LD_PRELOAD library to intercept and rewrite the paths in + glibc calls based on the value of $NIX_REDIRECTS, a colon-separated list + of path prefixes to be rewritten, e.g. "/src=/dst:/usr/=/nix/store/". + ''; + }; +} diff --git a/nixpkgs/pkgs/build-support/libredirect/libredirect.c b/nixpkgs/pkgs/build-support/libredirect/libredirect.c new file mode 100644 index 000000000000..fdbdcb6ebb86 --- /dev/null +++ b/nixpkgs/pkgs/build-support/libredirect/libredirect.c @@ -0,0 +1,530 @@ +#define _GNU_SOURCE +#include <stdio.h> +#include <stdarg.h> +#include <stdlib.h> +#include <unistd.h> +#include <dlfcn.h> +#include <sys/types.h> +#include <sys/stat.h> +#include <fcntl.h> +#include <limits.h> +#include <string.h> +#include <spawn.h> +#include <dirent.h> + +#define MAX_REDIRECTS 128 + +#ifdef __APPLE__ + struct dyld_interpose { + const void * replacement; + const void * replacee; + }; + #define WRAPPER(ret, name) static ret _libredirect_wrapper_##name + #define LOOKUP_REAL(name) &name + #define WRAPPER_DEF(name) \ + __attribute__((used)) static struct dyld_interpose _libredirect_interpose_##name \ + __attribute__((section("__DATA,__interpose"))) = { &_libredirect_wrapper_##name, &name }; +#else + #define WRAPPER(ret, name) ret name + #define LOOKUP_REAL(name) dlsym(RTLD_NEXT, #name) + #define WRAPPER_DEF(name) +#endif + +static int nrRedirects = 0; +static char * from[MAX_REDIRECTS]; +static char * to[MAX_REDIRECTS]; + +static int isInitialized = 0; + +// FIXME: might run too late. 
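+// init() below is registered as a shared-library constructor, so it runs when
+// libredirect is loaded.  It parses NIX_REDIRECTS, a colon-separated list of
+// "<from-prefix>=<to-prefix>" pairs (e.g. "/src=/dst:/usr/=/nix/store/"), into
+// the parallel from[]/to[] arrays that rewrite() consults on every wrapped call.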
+static void init() __attribute__((constructor)); + +static void init() +{ + if (isInitialized) return; + + char * spec = getenv("NIX_REDIRECTS"); + if (!spec) return; + + // Ensure we only run this code once. + // We do not do `unsetenv("NIX_REDIRECTS")` to ensure that redirects + // also get initialized for subprocesses. + isInitialized = 1; + + char * spec2 = malloc(strlen(spec) + 1); + strcpy(spec2, spec); + + char * pos = spec2, * eq; + while ((eq = strchr(pos, '='))) { + *eq = 0; + from[nrRedirects] = pos; + pos = eq + 1; + to[nrRedirects] = pos; + nrRedirects++; + if (nrRedirects == MAX_REDIRECTS) break; + char * end = strchr(pos, ':'); + if (!end) break; + *end = 0; + pos = end + 1; + } + +} + +static const char * rewrite(const char * volatile path, char * buf) +{ + // Marking the path volatile is needed so the the following check isn't + // optimized away by the compiler. + if (path == NULL) return path; + + for (int n = 0; n < nrRedirects; ++n) { + int len = strlen(from[n]); + if (strncmp(path, from[n], len) != 0) continue; + if (snprintf(buf, PATH_MAX, "%s%s", to[n], path + len) >= PATH_MAX) + abort(); + return buf; + } + + return path; +} + +static char * rewrite_non_const(char * path, char * buf) +{ + // as long as the argument `path` is non-const, we can consider discarding + // the const qualifier of the return value to be safe. + return (char *)rewrite(path, buf); +} + +static int open_needs_mode(int flags) +{ +#ifdef O_TMPFILE + return (flags & O_CREAT) || (flags & O_TMPFILE) == O_TMPFILE; +#else + return flags & O_CREAT; +#endif +} + +/* The following set of Glibc library functions is very incomplete - + it contains only what we needed for programs in Nixpkgs. Just add + more functions as needed. */ + +WRAPPER(int, open)(const char * path, int flags, ...) +{ + int (*open_real) (const char *, int, ...) = LOOKUP_REAL(open); + mode_t mode = 0; + if (open_needs_mode(flags)) { + va_list ap; + va_start(ap, flags); + mode = va_arg(ap, mode_t); + va_end(ap); + } + char buf[PATH_MAX]; + return open_real(rewrite(path, buf), flags, mode); +} +WRAPPER_DEF(open) + +// In musl libc, open64 is simply a macro for open +#if !defined(__APPLE__) && !defined(open64) +WRAPPER(int, open64)(const char * path, int flags, ...) +{ + int (*open64_real) (const char *, int, mode_t) = LOOKUP_REAL(open64); + mode_t mode = 0; + if (open_needs_mode(flags)) { + va_list ap; + va_start(ap, flags); + mode = va_arg(ap, mode_t); + va_end(ap); + } + char buf[PATH_MAX]; + return open64_real(rewrite(path, buf), flags, mode); +} +WRAPPER_DEF(open64) +#endif + +WRAPPER(int, openat)(int dirfd, const char * path, int flags, ...) +{ + int (*openat_real) (int, const char *, int, ...) 
= LOOKUP_REAL(openat); + mode_t mode = 0; + if (open_needs_mode(flags)) { + va_list ap; + va_start(ap, flags); + mode = va_arg(ap, mode_t); + va_end(ap); + } + char buf[PATH_MAX]; + return openat_real(dirfd, rewrite(path, buf), flags, mode); +} +WRAPPER_DEF(openat) + +WRAPPER(FILE *, fopen)(const char * path, const char * mode) +{ + FILE * (*fopen_real) (const char *, const char *) = LOOKUP_REAL(fopen); + char buf[PATH_MAX]; + return fopen_real(rewrite(path, buf), mode); +} +WRAPPER_DEF(fopen) + +#ifdef __GLIBC__ +WRAPPER(FILE *, __nss_files_fopen)(const char * path) +{ + FILE * (*__nss_files_fopen_real) (const char *) = LOOKUP_REAL(__nss_files_fopen); + char buf[PATH_MAX]; + return __nss_files_fopen_real(rewrite(path, buf)); +} +WRAPPER_DEF(__nss_files_fopen) +#endif + +// In musl libc, fopen64 is simply a macro for fopen +#if !defined(__APPLE__) && !defined(fopen64) +WRAPPER(FILE *, fopen64)(const char * path, const char * mode) +{ + FILE * (*fopen64_real) (const char *, const char *) = LOOKUP_REAL(fopen64); + char buf[PATH_MAX]; + return fopen64_real(rewrite(path, buf), mode); +} +WRAPPER_DEF(fopen64) +#endif + +#ifdef __linux__ +WRAPPER(int, __xstat)(int ver, const char * path, struct stat * st) +{ + int (*__xstat_real) (int ver, const char *, struct stat *) = LOOKUP_REAL(__xstat); + char buf[PATH_MAX]; + return __xstat_real(ver, rewrite(path, buf), st); +} +WRAPPER_DEF(__xstat) +#endif + +#ifdef __linux__ +WRAPPER(int, __xstat64)(int ver, const char * path, struct stat64 * st) +{ + int (*__xstat64_real) (int ver, const char *, struct stat64 *) = LOOKUP_REAL(__xstat64); + char buf[PATH_MAX]; + return __xstat64_real(ver, rewrite(path, buf), st); +} +WRAPPER_DEF(__xstat64) +#endif + +#if defined(__linux__) && defined(STATX_TYPE) +WRAPPER(int, statx)(int dirfd, const char * restrict pathname, int flags, + unsigned int mask, struct statx * restrict statxbuf) +{ + int (*statx_real) (int, const char * restrict, int, + unsigned int, struct statx * restrict) = LOOKUP_REAL(statx); + char buf[PATH_MAX]; + return statx_real(dirfd, rewrite(pathname, buf), flags, mask, statxbuf); +} +WRAPPER_DEF(statx) +#endif + +WRAPPER(int, fstatat)(int dirfd, const char * pathname, struct stat * statbuf, int flags) +{ + int (*fstatat_real) (int, const char *, struct stat *, int) = LOOKUP_REAL(fstatat); + char buf[PATH_MAX]; + return fstatat_real(dirfd, rewrite(pathname, buf), statbuf, flags); +} +WRAPPER_DEF(fstatat); + +// In musl libc, fstatat64 is simply a macro for fstatat +#if !defined(__APPLE__) && !defined(fstatat64) +WRAPPER(int, fstatat64)(int dirfd, const char * pathname, struct stat64 * statbuf, int flags) +{ + int (*fstatat64_real) (int, const char *, struct stat64 *, int) = LOOKUP_REAL(fstatat64); + char buf[PATH_MAX]; + return fstatat64_real(dirfd, rewrite(pathname, buf), statbuf, flags); +} +WRAPPER_DEF(fstatat64); +#endif + +WRAPPER(int, stat)(const char * path, struct stat * st) +{ + int (*__stat_real) (const char *, struct stat *) = LOOKUP_REAL(stat); + char buf[PATH_MAX]; + return __stat_real(rewrite(path, buf), st); +} +WRAPPER_DEF(stat) + +// In musl libc, stat64 is simply a macro for stat +#if !defined(__APPLE__) && !defined(stat64) +WRAPPER(int, stat64)(const char * path, struct stat64 * st) +{ + int (*stat64_real) (const char *, struct stat64 *) = LOOKUP_REAL(stat64); + char buf[PATH_MAX]; + return stat64_real(rewrite(path, buf), st); +} +WRAPPER_DEF(stat64) +#endif + +WRAPPER(int, access)(const char * path, int mode) +{ + int (*access_real) (const char *, int mode) = LOOKUP_REAL(access); 
+ char buf[PATH_MAX]; + return access_real(rewrite(path, buf), mode); +} +WRAPPER_DEF(access) + +WRAPPER(int, posix_spawn)(pid_t * pid, const char * path, + const posix_spawn_file_actions_t * file_actions, + const posix_spawnattr_t * attrp, + char * const argv[], char * const envp[]) +{ + int (*posix_spawn_real) (pid_t *, const char *, + const posix_spawn_file_actions_t *, + const posix_spawnattr_t *, + char * const argv[], char * const envp[]) = LOOKUP_REAL(posix_spawn); + char buf[PATH_MAX]; + return posix_spawn_real(pid, rewrite(path, buf), file_actions, attrp, argv, envp); +} +WRAPPER_DEF(posix_spawn) + +WRAPPER(int, posix_spawnp)(pid_t * pid, const char * file, + const posix_spawn_file_actions_t * file_actions, + const posix_spawnattr_t * attrp, + char * const argv[], char * const envp[]) +{ + int (*posix_spawnp_real) (pid_t *, const char *, + const posix_spawn_file_actions_t *, + const posix_spawnattr_t *, + char * const argv[], char * const envp[]) = LOOKUP_REAL(posix_spawnp); + char buf[PATH_MAX]; + return posix_spawnp_real(pid, rewrite(file, buf), file_actions, attrp, argv, envp); +} +WRAPPER_DEF(posix_spawnp) + +WRAPPER(int, execv)(const char * path, char * const argv[]) +{ + int (*execv_real) (const char * path, char * const argv[]) = LOOKUP_REAL(execv); + char buf[PATH_MAX]; + return execv_real(rewrite(path, buf), argv); +} +WRAPPER_DEF(execv) + +WRAPPER(int, execvp)(const char * path, char * const argv[]) +{ + int (*_execvp) (const char *, char * const argv[]) = LOOKUP_REAL(execvp); + char buf[PATH_MAX]; + return _execvp(rewrite(path, buf), argv); +} +WRAPPER_DEF(execvp) + +WRAPPER(int, execve)(const char * path, char * const argv[], char * const envp[]) +{ + int (*_execve) (const char *, char * const argv[], char * const envp[]) = LOOKUP_REAL(execve); + char buf[PATH_MAX]; + return _execve(rewrite(path, buf), argv, envp); +} +WRAPPER_DEF(execve) + +WRAPPER(DIR *, opendir)(const char * path) +{ + char buf[PATH_MAX]; + DIR * (*_opendir) (const char*) = LOOKUP_REAL(opendir); + + return _opendir(rewrite(path, buf)); +} +WRAPPER_DEF(opendir) + +#define SYSTEM_CMD_MAX 512 + +static char * replace_substring(char * source, char * buf, char * replace_string, char * start_ptr, char * suffix_ptr) { + char head[SYSTEM_CMD_MAX] = {0}; + strncpy(head, source, start_ptr - source); + + char tail[SYSTEM_CMD_MAX] = {0}; + if(suffix_ptr < source + strlen(source)) { + strcpy(tail, suffix_ptr); + } + + sprintf(buf, "%s%s%s", head, replace_string, tail); + return buf; +} + +static char * replace_string(char * buf, char * from, char * to) { + int num_matches = 0; + char * matches[SYSTEM_CMD_MAX]; + int from_len = strlen(from); + for(int i=0; i<strlen(buf); i++){ + char *cmp_start = buf + i; + if(strncmp(from, cmp_start, from_len) == 0){ + matches[num_matches] = cmp_start; + num_matches++; + } + } + int len_diff = strlen(to) - strlen(from); + for(int n = 0; n < num_matches; n++) { + char replaced[SYSTEM_CMD_MAX]; + replace_substring(buf, replaced, to, matches[n], matches[n]+from_len); + strcpy(buf, replaced); + for(int nn = n+1; nn < num_matches; nn++) { + matches[nn] += len_diff; + } + } + return buf; +} + +static void rewriteSystemCall(const char * command, char * buf) { + char * p = buf; + + #ifdef __APPLE__ + // The dyld environment variable is not inherited by the subprocess spawned + // by system(), so this hack redefines it. 
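+    // dladdr() on one of our own symbols fills info.dli_fname with the path of
+    // the shared object that contains it, i.e. this libredirect dylib, which is
+    // then re-exported so the child shell loads it too.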
+ Dl_info info; + dladdr(&rewriteSystemCall, &info); + p = stpcpy(p, "export DYLD_INSERT_LIBRARIES="); + p = stpcpy(p, info.dli_fname); + p = stpcpy(p, ";"); + #endif + + stpcpy(p, command); + + for (int n = 0; n < nrRedirects; ++n) { + replace_string(buf, from[n], to[n]); + } +} + +WRAPPER(int, system)(const char *command) +{ + int (*_system) (const char*) = LOOKUP_REAL(system); + + char newCommand[SYSTEM_CMD_MAX]; + rewriteSystemCall(command, newCommand); + return _system(newCommand); +} +WRAPPER_DEF(system) + +WRAPPER(int, chdir)(const char *path) +{ + int (*chdir_real) (const char *) = LOOKUP_REAL(chdir); + char buf[PATH_MAX]; + return chdir_real(rewrite(path, buf)); +} +WRAPPER_DEF(chdir); + +WRAPPER(int, mkdir)(const char *path, mode_t mode) +{ + int (*mkdir_real) (const char *path, mode_t mode) = LOOKUP_REAL(mkdir); + char buf[PATH_MAX]; + return mkdir_real(rewrite(path, buf), mode); +} +WRAPPER_DEF(mkdir) + +WRAPPER(int, mkdirat)(int dirfd, const char *path, mode_t mode) +{ + int (*mkdirat_real) (int dirfd, const char *path, mode_t mode) = LOOKUP_REAL(mkdirat); + char buf[PATH_MAX]; + return mkdirat_real(dirfd, rewrite(path, buf), mode); +} +WRAPPER_DEF(mkdirat) + +WRAPPER(int, unlink)(const char *path) +{ + int (*unlink_real) (const char *path) = LOOKUP_REAL(unlink); + char buf[PATH_MAX]; + return unlink_real(rewrite(path, buf)); +} +WRAPPER_DEF(unlink) + +WRAPPER(int, unlinkat)(int dirfd, const char *path, int flags) +{ + int (*unlinkat_real) (int dirfd, const char *path, int flags) = LOOKUP_REAL(unlinkat); + char buf[PATH_MAX]; + return unlinkat_real(dirfd, rewrite(path, buf), flags); +} +WRAPPER_DEF(unlinkat) + +WRAPPER(int, rmdir)(const char *path) +{ + int (*rmdir_real) (const char *path) = LOOKUP_REAL(rmdir); + char buf[PATH_MAX]; + return rmdir_real(rewrite(path, buf)); +} +WRAPPER_DEF(rmdir) + +static void copy_temp_wildcard(char * dest, char * src, int suffixlen) { + int dest_len = strnlen(dest, PATH_MAX); + int src_len = strnlen(src, PATH_MAX); + memcpy(dest + dest_len - (6 + suffixlen), src + src_len - (6 + suffixlen), 6); +} + +WRAPPER(int, mkstemp)(char *template) +{ + int (*mkstemp_real) (char *template) = LOOKUP_REAL(mkstemp); + char buf[PATH_MAX]; + char * rewritten = rewrite_non_const(template, buf); + int retval = mkstemp_real(rewritten); + if (retval >= 0 && rewritten != template) { + copy_temp_wildcard(template, rewritten, 0); + } + return retval; +} +WRAPPER_DEF(mkstemp) + +WRAPPER(int, mkostemp)(char *template, int flags) +{ + int (*mkostemp_real) (char *template, int flags) = LOOKUP_REAL(mkostemp); + char buf[PATH_MAX]; + char * rewritten = rewrite_non_const(template, buf); + int retval = mkostemp_real(rewritten, flags); + if (retval >= 0 && rewritten != template) { + copy_temp_wildcard(template, rewritten, 0); + } + return retval; +} +WRAPPER_DEF(mkostemp) + +WRAPPER(int, mkstemps)(char *template, int suffixlen) +{ + int (*mkstemps_real) (char *template, int suffixlen) = LOOKUP_REAL(mkstemps); + char buf[PATH_MAX]; + char * rewritten = rewrite_non_const(template, buf); + int retval = mkstemps_real(rewritten, suffixlen); + if (retval >= 0 && rewritten != template) { + copy_temp_wildcard(template, rewritten, suffixlen); + } + return retval; +} +WRAPPER_DEF(mkstemps) + +WRAPPER(int, mkostemps)(char *template, int suffixlen, int flags) +{ + int (*mkostemps_real) (char *template, int suffixlen, int flags) = LOOKUP_REAL(mkostemps); + char buf[PATH_MAX]; + char * rewritten = rewrite_non_const(template, buf); + int retval = mkostemps_real(rewritten, suffixlen, 
flags); + if (retval >= 0 && rewritten != template) { + copy_temp_wildcard(template, rewritten, suffixlen); + } + return retval; +} +WRAPPER_DEF(mkostemps) + +WRAPPER(char *, mkdtemp)(char *template) +{ + char * (*mkdtemp_real) (char *template) = LOOKUP_REAL(mkdtemp); + char buf[PATH_MAX]; + char * rewritten = rewrite_non_const(template, buf); + char * retval = mkdtemp_real(rewritten); + if (retval == NULL) { + return retval; + }; + if (rewritten != template) { + copy_temp_wildcard(template, rewritten, 0); + } + return template; +} +WRAPPER_DEF(mkdtemp) + +WRAPPER(char *, mktemp)(char *template) +{ + char * (*mktemp_real) (char *template) = LOOKUP_REAL(mktemp); + char buf[PATH_MAX]; + char * rewritten = rewrite_non_const(template, buf); + char * retval = mktemp_real(rewritten); + if (retval == NULL) { + return retval; + }; + if (rewritten != template) { + copy_temp_wildcard(template, rewritten, 0); + } + return template; +} +WRAPPER_DEF(mktemp) diff --git a/nixpkgs/pkgs/build-support/libredirect/test.c b/nixpkgs/pkgs/build-support/libredirect/test.c new file mode 100644 index 000000000000..20b27759f019 --- /dev/null +++ b/nixpkgs/pkgs/build-support/libredirect/test.c @@ -0,0 +1,176 @@ +#include <assert.h> +#include <errno.h> +#include <fcntl.h> +#include <limits.h> +#include <spawn.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <unistd.h> + +#include <sys/stat.h> +#include <sys/types.h> +#include <sys/wait.h> + +#define TESTDIR "/bar/baz" +#define TESTPATH "/foo/bar/test" +#define SUBTEST "./test sub" + +extern char **environ; + +void test_spawn(void) { + pid_t pid; + int ret; + posix_spawn_file_actions_t file_actions; + char *argv[] = {"true", NULL}; + + assert(posix_spawn_file_actions_init(&file_actions) == 0); + + ret = posix_spawn(&pid, TESTPATH, &file_actions, NULL, argv, environ); + + assert(ret == 0); + assert(waitpid(pid, NULL, 0) != -1); +} + +void test_execv(void) { + char *argv[] = {"true", NULL}; + assert(execv(TESTPATH, argv) == 0); +} + +void test_system(void) { + assert(system(TESTPATH) == 0); +} + +void test_subprocess(void) { + assert(system(SUBTEST) == 0); +} + +void test_stat_with_null_path(void) { + // This checks whether the compiler optimizes away the null pointer check + // on the path passed to stat(). If that's the case, the following code + // should segfault. 
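+    // (rewrite() takes its path argument as `const char * volatile` precisely so
+    // that its NULL check survives optimization; when it does, stat(NULL, ...)
+    // merely fails instead of crashing.)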
+ struct stat buf; +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wnonnull" + stat(NULL, &buf); +#pragma GCC diagnostic pop +} + +void assert_mktemp_path( + const char * orig_prefix, + const char * orig_suffix, + const char * updated +) { + // prefix unchanged + assert(strncmp(updated, orig_prefix, strlen(orig_prefix)) == 0); + // wildcards replaced + assert(strcmp(updated + strlen(orig_prefix), "XXXXXX") != 0); + // suffix unchanged + assert(strcmp(updated + strlen(orig_prefix) + 6, orig_suffix) == 0); +} + +int main(int argc, char *argv[]) +{ + FILE *testfp; + int testfd; + struct stat testsb; +#ifndef __APPLE__ + struct stat64 testsb64; +#endif +#if defined(__linux__) && defined(STATX_TYPE) + struct statx testsbx; +#endif + char buf[PATH_MAX]; + + testfp = fopen(TESTPATH, "r"); + assert(testfp != NULL); + fclose(testfp); + + testfd = open(TESTPATH, O_RDONLY); + assert(testfd != -1); + close(testfd); + + assert(access(TESTPATH, X_OK) == 0); + + assert(stat(TESTPATH, &testsb) != -1); +#ifndef __APPLE__ + assert(stat64(TESTPATH, &testsb64) != -1); +#endif + assert(fstatat(123, TESTPATH, &testsb, 0) != -1); +#ifndef __APPLE__ + assert(fstatat64(123, TESTPATH, &testsb64, 0) != -1); +#endif +#if defined(__linux__) && defined(STATX_TYPE) + assert(statx(123, TESTPATH, 0, STATX_ALL, &testsbx) != -1); +#endif + + assert(getcwd(buf, PATH_MAX) != NULL); + assert(chdir(TESTDIR) == 0); + assert(chdir(buf) == 0); + + assert(mkdir(TESTDIR "/dir-mkdir", 0777) == 0); + assert(unlink(TESTDIR "/dir-mkdir") == -1); // it's a directory! +#ifndef __APPLE__ + assert(errno == EISDIR); +#endif + assert(rmdir(TESTDIR "/dir-mkdir") == 0); + assert(unlink(TESTDIR "/dir-mkdir") == -1); + assert(errno == ENOENT); + + assert(mkdirat(123, TESTDIR "/dir-mkdirat", 0777) == 0); + assert(unlinkat(123, TESTDIR "/dir-mkdirat", 0) == -1); // it's a directory! +#ifndef __APPLE__ + assert(errno == EISDIR); +#endif + assert(unlinkat(123, TESTDIR "/dir-mkdirat", AT_REMOVEDIR) == 0); + + strncpy(buf, TESTDIR "/tempXXXXXX", PATH_MAX); + testfd = mkstemp(buf); + assert(testfd > 0); + assert_mktemp_path(TESTDIR "/temp", "", buf); + close(testfd); + + strncpy(buf, TESTDIR "/tempXXXXXX", PATH_MAX); + testfd = mkostemp(buf, 0); + assert(testfd > 0); + assert_mktemp_path(TESTDIR "/temp", "", buf); + close(testfd); + + strncpy(buf, TESTDIR "/tempXXXXXX.test", PATH_MAX); + testfd = mkstemps(buf, strlen(".test")); + assert(testfd > 0); + assert_mktemp_path(TESTDIR "/temp", ".test", buf); + close(testfd); + + strncpy(buf, TESTDIR "/tempXXXXXX.test", PATH_MAX); + testfd = mkostemps(buf, strlen(".test"), 0); + assert(testfd > 0); + assert_mktemp_path(TESTDIR "/temp", ".test", buf); + close(testfd); + + strncpy(buf, TESTDIR "/tempXXXXXX", PATH_MAX); + assert(mkdtemp(buf) == buf); + assert_mktemp_path(TESTDIR "/temp", "", buf); + + strncpy(buf, TESTDIR "/tempXXXXXX", PATH_MAX); + assert(mktemp(buf) == buf); + assert_mktemp_path(TESTDIR "/temp", "", buf); + + test_spawn(); + test_system(); + test_stat_with_null_path(); + + // Only run subprocess if no arguments are given + // as the subprocess will be called without argument + // otherwise we will have infinite recursion + if (argc == 1) { + test_subprocess(); + } + + test_execv(); + + /* If all goes well, this is never reached because test_execv() replaces + * the current process. 
+ */ + return 0; +} diff --git a/nixpkgs/pkgs/build-support/make-darwin-bundle/default.nix b/nixpkgs/pkgs/build-support/make-darwin-bundle/default.nix new file mode 100644 index 000000000000..52dd54b0b2c4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/make-darwin-bundle/default.nix @@ -0,0 +1,26 @@ +# given a package with an executable and an icon, make a darwin bundle for +# it. This package should be used when generating launchers for native Darwin +# applications. If the package conatins a .desktop file use +# `desktopToDarwinLauncher` instead. + +{ lib, writeShellScript, writeDarwinBundle }: + +{ name # The name of the Application file. +, exec # Executable file. +, icon ? "" # Optional icon file. +}: + +writeShellScript "make-darwin-bundle-${name}" ('' + function makeDarwinBundlePhase() { + mkdir -p "''${!outputBin}/Applications/${name}.app/Contents/MacOS" + mkdir -p "''${!outputBin}/Applications/${name}.app/Contents/Resources" + + if [ -n "${icon}" ]; then + ln -s "${icon}" "''${!outputBin}/Applications/${name}.app/Contents/Resources" + fi + + ${writeDarwinBundle}/bin/write-darwin-bundle "''${!outputBin}" "${name}" "${exec}" + } + + preDistPhases+=" makeDarwinBundlePhase" +'') diff --git a/nixpkgs/pkgs/build-support/make-darwin-bundle/write-darwin-bundle.nix b/nixpkgs/pkgs/build-support/make-darwin-bundle/write-darwin-bundle.nix new file mode 100644 index 000000000000..752cbbde2a31 --- /dev/null +++ b/nixpkgs/pkgs/build-support/make-darwin-bundle/write-darwin-bundle.nix @@ -0,0 +1,47 @@ +{ writeScriptBin, lib, makeBinaryWrapper }: + +let + pListText = lib.generators.toPlist { } { + CFBundleDevelopmentRegion = "English"; + CFBundleExecutable = "$name"; + CFBundleIconFile = "$icon"; + CFBundleIconFiles = [ "$icon" ]; + CFBundleIdentifier = "org.nixos.$name"; + CFBundleInfoDictionaryVersion = "6.0"; + CFBundleName = "$name"; + CFBundlePackageType = "APPL"; + CFBundleSignature = "???"; + }; +in writeScriptBin "write-darwin-bundle" '' + shopt -s nullglob + + readonly prefix=$1 + readonly name=$2 + # TODO: support executables with spaces in their names + readonly execName=''${3%% *} # Before the first space + [[ $3 =~ " " ]] && readonly execArgs=''${3#* } # Everything after the first space + readonly icon=$4.icns + readonly squircle=''${5:-1} + readonly plist=$prefix/Applications/$name.app/Contents/Info.plist + readonly binary=$prefix/bin/$execName + readonly bundleExecutable=$prefix/Applications/$name.app/Contents/MacOS/$name + + cat > "$plist" <<EOF +${pListText} +EOF + + if [[ $squircle == 0 || $squircle == "false" ]]; then + sed '/CFBundleIconFiles/,\|</array>|d' -i "$plist" + fi + + if [[ -n "$execArgs" ]]; then + ( + source ${makeBinaryWrapper}/nix-support/setup-hook + # WORKAROUND: makeBinaryWrapper fails when -u is set + set +u + makeBinaryWrapper "$binary" "$bundleExecutable" --add-flags "$execArgs" + ) + else + ln -s "$binary" "$bundleExecutable" + fi +'' diff --git a/nixpkgs/pkgs/build-support/make-desktopitem/default.nix b/nixpkgs/pkgs/build-support/make-desktopitem/default.nix new file mode 100644 index 000000000000..ccceb23256b6 --- /dev/null +++ b/nixpkgs/pkgs/build-support/make-desktopitem/default.nix @@ -0,0 +1,118 @@ +{ lib, writeTextFile, buildPackages }: + +# All possible values as defined by the spec, version 1.4. +# Please keep in spec order for easier maintenance. +# When adding a new value, don't forget to update the Version field below! 
+# See https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html +lib.makeOverridable ({ name # The name of the desktop file +, type ? "Application" +# version is hardcoded +, desktopName # The name of the application +, genericName ? null +, noDisplay ? null +, comment ? null +, icon ? null +# we don't support the Hidden key - if you don't need something, just don't install it +, onlyShowIn ? [] +, notShowIn ? [] +, dbusActivatable ? null +, tryExec ? null +, exec ? null +, path ? null +, terminal ? null +, actions ? {} # An attrset of [internal name] -> { name, exec?, icon? } +, mimeTypes ? [] # The spec uses "MimeType" as singular, use plural here to signify list-ness +, categories ? [] +, implements ? [] +, keywords ? [] +, startupNotify ? null +, startupWMClass ? null +, url ? null +, prefersNonDefaultGPU ? null +# not supported until version 1.5, which is not supported by our desktop-file-utils as of 2022-02-23 +# , singleMainWindow ? null +, extraConfig ? {} # Additional values to be added literally to the final item, e.g. vendor extensions +}: +let + # There are multiple places in the FDO spec that make "boolean" values actually tristate, + # e.g. StartupNotify, where "unset" is literally defined as "do something reasonable". + # So, handle null values separately. + boolOrNullToString = value: + if value == null then null + else if builtins.isBool value then lib.boolToString value + else throw "makeDesktopItem: value must be a boolean or null!"; + + # Multiple values are represented as one string, joined by semicolons. + # Technically, it's possible to escape semicolons in values with \;, but this is currently not implemented. + renderList = key: value: + if !builtins.isList value then throw "makeDesktopItem: value for ${key} must be a list!" + else if builtins.any (item: lib.hasInfix ";" item) value then throw "makeDesktopItem: values in ${key} list must not contain semicolons!" + else if value == [] then null + else builtins.concatStringsSep ";" value; + + # The [Desktop Entry] section of the desktop file, as an attribute set. + # Please keep in spec order. + mainSection = { + "Type" = type; + "Version" = "1.4"; + "Name" = desktopName; + "GenericName" = genericName; + "NoDisplay" = boolOrNullToString noDisplay; + "Comment" = comment; + "Icon" = icon; + "OnlyShowIn" = renderList "onlyShowIn" onlyShowIn; + "NotShowIn" = renderList "notShowIn" notShowIn; + "DBusActivatable" = boolOrNullToString dbusActivatable; + "TryExec" = tryExec; + "Exec" = exec; + "Path" = path; + "Terminal" = boolOrNullToString terminal; + "Actions" = renderList "actions" (builtins.attrNames actions); + "MimeType" = renderList "mimeTypes" mimeTypes; + "Categories" = renderList "categories" categories; + "Implements" = renderList "implements" implements; + "Keywords" = renderList "keywords" keywords; + "StartupNotify" = boolOrNullToString startupNotify; + "StartupWMClass" = startupWMClass; + "URL" = url; + "PrefersNonDefaultGPU" = boolOrNullToString prefersNonDefaultGPU; + # "SingleMainWindow" = boolOrNullToString singleMainWindow; + } // extraConfig; + + # Render a single attribute pair to a Key=Value line. + # FIXME: this isn't entirely correct for arbitrary strings, as some characters + # need to be escaped. There are currently none in nixpkgs though, so this is OK. + renderLine = name: value: if value != null then "${name}=${value}" else null; + + # Render a full section of the file from an attrset. + # Null values are intentionally left out. 
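+  # For example (illustrative values; attributes are emitted in attribute-name order):
+  #   renderSection "Desktop Entry" { Name = "Foo"; Type = "Application"; Comment = null; }
+  # evaluates to
+  #   [Desktop Entry]
+  #   Name=Foo
+  #   Type=Application
+  # i.e. null-valued keys such as Comment are dropped entirely.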
+ renderSection = sectionName: attrs: + lib.pipe attrs [ + (lib.mapAttrsToList renderLine) + (builtins.filter (v: v != null)) + (builtins.concatStringsSep "\n") + (section: '' + [${sectionName}] + ${section} + '') + ]; + + mainSectionRendered = renderSection "Desktop Entry" mainSection; + + # Convert from javaCase names as used in Nix to PascalCase as used in the spec. + preprocessAction = { name, icon ? null, exec ? null }: { + "Name" = name; + "Icon" = icon; + "Exec" = exec; + }; + renderAction = name: attrs: renderSection "Desktop Action ${name}" (preprocessAction attrs); + actionsRendered = lib.mapAttrsToList renderAction actions; + + content = [ mainSectionRendered ] ++ actionsRendered; +in +writeTextFile { + name = "${name}.desktop"; + destination = "/share/applications/${name}.desktop"; + text = builtins.concatStringsSep "\n" content; + checkPhase = ''${buildPackages.desktop-file-utils}/bin/desktop-file-validate "$target"''; +}) diff --git a/nixpkgs/pkgs/build-support/make-hardcode-gsettings-patch/default.nix b/nixpkgs/pkgs/build-support/make-hardcode-gsettings-patch/default.nix new file mode 100644 index 000000000000..a1d2de21c4cb --- /dev/null +++ b/nixpkgs/pkgs/build-support/make-hardcode-gsettings-patch/default.nix @@ -0,0 +1,60 @@ +{ + runCommand, + git, + coccinelle, + python3, +}: + +/* + Can be used as part of an update script to automatically create a patch + hardcoding the path of all GSettings schemas in C code. + For example: + passthru = { + hardcodeGsettingsPatch = makeHardcodeGsettingsPatch { + inherit src; + schemaIdToVariableMapping = { + ... + }; + }; + + updateScript = + let + updateSource = ...; + updatePatch = _experimental-update-script-combinators.copyAttrOutputToFile "evolution-ews.hardcodeGsettingsPatch" ./hardcode-gsettings.patch; + in + _experimental-update-script-combinators.sequence [ + updateSource + updatePatch + ]; + }; + } + takes as input a mapping from schema path to variable name. + For example `{ "org.gnome.evolution" = "EVOLUTION_SCHEMA_PATH"; }` + hardcodes looking for `org.gnome.evolution` into `@EVOLUTION_SCHEMA_PATH@`. + All schemas must be listed. +*/ +{ + src, + schemaIdToVariableMapping, +}: + +runCommand + "hardcode-gsettings.patch" + { + inherit src; + nativeBuildInputs = [ + git + coccinelle + python3 # For patch script + ]; + } + '' + unpackPhase + cd "''${sourceRoot:-.}" + set -x + cp ${builtins.toFile "glib-schema-to-var.json" (builtins.toJSON schemaIdToVariableMapping)} ./glib-schema-to-var.json + git init + git add -A + spatch --sp-file "${./hardcode-gsettings.cocci}" --dir . --in-place + git diff > "$out" + '' diff --git a/nixpkgs/pkgs/build-support/make-hardcode-gsettings-patch/hardcode-gsettings.cocci b/nixpkgs/pkgs/build-support/make-hardcode-gsettings-patch/hardcode-gsettings.cocci new file mode 100644 index 000000000000..a265f5fac384 --- /dev/null +++ b/nixpkgs/pkgs/build-support/make-hardcode-gsettings-patch/hardcode-gsettings.cocci @@ -0,0 +1,142 @@ +/** + * Since Nix does not have a standard location like /usr/share, + * where GSettings system could look for schemas, we need to point the software to a correct location somehow. + * For executables, we handle this using wrappers but this is not an option for libraries like e-d-s. + * Instead, we hardcode the schema path when creating the settings. + * A schema path (ie org.gnome.evolution) can be replaced by @EVOLUTION_SCHEMA_ID@ + * which is then replaced at build time by substituteAll. 
+ * The mapping is provided in a json file ./glib-schema-to-var.json + */ + +@initialize:python@ +@@ +import json + +cpp_constants = {} + +def register_cpp_constant(const_name, val): + cpp_constants[const_name] = val.strip() + +def resolve_cpp_constant(const_name): + return cpp_constants.get(const_name, const_name) + +with open("./glib-schema-to-var.json") as mapping_file: + schema_to_var = json.load(mapping_file); + +def get_schema_directory(schema_id): + # Sometimes the schema id is referenced using C preprocessor #define constant in the same file + # let’s try to resolve it first. + schema_id = resolve_cpp_constant(schema_id.strip()).strip('"') + if schema_id in schema_to_var: + return f'"@{schema_to_var[schema_id]}@"' + raise Exception(f"Unknown schema path {schema_id!r}, please add it to ./glib-schema-to-var.json") + +@find_cpp_constants@ +identifier const_name; +expression val; +@@ + +#define const_name val + +@script:python record_cpp_constants depends on find_cpp_constants@ +const_name << find_cpp_constants.const_name; +val << find_cpp_constants.val; +@@ + +register_cpp_constant(const_name, val) + + +@depends on ever record_cpp_constants || never record_cpp_constants@ +// We want to run after #define constants have been collected but even if there are no #defines. +expression SCHEMA_ID; +expression settings; +// Coccinelle does not like autocleanup macros in + sections, +// let’s use fresh id with concatenation to produce the code as a string. +fresh identifier schema_source_decl = "g_autoptr(GSettingsSchemaSource) " ## "schema_source"; +fresh identifier schema_decl = "g_autoptr(GSettingsSchema) " ## "schema"; +fresh identifier SCHEMA_DIRECTORY = script:python(SCHEMA_ID) { get_schema_directory(SCHEMA_ID) }; +@@ +-settings = g_settings_new(SCHEMA_ID); ++{ ++ schema_source_decl; ++ schema_decl; ++ schema_source = g_settings_schema_source_new_from_directory(SCHEMA_DIRECTORY, ++ g_settings_schema_source_get_default(), ++ TRUE, ++ NULL); ++ schema = g_settings_schema_source_lookup(schema_source, SCHEMA_ID, FALSE); ++ settings = g_settings_new_full(schema, NULL, NULL); ++} + + +@depends on ever record_cpp_constants || never record_cpp_constants@ +// We want to run after #define constants have been collected but even if there are no #defines. +expression SCHEMA_ID; +expression settings; +expression BACKEND; +// Coccinelle does not like autocleanup macros in + sections, +// let’s use fresh id with concatenation to produce the code as a string. +fresh identifier schema_source_decl = "g_autoptr(GSettingsSchemaSource) " ## "schema_source"; +fresh identifier schema_decl = "g_autoptr(GSettingsSchema) " ## "schema"; +fresh identifier SCHEMA_DIRECTORY = script:python(SCHEMA_ID) { get_schema_directory(SCHEMA_ID) }; +@@ +-settings = g_settings_new_with_backend(SCHEMA_ID, BACKEND); ++{ ++ schema_source_decl; ++ schema_decl; ++ schema_source = g_settings_schema_source_new_from_directory(SCHEMA_DIRECTORY, ++ g_settings_schema_source_get_default(), ++ TRUE, ++ NULL); ++ schema = g_settings_schema_source_lookup(schema_source, SCHEMA_ID, FALSE); ++ settings = g_settings_new_full(schema, BACKEND, NULL); ++} + + +@depends on ever record_cpp_constants || never record_cpp_constants@ +// We want to run after #define constants have been collected but even if there are no #defines. +expression SCHEMA_ID; +expression settings; +expression BACKEND; +expression PATH; +// Coccinelle does not like autocleanup macros in + sections, +// let’s use fresh id with concatenation to produce the code as a string. 
+fresh identifier schema_source_decl = "g_autoptr(GSettingsSchemaSource) " ## "schema_source"; +fresh identifier schema_decl = "g_autoptr(GSettingsSchema) " ## "schema"; +fresh identifier SCHEMA_DIRECTORY = script:python(SCHEMA_ID) { get_schema_directory(SCHEMA_ID) }; +@@ +-settings = g_settings_new_with_backend_and_path(SCHEMA_ID, BACKEND, PATH); ++{ ++ schema_source_decl; ++ schema_decl; ++ schema_source = g_settings_schema_source_new_from_directory(SCHEMA_DIRECTORY, ++ g_settings_schema_source_get_default(), ++ TRUE, ++ NULL); ++ schema = g_settings_schema_source_lookup(schema_source, SCHEMA_ID, FALSE); ++ settings = g_settings_new_full(schema, BACKEND, PATH); ++} + + +@depends on ever record_cpp_constants || never record_cpp_constants@ +// We want to run after #define constants have been collected but even if there are no #defines. +expression SCHEMA_ID; +expression settings; +expression PATH; +// Coccinelle does not like autocleanup macros in + sections, +// let’s use fresh id with concatenation to produce the code as a string. +fresh identifier schema_source_decl = "g_autoptr(GSettingsSchemaSource) " ## "schema_source"; +fresh identifier schema_decl = "g_autoptr(GSettingsSchema) " ## "schema"; +fresh identifier SCHEMA_DIRECTORY = script:python(SCHEMA_ID) { get_schema_directory(SCHEMA_ID) }; +@@ +-settings = g_settings_new_with_path(SCHEMA_ID, PATH); ++{ ++ schema_source_decl; ++ schema_decl; ++ schema_source = g_settings_schema_source_new_from_directory(SCHEMA_DIRECTORY, ++ g_settings_schema_source_get_default(), ++ TRUE, ++ NULL); ++ schema = g_settings_schema_source_lookup(schema_source, SCHEMA_ID, FALSE); ++ settings = g_settings_new_full(schema, NULL, PATH); ++} diff --git a/nixpkgs/pkgs/build-support/make-impure-test.nix b/nixpkgs/pkgs/build-support/make-impure-test.nix new file mode 100644 index 000000000000..84d0b30f426a --- /dev/null +++ b/nixpkgs/pkgs/build-support/make-impure-test.nix @@ -0,0 +1,96 @@ +/* Create tests that run in the nix sandbox with additional access to selected host paths + + This is for example useful for testing hardware where a tests needs access to + /sys and optionally more. + + The following example shows a test that accesses the GPU: + + Example: + makeImpureTest { + name = "opencl"; + testedPackage = "mypackage"; # Or testPath = "mypackage.impureTests.opencl.testDerivation" + + sandboxPaths = [ "/sys" "/dev/dri" ]; # Defaults to ["/sys"] + prepareRunCommands = ""; # (Optional) Setup for the runScript + nixFlags = []; # (Optional) nix-build options for the runScript + + testScript = "..."; + } + + Save as `test.nix` next to a package and reference it from the package: + passthru.impureTests = { opencl = callPackage ./test.nix {}; }; + + `makeImpureTest` will return here a script that contains the actual nix-build command including all necessary sandbox flags. + + It can be executed like this: + $(nix-build -A mypackage.impureTests) + + Rerun an already cached test: + $(nix-build -A mypackage.impureTests) --check +*/ +{ lib +, stdenv +, writeShellScript + +, name +, testedPackage ? null +, testPath ? "${testedPackage}.impureTests.${name}.testDerivation" +, sandboxPaths ? [ "/sys" ] +, prepareRunCommands ? "" +, nixFlags ? [ ] +, testScript +, ... +} @ args: + +let + sandboxPathsTests = builtins.map (path: "[[ ! 
-e '${path}' ]]") sandboxPaths; + sandboxPathsTest = lib.concatStringsSep " || " sandboxPathsTests; + sandboxPathsList = lib.concatStringsSep " " sandboxPaths; + + testDerivation = stdenv.mkDerivation (lib.recursiveUpdate + { + name = "test-run-${name}"; + + requiredSystemFeatures = [ "nixos-test" ]; + + buildCommand = '' + mkdir -p $out + + if ${sandboxPathsTest}; then + echo 'Run this test as *root* with `--option extra-sandbox-paths '"'${sandboxPathsList}'"'`' + exit 1 + fi + + # Run test + ${testScript} + ''; + + passthru.runScript = runScript; + } + (builtins.removeAttrs args [ + "lib" + "stdenv" + "writeShellScript" + + "name" + "testedPackage" + "testPath" + "sandboxPaths" + "prepareRunCommands" + "nixFlags" + "testScript" + ]) + ); + + runScript = writeShellScript "run-script-${name}" '' + set -euo pipefail + + ${prepareRunCommands} + + sudo nix-build --option extra-sandbox-paths '${sandboxPathsList}' ${lib.escapeShellArgs nixFlags} -A ${testPath} "$@" + ''; +in +# The main output is the run script, inject the derivation for the actual test +runScript.overrideAttrs (old: { + passthru = { inherit testDerivation; }; +}) diff --git a/nixpkgs/pkgs/build-support/make-pkgconfigitem/default.nix b/nixpkgs/pkgs/build-support/make-pkgconfigitem/default.nix new file mode 100644 index 000000000000..d3bcabbb940f --- /dev/null +++ b/nixpkgs/pkgs/build-support/make-pkgconfigitem/default.nix @@ -0,0 +1,69 @@ +{ lib, writeTextFile, buildPackages }: + +# See https://people.freedesktop.org/~dbn/pkg-config-guide.html#concepts +{ name # The name of the pc file + # keywords + # provide a default description for convenience. it's not important but still required by pkg-config. +, description ? "A pkg-config file for ${name}" +, url ? "" +, version ? "" +, requires ? [ ] +, requiresPrivate ? [ ] +, conflicts ? [ ] +, cflags ? [ ] +, libs ? [ ] +, libsPrivate ? [ ] +, variables ? 
{ } +}: + +let + # only 'out' has to be changed, otherwise it would be replaced by the out of the writeTextFile + placeholderToSubstVar = builtins.replaceStrings [ "${placeholder "out"}" ] [ "@out@" ]; + + replacePlaceholderAndListToString = x: + if builtins.isList x + then placeholderToSubstVar (builtins.concatStringsSep " " x) + else placeholderToSubstVar x; + + keywordsSection = + let + mustBeAList = attr: attrName: lib.throwIfNot (lib.isList attr) "'${attrName}' must be a list" attr; + in + { + "Name" = name; + "Description" = description; + "URL" = url; + "Version" = version; + "Requires" = mustBeAList requires "requires"; + "Requires.private" = mustBeAList requiresPrivate "requiresPrivate"; + "Conflicts" = mustBeAList conflicts "conflicts"; + "Cflags" = mustBeAList cflags "cflags"; + "Libs" = mustBeAList libs "libs"; + "Libs.private" = mustBeAList libsPrivate "libsPrivate"; + }; + + renderVariable = name: value: + lib.optionalString (value != "" && value != [ ]) "${name}=${replacePlaceholderAndListToString value}"; + renderKeyword = name: value: + lib.optionalString (value != "" && value != [ ]) "${name}: ${replacePlaceholderAndListToString value}"; + + renderSomething = renderFunc: attrs: + lib.pipe attrs [ + (lib.mapAttrsToList renderFunc) + (builtins.filter (v: v != "")) + (builtins.concatStringsSep "\n") + (section: ''${section} + '') + ]; + + variablesSectionRendered = renderSomething renderVariable variables; + keywordsSectionRendered = renderSomething renderKeyword keywordsSection; + + content = [ variablesSectionRendered keywordsSectionRendered ]; +in +writeTextFile { + name = "${name}.pc"; + destination = "/lib/pkgconfig/${name}.pc"; + text = builtins.concatStringsSep "\n" content; + checkPhase = ''${buildPackages.pkg-config}/bin/${buildPackages.pkg-config.targetPrefix}pkg-config --validate "$target"''; +} diff --git a/nixpkgs/pkgs/build-support/make-startupitem/default.nix b/nixpkgs/pkgs/build-support/make-startupitem/default.nix new file mode 100644 index 000000000000..94bf07cfbbe4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/make-startupitem/default.nix @@ -0,0 +1,35 @@ +# given a package with a $name.desktop file, makes a copy +# as autostart item. + +{stdenv, lib}: +{ name # name of the desktop file (without .desktop) +, package # package where the desktop file resides in +, srcPrefix ? "" # additional prefix that the desktop file may have in the 'package' +, after ? null +, condition ? null +, phase ? "2" +}: + +# the builder requires that +# $package/share/applications/$name.desktop +# exists as file. + +stdenv.mkDerivation { + name = "autostart-${name}"; + priority = 5; + + buildCommand = '' + mkdir -p $out/etc/xdg/autostart + target=${name}.desktop + cp ${package}/share/applications/${srcPrefix}${name}.desktop $target + chmod +rw $target + echo "X-KDE-autostart-phase=${phase}" >> $target + ${lib.optionalString (after != null) ''echo "${after}" >> $target''} + ${lib.optionalString (condition != null) ''echo "${condition}" >> $target''} + cp $target $out/etc/xdg/autostart + ''; + + # this will automatically put 'package' in the environment when you + # put its startup item in there. 
+ propagatedBuildInputs = [ package ]; +} diff --git a/nixpkgs/pkgs/build-support/mkshell/default.nix b/nixpkgs/pkgs/build-support/mkshell/default.nix new file mode 100644 index 000000000000..3517e949f67a --- /dev/null +++ b/nixpkgs/pkgs/build-support/mkshell/default.nix @@ -0,0 +1,58 @@ +{ lib, stdenv, buildEnv }: + +# A special kind of derivation that is only meant to be consumed by the +# nix-shell. +{ name ? "nix-shell" +, # a list of packages to add to the shell environment + packages ? [ ] +, # propagate all the inputs from the given derivations + inputsFrom ? [ ] +, buildInputs ? [ ] +, nativeBuildInputs ? [ ] +, propagatedBuildInputs ? [ ] +, propagatedNativeBuildInputs ? [ ] +, ... +}@attrs: +let + mergeInputs = name: + (attrs.${name} or [ ]) ++ + (lib.subtractLists inputsFrom (lib.flatten (lib.catAttrs name inputsFrom))); + + rest = builtins.removeAttrs attrs [ + "name" + "packages" + "inputsFrom" + "buildInputs" + "nativeBuildInputs" + "propagatedBuildInputs" + "propagatedNativeBuildInputs" + "shellHook" + ]; +in + +stdenv.mkDerivation ({ + inherit name; + + buildInputs = mergeInputs "buildInputs"; + nativeBuildInputs = packages ++ (mergeInputs "nativeBuildInputs"); + propagatedBuildInputs = mergeInputs "propagatedBuildInputs"; + propagatedNativeBuildInputs = mergeInputs "propagatedNativeBuildInputs"; + + shellHook = lib.concatStringsSep "\n" (lib.catAttrs "shellHook" + (lib.reverseList inputsFrom ++ [ attrs ])); + + phases = [ "buildPhase" ]; + + buildPhase = '' + { echo "------------------------------------------------------------"; + echo " WARNING: the existence of this path is not guaranteed."; + echo " It is an internal implementation detail for pkgs.mkShell."; + echo "------------------------------------------------------------"; + echo; + # Record all build inputs as runtime dependencies + export; + } >> "$out" + ''; + + preferLocalBuild = true; +} // rest) diff --git a/nixpkgs/pkgs/build-support/mono-dll-fixer/default.nix b/nixpkgs/pkgs/build-support/mono-dll-fixer/default.nix new file mode 100644 index 000000000000..09a986015eed --- /dev/null +++ b/nixpkgs/pkgs/build-support/mono-dll-fixer/default.nix @@ -0,0 +1,11 @@ +{stdenv, perl}: +stdenv.mkDerivation { + name = "mono-dll-fixer"; + dllFixer = ./dll-fixer.pl; + dontUnpack = true; + installPhase = '' + substitute $dllFixer $out --subst-var-by perl $perl/bin/perl + chmod +x $out + ''; + inherit perl; +} diff --git a/nixpkgs/pkgs/build-support/mono-dll-fixer/dll-fixer.pl b/nixpkgs/pkgs/build-support/mono-dll-fixer/dll-fixer.pl new file mode 100644 index 000000000000..4a8b468692f0 --- /dev/null +++ b/nixpkgs/pkgs/build-support/mono-dll-fixer/dll-fixer.pl @@ -0,0 +1,32 @@ +#! @perl@ -w + +use strict; + +my @paths = split ' ', $ENV{"ALL_INPUTS"}; + +open IN, "<$ARGV[0]" or die; +open OUT, ">$ARGV[0].tmp" or die; + +while (<IN>) { + # !!! should use a real XML library here. 
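+    # Lines that are not <dllmap dll="..." target="..."/> entries are copied
+    # through unchanged; for each dllmap entry, the first input path that
+    # provides lib/<target> is substituted as an absolute target below.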
+ if (!/<dllmap dll="(.*)" target="(.*)"\/>/) { + print OUT; + next; + } + my $dll = $1; + my $target = $2; + + foreach my $path (@paths) { + my $fullPath = "$path/lib/$target"; + if (-e "$fullPath") { + $target = $fullPath; + last; + } + } + + print OUT " <dllmap dll=\"$dll\" target=\"$target\"/>\n"; +} + +close IN; + +rename "$ARGV[0].tmp", "$ARGV[0]" or die "cannot rename $ARGV[0]"; diff --git a/nixpkgs/pkgs/build-support/nix-gitignore/default.nix b/nixpkgs/pkgs/build-support/nix-gitignore/default.nix new file mode 100644 index 000000000000..c047bfc7d9a2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/nix-gitignore/default.nix @@ -0,0 +1,175 @@ +# https://github.com/siers/nix-gitignore/ + +{ lib, runCommand }: + +# An interesting bit from the gitignore(5): +# - A slash followed by two consecutive asterisks then a slash matches +# - zero or more directories. For example, "a/**/b" matches "a/b", +# - "a/x/b", "a/x/y/b" and so on. + +with builtins; + +let + debug = a: trace a a; + last = l: elemAt l ((length l) - 1); +in rec { + # [["good/relative/source/file" true] ["bad.tmpfile" false]] -> root -> path + filterPattern = patterns: root: + (name: _type: + let + relPath = lib.removePrefix ((toString root) + "/") name; + matches = pair: (match (head pair) relPath) != null; + matched = map (pair: [(matches pair) (last pair)]) patterns; + in + last (last ([[true true]] ++ (filter head matched))) + ); + + # string -> [[regex bool]] + gitignoreToPatterns = gitignore: + let + # ignore -> bool + isComment = i: (match "^(#.*|$)" i) != null; + + # ignore -> [ignore bool] + computeNegation = l: + let split = match "^(!?)(.*)" l; + in [(elemAt split 1) (head split == "!")]; + + # regex -> regex + handleHashesBangs = replaceStrings ["\\#" "\\!"] ["#" "!"]; + + # ignore -> regex + substWildcards = + let + special = "^$.+{}()"; + escs = "\\*?"; + splitString = + let recurse = str : [(substring 0 1 str)] ++ + (lib.optionals (str != "") (recurse (substring 1 (stringLength(str)) str) )); + in str : recurse str; + chars = s: filter (c: c != "" && !isList c) (splitString s); + escape = s: map (c: "\\" + c) (chars s); + in + replaceStrings + ((chars special) ++ (escape escs) ++ ["**/" "**" "*" "?"]) + ((escape special) ++ (escape escs) ++ ["(.*/)?" 
".*" "[^/]*" "[^/]"]); + + # (regex -> regex) -> regex -> regex + mapAroundCharclass = f: r: # rl = regex or list + let slightFix = replaceStrings ["\\]"] ["]"]; + in + concatStringsSep "" + (map (rl: if isList rl then slightFix (elemAt rl 0) else f rl) + (split "(\\[([^\\\\]|\\\\.)+])" r)); + + # regex -> regex + handleSlashPrefix = l: + let + split = (match "^(/?)(.*)" l); + findSlash = l: lib.optionalString ((match ".+/.+" l) == null) l; + hasSlash = mapAroundCharclass findSlash l != l; + in + (if (elemAt split 0) == "/" || hasSlash + then "^" + else "(^|.*/)" + ) + (elemAt split 1); + + # regex -> regex + handleSlashSuffix = l: + let split = (match "^(.*)/$" l); + in if split != null then (elemAt split 0) + "($|/.*)" else l; + + # (regex -> regex) -> [regex, bool] -> [regex, bool] + mapPat = f: l: [(f (head l)) (last l)]; + in + map (l: # `l' for "line" + mapPat (l: handleSlashSuffix (handleSlashPrefix (handleHashesBangs (mapAroundCharclass substWildcards l)))) + (computeNegation l)) + (filter (l: !isList l && !isComment l) + (split "\n" gitignore)); + + gitignoreFilter = ign: root: filterPattern (gitignoreToPatterns ign) root; + + # string|[string|file] (→ [string|file] → [string]) -> string + gitignoreCompileIgnore = file_str_patterns: root: + let + onPath = f: a: if typeOf a == "path" then f a else a; + str_patterns = map (onPath readFile) (lib.toList file_str_patterns); + in concatStringsSep "\n" str_patterns; + + gitignoreFilterPure = filter: patterns: root: name: type: + gitignoreFilter (gitignoreCompileIgnore patterns root) root name type + && filter name type; + + # This is a very hacky way of programming this! + # A better way would be to reuse existing filtering by making multiple gitignore functions per each root. + # Then for each file find the set of roots with gitignores (and functions). + # This would make gitignoreFilterSource very different from gitignoreFilterPure. + # rootPath → gitignoresConcatenated + compileRecursiveGitignore = root: + let + dirOrIgnore = file: type: baseNameOf file == ".gitignore" || type == "directory"; + ignores = builtins.filterSource dirOrIgnore root; + in readFile ( + runCommand "${baseNameOf root}-recursive-gitignore" {} '' + cd ${ignores} + + find -type f -exec sh -c ' + rel="$(realpath --relative-to=. 
"$(dirname "$1")")/" + if [ "$rel" = "./" ]; then rel=""; fi + + awk -v prefix="$rel" -v root="$1" -v top="$(test -z "$rel" && echo 1)" " + BEGIN { print \"# \"root } + + /^!?[^\\/]+\/?$/ { + match(\$0, /^!?/, negation) + sub(/^!?/, \"\") + + if (top) { middle = \"\" } else { middle = \"**/\" } + + print negation[0] prefix middle \$0 + } + + /^!?(\\/|.*\\/.+$)/ { + match(\$0, /^!?/, negation) + sub(/^!?/, \"\") + + if (!top) sub(/^\//, \"\") + + print negation[0] prefix \$0 + } + + END { print \"\" } + " "$1" + ' sh {} \; > $out + ''); + + withGitignoreFile = patterns: root: + lib.toList patterns ++ [ ".git" ] ++ [(root + "/.gitignore")]; + + withRecursiveGitignoreFile = patterns: root: + lib.toList patterns ++ [ ".git" ] ++ [(compileRecursiveGitignore root)]; + + # filterSource derivatives + + gitignoreFilterSourcePure = filter: patterns: root: + filterSource (gitignoreFilterPure filter patterns root) root; + + gitignoreFilterSource = filter: patterns: root: + gitignoreFilterSourcePure filter (withGitignoreFile patterns root) root; + + gitignoreFilterRecursiveSource = filter: patterns: root: + gitignoreFilterSourcePure filter (withRecursiveGitignoreFile patterns root) root; + + # "Filter"-less alternatives + + gitignoreSourcePure = gitignoreFilterSourcePure (_: _: true); + gitignoreSource = patterns: let type = typeOf patterns; in + if (type == "string" && pathExists patterns) || type == "path" + then throw + "type error in gitignoreSource(patterns -> source -> path), " + "use [] or \"\" if there are no additional patterns" + else gitignoreFilterSource (_: _: true) patterns; + + gitignoreRecursiveSource = gitignoreFilterSourcePure (_: _: true); +} diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/default.nix b/nixpkgs/pkgs/build-support/node/build-npm-package/default.nix new file mode 100644 index 000000000000..7cfc0e9f9c0a --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/build-npm-package/default.nix @@ -0,0 +1,66 @@ +{ lib, stdenv, fetchNpmDeps, buildPackages, nodejs }: + +{ name ? "${args.pname}-${args.version}" +, src ? null +, srcs ? null +, sourceRoot ? null +, prePatch ? "" +, patches ? [ ] +, postPatch ? "" +, nativeBuildInputs ? [ ] +, buildInputs ? [ ] + # The output hash of the dependencies for this project. + # Can be calculated in advance with prefetch-npm-deps. +, npmDepsHash ? "" + # Whether to force the usage of Git dependencies that have install scripts, but not a lockfile. + # Use with care. +, forceGitDeps ? false + # Whether to make the cache writable prior to installing dependencies. + # Don't set this unless npm tries to write to the cache directory, as it can slow down the build. +, makeCacheWritable ? false + # The script to run to build the project. +, npmBuildScript ? "build" + # Flags to pass to all npm commands. +, npmFlags ? [ ] + # Flags to pass to `npm ci`. +, npmInstallFlags ? [ ] + # Flags to pass to `npm rebuild`. +, npmRebuildFlags ? [ ] + # Flags to pass to `npm run ${npmBuildScript}`. +, npmBuildFlags ? [ ] + # Flags to pass to `npm pack`. +, npmPackFlags ? [ ] + # Flags to pass to `npm prune`. +, npmPruneFlags ? npmInstallFlags + # Value for npm `--workspace` flag and directory in which the files to be installed are found. +, npmWorkspace ? null +, ... 
+} @ args: + +let + npmDeps = fetchNpmDeps { + inherit forceGitDeps src srcs sourceRoot prePatch patches postPatch; + name = "${name}-npm-deps"; + hash = npmDepsHash; + }; + + # .override {} negates splicing, so we need to use buildPackages explicitly + npmHooks = buildPackages.npmHooks.override { + inherit nodejs; + }; + + inherit (npmHooks) npmConfigHook npmBuildHook npmInstallHook; +in +stdenv.mkDerivation (args // { + inherit npmDeps npmBuildScript; + + nativeBuildInputs = nativeBuildInputs ++ [ nodejs npmConfigHook npmBuildHook npmInstallHook ]; + buildInputs = buildInputs ++ [ nodejs ]; + + strictDeps = true; + + # Stripping takes way too long with the amount of files required by a typical Node.js project. + dontStrip = args.dontStrip or true; + + meta = (args.meta or { }) // { platforms = args.meta.platforms or nodejs.meta.platforms; }; +}) diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/default.nix b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/default.nix new file mode 100644 index 000000000000..36f0319e3d23 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/default.nix @@ -0,0 +1,48 @@ +{ lib +, srcOnly +, makeSetupHook +, makeWrapper +, nodejs +, jq +, prefetch-npm-deps +, diffutils +, installShellFiles +}: + +{ + npmConfigHook = makeSetupHook + { + name = "npm-config-hook"; + substitutions = { + nodeSrc = srcOnly nodejs; + nodeGyp = "${nodejs}/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js"; + + # Specify `diff`, `jq`, and `prefetch-npm-deps` by abspath to ensure that the user's build + # inputs do not cause us to find the wrong binaries. + diff = "${diffutils}/bin/diff"; + jq = "${jq}/bin/jq"; + prefetchNpmDeps = "${prefetch-npm-deps}/bin/prefetch-npm-deps"; + + nodeVersion = nodejs.version; + nodeVersionMajor = lib.versions.major nodejs.version; + }; + } ./npm-config-hook.sh; + + npmBuildHook = makeSetupHook + { + name = "npm-build-hook"; + } ./npm-build-hook.sh; + + npmInstallHook = makeSetupHook + { + name = "npm-install-hook"; + propagatedBuildInputs = [ + installShellFiles + makeWrapper + ]; + substitutions = { + hostNode = "${nodejs}/bin/node"; + jq = "${jq}/bin/jq"; + }; + } ./npm-install-hook.sh; +} diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-build-hook.sh b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-build-hook.sh new file mode 100644 index 000000000000..c341f672363a --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-build-hook.sh @@ -0,0 +1,38 @@ +# shellcheck shell=bash + +npmBuildHook() { + echo "Executing npmBuildHook" + + runHook preBuild + + if [ -z "${npmBuildScript-}" ]; then + echo + echo "ERROR: no build script was specified" + echo 'Hint: set `npmBuildScript`, override `buildPhase`, or set `dontNpmBuild = true`.' + echo + + exit 1 + fi + + if ! npm run ${npmWorkspace+--workspace=$npmWorkspace} "$npmBuildScript" $npmBuildFlags "${npmBuildFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"; then + echo + echo 'ERROR: `npm build` failed' + echo + echo "Here are a few things you can try, depending on the error:" + echo "1. Make sure your build script ($npmBuildScript) exists" + echo ' If there is none, set `dontNpmBuild = true`.' + echo '2. If the error being thrown is something similar to "error:0308010C:digital envelope routines::unsupported", add `NODE_OPTIONS = "--openssl-legacy-provider"` to your derivation' + echo " See https://github.com/webpack/webpack/issues/14532 for more information." 
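        # Editorial note, not part of this hook: the NODE_OPTIONS hint above maps to
        # adding an attribute like the following (value quoted from the message) to
        # the calling Nix derivation, next to npmDepsHash and the other arguments:
        #
        #   NODE_OPTIONS = "--openssl-legacy-provider";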
+ echo + + exit 1 + fi + + runHook postBuild + + echo "Finished npmBuildHook" +} + +if [ -z "${dontNpmBuild-}" ] && [ -z "${buildPhase-}" ]; then + buildPhase=npmBuildHook +fi diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-config-hook.sh b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-config-hook.sh new file mode 100644 index 000000000000..486b0c2f8372 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-config-hook.sh @@ -0,0 +1,119 @@ +# shellcheck shell=bash + +npmConfigHook() { + echo "Executing npmConfigHook" + + # Use npm patches in the nodejs package + export NIX_NODEJS_BUILDNPMPACKAGE=1 + export prefetchNpmDeps="@prefetchNpmDeps@" + + if [ -n "${npmRoot-}" ]; then + pushd "$npmRoot" + fi + + echo "Configuring npm" + + export HOME="$TMPDIR" + export npm_config_nodedir="@nodeSrc@" + export npm_config_node_gyp="@nodeGyp@" + + if [ -z "${npmDeps-}" ]; then + echo + echo "ERROR: no dependencies were specified" + echo 'Hint: set `npmDeps` if using these hooks individually. If this is happening with `buildNpmPackage`, please open an issue.' + echo + + exit 1 + fi + + local -r cacheLockfile="$npmDeps/package-lock.json" + local -r srcLockfile="$PWD/package-lock.json" + + echo "Validating consistency between $srcLockfile and $cacheLockfile" + + if ! @diff@ "$srcLockfile" "$cacheLockfile"; then + # If the diff failed, first double-check that the file exists, so we can + # give a friendlier error message. + if ! [ -e "$srcLockfile" ]; then + echo + echo "ERROR: Missing package-lock.json from src. Expected to find it at: $srcLockfile" + echo "Hint: You can copy a vendored package-lock.json file via postPatch." + echo + + exit 1 + fi + + if ! [ -e "$cacheLockfile" ]; then + echo + echo "ERROR: Missing lockfile from cache. Expected to find it at: $cacheLockfile" + echo + + exit 1 + fi + + echo + echo "ERROR: npmDepsHash is out of date" + echo + echo "The package-lock.json in src is not the same as the one in $npmDeps." + echo + echo "To fix the issue:" + echo '1. Use `lib.fakeHash` as the npmDepsHash value' + echo "2. Build the derivation and wait for it to fail with a hash mismatch" + echo "3. Copy the 'got: sha256-' value back into the npmDepsHash field" + echo + + exit 1 + fi + + export CACHE_MAP_PATH="$TMP/MEOW" + @prefetchNpmDeps@ --map-cache + + @prefetchNpmDeps@ --fixup-lockfile "$srcLockfile" + + local cachePath + + if [ -z "${makeCacheWritable-}" ]; then + cachePath="$npmDeps" + else + echo "Making cache writable" + cp -r "$npmDeps" "$TMPDIR/cache" + chmod -R 700 "$TMPDIR/cache" + cachePath="$TMPDIR/cache" + fi + + npm config set cache "$cachePath" + npm config set offline true + npm config set progress false + + echo "Installing dependencies" + + if ! npm ci --ignore-scripts $npmInstallFlags "${npmInstallFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"; then + echo + echo "ERROR: npm failed to install dependencies" + echo + echo "Here are a few things you can try, depending on the error:" + echo '1. Set `makeCacheWritable = true`' + echo " Note that this won't help if npm is complaining about not being able to write to the logs directory -- look above that for the actual error." + echo '2. 
Set `npmFlags = [ "--legacy-peer-deps" ]`' + echo + + exit 1 + fi + + patchShebangs node_modules + + npm rebuild $npmRebuildFlags "${npmRebuildFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}" + + patchShebangs node_modules + + rm "$CACHE_MAP_PATH" + unset CACHE_MAP_PATH + + if [ -n "${npmRoot-}" ]; then + popd + fi + + echo "Finished npmConfigHook" +} + +postPatchHooks+=(npmConfigHook) diff --git a/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh new file mode 100644 index 000000000000..79e2c4b26860 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/build-npm-package/hooks/npm-install-hook.sh @@ -0,0 +1,71 @@ +# shellcheck shell=bash + +npmInstallHook() { + echo "Executing npmInstallHook" + + runHook preInstall + + # `npm pack` writes to cache + npm config delete cache + + local -r packageOut="$out/lib/node_modules/$(@jq@ --raw-output '.name' package.json)" + + while IFS= read -r file; do + local dest="$packageOut/$(dirname "$file")" + mkdir -p "$dest" + cp "${npmWorkspace-.}/$file" "$dest" + done < <(@jq@ --raw-output '.[0].files | map(.path) | join("\n")' <<< "$(npm pack --json --dry-run ${npmWorkspace+--workspace=$npmWorkspace} $npmPackFlags "${npmPackFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}")") + + # Based on code from Python's buildPythonPackage wrap.sh script, for + # supporting both the case when makeWrapperArgs is an array and a + # IFS-separated string. + # + # TODO: remove the string branch when __structuredAttrs are used. + if [[ "${makeWrapperArgs+defined}" == "defined" && "$(declare -p makeWrapperArgs)" =~ ^'declare -a makeWrapperArgs=' ]]; then + local -a user_args=("${makeWrapperArgs[@]}") + else + local -a user_args="(${makeWrapperArgs:-})" + fi + while IFS=" " read -ra bin; do + mkdir -p "$out/bin" + makeWrapper @hostNode@ "$out/bin/${bin[0]}" --add-flags "$packageOut/${bin[1]}" "${user_args[@]}" + done < <(@jq@ --raw-output '(.bin | type) as $typ | if $typ == "string" then + .name + " " + .bin + elif $typ == "object" then .bin | to_entries | map(.key + " " + .value) | join("\n") + else "invalid type " + $typ | halt_error end' "${npmWorkspace-.}/package.json") + + while IFS= read -r man; do + installManPage "$packageOut/$man" + done < <(@jq@ --raw-output '(.man | type) as $typ | if $typ == "string" then .man + elif $typ == "list" then .man | join("\n") + else "invalid type " + $typ | halt_error end' "${npmWorkspace-.}/package.json") + + local -r nodeModulesPath="$packageOut/node_modules" + + if [ ! -d "$nodeModulesPath" ]; then + if [ -z "${dontNpmPrune-}" ]; then + if ! npm prune --omit=dev --no-save ${npmWorkspace+--workspace=$npmWorkspace} $npmPruneFlags "${npmPruneFlagsArray[@]}" $npmFlags "${npmFlagsArray[@]}"; then + echo + echo + echo "ERROR: npm prune step failed" + echo + echo 'If npm tried to download additional dependencies above, try setting `dontNpmPrune = true`.' 
+ echo + + exit 1 + fi + fi + + find node_modules -maxdepth 1 -type d -empty -delete + + cp -r node_modules "$nodeModulesPath" + fi + + runHook postInstall + + echo "Finished npmInstallHook" +} + +if [ -z "${dontNpmInstall-}" ] && [ -z "${installPhase-}" ]; then + installPhase=npmInstallHook +fi diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/.gitignore b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/.gitignore new file mode 100644 index 000000000000..ea8c4bf7f35f --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/.gitignore @@ -0,0 +1 @@ +/target diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.lock b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.lock new file mode 100644 index 000000000000..4f6e177fd9ae --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.lock @@ -0,0 +1,1046 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backoff" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1" +dependencies = [ + "getrandom", + "instant", + "rand", +] + +[[package]] +name = "base64" +version = "0.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "castaway" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "concurrent-queue" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "cpufeatures" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +dependencies = [ + "libc", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "curl" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22" +dependencies = [ + "curl-sys", + "libc", + "openssl-probe", + "openssl-sys", + "schannel", + "socket2", + "winapi", +] + +[[package]] +name = "curl-sys" +version = "0.4.67+curl-8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cc35d066510b197a0f72de863736641539957628c8a42e70e27c66849e77c34" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", + "windows-sys", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "env_logger" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "errno" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures-core" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" + +[[package]] +name = "futures-io" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand 1.9.0", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "hermit-abi" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" + +[[package]] +name = "http" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi", + "rustix", + "windows-sys", +] + +[[package]] +name = "isahc" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" +dependencies = [ + "async-channel", + "castaway", + "crossbeam-utils", + "curl", + "curl-sys", + "event-listener", + "futures-lite", + "http", + "log", + "once_cell", + "polling", + "slab", + "sluice", + "tracing", + "tracing-futures", + "url", + "waker-fn", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "libc" +version = "0.2.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" + +[[package]] +name = "libz-sys" +version = "1.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "memchr" +version = "2.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" + +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e52c774a4c39359c1d1c52e43f73dd91a75a614652c825408eec30c95a9b2067" + +[[package]] +name = "percent-encoding" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" + +[[package]] +name = "pin-project" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "polling" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if", + "concurrent-queue", + "libc", + "log", + "pin-project-lite", + "windows-sys", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "prefetch-npm-deps" +version = "0.1.0" +dependencies = [ + "anyhow", + "backoff", + "base64", + "digest", + "env_logger", + "isahc", + "rayon", + "serde", + "serde_json", + "sha1", + "sha2", + "tempfile", + "url", + "walkdir", +] + +[[package]] +name = "proc-macro2" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rayon" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff" +dependencies = [ + "aho-corasick", + "memchr", + 
"regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" + +[[package]] +name = "rustix" +version = "0.38.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a74ee2d7c2581cd139b42447d7d9389b889bdaad3a73f1ebb16f2a3237bb19c" +dependencies = [ + "bitflags 2.4.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "serde" +version = "1.0.188" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.188" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "sluice" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" +dependencies = [ + "async-channel", + "futures-core", + "futures-io", +] + +[[package]] +name = "socket2" +version = "0.4.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "syn" +version = "2.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempfile" +version = "3.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +dependencies = [ + "cfg-if", + "fastrand 2.0.1", + "redox_syscall", + "rustix", + "windows-sys", +] + +[[package]] +name = "termcolor" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "url" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +dependencies = [ + 
"form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "waker-fn" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" + +[[package]] +name = "walkdir" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.toml b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.toml new file mode 100644 index 000000000000..0f7735a6e827 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "prefetch-npm-deps" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1.0.75" +backoff = "0.4.0" +base64 = "0.21.4" +digest = "0.10.7" +env_logger = "0.10.0" +isahc = { version = "1.7.2", default_features = false } +rayon = "1.8.0" +serde = { version = "1.0.188", features = ["derive"] } +serde_json = "1.0.107" +sha1 = "0.10.6" +sha2 = "0.10.8" +tempfile = "3.8.0" +url = { version = "2.4.1", features = ["serde"] } +walkdir = "2.4.0" diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/default.nix b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/default.nix new file mode 100644 index 000000000000..67a4c337c0d2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/default.nix @@ -0,0 +1,178 @@ +{ lib, stdenvNoCC, rustPlatform, makeWrapper, pkg-config, curl, gnutar, gzip, nix, testers, fetchurl, cacert, prefetch-npm-deps, fetchNpmDeps }: + +{ + prefetch-npm-deps = rustPlatform.buildRustPackage { + pname = "prefetch-npm-deps"; + version = (lib.importTOML ./Cargo.toml).package.version; + + src = lib.cleanSourceWith { + src = ./.; + filter = name: type: + let + name' = builtins.baseNameOf name; + in + name' != "default.nix" && name' != "target"; + }; + + cargoLock.lockFile = ./Cargo.lock; + + nativeBuildInputs = [ makeWrapper pkg-config ]; + buildInputs = [ curl ]; + + postInstall = '' + wrapProgram "$out/bin/prefetch-npm-deps" --prefix PATH : ${lib.makeBinPath [ gnutar gzip nix ]} + ''; + + passthru.tests = + let + makeTestSrc = { name, src }: stdenvNoCC.mkDerivation { + name = "${name}-src"; + + inherit src; + + buildCommand = '' + mkdir -p $out + cp $src $out/package-lock.json + ''; + }; + + makeTest = { name, src, hash, forceGitDeps ? 
false }: testers.invalidateFetcherByDrvHash fetchNpmDeps { + inherit name hash forceGitDeps; + + src = makeTestSrc { inherit name src; }; + }; + in + { + lockfileV1 = makeTest { + name = "lockfile-v1"; + + src = fetchurl { + url = "https://raw.githubusercontent.com/jellyfin/jellyfin-web/v10.8.4/package-lock.json"; + hash = "sha256-uQmc+S+V1co1Rfc4d82PpeXjmd1UqdsG492ADQFcZGA="; + }; + + hash = "sha256-wca1QvxUw3OrLStfYN9Co6oVBR1LbfcNUKlDqvObps4="; + }; + + lockfileV2 = makeTest { + name = "lockfile-v2"; + + src = fetchurl { + url = "https://raw.githubusercontent.com/jesec/flood/v4.7.0/package-lock.json"; + hash = "sha256-qS29tq5QPnGxV+PU40VgMAtdwVLtLyyhG2z9GMeYtC4="; + }; + + hash = "sha256-tuEfyePwlOy2/mOPdXbqJskO6IowvAP4DWg8xSZwbJw="; + }; + + hashPrecedence = makeTest { + name = "hash-precedence"; + + src = fetchurl { + url = "https://raw.githubusercontent.com/matrix-org/matrix-appservice-irc/0.34.0/package-lock.json"; + hash = "sha256-1+0AQw9EmbHiMPA/H8OP8XenhrkhLRYBRhmd1cNPFjk="; + }; + + hash = "sha256-oItUls7AXcCECuyA+crQO6B0kv4toIr8pBubNwB7kAM="; + }; + + hostedGitDeps = makeTest { + name = "hosted-git-deps"; + + src = fetchurl { + url = "https://cyberchaos.dev/yuka/trainsearch/-/raw/e3cba6427e8ecfd843d0f697251ddaf5e53c2327/package-lock.json"; + hash = "sha256-X9mCwPqV5yP0S2GonNvpYnLSLJMd/SUIked+hMRxDpA="; + }; + + hash = "sha256-tEdElWJ+KBTxBobzXBpPopQSwK2usGW/it1+yfbVzBw="; + }; + + linkDependencies = makeTest { + name = "link-dependencies"; + + src = fetchurl { + url = "https://raw.githubusercontent.com/evcc-io/evcc/0.106.3/package-lock.json"; + hash = "sha256-6ZTBMyuyPP/63gpQugggHhKVup6OB4hZ2rmSvPJ0yEs="; + }; + + hash = "sha256-VzQhArHoznYSXUT7l9HkJV4yoSOmoP8eYTLel1QwmB4="; + }; + + # This package contains both hosted Git shorthand, and a bundled dependency that happens to override an existing one. + etherpadLite1818 = makeTest { + name = "etherpad-lite-1.8.18"; + + src = fetchurl { + url = "https://raw.githubusercontent.com/ether/etherpad-lite/1.8.18/src/package-lock.json"; + hash = "sha256-1fGNxYJi1I4cXK/jinNG+Y6tPEOhP3QAqWOBEQttS9E="; + }; + + hash = "sha256-+KA8/orSBJ4EhuSyQO8IKSxsN/FAsYU3lOzq+awuxNQ="; + + forceGitDeps = true; + }; + }; + + meta = with lib; { + description = "Prefetch dependencies from npm (for use with `fetchNpmDeps`)"; + maintainers = with maintainers; [ lilyinstarlight winter ]; + license = licenses.mit; + }; + }; + + fetchNpmDeps = + { name ? "npm-deps" + , hash ? "" + , forceGitDeps ? false + , ... + } @ args: + let + hash_ = + if hash != "" then { + outputHash = hash; + } else { + outputHash = ""; + outputHashAlgo = "sha256"; + }; + + forceGitDeps_ = lib.optionalAttrs forceGitDeps { FORCE_GIT_DEPS = true; }; + in + stdenvNoCC.mkDerivation (args // { + inherit name; + + nativeBuildInputs = [ prefetch-npm-deps ]; + + buildPhase = '' + runHook preBuild + + if [[ ! -e package-lock.json ]]; then + echo + echo "ERROR: The package-lock.json file does not exist!" + echo + echo "package-lock.json is required to make sure that npmDepsHash doesn't change" + echo "when packages are updated on npm." + echo + echo "Hint: You can copy a vendored package-lock.json file via postPatch." + echo + + exit 1 + fi + + prefetch-npm-deps package-lock.json $out + + runHook postBuild + ''; + + dontInstall = true; + + # NIX_NPM_TOKENS environment variable should be a JSON mapping in the shape of: + # `{ "registry.example.com": "example-registry-bearer-token", ... 
}` + impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ [ "NIX_NPM_TOKENS" ]; + + SSL_CERT_FILE = if (hash_.outputHash == "" || hash_.outputHash == lib.fakeSha256 || hash_.outputHash == lib.fakeSha512 || hash_.outputHash == lib.fakeHash) + then "${cacert}/etc/ssl/certs/ca-bundle.crt" + else "/no-cert-file.crt"; + + outputHashMode = "recursive"; + } // hash_ // forceGitDeps_); +} diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs new file mode 100644 index 000000000000..b7efedac59bd --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs @@ -0,0 +1,117 @@ +use base64::prelude::{Engine, BASE64_STANDARD}; +use digest::{Digest, Update}; +use serde::{Deserialize, Serialize}; +use sha1::Sha1; +use sha2::{Sha256, Sha512}; +use std::{ + fs::{self, File}, + io::Write, + path::PathBuf, +}; +use url::Url; + +#[derive(Serialize, Deserialize)] +pub(super) struct Key { + pub(super) key: String, + pub(super) integrity: String, + pub(super) time: u8, + pub(super) size: usize, + pub(super) metadata: Metadata, +} + +#[derive(Serialize, Deserialize)] +pub(super) struct Metadata { + pub(super) url: Url, + pub(super) options: Options, +} + +#[derive(Serialize, Deserialize)] +pub(super) struct Options { + pub(super) compress: bool, +} + +pub struct Cache(PathBuf); + +fn push_hash_segments(path: &mut PathBuf, hash: &str) { + path.push(&hash[0..2]); + path.push(&hash[2..4]); + path.push(&hash[4..]); +} + +impl Cache { + pub fn new(path: PathBuf) -> Cache { + Cache(path) + } + + pub fn put( + &self, + key: String, + url: Url, + data: &[u8], + integrity: Option<String>, + ) -> anyhow::Result<()> { + let (algo, hash, integrity) = if let Some(integrity) = integrity { + let (algo, hash) = integrity.split_once('-').unwrap(); + + (algo.to_string(), BASE64_STANDARD.decode(hash)?, integrity) + } else { + let hash = Sha512::new().chain(data).finalize(); + + ( + String::from("sha512"), + hash.to_vec(), + format!("sha512-{}", BASE64_STANDARD.encode(hash)), + ) + }; + + let content_path = { + let mut p = self.0.join("content-v2"); + + p.push(algo); + + push_hash_segments( + &mut p, + &hash + .into_iter() + .map(|n| format!("{n:02x}")) + .collect::<String>(), + ); + + p + }; + + fs::create_dir_all(content_path.parent().unwrap())?; + + fs::write(content_path, data)?; + + let index_path = { + let mut p = self.0.join("index-v5"); + + push_hash_segments( + &mut p, + &format!("{:x}", Sha256::new().chain(&key).finalize()), + ); + + p + }; + + fs::create_dir_all(index_path.parent().unwrap())?; + + let data = serde_json::to_string(&Key { + key, + integrity, + time: 0, + size: data.len(), + metadata: Metadata { + url, + options: Options { compress: true }, + }, + })?; + + let mut file = File::options().append(true).create(true).open(index_path)?; + + write!(file, "{:x}\t{data}", Sha1::new().chain(&data).finalize())?; + + Ok(()) + } +} diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/main.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/main.rs new file mode 100644 index 000000000000..9d86bd8091a7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/main.rs @@ -0,0 +1,425 @@ +#![warn(clippy::pedantic)] + +use crate::cacache::{Cache, Key}; +use anyhow::{anyhow, bail}; +use rayon::prelude::*; +use serde_json::{Map, Value}; +use std::{ + collections::HashMap, + env, fs, + path::{Path, PathBuf}, + process::{self, Command}, +}; +use tempfile::tempdir; +use url::Url; +use walkdir::WalkDir; + 
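// Editorial sketch, not part of the original sources: a self-contained test
// illustrating the `_cacache` path convention used by `cacache::Cache::put`
// above. A hex digest is split into two 2-character segments plus the rest,
// mirroring npm's `content-v2/<algo>/xx/yy/<rest>` on-disk layout. The digest
// string below is a made-up example value.
#[cfg(test)]
mod cacache_layout_sketch {
    #[test]
    fn hex_digest_splits_into_two_short_segments_and_a_tail() {
        let digest = "0a1b2c3d4e5f";
        let (first, rest) = digest.split_at(2);
        let (second, tail) = rest.split_at(2);
        assert_eq!((first, second, tail), ("0a", "1b", "2c3d4e5f"));
    }
}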
+mod cacache; +mod parse; +mod util; + +fn cache_map_path() -> Option<PathBuf> { + env::var_os("CACHE_MAP_PATH").map(PathBuf::from) +} + +/// `fixup_lockfile` rewrites `integrity` hashes to match cache and removes the `integrity` field from Git dependencies. +/// +/// Sometimes npm has multiple instances of a given `resolved` URL that have different types of `integrity` hashes (e.g. SHA-1 +/// and SHA-512) in the lockfile. Given we only cache one version of these, the `integrity` field must be normalized to the hash +/// under which we cache it (which is the strongest available one). +/// +/// Git dependencies from specific providers can be retrieved from those providers' automatic tarball features. +/// When these dependencies are specified with a commit identifier, npm generates a tarball, and inserts the integrity hash of that +/// tarball into the lockfile. +/// +/// Thus, we remove this hash, to replace it with our own deterministic copies of dependencies from hosted Git providers. +/// +/// If no fixups were performed, `None` is returned and the lockfile structure should be left as-is. If fixups were performed, the +/// `dependencies` key in v2 lockfiles designed for backwards compatibility with v1 parsers is removed because of inconsistent data. +fn fixup_lockfile( + mut lock: Map<String, Value>, + cache: &Option<HashMap<String, String>>, +) -> anyhow::Result<Option<Map<String, Value>>> { + let mut fixed = false; + + match lock + .get("lockfileVersion") + .ok_or_else(|| anyhow!("couldn't get lockfile version"))? + .as_i64() + .ok_or_else(|| anyhow!("lockfile version isn't an int"))? + { + 1 => fixup_v1_deps( + lock.get_mut("dependencies") + .unwrap() + .as_object_mut() + .unwrap(), + cache, + &mut fixed, + ), + 2 | 3 => { + for package in lock + .get_mut("packages") + .ok_or_else(|| anyhow!("couldn't get packages"))? + .as_object_mut() + .ok_or_else(|| anyhow!("packages isn't a map"))? + .values_mut() + { + if let Some(Value::String(resolved)) = package.get("resolved") { + if let Some(Value::String(integrity)) = package.get("integrity") { + if resolved.starts_with("git+ssh://") { + fixed = true; + + package + .as_object_mut() + .ok_or_else(|| anyhow!("package isn't a map"))? + .remove("integrity"); + } else if let Some(cache_hashes) = cache { + let cache_hash = cache_hashes + .get(resolved) + .expect("dependency should have a hash"); + + if integrity != cache_hash { + fixed = true; + + *package + .as_object_mut() + .ok_or_else(|| anyhow!("package isn't a map"))?
+ .get_mut("integrity") + .unwrap() = Value::String(cache_hash.clone()); + } + } + } + } + } + + if fixed { + lock.remove("dependencies"); + } + } + v => bail!("unsupported lockfile version {v}"), + } + + if fixed { + Ok(Some(lock)) + } else { + Ok(None) + } +} + +// Recursive helper to fixup v1 lockfile deps +fn fixup_v1_deps( + dependencies: &mut Map<String, Value>, + cache: &Option<HashMap<String, String>>, + fixed: &mut bool, +) { + for dep in dependencies.values_mut() { + if let Some(Value::String(resolved)) = dep + .as_object() + .expect("v1 dep must be object") + .get("resolved") + { + if let Some(Value::String(integrity)) = dep + .as_object() + .expect("v1 dep must be object") + .get("integrity") + { + if resolved.starts_with("git+ssh://") { + *fixed = true; + + dep.as_object_mut() + .expect("v1 dep must be object") + .remove("integrity"); + } else if let Some(cache_hashes) = cache { + let cache_hash = cache_hashes + .get(resolved) + .expect("dependency should have a hash"); + + if integrity != cache_hash { + *fixed = true; + + *dep.as_object_mut() + .expect("v1 dep must be object") + .get_mut("integrity") + .unwrap() = Value::String(cache_hash.clone()); + } + } + } + } + + if let Some(Value::Object(more_deps)) = dep.as_object_mut().unwrap().get_mut("dependencies") + { + fixup_v1_deps(more_deps, cache, fixed); + } + } +} + +fn map_cache() -> anyhow::Result<HashMap<Url, String>> { + let mut hashes = HashMap::new(); + + let content_path = Path::new(&env::var_os("npmDeps").unwrap()).join("_cacache/index-v5"); + + for entry in WalkDir::new(content_path) { + let entry = entry?; + + if entry.file_type().is_file() { + let content = fs::read_to_string(entry.path())?; + let key: Key = serde_json::from_str(content.split_ascii_whitespace().nth(1).unwrap())?; + + hashes.insert(key.metadata.url, key.integrity); + } + } + + Ok(hashes) +} + +fn main() -> anyhow::Result<()> { + env_logger::init(); + + let args = env::args().collect::<Vec<_>>(); + + if args.len() < 2 { + println!("usage: {} <path/to/package-lock.json>", args[0]); + println!(); + println!("Prefetches npm dependencies for usage by fetchNpmDeps."); + + process::exit(1); + } + + if let Ok(jobs) = env::var("NIX_BUILD_CORES") { + if !jobs.is_empty() { + rayon::ThreadPoolBuilder::new() + .num_threads( + jobs.parse() + .expect("NIX_BUILD_CORES must be a whole number"), + ) + .build_global() + .unwrap(); + } + } + + if args[1] == "--fixup-lockfile" { + let lock = serde_json::from_str(&fs::read_to_string(&args[2])?)?; + + let cache = cache_map_path() + .map(|map_path| Ok::<_, anyhow::Error>(serde_json::from_slice(&fs::read(map_path)?)?)) + .transpose()?; + + if let Some(fixed) = fixup_lockfile(lock, &cache)? 
{ + println!("Fixing lockfile"); + + fs::write(&args[2], serde_json::to_string(&fixed)?)?; + } + + return Ok(()); + } else if args[1] == "--map-cache" { + let map = map_cache()?; + + fs::write( + cache_map_path().expect("CACHE_MAP_PATH environment variable must be set"), + serde_json::to_string(&map)?, + )?; + + return Ok(()); + } + + let lock_content = fs::read_to_string(&args[1])?; + + let out_tempdir; + + let (out, print_hash) = if let Some(path) = args.get(2) { + (Path::new(path), false) + } else { + out_tempdir = tempdir()?; + + (out_tempdir.path(), true) + }; + + let packages = parse::lockfile(&lock_content, env::var("FORCE_GIT_DEPS").is_ok())?; + + let cache = Cache::new(out.join("_cacache")); + + packages.into_par_iter().try_for_each(|package| { + eprintln!("{}", package.name); + + let tarball = package.tarball()?; + let integrity = package.integrity().map(ToString::to_string); + + cache + .put( + format!("make-fetch-happen:request-cache:{}", package.url), + package.url, + &tarball, + integrity, + ) + .map_err(|e| anyhow!("couldn't insert cache entry for {}: {e:?}", package.name))?; + + Ok::<_, anyhow::Error>(()) + })?; + + fs::write(out.join("package-lock.json"), lock_content)?; + + if print_hash { + Command::new("nix") + .args(["--experimental-features", "nix-command", "hash", "path"]) + .arg(out.as_os_str()) + .status()?; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use super::fixup_lockfile; + use serde_json::json; + + #[test] + fn lockfile_fixup() -> anyhow::Result<()> { + let input = json!({ + "lockfileVersion": 2, + "name": "foo", + "packages": { + "": { + + }, + "foo": { + "resolved": "https://github.com/NixOS/nixpkgs", + "integrity": "sha1-aaa" + }, + "bar": { + "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git", + "integrity": "sha512-aaa" + }, + "foo-bad": { + "resolved": "foo", + "integrity": "sha1-foo" + }, + "foo-good": { + "resolved": "foo", + "integrity": "sha512-foo" + }, + } + }); + + let expected = json!({ + "lockfileVersion": 2, + "name": "foo", + "packages": { + "": { + + }, + "foo": { + "resolved": "https://github.com/NixOS/nixpkgs", + "integrity": "" + }, + "bar": { + "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git", + }, + "foo-bad": { + "resolved": "foo", + "integrity": "sha512-foo" + }, + "foo-good": { + "resolved": "foo", + "integrity": "sha512-foo" + }, + } + }); + + let mut hashes = HashMap::new(); + + hashes.insert( + String::from("https://github.com/NixOS/nixpkgs"), + String::new(), + ); + + hashes.insert( + String::from("git+ssh://git@github.com/NixOS/nixpkgs.git"), + String::new(), + ); + + hashes.insert(String::from("foo"), String::from("sha512-foo")); + + assert_eq!( + fixup_lockfile(input.as_object().unwrap().clone(), &Some(hashes))?, + Some(expected.as_object().unwrap().clone()) + ); + + Ok(()) + } + + #[test] + fn lockfile_v1_fixup() -> anyhow::Result<()> { + let input = json!({ + "lockfileVersion": 1, + "name": "foo", + "dependencies": { + "foo": { + "resolved": "https://github.com/NixOS/nixpkgs", + "integrity": "sha512-aaa" + }, + "foo-good": { + "resolved": "foo", + "integrity": "sha512-foo" + }, + "bar": { + "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git", + "integrity": "sha512-bbb", + "dependencies": { + "foo-bad": { + "resolved": "foo", + "integrity": "sha1-foo" + }, + }, + }, + } + }); + + let expected = json!({ + "lockfileVersion": 1, + "name": "foo", + "dependencies": { + "foo": { + "resolved": "https://github.com/NixOS/nixpkgs", + "integrity": "" + }, + "foo-good": { + 
"resolved": "foo", + "integrity": "sha512-foo" + }, + "bar": { + "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git", + "dependencies": { + "foo-bad": { + "resolved": "foo", + "integrity": "sha512-foo" + }, + }, + }, + } + }); + + let mut hashes = HashMap::new(); + + hashes.insert( + String::from("https://github.com/NixOS/nixpkgs"), + String::new(), + ); + + hashes.insert( + String::from("git+ssh://git@github.com/NixOS/nixpkgs.git"), + String::new(), + ); + + hashes.insert(String::from("foo"), String::from("sha512-foo")); + + assert_eq!( + fixup_lockfile(input.as_object().unwrap().clone(), &Some(hashes))?, + Some(expected.as_object().unwrap().clone()) + ); + + Ok(()) + } +} diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs new file mode 100644 index 000000000000..f50a31651d0e --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs @@ -0,0 +1,333 @@ +use anyhow::{anyhow, bail, Context}; +use rayon::slice::ParallelSliceMut; +use serde::{ + de::{self, Visitor}, + Deserialize, Deserializer, +}; +use std::{ + cmp::Ordering, + collections::{HashMap, HashSet}, + fmt, +}; +use url::Url; + +pub(super) fn packages(content: &str) -> anyhow::Result<Vec<Package>> { + let lockfile: Lockfile = serde_json::from_str(content)?; + + let mut packages = match lockfile.version { + 1 => { + let initial_url = get_initial_url()?; + + lockfile + .dependencies + .map(|p| to_new_packages(p, &initial_url)) + .transpose()? + } + 2 | 3 => lockfile.packages.map(|pkgs| { + pkgs.into_iter() + .filter(|(n, p)| !n.is_empty() && matches!(p.resolved, Some(UrlOrString::Url(_)))) + .map(|(n, p)| Package { name: Some(n), ..p }) + .collect() + }), + _ => bail!( + "We don't support lockfile version {}, please file an issue.", + lockfile.version + ), + } + .expect("lockfile should have packages"); + + packages.par_sort_by(|x, y| { + x.resolved + .partial_cmp(&y.resolved) + .expect("resolved should be comparable") + .then( + // v1 lockfiles can contain multiple references to the same version of a package, with + // different integrity values (e.g. 
a SHA-1 and a SHA-512 in one, but just a SHA-512 in another) + y.integrity + .partial_cmp(&x.integrity) + .expect("integrity should be comparable"), + ) + }); + + packages.dedup_by(|x, y| x.resolved == y.resolved); + + Ok(packages) +} + +#[derive(Deserialize)] +struct Lockfile { + #[serde(rename = "lockfileVersion")] + version: u8, + dependencies: Option<HashMap<String, OldPackage>>, + packages: Option<HashMap<String, Package>>, +} + +#[derive(Deserialize)] +struct OldPackage { + version: UrlOrString, + #[serde(default)] + bundled: bool, + resolved: Option<UrlOrString>, + integrity: Option<HashCollection>, + dependencies: Option<HashMap<String, OldPackage>>, +} + +#[derive(Debug, Deserialize, PartialEq, Eq)] +pub(super) struct Package { + #[serde(default)] + pub(super) name: Option<String>, + pub(super) resolved: Option<UrlOrString>, + pub(super) integrity: Option<HashCollection>, +} + +#[derive(Debug, Deserialize, PartialEq, Eq, PartialOrd, Ord)] +#[serde(untagged)] +pub(super) enum UrlOrString { + Url(Url), + String(String), +} + +impl fmt::Display for UrlOrString { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + UrlOrString::Url(url) => url.fmt(f), + UrlOrString::String(string) => string.fmt(f), + } + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct HashCollection(HashSet<Hash>); + +impl HashCollection { + pub fn from_str(s: impl AsRef<str>) -> anyhow::Result<HashCollection> { + let hashes = s + .as_ref() + .split_ascii_whitespace() + .map(Hash::new) + .collect::<anyhow::Result<_>>()?; + + Ok(HashCollection(hashes)) + } + + pub fn into_best(self) -> Option<Hash> { + self.0.into_iter().max() + } +} + +impl PartialOrd for HashCollection { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + let lhs = self.0.iter().max()?; + let rhs = other.0.iter().max()?; + + lhs.partial_cmp(rhs) + } +} + +impl<'de> Deserialize<'de> for HashCollection { + fn deserialize<D>(deserializer: D) -> Result<HashCollection, D::Error> + where + D: Deserializer<'de>, + { + deserializer.deserialize_string(HashCollectionVisitor) + } +} + +struct HashCollectionVisitor; + +impl<'de> Visitor<'de> for HashCollectionVisitor { + type Value = HashCollection; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a single SRI hash or a collection of them (separated by spaces)") + } + + fn visit_str<E>(self, value: &str) -> Result<HashCollection, E> + where + E: de::Error, + { + HashCollection::from_str(value).map_err(E::custom) + } +} + +#[derive(Clone, Debug, Deserialize, PartialEq, Eq, Hash)] +pub struct Hash(String); + +// Hash algorithms, in ascending preference. +const ALGOS: &[&str] = &["sha1", "sha512"]; + +impl Hash { + fn new(s: impl AsRef<str>) -> anyhow::Result<Hash> { + let algo = s + .as_ref() + .split_once('-') + .ok_or_else(|| anyhow!("expected SRI hash, got {:?}", s.as_ref()))? + .0; + + if ALGOS.iter().any(|&a| algo == a) { + Ok(Hash(s.as_ref().to_string())) + } else { + Err(anyhow!("unknown hash algorithm {algo:?}")) + } + } + + pub fn as_str(&self) -> &str { + &self.0 + } +} + +impl fmt::Display for Hash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_str().fmt(f) + } +} + +impl PartialOrd for Hash { + fn partial_cmp(&self, other: &Hash) -> Option<Ordering> { + let lhs = self.0.split_once('-')?.0; + let rhs = other.0.split_once('-')?.0; + + ALGOS + .iter() + .position(|&s| lhs == s)? + .partial_cmp(&ALGOS.iter().position(|&s| rhs == s)?) 
+ } +} + +impl Ord for Hash { + fn cmp(&self, other: &Hash) -> Ordering { + self.partial_cmp(other).unwrap() + } +} + +#[allow(clippy::case_sensitive_file_extension_comparisons)] +fn to_new_packages( + old_packages: HashMap<String, OldPackage>, + initial_url: &Url, +) -> anyhow::Result<Vec<Package>> { + let mut new = Vec::new(); + + for (name, mut package) in old_packages { + // In some cases, a bundled dependency happens to have the same version as a non-bundled one, causing + // the bundled one without a URL to override the entry for the non-bundled instance, which prevents the + // dependency from being downloaded. + if package.bundled { + continue; + } + + if let UrlOrString::Url(v) = &package.version { + for (scheme, host) in [ + ("github", "github.com"), + ("bitbucket", "bitbucket.org"), + ("gitlab", "gitlab.com"), + ] { + if v.scheme() == scheme { + package.version = { + let mut new_url = initial_url.clone(); + + new_url.set_host(Some(host))?; + + if v.path().ends_with(".git") { + new_url.set_path(v.path()); + } else { + new_url.set_path(&format!("{}.git", v.path())); + } + + new_url.set_fragment(v.fragment()); + + UrlOrString::Url(new_url) + }; + + break; + } + } + } + + new.push(Package { + name: Some(name), + resolved: if matches!(package.version, UrlOrString::Url(_)) { + Some(package.version) + } else { + package.resolved + }, + integrity: package.integrity, + }); + + if let Some(dependencies) = package.dependencies { + new.append(&mut to_new_packages(dependencies, initial_url)?); + } + } + + Ok(new) +} + +fn get_initial_url() -> anyhow::Result<Url> { + Url::parse("git+ssh://git@a.b").context("initial url should be valid") +} + +#[cfg(test)] +mod tests { + use super::{ + get_initial_url, to_new_packages, Hash, HashCollection, OldPackage, Package, UrlOrString, + }; + use std::{ + cmp::Ordering, + collections::{HashMap, HashSet}, + }; + use url::Url; + + #[test] + fn git_shorthand_v1() -> anyhow::Result<()> { + let old = { + let mut o = HashMap::new(); + o.insert( + String::from("sqlite3"), + OldPackage { + version: UrlOrString::Url( + Url::parse( + "github:mapbox/node-sqlite3#593c9d498be2510d286349134537e3bf89401c4a", + ) + .unwrap(), + ), + bundled: false, + resolved: None, + integrity: None, + dependencies: None, + }, + ); + o + }; + + let initial_url = get_initial_url()?; + + let new = to_new_packages(old, &initial_url)?; + + assert_eq!(new.len(), 1, "new packages map should contain 1 value"); + assert_eq!(new[0], Package { + name: Some(String::from("sqlite3")), + resolved: Some(UrlOrString::Url(Url::parse("git+ssh://git@github.com/mapbox/node-sqlite3.git#593c9d498be2510d286349134537e3bf89401c4a").unwrap())), + integrity: None + }); + + Ok(()) + } + + #[test] + fn hash_preference() { + assert_eq!( + Hash(String::from("sha1-foo")).partial_cmp(&Hash(String::from("sha512-foo"))), + Some(Ordering::Less) + ); + + assert_eq!( + HashCollection({ + let mut set = HashSet::new(); + set.insert(Hash(String::from("sha512-foo"))); + set.insert(Hash(String::from("sha1-bar"))); + set + }) + .into_best(), + Some(Hash(String::from("sha512-foo"))) + ); + } +} diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs new file mode 100644 index 000000000000..86e9120de02f --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs @@ -0,0 +1,334 @@ +use anyhow::{anyhow, bail, Context}; +use lock::UrlOrString; +use rayon::prelude::*; +use serde_json::{Map, Value}; +use std::{ + fs, + 
io::Write, + process::{Command, Stdio}, +}; +use tempfile::{tempdir, TempDir}; +use url::Url; + +use crate::util; + +pub mod lock; + +pub fn lockfile(content: &str, force_git_deps: bool) -> anyhow::Result<Vec<Package>> { + let mut packages = lock::packages(content) + .context("failed to extract packages from lockfile")? + .into_par_iter() + .map(|p| { + let n = p.name.clone().unwrap(); + + Package::from_lock(p).with_context(|| format!("failed to parse data for {n}")) + }) + .collect::<anyhow::Result<Vec<_>>>()?; + + let mut new = Vec::new(); + + for pkg in packages + .iter() + .filter(|p| matches!(p.specifics, Specifics::Git { .. })) + { + let dir = match &pkg.specifics { + Specifics::Git { workdir } => workdir, + Specifics::Registry { .. } => unimplemented!(), + }; + + let path = dir.path().join("package"); + + let lockfile_contents = fs::read_to_string(path.join("package-lock.json")); + + let package_json_path = path.join("package.json"); + let mut package_json: Map<String, Value> = + serde_json::from_str(&fs::read_to_string(package_json_path)?)?; + + if let Some(scripts) = package_json + .get_mut("scripts") + .and_then(Value::as_object_mut) + { + // https://github.com/npm/pacote/blob/272edc1bac06991fc5f95d06342334bbacfbaa4b/lib/git.js#L166-L172 + for typ in [ + "postinstall", + "build", + "preinstall", + "install", + "prepack", + "prepare", + ] { + if scripts.contains_key(typ) && lockfile_contents.is_err() && !force_git_deps { + bail!("Git dependency {} contains install scripts, but has no lockfile, which is something that will probably break. Open an issue if you can't feasibly patch this dependency out, and we'll come up with a workaround.\nIf you'd like to attempt to try to use this dependency anyways, set `forceGitDeps = true`.", pkg.name); + } + } + } + + if let Ok(lockfile_contents) = lockfile_contents { + new.append(&mut lockfile(&lockfile_contents, force_git_deps)?); + } + } + + packages.append(&mut new); + + packages.par_sort_by(|x, y| { + x.url + .partial_cmp(&y.url) + .expect("resolved should be comparable") + }); + + packages.dedup_by(|x, y| x.url == y.url); + + Ok(packages) +} + +#[derive(Debug)] +pub struct Package { + pub name: String, + pub url: Url, + specifics: Specifics, +} + +#[derive(Debug)] +enum Specifics { + Registry { integrity: lock::Hash }, + Git { workdir: TempDir }, +} + +impl Package { + fn from_lock(pkg: lock::Package) -> anyhow::Result<Package> { + let mut resolved = match pkg + .resolved + .expect("at this point, packages should have URLs") + { + UrlOrString::Url(u) => u, + UrlOrString::String(_) => panic!("at this point, all packages should have URLs"), + }; + + let specifics = match get_hosted_git_url(&resolved)? 
{ + Some(hosted) => { + let body = util::get_url_body_with_retry(&hosted)?; + + let workdir = tempdir()?; + + let tar_path = workdir.path().join("package"); + + fs::create_dir(&tar_path)?; + + let mut cmd = Command::new("tar") + .args(["--extract", "--gzip", "--strip-components=1", "-C"]) + .arg(&tar_path) + .stdin(Stdio::piped()) + .spawn()?; + + cmd.stdin.take().unwrap().write_all(&body)?; + + let exit = cmd.wait()?; + + if !exit.success() { + bail!( + "failed to extract tarball for {}: tar exited with status code {}", + pkg.name.unwrap(), + exit.code().unwrap() + ); + } + + resolved = hosted; + + Specifics::Git { workdir } + } + None => Specifics::Registry { + integrity: pkg + .integrity + .expect("non-git dependencies should have associated integrity") + .into_best() + .expect("non-git dependencies should have non-empty associated integrity"), + }, + }; + + Ok(Package { + name: pkg.name.unwrap(), + url: resolved, + specifics, + }) + } + + pub fn tarball(&self) -> anyhow::Result<Vec<u8>> { + match &self.specifics { + Specifics::Registry { .. } => Ok(util::get_url_body_with_retry(&self.url)?), + Specifics::Git { workdir } => Ok(Command::new("tar") + .args([ + "--sort=name", + "--mtime=@0", + "--owner=0", + "--group=0", + "--numeric-owner", + "--format=gnu", + "-I", + "gzip -n -9", + "--create", + "-C", + ]) + .arg(workdir.path()) + .arg("package") + .output()? + .stdout), + } + } + + pub fn integrity(&self) -> Option<&lock::Hash> { + match &self.specifics { + Specifics::Registry { integrity } => Some(integrity), + Specifics::Git { .. } => None, + } + } +} + +#[allow(clippy::case_sensitive_file_extension_comparisons)] +fn get_hosted_git_url(url: &Url) -> anyhow::Result<Option<Url>> { + if ["git", "git+ssh", "git+https", "ssh"].contains(&url.scheme()) { + let mut s = url + .path_segments() + .ok_or_else(|| anyhow!("bad URL: {url}"))?; + + let mut get_url = || match url.host_str()? 
{ + "github.com" => { + let user = s.next()?; + let mut project = s.next()?; + let typ = s.next(); + let mut commit = s.next(); + + if typ.is_none() { + commit = url.fragment(); + } else if typ.is_some() && typ != Some("tree") { + return None; + } + + if project.ends_with(".git") { + project = project.strip_suffix(".git")?; + } + + let commit = commit.unwrap(); + + Some( + Url::parse(&format!( + "https://codeload.github.com/{user}/{project}/tar.gz/{commit}" + )) + .ok()?, + ) + } + "bitbucket.org" => { + let user = s.next()?; + let mut project = s.next()?; + let aux = s.next(); + + if aux == Some("get") { + return None; + } + + if project.ends_with(".git") { + project = project.strip_suffix(".git")?; + } + + let commit = url.fragment()?; + + Some( + Url::parse(&format!( + "https://bitbucket.org/{user}/{project}/get/{commit}.tar.gz" + )) + .ok()?, + ) + } + "gitlab.com" => { + /* let path = &url.path()[1..]; + + if path.contains("/~/") || path.contains("/archive.tar.gz") { + return None; + } + + let user = s.next()?; + let mut project = s.next()?; + + if project.ends_with(".git") { + project = project.strip_suffix(".git")?; + } + + let commit = url.fragment()?; + + Some( + Url::parse(&format!( + "https://gitlab.com/{user}/{project}/repository/archive.tar.gz?ref={commit}" + )) + .ok()?, + ) */ + + // lmao: https://github.com/npm/hosted-git-info/pull/109 + None + } + "git.sr.ht" => { + let user = s.next()?; + let mut project = s.next()?; + let aux = s.next(); + + if aux == Some("archive") { + return None; + } + + if project.ends_with(".git") { + project = project.strip_suffix(".git")?; + } + + let commit = url.fragment()?; + + Some( + Url::parse(&format!( + "https://git.sr.ht/{user}/{project}/archive/{commit}.tar.gz" + )) + .ok()?, + ) + } + _ => None, + }; + + match get_url() { + Some(u) => Ok(Some(u)), + None => Err(anyhow!("This lockfile either contains a Git dependency with an unsupported host, or a malformed URL in the lockfile: {url}")) + } + } else { + Ok(None) + } +} + +#[cfg(test)] +mod tests { + use super::get_hosted_git_url; + use url::Url; + + #[test] + fn hosted_git_urls() { + for (input, expected) in [ + ( + "git+ssh://git@github.com/castlabs/electron-releases.git#fc5f78d046e8d7cdeb66345a2633c383ab41f525", + Some("https://codeload.github.com/castlabs/electron-releases/tar.gz/fc5f78d046e8d7cdeb66345a2633c383ab41f525"), + ), + ( + "git+ssh://bitbucket.org/foo/bar#branch", + Some("https://bitbucket.org/foo/bar/get/branch.tar.gz") + ), + ( + "git+ssh://git.sr.ht/~foo/bar#branch", + Some("https://git.sr.ht/~foo/bar/archive/branch.tar.gz") + ), + ] { + assert_eq!( + get_hosted_git_url(&Url::parse(input).unwrap()).unwrap(), + expected.map(|u| Url::parse(u).unwrap()) + ); + } + + assert!( + get_hosted_git_url(&Url::parse("ssh://git@gitlab.com/foo/bar.git#fix/bug").unwrap()) + .is_err(), + "GitLab URLs should be marked as invalid (lol)" + ); + } +} diff --git a/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/util.rs b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/util.rs new file mode 100644 index 000000000000..7dd928fdc43f --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-npm-deps/src/util.rs @@ -0,0 +1,66 @@ +use backoff::{retry, ExponentialBackoff}; +use isahc::{ + config::{CaCertificate, Configurable, RedirectPolicy, SslOption}, + Body, Request, RequestExt, +}; +use serde_json::{Map, Value}; +use std::{env, io::Read, path::Path}; +use url::Url; + +pub fn get_url(url: &Url) -> Result<Body, isahc::Error> { + let mut request = 
Request::get(url.as_str()).redirect_policy(RedirectPolicy::Limit(10)); + + // Respect SSL_CERT_FILE if environment variable exists + if let Ok(ssl_cert_file) = env::var("SSL_CERT_FILE") { + if Path::new(&ssl_cert_file).exists() { + // When file exists, use it. NIX_SSL_CERT_FILE will still override. + request = request.ssl_ca_certificate(CaCertificate::file(ssl_cert_file)); + } else if env::var("outputHash").is_ok() { + // When file does not exist, assume we are downloading in a FOD and + // therefore do not need to check certificates, since the output is + // already hashed. + request = request.ssl_options(SslOption::DANGER_ACCEPT_INVALID_CERTS); + } + } + + // Respect NIX_NPM_TOKENS environment variable, which should be a JSON mapping in the shape of: + // `{ "registry.example.com": "example-registry-bearer-token", ... }` + if let Some(host) = url.host_str() { + if let Ok(npm_tokens) = env::var("NIX_NPM_TOKENS") { + if let Ok(tokens) = serde_json::from_str::<Map<String, Value>>(&npm_tokens) { + if let Some(token) = tokens.get(host).and_then(serde_json::Value::as_str) { + request = request.header("Authorization", format!("Bearer {token}")); + } + } + } + } + + Ok(request.body(())?.send()?.into_body()) +} + +pub fn get_url_body_with_retry(url: &Url) -> Result<Vec<u8>, isahc::Error> { + retry(ExponentialBackoff::default(), || { + get_url(url) + .and_then(|mut body| { + let mut buf = Vec::new(); + + body.read_to_end(&mut buf)?; + + Ok(buf) + }) + .map_err(|err| { + if err.is_network() || err.is_timeout() { + backoff::Error::transient(err) + } else { + backoff::Error::permanent(err) + } + }) + }) + .map_err(|backoff_err| match backoff_err { + backoff::Error::Permanent(err) + | backoff::Error::Transient { + err, + retry_after: _, + } => err, + }) +} diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/common.js b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/common.js new file mode 100644 index 000000000000..8e0d1b0e470b --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/common.js @@ -0,0 +1,17 @@ +const path = require('path') + +// This has to match the logic in pkgs/development/tools/yarn2nix-moretea/yarn2nix/lib/urlToName.js +// so that fixup_yarn_lock produces the same paths +const urlToName = url => { + const isCodeloadGitTarballUrl = url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/') + + if (url.startsWith('git+') || isCodeloadGitTarballUrl) { + return path.basename(url) + } else { + return url + .replace(/https:\/\/(.)*(.com)\//g, '') // prevents having long directory names + .replace(/[@/%:-]/g, '_') // replace @ and : and - and % characters with underscore + } +} + +module.exports = { urlToName }; diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/default.nix b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/default.nix new file mode 100644 index 000000000000..49c2f6cbfc98 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/default.nix @@ -0,0 +1,84 @@ +{ stdenv, lib, makeWrapper, coreutils, nix-prefetch-git, fetchurl, nodejs-slim, prefetch-yarn-deps, cacert, callPackage, nix }: + +let + yarnpkg-lockfile-tar = fetchurl { + url = "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz"; + hash = "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ=="; + }; + + tests = callPackage ./tests {}; + +in { + prefetch-yarn-deps = stdenv.mkDerivation { + name = "prefetch-yarn-deps"; + + dontUnpack = true; + + nativeBuildInputs = [ makeWrapper ]; + buildInputs = [ 
coreutils nix-prefetch-git nodejs-slim nix ]; + + buildPhase = '' + runHook preBuild + + mkdir libexec + tar --strip-components=1 -xf ${yarnpkg-lockfile-tar} package/index.js + mv index.js libexec/yarnpkg-lockfile.js + cp ${./.}/*.js libexec/ + patchShebangs libexec + + runHook postBuild + ''; + + installPhase = '' + runHook preInstall + + mkdir -p $out/bin + cp -r libexec $out + makeWrapper $out/libexec/index.js $out/bin/prefetch-yarn-deps \ + --prefix PATH : ${lib.makeBinPath [ coreutils nix-prefetch-git nix ]} + makeWrapper $out/libexec/fixup.js $out/bin/fixup-yarn-lock + + runHook postInstall + ''; + + passthru = { inherit tests; }; + }; + + fetchYarnDeps = let + f = { + name ? "offline", + src ? null, + hash ? "", + sha256 ? "", + ... + }@args: let + hash_ = + if hash != "" then { outputHashAlgo = null; outputHash = hash; } + else if sha256 != "" then { outputHashAlgo = "sha256"; outputHash = sha256; } + else { outputHashAlgo = "sha256"; outputHash = lib.fakeSha256; }; + in stdenv.mkDerivation ({ + inherit name; + + dontUnpack = src == null; + dontInstall = true; + + nativeBuildInputs = [ prefetch-yarn-deps ]; + GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt"; + + buildPhase = '' + runHook preBuild + + yarnLock=''${yarnLock:=$PWD/yarn.lock} + mkdir -p $out + (cd $out; prefetch-yarn-deps --verbose --builder $yarnLock) + + runHook postBuild + ''; + + outputHashMode = "recursive"; + } // hash_ // (removeAttrs args ["src" "name" "hash" "sha256"])); + + in lib.setFunctionArgs f (lib.functionArgs f) // { + inherit tests; + }; +} diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/fixup.js b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/fixup.js new file mode 100755 index 000000000000..8b91e7efa63f --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/fixup.js @@ -0,0 +1,74 @@ +#!/usr/bin/env node +'use strict' + +const fs = require('fs') +const process = require('process') +const lockfile = require('./yarnpkg-lockfile.js') +const { urlToName } = require('./common.js') + +const fixupYarnLock = async (lockContents, verbose) => { + const lockData = lockfile.parse(lockContents) + + const fixedData = Object.fromEntries( + Object.entries(lockData.object) + .map(([dep, pkg]) => { + const [ url, hash ] = pkg.resolved.split("#", 2) + + if (hash || url.startsWith("https://codeload.github.com")) { + if (verbose) console.log(`Removing integrity for git dependency ${dep}`) + delete pkg.integrity + } + + if (verbose) console.log(`Rewriting URL ${url} for dependency ${dep}`) + pkg.resolved = urlToName(url) + + return [dep, pkg] + }) + ) + + if (verbose) console.log('Done') + + return fixedData +} + +const showUsage = async () => { + process.stderr.write(` +syntax: fixup-yarn-lock [path to yarn.lock] [options] + +Options: + -h --help Show this help + -v --verbose Verbose output +`) + process.exit(1) +} + +const main = async () => { + const args = process.argv.slice(2) + let next, lockFile, verbose + while (next = args.shift()) { + if (next == '--verbose' || next == '-v') { + verbose = true + } else if (next == '--help' || next == '-h') { + showUsage() + } else if (!lockFile) { + lockFile = next + } else { + showUsage() + } + } + let lockContents + try { + lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8') + } catch { + showUsage() + } + + const fixedData = await fixupYarnLock(lockContents, verbose) + await fs.promises.writeFile(lockFile || 'yarn.lock', lockfile.stringify(fixedData)) +} + +main() + .catch(e => { + console.error(e) + process.exit(1) 
+ }) diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/index.js b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/index.js new file mode 100755 index 000000000000..de2a09ee9041 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/index.js @@ -0,0 +1,208 @@ +#!/usr/bin/env node +'use strict' + +const fs = require('fs') +const crypto = require('crypto') +const process = require('process') +const https = require('https') +const child_process = require('child_process') +const path = require('path') +const lockfile = require('./yarnpkg-lockfile.js') +const { promisify } = require('util') +const url = require('url') +const { urlToName } = require('./common.js') + +const execFile = promisify(child_process.execFile) + +const exec = async (...args) => { + const res = await execFile(...args) + if (res.error) throw new Error(res.stderr) + return res +} + +const downloadFileHttps = (fileName, url, expectedHash, hashType = 'sha1') => { + return new Promise((resolve, reject) => { + const get = (url, redirects = 0) => https.get(url, (res) => { + if(redirects > 10) { + reject('Too many redirects!'); + return; + } + if(res.statusCode === 301 || res.statusCode === 302) { + return get(res.headers.location, redirects + 1) + } + const file = fs.createWriteStream(fileName) + const hash = crypto.createHash(hashType) + res.pipe(file) + res.pipe(hash).setEncoding('hex') + res.on('end', () => { + file.close() + const h = hash.read() + if (expectedHash === undefined){ + console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against. Downloaded file had hash ${h}.`); + } else if (h != expectedHash) return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h}`)) + resolve() + }) + res.on('error', e => reject(e)) + }) + get(url) + }) +} + +const downloadGit = async (fileName, url, rev) => { + await exec('nix-prefetch-git', [ + '--out', fileName + '.tmp', + '--url', url, + '--rev', rev, + '--builder' + ]) + + await exec('tar', [ + // hopefully make it reproducible across runs and systems + '--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1', + + // Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79 + '--mode', 'u+w', + + '-C', fileName + '.tmp', + '-cf', fileName, '.' + ]) + + await exec('rm', [ '-rf', fileName + '.tmp', ]) +} + +const isGitUrl = pattern => { + // https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47 + const GIT_HOSTS = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org'] + const GIT_PATTERN_MATCHERS = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/] + + for (const matcher of GIT_PATTERN_MATCHERS) if (matcher.test(pattern)) return true + + const {hostname, path} = url.parse(pattern) + if (hostname && path && GIT_HOSTS.indexOf(hostname) >= 0 + // only if dependency is pointing to a git repo, + // e.g. 
facebook/flow and not file in a git repo facebook/flow/archive/v1.0.0.tar.gz + && path.split('/').filter(p => !!p).length === 2 + ) return true + + return false +} + +const downloadPkg = (pkg, verbose) => { + const [ name, spec ] = pkg.key.split('@', 2); + if (spec.startsWith('file:')) { + console.info(`ignoring relative file:path dependency "${spec}"`) + return + } + + const [ url, hash ] = pkg.resolved.split('#') + if (verbose) console.log('downloading ' + url) + const fileName = urlToName(url) + if (url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')) { + const s = url.split('/') + return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1]) + } else if (url.startsWith('https://github.com/') && url.endsWith('.tar.gz')) { + const s = url.split('/') + return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1].replace(/.tar.gz$/, '')) + } else if (isGitUrl(url)) { + return downloadGit(fileName, url.replace(/^git\+/, ''), hash) + } else if (url.startsWith('https://')) { + if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) { + const [ type, checksum ] = pkg.integrity.split('-') + return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), type) + } + return downloadFileHttps(fileName, url, hash) + } else if (url.startsWith('file:')) { + console.warn(`ignoring unsupported file:path url "${url}"`) + } else { + throw new Error('don\'t know how to download "' + url + '"') + } +} + +const performParallel = tasks => { + const worker = async () => { + while (tasks.length > 0) await tasks.shift()() + } + + const workers = [] + for (let i = 0; i < 4; i++) { + workers.push(worker()) + } + + return Promise.all(workers) +} + +const prefetchYarnDeps = async (lockContents, verbose) => { + const lockData = lockfile.parse(lockContents) + const tasks = Object.values( + Object.entries(lockData.object) + .map(([key, value]) => { + return { key, ...value } + }) + .reduce((out, pkg) => { + out[pkg.resolved] = pkg + return out + }, {}) + ) + .map(pkg => () => downloadPkg(pkg, verbose)) + + await performParallel(tasks) + await fs.promises.writeFile('yarn.lock', lockContents) + if (verbose) console.log('Done') +} + +const showUsage = async () => { + process.stderr.write(` +syntax: prefetch-yarn-deps [path to yarn.lock] [options] + +Options: + -h --help Show this help + -v --verbose Verbose output + --builder Only perform the download to current directory, then exit +`) + process.exit(1) +} + +const main = async () => { + const args = process.argv.slice(2) + let next, lockFile, verbose, isBuilder + while (next = args.shift()) { + if (next == '--builder') { + isBuilder = true + } else if (next == '--verbose' || next == '-v') { + verbose = true + } else if (next == '--help' || next == '-h') { + showUsage() + } else if (!lockFile) { + lockFile = next + } else { + showUsage() + } + } + let lockContents + try { + lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8') + } catch { + showUsage() + } + + if (isBuilder) { + await prefetchYarnDeps(lockContents, verbose) + } else { + const { stdout: tmpDir } = await exec('mktemp', [ '-d' ]) + + try { + process.chdir(tmpDir.trim()) + await prefetchYarnDeps(lockContents, verbose) + const { stdout: hash } = await exec('nix-hash', [ '--type', 'sha256', '--base32', tmpDir.trim() ]) + console.log(hash) + } finally { + await exec('rm', [ '-rf', tmpDir.trim() ]) + } + } +} + +main() + .catch(e => { + console.error(e) + process.exit(1) + }) 
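For context, a minimal usage sketch of the fetchYarnDeps fetcher defined above, with hypothetical names (my-app, the source layout and the hash are placeholders, not part of this tree): the fixed-output derivation serves as a yarn offline mirror, fixup-yarn-lock (installed by prefetch-yarn-deps above) rewrites yarn.lock to the mirrored file names, and yarn then installs fully offline.

    { stdenv, nodejs, yarn, fetchYarnDeps, prefetch-yarn-deps }:
    stdenv.mkDerivation rec {
      pname = "my-app";          # hypothetical package
      version = "0.1.0";
      src = ./.;                 # assumes a checkout containing package.json and yarn.lock
      offlineCache = fetchYarnDeps {
        yarnLock = "${src}/yarn.lock";
        hash = "sha256-...";     # placeholder; use the hash reported by the first (failing) build
      };
      nativeBuildInputs = [ nodejs yarn prefetch-yarn-deps ];
      configurePhase = ''
        export HOME="$NIX_BUILD_TOP"
        yarn config --offline set yarn-offline-mirror "$offlineCache"
        fixup-yarn-lock yarn.lock
        yarn install --offline --frozen-lockfile --ignore-scripts --no-progress --non-interactive
      '';
    }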
diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/default.nix b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/default.nix new file mode 100644 index 000000000000..8ffe103a9548 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/default.nix @@ -0,0 +1,20 @@ +{ testers, fetchYarnDeps, ... }: + +{ + simple = testers.invalidateFetcherByDrvHash fetchYarnDeps { + yarnLock = ./simple.lock; + sha256 = "sha256-FRrt8BixleILmFB2ZV8RgPNLqgS+dlH5nWoPgeaaNQ8="; + }; + gitDep = testers.invalidateFetcherByDrvHash fetchYarnDeps { + yarnLock = ./git.lock; + sha256 = "sha256-f90IiEzHDiBdswWewRBHcJfqqpPipaMg8N0DVLq2e8Q="; + }; + githubDep = testers.invalidateFetcherByDrvHash fetchYarnDeps { + yarnLock = ./github.lock; + sha256 = "sha256-DIKrhDKoqm7tHZmcuh9eK9VTqp6BxeW0zqDUpY4F57A="; + }; + gitUrlDep = testers.invalidateFetcherByDrvHash fetchYarnDeps { + yarnLock = ./giturl.lock; + sha256 = "sha256-VPnyqN6lePQZGXwR7VhbFnP7/0/LB621RZwT1F+KzVQ="; + }; +} diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/git.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/git.lock new file mode 100644 index 000000000000..9eda5b2c409d --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/git.lock @@ -0,0 +1,7 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"async@git+https://github.com/caolan/async": + version "3.2.1" + resolved "git+https://github.com/caolan/async#fc9ba651341af5ab974aade6b1640e345912be83" diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/github.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/github.lock new file mode 100644 index 000000000000..057e043a5390 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/github.lock @@ -0,0 +1,7 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"async@github:caolan/async": + version "3.2.1" + resolved "https://codeload.github.com/caolan/async/tar.gz/fc9ba651341af5ab974aade6b1640e345912be83" diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/giturl.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/giturl.lock new file mode 100644 index 000000000000..154030a7e358 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/giturl.lock @@ -0,0 +1,11 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"autocomplete-atom-api@https://codeload.github.com/atom/autocomplete-atom-api/legacy.tar.gz/refs/tags/v0.10.7": + version "0.10.7" + resolved "https://codeload.github.com/atom/autocomplete-atom-api/legacy.tar.gz/refs/tags/v0.10.7#c9d51fa721d543ccfc1b2189101155e81db6b97d" + +"find-and-replace@https://github.com/atom-community/find-and-replace/archive/refs/tags/v0.220.1.tar.gz": + version "0.220.1" + resolved "https://github.com/atom-community/find-and-replace/archive/refs/tags/v0.220.1.tar.gz#d7a0f56511e38ee72a89895a795bbbcab4a1a405" diff --git a/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/simple.lock b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/simple.lock new file mode 100644 index 000000000000..db2f4b2be4b7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/node/fetch-yarn-deps/tests/simple.lock @@ -0,0 +1,8 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +lit-html@1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/lit-html/-/lit-html-1.4.1.tgz#0c6f3ee4ad4eb610a49831787f0478ad8e9ae5e0" + integrity sha512-B9btcSgPYb1q4oSOb/PrOT6Z/H+r6xuNzfH4lFli/AWhYwdtrgQkQWBbIc6mdnf6E2IL3gDXdkkqNktpU0OZQA== diff --git a/nixpkgs/pkgs/build-support/nuke-references/darwin-sign-fixup.sh b/nixpkgs/pkgs/build-support/nuke-references/darwin-sign-fixup.sh new file mode 100644 index 000000000000..940c18e5a627 --- /dev/null +++ b/nixpkgs/pkgs/build-support/nuke-references/darwin-sign-fixup.sh @@ -0,0 +1,5 @@ +# Fixup hook for nukeReferences, not stdenv + +source @signingUtils@ + +fixupHooks+=(signIfRequired) diff --git a/nixpkgs/pkgs/build-support/nuke-references/default.nix b/nixpkgs/pkgs/build-support/nuke-references/default.nix new file mode 100644 index 000000000000..8f02c559238e --- /dev/null +++ b/nixpkgs/pkgs/build-support/nuke-references/default.nix @@ -0,0 +1,41 @@ +# The program `nuke-refs' created by this derivation replaces all +# references to the Nix store in the specified files by a non-existant +# path (/nix/store/eeee...). This is useful for getting rid of +# dependencies that you know are not actually needed at runtime. + +{ lib, stdenvNoCC, perl, signingUtils, shell ? stdenvNoCC.shell }: + +let + stdenv = stdenvNoCC; + + darwinCodeSign = stdenv.targetPlatform.isDarwin && stdenv.targetPlatform.isAarch64; +in + +stdenvNoCC.mkDerivation { + name = "nuke-references"; + + strictDeps = true; + enableParallelBuilding = true; + dontUnpack = true; + dontConfigure = true; + dontBuild = true; + + installPhase = '' + mkdir -p $out/bin + substituteAll ${./nuke-refs.sh} $out/bin/nuke-refs + chmod a+x $out/bin/nuke-refs + ''; + + postFixup = lib.optionalString darwinCodeSign '' + mkdir -p $out/nix-support + substituteAll ${./darwin-sign-fixup.sh} $out/nix-support/setup-hooks.sh + ''; + + # FIXME: get rid of perl dependency. + env = { + inherit perl; + inherit (builtins) storeDir; + shell = lib.getBin shell + (shell.shellPath or ""); + signingUtils = lib.optionalString darwinCodeSign signingUtils; + }; +} diff --git a/nixpkgs/pkgs/build-support/nuke-references/nuke-refs.sh b/nixpkgs/pkgs/build-support/nuke-references/nuke-refs.sh new file mode 100644 index 000000000000..21eb855cbad9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/nuke-references/nuke-refs.sh @@ -0,0 +1,33 @@ +#! @shell@ + +fixupHooks=() + +if [ -e @out@/nix-support/setup-hooks.sh ]; then + source @out@/nix-support/setup-hooks.sh +fi + +excludes="" +while getopts e: o; do + case "$o" in + e) storeId=$(echo "$OPTARG" | @perl@/bin/perl -ne "print \"\$1\" if m|^\Q@storeDir@\E/([a-z0-9]{32})-.*|") + if [ -z "$storeId" ]; then + echo "-e argument must be a Nix store path" + exit 1 + fi + excludes="$excludes(?!$storeId)" + ;; + esac +done +shift $(($OPTIND-1)) + +for i in "$@"; do + if test ! -L "$i" -a -f "$i"; then + cat "$i" | @perl@/bin/perl -pe "s|\Q@storeDir@\E/$excludes[a-z0-9]{32}-|@storeDir@/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-|g" > "$i.tmp" + if test -x "$i"; then chmod +x "$i.tmp"; fi + mv "$i.tmp" "$i" + + for hook in "${fixupHooks[@]}"; do + eval "$hook" "$i" + done + fi +done diff --git a/nixpkgs/pkgs/build-support/ocaml/dune.nix b/nixpkgs/pkgs/build-support/ocaml/dune.nix new file mode 100644 index 000000000000..972244f80b0a --- /dev/null +++ b/nixpkgs/pkgs/build-support/ocaml/dune.nix @@ -0,0 +1,50 @@ +{ lib, stdenv, ocaml, findlib, dune_1, dune_2, dune_3 }: + +{ pname, version, nativeBuildInputs ? [], enableParallelBuilding ? true, ... 
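As a quick, hedged illustration of the nuke-refs tool introduced above (package and binary names are placeholders): a derivation can drop a store reference it knows is not needed at runtime by running nuke-refs in postFixup.

    { stdenv, nukeReferences }:
    stdenv.mkDerivation {
      pname = "example-tool";    # hypothetical package
      version = "0.1";
      src = ./.;
      nativeBuildInputs = [ nukeReferences ];
      postFixup = ''
        # Rewrites any /nix/store/... paths embedded in the binary to the dummy
        # /nix/store/eeee... path, so they stop counting as runtime dependencies.
        nuke-refs $out/bin/example-tool
      '';
    }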
}@args: + +let Dune = + let dune-version = args.duneVersion or "3"; in + { "1" = dune_1; "2" = dune_2; "3" = dune_3; }."${dune-version}" +; in + +if (args ? minimumOCamlVersion && lib.versionOlder ocaml.version args.minimumOCamlVersion) || + (args ? minimalOCamlVersion && lib.versionOlder ocaml.version args.minimalOCamlVersion) +then throw "${pname}-${version} is not available for OCaml ${ocaml.version}" +else + +stdenv.mkDerivation ({ + + inherit enableParallelBuilding; + dontAddStaticConfigureFlags = true; + configurePlatforms = []; + + buildPhase = '' + runHook preBuild + dune build -p ${pname} ''${enableParallelBuilding:+-j $NIX_BUILD_CORES} + runHook postBuild + ''; + checkPhase = '' + runHook preCheck + dune runtest -p ${pname} ''${enableParallelBuilding:+-j $NIX_BUILD_CORES} + runHook postCheck + ''; + installPhase = '' + runHook preInstall + dune install --prefix $out --libdir $OCAMLFIND_DESTDIR ${pname} \ + ${if lib.versionAtLeast Dune.version "2.9" + then "--docdir $out/share/doc --mandir $out/share/man" + else ""} + runHook postInstall + ''; + + strictDeps = true; + +} // (builtins.removeAttrs args [ "minimalOCamlVersion" "duneVersion" ]) // { + + name = "ocaml${ocaml.version}-${pname}-${version}"; + + nativeBuildInputs = [ ocaml Dune findlib ] ++ nativeBuildInputs; + + meta = (args.meta or {}) // { platforms = args.meta.platforms or ocaml.meta.platforms; }; + +}) diff --git a/nixpkgs/pkgs/build-support/ocaml/oasis.nix b/nixpkgs/pkgs/build-support/ocaml/oasis.nix new file mode 100644 index 000000000000..91daad59050d --- /dev/null +++ b/nixpkgs/pkgs/build-support/ocaml/oasis.nix @@ -0,0 +1,46 @@ +{ lib, stdenv, ocaml_oasis, ocaml, findlib, ocamlbuild }: + +{ pname, version, nativeBuildInputs ? [], meta ? { platforms = ocaml.meta.platforms or []; }, + minimumOCamlVersion ? null, + createFindlibDestdir ? true, + dontStrip ? true, + ... +}@args: + +if args ? minimumOCamlVersion && + lib.versionOlder ocaml.version args.minimumOCamlVersion +then throw "${pname}-${version} is not available for OCaml ${ocaml.version}" +else + +stdenv.mkDerivation (args // { + name = "ocaml${ocaml.version}-${pname}-${version}"; + + nativeBuildInputs = [ ocaml findlib ocamlbuild ocaml_oasis ] ++ nativeBuildInputs; + + inherit createFindlibDestdir; + inherit dontStrip; + + strictDeps = true; + + buildPhase = '' + runHook preBuild + oasis setup + ocaml setup.ml -configure --prefix $OCAMLFIND_DESTDIR --exec-prefix $out + ocaml setup.ml -build + runHook postBuild + ''; + + checkPhase = '' + runHook preCheck + ocaml setup.ml -test + runHook postCheck + ''; + + installPhase = '' + runHook preInstall + mkdir -p $out + ocaml setup.ml -install + runHook postInstall + ''; + +}) diff --git a/nixpkgs/pkgs/build-support/ocaml/topkg.nix b/nixpkgs/pkgs/build-support/ocaml/topkg.nix new file mode 100644 index 000000000000..73be5815e44c --- /dev/null +++ b/nixpkgs/pkgs/build-support/ocaml/topkg.nix @@ -0,0 +1,28 @@ +{ lib, stdenv, fetchurl, ocaml, findlib, topkg, ocamlbuild, cmdliner, odoc, b0 +}: + +{ pname, version, nativeBuildInputs ? [ ], buildInputs ? [ ], ... }@args: + +lib.throwIf (args ? 
minimalOCamlVersion + && lib.versionOlder ocaml.version args.minimalOCamlVersion) +"${pname}-${version} is not available for OCaml ${ocaml.version}" + +stdenv.mkDerivation ({ + + dontAddStaticConfigureFlags = true; + configurePlatforms = [ ]; + strictDeps = true; + inherit (topkg) buildPhase installPhase; + +} // (builtins.removeAttrs args [ "minimalOCamlVersion" ]) // { + + name = "ocaml${ocaml.version}-${pname}-${version}"; + + nativeBuildInputs = [ ocaml findlib ocamlbuild topkg ] ++ nativeBuildInputs; + buildInputs = [ topkg ] ++ buildInputs; + + meta = (args.meta or { }) // { + platforms = args.meta.platforms or ocaml.meta.platforms; + }; + +}) diff --git a/nixpkgs/pkgs/build-support/oci-tools/default.nix b/nixpkgs/pkgs/build-support/oci-tools/default.nix new file mode 100644 index 000000000000..18b238033ffd --- /dev/null +++ b/nixpkgs/pkgs/build-support/oci-tools/default.nix @@ -0,0 +1,78 @@ +{ lib, writeText, runCommand, writeReferencesToFile }: + +{ + buildContainer = + { args + , mounts ? {} + , os ? "linux" + , arch ? "x86_64" + , readonly ? false + }: + let + sysMounts = { + "/proc" = { + type = "proc"; + source = "proc"; + }; + "/dev" = { + type = "tmpfs"; + source = "tmpfs"; + options = [ "nosuid" "strictatime" "mode=755" "size=65536k" ]; + }; + "/dev/pts" = { + type = "devpts"; + source = "devpts"; + options = [ "nosuid" "noexec" "newinstance" "ptmxmode=0666" "mode=755" "gid=5" ]; + }; + "/dev/shm" = { + type = "tmpfs"; + source = "shm"; + options = [ "nosuid" "noexec" "nodev" "mode=1777" "size=65536k" ]; + }; + "/dev/mqueue" = { + type = "mqueue"; + source = "mqueue"; + options = [ "nosuid" "noexec" "nodev" ]; + }; + "/sys" = { + type = "sysfs"; + source = "sysfs"; + options = [ "nosuid" "noexec" "nodev" "ro" ]; + }; + "/sys/fs/cgroup" = { + type = "cgroup"; + source = "cgroup"; + options = [ "nosuid" "noexec" "nodev" "relatime" "ro" ]; + }; + }; + config = writeText "config.json" (builtins.toJSON { + ociVersion = "1.0.0"; + platform = { + inherit os arch; + }; + + linux = { + namespaces = map (type: { inherit type; }) [ "pid" "network" "mount" "ipc" "uts" ]; + }; + + root = { path = "rootfs"; inherit readonly; }; + + process = { + inherit args; + user = { uid = 0; gid = 0; }; + cwd = "/"; + }; + + mounts = lib.mapAttrsToList (destination: { type, source, options ?
null }: { + inherit destination type source options; + }) sysMounts; + }); + in + runCommand "join" {} '' + set -o pipefail + mkdir -p $out/rootfs/{dev,proc,sys} + cp ${config} $out/config.json + xargs tar c < ${writeReferencesToFile args} | tar -xC $out/rootfs/ + ''; +} + diff --git a/nixpkgs/pkgs/build-support/php/build-composer-project.nix b/nixpkgs/pkgs/build-support/php/build-composer-project.nix new file mode 100644 index 000000000000..778aa35fa6a5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/php/build-composer-project.nix @@ -0,0 +1,85 @@ +{ callPackage, stdenvNoCC, lib, writeTextDir, php, makeBinaryWrapper, fetchFromGitHub, fetchurl }: + +let + buildComposerProjectOverride = finalAttrs: previousAttrs: + + let + phpDrv = finalAttrs.php or php; + composer = finalAttrs.composer or phpDrv.packages.composer; + composer-local-repo-plugin = callPackage ./pkgs/composer-local-repo-plugin.nix { }; + in + { + composerLock = previousAttrs.composerLock or null; + composerNoDev = previousAttrs.composerNoDev or true; + composerNoPlugins = previousAttrs.composerNoPlugins or true; + composerNoScripts = previousAttrs.composerNoScripts or true; + composerStrictValidation = previousAttrs.composerStrictValidation or true; + + nativeBuildInputs = (previousAttrs.nativeBuildInputs or [ ]) ++ [ + composer + composer-local-repo-plugin + phpDrv + phpDrv.composerHooks.composerInstallHook + ]; + + buildInputs = (previousAttrs.buildInputs or [ ]) ++ [ + phpDrv + ]; + + patches = previousAttrs.patches or [ ]; + strictDeps = previousAttrs.strictDeps or true; + + # Should we keep these empty phases? + configurePhase = previousAttrs.configurePhase or '' + runHook preConfigure + + runHook postConfigure + ''; + + buildPhase = previousAttrs.buildPhase or '' + runHook preBuild + + runHook postBuild + ''; + + doCheck = previousAttrs.doCheck or true; + checkPhase = previousAttrs.checkPhase or '' + runHook preCheck + + runHook postCheck + ''; + + installPhase = previousAttrs.installPhase or '' + runHook preInstall + + runHook postInstall + ''; + + doInstallCheck = previousAttrs.doInstallCheck or false; + installCheckPhase = previousAttrs.installCheckPhase or '' + runHook preCheckInstall + + runHook postCheckInstall + ''; + + composerRepository = phpDrv.mkComposerRepository { + inherit composer composer-local-repo-plugin; + inherit (finalAttrs) patches pname src vendorHash version; + + composerLock = previousAttrs.composerLock or null; + composerNoDev = previousAttrs.composerNoDev or true; + composerNoPlugins = previousAttrs.composerNoPlugins or true; + composerNoScripts = previousAttrs.composerNoScripts or true; + composerStrictValidation = previousAttrs.composerStrictValidation or true; + }; + + COMPOSER_CACHE_DIR="/dev/null"; + COMPOSER_DISABLE_NETWORK="1"; + COMPOSER_MIRROR_PATH_REPOS="1"; + + meta = previousAttrs.meta or { } // { + platforms = lib.platforms.all; + }; + }; +in +args: (stdenvNoCC.mkDerivation args).overrideAttrs buildComposerProjectOverride diff --git a/nixpkgs/pkgs/build-support/php/build-composer-repository.nix b/nixpkgs/pkgs/build-support/php/build-composer-repository.nix new file mode 100644 index 000000000000..5b31f86e61cf --- /dev/null +++ b/nixpkgs/pkgs/build-support/php/build-composer-repository.nix @@ -0,0 +1,96 @@ +{ callPackage, stdenvNoCC, lib, writeTextDir, fetchFromGitHub, php }: + +let + mkComposerRepositoryOverride = + /* + We cannot destruct finalAttrs since the attrset below is used to construct it + and Nix currently does not support lazy attribute names. + { + php ? 
null, + composer ? null, + composerLock ? "composer.lock", + src, + vendorHash, + ... + }@finalAttrs: + */ + finalAttrs: previousAttrs: + + let + phpDrv = finalAttrs.php or php; + composer = finalAttrs.composer or phpDrv.packages.composer; + composer-local-repo-plugin = callPackage ./pkgs/composer-local-repo-plugin.nix { }; + in + assert (lib.assertMsg (previousAttrs ? src) "mkComposerRepository expects src argument."); + assert (lib.assertMsg (previousAttrs ? vendorHash) "mkComposerRepository expects vendorHash argument."); + assert (lib.assertMsg (previousAttrs ? version) "mkComposerRepository expects version argument."); + assert (lib.assertMsg (previousAttrs ? pname) "mkComposerRepository expects pname argument."); + assert (lib.assertMsg (previousAttrs ? composerNoDev) "mkComposerRepository expects composerNoDev argument."); + assert (lib.assertMsg (previousAttrs ? composerNoPlugins) "mkComposerRepository expects composerNoPlugins argument."); + assert (lib.assertMsg (previousAttrs ? composerNoScripts) "mkComposerRepository expects composerNoScripts argument."); + { + composerNoDev = previousAttrs.composerNoDev or true; + composerNoPlugins = previousAttrs.composerNoPlugins or true; + composerNoScripts = previousAttrs.composerNoScripts or true; + composerStrictValidation = previousAttrs.composerStrictValidation or true; + + name = "${previousAttrs.pname}-${previousAttrs.version}-composer-repository"; + + # See https://github.com/NixOS/nix/issues/6660 + dontPatchShebangs = previousAttrs.dontPatchShebangs or true; + + nativeBuildInputs = (previousAttrs.nativeBuildInputs or [ ]) ++ [ + composer + composer-local-repo-plugin + phpDrv + phpDrv.composerHooks.composerRepositoryHook + ]; + + buildInputs = previousAttrs.buildInputs or [ ]; + + strictDeps = previousAttrs.strictDeps or true; + + # Should we keep these empty phases? + configurePhase = previousAttrs.configurePhase or '' + runHook preConfigure + + runHook postConfigure + ''; + + buildPhase = previousAttrs.buildPhase or '' + runHook preBuild + + runHook postBuild + ''; + + doCheck = previousAttrs.doCheck or true; + checkPhase = previousAttrs.checkPhase or '' + runHook preCheck + + runHook postCheck + ''; + + installPhase = previousAttrs.installPhase or '' + runHook preInstall + + runHook postInstall + ''; + + doInstallCheck = previousAttrs.doInstallCheck or false; + installCheckPhase = previousAttrs.installCheckPhase or '' + runHook preCheckInstall + + runHook postCheckInstall + ''; + + COMPOSER_CACHE_DIR = "/dev/null"; + COMPOSER_MIRROR_PATH_REPOS = "1"; + COMPOSER_HTACCESS_PROTECT = "0"; + COMPOSER_DISABLE_NETWORK = "0"; + + outputHashMode = "recursive"; + outputHashAlgo = if (finalAttrs ? vendorHash && finalAttrs.vendorHash != "") then null else "sha256"; + outputHash = finalAttrs.vendorHash or ""; + }; +in +args: (stdenvNoCC.mkDerivation args).overrideAttrs mkComposerRepositoryOverride diff --git a/nixpkgs/pkgs/build-support/php/build-pecl.nix b/nixpkgs/pkgs/build-support/php/build-pecl.nix new file mode 100644 index 000000000000..6f38a668f3a3 --- /dev/null +++ b/nixpkgs/pkgs/build-support/php/build-pecl.nix @@ -0,0 +1,45 @@ +{ stdenv, lib, php, autoreconfHook, fetchurl, re2c, nix-update-script }: + +{ pname +, version +, internalDeps ? [ ] +, peclDeps ? [ ] +, buildInputs ? [ ] +, nativeBuildInputs ? [ ] +, postPhpize ? "" +, makeFlags ? [ ] +, src ? fetchurl ({ + url = "https://pecl.php.net/get/${pname}-${version}.tgz"; + } // lib.filterAttrs (attrName: _: lib.elem attrName [ "sha256" "hash" ]) args) +, passthru ? { } +, ... 
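For orientation, a hedged sketch of how the two composer builders above fit together from a package's point of view, assuming the entry point is exposed as php.buildComposerProject in the package set (attribute values below are placeholders): the project's composer.lock drives mkComposerRepository, and vendorHash pins that fixed-output repository.

    { php }:
    php.buildComposerProject {
      pname = "example-cli";     # hypothetical package
      version = "1.2.3";
      src = ./.;                 # assumes composer.json and composer.lock in the source tree
      # Pins the fixed-output composer repository built by mkComposerRepository above.
      vendorHash = "sha256-..."; # placeholder; copy the value from the hash-mismatch error on first build
    }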
+}@args: + +stdenv.mkDerivation (args // { + name = "php-${pname}-${version}"; + extensionName = pname; + + inherit src; + + nativeBuildInputs = [ autoreconfHook re2c ] ++ nativeBuildInputs; + buildInputs = [ php ] ++ peclDeps ++ buildInputs; + + makeFlags = [ "EXTENSION_DIR=$(out)/lib/php/extensions" ] ++ makeFlags; + + autoreconfPhase = '' + phpize + ${postPhpize} + ${lib.concatMapStringsSep "\n" + (dep: "mkdir -p ext; ln -s ${dep.dev}/include ext/${dep.extensionName}") + internalDeps} + ''; + checkPhase = "NO_INTERACTION=yes make test"; + + passthru = passthru // { + # These flags were introduced for `nix-update` so that it can update + # PHP extensions correctly. + # See the corresponding PR: https://github.com/Mic92/nix-update/pull/123 + isPhpExtension = true; + updateScript = nix-update-script {}; + }; +}) diff --git a/nixpkgs/pkgs/build-support/php/hooks/composer-install-hook.sh b/nixpkgs/pkgs/build-support/php/hooks/composer-install-hook.sh new file mode 100644 index 000000000000..6fe1c4e5f7dd --- /dev/null +++ b/nixpkgs/pkgs/build-support/php/hooks/composer-install-hook.sh @@ -0,0 +1,164 @@ +declare composerRepository +declare version +declare composerNoDev +declare composerNoPlugins +declare composerNoScripts + +preConfigureHooks+=(composerInstallConfigureHook) +preBuildHooks+=(composerInstallBuildHook) +preCheckHooks+=(composerInstallCheckHook) +preInstallHooks+=(composerInstallInstallHook) + +composerInstallConfigureHook() { + echo "Executing composerInstallConfigureHook" + + if [[ ! -e "${composerRepository}" ]]; then + echo "No local composer repository found." + exit 1 + fi + + if [[ -e "$composerLock" ]]; then + cp "$composerLock" composer.lock + fi + + if [[ ! -f "composer.lock" ]]; then + composer \ + --no-ansi \ + --no-install \ + --no-interaction \ + ${composerNoDev:+--no-dev} \ + ${composerNoPlugins:+--no-plugins} \ + ${composerNoScripts:+--no-scripts} \ + update + + mkdir -p $out + cp composer.lock $out/ + + echo + echo -e "\e[31mERROR: No composer.lock found\e[0m" + echo + echo -e '\e[31mNo composer.lock file found, consider adding one to your repository to ensure reproducible builds.\e[0m' + echo -e "\e[31mIn the meantime, a composer.lock file has been generated for you in $out/composer.lock\e[0m" + echo + echo -e '\e[31mTo fix the issue:\e[0m' + echo -e "\e[31m1. Copy the composer.lock file from $out/composer.lock to the project's source:\e[0m" + echo -e "\e[31m cp $out/composer.lock <path>\e[0m" + echo -e '\e[31m2. Add the composerLock attribute, pointing to the copied composer.lock file:\e[0m' + echo -e '\e[31m composerLock = ./composer.lock;\e[0m' + echo + + exit 1 + fi + + echo "Validating consistency between composer.lock and ${composerRepository}/composer.lock" + if ! @cmp@ -s "composer.lock" "${composerRepository}/composer.lock"; then + echo + echo -e "\e[31mERROR: vendorHash is out of date\e[0m" + echo + echo -e "\e[31mcomposer.lock is not the same in $composerRepository\e[0m" + echo + echo -e "\e[31mTo fix the issue:\e[0m" + echo -e '\e[31m1. Set vendorHash to an empty string: `vendorHash = "";`\e[0m' + echo -e '\e[31m2. Build the derivation and wait for it to fail with a hash mismatch\e[0m' + echo -e '\e[31m3. Copy the "got: sha256-..."
value back into the vendorHash field\e[0m' + echo -e '\e[31m You should have: vendorHash = "sha256-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX=";\e[0m' + echo + + exit 1 + fi + + chmod +w composer.json composer.lock + + echo "Finished composerInstallConfigureHook" +} + +composerInstallBuildHook() { + echo "Executing composerInstallBuildHook" + + # Since this file cannot be generated in the composer-repository-hook.sh + # because the file contains hardcoded nix store paths, we generate it here. + composer-local-repo-plugin --no-ansi build-local-repo -m "${composerRepository}" . + + # Remove all the repositories of type "composer" and "vcs" + # from the composer.json file. + jq -r -c 'del(try .repositories[] | select(.type == "composer" or .type == "vcs"))' composer.json | sponge composer.json + + # Configure composer to disable packagist and avoid using the network. + composer config repo.packagist false + # Configure composer to use the local repository. + composer config repo.composer composer file://"$PWD"/packages.json + + # Since the composer.json file has been modified in the previous step, the + # composer.lock file needs to be updated. + COMPOSER_ROOT_VERSION="${version}" \ + composer \ + --lock \ + --no-ansi \ + --no-install \ + --no-interaction \ + ${composerNoDev:+--no-dev} \ + ${composerNoPlugins:+--no-plugins} \ + ${composerNoScripts:+--no-scripts} \ + update + + echo "Finished composerInstallBuildHook" +} + +composerInstallCheckHook() { + echo "Executing composerInstallCheckHook" + + if ! composer validate --strict --no-ansi --no-interaction --quiet; then + if [ ! -z "${composerStrictValidation-}" ]; then + echo + echo -e "\e[31mERROR: composer files validation failed\e[0m" + echo + echo -e '\e[31mThe validation of the composer.json and composer.lock failed.\e[0m' + echo -e '\e[31mMake sure that the file composer.lock is consistent with composer.json.\e[0m' + echo + exit 1 + else + echo + echo -e "\e[33mWARNING: composer files validation failed\e[0m" + echo + echo -e '\e[33mThe validation of the composer.json and composer.lock failed.\e[0m' + echo -e '\e[33mMake sure that the file composer.lock is consistent with composer.json.\e[0m' + echo + echo -e '\e[33mThis check is not blocking, but it is recommended to fix the issue.\e[0m' + echo + fi + fi + + echo "Finished composerInstallCheckHook" +} + +composerInstallInstallHook() { + echo "Executing composerInstallInstallHook" + + # Finally, run `composer install` to install the dependencies and generate + # the autoloader. + # The COMPOSER_ROOT_VERSION environment variable is needed only for + # vimeo/psalm. + COMPOSER_ROOT_VERSION="${version}" \ + composer \ + --no-ansi \ + --no-interaction \ + ${composerNoDev:+--no-dev} \ + ${composerNoPlugins:+--no-plugins} \ + ${composerNoScripts:+--no-scripts} \ + install + + # Remove packages.json, we don't need it in the store. + rm packages.json + + # Copy the relevant files only in the store. + mkdir -p "$out"/share/php/"${pname}" + cp -r . "$out"/share/php/"${pname}"/ + + # Create symlinks for the binaries. 
+ jq -r -c 'try .bin[]' composer.json | while read -r bin; do + mkdir -p "$out"/share/php/"${pname}" "$out"/bin + makeWrapper "$out"/share/php/"${pname}"/"$bin" "$out"/bin/"$(basename "$bin")" + done + + echo "Finished composerInstallInstallHook" +} diff --git a/nixpkgs/pkgs/build-support/php/hooks/composer-repository-hook.sh b/nixpkgs/pkgs/build-support/php/hooks/composer-repository-hook.sh new file mode 100644 index 000000000000..03783d9d639c --- /dev/null +++ b/nixpkgs/pkgs/build-support/php/hooks/composer-repository-hook.sh @@ -0,0 +1,105 @@ +declare composerLock +declare version +declare composerNoDev +declare composerNoPlugins +declare composerNoScripts +declare composerStrictValidation + +preConfigureHooks+=(composerRepositoryConfigureHook) +preBuildHooks+=(composerRepositoryBuildHook) +preCheckHooks+=(composerRepositoryCheckHook) +preInstallHooks+=(composerRepositoryInstallHook) + +composerRepositoryConfigureHook() { + echo "Executing composerRepositoryConfigureHook" + + if [[ -e "$composerLock" ]]; then + cp $composerLock composer.lock + fi + + if [[ ! -f "composer.lock" ]]; then + COMPOSER_ROOT_VERSION="${version}" \ + composer \ + --no-ansi \ + --no-install \ + --no-interaction \ + ${composerNoDev:+--no-dev} \ + ${composerNoPlugins:+--no-plugins} \ + ${composerNoScripts:+--no-scripts} \ + update + + mkdir -p $out + cp composer.lock $out/ + + echo + echo -e "\e[31mERROR: No composer.lock found\e[0m" + echo + echo -e '\e[31mNo composer.lock file found, consider adding one to your repository to ensure reproducible builds.\e[0m' + echo -e "\e[31mIn the meantime, a composer.lock file has been generated for you in $out/composer.lock\e[0m" + echo + echo -e '\e[31mTo fix the issue:\e[0m' + echo -e "\e[31m1. Copy the composer.lock file from $out/composer.lock to the project's source:\e[0m" + echo -e "\e[31m cp $out/composer.lock <path>\e[0m" + echo -e '\e[31m2. Add the composerLock attribute, pointing to the copied composer.lock file:\e[0m' + echo -e '\e[31m composerLock = ./composer.lock;\e[0m' + echo + + exit 1 + fi + + echo "Finished composerRepositoryConfigureHook" +} + +composerRepositoryBuildHook() { + echo "Executing composerRepositoryBuildHook" + + mkdir -p repository + + # Build the local composer repository + # The command 'build-local-repo' is provided by the Composer plugin + # nix-community/composer-local-repo-plugin. + composer-local-repo-plugin --no-ansi build-local-repo ${composerNoDev:+--no-dev} -r repository + + echo "Finished composerRepositoryBuildHook" +} + +composerRepositoryCheckHook() { + echo "Executing composerRepositoryCheckHook" + + if ! composer validate --strict --no-ansi --no-interaction --quiet; then + if [ ! 
-z "${composerStrictValidation-}" ]; then + echo + echo -e "\e[31mERROR: composer files validation failed\e[0m" + echo + echo -e '\e[31mThe validation of the composer.json and composer.lock failed.\e[0m' + echo -e '\e[31mMake sure that the file composer.lock is consistent with composer.json.\e[0m' + echo + exit 1 + else + echo + echo -e "\e[33mWARNING: composer files validation failed\e[0m" + echo + echo -e '\e[33mThe validation of the composer.json and composer.lock failed.\e[0m' + echo -e '\e[33mMake sure that the file composer.lock is consistent with composer.json.\e[0m' + echo + echo -e '\e[33mThis check is not blocking, but it is recommended to fix the issue.\e[0m' + echo + fi + fi + + echo "Finished composerRepositoryCheckHook" +} + +composerRepositoryInstallHook() { + echo "Executing composerRepositoryInstallHook" + + mkdir -p $out + + cp -ar repository/. $out/ + + # Copy the composer.lock files to the output directory, to be able to validate consistency with + # the src composer.lock file where this fixed-output derivation is used + cp composer.lock $out/ + + echo "Finished composerRepositoryInstallHook" +} diff --git a/nixpkgs/pkgs/build-support/php/hooks/default.nix b/nixpkgs/pkgs/build-support/php/hooks/default.nix new file mode 100644 index 000000000000..240ec640723a --- /dev/null +++ b/nixpkgs/pkgs/build-support/php/hooks/default.nix @@ -0,0 +1,29 @@ +{ lib +, makeSetupHook +, diffutils +, jq +, moreutils +, makeBinaryWrapper +, cacert +, buildPackages +}: + +{ + composerRepositoryHook = makeSetupHook + { + name = "composer-repository-hook.sh"; + propagatedBuildInputs = [ jq moreutils cacert ]; + substitutions = { }; + } ./composer-repository-hook.sh; + + composerInstallHook = makeSetupHook + { + name = "composer-install-hook.sh"; + propagatedBuildInputs = [ jq makeBinaryWrapper moreutils cacert ]; + substitutions = { + # Specify the stdenv's `diff` by abspath to ensure that the user's build + # inputs do not cause us to find the wrong `diff`. 
+ cmp = "${lib.getBin buildPackages.diffutils}/bin/cmp"; + }; + } ./composer-install-hook.sh; +} diff --git a/nixpkgs/pkgs/build-support/php/pkgs/composer-local-repo-plugin.nix b/nixpkgs/pkgs/build-support/php/pkgs/composer-local-repo-plugin.nix new file mode 100644 index 000000000000..48d05b7a0008 --- /dev/null +++ b/nixpkgs/pkgs/build-support/php/pkgs/composer-local-repo-plugin.nix @@ -0,0 +1,113 @@ +{ php, callPackage, stdenvNoCC, lib, fetchFromGitHub, makeBinaryWrapper }: + +let + composer = callPackage ./composer-phar.nix { + inherit (php.packages.composer) version pharHash; + }; + + composerKeys = stdenvNoCC.mkDerivation (finalComposerKeysAttrs: { + pname = "composer-keys"; + version = "fa5a62092f33e094073fbda23bbfc7188df3cbc5"; + + src = fetchFromGitHub { + owner = "composer"; + repo = "composer.github.io"; + rev = "${finalComposerKeysAttrs.version}"; + hash = "sha256-3Sfn71LDG1jHwuEIU8iEnV3k6D6QTX7KVIKVaNSuCVE="; + }; + + installPhase = '' + runHook preInstall + + mkdir -p $out + install releases.pub $out/keys.tags.pub + install snapshots.pub $out/keys.dev.pub + + runHook postInstall + ''; + }); +in +stdenvNoCC.mkDerivation (finalAttrs: { + pname = "composer-local-repo-plugin"; + version = "1.0.3"; + + src = fetchFromGitHub { + owner = "nix-community"; + repo = "composer-local-repo-plugin"; + rev = finalAttrs.version; + hash = "sha256-fLJlxcAQ7X28GDK8PVYKxJgTzbspfWxvgRmRK4NZRIA="; + }; + + COMPOSER_CACHE_DIR = "/dev/null"; + COMPOSER_MIRROR_PATH_REPOS = "1"; + COMPOSER_HTACCESS_PROTECT = "0"; + COMPOSER_DISABLE_NETWORK = "1"; + + nativeBuildInputs = [ + makeBinaryWrapper + ]; + + buildInputs = [ + composer + ]; + + configurePhase = '' + runHook preConfigure + + export COMPOSER_HOME=${placeholder "out"} + + runHook postConfigure + ''; + + buildPhase = '' + runHook preBuild + + # Configure composer globally + composer global init --quiet --no-interaction --no-ansi \ + --name="nixos/composer" \ + --homepage "https://nixos.org/" \ + --description "Composer with nix-community/composer-local-repo-plugin" \ + --license "MIT" + + composer global config --quiet minimum-stability dev + composer global config --quiet prefer-stable true + composer global config --quiet apcu-autoloader false + composer global config --quiet allow-plugins.nix-community/composer-local-repo-plugin true + composer global config --quiet repo.packagist false + composer global config --quiet repo.plugin path $src + + # Install the local repository plugin + composer global require --quiet --no-ansi --no-interaction nix-community/composer-local-repo-plugin + + runHook postBuild + ''; + + checkPhase = '' + runHook preCheck + + composer global validate --no-ansi + composer global show --no-ansi nix-community/composer-local-repo-plugin + + runHook postCheck + ''; + + installPhase = '' + runHook preInstall + + mkdir -p $out + cp -ar ${composerKeys}/* $out/ + + makeWrapper ${composer}/bin/composer $out/bin/composer-local-repo-plugin \ + --prefix COMPOSER_HOME : $out + + runHook postInstall + ''; + + meta = { + description = "Composer local repo plugin for Composer"; + homepage = "https://github.com/nix-community/composer-local-repo-plugin"; + license = lib.licenses.mit; + maintainers = with lib.maintainers; [ drupol ]; + platforms = lib.platforms.all; + }; +}) diff --git a/nixpkgs/pkgs/build-support/php/pkgs/composer-phar.nix b/nixpkgs/pkgs/build-support/php/pkgs/composer-phar.nix new file mode 100644 index 000000000000..f281334ab2d9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/php/pkgs/composer-phar.nix @@ -0,0 +1,50 @@ 
+{ + _7zz + , cacert + , curl + , fetchurl + , git + , lib + , makeBinaryWrapper + , php + , stdenvNoCC + , unzip + , xz + , version + , pharHash +}: + +stdenvNoCC.mkDerivation (finalAttrs: { + pname = "composer-phar"; + inherit version; + + src = fetchurl { + url = "https://github.com/composer/composer/releases/download/${finalAttrs.version}/composer.phar"; + hash = pharHash; + }; + + dontUnpack = true; + + nativeBuildInputs = [ makeBinaryWrapper ]; + + installPhase = '' + runHook preInstall + + mkdir -p $out/bin + install -D $src $out/libexec/composer/composer.phar + makeWrapper ${php}/bin/php $out/bin/composer \ + --add-flags "$out/libexec/composer/composer.phar" \ + --prefix PATH : ${lib.makeBinPath [ _7zz cacert curl git unzip xz ]} + + runHook postInstall + ''; + + meta = { + changelog = "https://github.com/composer/composer/releases/tag/${finalAttrs.version}"; + description = "Dependency Manager for PHP, shipped from the PHAR file"; + homepage = "https://getcomposer.org/"; + license = lib.licenses.mit; + maintainers = with lib.maintainers; [ drupol ]; + platforms = lib.platforms.all; + }; +}) diff --git a/nixpkgs/pkgs/build-support/pkg-config-wrapper/add-flags.sh b/nixpkgs/pkgs/build-support/pkg-config-wrapper/add-flags.sh new file mode 100644 index 000000000000..90aee712be63 --- /dev/null +++ b/nixpkgs/pkgs/build-support/pkg-config-wrapper/add-flags.sh @@ -0,0 +1,12 @@ +# See cc-wrapper for comments. +var_templates_list=( + PKG_CONFIG_PATH +) + +accumulateRoles + +for var in "${var_templates_list[@]}"; do + mangleVarListGeneric ":" "$var" ${role_suffixes[@]+"${role_suffixes[@]}"} +done + +export NIX_PKG_CONFIG_WRAPPER_FLAGS_SET_@suffixSalt@=1 diff --git a/nixpkgs/pkgs/build-support/pkg-config-wrapper/default.nix b/nixpkgs/pkgs/build-support/pkg-config-wrapper/default.nix new file mode 100644 index 000000000000..f409ca3a7d4b --- /dev/null +++ b/nixpkgs/pkgs/build-support/pkg-config-wrapper/default.nix @@ -0,0 +1,129 @@ +# The wrapper script ensures variables like PKG_CONFIG_PATH and +# PKG_CONFIG_PATH_FOR_BUILD work properly. + +{ stdenvNoCC +, lib +, buildPackages +, pkg-config +, baseBinName ? "pkg-config" +, propagateDoc ? pkg-config != null && pkg-config ? man +, extraPackages ? [], extraBuildCommands ? "" +}: + +with lib; + +let + stdenv = stdenvNoCC; + inherit (stdenv) hostPlatform targetPlatform; + + # Prefix for binaries. Customarily ends with a dash separator. + # + # TODO(@Ericson2314) Make unconditional, or optional but always true by + # default. + targetPrefix = lib.optionalString (targetPlatform != hostPlatform) + (targetPlatform.config + "-"); + + # See description in cc-wrapper. + suffixSalt = replaceStrings ["-" "."] ["_" "_"] targetPlatform.config; + +in + +stdenv.mkDerivation { + pname = targetPrefix + pkg-config.pname + "-wrapper"; + inherit (pkg-config) version; + + enableParallelBuilding = true; + + preferLocalBuild = true; + + outputs = [ "out" ] ++ optionals propagateDoc ([ "man" ] ++ optional (pkg-config ? doc) "doc"); + + passthru = { + inherit targetPrefix suffixSalt; + inherit pkg-config; + }; + + strictDeps = true; + dontBuild = true; + dontConfigure = true; + dontUnpack = true; + + # Additional flags passed to pkg-config. 
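+  # These flags are substituted into pkg-config-wrapper.sh as @addFlags@ and
+  # prepended to every call, so for a statically-linked target each
+  # invocation effectively becomes `pkg-config --static ...`.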
+ addFlags = lib.optional stdenv.targetPlatform.isStatic "--static"; + + installPhase = + '' + mkdir -p $out/bin $out/nix-support + + wrap() { + local dst="$1" + local wrapper="$2" + export prog="$3" + substituteAll "$wrapper" "$out/bin/$dst" + chmod +x "$out/bin/$dst" + } + + echo $pkg-config > $out/nix-support/orig-pkg-config + + wrap ${targetPrefix}${baseBinName} ${./pkg-config-wrapper.sh} "${getBin pkg-config}/bin/${baseBinName}" + '' + # symlink in share for autoconf to find macros + + # TODO(@Ericson2314): in the future just make the unwrapped pkg-config a + # propagated dep once we can rely on downstream deps comming first in + # search paths. (https://github.com/NixOS/nixpkgs/pull/31414 took a crack + # at this.) + + '' + ln -s ${pkg-config}/share $out/share + ''; + + setupHooks = [ + ../setup-hooks/role.bash + ./setup-hook.sh + ]; + + postFixup = + ## + ## User env support + ## + + # Propagate the underling unwrapped pkg-config so that if you + # install the wrapper, you get anything else it might provide. + '' + printWords ${pkg-config} > $out/nix-support/propagated-user-env-packages + '' + + ## + ## Man page and doc support + ## + + optionalString propagateDoc ('' + ln -s ${pkg-config.man} $man + '' + optionalString (pkg-config ? doc) '' + ln -s ${pkg-config.doc} $doc + '') + + + '' + substituteAll ${./add-flags.sh} $out/nix-support/add-flags.sh + substituteAll ${../wrapper-common/utils.bash} $out/nix-support/utils.bash + '' + + ## + ## Extra custom steps + ## + + extraBuildCommands; + + env = { + shell = getBin stdenvNoCC.shell + stdenvNoCC.shell.shellPath or ""; + wrapperName = "PKG_CONFIG_WRAPPER"; + inherit targetPrefix suffixSalt baseBinName; + }; + + meta = + let pkg-config_ = lib.optionalAttrs (pkg-config != null) pkg-config; in + (lib.optionalAttrs (pkg-config_ ? meta) (removeAttrs pkg-config.meta ["priority"])) // + { description = + lib.attrByPath ["meta" "description"] "pkg-config" pkg-config_ + + " (wrapper script)"; + priority = 10; + }; +} diff --git a/nixpkgs/pkgs/build-support/pkg-config-wrapper/pkg-config-wrapper.sh b/nixpkgs/pkgs/build-support/pkg-config-wrapper/pkg-config-wrapper.sh new file mode 100644 index 000000000000..1d43c8e162ab --- /dev/null +++ b/nixpkgs/pkgs/build-support/pkg-config-wrapper/pkg-config-wrapper.sh @@ -0,0 +1,23 @@ +#! @shell@ +set -eu -o pipefail +o posix +shopt -s nullglob + +if (( "${NIX_DEBUG:-0}" >= 7 )); then + set -x +fi + +source @out@/nix-support/utils.bash + +if [ -z "${NIX_PKG_CONFIG_WRAPPER_FLAGS_SET_@suffixSalt@:-}" ]; then + source @out@/nix-support/add-flags.sh +fi + +set -- @addFlags@ "$@" + +if (( ${#role_suffixes[@]} > 0 )); then + # replace env var with nix-modified one + PKG_CONFIG_PATH=$PKG_CONFIG_PATH_@suffixSalt@ exec @prog@ "$@" +else + # pkg-config isn't a real dependency so ignore setup hook entirely + exec @prog@ "$@" +fi diff --git a/nixpkgs/pkgs/build-support/pkg-config-wrapper/setup-hook.sh b/nixpkgs/pkgs/build-support/pkg-config-wrapper/setup-hook.sh new file mode 100644 index 000000000000..34f1a999a82e --- /dev/null +++ b/nixpkgs/pkgs/build-support/pkg-config-wrapper/setup-hook.sh @@ -0,0 +1,29 @@ +# pkg-config Wrapper hygiene +# +# See comments in cc-wrapper's setup hook. This works exactly the same way. + +# Skip setup hook if we're neither a build-time dep, nor, temporarily, doing a +# native compile. 
+# +# TODO(@Ericson2314): No native exception +[[ -z ${strictDeps-} ]] || (( "$hostOffset" < 0 )) || return 0 + +pkgConfigWrapper_addPkgConfigPath () { + # See ../setup-hooks/role.bash + local role_post + getHostRoleEnvHook + + addToSearchPath "PKG_CONFIG_PATH${role_post}" "$1/lib/pkgconfig" + addToSearchPath "PKG_CONFIG_PATH${role_post}" "$1/share/pkgconfig" +} + +# See ../setup-hooks/role.bash +getTargetRole +getTargetRoleWrapper + +addEnvHooks "$targetOffset" pkgConfigWrapper_addPkgConfigPath + +export PKG_CONFIG${role_post}=@targetPrefix@@baseBinName@ + +# No local scope in sourced file +unset -v role_post diff --git a/nixpkgs/pkgs/build-support/plugins.nix b/nixpkgs/pkgs/build-support/plugins.nix new file mode 100644 index 000000000000..31b478c6c0de --- /dev/null +++ b/nixpkgs/pkgs/build-support/plugins.nix @@ -0,0 +1,29 @@ +{ lib }: +# helper functions for packaging programs with plugin systems +{ + + /* Takes a list of expected plugin names + * and compares it to the found plugins given in the file, + * one plugin per line. + * If the lists differ, the build fails with a nice message. + * + * This is helpful to ensure maintainers don’t miss + * the addition or removal of a plugin. + */ + diffPlugins = expectedPlugins: foundPluginsFilePath: '' + # sort both lists first + plugins_expected=$(mktemp) + (${lib.concatMapStrings (s: "echo \"${s}\";") expectedPlugins}) \ + | sort -u > "$plugins_expected" + plugins_found=$(mktemp) + sort -u "${foundPluginsFilePath}" > "$plugins_found" + + if ! mismatches="$(diff -y "$plugins_expected" "$plugins_found")"; then + echo "The the list of expected plugins (left side) doesn't match" \ + "the list of plugins we found (right side):" >&2 + echo "$mismatches" >&2 + exit 1 + fi + ''; + +} diff --git a/nixpkgs/pkgs/build-support/portable-service/default.nix b/nixpkgs/pkgs/build-support/portable-service/default.nix new file mode 100644 index 000000000000..6389e8d66fb1 --- /dev/null +++ b/nixpkgs/pkgs/build-support/portable-service/default.nix @@ -0,0 +1,111 @@ +{ pkgs, lib, stdenv }: +/* + Create a systemd portable service image + https://systemd.io/PORTABLE_SERVICES/ + + Example: + pkgs.portableService { + pname = "demo"; + version = "1.0"; + units = [ demo-service demo-socket ]; + } +*/ +{ + # The name and version of the portable service. The resulting image will be + # created in result/$pname_$version.raw + pname +, version + + # Units is a list of derivations for systemd unit files. Those files will be + # copied to /etc/systemd/system in the resulting image. Note that the unit + # names must be prefixed with the name of the portable service. +, units + + # Basic info about the portable service image, used for the generated + # /etc/os-release +, description ? null +, homepage ? null + + # A list of attribute sets {object, symlink}. Symlinks will be created + # in the root filesystem of the image to objects in the nix store. +, symlinks ? [ ] + + # A list of additional derivations to be included in the image as-is. +, contents ? [ ] + + # mksquashfs options +, squashfsTools ? pkgs.squashfsTools +, squash-compression ? "xz -Xdict-size 100%" +, squash-block-size ? 
"1M" +}: + +let + filterNull = lib.filterAttrs (_: v: v != null); + envFileGenerator = lib.generators.toKeyValue { }; + + rootFsScaffold = + let + os-release-params = { + PORTABLE_ID = pname; + PORTABLE_PRETTY_NAME = description; + HOME_URL = homepage; + ID = "nixos"; + PRETTY_NAME = "NixOS"; + BUILD_ID = "rolling"; + }; + os-release = pkgs.writeText "os-release" + (envFileGenerator (filterNull os-release-params)); + + in + stdenv.mkDerivation { + pname = "root-fs-scaffold"; + inherit version; + + buildCommand = '' + # scaffold a file system layout + mkdir -p $out/etc/systemd/system $out/proc $out/sys $out/dev $out/run \ + $out/tmp $out/var/tmp $out/var/lib $out/var/cache $out/var/log + + # empty files to mount over with host's version + touch $out/etc/resolv.conf $out/etc/machine-id + + # required for portable services + cp ${os-release} $out/etc/os-release + '' + # units **must** be copied to /etc/systemd/system/ + + (lib.concatMapStringsSep "\n" (u: "cp ${u} $out/etc/systemd/system/${u.name};") units) + + (lib.concatMapStringsSep "\n" + ({ object, symlink }: '' + mkdir -p $(dirname $out/${symlink}); + ln -s ${object} $out/${symlink}; + '') + symlinks) + ; + }; +in + +assert lib.assertMsg (lib.all (u: lib.hasPrefix pname u.name) units) "Unit names must be prefixed with the service name"; + +stdenv.mkDerivation { + pname = "${pname}-img"; + inherit version; + + nativeBuildInputs = [ squashfsTools ]; + closureInfo = pkgs.closureInfo { rootPaths = [ rootFsScaffold ] ++ contents; }; + + buildCommand = '' + mkdir -p nix/store + for i in $(< $closureInfo/store-paths); do + cp -a "$i" "''${i:1}" + done + + mkdir -p $out + # the '.raw' suffix is mandatory by the portable service spec + mksquashfs nix ${rootFsScaffold}/* $out/"${pname}_${version}.raw" \ + -quiet -noappend \ + -exit-on-error \ + -keep-as-directory \ + -all-root -root-mode 755 \ + -b ${squash-block-size} -comp ${squash-compression} + ''; +} diff --git a/nixpkgs/pkgs/build-support/prefer-remote-fetch/default.nix b/nixpkgs/pkgs/build-support/prefer-remote-fetch/default.nix new file mode 100644 index 000000000000..a1f2d0c56cff --- /dev/null +++ b/nixpkgs/pkgs/build-support/prefer-remote-fetch/default.nix @@ -0,0 +1,19 @@ +# An overlay that download sources on remote builder. +# This is useful when the evaluating machine has a slow +# upload while the builder can fetch faster directly from the source. +# Usage: Put the following snippet in your usual overlay definition: +# +# self: super: +# (super.prefer-remote-fetch self super) +# Full configuration example for your own account: +# +# $ mkdir ~/.config/nixpkgs/overlays/ +# $ echo 'self: super: super.prefer-remote-fetch self super' > ~/.config/nixpkgs/overlays/prefer-remote-fetch.nix +# +self: super: { + fetchurl = args: super.fetchurl ({ preferLocalBuild = false; } // args); + fetchgit = args: super.fetchgit ({ preferLocalBuild = false; } // args); + fetchhg = args: super.fetchhg ({ preferLocalBuild = false; } // args); + fetchsvn = args: super.fetchsvn ({ preferLocalBuild = false; } // args); + fetchipfs = args: super.fetchipfs ({ preferLocalBuild = false; } // args); +} diff --git a/nixpkgs/pkgs/build-support/references-by-popularity/closure-graph.py b/nixpkgs/pkgs/build-support/references-by-popularity/closure-graph.py new file mode 100644 index 000000000000..579f3b041fa8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/references-by-popularity/closure-graph.py @@ -0,0 +1,567 @@ +# IMPORTANT: Making changes? 
+# +# Validate your changes with python3 ./closure-graph.py --test + + +# Using a simple algorithm, convert the references to a path in to a +# sorted list of dependent paths based on how often they're referenced +# and how deep in the tree they live. Equally-"popular" paths are then +# sorted by name. +# +# The existing writeReferencesToFile prints the paths in a simple +# ascii-based sorting of the paths. +# +# Sorting the paths by graph improves the chances that the difference +# between two builds appear near the end of the list, instead of near +# the beginning. This makes a difference for Nix builds which export a +# closure for another program to consume, if that program implements its +# own level of binary diffing. +# +# For an example, Docker Images. If each store path is a separate layer +# then Docker Images can be very efficiently transfered between systems, +# and we get very good cache reuse between images built with the same +# version of Nixpkgs. However, since Docker only reliably supports a +# small number of layers (42) it is important to pick the individual +# layers carefully. By storing very popular store paths in the first 40 +# layers, we improve the chances that the next Docker image will share +# many of those layers.* +# +# Given the dependency tree: +# +# A - B - C - D -\ +# \ \ \ \ +# \ \ \ \ +# \ \ - E ---- F +# \- G +# +# Nodes which have multiple references are duplicated: +# +# A - B - C - D - F +# \ \ \ +# \ \ \- E - F +# \ \ +# \ \- E - F +# \ +# \- G +# +# Each leaf node is now replaced by a counter defaulted to 1: +# +# A - B - C - D - (F:1) +# \ \ \ +# \ \ \- E - (F:1) +# \ \ +# \ \- E - (F:1) +# \ +# \- (G:1) +# +# Then each leaf counter is merged with its parent node, replacing the +# parent node with a counter of 1, and each existing counter being +# incremented by 1. That is to say `- D - (F:1)` becomes `- (D:1, F:2)`: +# +# A - B - C - (D:1, F:2) +# \ \ \ +# \ \ \- (E:1, F:2) +# \ \ +# \ \- (E:1, F:2) +# \ +# \- (G:1) +# +# Then each leaf counter is merged with its parent node again, merging +# any counters, then incrementing each: +# +# A - B - (C:1, D:2, E:2, F:5) +# \ \ +# \ \- (E:1, F:2) +# \ +# \- (G:1) +# +# And again: +# +# A - (B:1, C:2, D:3, E:4, F:8) +# \ +# \- (G:1) +# +# And again: +# +# (A:1, B:2, C:3, D:4, E:5, F:9, G:2) +# +# and then paths have the following "popularity": +# +# A 1 +# B 2 +# C 3 +# D 4 +# E 5 +# F 9 +# G 2 +# +# and the popularity contest would result in the paths being printed as: +# +# F +# E +# D +# C +# B +# G +# A +# +# * Note: People who have used a Dockerfile before assume Docker's +# Layers are inherently ordered. However, this is not true -- Docker +# layers are content-addressable and are not explicitly layered until +# they are composed in to an Image. 
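+# A minimal sketch of how this script is driven (see
+# ../references-by-popularity/default.nix): the closure is dumped with
+# exportReferencesGraph under __structuredAttrs, and the script is invoked as
+#
+#   python3 ./closure-graph.py .attrs.json graph
+#
+# where .attrs.json contains a "graph" key holding a list of
+# { "path": ..., "references": [ ... ] } entries.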
+ +import sys +import json +import unittest + +from pprint import pprint +from collections import defaultdict + + +def debug(msg, *args, **kwargs): + if False: + print( + "DEBUG: {}".format( + msg.format(*args, **kwargs) + ), + file=sys.stderr + ) + + +# Find paths in the original dataset which are never referenced by +# any other paths +def find_roots(closures): + roots = []; + + for closure in closures: + path = closure['path'] + if not any_refer_to(path, closures): + roots.append(path) + + return roots + +class TestFindRoots(unittest.TestCase): + def test_find_roots(self): + self.assertCountEqual( + find_roots([ + { + "path": "/nix/store/foo", + "references": [ + "/nix/store/foo", + "/nix/store/bar" + ] + }, + { + "path": "/nix/store/bar", + "references": [ + "/nix/store/bar", + "/nix/store/tux" + ] + }, + { + "path": "/nix/store/hello", + "references": [ + ] + } + ]), + ["/nix/store/foo", "/nix/store/hello"] + ) + + +def any_refer_to(path, closures): + for closure in closures: + if path != closure['path']: + if path in closure['references']: + return True + return False + +class TestAnyReferTo(unittest.TestCase): + def test_has_references(self): + self.assertTrue( + any_refer_to( + "/nix/store/bar", + [ + { + "path": "/nix/store/foo", + "references": [ + "/nix/store/bar" + ] + }, + ] + ), + ) + def test_no_references(self): + self.assertFalse( + any_refer_to( + "/nix/store/foo", + [ + { + "path": "/nix/store/foo", + "references": [ + "/nix/store/foo", + "/nix/store/bar" + ] + }, + ] + ), + ) + +def all_paths(closures): + paths = [] + for closure in closures: + paths.append(closure['path']) + paths.extend(closure['references']) + paths.sort() + return list(set(paths)) + + +class TestAllPaths(unittest.TestCase): + def test_returns_all_paths(self): + self.assertCountEqual( + all_paths([ + { + "path": "/nix/store/foo", + "references": [ + "/nix/store/foo", + "/nix/store/bar" + ] + }, + { + "path": "/nix/store/bar", + "references": [ + "/nix/store/bar", + "/nix/store/tux" + ] + }, + { + "path": "/nix/store/hello", + "references": [ + ] + } + ]), + ["/nix/store/foo", "/nix/store/bar", "/nix/store/hello", "/nix/store/tux",] + ) + def test_no_references(self): + self.assertFalse( + any_refer_to( + "/nix/store/foo", + [ + { + "path": "/nix/store/foo", + "references": [ + "/nix/store/foo", + "/nix/store/bar" + ] + }, + ] + ), + ) + +# Convert: +# +# [ +# { path: /nix/store/foo, references: [ /nix/store/foo, /nix/store/bar, /nix/store/baz ] }, +# { path: /nix/store/bar, references: [ /nix/store/bar, /nix/store/baz ] }, +# { path: /nix/store/baz, references: [ /nix/store/baz, /nix/store/tux ] }, +# { path: /nix/store/tux, references: [ /nix/store/tux ] } +# ] +# +# To: +# { +# /nix/store/foo: [ /nix/store/bar, /nix/store/baz ], +# /nix/store/bar: [ /nix/store/baz ], +# /nix/store/baz: [ /nix/store/tux ] }, +# /nix/store/tux: [ ] +# } +# +# Note that it drops self-references to avoid loops. 
+def make_lookup(closures): + lookup = {} + + for closure in closures: + # paths often self-refer + nonreferential_paths = [ref for ref in closure['references'] if ref != closure['path']] + lookup[closure['path']] = nonreferential_paths + + return lookup + +class TestMakeLookup(unittest.TestCase): + def test_returns_lookp(self): + self.assertDictEqual( + make_lookup([ + { + "path": "/nix/store/foo", + "references": [ + "/nix/store/foo", + "/nix/store/bar" + ] + }, + { + "path": "/nix/store/bar", + "references": [ + "/nix/store/bar", + "/nix/store/tux" + ] + }, + { + "path": "/nix/store/hello", + "references": [ + ] + } + ]), + { + "/nix/store/foo": [ "/nix/store/bar" ], + "/nix/store/bar": [ "/nix/store/tux" ], + "/nix/store/hello": [ ], + } + ) + +# Convert: +# +# /nix/store/foo with +# { +# /nix/store/foo: [ /nix/store/bar, /nix/store/baz ], +# /nix/store/bar: [ /nix/store/baz ], +# /nix/store/baz: [ /nix/store/tux ] }, +# /nix/store/tux: [ ] +# } +# +# To: +# +# { +# /nix/store/bar: { +# /nix/store/baz: { +# /nix/store/tux: {} +# } +# }, +# /nix/store/baz: { +# /nix/store/tux: {} +# } +# } +subgraphs_cache = {} +def make_graph_segment_from_root(root, lookup): + global subgraphs_cache + children = {} + for ref in lookup[root]: + # make_graph_segment_from_root is a pure function, and will + # always return the same result based on a given input. Thus, + # cache computation. + # + # Python's assignment will use a pointer, preventing memory + # bloat for large graphs. + if ref not in subgraphs_cache: + debug("Subgraph Cache miss on {}".format(ref)) + subgraphs_cache[ref] = make_graph_segment_from_root(ref, lookup) + else: + debug("Subgraph Cache hit on {}".format(ref)) + children[ref] = subgraphs_cache[ref] + return children + +class TestMakeGraphSegmentFromRoot(unittest.TestCase): + def test_returns_graph(self): + self.assertDictEqual( + make_graph_segment_from_root("/nix/store/foo", { + "/nix/store/foo": [ "/nix/store/bar" ], + "/nix/store/bar": [ "/nix/store/tux" ], + "/nix/store/tux": [ ], + "/nix/store/hello": [ ], + }), + { + "/nix/store/bar": { + "/nix/store/tux": {} + } + } + ) + def test_returns_graph_tiny(self): + self.assertDictEqual( + make_graph_segment_from_root("/nix/store/tux", { + "/nix/store/foo": [ "/nix/store/bar" ], + "/nix/store/bar": [ "/nix/store/tux" ], + "/nix/store/tux": [ ], + }), + {} + ) + +# Convert a graph segment in to a popularity-counted dictionary: +# +# From: +# { +# /nix/store/foo: { +# /nix/store/bar: { +# /nix/store/baz: { +# /nix/store/tux: {} +# } +# } +# /nix/store/baz: { +# /nix/store/tux: {} +# } +# } +# } +# +# to: +# [ +# /nix/store/foo: 1 +# /nix/store/bar: 2 +# /nix/store/baz: 4 +# /nix/store/tux: 6 +# ] +popularity_cache = {} +def graph_popularity_contest(full_graph): + global popularity_cache + popularity = defaultdict(int) + for path, subgraph in full_graph.items(): + popularity[path] += 1 + # graph_popularity_contest is a pure function, and will + # always return the same result based on a given input. Thus, + # cache computation. + # + # Python's assignment will use a pointer, preventing memory + # bloat for large graphs. 
+ if path not in popularity_cache: + debug("Popularity Cache miss on {}", path) + popularity_cache[path] = graph_popularity_contest(subgraph) + else: + debug("Popularity Cache hit on {}", path) + + subcontest = popularity_cache[path] + for subpath, subpopularity in subcontest.items(): + debug("Calculating popularity for {}", subpath) + popularity[subpath] += subpopularity + 1 + + return popularity + +class TestGraphPopularityContest(unittest.TestCase): + def test_counts_popularity(self): + self.assertDictEqual( + graph_popularity_contest({ + "/nix/store/foo": { + "/nix/store/bar": { + "/nix/store/baz": { + "/nix/store/tux": {} + } + }, + "/nix/store/baz": { + "/nix/store/tux": {} + } + } + }), + { + "/nix/store/foo": 1, + "/nix/store/bar": 2, + "/nix/store/baz": 4, + "/nix/store/tux": 6, + } + ) + +# Emit a list of packages by popularity, most first: +# +# From: +# [ +# /nix/store/foo: 1 +# /nix/store/bar: 1 +# /nix/store/baz: 2 +# /nix/store/tux: 2 +# ] +# +# To: +# [ /nix/store/baz /nix/store/tux /nix/store/bar /nix/store/foo ] +def order_by_popularity(paths): + paths_by_popularity = defaultdict(list) + popularities = [] + for path, popularity in paths.items(): + popularities.append(popularity) + paths_by_popularity[popularity].append(path) + + popularities = list(set(popularities)) + popularities.sort() + + flat_ordered = [] + for popularity in popularities: + paths = paths_by_popularity[popularity] + paths.sort(key=package_name) + + flat_ordered.extend(reversed(paths)) + return list(reversed(flat_ordered)) + + +class TestOrderByPopularity(unittest.TestCase): + def test_returns_in_order(self): + self.assertEqual( + order_by_popularity({ + "/nix/store/foo": 1, + "/nix/store/bar": 1, + "/nix/store/baz": 2, + "/nix/store/tux": 2, + }), + [ + "/nix/store/baz", + "/nix/store/tux", + "/nix/store/bar", + "/nix/store/foo" + ] + ) + +def package_name(path): + parts = path.split('-') + start = parts.pop(0) + # don't throw away any data, so the order is always the same. + # even in cases where only the hash at the start has changed. 
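+    # For example (illustrative store path only):
+    #   "/nix/store/abc123-openssl-1.1"  ->  "openssl-1.1-/nix/store/abc123"
+    # so equally-popular paths sort by package name first and hash last.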
+ parts.append(start) + return '-'.join(parts) + +def main(): + filename = sys.argv[1] + key = sys.argv[2] + + debug("Loading from {}", filename) + with open(filename) as f: + data = json.load(f) + + # Data comes in as: + # [ + # { path: /nix/store/foo, references: [ /nix/store/foo, /nix/store/bar, /nix/store/baz ] }, + # { path: /nix/store/bar, references: [ /nix/store/bar, /nix/store/baz ] }, + # { path: /nix/store/baz, references: [ /nix/store/baz, /nix/store/tux ] }, + # { path: /nix/store/tux, references: [ /nix/store/tux ] } + # ] + # + # and we want to get out a list of paths ordered by how universally, + # important they are, ie: tux is referenced by every path, transitively + # so it should be #1 + # + # [ + # /nix/store/tux, + # /nix/store/baz, + # /nix/store/bar, + # /nix/store/foo, + # ] + graph = data[key] + + debug("Finding roots from {}", key) + roots = find_roots(graph); + debug("Making lookup for {}", key) + lookup = make_lookup(graph) + + full_graph = {} + for root in roots: + debug("Making full graph for {}", root) + full_graph[root] = make_graph_segment_from_root(root, lookup) + + debug("Running contest") + contest = graph_popularity_contest(full_graph) + debug("Ordering by popularity") + ordered = order_by_popularity(contest) + debug("Checking for missing paths") + missing = [] + for path in all_paths(graph): + if path not in ordered: + missing.append(path) + + ordered.extend(missing) + print("\n".join(ordered)) + +if "--test" in sys.argv: + # Don't pass --test otherwise unittest gets mad + unittest.main(argv = [f for f in sys.argv if f != "--test" ]) +else: + main() diff --git a/nixpkgs/pkgs/build-support/references-by-popularity/default.nix b/nixpkgs/pkgs/build-support/references-by-popularity/default.nix new file mode 100644 index 000000000000..dfc25275f34c --- /dev/null +++ b/nixpkgs/pkgs/build-support/references-by-popularity/default.nix @@ -0,0 +1,16 @@ +{ runCommand, python3, coreutils }: +# Write the references of `path' to a file, in order of how "popular" each +# reference is. Nix 2 only. +path: runCommand "closure-paths" +{ + exportReferencesGraph.graph = path; + __structuredAttrs = true; + preferLocalBuild = true; + PATH = "${coreutils}/bin:${python3}/bin"; + builder = builtins.toFile "builder" + '' + . .attrs.sh + python3 ${./closure-graph.py} .attrs.json graph > ''${outputs[out]} + ''; + } + "" diff --git a/nixpkgs/pkgs/build-support/release/binary-tarball.nix b/nixpkgs/pkgs/build-support/release/binary-tarball.nix new file mode 100644 index 000000000000..37c5f8c7ee86 --- /dev/null +++ b/nixpkgs/pkgs/build-support/release/binary-tarball.nix @@ -0,0 +1,78 @@ +/* This function builds a binary tarball. The resulting binaries are + usually only useful if they are don't have any runtime dependencies + on any paths in the Nix store, since those aren't distributed in + the tarball. For instance, the binaries should be statically + linked: they can't depend on dynamic libraries in the store + (including Glibc). + + The binaries are built and installed with a prefix of /usr/local by + default. They are installed by setting DESTDIR to a temporary + directory, so the Makefile of the package should support DESTDIR. +*/ + +{ src, lib, stdenv +, name ? "binary-tarball" +, ... } @ args: + +stdenv.mkDerivation ( + + { + # Also run a `make check'. + doCheck = true; + + showBuildStats = true; + + prefix = "/usr/local"; + + postPhases = "finalPhase"; + } + + // args // + + { + name = name + (lib.optionalString (src ? 
version) "-${src.version}"); + + postHook = '' + mkdir -p $out/nix-support + echo "$system" > $out/nix-support/system + . ${./functions.sh} + + origSrc=$src + src=$(findTarball $src) + + if test -e $origSrc/nix-support/hydra-release-name; then + releaseName=$(cat $origSrc/nix-support/hydra-release-name) + fi + + installFlagsArray=(DESTDIR=$TMPDIR/inst) + + # Prefix hackery because of a bug in stdenv (it tries to `mkdir + # $prefix', which doesn't work due to the DESTDIR). + configureFlags="--prefix=$prefix $configureFlags" + dontAddPrefix=1 + prefix=$TMPDIR/inst$prefix + ''; + + doDist = true; + + distPhase = '' + mkdir -p $out/tarballs + tar cvfj $out/tarballs/''${releaseName:-binary-dist}.tar.bz2 -C $TMPDIR/inst . + ''; + + finalPhase = '' + for i in $out/tarballs/*; do + echo "file binary-dist $i" >> $out/nix-support/hydra-build-products + done + + # Propagate the release name of the source tarball. This is + # to get nice package names in channels. + test -n "$releaseName" && (echo "$releaseName" >> $out/nix-support/hydra-release-name) + ''; + + meta = (lib.optionalAttrs (args ? meta) args.meta) // { + description = "Build of a generic binary distribution"; + }; + + } +) diff --git a/nixpkgs/pkgs/build-support/release/debian-build.nix b/nixpkgs/pkgs/build-support/release/debian-build.nix new file mode 100644 index 000000000000..ab84a504b54c --- /dev/null +++ b/nixpkgs/pkgs/build-support/release/debian-build.nix @@ -0,0 +1,94 @@ +# This function compiles a source tarball in a virtual machine image +# that contains a Debian-like (i.e. dpkg-based) OS. + +{ name ? "debian-build" +, diskImage +, src, lib, stdenv, vmTools, checkinstall +, fsTranslation ? false +, # Features provided by this package. + debProvides ? [] +, # Features required by this package. + debRequires ? [] +, ... } @ args: + +vmTools.runInLinuxImage (stdenv.mkDerivation ( + + { + doCheck = true; + + prefix = "/usr"; + + prePhases = "installExtraDebsPhase sysInfoPhase"; + } + + // removeAttrs args ["vmTools" "lib"] // + + { + name = name + "-" + diskImage.name + (lib.optionalString (src ? version) "-${src.version}"); + + # !!! cut&paste from rpm-build.nix + postHook = '' + . ${./functions.sh} + propagateImageName + src=$(findTarball $src) + ''; + + installExtraDebsPhase = '' + for i in $extraDebs; do + dpkg --install $(ls $i/debs/*.deb | sort | head -1) + done + ''; + + sysInfoPhase = '' + [ ! -f /etc/lsb-release ] || (source /etc/lsb-release; echo "OS release: $DISTRIB_DESCRIPTION") + echo "System/kernel: $(uname -a)" + if test -e /etc/debian_version; then echo "Debian release: $(cat /etc/debian_version)"; fi + echo "installed Debian packages" + dpkg-query --list + ''; + + installPhase = '' + eval "$preInstall" + export LOGNAME=root + + # otherwise build hangs when it wants to display + # the log file + export PAGER=cat + ${checkinstall}/sbin/checkinstall --nodoc -y -D \ + --fstrans=${if fsTranslation then "yes" else "no"} \ + --requires="${lib.concatStringsSep "," debRequires}" \ + --provides="${lib.concatStringsSep "," debProvides}" \ + ${if (src ? version) then "--pkgversion=$(echo ${src.version} | tr _ -)" + else "--pkgversion=0.0.0"} \ + ''${debMaintainer:+--maintainer="'$debMaintainer'"} \ + ''${debName:+--pkgname="'$debName'"} \ + $checkInstallFlags \ + -- \ + $SHELL -c "''${installCommand:-make install}" + + mkdir -p $out/debs + find . 
-name "*.deb" -exec cp {} $out/debs \; + + [ "$(echo $out/debs/*.deb)" != "" ] + + for i in $out/debs/*.deb; do + echo "Generated DEB package: $i" + dpkg-deb --info "$i" + pkgName=$(dpkg-deb -W "$i" | awk '{print $1}') + echo "file deb $i" >> $out/nix-support/hydra-build-products + done + dpkg -i $out/debs/*.deb + + for i in $extraDebs; do + echo "file deb-extra $(ls $i/debs/*.deb | sort | head -1)" >> $out/nix-support/hydra-build-products + done + + eval "$postInstall" + ''; + + meta = (lib.optionalAttrs (args ? meta) args.meta) // { + description = "Deb package for ${diskImage.fullName}"; + }; + } + +)) diff --git a/nixpkgs/pkgs/build-support/release/default.nix b/nixpkgs/pkgs/build-support/release/default.nix new file mode 100644 index 000000000000..d09f6c8568be --- /dev/null +++ b/nixpkgs/pkgs/build-support/release/default.nix @@ -0,0 +1,120 @@ +{ lib, pkgs }: + +with pkgs; + +rec { + + sourceTarball = args: import ./source-tarball.nix ( + { inherit lib stdenv autoconf automake libtool; + } // args); + + makeSourceTarball = sourceTarball; # compatibility + + binaryTarball = args: import ./binary-tarball.nix ( + { inherit lib stdenv; + } // args); + + mvnBuild = args: import ./maven-build.nix ( + { inherit lib stdenv; + } // args); + + nixBuild = args: import ./nix-build.nix ( + { inherit lib stdenv; + } // args); + + coverageAnalysis = args: nixBuild ( + { inherit lcov enableGCOVInstrumentation makeGCOVReport; + doCoverageAnalysis = true; + } // args); + + clangAnalysis = args: nixBuild ( + { inherit clang-analyzer; + doClangAnalysis = true; + } // args); + + coverityAnalysis = args: nixBuild ( + { inherit cov-build xz; + doCoverityAnalysis = true; + } // args); + + rpmBuild = args: import ./rpm-build.nix ( + { inherit lib vmTools; + } // args); + + debBuild = args: import ./debian-build.nix ( + { inherit lib stdenv vmTools checkinstall; + } // args); + + aggregate = + { name, constituents, meta ? { } }: + pkgs.runCommand name + { inherit constituents meta; + preferLocalBuild = true; + _hydraAggregate = true; + } + '' + mkdir -p $out/nix-support + touch $out/nix-support/hydra-build-products + echo $constituents > $out/nix-support/hydra-aggregate-constituents + + # Propagate build failures. + for i in $constituents; do + if [ -e $i/nix-support/failed ]; then + touch $out/nix-support/failed + fi + done + ''; + + /* Create a channel job which success depends on the success of all of + its contituents. Channel jobs are a special type of jobs that are + listed in the channel tab of Hydra and that can be suscribed. + A tarball of the src attribute is distributed via the channel. + + - constituents: a list of derivations on which the channel success depends. + - name: the channel name that will be used in the hydra interface. + - src: should point to the root folder of the nix-expressions used by the + channel, typically a folder containing a `default.nix`. + + channel { + constituents = [ foo bar baz ]; + name = "my-channel"; + src = ./.; + }; + + */ + channel = + { name, src, constituents ? [], meta ? {}, isNixOS ? true, ... }@args: + stdenv.mkDerivation ({ + preferLocalBuild = true; + _hydraAggregate = true; + + phases = [ "unpackPhase" "patchPhase" "installPhase" ]; + + patchPhase = lib.optionalString isNixOS '' + touch .update-on-nixos-rebuild + ''; + + installPhase = '' + mkdir -p $out/{tarballs,nix-support} + + tar cJf "$out/tarballs/nixexprs.tar.xz" \ + --owner=0 --group=0 --mtime="1970-01-01 00:00:00 UTC" \ + --transform='s!^\.!${name}!' . 
+ + echo "channel - $out/tarballs/nixexprs.tar.xz" > "$out/nix-support/hydra-build-products" + echo $constituents > "$out/nix-support/hydra-aggregate-constituents" + + # Propagate build failures. + for i in $constituents; do + if [ -e "$i/nix-support/failed" ]; then + touch "$out/nix-support/failed" + fi + done + ''; + + meta = meta // { + isHydraChannel = true; + }; + } // removeAttrs args [ "meta" ]); + +} diff --git a/nixpkgs/pkgs/build-support/release/functions.sh b/nixpkgs/pkgs/build-support/release/functions.sh new file mode 100644 index 000000000000..0c4b81edf822 --- /dev/null +++ b/nixpkgs/pkgs/build-support/release/functions.sh @@ -0,0 +1,17 @@ +findTarball() { + local suffix i + if [ -d "$1/tarballs/" ]; then + for suffix in tar.gz tgz tar.bz2 tbz2 tbz tar.xz txz tar.lzma; do + for i in $1/tarballs/*.$suffix; do echo $i; break; done + done | sort | head -1 + return + else + echo "$1" + return + fi +} + +propagateImageName() { + mkdir -p $out/nix-support + cat "$diskImage"/nix-support/full-name > $out/nix-support/full-name +} diff --git a/nixpkgs/pkgs/build-support/release/maven-build.nix b/nixpkgs/pkgs/build-support/release/maven-build.nix new file mode 100644 index 000000000000..f618032724c9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/release/maven-build.nix @@ -0,0 +1,103 @@ +{ stdenv +, lib +, name +, src +, doTest ? true +, doTestCompile ? true +, doJavadoc ? false +, doCheckstyle ? false +, doRelease ? false +, includeTestClasses ? true +, extraMvnFlags ? "" +, ... +} @ args : + +let + mvnFlags = lib.escapeShellArgs [ + "-Dmaven.repo.local=$M2_REPO" + (lib.optionalString (!doTest) "-Dmaven.test.skip.exec=true") + "${extraMvnFlags}" + ]; +in + +stdenv.mkDerivation ( { + inherit name src; + phases = "setupPhase unpackPhase patchPhase mvnCompile ${lib.optionalString doTestCompile "mvnTestCompile mvnTestJar"} ${lib.optionalString doTest "mvnTest"} ${lib.optionalString doJavadoc "mvnJavadoc"} ${lib.optionalString doCheckstyle "mvnCheckstyle"} mvnJar mvnAssembly mvnRelease finalPhase"; + + setupPhase = '' + runHook preSetupPhase + + mkdir -p $out/nix-support + export LANG="en_US.UTF-8" + export LOCALE_ARCHIVE=$glibcLocales/lib/locale/locale-archive + export M2_REPO=$TMPDIR/repository + + runHook postSetupPhase + ''; + + mvnCompile = '' + mvn compile ${mvnFlags} + ''; + + mvnTestCompile = '' + mvn test-compile ${mvnFlags} + ''; + + mvnTestJar = '' + mvn jar:test-jar ${mvnFlags} + ''; + + mvnTest = '' + mvn test ${mvnFlags} + + if [ -d target/site/cobertura ] ; then + echo "report coverage $out/site/cobertura" >> $out/nix-support/hydra-build-products + fi + + if [ -d target/surefire-reports ] ; then + mvn surefire-report:report-only + echo "report coverage $out/site/surefire-report.html" >> $out/nix-support/hydra-build-products + fi + ''; + + mvnJavadoc = '' + mvn javadoc:javadoc ${mvnFlags} + echo "report javadoc $out/site/apidocs" >> $out/nix-support/hydra-build-products + ''; + + mvnCheckstyle = '' + mvn checkstyle:checkstyle ${mvnFlags} + echo "report checkstyle $out/site/checkstyle.html" >> $out/nix-support/hydra-build-products + ''; + + mvnJar = '' + mvn jar:jar ${mvnFlags} + ''; + + mvnAssembly = '' + mvn assembly:assembly -Dmaven.test.skip=true ${mvnFlags} + ''; + + mvnRelease = '' + mkdir -p $out/release + + zip=$(ls target/*.zip| head -1) + releaseName=$(basename $zip .zip) + releaseName="$releaseName-r${toString src.rev or "0"}" + cp $zip $out/release/$releaseName.zip + + echo "$releaseName" > $out/nix-support/hydra-release-name + + ${lib.optionalString doRelease '' + 
echo "file zip $out/release/$releaseName.zip" >> $out/nix-support/hydra-build-products + ''} + ''; + + finalPhase = '' + if [ -d target/site ] ; then + cp -R target/site $out/ + echo "report site $out/site" >> $out/nix-support/hydra-build-products + fi + ''; +} // args +) diff --git a/nixpkgs/pkgs/build-support/release/nix-build.nix b/nixpkgs/pkgs/build-support/release/nix-build.nix new file mode 100644 index 000000000000..348cd5b8345c --- /dev/null +++ b/nixpkgs/pkgs/build-support/release/nix-build.nix @@ -0,0 +1,174 @@ +# This function builds and tests an Autoconf-style source tarball. +# The result can be installed normally in an environment (e.g., after +# making it available through a channel). If `doCoverageAnalysis' is +# true, it does an ordinary build from a source tarball, except that +# it turns on GCC's coverage analysis feature. It then runs `make +# check' and produces a coverage analysis report using `lcov'. + +{ buildOutOfSourceTree ? false +, preConfigure ? null +, doCoverageAnalysis ? false +, doClangAnalysis ? false +, doCoverityAnalysis ? false +, lcovFilter ? [] +, lcovExtraTraceFiles ? [] +, src, lib, stdenv +, name ? if doCoverageAnalysis then "nix-coverage" else "nix-build" +, failureHook ? null +, prePhases ? [] +, postPhases ? [] +, buildInputs ? [] +, preHook ? "" +, postHook ? "" +, ... } @ args: + +let + doingAnalysis = doCoverageAnalysis || doClangAnalysis || doCoverityAnalysis; +in +stdenv.mkDerivation ( + + { + # Also run a `make check'. + doCheck = true; + + # When doing coverage analysis, we don't care about the result. + dontInstall = doingAnalysis; + useTempPrefix = doingAnalysis; + + showBuildStats = true; + + finalPhase = + '' + # Propagate the release name of the source tarball. This is + # to get nice package names in channels. + if test -e $origSrc/nix-support/hydra-release-name; then + cp $origSrc/nix-support/hydra-release-name $out/nix-support/hydra-release-name + fi + + # Package up Coverity analysis results + if [ ! -z "${toString doCoverityAnalysis}" ]; then + if [ -d "_coverity_$name/cov-int" ]; then + mkdir -p $out/tarballs + NAME=`cat $out/nix-support/hydra-release-name` + cd _coverity_$name + tar caf $out/tarballs/$NAME-coverity-int.xz cov-int + echo "file cov-build $out/tarballs/$NAME-coverity-int.xz" >> $out/nix-support/hydra-build-products + fi + fi + + # Package up Clang analysis results + if [ ! -z "${toString doClangAnalysis}" ]; then + if [ ! -z "`ls _clang_analyze_$name`" ]; then + cd _clang_analyze_$name && mv * $out/analysis + else + mkdir -p $out/analysis + echo "No bugs found." >> $out/analysis/index.html + fi + + echo "report analysis $out/analysis" >> $out/nix-support/hydra-build-products + fi + ''; + + failureHook = (lib.optionalString (failureHook != null) failureHook) + + '' + if test -n "$succeedOnFailure"; then + if test -n "$keepBuildDirectory"; then + KEEPBUILDDIR="$out/`basename $TMPDIR`" + echo "Copying build directory to $KEEPBUILDDIR" + mkdir -p $KEEPBUILDDIR + cp -R "$TMPDIR/"* $KEEPBUILDDIR + fi + fi + ''; + } + + // removeAttrs args [ "lib" ] # Propagating lib causes the evaluation to fail, because lib is a function that can't be converted to a string + + // { + name = name + (lib.optionalString (src ? version) "-${src.version}"); + + postHook = '' + . ${./functions.sh} + origSrc=$src + src=$(findTarball $src) + ${postHook} + ''; + + preHook = '' + # Perform Coverity Analysis + if [ ! 
-z "${toString doCoverityAnalysis}" ]; then + shopt -s expand_aliases + mkdir _coverity_$name + alias make="cov-build --dir _coverity_$name/cov-int make" + fi + + # Perform Clang Analysis + if [ ! -z "${toString doClangAnalysis}" ]; then + shopt -s expand_aliases + alias make="scan-build -o _clang_analyze_$name --html-title='Scan results for $name' make" + fi + + ${preHook} + ''; + + # Clean up after analysis + postBuild = '' + if [ ! -z "${toString (doCoverityAnalysis || doClangAnalysis)}" ]; then + unalias make + fi + ''; + + initPhase = '' + mkdir -p $out/nix-support + echo "$system" > $out/nix-support/system + + if [ -z "${toString doingAnalysis}" ]; then + for i in $(getAllOutputNames); do + if [ "$i" = out ]; then j=none; else j="$i"; fi + mkdir -p ''${!i}/nix-support + echo "nix-build $j ''${!i}" >> ''${!i}/nix-support/hydra-build-products + done + fi + ''; + + prePhases = ["initPhase"] ++ prePhases; + + buildInputs = + buildInputs ++ + (lib.optional doCoverageAnalysis args.makeGCOVReport) ++ + (lib.optional doClangAnalysis args.clang-analyzer) ++ + (lib.optional doCoverityAnalysis args.cov-build) ++ + (lib.optional doCoverityAnalysis args.xz); + + lcovFilter = ["${builtins.storeDir}/*"] ++ lcovFilter; + + inherit lcovExtraTraceFiles; + + postPhases = postPhases ++ ["finalPhase"]; + + meta = (lib.optionalAttrs (args ? meta) args.meta) // { + description = if doCoverageAnalysis then "Coverage analysis" else "Nix package for ${stdenv.hostPlatform.system}"; + }; + + } + + // + + (lib.optionalAttrs buildOutOfSourceTree + { + preConfigure = + # Build out of source tree and make the source tree read-only. This + # helps catch violations of the GNU Coding Standards (info + # "(standards) Configuration"), like `make distcheck' does. + '' mkdir "../build" + cd "../build" + configureScript="../$sourceRoot/configure" + chmod -R a-w "../$sourceRoot" + + echo "building out of source tree, from \`$PWD'..." + + ${lib.optionalString (preConfigure != null) preConfigure} + ''; + } + ) +) diff --git a/nixpkgs/pkgs/build-support/release/rpm-build.nix b/nixpkgs/pkgs/build-support/release/rpm-build.nix new file mode 100644 index 000000000000..ccbbd57107bd --- /dev/null +++ b/nixpkgs/pkgs/build-support/release/rpm-build.nix @@ -0,0 +1,54 @@ +# This function builds an RPM from a source tarball that contains a +# RPM spec file (i.e., one that can be built using `rpmbuild -ta'). + +{ name ? "rpm-build" +, diskImage +, src, lib, vmTools +, ... } @ args: + +vmTools.buildRPM ( + + removeAttrs args ["vmTools"] // + + { + name = name + "-" + diskImage.name + (lib.optionalString (src ? version) "-${src.version}"); + + preBuild = '' + . ${./functions.sh} + propagateImageName + src=$(findTarball $src) + ''; + + postInstall = '' + declare -a rpms rpmNames + for i in $out/rpms/*/*.rpm; do + if echo $i | grep -vq "\.src\.rpm$"; then + echo "file rpm $i" >> $out/nix-support/hydra-build-products + rpms+=($i) + rpmNames+=("$(rpm -qp "$i")") + fi + done + + echo "installing ''${rpms[*]}..." + rpm -Up ''${rpms[*]} --excludepath /nix/store + + eval "$postRPMInstall" + + echo "uninstalling ''${rpmNames[*]}..." + rpm -e ''${rpmNames[*]} --nodeps + + for i in $out/rpms/*/*.src.rpm; do + echo "file srpm $i" >> $out/nix-support/hydra-build-products + done + + for rpmdir in $extraRPMs ; do + echo "file rpm-extra $(ls $rpmdir/rpms/*/*.rpm | grep -v 'src\.rpm' | sort | head -1)" >> $out/nix-support/hydra-build-products + done + ''; + + meta = (lib.optionalAttrs (args ? 
meta) args.meta) // { + description = "RPM package for ${diskImage.fullName}"; + }; + } + +) diff --git a/nixpkgs/pkgs/build-support/release/source-tarball.nix b/nixpkgs/pkgs/build-support/release/source-tarball.nix new file mode 100644 index 000000000000..fbc8bc6b258b --- /dev/null +++ b/nixpkgs/pkgs/build-support/release/source-tarball.nix @@ -0,0 +1,129 @@ +# This function converts an un-Autoconfed source tarball (typically a +# checkout from a Subversion or CVS repository) into a source tarball +# by running `autoreconf', `configure' and `make dist'. + +{ officialRelease ? false +, buildInputs ? [] +, name ? "source-tarball" +, version ? "0" +, versionSuffix ? + if officialRelease + then "" + else "pre${toString (src.rev or src.revCount or "")}" +, src, lib, stdenv, autoconf, automake, libtool +, # By default, provide all the GNU Build System as input. + bootstrapBuildInputs ? [ autoconf automake libtool ] +, ... } @ args: + +stdenv.mkDerivation ( + + # First, attributes that can be overridden by the caller (via args): + { + # By default, only configure and build a source distribution. + # Some packages can only build a distribution after a general + # `make' (or even `make install'). + dontBuild = true; + dontInstall = true; + doDist = true; + + # If we do install, install to a dummy location. + useTempPrefix = true; + + showBuildStats = true; + + preConfigurePhases = "autoconfPhase"; + postPhases = "finalPhase"; + + # Autoconfiscate the sources. + autoconfPhase = '' + export VERSION=${version} + export VERSION_SUFFIX=${versionSuffix} + + # `svn-revision' is set for backwards compatibility with the old + # Nix buildfarm. (Stratego/XT's autoxt uses it. We should + # update it eventually.) + echo ${versionSuffix} | sed -e s/pre// > svn-revision + + eval "$preAutoconf" + + if test -x ./bootstrap && test -f ./bootstrap; then ./bootstrap + elif test -x ./bootstrap.sh; then ./bootstrap.sh + elif test -x ./autogen.sh; then ./autogen.sh + elif test -x ./autogen ; then ./autogen + elif test -x ./reconf; then ./reconf + elif test -f ./configure.in || test -f ./configure.ac; then + autoreconf --install --force --verbose + else + echo "No bootstrap, bootstrap.sh, configure.in or configure.ac. Assuming this is not an GNU Autotools package." + fi + + eval "$postAutoconf" + ''; + + failureHook = '' + if test -n "$succeedOnFailure"; then + if test -n "$keepBuildDirectory"; then + KEEPBUILDDIR="$out/`basename $TMPDIR`" + echo "Copying build directory to $KEEPBUILDDIR" + mkdir -p $KEEPBUILDDIR + cp -R "$TMPDIR/"* $KEEPBUILDDIR + fi + fi + ''; + } + + # Then, the caller-supplied attributes. + // (builtins.removeAttrs args [ "lib" ]) // + + # And finally, our own stuff. + { + name = name + "-" + version + versionSuffix; + + buildInputs = buildInputs ++ bootstrapBuildInputs; + + preUnpack = '' + mkdir -p $out/nix-support + ''; + + postUnpack = '' + # Set all source files to the current date. This is because Nix + # resets the timestamp on all files to 0 (1/1/1970), which some + # people don't like (in particular GNU tar prints harmless but + # frightening warnings about it). + touch now + touch -d "1970-01-01 00:00:00 UTC" then + find $sourceRoot ! -newer then -print0 | xargs -0r touch --reference now + rm now then + eval "$nextPostUnpack" + ''; + + nextPostUnpack = if args ? postUnpack then args.postUnpack else ""; + + # Cause distPhase to copy tar.bz2 in addition to tar.gz. 
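+  # (stdenv's default distPhase copies whatever matches these globs into
+  # $out/tarballs, so listing all three keeps gz, bz2 and xz tarballs.)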
+ tarballs = "*.tar.gz *.tar.bz2 *.tar.xz"; + + finalPhase = '' + for i in "$out/tarballs/"*; do + echo "file source-dist $i" >> $out/nix-support/hydra-build-products + done + + # Try to figure out the release name. + releaseName=$( (cd $out/tarballs && ls) | head -n 1 | sed -e 's^\.[a-z].*^^') + test -n "$releaseName" && (echo "$releaseName" >> $out/nix-support/hydra-release-name) + ''; + + passthru = { + inherit src; + version = version + versionSuffix; + }; + + meta = (lib.optionalAttrs (args ? meta) args.meta) // { + description = "Source distribution"; + + # Tarball builds are generally important, so give them a high + # default priority. + schedulingPriority = 200; + }; + } + +) diff --git a/nixpkgs/pkgs/build-support/remove-references-to/darwin-sign-fixup.sh b/nixpkgs/pkgs/build-support/remove-references-to/darwin-sign-fixup.sh new file mode 100644 index 000000000000..940c18e5a627 --- /dev/null +++ b/nixpkgs/pkgs/build-support/remove-references-to/darwin-sign-fixup.sh @@ -0,0 +1,5 @@ +# Fixup hook for nukeReferences, not stdenv + +source @signingUtils@ + +fixupHooks+=(signIfRequired) diff --git a/nixpkgs/pkgs/build-support/remove-references-to/default.nix b/nixpkgs/pkgs/build-support/remove-references-to/default.nix new file mode 100644 index 000000000000..f022611ef913 --- /dev/null +++ b/nixpkgs/pkgs/build-support/remove-references-to/default.nix @@ -0,0 +1,35 @@ +# The program `remove-references-to' created by this derivation replaces all +# references to the given Nix store paths in the specified files by a +# non-existent path (/nix/store/eeee...). This is useful for getting rid of +# dependencies that you know are not actually needed at runtime. + +{ lib, stdenvNoCC, signingUtils, shell ? stdenvNoCC.shell }: + +let + stdenv = stdenvNoCC; + + darwinCodeSign = stdenv.targetPlatform.isDarwin && stdenv.targetPlatform.isAarch64; +in + +stdenv.mkDerivation { + name = "remove-references-to"; + + dontUnpack = true; + dontConfigure = true; + dontBuild = true; + + installPhase = '' + mkdir -p $out/bin + substituteAll ${./remove-references-to.sh} $out/bin/remove-references-to + chmod a+x $out/bin/remove-references-to + ''; + + postFixup = lib.optionalString darwinCodeSign '' + mkdir -p $out/nix-support + substituteAll ${./darwin-sign-fixup.sh} $out/nix-support/setup-hooks.sh + ''; + + inherit (builtins) storeDir; + shell = lib.getBin shell + (shell.shellPath or ""); + signingUtils = if darwinCodeSign then signingUtils else null; +} diff --git a/nixpkgs/pkgs/build-support/remove-references-to/remove-references-to.sh b/nixpkgs/pkgs/build-support/remove-references-to/remove-references-to.sh new file mode 100755 index 000000000000..a4d068eb591e --- /dev/null +++ b/nixpkgs/pkgs/build-support/remove-references-to/remove-references-to.sh @@ -0,0 +1,37 @@ +#! @shell@ -e + +fixupHooks=() + +if [ -e @out@/nix-support/setup-hooks.sh ]; then + source @out@/nix-support/setup-hooks.sh +fi + +# References to remove +targets=() +while getopts t: o; do + case "$o" in + t) storeId=$(echo "$OPTARG" | sed -n "s|^@storeDir@/\\([a-z0-9]\{32\}\\)-.*|\1|p") + if [ -z "$storeId" ]; then + echo "-t argument must be a Nix store path" + exit 1 + fi + targets+=("$storeId") + esac +done +shift $(($OPTIND-1)) + +# Files to remove the references from +regions=() +for i in "$@"; do + test ! 
-L "$i" -a -f "$i" && regions+=("$i") +done + +for target in "${targets[@]}" ; do + sed -i -e "s|$target|eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee|g" "${regions[@]}" +done + +for region in "${regions[@]}"; do + for hook in "${fixupHooks[@]}"; do + eval "$hook" "$region" + done +done diff --git a/nixpkgs/pkgs/build-support/replace-dependency.nix b/nixpkgs/pkgs/build-support/replace-dependency.nix new file mode 100644 index 000000000000..5b4c127cdd8e --- /dev/null +++ b/nixpkgs/pkgs/build-support/replace-dependency.nix @@ -0,0 +1,83 @@ +{ runCommandLocal, nix, lib }: + +# Replace a single dependency in the requisites tree of drv, propagating +# the change all the way up the tree, without a full rebuild. This can be +# useful, for example, to patch a security hole in libc and still use your +# system safely without rebuilding the world. This should be a short term +# solution, as soon as a rebuild can be done the properly rebuild derivation +# should be used. The old dependency and new dependency MUST have the same-length +# name, and ideally should have close-to-identical directory layout. +# +# Example: safeFirefox = replaceDependency { +# drv = firefox; +# oldDependency = glibc; +# newDependency = overrideDerivation glibc (attrs: { +# patches = attrs.patches ++ [ ./fix-glibc-hole.patch ]; +# }); +# }; +# This will rebuild glibc with your security patch, then copy over firefox +# (and all of its dependencies) without rebuilding further. +{ drv, oldDependency, newDependency, verbose ? true }: + +with lib; + +let + warn = if verbose then builtins.trace else (x: y: y); + references = import (runCommandLocal "references.nix" { exportReferencesGraph = [ "graph" drv ]; } '' + (echo { + while read path + do + echo " \"$path\" = [" + read count + read count + while [ "0" != "$count" ] + do + read ref_path + if [ "$ref_path" != "$path" ] + then + echo " (builtins.storePath (/. 
+ \"$ref_path\"))" + fi + count=$(($count - 1)) + done + echo " ];" + done < graph + echo }) > $out + '').outPath; + + discard = builtins.unsafeDiscardStringContext; + + oldStorepath = builtins.storePath (discard (toString oldDependency)); + + referencesOf = drv: references.${discard (toString drv)}; + + dependsOnOldMemo = listToAttrs (map + (drv: { name = discard (toString drv); + value = elem oldStorepath (referencesOf drv) || + any dependsOnOld (referencesOf drv); + }) (builtins.attrNames references)); + + dependsOnOld = drv: dependsOnOldMemo.${discard (toString drv)}; + + drvName = drv: + discard (substring 33 (stringLength (builtins.baseNameOf drv)) (builtins.baseNameOf drv)); + + rewriteHashes = drv: hashes: runCommandLocal (drvName drv) { nixStore = "${nix.out}/bin/nix-store"; } '' + $nixStore --dump ${drv} | sed 's|${baseNameOf drv}|'$(basename $out)'|g' | sed -e ${ + concatStringsSep " -e " (mapAttrsToList (name: value: + "'s|${baseNameOf name}|${baseNameOf value}|g'" + ) hashes) + } | $nixStore --restore $out + ''; + + rewrittenDeps = listToAttrs [ {name = discard (toString oldDependency); value = newDependency;} ]; + + rewriteMemo = listToAttrs (map + (drv: { name = discard (toString drv); + value = rewriteHashes (builtins.storePath drv) + (filterAttrs (n: v: builtins.elem (builtins.storePath (discard (toString n))) (referencesOf drv)) rewriteMemo); + }) + (filter dependsOnOld (builtins.attrNames references))) // rewrittenDeps; + + drvHash = discard (toString drv); +in assert (stringLength (drvName (toString oldDependency)) == stringLength (drvName (toString newDependency))); +rewriteMemo.${drvHash} or (warn "replace-dependency.nix: Derivation ${drvHash} does not depend on ${discard (toString oldDependency)}" drv) diff --git a/nixpkgs/pkgs/build-support/replace-secret/replace-secret.nix b/nixpkgs/pkgs/build-support/replace-secret/replace-secret.nix new file mode 100644 index 000000000000..4881ba25f5d2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/replace-secret/replace-secret.nix @@ -0,0 +1,36 @@ +{ stdenv, lib, python3 }: + +stdenv.mkDerivation { + name = "replace-secret"; + buildInputs = [ python3 ]; + dontUnpack = true; + installPhase = '' + runHook preInstall + install -D ${./replace-secret.py} $out/bin/replace-secret + patchShebangs $out + runHook postInstall + ''; + installCheckPhase = '' + install -m 0600 ${./test/input_file} long_test + $out/bin/replace-secret "replace this" ${./test/passwd} long_test + $out/bin/replace-secret "and this" ${./test/rsa} long_test + diff ${./test/expected_long_output} long_test + + install -m 0600 ${./test/input_file} short_test + $out/bin/replace-secret "replace this" <(echo "a") short_test + $out/bin/replace-secret "and this" <(echo "b") short_test + diff ${./test/expected_short_output} short_test + ''; + meta = with lib; { + platforms = platforms.all; + maintainers = with maintainers; [ talyz ]; + license = licenses.mit; + description = "Replace a string in one file with a secret from a second file"; + longDescription = '' + Replace a string in one file with a secret from a second file. + + Since the secret is read from a file, it won't be leaked through + '/proc/<pid>/cmdline', unlike when 'sed' or 'replace' is used. 
+ ''; + }; +} diff --git a/nixpkgs/pkgs/build-support/replace-secret/replace-secret.py b/nixpkgs/pkgs/build-support/replace-secret/replace-secret.py new file mode 100755 index 000000000000..30ff41d491ba --- /dev/null +++ b/nixpkgs/pkgs/build-support/replace-secret/replace-secret.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python + +import argparse +from argparse import RawDescriptionHelpFormatter + +description = """ +Replace a string in one file with a secret from a second file. + +Since the secret is read from a file, it won't be leaked through +'/proc/<pid>/cmdline', unlike when 'sed' or 'replace' is used. +""" + +parser = argparse.ArgumentParser( + description=description, + formatter_class=RawDescriptionHelpFormatter +) +parser.add_argument("string_to_replace", help="the string to replace") +parser.add_argument("secret_file", help="the file containing the secret") +parser.add_argument("file", help="the file to perform the replacement on") +args = parser.parse_args() + +with open(args.secret_file) as sf, open(args.file, 'r+') as f: + old = f.read() + secret = sf.read().strip("\n") + new_content = old.replace(args.string_to_replace, secret) + f.seek(0) + f.write(new_content) + f.truncate() diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/expected_long_output b/nixpkgs/pkgs/build-support/replace-secret/test/expected_long_output new file mode 100644 index 000000000000..37bd66b905f5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/replace-secret/test/expected_long_output @@ -0,0 +1,30 @@ +beginning +middle $6$UcbJUl5g$HRMfKNKsLTfVbcQb.P5o0bmZUfHDYkWseMSuZ8F5jSIGZZcI3Jnit23f8ZeZOGi4KL86HVM9RYqrpYySOu/fl0 not this +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAzrru6v5tfwQl6L+rOUjtLo8kbhMUlCLXP7TYngSGrkzPMWe+ +0gB04UAmiPZXfBmvj5fPqYiFjIaEDHE/SD41vJB/RJKKtId2gCAIHhBLkbr+4+60 +yEbLkJci5i4kJC1dt8OKFEzXkaVnwOSgjH+0NwO3bstZ+E70zMXS9+NS71qGsIEb +5J1TnacwW/u6CdFyakLljWOXOR14rLIpiPBBFLf+oZiepjIhlWXWHqsxZOb7zMI0 +T4W5WJ2dwGFsJ8rkYaGZ+A5qzYbi/KmHqaSPaNDsyoi7yJhAhKPByALJU916+8QO +xOnqZxWGki3PDzCslRwW4i3mGbZlBQMnlfbN3QIDAQABAoIBAHDn1W7QkFrLmCy6 +6bf6pVdFZF8d2qJhOPAZRClhTXFKj+pqv+QPzcXr9F/fMr6bhK/G+Oqdnlq2aM4m +16oMF+spe+impEyeo1CsreJFghBQcb9o8qFjUPBiKvROBP0hLcscZ4BYy29HSBgo +harWYEWfqQJA251q+fYQoP0z0WrZKddOZbRRnJ0ICRxAE7IEtDT6EYt8R9oGi2j4 +/rpdW+rYGjW3TcmzdR7lpVMJRLlbMbSdR8n6cI6rnfySygcoE5tFX5t/YZSNbBPg +GebKCbEHYNTTG8bC1qjUyzlbEQ6XYWvFO7HTKU7105XpjYTQFByeo0IVkin0o5KW +t7eQWb0CgYEA6zZUWsYoQ13nXEU6Ky89Q9uhesMfaJ/F2X5ikQSRqRvrR3QR+ULe +eNnCl10O9SiFpR4b5gSbLSHMffxGN60P1nEO4CiIKE+gOii8Kdk5htIJFy/dcZUc +PuPM+zD9/6Is5sAWUZo45bnT6685h6EjM2+6zNZtx/XMjSfWbHaY+HMCgYEA4QAy +6ZEgd6FHnNfM/q2o8XU3d6OCdhcu26u6ydnCalbSpPSKWOi6gnHK4ZnGdryXgIYw +hRkvYINfiONkShYytotIh4YxUbgpwdvJRyKa2ZdWhcMmtFzZOcEVzQTKBasFT74C +Wo0iybZ++XZh3M0+n7oyyx39aR7diZ+/zq6PnG8CgYB8B1QH4cHNdDDRqPd5WhmW +NLQ7xbREOSvc+hYDnkMoxz4TmZL4u1gQpdNEeZ+visSeQvg3HGqvK8lnDaYBKdLW +IxvS+8yAZSx6PoyqDI+XFh4RCf5dLGGOkBTAyB7Hs761lsiuEwK5sHmdJ/LQIBot +v1bjOJb/AA/yxvT8kLUtHQKBgGIA9iwqXJv/EfRNQytDdS0HQ4vHGtJZMr3YRVoa +kcZD3yieo4wqguLCsf4mPv4FE3CWAphW6f39+yTi9xIWLSy56nOtjdnsf7PDCh8E +AbL5amSFJly1fKDda6OLjHt/jKa5Osk6ZIa8CP6cA/BrLfXg4rL6cyDQouqJPMDH +5CHdAoGBAIChjbTyoYvANkoANCK4SuqLUYeiYREfiM3sqHe1xirK1PPHw03ZLITl +ltjo9qE6kPXWcTBVckTKGFlntyCT283FC0/vMmHo8dTdtxF4/wSbkqs3ORuJ3p5J +cNtLYGD3vgwLmg6tTur4U60XN+tYDzWGteez8J9GwTMfKJmuS9af +-----END RSA PRIVATE KEY----- +end diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/expected_short_output b/nixpkgs/pkgs/build-support/replace-secret/test/expected_short_output new file mode 100644 index 000000000000..3c81b2e2f991 
--- /dev/null +++ b/nixpkgs/pkgs/build-support/replace-secret/test/expected_short_output @@ -0,0 +1,4 @@ +beginning +middle a not this +b +end diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/input_file b/nixpkgs/pkgs/build-support/replace-secret/test/input_file new file mode 100644 index 000000000000..1e7eadfaab20 --- /dev/null +++ b/nixpkgs/pkgs/build-support/replace-secret/test/input_file @@ -0,0 +1,4 @@ +beginning +middle replace this not this +and this +end diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/passwd b/nixpkgs/pkgs/build-support/replace-secret/test/passwd new file mode 100644 index 000000000000..68f266226e4a --- /dev/null +++ b/nixpkgs/pkgs/build-support/replace-secret/test/passwd @@ -0,0 +1 @@ +$6$UcbJUl5g$HRMfKNKsLTfVbcQb.P5o0bmZUfHDYkWseMSuZ8F5jSIGZZcI3Jnit23f8ZeZOGi4KL86HVM9RYqrpYySOu/fl0 diff --git a/nixpkgs/pkgs/build-support/replace-secret/test/rsa b/nixpkgs/pkgs/build-support/replace-secret/test/rsa new file mode 100644 index 000000000000..138cc99ed225 --- /dev/null +++ b/nixpkgs/pkgs/build-support/replace-secret/test/rsa @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAzrru6v5tfwQl6L+rOUjtLo8kbhMUlCLXP7TYngSGrkzPMWe+ +0gB04UAmiPZXfBmvj5fPqYiFjIaEDHE/SD41vJB/RJKKtId2gCAIHhBLkbr+4+60 +yEbLkJci5i4kJC1dt8OKFEzXkaVnwOSgjH+0NwO3bstZ+E70zMXS9+NS71qGsIEb +5J1TnacwW/u6CdFyakLljWOXOR14rLIpiPBBFLf+oZiepjIhlWXWHqsxZOb7zMI0 +T4W5WJ2dwGFsJ8rkYaGZ+A5qzYbi/KmHqaSPaNDsyoi7yJhAhKPByALJU916+8QO +xOnqZxWGki3PDzCslRwW4i3mGbZlBQMnlfbN3QIDAQABAoIBAHDn1W7QkFrLmCy6 +6bf6pVdFZF8d2qJhOPAZRClhTXFKj+pqv+QPzcXr9F/fMr6bhK/G+Oqdnlq2aM4m +16oMF+spe+impEyeo1CsreJFghBQcb9o8qFjUPBiKvROBP0hLcscZ4BYy29HSBgo +harWYEWfqQJA251q+fYQoP0z0WrZKddOZbRRnJ0ICRxAE7IEtDT6EYt8R9oGi2j4 +/rpdW+rYGjW3TcmzdR7lpVMJRLlbMbSdR8n6cI6rnfySygcoE5tFX5t/YZSNbBPg +GebKCbEHYNTTG8bC1qjUyzlbEQ6XYWvFO7HTKU7105XpjYTQFByeo0IVkin0o5KW +t7eQWb0CgYEA6zZUWsYoQ13nXEU6Ky89Q9uhesMfaJ/F2X5ikQSRqRvrR3QR+ULe +eNnCl10O9SiFpR4b5gSbLSHMffxGN60P1nEO4CiIKE+gOii8Kdk5htIJFy/dcZUc +PuPM+zD9/6Is5sAWUZo45bnT6685h6EjM2+6zNZtx/XMjSfWbHaY+HMCgYEA4QAy +6ZEgd6FHnNfM/q2o8XU3d6OCdhcu26u6ydnCalbSpPSKWOi6gnHK4ZnGdryXgIYw +hRkvYINfiONkShYytotIh4YxUbgpwdvJRyKa2ZdWhcMmtFzZOcEVzQTKBasFT74C +Wo0iybZ++XZh3M0+n7oyyx39aR7diZ+/zq6PnG8CgYB8B1QH4cHNdDDRqPd5WhmW +NLQ7xbREOSvc+hYDnkMoxz4TmZL4u1gQpdNEeZ+visSeQvg3HGqvK8lnDaYBKdLW +IxvS+8yAZSx6PoyqDI+XFh4RCf5dLGGOkBTAyB7Hs761lsiuEwK5sHmdJ/LQIBot +v1bjOJb/AA/yxvT8kLUtHQKBgGIA9iwqXJv/EfRNQytDdS0HQ4vHGtJZMr3YRVoa +kcZD3yieo4wqguLCsf4mPv4FE3CWAphW6f39+yTi9xIWLSy56nOtjdnsf7PDCh8E +AbL5amSFJly1fKDda6OLjHt/jKa5Osk6ZIa8CP6cA/BrLfXg4rL6cyDQouqJPMDH +5CHdAoGBAIChjbTyoYvANkoANCK4SuqLUYeiYREfiM3sqHe1xirK1PPHw03ZLITl +ltjo9qE6kPXWcTBVckTKGFlntyCT283FC0/vMmHo8dTdtxF4/wSbkqs3ORuJ3p5J +cNtLYGD3vgwLmg6tTur4U60XN+tYDzWGteez8J9GwTMfKJmuS9af +-----END RSA PRIVATE KEY----- diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/build-crate.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/build-crate.nix new file mode 100644 index 000000000000..37bf3ec26f77 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/build-crate.nix @@ -0,0 +1,129 @@ +{ lib, stdenv +, mkRustcDepArgs, mkRustcFeatureArgs, needUnstableCLI +, rust +}: + +{ crateName, + dependencies, + crateFeatures, crateRenames, libName, release, libPath, + crateType, metadata, crateBin, hasCrateBin, + extraRustcOpts, verbose, colors, + buildTests, + codegenUnits +}: + + let + baseRustcOpts = + [ + (if release then "-C opt-level=3" else "-C debuginfo=2") + "-C codegen-units=${toString codegenUnits}" + 
"--remap-path-prefix=$NIX_BUILD_TOP=/" + (mkRustcDepArgs dependencies crateRenames) + (mkRustcFeatureArgs crateFeatures) + ] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [ + "--target" (rust.toRustTargetSpec stdenv.hostPlatform) + ] ++ lib.optionals (needUnstableCLI dependencies) [ + "-Z" "unstable-options" + ] ++ extraRustcOpts + # since rustc 1.42 the "proc_macro" crate is part of the default crate prelude + # https://github.com/rust-lang/cargo/commit/4d64eb99a4#diff-7f98585dbf9d30aa100c8318e2c77e79R1021-R1022 + ++ lib.optional (lib.elem "proc-macro" crateType) "--extern proc_macro" + ; + rustcMeta = "-C metadata=${metadata} -C extra-filename=-${metadata}"; + + + # build the final rustc arguments that can be different between different + # crates + libRustcOpts = lib.concatStringsSep " " ( + baseRustcOpts + ++ [rustcMeta] + ++ (map (x: "--crate-type ${x}") crateType) + ); + + binRustcOpts = lib.concatStringsSep " " ( + baseRustcOpts + ); + + build_bin = if buildTests then "build_bin_test" else "build_bin"; + in '' + runHook preBuild + + # configure & source common build functions + LIB_RUSTC_OPTS="${libRustcOpts}" + BIN_RUSTC_OPTS="${binRustcOpts}" + LIB_EXT="${stdenv.hostPlatform.extensions.sharedLibrary}" + LIB_PATH="${libPath}" + LIB_NAME="${libName}" + + CRATE_NAME='${lib.replaceStrings ["-"] ["_"] libName}' + + setup_link_paths + + if [[ -e "$LIB_PATH" ]]; then + build_lib "$LIB_PATH" + ${lib.optionalString buildTests ''build_lib_test "$LIB_PATH"''} + elif [[ -e src/lib.rs ]]; then + build_lib src/lib.rs + ${lib.optionalString buildTests "build_lib_test src/lib.rs"} + fi + + + + ${lib.optionalString (lib.length crateBin > 0) (lib.concatMapStringsSep "\n" (bin: + let + haveRequiredFeature = if bin ? requiredFeatures then + # Check that all element in requiredFeatures are also present in crateFeatures + lib.intersectLists bin.requiredFeatures crateFeatures == bin.requiredFeatures + else + true; + in + if haveRequiredFeature then '' + mkdir -p target/bin + BIN_NAME='${bin.name or crateName}' + ${if !bin ? path then '' + BIN_PATH="" + search_for_bin_path "$BIN_NAME" + '' else '' + BIN_PATH='${bin.path}' + ''} + ${build_bin} "$BIN_NAME" "$BIN_PATH" + '' else '' + echo Binary ${bin.name or crateName} not compiled due to not having all of the required features -- ${lib.escapeShellArg (builtins.toJSON bin.requiredFeatures)} -- enabled. + '') crateBin)} + + ${lib.optionalString buildTests '' + # When tests are enabled build all the files in the `tests` directory as + # test binaries. + if [ -d tests ]; then + # find all the .rs files (or symlinks to those) in the tests directory, no subdirectories + find tests -maxdepth 1 \( -type f -o -type l \) -a -name '*.rs' -print0 | while IFS= read -r -d ''' file; do + mkdir -p target/bin + build_bin_test_file "$file" + done + + # find all the subdirectories of tests/ that contain a main.rs file as + # that is also a test according to cargo + find tests/ -mindepth 1 -maxdepth 2 \( -type f -o -type l \) -a -name 'main.rs' -print0 | while IFS= read -r -d ''' file; do + mkdir -p target/bin + build_bin_test_file "$file" + done + + fi + ''} + + # If crateBin is empty and hasCrateBin is not set then we must try to + # detect some kind of bin target based on some files that might exist. 
+ ${lib.optionalString (lib.length crateBin == 0 && !hasCrateBin) '' + if [[ -e src/main.rs ]]; then + mkdir -p target/bin + ${build_bin} ${crateName} src/main.rs + fi + for i in src/bin/*.rs; do #*/ + mkdir -p target/bin + ${build_bin} "$(basename $i .rs)" "$i" + done + ''} + # Remove object files to avoid "wrong ELF type" + find target -type f -name "*.o" -print0 | xargs -0 rm -f + runHook postBuild + '' diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/configure-crate.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/configure-crate.nix new file mode 100644 index 000000000000..60310f178747 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/configure-crate.nix @@ -0,0 +1,217 @@ +{ lib, stdenv, rust, echo_colored, noisily, mkRustcDepArgs, mkRustcFeatureArgs }: +{ + build +, buildDependencies +, codegenUnits +, colors +, completeBuildDeps +, completeDeps +, crateAuthors +, crateDescription +, crateHomepage +, crateFeatures +, crateName +, crateRenames +, crateVersion +, extraLinkFlags +, extraRustcOptsForBuildRs +, libName +, libPath +, release +, verbose +, workspace_member }: +let version_ = lib.splitString "-" crateVersion; + versionPre = lib.optionalString (lib.tail version_ != []) (lib.elemAt version_ 1); + version = lib.splitVersion (lib.head version_); + rustcOpts = lib.foldl' (opts: opt: opts + " " + opt) + (if release then "-C opt-level=3" else "-C debuginfo=2") + (["-C codegen-units=${toString codegenUnits}"] ++ extraRustcOptsForBuildRs); + buildDeps = mkRustcDepArgs buildDependencies crateRenames; + authors = lib.concatStringsSep ":" crateAuthors; + optLevel = if release then 3 else 0; + completeDepsDir = lib.concatStringsSep " " completeDeps; + completeBuildDepsDir = lib.concatStringsSep " " completeBuildDeps; + envFeatures = lib.concatStringsSep " " ( + map (f: lib.replaceStrings ["-"] ["_"] (lib.toUpper f)) crateFeatures + ); +in '' + ${echo_colored colors} + ${noisily colors verbose} + source ${./lib.sh} + + ${lib.optionalString (workspace_member != null) '' + noisily cd "${workspace_member}" +''} + ${lib.optionalString (workspace_member == null) '' + echo_colored "Searching for matching Cargo.toml (${crateName})" + local cargo_toml_dir=$(matching_cargo_toml_dir "${crateName}") + if [ -z "$cargo_toml_dir" ]; then + echo_error "ERROR configuring ${crateName}: No matching Cargo.toml in $(pwd) found." >&2 + exit 23 + fi + noisily cd "$cargo_toml_dir" +''} + + runHook preConfigure + + symlink_dependency() { + # $1 is the nix-store path of a dependency + # $2 is the target path + i=$1 + ln -s -f $i/lib/*.rlib $2 #*/ + ln -s -f $i/lib/*.so $i/lib/*.dylib $2 #*/ + if [ -e $i/env ]; then + source $i/env + fi + } + + # The following steps set up the dependencies of the crate. Two + # kinds of dependencies are distinguished: build dependencies + # (used by the build script) and crate dependencies. For each + # dependency we have to: + # + # - Make its Rust library available to rustc. This is done by + # symlinking all library dependencies into a directory that + # can be provided to rustc. + # - Accumulate linking flags. These flags are largely used for + # linking native libraries. + # + # The crate link flags are added to the `link` and `link.final` + # files. The `link` file is used for linkage in the current + # crate. The `link.final` file will be copied to the output and can + # be used by downstream crates to get the linker flags of this + # crate. 
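+  # As a purely illustrative sketch (the store path below is a hypothetical
+  # placeholder), a dependency's `link` file might contain flags such as
+  #
+  #   -L native=/nix/store/...-openssl/lib
+  #   -l ssl
+  #
+  # These lines are concatenated below into target/link (used while compiling
+  # the current crate) and target/link.final (installed to the output so that
+  # downstream crates can reuse them).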
+ + mkdir -p target/{deps,lib,build,buildDeps} + chmod uga+w target -R + echo ${extraLinkFlags} > target/link + echo ${extraLinkFlags} > target/link.final + + # Prepare crate dependencies + for i in ${completeDepsDir}; do + symlink_dependency $i target/deps + if [ -e "$i/lib/link" ]; then + cat $i/lib/link >> target/link + cat $i/lib/link >> target/link.final + fi + done + + # Prepare crate build dependencies that are used for the build script. + for i in ${completeBuildDepsDir}; do + symlink_dependency $i target/buildDeps + if [ -e "$i/lib/link" ]; then + cat $i/lib/link >> target/link.build + fi + done + + # Remove duplicate linker flags from the build dependencies. + if [[ -e target/link.build ]]; then + sort -uo target/link.build target/link.build + fi + + # Remove duplicate linker flags from the dependencies. + sort -uo target/link target/link + tr '\n' ' ' < target/link > target/link_ + + # Remove duplicate linker flags from the that are written + # to the derivation's output. + sort -uo target/link.final target/link.final + + EXTRA_BUILD="" + BUILD_OUT_DIR="" + export CARGO_PKG_NAME=${crateName} + export CARGO_PKG_VERSION=${crateVersion} + export CARGO_PKG_AUTHORS="${authors}" + export CARGO_PKG_DESCRIPTION="${crateDescription}" + + export CARGO_CFG_TARGET_ARCH=${rust.toTargetArch stdenv.hostPlatform} + export CARGO_CFG_TARGET_OS=${rust.toTargetOs stdenv.hostPlatform} + export CARGO_CFG_TARGET_FAMILY="unix" + export CARGO_CFG_UNIX=1 + export CARGO_CFG_TARGET_ENV="gnu" + export CARGO_CFG_TARGET_ENDIAN=${if stdenv.hostPlatform.parsed.cpu.significantByte.name == "littleEndian" then "little" else "big"} + export CARGO_CFG_TARGET_POINTER_WIDTH=${with stdenv.hostPlatform; toString (if isILP32 then 32 else parsed.cpu.bits)} + export CARGO_CFG_TARGET_VENDOR=${stdenv.hostPlatform.parsed.vendor.name} + + export CARGO_MANIFEST_DIR=$(pwd) + export DEBUG="${toString (!release)}" + export OPT_LEVEL="${toString optLevel}" + export TARGET="${rust.toRustTargetSpec stdenv.hostPlatform}" + export HOST="${rust.toRustTargetSpec stdenv.buildPlatform}" + export PROFILE=${if release then "release" else "debug"} + export OUT_DIR=$(pwd)/target/build/${crateName}.out + export CARGO_PKG_VERSION_MAJOR=${lib.elemAt version 0} + export CARGO_PKG_VERSION_MINOR=${lib.elemAt version 1} + export CARGO_PKG_VERSION_PATCH=${lib.elemAt version 2} + export CARGO_PKG_VERSION_PRE="${versionPre}" + export CARGO_PKG_HOMEPAGE="${crateHomepage}" + export NUM_JOBS=$NIX_BUILD_CORES + export RUSTC="rustc" + export RUSTDOC="rustdoc" + + BUILD="" + if [[ ! -z "${build}" ]] ; then + BUILD=${build} + elif [[ -e "build.rs" ]]; then + BUILD="build.rs" + fi + + # Compile and run the build script, when available. + if [[ ! 
-z "$BUILD" ]] ; then + echo_build_heading "$BUILD" ${libName} + mkdir -p target/build/${crateName} + EXTRA_BUILD_FLAGS="" + if [ -e target/link.build ]; then + EXTRA_BUILD_FLAGS="$EXTRA_BUILD_FLAGS $(tr '\n' ' ' < target/link.build)" + fi + noisily rustc --crate-name build_script_build $BUILD --crate-type bin ${rustcOpts} \ + ${mkRustcFeatureArgs crateFeatures} --out-dir target/build/${crateName} --emit=dep-info,link \ + -L dependency=target/buildDeps ${buildDeps} --cap-lints allow $EXTRA_BUILD_FLAGS --color ${colors} + + mkdir -p target/build/${crateName}.out + export RUST_BACKTRACE=1 + BUILD_OUT_DIR="-L $OUT_DIR" + mkdir -p $OUT_DIR + + ( + # Features should be set as environment variable for build scripts: + # https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts + for feature in ${envFeatures}; do + export CARGO_FEATURE_$feature=1 + done + + target/build/${crateName}/build_script_build > target/build/${crateName}.opt + ) + + set +e + EXTRA_BUILD=$(sed -n "s/^cargo:rustc-flags=\(.*\)/\1/p" target/build/${crateName}.opt | tr '\n' ' ' | sort -u) + EXTRA_FEATURES=$(sed -n "s/^cargo:rustc-cfg=\(.*\)/--cfg \1/p" target/build/${crateName}.opt | tr '\n' ' ') + EXTRA_LINK_ARGS=$(sed -n "s/^cargo:rustc-link-arg=\(.*\)/-C link-arg=\1/p" target/build/${crateName}.opt | tr '\n' ' ') + EXTRA_LINK_ARGS_BINS=$(sed -n "s/^cargo:rustc-link-arg-bins=\(.*\)/-C link-arg=\1/p" target/build/${crateName}.opt | tr '\n' ' ') + EXTRA_LINK_ARGS_LIB=$(sed -n "s/^cargo:rustc-link-arg-lib=\(.*\)/-C link-arg=\1/p" target/build/${crateName}.opt | tr '\n' ' ') + EXTRA_LINK_LIBS=$(sed -n "s/^cargo:rustc-link-lib=\(.*\)/\1/p" target/build/${crateName}.opt | tr '\n' ' ') + EXTRA_LINK_SEARCH=$(sed -n "s/^cargo:rustc-link-search=\(.*\)/\1/p" target/build/${crateName}.opt | tr '\n' ' ' | sort -u) + + # We want to read part of every line that has cargo:rustc-env= prefix and + # export it as environment variables. This turns out tricky if the lines + # have spaces: we can't wrap the command in double quotes as that captures + # all the lines in single output. We can't use while read loop because + # exporting from inside of it doesn't make it to the outside scope. We + # can't use xargs as export is a built-in and does not work from it. As a + # last resort then, we change the IFS so that the for loop does not split + # on spaces and reset it after we are done. See ticket #199298. + # + _OLDIFS="$IFS" + IFS=$'\n' + for env in $(sed -n "s/^cargo:rustc-env=\(.*\)/\1/p" target/build/${crateName}.opt); do + export "$env" + done + IFS="$_OLDIFS" + + CRATENAME=$(echo ${crateName} | sed -e "s/\(.*\)-sys$/\U\1/" -e "s/-/_/g") + grep -P "^cargo:(?!(rustc-|warning=|rerun-if-changed=|rerun-if-env-changed))" target/build/${crateName}.opt \ + | awk -F= "/^cargo:/ { sub(/^cargo:/, \"\", \$1); gsub(/-/, \"_\", \$1); print \"export \" toupper(\"DEP_$(echo $CRATENAME)_\" \$1) \"=\" \$2 }" > target/env + set -e + fi + runHook postConfigure +'' diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/default.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/default.nix new file mode 100644 index 000000000000..d977fb9f7c27 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/default.nix @@ -0,0 +1,402 @@ +# Code for buildRustCrate, a Nix function that builds Rust code, just +# like Cargo, but using Nix instead. +# +# This can be useful for deploying packages with NixOps, and to share +# binary dependencies between projects. 
+ +{ lib +, stdenv +, defaultCrateOverrides +, fetchCrate +, pkgsBuildBuild +, rustc +, rust +, cargo +, jq +, libiconv +}: + +let + # Create rustc arguments to link against the given list of dependencies + # and renames. + # + # See docs for crateRenames below. + mkRustcDepArgs = dependencies: crateRenames: + lib.concatMapStringsSep " " + (dep: + let + normalizeName = lib.replaceStrings [ "-" ] [ "_" ]; + extern = normalizeName dep.libName; + # Find a choice that matches in name and optionally version. + findMatchOrUseExtern = choices: + lib.findFirst + (choice: + (!(choice ? version) + || choice.version == dep.version or "")) + { rename = extern; } + choices; + name = + if lib.hasAttr dep.crateName crateRenames then + let choices = crateRenames.${dep.crateName}; + in + normalizeName ( + if builtins.isList choices + then (findMatchOrUseExtern choices).rename + else choices + ) + else + extern; + opts = lib.optionalString (dep.stdlib or false) "noprelude:"; + filename = + if lib.any (x: x == "lib" || x == "rlib") dep.crateType + then "${dep.metadata}.rlib" + else "${dep.metadata}${stdenv.hostPlatform.extensions.sharedLibrary}"; + in + " --extern ${opts}${name}=${dep.lib}/lib/lib${extern}-${filename}" + ) + dependencies; + + # Create feature arguments for rustc. + mkRustcFeatureArgs = lib.concatMapStringsSep " " (f: ''--cfg feature=\"${f}\"''); + + # Whether we need to use unstable command line flags + # + # Currently just needed for standard library dependencies, which have a + # special "noprelude:" modifier. If in later versions of Rust this is + # stabilized we can account for that here, too, so we don't opt into + # instability unnecessarily. + needUnstableCLI = dependencies: + lib.any (dep: dep.stdlib or false) dependencies; + + inherit (import ./log.nix { inherit lib; }) noisily echo_colored; + + configureCrate = import ./configure-crate.nix { + inherit lib stdenv rust echo_colored noisily mkRustcDepArgs mkRustcFeatureArgs; + }; + + buildCrate = import ./build-crate.nix { + inherit lib stdenv mkRustcDepArgs mkRustcFeatureArgs needUnstableCLI rust; + }; + + installCrate = import ./install-crate.nix { inherit stdenv; }; + + # Allow access to the rust attribute set from inside buildRustCrate, which + # has a parameter that shadows the name. + rustAttrs = rust; +in + + /* The overridable pkgs.buildRustCrate function. + * + * Any unrecognized parameters will be passed as to + * the underlying stdenv.mkDerivation. + */ +crate_: lib.makeOverridable + ( + # The rust compiler to use. + # + # Default: pkgs.rustc + { rust + # Whether to build a release version (`true`) or a debug + # version (`false`). Debug versions are faster to build + # but might be much slower at runtime. + , release + # Whether to print rustc invocations etc. + # + # Example: false + # Default: true + , verbose + # A list of rust/cargo features to enable while building the crate. + # Example: [ "std" "async" ] + , features + # Additional native build inputs for building this crate. + , nativeBuildInputs + # Additional build inputs for building this crate. + # + # Example: [ pkgs.openssl ] + , buildInputs + # Allows to override the parameters to buildRustCrate + # for any rust dependency in the transitive build tree. + # + # Default: pkgs.defaultCrateOverrides + # + # Example: + # + # pkgs.defaultCrateOverrides // { + # hello = attrs: { buildInputs = [ openssl ]; }; + # } + , crateOverrides + # Rust library dependencies, i.e. other libraries that were built + # with buildRustCrate. 
+ , dependencies + # Rust build dependencies, i.e. other libraries that were built + # with buildRustCrate and are used by a build script. + , buildDependencies + # Specify the "extern" name of a library if it differs from the library target. + # See above for an extended explanation. + # + # Default: no renames. + # + # Example: + # + # `crateRenames` supports two formats. + # + # The simple version is an attrset that maps the + # `crateName`s of the dependencies to their alternative + # names. + # + # ```nix + # { + # my_crate_name = "my_alternative_name"; + # # ... + # } + # ``` + # + # The extended version is also keyed by the `crateName`s but allows + # different names for different crate versions: + # + # ```nix + # { + # my_crate_name = [ + # { version = "1.2.3"; rename = "my_alternative_name01"; } + # { version = "3.2.3"; rename = "my_alternative_name03"; } + # ] + # # ... + # } + # ``` + # + # This roughly corresponds to the following snippet in Cargo.toml: + # + # ```toml + # [dependencies] + # my_alternative_name01 = { package = "my_crate_name", version = "0.1" } + # my_alternative_name03 = { package = "my_crate_name", version = "0.3" } + # ``` + # + # Dependencies which use the lib target name as extern name, do not need + # to be specified in the crateRenames, even if their crate name differs. + # + # Including multiple versions of a crate is very popular during + # ecosystem transitions, e.g. from futures 0.1 to futures 0.3. + , crateRenames + # A list of extra options to pass to rustc. + # + # Example: [ "-Z debuginfo=2" ] + # Default: [] + , extraRustcOpts + # A list of extra options to pass to rustc when building a build.rs. + # + # Example: [ "-Z debuginfo=2" ] + # Default: [] + , extraRustcOptsForBuildRs + # Whether to enable building tests. + # Use true to enable. + # Default: false + , buildTests + # Passed to stdenv.mkDerivation. + , preUnpack + # Passed to stdenv.mkDerivation. + , postUnpack + # Passed to stdenv.mkDerivation. + , prePatch + # Passed to stdenv.mkDerivation. + , patches + # Passed to stdenv.mkDerivation. + , postPatch + # Passed to stdenv.mkDerivation. + , preConfigure + # Passed to stdenv.mkDerivation. + , postConfigure + # Passed to stdenv.mkDerivation. + , preBuild + # Passed to stdenv.mkDerivation. + , postBuild + # Passed to stdenv.mkDerivation. + , preInstall + # Passed to stdenv.mkDerivation. + , postInstall + }: + + let + crate = crate_ // (lib.attrByPath [ crate_.crateName ] (attr: { }) crateOverrides crate_); + dependencies_ = dependencies; + buildDependencies_ = buildDependencies; + processedAttrs = [ + "src" + "nativeBuildInputs" + "buildInputs" + "crateBin" + "crateLib" + "libName" + "libPath" + "buildDependencies" + "dependencies" + "features" + "crateRenames" + "crateName" + "version" + "build" + "authors" + "colors" + "edition" + "buildTests" + "codegenUnits" + ]; + extraDerivationAttrs = builtins.removeAttrs crate processedAttrs; + nativeBuildInputs_ = nativeBuildInputs; + buildInputs_ = buildInputs; + extraRustcOpts_ = extraRustcOpts; + extraRustcOptsForBuildRs_ = extraRustcOptsForBuildRs; + buildTests_ = buildTests; + + # crate2nix has a hack for the old bash based build script that did split + # entries at `,`. No we have to work around that hack. + # https://github.com/kolloch/crate2nix/blame/5b19c1b14e1b0e5522c3e44e300d0b332dc939e7/crate2nix/templates/build.nix.tera#L89 + crateBin = lib.filter (bin: !(bin ? name && bin.name == ",")) (crate.crateBin or [ ]); + hasCrateBin = crate ? 
crateBin; + in + stdenv.mkDerivation (rec { + + inherit (crate) crateName; + inherit + preUnpack + postUnpack + prePatch + patches + postPatch + preConfigure + postConfigure + preBuild + postBuild + preInstall + postInstall + buildTests + ; + + src = crate.src or (fetchCrate { inherit (crate) crateName version sha256; }); + name = "rust_${crate.crateName}-${crate.version}${lib.optionalString buildTests_ "-test"}"; + version = crate.version; + depsBuildBuild = [ pkgsBuildBuild.stdenv.cc ]; + nativeBuildInputs = [ rust stdenv.cc cargo jq ] + ++ lib.optionals stdenv.buildPlatform.isDarwin [ libiconv ] + ++ (crate.nativeBuildInputs or [ ]) ++ nativeBuildInputs_; + buildInputs = lib.optionals stdenv.isDarwin [ libiconv ] ++ (crate.buildInputs or [ ]) ++ buildInputs_; + dependencies = map lib.getLib dependencies_; + buildDependencies = map lib.getLib buildDependencies_; + + completeDeps = lib.unique (dependencies ++ lib.concatMap (dep: dep.completeDeps) dependencies); + completeBuildDeps = lib.unique ( + buildDependencies + ++ lib.concatMap (dep: dep.completeBuildDeps ++ dep.completeDeps) buildDependencies + ); + + # Create a list of features that are enabled by the crate itself and + # through the features argument of buildRustCrate. Exclude features + # with a forward slash, since they are passed through to dependencies, + # and dep: features, since they're internal-only and do nothing except + # enable optional dependencies. + crateFeatures = lib.optionals (crate ? features) + (builtins.filter + (f: !(lib.hasInfix "/" f || lib.hasPrefix "dep:" f)) + (crate.features ++ features) + ); + + libName = if crate ? libName then crate.libName else crate.crateName; + libPath = lib.optionalString (crate ? libPath) crate.libPath; + + # Seed the symbol hashes with something unique every time. + # https://doc.rust-lang.org/1.0.0/rustc/metadata/loader/index.html#frobbing-symbols + metadata = + let + depsMetadata = lib.foldl' (str: dep: str + dep.metadata) "" (dependencies ++ buildDependencies); + hashedMetadata = builtins.hashString "sha256" + (crateName + "-" + crateVersion + "___" + toString (mkRustcFeatureArgs crateFeatures) + + "___" + depsMetadata + "___" + rustAttrs.toRustTarget stdenv.hostPlatform); + in + lib.substring 0 10 hashedMetadata; + + build = crate.build or ""; + # Either set to a concrete sub path to the crate root + # or use `null` for auto-detect. + workspace_member = crate.workspace_member or "."; + crateVersion = crate.version; + crateDescription = crate.description or ""; + crateAuthors = if crate ? authors && lib.isList crate.authors then crate.authors else [ ]; + crateHomepage = crate.homepage or ""; + crateType = + if lib.attrByPath [ "procMacro" ] false crate then [ "proc-macro" ] else + if lib.attrByPath [ "plugin" ] false crate then [ "dylib" ] else + (crate.type or [ "lib" ]); + colors = lib.attrByPath [ "colors" ] "always" crate; + extraLinkFlags = lib.concatStringsSep " " (crate.extraLinkFlags or [ ]); + edition = crate.edition or null; + codegenUnits = if crate ? codegenUnits then crate.codegenUnits else 1; + extraRustcOpts = + lib.optionals (crate ? extraRustcOpts) crate.extraRustcOpts + ++ extraRustcOpts_ + ++ (lib.optional (edition != null) "--edition ${edition}"); + extraRustcOptsForBuildRs = + lib.optionals (crate ? 
extraRustcOptsForBuildRs) crate.extraRustcOptsForBuildRs + ++ extraRustcOptsForBuildRs_ + ++ (lib.optional (edition != null) "--edition ${edition}"); + + + configurePhase = configureCrate { + inherit crateName buildDependencies completeDeps completeBuildDeps crateDescription + crateFeatures crateRenames libName build workspace_member release libPath crateVersion + extraLinkFlags extraRustcOptsForBuildRs + crateAuthors crateHomepage verbose colors codegenUnits; + }; + buildPhase = buildCrate { + inherit crateName dependencies + crateFeatures crateRenames libName release libPath crateType + metadata hasCrateBin crateBin verbose colors + extraRustcOpts buildTests codegenUnits; + }; + dontStrip = !release; + + # We need to preserve metadata in .rlib, which might get stripped on macOS. See https://github.com/NixOS/nixpkgs/issues/218712 + stripExclude = [ "*.rlib" ]; + + installPhase = installCrate crateName metadata buildTests; + + # depending on the test setting we are either producing something with bins + # and libs or just test binaries + outputs = if buildTests then [ "out" ] else [ "out" "lib" ]; + outputDev = if buildTests then [ "out" ] else [ "lib" ]; + + meta = { + mainProgram = crateName; + badPlatforms = [ + # Rust is currently unable to target the n32 ABI + lib.systems.inspect.patterns.isMips64n32 + ]; + }; + } // extraDerivationAttrs + ) + ) +{ + rust = rustc; + release = crate_.release or true; + verbose = crate_.verbose or true; + extraRustcOpts = [ ]; + extraRustcOptsForBuildRs = [ ]; + features = [ ]; + nativeBuildInputs = [ ]; + buildInputs = [ ]; + crateOverrides = defaultCrateOverrides; + preUnpack = crate_.preUnpack or ""; + postUnpack = crate_.postUnpack or ""; + prePatch = crate_.prePatch or ""; + patches = crate_.patches or [ ]; + postPatch = crate_.postPatch or ""; + preConfigure = crate_.preConfigure or ""; + postConfigure = crate_.postConfigure or ""; + preBuild = crate_.preBuild or ""; + postBuild = crate_.postBuild or ""; + preInstall = crate_.preInstall or ""; + postInstall = crate_.postInstall or ""; + dependencies = crate_.dependencies or [ ]; + buildDependencies = crate_.buildDependencies or [ ]; + crateRenames = crate_.crateRenames or { }; + buildTests = crate_.buildTests or false; +} diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/helpers.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/helpers.nix new file mode 100644 index 000000000000..386d0ce7084f --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/helpers.nix @@ -0,0 +1,26 @@ +{stdenv, lib}: +{ + kernel = stdenv.hostPlatform.parsed.kernel.name; + abi = stdenv.hostPlatform.parsed.abi.name; + cpu = stdenv.hostPlatform.parsed.cpu.name; + updateFeatures = f: up: functions: lib.deepSeq f (lib.foldl' (features: fun: fun features) (lib.attrsets.recursiveUpdate f up) functions); + mapFeatures = features: map (fun: fun { features = features; }); + mkFeatures = feat: lib.foldl (features: featureName: + if feat.${featureName} or false then + [ featureName ] ++ features + else + features + ) [] (lib.attrNames feat); + include = includedFiles: src: builtins.filterSource (path: type: + lib.any (f: + let p = toString (src + ("/" + f)); + in + p == path || (lib.strings.hasPrefix (p + "/") path) + ) includedFiles + ) src; + exclude = excludedFiles: src: builtins.filterSource (path: type: + lib.all (f: + !lib.strings.hasPrefix (toString (src + ("/" + f))) path + ) excludedFiles + ) src; +} diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/install-crate.nix 
b/nixpkgs/pkgs/build-support/rust/build-rust-crate/install-crate.nix new file mode 100644 index 000000000000..f4a4dcdb0d94 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/install-crate.nix @@ -0,0 +1,51 @@ +{ stdenv }: +crateName: metadata: buildTests: +if !buildTests then '' + runHook preInstall + # always create $out even if we do not have binaries. We are detecting binary targets during compilation, if those are missing there is no way to only have $lib + mkdir $out + if [[ -s target/env ]]; then + mkdir -p $lib + cp target/env $lib/env + fi + if [[ -s target/link.final ]]; then + mkdir -p $lib/lib + cp target/link.final $lib/lib/link + fi + if [[ "$(ls -A target/lib)" ]]; then + mkdir -p $lib/lib + cp -r target/lib/* $lib/lib #*/ + for library in $lib/lib/*.so $lib/lib/*.dylib; do #*/ + ln -s $library $(echo $library | sed -e "s/-${metadata}//") + done + fi + if [[ "$(ls -A target/build)" ]]; then # */ + mkdir -p $lib/lib + cp -r target/build/* $lib/lib # */ + fi + if [[ -d target/bin ]]; then + if [[ "$(ls -A target/bin)" ]]; then + mkdir -p $out/bin + cp -rP target/bin/* $out/bin # */ + fi + fi + runHook postInstall +'' else +# for tests we just put them all in the output. No execution. +'' + runHook preInstall + + mkdir -p $out/tests + if [ -e target/bin ]; then + find target/bin/ -type f -executable -exec cp {} $out/tests \; + fi + if [ -e target/lib ]; then + find target/lib/ -type f \! -name '*.rlib' \ + -a \! -name '*${stdenv.hostPlatform.extensions.sharedLibrary}' \ + -a \! -name '*.d' \ + -executable \ + -print0 | xargs --no-run-if-empty --null install --target $out/tests; + fi + + runHook postInstall +'' diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/lib.sh b/nixpkgs/pkgs/build-support/rust/build-rust-crate/lib.sh new file mode 100644 index 000000000000..0181ae432c85 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/lib.sh @@ -0,0 +1,182 @@ +echo_build_heading() { + if (( $# == 1 )); then + echo_colored "Building $1" + else + echo_colored "Building $1 ($2)" + fi +} + +build_lib() { + lib_src=$1 + echo_build_heading $lib_src ${libName} + + noisily rustc \ + --crate-name $CRATE_NAME \ + $lib_src \ + --out-dir target/lib \ + -L dependency=target/deps \ + --cap-lints allow \ + $LINK \ + $EXTRA_LINK_ARGS \ + $EXTRA_LINK_ARGS_LIB \ + $LIB_RUSTC_OPTS \ + $BUILD_OUT_DIR \ + $EXTRA_BUILD \ + $EXTRA_FEATURES \ + $EXTRA_RUSTC_FLAGS \ + --color $colors + + EXTRA_LIB=" --extern $CRATE_NAME=target/lib/lib$CRATE_NAME-$metadata.rlib" + if [ -e target/deps/lib$CRATE_NAME-$metadata$LIB_EXT ]; then + EXTRA_LIB="$EXTRA_LIB --extern $CRATE_NAME=target/lib/lib$CRATE_NAME-$metadata$LIB_EXT" + fi +} + +build_bin() { + local crate_name=$1 + local crate_name_=$(echo $crate_name | tr '-' '_') + local main_file="" + + if [[ ! 
-z $2 ]]; then + main_file=$2 + fi + echo_build_heading $@ + noisily rustc \ + --crate-name $crate_name_ \ + $main_file \ + --crate-type bin \ + $BIN_RUSTC_OPTS \ + --out-dir target/bin \ + -L dependency=target/deps \ + $LINK \ + $EXTRA_LINK_ARGS \ + $EXTRA_LINK_ARGS_BINS \ + $EXTRA_LIB \ + --cap-lints allow \ + $BUILD_OUT_DIR \ + $EXTRA_BUILD \ + $EXTRA_FEATURES \ + $EXTRA_RUSTC_FLAGS \ + --color ${colors} \ + + if [ "$crate_name_" != "$crate_name" ]; then + mv target/bin/$crate_name_ target/bin/$crate_name + fi +} + +build_lib_test() { + local file="$1" + EXTRA_RUSTC_FLAGS="--test $EXTRA_RUSTC_FLAGS" build_lib "$1" "$2" +} + +build_bin_test() { + local crate="$1" + local file="$2" + EXTRA_RUSTC_FLAGS="--test $EXTRA_RUSTC_FLAGS" build_bin "$1" "$2" +} + +build_bin_test_file() { + local file="$1" + local derived_crate_name="${file//\//_}" + # Make sure to strip the top level `tests` directory: see #204051. Note that + # a forward slash has now become an underscore due to the substitution + # above. + derived_crate_name=${derived_crate_name#"tests_"} + derived_crate_name="${derived_crate_name%.rs}" + build_bin_test "$derived_crate_name" "$file" +} + +# Add additional link options that were provided by the build script. +setup_link_paths() { + EXTRA_LIB="" + if [[ -e target/link_ ]]; then + EXTRA_BUILD="$(cat target/link_) $EXTRA_BUILD" + fi + + echo "$EXTRA_LINK_SEARCH" | while read i; do + if [[ ! -z "$i" ]]; then + for library in $i; do + echo "-L $library" >> target/link + L=$(echo $library | sed -e "s#$(pwd)/target/build#$lib/lib#") + echo "-L $L" >> target/link.final + done + fi + done + echo "$EXTRA_LINK_LIBS" | while read i; do + if [[ ! -z "$i" ]]; then + for library in $i; do + echo "-l $library" >> target/link + done + fi + done + + if [[ -e target/link ]]; then + tr '\n' ' ' < target/link > target/link_ + LINK=$(cat target/link_) + fi +} + +search_for_bin_path() { + # heuristic to "guess" the correct source file as found in cargo: + # https://github.com/rust-lang/cargo/blob/90fc9f620190d5fa3c80b0c8c65a1e1361e6b8ae/src/cargo/util/toml/targets.rs#L308-L325 + + BIN_NAME=$1 + BIN_NAME_=$(echo $BIN_NAME | tr '-' '_') + + # the first two cases are the "new" default IIRC + FILES=( "src/bin/$BIN_NAME.rs" "src/bin/$BIN_NAME/main.rs" "src/bin/$BIN_NAME_.rs" "src/bin/$BIN_NAME_/main.rs" "src/bin/main.rs" "src/main.rs" ) + + if ! [ -e "$LIB_PATH" -o -e src/lib.rs -o -e "src/$LIB_NAME.rs" ]; then + # if this is not a library the following path is also valid + FILES=( "src/$BIN_NAME.rs" "src/$BIN_NAME_.rs" "${FILES[@]}" ) + fi + + for file in "${FILES[@]}"; + do + echo "checking file $file" + # first file that exists wins + if [[ -e "$file" ]]; then + BIN_PATH="$file" + break + fi + done + + if [[ -z "$BIN_PATH" ]]; then + echo_error "ERROR: failed to find file for binary target: $BIN_NAME" >&2 + exit 1 + fi +} + +# Extracts cargo_toml_path of the matching crate. +matching_cargo_toml_path() { + local manifest_path="$1" + local expected_crate_name="$2" + + # If the Cargo.toml is not a workspace root, + # it will only contain one package in ".packages" + # because "--no-deps" suppressed dependency resolution. + # + # But to make it more general, we search for a matching + # crate in all packages and use the manifest path that + # is referenced there. 
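+  #
+  # Illustrative sketch (hypothetical workspace layout): for a workspace with
+  # members "foo" and "bar",
+  #
+  #   matching_cargo_toml_path ./Cargo.toml foo
+  #
+  # would print something like .../crates/foo/Cargo.toml, i.e. the
+  # manifest_path that `cargo metadata --no-deps` records for the package
+  # named "foo".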
+ cargo metadata --no-deps --format-version 1 \ + --manifest-path "$manifest_path" \ + | jq -r '.packages[] + | select( .name == "'$expected_crate_name'") + | .manifest_path' +} + +# Find a Cargo.toml in the current or any sub directory +# with a matching crate name. +matching_cargo_toml_dir() { + local expected_crate_name="$1" + + find -L -name Cargo.toml | sort | while read manifest_path; do + echo "...checking manifest_path $manifest_path" >&2 + local matching_path="$(matching_cargo_toml_path "$manifest_path" "$expected_crate_name")" + if [ -n "${matching_path}" ]; then + echo "$(dirname $matching_path)" + break + fi + done +} diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/log.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/log.nix new file mode 100644 index 000000000000..9054815f4a1b --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/log.nix @@ -0,0 +1,59 @@ +{ lib }: + +let echo_colored_body = start_escape: + # Body of a function that behaves like "echo" but + # has the output colored by the given start_escape + # sequence. E.g. + # + # * echo_x "Building ..." + # * echo_x -n "Running " + # + # This is more complicated than apparent at first sight + # because: + # * The color markers and the text must be print + # in the same echo statement. Otherise, other + # intermingled text from concurrent builds will + # be colored as well. + # * We need to preserve the trailing newline of the + # echo if and only if it is present. Bash likes + # to strip those if we capture the output of echo + # in a variable. + # * Leading "-" will be interpreted by test as an + # option for itself. Therefore, we prefix it with + # an x in `[[ "x$1" =~ ^x- ]]`. + '' + local echo_args=""; + while [[ "x$1" =~ ^x- ]]; do + echo_args+=" $1" + shift + done + + local start_escape="$(printf '${start_escape}')" + local reset="$(printf '\033[0m')" + echo $echo_args $start_escape"$@"$reset + ''; + echo_conditional_colored_body = colors: start_escape: + if colors == "always" + then (echo_colored_body start_escape) + else ''echo "$@"''; +in { + echo_colored = colors: '' + echo_colored() { + ${echo_conditional_colored_body colors ''\033[0;1;32m''} + } + + echo_error() { + ${echo_conditional_colored_body colors ''\033[0;1;31m''} + } + ''; + + noisily = colors: verbose: '' + noisily() { + ${lib.optionalString verbose '' + echo_colored -n "Running " + echo $@ + ''} + $@ + } + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/brotli-crates.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/brotli-crates.nix new file mode 100644 index 000000000000..4831c1062715 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/brotli-crates.nix @@ -0,0 +1,95 @@ +{ lib, stdenv, buildRustCrate, fetchgit }: +let kernel = stdenv.buildPlatform.parsed.kernel.name; + abi = stdenv.buildPlatform.parsed.abi.name; + include = includedFiles: src: builtins.filterSource (path: type: + lib.lists.any (f: + let p = toString (src + ("/" + f)); in + (path == p) || (type == "directory" && lib.strings.hasPrefix path p) + ) includedFiles + ) src; + updateFeatures = f: up: functions: builtins.deepSeq f (lib.lists.foldl' (features: fun: fun features) (lib.attrsets.recursiveUpdate f up) functions); + mapFeatures = features: map (fun: fun { features = features; }); + mkFeatures = feat: lib.lists.foldl (features: featureName: + if feat.${featureName} or false then + [ featureName ] ++ features + else + features + ) [] (builtins.attrNames feat); +in +rec { + 
alloc_no_stdlib_1_3_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "alloc-no-stdlib"; + version = "1.3.0"; + authors = [ "Daniel Reiter Horn <danielrh@dropbox.com>" ]; + sha256 = "1jcp27pzmqdszgp80y484g4kwbjbg7x8a589drcwbxg0i8xwkir9"; + crateBin = [ { name = "example"; } ]; + inherit dependencies buildDependencies features; + }; + brotli_2_5_0_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "brotli"; + version = "2.5.0"; + authors = [ "Daniel Reiter Horn <danielrh@dropbox.com>" "The Brotli Authors" ]; + sha256 = "1ynw4hkdwnp0kj30p86ls44ahv4s99258s019bqrq4mya8hlsb5b"; + crateBin = [ { name = "brotli"; } ]; + inherit dependencies buildDependencies features; + }; + brotli_decompressor_1_3_1_ = { dependencies?[], buildDependencies?[], features?[] }: buildRustCrate { + crateName = "brotli-decompressor"; + version = "1.3.1"; + authors = [ "Daniel Reiter Horn <danielrh@dropbox.com>" "The Brotli Authors" ]; + sha256 = "022g69q1xzwdj0130qm3fa4qwpn4q1jx3lc8yz0v0v201p7bm8fb"; + crateBin = [ { name = "brotli-decompressor"; } ]; + inherit dependencies buildDependencies features; + }; + alloc_no_stdlib_1_3_0 = { features?(alloc_no_stdlib_1_3_0_features {}) }: alloc_no_stdlib_1_3_0_ { + features = mkFeatures (features.alloc_no_stdlib_1_3_0 or {}); + }; + alloc_no_stdlib_1_3_0_features = f: updateFeatures f ({ + alloc_no_stdlib_1_3_0.default = (f.alloc_no_stdlib_1_3_0.default or true); + }) []; + brotli_2_5_0 = { features?(brotli_2_5_0_features {}) }: brotli_2_5_0_ { + dependencies = mapFeatures features ([ alloc_no_stdlib_1_3_0 brotli_decompressor_1_3_1 ]); + features = mkFeatures (features.brotli_2_5_0 or {}); + }; + brotli_2_5_0_features = f: updateFeatures f (rec { + alloc_no_stdlib_1_3_0.no-stdlib = + (f.alloc_no_stdlib_1_3_0.no-stdlib or false) || + (brotli_2_5_0.no-stdlib or false) || + (f.brotli_2_5_0.no-stdlib or false); + alloc_no_stdlib_1_3_0.default = true; + brotli_2_5_0.default = (f.brotli_2_5_0.default or true); + brotli_decompressor_1_3_1.disable-timer = + (f.brotli_decompressor_1_3_1.disable-timer or false) || + (brotli_2_5_0.disable-timer or false) || + (f.brotli_2_5_0.disable-timer or false); + brotli_decompressor_1_3_1.no-stdlib = + (f.brotli_decompressor_1_3_1.no-stdlib or false) || + (brotli_2_5_0.no-stdlib or false) || + (f.brotli_2_5_0.no-stdlib or false); + brotli_decompressor_1_3_1.benchmark = + (f.brotli_decompressor_1_3_1.benchmark or false) || + (brotli_2_5_0.benchmark or false) || + (f.brotli_2_5_0.benchmark or false); + brotli_decompressor_1_3_1.default = true; + brotli_decompressor_1_3_1.seccomp = + (f.brotli_decompressor_1_3_1.seccomp or false) || + (brotli_2_5_0.seccomp or false) || + (f.brotli_2_5_0.seccomp or false); + }) [ alloc_no_stdlib_1_3_0_features brotli_decompressor_1_3_1_features ]; + brotli_decompressor_1_3_1 = { features?(brotli_decompressor_1_3_1_features {}) }: brotli_decompressor_1_3_1_ { + dependencies = mapFeatures features ([ alloc_no_stdlib_1_3_0 ]); + features = mkFeatures (features.brotli_decompressor_1_3_1 or {}); + }; + brotli_decompressor_1_3_1_features = f: updateFeatures f (rec { + alloc_no_stdlib_1_3_0.no-stdlib = + (f.alloc_no_stdlib_1_3_0.no-stdlib or false) || + (brotli_decompressor_1_3_1.no-stdlib or false) || + (f.brotli_decompressor_1_3_1.no-stdlib or false); + alloc_no_stdlib_1_3_0.default = true; + alloc_no_stdlib_1_3_0.unsafe = + (f.alloc_no_stdlib_1_3_0.unsafe or false) || + (brotli_decompressor_1_3_1.unsafe or false) || + 
(f.brotli_decompressor_1_3_1.unsafe or false); + brotli_decompressor_1_3_1.default = (f.brotli_decompressor_1_3_1.default or true); + }) [ alloc_no_stdlib_1_3_0_features ]; +} diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/default.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/default.nix new file mode 100644 index 000000000000..1ecef4c8e327 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/default.nix @@ -0,0 +1,671 @@ +{ lib +, buildPackages +, buildRustCrate +, callPackage +, releaseTools +, runCommand +, runCommandCC +, stdenv +, symlinkJoin +, writeTextFile +}: + +let + mkCrate = buildRustCrate: args: let + p = { + crateName = "nixtestcrate"; + version = "0.1.0"; + authors = [ "Test <test@example.com>" ]; + } // args; + in buildRustCrate p; + mkHostCrate = mkCrate buildRustCrate; + + mkCargoToml = + { name, crateVersion ? "0.1.0", path ? "Cargo.toml" }: + mkFile path '' + [package] + name = ${builtins.toJSON name} + version = ${builtins.toJSON crateVersion} + ''; + + mkFile = destination: text: writeTextFile { + name = "src"; + destination = "/${destination}"; + inherit text; + }; + + mkBin = name: mkFile name '' + use std::env; + fn main() { + let name: String = env::args().nth(0).unwrap(); + println!("executed {}", name); + } + ''; + + mkBinExtern = name: extern: mkFile name '' + extern crate ${extern}; + fn main() { + assert_eq!(${extern}::test(), 23); + } + ''; + + mkTestFile = name: functionName: mkFile name '' + #[cfg(test)] + #[test] + fn ${functionName}() { + assert!(true); + } + ''; + mkTestFileWithMain = name: functionName: mkFile name '' + #[cfg(test)] + #[test] + fn ${functionName}() { + assert!(true); + } + + fn main() {} + ''; + + + mkLib = name: mkFile name "pub fn test() -> i32 { return 23; }"; + + mkTest = crateArgs: let + crate = mkHostCrate (builtins.removeAttrs crateArgs ["expectedTestOutput"]); + hasTests = crateArgs.buildTests or false; + expectedTestOutputs = crateArgs.expectedTestOutputs or null; + binaries = map (v: lib.escapeShellArg v.name) (crateArgs.crateBin or []); + isLib = crateArgs ? libName || crateArgs ? libPath; + crateName = crateArgs.crateName or "nixtestcrate"; + libName = crateArgs.libName or crateName; + + libTestBinary = if !isLib then null else mkHostCrate { + crateName = "run-test-${crateName}"; + dependencies = [ crate ]; + src = mkBinExtern "src/main.rs" libName; + }; + + in + assert expectedTestOutputs != null -> hasTests; + assert hasTests -> expectedTestOutputs != null; + + runCommand "run-buildRustCrate-${crateName}-test" { + nativeBuildInputs = [ crate ]; + } (if !hasTests then '' + ${lib.concatMapStringsSep "\n" (binary: + # Can't actually run the binary when cross-compiling + (lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) "type ") + binary + ) binaries} + ${lib.optionalString isLib '' + test -e ${crate}/lib/*.rlib || exit 1 + ${lib.optionalString (stdenv.hostPlatform != stdenv.buildPlatform) "test -x "} \ + ${libTestBinary}/bin/run-test-${crateName} + ''} + touch $out + '' else if stdenv.hostPlatform == stdenv.buildPlatform then '' + for file in ${crate}/tests/*; do + $file 2>&1 >> $out + done + set -e + ${lib.concatMapStringsSep "\n" (o: "grep '${o}' $out || { echo 'output \"${o}\" not found in:'; cat $out; exit 23; }") expectedTestOutputs} + '' else '' + for file in ${crate}/tests/*; do + test -x "$file" + done + touch "$out" + '' + ); + + /* Returns a derivation that asserts that the crate specified by `crateArgs` + has the specified files as output. 
+ + `name` is used as part of the derivation name that performs the checking. + + `crateArgs` is passed to `mkHostCrate` to build the crate with `buildRustCrate`. + + `expectedFiles` contains a list of expected file paths in the output. E.g. + `[ "./bin/my_binary" ]`. + + `output` specifies the name of the output to use. By default, the default + output is used but e.g. `output = "lib";` will cause the lib output + to be checked instead. You do not need to specify any directories. + */ + assertOutputs = { name, crateArgs, expectedFiles, output? null }: + assert (builtins.isString name); + assert (builtins.isAttrs crateArgs); + assert (builtins.isList expectedFiles); + + let + crate = mkHostCrate (builtins.removeAttrs crateArgs ["expectedTestOutput"]); + crateOutput = if output == null then crate else crate."${output}"; + expectedFilesFile = writeTextFile { + name = "expected-files-${name}"; + text = + let sorted = builtins.sort (a: b: a<b) expectedFiles; + concatenated = builtins.concatStringsSep "\n" sorted; + in "${concatenated}\n"; + }; + in + runCommand "assert-outputs-${name}" { + } ('' + local actualFiles=$(mktemp) + + cd "${crateOutput}" + find . -type f \ + | sort \ + '' + # sed out the hash because it differs per platform + + '' + | sed -E -e 's/-[0-9a-fA-F]{10}\.rlib/-HASH.rlib/g' \ + > "$actualFiles" + diff -q ${expectedFilesFile} "$actualFiles" > /dev/null || { + echo -e "\033[0;1;31mERROR: Difference in expected output files in ${crateOutput} \033[0m" >&2 + echo === Got: + sed -e 's/^/ /' $actualFiles + echo === Expected: + sed -e 's/^/ /' ${expectedFilesFile} + echo === Diff: + diff -u ${expectedFilesFile} $actualFiles |\ + tail -n +3 |\ + sed -e 's/^/ /' + exit 1 + } + touch $out + '') + ; + + in rec { + + tests = let + cases = rec { + libPath = { libPath = "src/my_lib.rs"; src = mkLib "src/my_lib.rs"; }; + srcLib = { src = mkLib "src/lib.rs"; }; + + # This used to be supported by cargo but as of 1.40.0 I can't make it work like that with just cargo anymore. + # This might be a regression or deprecated thing they finally removed… + # customLibName = { libName = "test_lib"; src = mkLib "src/test_lib.rs"; }; + # rustLibTestsCustomLibName = { + # libName = "test_lib"; + # src = mkTestFile "src/test_lib.rs" "foo"; + # buildTests = true; + # expectedTestOutputs = [ "test foo ... 
ok" ]; + # }; + + customLibNameAndLibPath = { libName = "test_lib"; libPath = "src/best-lib.rs"; src = mkLib "src/best-lib.rs"; }; + crateBinWithPath = { crateBin = [{ name = "test_binary1"; path = "src/foobar.rs"; }]; src = mkBin "src/foobar.rs"; }; + crateBinNoPath1 = { crateBin = [{ name = "my-binary2"; }]; src = mkBin "src/my_binary2.rs"; }; + crateBinNoPath2 = { + crateBin = [{ name = "my-binary3"; } { name = "my-binary4"; }]; + src = symlinkJoin { + name = "buildRustCrateMultipleBinariesCase"; + paths = [ (mkBin "src/bin/my_binary3.rs") (mkBin "src/bin/my_binary4.rs") ]; + }; + }; + crateBinNoPath3 = { crateBin = [{ name = "my-binary5"; }]; src = mkBin "src/bin/main.rs"; }; + crateBinNoPath4 = { crateBin = [{ name = "my-binary6"; }]; src = mkBin "src/main.rs";}; + crateBinRename1 = { + crateBin = [{ name = "my-binary-rename1"; }]; + src = mkBinExtern "src/main.rs" "foo_renamed"; + dependencies = [ (mkHostCrate { crateName = "foo"; src = mkLib "src/lib.rs"; }) ]; + crateRenames = { "foo" = "foo_renamed"; }; + }; + crateBinRename2 = { + crateBin = [{ name = "my-binary-rename2"; }]; + src = mkBinExtern "src/main.rs" "foo_renamed"; + dependencies = [ (mkHostCrate { crateName = "foo"; libName = "foolib"; src = mkLib "src/lib.rs"; }) ]; + crateRenames = { "foo" = "foo_renamed"; }; + }; + crateBinRenameMultiVersion = let + crateWithVersion = version: mkHostCrate { + crateName = "my_lib"; + inherit version; + src = mkFile "src/lib.rs" '' + pub const version: &str = "${version}"; + ''; + }; + depCrate01 = crateWithVersion "0.1.2"; + depCrate02 = crateWithVersion "0.2.1"; + in { + crateName = "my_bin"; + src = symlinkJoin { + name = "my_bin_src"; + paths = [ + (mkFile "src/main.rs" '' + #[test] + fn my_lib_01() { assert_eq!(lib01::version, "0.1.2"); } + + #[test] + fn my_lib_02() { assert_eq!(lib02::version, "0.2.1"); } + + fn main() { } + '') + ]; + }; + dependencies = [ depCrate01 depCrate02 ]; + crateRenames = { + "my_lib" = [ + { + version = "0.1.2"; + rename = "lib01"; + } + { + version = "0.2.1"; + rename = "lib02"; + } + ]; + }; + buildTests = true; + expectedTestOutputs = [ + "test my_lib_01 ... ok" + "test my_lib_02 ... ok" + ]; + }; + rustLibTestsDefault = { + src = mkTestFile "src/lib.rs" "baz"; + buildTests = true; + expectedTestOutputs = [ "test baz ... ok" ]; + }; + rustLibTestsCustomLibPath = { + libPath = "src/test_path.rs"; + src = mkTestFile "src/test_path.rs" "bar"; + buildTests = true; + expectedTestOutputs = [ "test bar ... ok" ]; + }; + rustLibTestsCustomLibPathWithTests = { + libPath = "src/test_path.rs"; + src = symlinkJoin { + name = "rust-lib-tests-custom-lib-path-with-tests-dir"; + paths = [ + (mkTestFile "src/test_path.rs" "bar") + (mkTestFile "tests/something.rs" "something") + ]; + }; + buildTests = true; + expectedTestOutputs = [ + "test bar ... ok" + "test something ... ok" + ]; + }; + rustBinTestsCombined = { + src = symlinkJoin { + name = "rust-bin-tests-combined"; + paths = [ + (mkTestFileWithMain "src/main.rs" "src_main") + (mkTestFile "tests/foo.rs" "tests_foo") + (mkTestFile "tests/bar.rs" "tests_bar") + ]; + }; + buildTests = true; + expectedTestOutputs = [ + "test src_main ... ok" + "test tests_foo ... ok" + "test tests_bar ... 
ok" + ]; + }; + rustBinTestsSubdirCombined = { + src = symlinkJoin { + name = "rust-bin-tests-subdir-combined"; + paths = [ + (mkTestFileWithMain "src/main.rs" "src_main") + (mkTestFile "tests/foo/main.rs" "tests_foo") + (mkTestFile "tests/bar/main.rs" "tests_bar") + ]; + }; + buildTests = true; + expectedTestOutputs = [ + "test src_main ... ok" + "test tests_foo ... ok" + "test tests_bar ... ok" + ]; + }; + linkAgainstRlibCrate = { + crateName = "foo"; + src = mkFile "src/main.rs" '' + extern crate somerlib; + fn main() {} + ''; + dependencies = [ + (mkHostCrate { + crateName = "somerlib"; + type = [ "rlib" ]; + src = mkLib "src/lib.rs"; + }) + ]; + }; + buildScriptDeps = let + depCrate = buildRustCrate: boolVal: mkCrate buildRustCrate { + crateName = "bar"; + src = mkFile "src/lib.rs" '' + pub const baz: bool = ${boolVal}; + ''; + }; + in { + crateName = "foo"; + src = symlinkJoin { + name = "build-script-and-main"; + paths = [ + (mkFile "src/main.rs" '' + extern crate bar; + #[cfg(test)] + #[test] + fn baz_false() { assert!(!bar::baz); } + fn main() { } + '') + (mkFile "build.rs" '' + extern crate bar; + fn main() { assert!(bar::baz); } + '') + ]; + }; + buildDependencies = [ (depCrate buildPackages.buildRustCrate "true") ]; + dependencies = [ (depCrate buildRustCrate "false") ]; + buildTests = true; + expectedTestOutputs = [ "test baz_false ... ok" ]; + }; + buildScriptFeatureEnv = { + crateName = "build-script-feature-env"; + features = [ "some-feature" "crate/another_feature" ]; + src = symlinkJoin { + name = "build-script-feature-env"; + paths = [ + (mkFile "src/main.rs" '' + #[cfg(test)] + #[test] + fn feature_not_visible() { + assert!(std::env::var("CARGO_FEATURE_SOME_FEATURE").is_err()); + assert!(option_env!("CARGO_FEATURE_SOME_FEATURE").is_none()); + } + fn main() {} + '') + (mkFile "build.rs" '' + fn main() { + assert!(std::env::var("CARGO_FEATURE_SOME_FEATURE").is_ok()); + assert!(option_env!("CARGO_FEATURE_SOME_FEATURE").is_none()); + } + '') + ]; + }; + buildTests = true; + expectedTestOutputs = [ "test feature_not_visible ... ok" ]; + }; + # Regression test for https://github.com/NixOS/nixpkgs/pull/88054 + # Build script output should be rewritten as valid env vars. 
+ buildScriptIncludeDirDeps = let + depCrate = mkHostCrate { + crateName = "bar"; + src = symlinkJoin { + name = "build-script-and-include-dir-bar"; + paths = [ + (mkFile "src/lib.rs" '' + fn main() { } + '') + (mkFile "build.rs" '' + use std::path::PathBuf; + fn main() { println!("cargo:include-dir={}/src", std::env::current_dir().unwrap_or(PathBuf::from(".")).to_str().unwrap()); } + '') + ]; + }; + }; + in { + crateName = "foo"; + src = symlinkJoin { + name = "build-script-and-include-dir-foo"; + paths = [ + (mkFile "src/main.rs" '' + fn main() { } + '') + (mkFile "build.rs" '' + fn main() { assert!(std::env::var_os("DEP_BAR_INCLUDE_DIR").is_some()); } + '') + ]; + }; + buildDependencies = [ depCrate ]; + dependencies = [ depCrate ]; + }; + # Regression test for https://github.com/NixOS/nixpkgs/issues/74071 + # Whenevever a build.rs file is generating files those should not be overlayed onto the actual source dir + buildRsOutDirOverlay = { + src = symlinkJoin { + name = "buildrs-out-dir-overlay"; + paths = [ + (mkLib "src/lib.rs") + (mkFile "build.rs" '' + use std::env; + use std::ffi::OsString; + use std::fs; + use std::path::Path; + fn main() { + let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR not set"); + let out_file = Path::new(&out_dir).join("lib.rs"); + fs::write(out_file, "invalid rust code!").expect("failed to write lib.rs"); + } + '') + ]; + }; + }; + # Regression test for https://github.com/NixOS/nixpkgs/pull/83379 + # link flag order should be preserved + linkOrder = { + src = symlinkJoin { + name = "buildrs-out-dir-overlay"; + paths = [ + (mkFile "build.rs" '' + fn main() { + // in the other order, linkage will fail + println!("cargo:rustc-link-lib=b"); + println!("cargo:rustc-link-lib=a"); + } + '') + (mkFile "src/main.rs" '' + extern "C" { + fn hello_world(); + } + fn main() { + unsafe { + hello_world(); + } + } + '') + ]; + }; + buildInputs = let + compile = name: text: let + src = writeTextFile { + name = "${name}-src.c"; + inherit text; + }; + in runCommandCC name {} '' + mkdir -p $out/lib + # Note: On darwin (which defaults to clang) we have to add + # `-undefined dynamic_lookup` as otherwise the compilation fails. + $CC -shared \ + ${lib.optionalString stdenv.isDarwin "-undefined dynamic_lookup"} \ + -o $out/lib/${name}${stdenv.hostPlatform.extensions.sharedLibrary} ${src} + ''; + b = compile "libb" '' + #include <stdio.h> + + void hello(); + + void hello_world() { + hello(); + printf(" world!\n"); + } + ''; + a = compile "liba" '' + #include <stdio.h> + + void hello() { + printf("hello"); + } + ''; + in [ a b ]; + }; + rustCargoTomlInSubDir = { + # The "workspace_member" can be set to the sub directory with the crate to build. + # By default ".", meaning the top level directory is assumed. + # Using null will trigger a search. + workspace_member = null; + src = symlinkJoin rec { + name = "find-cargo-toml"; + paths = [ + (mkCargoToml { name = "ignoreMe"; }) + (mkTestFileWithMain "src/main.rs" "ignore_main") + + (mkCargoToml { name = "rustCargoTomlInSubDir"; path = "subdir/Cargo.toml"; }) + (mkTestFileWithMain "subdir/src/main.rs" "src_main") + (mkTestFile "subdir/tests/foo/main.rs" "tests_foo") + (mkTestFile "subdir/tests/bar/main.rs" "tests_bar") + ]; + }; + buildTests = true; + expectedTestOutputs = [ + "test src_main ... ok" + "test tests_foo ... ok" + "test tests_bar ... 
ok" + ]; + }; + + rustCargoTomlInTopDir = + let + withoutCargoTomlSearch = builtins.removeAttrs rustCargoTomlInSubDir [ "workspace_member" ]; + in + withoutCargoTomlSearch // { + expectedTestOutputs = [ + "test ignore_main ... ok" + ]; + }; + procMacroInPrelude = { + procMacro = true; + edition = "2018"; + src = symlinkJoin { + name = "proc-macro-in-prelude"; + paths = [ + (mkFile "src/lib.rs" '' + use proc_macro::TokenTree; + '') + ]; + }; + }; + }; + brotliCrates = (callPackage ./brotli-crates.nix {}); + rcgenCrates = callPackage ./rcgen-crates.nix { + # Suppress deprecation warning + buildRustCrate = null; + }; + tests = lib.mapAttrs (key: value: mkTest (value // lib.optionalAttrs (!value?crateName) { crateName = key; })) cases; + in tests // rec { + + crateBinWithPathOutputs = assertOutputs { + name="crateBinWithPath"; + crateArgs = { + crateBin = [{ name = "test_binary1"; path = "src/foobar.rs"; }]; + src = mkBin "src/foobar.rs"; + }; + expectedFiles = [ + "./bin/test_binary1" + ]; + }; + + crateBinWithPathOutputsDebug = assertOutputs { + name="crateBinWithPath"; + crateArgs = { + release = false; + crateBin = [{ name = "test_binary1"; path = "src/foobar.rs"; }]; + src = mkBin "src/foobar.rs"; + }; + expectedFiles = [ + "./bin/test_binary1" + ] ++ lib.optionals stdenv.isDarwin [ + # On Darwin, the debug symbols are in a separate directory. + "./bin/test_binary1.dSYM/Contents/Info.plist" + "./bin/test_binary1.dSYM/Contents/Resources/DWARF/test_binary1" + ]; + }; + + crateBinNoPath1Outputs = assertOutputs { + name="crateBinNoPath1"; + crateArgs = { + crateBin = [{ name = "my-binary2"; }]; + src = mkBin "src/my_binary2.rs"; + }; + expectedFiles = [ + "./bin/my-binary2" + ]; + }; + + crateLibOutputs = assertOutputs { + name="crateLib"; + output="lib"; + crateArgs = { + libName = "test_lib"; + type = [ "rlib" ]; + libPath = "src/lib.rs"; + src = mkLib "src/lib.rs"; + }; + expectedFiles = [ + "./nix-support/propagated-build-inputs" + "./lib/libtest_lib-HASH.rlib" + "./lib/link" + ]; + }; + + crateLibOutputsDebug = assertOutputs { + name="crateLib"; + output="lib"; + crateArgs = { + release = false; + libName = "test_lib"; + type = [ "rlib" ]; + libPath = "src/lib.rs"; + src = mkLib "src/lib.rs"; + }; + expectedFiles = [ + "./nix-support/propagated-build-inputs" + "./lib/libtest_lib-HASH.rlib" + "./lib/link" + ]; + }; + + brotliTest = let + pkg = brotliCrates.brotli_2_5_0 {}; + in runCommand "run-brotli-test-cmd" { + nativeBuildInputs = [ pkg ]; + } (if stdenv.hostPlatform == stdenv.buildPlatform then '' + ${pkg}/bin/brotli -c ${pkg}/bin/brotli > /dev/null && touch $out + '' else '' + test -x '${pkg}/bin/brotli' && touch $out + ''); + allocNoStdLibTest = let + pkg = brotliCrates.alloc_no_stdlib_1_3_0 {}; + in runCommand "run-alloc-no-stdlib-test-cmd" { + nativeBuildInputs = [ pkg ]; + } '' + test -e ${pkg}/bin/example && touch $out + ''; + brotliDecompressorTest = let + pkg = brotliCrates.brotli_decompressor_1_3_1 {}; + in runCommand "run-brotli-decompressor-test-cmd" { + nativeBuildInputs = [ pkg ]; + } '' + test -e ${pkg}/bin/brotli-decompressor && touch $out + ''; + + rcgenTest = let + pkg = rcgenCrates.rootCrate.build; + in runCommand "run-rcgen-test-cmd" { + nativeBuildInputs = [ pkg ]; + } (if stdenv.hostPlatform == stdenv.buildPlatform then '' + ${pkg}/bin/rcgen && touch $out + '' else '' + test -x '${pkg}/bin/rcgen' && touch $out + ''); + }; + test = releaseTools.aggregate { + name = "buildRustCrate-tests"; + meta = { + description = "Test cases for buildRustCrate"; + maintainers = [ 
]; + }; + constituents = builtins.attrValues tests; + }; +} diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/rcgen-crates.nix b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/rcgen-crates.nix new file mode 100644 index 000000000000..ed273c01d26d --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-crate/test/rcgen-crates.nix @@ -0,0 +1,3494 @@ + +# This file was @generated by crate2nix 0.10.0 with the command: +# "generate" +# See https://github.com/kolloch/crate2nix for more info. + +{ nixpkgs ? <nixpkgs> +, pkgs ? import nixpkgs { config = {}; } +, lib ? pkgs.lib +, stdenv ? pkgs.stdenv +, buildRustCrateForPkgs ? if buildRustCrate != null + then lib.warn "crate2nix: Passing `buildRustCrate` as argument to Cargo.nix is deprecated. If you don't customize `buildRustCrate`, replace `callPackage ./Cargo.nix {}` by `import ./Cargo.nix { inherit pkgs; }`, and if you need to customize `buildRustCrate`, use `buildRustCrateForPkgs` instead." (_: buildRustCrate) + else pkgs: pkgs.buildRustCrate + # Deprecated +, buildRustCrate ? null + # This is used as the `crateOverrides` argument for `buildRustCrate`. +, defaultCrateOverrides ? pkgs.defaultCrateOverrides + # The features to enable for the root_crate or the workspace_members. +, rootFeatures ? [ "default" ] + # If true, throw errors instead of issueing deprecation warnings. +, strictDeprecation ? false + # Used for conditional compilation based on CPU feature detection. +, targetFeatures ? [] + # Whether to perform release builds: longer compile times, faster binaries. +, release ? true + # Additional crate2nix configuration if it exists. +, crateConfig + ? lib.optionalAttrs (builtins.pathExists ./crate-config.nix) (pkgs.callPackage ./crate-config.nix {}) +}: + +rec { + # + # "public" attributes that we attempt to keep stable with new versions of crate2nix. + # + + rootCrate = rec { + packageId = "rcgen"; + + # Use this attribute to refer to the derivation building your root crate package. + # You can override the features with rootCrate.build.override { features = [ "default" "feature1" ... ]; }. + build = internal.buildRustCrateWithFeatures { + inherit packageId; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! + debug = internal.debugCrate { inherit packageId; }; + }; + # Refer your crate build derivation by name here. + # You can override the features with + # workspaceMembers."${crateName}".build.override { features = [ "default" "feature1" ... ]; }. + workspaceMembers = { + "rcgen" = rec { + packageId = "rcgen"; + build = internal.buildRustCrateWithFeatures { + packageId = "rcgen"; + }; + + # Debug support which might change between releases. + # File a bug if you depend on any for non-debug work! + debug = internal.debugCrate { inherit packageId; }; + }; + }; + + # A derivation that joins the outputs of all workspace members together. + allWorkspaceMembers = pkgs.symlinkJoin { + name = "all-workspace-members"; + paths = + let members = builtins.attrValues workspaceMembers; + in builtins.map (m: m.build) members; + }; + + # + # "internal" ("private") attributes that may change in every new version of crate2nix. + # + + internal = rec { + # Build and dependency information for crates. + # Many of the fields are passed one-to-one to buildRustCrate. + # + # Noteworthy: + # * `dependencies`/`buildDependencies`: similar to the corresponding fields for buildRustCrate. 
+ # but with additional information which is used during dependency/feature resolution. + # * `resolvedDependencies`: the selected default features reported by cargo - only included for debugging. + # * `devDependencies` as of now not used by `buildRustCrate` but used to + # inject test dependencies into the build + + crates = { + "asn1-rs" = rec { + crateName = "asn1-rs"; + version = "0.3.1"; + edition = "2018"; + sha256 = "0czsk1nd4dx2k83f7jzkn8klx05wbmblkx1jh51i4c170akhbzrh"; + authors = [ + "Pierre Chifflier <chifflier@wzdftpd.net>" + ]; + dependencies = [ + { + name = "asn1-rs-derive"; + packageId = "asn1-rs-derive"; + } + { + name = "asn1-rs-impl"; + packageId = "asn1-rs-impl"; + } + { + name = "displaydoc"; + packageId = "displaydoc"; + } + { + name = "nom"; + packageId = "nom"; + usesDefaultFeatures = false; + features = [ "std" ]; + } + { + name = "num-traits"; + packageId = "num-traits"; + } + { + name = "rusticata-macros"; + packageId = "rusticata-macros"; + } + { + name = "thiserror"; + packageId = "thiserror"; + } + { + name = "time"; + packageId = "time"; + optional = true; + features = [ "macros" "parsing" "formatting" ]; + } + ]; + features = { + "bigint" = [ "num-bigint" ]; + "bits" = [ "bitvec" ]; + "bitvec" = [ "dep:bitvec" ]; + "cookie-factory" = [ "dep:cookie-factory" ]; + "datetime" = [ "time" ]; + "default" = [ "std" ]; + "num-bigint" = [ "dep:num-bigint" ]; + "serialize" = [ "cookie-factory" ]; + "time" = [ "dep:time" ]; + }; + resolvedDefaultFeatures = [ "datetime" "default" "std" "time" ]; + }; + "asn1-rs-derive" = rec { + crateName = "asn1-rs-derive"; + version = "0.1.0"; + edition = "2018"; + sha256 = "1gzf9vab06lk0zjvbr07axx64fndkng2s28bnj27fnwd548pb2yv"; + procMacro = true; + authors = [ + "Pierre Chifflier <chifflier@wzdftpd.net>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + } + { + name = "quote"; + packageId = "quote"; + } + { + name = "syn"; + packageId = "syn"; + } + { + name = "synstructure"; + packageId = "synstructure"; + } + ]; + + }; + "asn1-rs-impl" = rec { + crateName = "asn1-rs-impl"; + version = "0.1.0"; + edition = "2018"; + sha256 = "1va27bn7qxqp4wanzjlkagnynv6jnrhnwmcky2ahzb1r405p6xr7"; + procMacro = true; + authors = [ + "Pierre Chifflier <chifflier@wzdftpd.net>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + } + { + name = "quote"; + packageId = "quote"; + } + { + name = "syn"; + packageId = "syn"; + } + ]; + + }; + "autocfg 0.1.7" = rec { + crateName = "autocfg"; + version = "0.1.7"; + edition = "2015"; + sha256 = "1chwgimpx5z7xbag7krr9d8asxfqbh683qhgl9kn3hxk2l0djj8x"; + authors = [ + "Josh Stone <cuviper@gmail.com>" + ]; + + }; + "autocfg 1.0.1" = rec { + crateName = "autocfg"; + version = "1.0.1"; + edition = "2015"; + sha256 = "0jj6i9zn4gjl03kjvziqdji6rwx8ykz8zk2ngpc331z2g3fk3c6d"; + authors = [ + "Josh Stone <cuviper@gmail.com>" + ]; + + }; + "base64" = rec { + crateName = "base64"; + version = "0.13.0"; + edition = "2018"; + sha256 = "1z82g23mbzjgijkpcrilc7nljpxpvpf7zxf6iyiapkgka2ngwkch"; + authors = [ + "Alice Maz <alice@alicemaz.com>" + "Marshall Pierce <marshall@mpierce.org>" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "base64ct" = rec { + crateName = "base64ct"; + version = "1.1.1"; + edition = "2018"; + sha256 = "0p4was874qc90q2chm2i14m9mn8zmxjis8vaxihd6a2x4aqxkd76"; + authors = [ + "RustCrypto Developers" + ]; + features = { + "std" = [ "alloc" ]; + }; + }; + "bitflags" = rec { + crateName = 
"bitflags"; + version = "1.3.2"; + edition = "2018"; + sha256 = "12ki6w8gn1ldq7yz9y680llwk5gmrhrzszaa17g1sbrw2r2qvwxy"; + authors = [ + "The Rust Project Developers" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "core" "compiler_builtins" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "botan" = rec { + crateName = "botan"; + version = "0.8.1"; + edition = "2018"; + sha256 = "08bmiyn7c3b0dgx20w6hr28d9jcq7cj78cchr84pc686sb2s41ik"; + authors = [ + "Jack Lloyd <jack@randombit.net>" + ]; + dependencies = [ + { + name = "botan-sys"; + packageId = "botan-sys"; + } + { + name = "cty"; + packageId = "cty"; + } + ]; + features = { + "cstr_core" = [ "dep:cstr_core" ]; + "no-std" = [ "cstr_core/alloc" ]; + "vendored" = [ "botan-sys/vendored" ]; + }; + resolvedDefaultFeatures = [ "default" "vendored" ]; + }; + "botan-src" = rec { + crateName = "botan-src"; + version = "0.21703.0"; + edition = "2018"; + sha256 = "0s2ad9q84qsrllfsbj7hjhn7gr3hab9ng6lwzwqmimia6yvja8y8"; + authors = [ + "Rodolphe Breard <rodolphe@what.tf>" + "Jack Lloyd <jack@randombit.net>" + ]; + + }; + "botan-sys" = rec { + crateName = "botan-sys"; + version = "0.8.1"; + edition = "2015"; + sha256 = "1m11zblxfanrhl97j7z3ap7n17rr8j0rg91sr7f9j6y2bsniaz1x"; + authors = [ + "Jack Lloyd <jack@randombit.net>" + ]; + dependencies = [ + { + name = "cty"; + packageId = "cty"; + } + ]; + buildDependencies = [ + { + name = "botan-src"; + packageId = "botan-src"; + optional = true; + } + ]; + features = { + "botan-src" = [ "dep:botan-src" ]; + "vendored" = [ "botan-src" ]; + }; + resolvedDefaultFeatures = [ "botan-src" "default" "vendored" ]; + }; + "bumpalo" = rec { + crateName = "bumpalo"; + version = "3.9.1"; + edition = "2018"; + sha256 = "1688dv6s0cbj72p9lmll8a02a85dzxvdw2is7pji490zmd35m954"; + authors = [ + "Nick Fitzgerald <fitzgen@gmail.com>" + ]; + features = { + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "byteorder" = rec { + crateName = "byteorder"; + version = "1.4.3"; + edition = "2018"; + sha256 = "0456lv9xi1a5bcm32arknf33ikv76p3fr9yzki4lb2897p2qkh8l"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + ]; + features = { + "default" = [ "std" ]; + }; + }; + "cc" = rec { + crateName = "cc"; + version = "1.0.72"; + edition = "2018"; + crateBin = []; + sha256 = "1vl50h2qh0nh0iddzj6gd1pnxnxpvwmbfxc30578c1pajmxi7a92"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + features = { + "jobserver" = [ "dep:jobserver" ]; + "parallel" = [ "jobserver" ]; + }; + }; + "cfg-if" = rec { + crateName = "cfg-if"; + version = "1.0.0"; + edition = "2018"; + sha256 = "1za0vb97n4brpzpv8lsbnzmq5r8f2b0cpqqr0sy8h5bn751xxwds"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "rustc-dep-of-std" = [ "core" "compiler_builtins" ]; + }; + }; + "const-oid" = rec { + crateName = "const-oid"; + version = "0.6.2"; + edition = "2018"; + sha256 = "12vv7csqqjj0x1l5mf51lgqiw76k5c3mb1yzfhfcqysks2j2lvwx"; + authors = [ + "RustCrypto Developers" + ]; + features = { + }; + }; + "crypto-bigint" = rec { + crateName = "crypto-bigint"; + version = "0.2.11"; + edition = "2018"; + sha256 = "00qckh65nzb7s7vd60wylw6alxf9g37xh31lirb1qw0l8fxx6fzq"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "generic-array"; + packageId = "generic-array"; + optional = true; + } + { + name = "rand_core"; + packageId = "rand_core"; + optional = true; + } 
+ { + name = "subtle"; + packageId = "subtle"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "rand" ]; + "generic-array" = [ "dep:generic-array" ]; + "rand" = [ "rand_core" ]; + "rand_core" = [ "dep:rand_core" ]; + "rlp" = [ "dep:rlp" ]; + "zeroize" = [ "dep:zeroize" ]; + }; + resolvedDefaultFeatures = [ "default" "generic-array" "rand" "rand_core" ]; + }; + "cty" = rec { + crateName = "cty"; + version = "0.2.2"; + edition = "2015"; + sha256 = "0d8z0pbr87wgzqqb2jk5pvj0afzc6d3rb772ach6fijhg6yglrdk"; + authors = [ + "Jorge Aparicio <jorge@japaric.io>" + ]; + + }; + "data-encoding" = rec { + crateName = "data-encoding"; + version = "2.3.2"; + edition = "2018"; + sha256 = "0mvd8bjq5mq50fcf931cff57vwmbsvs1kpxynkzrshli98y3kqiy"; + authors = [ + "Julien Cretin <git@ia0.eu>" + ]; + features = { + "default" = [ "std" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "std" ]; + }; + "der" = rec { + crateName = "der"; + version = "0.4.5"; + edition = "2018"; + sha256 = "1x4k0jln8va1657cghl40l6p7hyvr1ixz71v9cd6imwmgp51rdvr"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "const-oid"; + packageId = "const-oid"; + optional = true; + } + { + name = "crypto-bigint"; + packageId = "crypto-bigint"; + optional = true; + features = [ "generic-array" ]; + } + ]; + features = { + "bigint" = [ "crypto-bigint" ]; + "const-oid" = [ "dep:const-oid" ]; + "crypto-bigint" = [ "dep:crypto-bigint" ]; + "der_derive" = [ "dep:der_derive" ]; + "derive" = [ "der_derive" ]; + "oid" = [ "const-oid" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "bigint" "const-oid" "crypto-bigint" "oid" "std" ]; + }; + "der-parser" = rec { + crateName = "der-parser"; + version = "7.0.0"; + edition = "2018"; + sha256 = "10kfa2gzl3x20mwgrd43cyi79xgkqxyzcyrh0xylv4apa33qlfgy"; + authors = [ + "Pierre Chifflier <chifflier@wzdftpd.net>" + ]; + dependencies = [ + { + name = "asn1-rs"; + packageId = "asn1-rs"; + } + { + name = "displaydoc"; + packageId = "displaydoc"; + usesDefaultFeatures = false; + } + { + name = "nom"; + packageId = "nom"; + } + { + name = "num-bigint"; + packageId = "num-bigint"; + optional = true; + } + { + name = "num-traits"; + packageId = "num-traits"; + } + { + name = "rusticata-macros"; + packageId = "rusticata-macros"; + } + ]; + features = { + "bigint" = [ "num-bigint" ]; + "cookie-factory" = [ "dep:cookie-factory" ]; + "default" = [ "std" ]; + "num-bigint" = [ "dep:num-bigint" ]; + "serialize" = [ "std" "cookie-factory" ]; + }; + resolvedDefaultFeatures = [ "bigint" "default" "num-bigint" "std" ]; + }; + "digest" = rec { + crateName = "digest"; + version = "0.9.0"; + edition = "2018"; + sha256 = "0rmhvk33rgvd6ll71z8sng91a52rw14p0drjn1da0mqa138n1pfk"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "generic-array"; + packageId = "generic-array"; + } + ]; + features = { + "blobby" = [ "dep:blobby" ]; + "dev" = [ "blobby" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "std" ]; + }; + "displaydoc" = rec { + crateName = "displaydoc"; + version = "0.2.3"; + edition = "2018"; + sha256 = "11i8p5snlc1hs4g5q3wiyr75dn276l6kr0si5m7xmfa6y31mvy9v"; + procMacro = true; + authors = [ + "Jane Lusby <jlusby@yaah.dev>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + } + { + name = "quote"; + packageId = "quote"; + } + { + name = "syn"; + packageId = "syn"; + } + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" 
"std" ]; + }; + "foreign-types" = rec { + crateName = "foreign-types"; + version = "0.3.2"; + edition = "2015"; + sha256 = "1cgk0vyd7r45cj769jym4a6s7vwshvd0z4bqrb92q1fwibmkkwzn"; + authors = [ + "Steven Fackler <sfackler@gmail.com>" + ]; + dependencies = [ + { + name = "foreign-types-shared"; + packageId = "foreign-types-shared"; + } + ]; + + }; + "foreign-types-shared" = rec { + crateName = "foreign-types-shared"; + version = "0.1.1"; + edition = "2015"; + sha256 = "0jxgzd04ra4imjv8jgkmdq59kj8fsz6w4zxsbmlai34h26225c00"; + authors = [ + "Steven Fackler <sfackler@gmail.com>" + ]; + + }; + "generic-array" = rec { + crateName = "generic-array"; + version = "0.14.5"; + edition = "2015"; + sha256 = "00qqhls43bzvyb7s26iw6knvsz3mckbxl3rhaahvypzhqwzd6j7x"; + libName = "generic_array"; + authors = [ + "Bartłomiej Kamiński <fizyk20@gmail.com>" + "Aaron Trent <novacrazy@gmail.com>" + ]; + dependencies = [ + { + name = "typenum"; + packageId = "typenum"; + } + ]; + buildDependencies = [ + { + name = "version_check"; + packageId = "version_check"; + } + ]; + features = { + "serde" = [ "dep:serde" ]; + }; + }; + "getrandom" = rec { + crateName = "getrandom"; + version = "0.2.4"; + edition = "2018"; + sha256 = "0k0bdr1dyf4n9fvnkx4fmwxhv4hgnyf55gj86v4m69fln743g3a1"; + authors = [ + "The Rand Project Developers" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + target = { target, features }: (target."unix" or false); + } + { + name = "wasi"; + packageId = "wasi"; + target = { target, features }: (target."os" == "wasi"); + } + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "js" = [ "wasm-bindgen" "js-sys" ]; + "js-sys" = [ "dep:js-sys" ]; + "rustc-dep-of-std" = [ "compiler_builtins" "core" "libc/rustc-dep-of-std" "wasi/rustc-dep-of-std" ]; + "wasm-bindgen" = [ "dep:wasm-bindgen" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "itoa" = rec { + crateName = "itoa"; + version = "1.0.1"; + edition = "2018"; + sha256 = "0d8wr2qf5b25a04xf10rz9r0pdbjdgb0zaw3xvf8k2sqcz1qzaqs"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + + }; + "js-sys" = rec { + crateName = "js-sys"; + version = "0.3.56"; + edition = "2018"; + sha256 = "010g8jkj5avy3xd77i3cprjzzpfa6z9z2ay0fkllqmpx617c53x3"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "wasm-bindgen"; + packageId = "wasm-bindgen"; + } + ]; + + }; + "lazy_static" = rec { + crateName = "lazy_static"; + version = "1.4.0"; + edition = "2015"; + sha256 = "0in6ikhw8mgl33wjv6q6xfrb5b9jr16q8ygjy803fay4zcisvaz2"; + authors = [ + "Marvin Löbel <loebel.marvin@gmail.com>" + ]; + dependencies = [ + { + name = "spin"; + packageId = "spin"; + optional = true; + } + ]; + features = { + "spin" = [ "dep:spin" ]; + "spin_no_std" = [ "spin" ]; + }; + resolvedDefaultFeatures = [ "spin" "spin_no_std" ]; + }; + "libc" = rec { + crateName = "libc"; + version = "0.2.116"; + edition = "2015"; + sha256 = "0x6sk17kv2fdsqxlm23bz9x1y79w90k7ylkflk44rgidhy4bspan"; + authors = [ + "The Rust Project Developers" + ]; + features = { + "default" = [ "std" ]; + "rustc-dep-of-std" = [ "align" "rustc-std-workspace-core" ]; + "rustc-std-workspace-core" = [ "dep:rustc-std-workspace-core" ]; + "use_std" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "libm" = rec { + crateName = "libm"; + version = "0.2.1"; + edition = "2018"; + sha256 = "0akh56sh51adhagmk9l84dyrlz60gv8ri05xhr13i1b18czkpmy7"; 
+ authors = [ + "Jorge Aparicio <jorge@japaric.io>" + ]; + features = { + "musl-reference-tests" = [ "rand" ]; + "rand" = [ "dep:rand" ]; + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "log" = rec { + crateName = "log"; + version = "0.4.14"; + edition = "2015"; + sha256 = "04175hv0v62shd82qydq58a48k3bjijmk54v38zgqlbxqkkbpfai"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + ]; + features = { + "kv_unstable" = [ "value-bag" ]; + "kv_unstable_serde" = [ "kv_unstable_std" "value-bag/serde" "serde" ]; + "kv_unstable_std" = [ "std" "kv_unstable" "value-bag/error" ]; + "kv_unstable_sval" = [ "kv_unstable" "value-bag/sval" "sval" ]; + "serde" = [ "dep:serde" ]; + "sval" = [ "dep:sval" ]; + "value-bag" = [ "dep:value-bag" ]; + }; + }; + "memchr" = rec { + crateName = "memchr"; + version = "2.4.1"; + edition = "2018"; + sha256 = "0smq8xzd40njqpfzv5mghigj91fzlfrfg842iz8x0wqvw2dw731h"; + authors = [ + "Andrew Gallant <jamslam@gmail.com>" + "bluss" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "default" = [ "std" ]; + "libc" = [ "dep:libc" ]; + "rustc-dep-of-std" = [ "core" "compiler_builtins" ]; + "use_std" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "minimal-lexical" = rec { + crateName = "minimal-lexical"; + version = "0.2.1"; + edition = "2018"; + sha256 = "16ppc5g84aijpri4jzv14rvcnslvlpphbszc7zzp6vfkddf4qdb8"; + authors = [ + "Alex Huszagh <ahuszagh@gmail.com>" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "nom" = rec { + crateName = "nom"; + version = "7.1.0"; + edition = "2018"; + sha256 = "0281jdx0xcyhjgs1jkj9pii8py1clcpazg41bgz7d71qxzhi278v"; + authors = [ + "contact@geoffroycouprie.com" + ]; + dependencies = [ + { + name = "memchr"; + packageId = "memchr"; + usesDefaultFeatures = false; + } + { + name = "minimal-lexical"; + packageId = "minimal-lexical"; + usesDefaultFeatures = false; + } + ]; + buildDependencies = [ + { + name = "version_check"; + packageId = "version_check"; + } + ]; + features = { + "default" = [ "std" ]; + "std" = [ "alloc" "memchr/std" "minimal-lexical/std" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "std" ]; + }; + "num-bigint" = rec { + crateName = "num-bigint"; + version = "0.4.3"; + edition = "2018"; + sha256 = "0py73wsa5j4izhd39nkqzqv260r0ma08vy30ky54ld3vkhlbcfpr"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "num-integer"; + packageId = "num-integer"; + usesDefaultFeatures = false; + features = [ "i128" ]; + } + { + name = "num-traits"; + packageId = "num-traits"; + usesDefaultFeatures = false; + features = [ "i128" ]; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg 1.0.1"; + } + ]; + features = { + "arbitrary" = [ "dep:arbitrary" ]; + "default" = [ "std" ]; + "quickcheck" = [ "dep:quickcheck" ]; + "rand" = [ "dep:rand" ]; + "serde" = [ "dep:serde" ]; + "std" = [ "num-integer/std" "num-traits/std" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "num-bigint-dig" = rec { + crateName = "num-bigint-dig"; + version = "0.7.0"; + edition = "2015"; + sha256 = "1004mmipvc7pvaf3kf13i1nqh3vxf789bj72d8wl51y185aywis5"; + authors = [ + "dignifiedquire <dignifiedquire@gmail.com>" + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "byteorder"; + packageId = "byteorder"; + usesDefaultFeatures = false; + } + { + name = "lazy_static"; + packageId = "lazy_static"; 
+ usesDefaultFeatures = false; + features = [ "spin_no_std" ]; + } + { + name = "libm"; + packageId = "libm"; + } + { + name = "num-integer"; + packageId = "num-integer"; + usesDefaultFeatures = false; + } + { + name = "num-iter"; + packageId = "num-iter"; + usesDefaultFeatures = false; + } + { + name = "num-traits"; + packageId = "num-traits"; + usesDefaultFeatures = false; + } + { + name = "rand"; + packageId = "rand"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "smallvec"; + packageId = "smallvec"; + usesDefaultFeatures = false; + } + { + name = "zeroize"; + packageId = "zeroize"; + optional = true; + usesDefaultFeatures = false; + features = [ "zeroize_derive" ]; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg 0.1.7"; + } + ]; + devDependencies = [ + { + name = "rand"; + packageId = "rand"; + features = [ "small_rng" ]; + } + ]; + features = { + "default" = [ "std" "i128" "u64_digit" ]; + "i128" = [ "num-integer/i128" "num-traits/i128" ]; + "prime" = [ "rand/std_rng" ]; + "rand" = [ "dep:rand" ]; + "serde" = [ "dep:serde" ]; + "std" = [ "num-integer/std" "num-traits/std" "smallvec/write" "rand/std" "serde/std" ]; + "zeroize" = [ "dep:zeroize" ]; + }; + resolvedDefaultFeatures = [ "i128" "prime" "rand" "u64_digit" "zeroize" ]; + }; + "num-integer" = rec { + crateName = "num-integer"; + version = "0.1.44"; + edition = "2015"; + sha256 = "1nq152y3304as1iai95hqz8prqnc94lks1s7q05sfjdmcf56kk6j"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "num-traits"; + packageId = "num-traits"; + usesDefaultFeatures = false; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg 1.0.1"; + } + ]; + features = { + "default" = [ "std" ]; + "i128" = [ "num-traits/i128" ]; + "std" = [ "num-traits/std" ]; + }; + resolvedDefaultFeatures = [ "i128" "std" ]; + }; + "num-iter" = rec { + crateName = "num-iter"; + version = "0.1.42"; + edition = "2015"; + sha256 = "0ndd9wb9qar50fdr16xm3i1zk6h2g9br56nml2n22kd56y1iq0mj"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "num-integer"; + packageId = "num-integer"; + usesDefaultFeatures = false; + } + { + name = "num-traits"; + packageId = "num-traits"; + usesDefaultFeatures = false; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg 1.0.1"; + } + ]; + features = { + "default" = [ "std" ]; + "i128" = [ "num-integer/i128" "num-traits/i128" ]; + "std" = [ "num-integer/std" "num-traits/std" ]; + }; + }; + "num-traits" = rec { + crateName = "num-traits"; + version = "0.2.14"; + edition = "2015"; + sha256 = "144j176s2p76azy2ngk2vkdzgwdc0bc8c93jhki8c9fsbknb2r4s"; + authors = [ + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "libm"; + packageId = "libm"; + optional = true; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg 1.0.1"; + } + ]; + features = { + "default" = [ "std" ]; + "libm" = [ "dep:libm" ]; + }; + resolvedDefaultFeatures = [ "default" "i128" "libm" "std" ]; + }; + "num_threads" = rec { + crateName = "num_threads"; + version = "0.1.3"; + edition = "2015"; + sha256 = "05gvsnv4k6d69iksz47i7fq1r61dj1k1nh4i8xrw7qlkcfx9kflp"; + authors = [ + "Jacob Pratt <open-source@jhpratt.dev>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + target = { target, features }: ((target."os" == "macos") || (target."os" == "freebsd")); + } + ]; + + }; + "oid-registry" = rec { + crateName = "oid-registry"; + version = "0.4.0"; + edition = 
"2018"; + sha256 = "0akbah3j8231ayrp2l1y5d9zmvbvqcsj0sa6s6dz6h85z8bhgqiq"; + authors = [ + "Pierre Chifflier <chifflier@wzdftpd.net>" + ]; + dependencies = [ + { + name = "asn1-rs"; + packageId = "asn1-rs"; + } + ]; + features = { + "crypto" = [ "kdf" "pkcs1" "pkcs7" "pkcs9" "pkcs12" "nist_algs" "x962" ]; + "default" = [ "registry" ]; + }; + resolvedDefaultFeatures = [ "crypto" "default" "kdf" "nist_algs" "pkcs1" "pkcs12" "pkcs7" "pkcs9" "registry" "x509" "x962" ]; + }; + "once_cell" = rec { + crateName = "once_cell"; + version = "1.9.0"; + edition = "2018"; + sha256 = "1mfqhrsgi368x92bwnq3vi3p5nv0n1qlrn69gfflhvkfkxfm2cns"; + authors = [ + "Aleksey Kladov <aleksey.kladov@gmail.com>" + ]; + features = { + "alloc" = [ "race" ]; + "atomic-polyfill" = [ "dep:atomic-polyfill" ]; + "default" = [ "std" ]; + "parking_lot" = [ "dep:parking_lot" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "race" "std" ]; + }; + "openssl" = rec { + crateName = "openssl"; + version = "0.10.38"; + edition = "2018"; + sha256 = "15baqlphisr1f7ddq11jnrrzz4shdh35kwal24adyc2c4cif4yhc"; + authors = [ + "Steven Fackler <sfackler@gmail.com>" + ]; + dependencies = [ + { + name = "bitflags"; + packageId = "bitflags"; + } + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "foreign-types"; + packageId = "foreign-types"; + } + { + name = "libc"; + packageId = "libc"; + } + { + name = "once_cell"; + packageId = "once_cell"; + } + { + name = "openssl-sys"; + packageId = "openssl-sys"; + rename = "ffi"; + } + ]; + features = { + "vendored" = [ "ffi/vendored" ]; + }; + }; + "openssl-sys" = rec { + crateName = "openssl-sys"; + version = "0.9.72"; + edition = "2015"; + sha256 = "1jq3qbcvf16qn71yasdzw54b14n8nz98vr52l1gp60in72f10iky"; + build = "build/main.rs"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + "Steven Fackler <sfackler@gmail.com>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + } + ]; + buildDependencies = [ + { + name = "autocfg"; + packageId = "autocfg 1.0.1"; + } + { + name = "cc"; + packageId = "cc"; + } + { + name = "pkg-config"; + packageId = "pkg-config"; + } + { + name = "vcpkg"; + packageId = "vcpkg"; + target = {target, features}: (target."env" == "msvc"); + } + ]; + features = { + "openssl-src" = [ "dep:openssl-src" ]; + "vendored" = [ "openssl-src" ]; + }; + }; + "pem" = rec { + crateName = "pem"; + version = "1.0.2"; + edition = "2018"; + sha256 = "0iqrvfnm71x9pvff39d5ajwn3gc9glxlv4d4h22max7342db18z9"; + authors = [ + "Jonathan Creekmore <jonathan@thecreekmores.org>" + ]; + dependencies = [ + { + name = "base64"; + packageId = "base64"; + } + ]; + + }; + "pem-rfc7468" = rec { + crateName = "pem-rfc7468"; + version = "0.2.4"; + edition = "2018"; + sha256 = "1m1c9jypydzabg4yscplmvff7pdcc8gg4cqg081hnlf03hxkmsc4"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "base64ct"; + packageId = "base64ct"; + } + ]; + features = { + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" ]; + }; + "pkcs1" = rec { + crateName = "pkcs1"; + version = "0.2.4"; + edition = "2018"; + sha256 = "0b2f1a0lf5h53zrjvcqbxzjhh89gcfa1myhf6z7w10ypg61fwsqi"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "der"; + packageId = "der"; + features = [ "bigint" "oid" ]; + } + { + name = "pem-rfc7468"; + packageId = "pem-rfc7468"; + optional = true; + } + { + name = "zeroize"; + packageId = "zeroize"; + optional = true; + usesDefaultFeatures = false; + features = [ "alloc" ]; + } + ]; + features = { + 
"alloc" = [ "der/alloc" "zeroize" ]; + "pem" = [ "alloc" "pem-rfc7468/alloc" ]; + "pem-rfc7468" = [ "dep:pem-rfc7468" ]; + "zeroize" = [ "dep:zeroize" ]; + }; + resolvedDefaultFeatures = [ "alloc" "pem" "pem-rfc7468" "std" "zeroize" ]; + }; + "pkcs8" = rec { + crateName = "pkcs8"; + version = "0.7.6"; + edition = "2018"; + sha256 = "0iq46p6fa2b8xy6pj52zpmdy8ya3fg31dj4rc19x1fi69nvgjgpf"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "der"; + packageId = "der"; + features = [ "oid" ]; + } + { + name = "pem-rfc7468"; + packageId = "pem-rfc7468"; + optional = true; + } + { + name = "pkcs1"; + packageId = "pkcs1"; + optional = true; + features = [ "alloc" ]; + } + { + name = "spki"; + packageId = "spki"; + } + { + name = "zeroize"; + packageId = "zeroize"; + optional = true; + usesDefaultFeatures = false; + features = [ "alloc" ]; + } + ]; + features = { + "3des" = [ "encryption" "pkcs5/3des" ]; + "alloc" = [ "der/alloc" "zeroize" ]; + "des-insecure" = [ "encryption" "pkcs5/des-insecure" ]; + "encryption" = [ "alloc" "pkcs5/alloc" "pkcs5/pbes2" "rand_core" ]; + "pem" = [ "alloc" "pem-rfc7468/alloc" ]; + "pem-rfc7468" = [ "dep:pem-rfc7468" ]; + "pkcs1" = [ "dep:pkcs1" ]; + "pkcs5" = [ "dep:pkcs5" ]; + "rand_core" = [ "dep:rand_core" ]; + "sha1" = [ "encryption" "pkcs5/sha1" ]; + "std" = [ "alloc" "der/std" ]; + "zeroize" = [ "dep:zeroize" ]; + }; + resolvedDefaultFeatures = [ "alloc" "pem" "pem-rfc7468" "pkcs1" "std" "zeroize" ]; + }; + "pkg-config" = rec { + crateName = "pkg-config"; + version = "0.3.24"; + edition = "2015"; + sha256 = "1ghcyjp5537r7qigmgl3dj62j01arlpddaq93a3i414v3iskz2aq"; + authors = [ + "Alex Crichton <alex@alexcrichton.com>" + ]; + + }; + "ppv-lite86" = rec { + crateName = "ppv-lite86"; + version = "0.2.16"; + edition = "2018"; + sha256 = "0wkqwnvnfcgqlrahphl45vdlgi2f1bs7nqcsalsllp1y4dp9x7zb"; + authors = [ + "The CryptoCorrosion Contributors" + ]; + features = { + "default" = [ "std" ]; + }; + resolvedDefaultFeatures = [ "simd" "std" ]; + }; + "proc-macro2" = rec { + crateName = "proc-macro2"; + version = "1.0.36"; + edition = "2018"; + sha256 = "0adh6gvs31x6pfwmygypmzrv1jc7kjq568vsqcfaxk7vhdc2sd67"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + "Alex Crichton <alex@alexcrichton.com>" + ]; + dependencies = [ + { + name = "unicode-xid"; + packageId = "unicode-xid"; + } + ]; + features = { + "default" = [ "proc-macro" ]; + }; + resolvedDefaultFeatures = [ "default" "proc-macro" ]; + }; + "quote" = rec { + crateName = "quote"; + version = "1.0.15"; + edition = "2018"; + sha256 = "0id1q0875pvhkg0mlb5z8gzdm2g2rbbz76bfzhv331lrm2b3wkc6"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + usesDefaultFeatures = false; + } + ]; + features = { + "default" = [ "proc-macro" ]; + "proc-macro" = [ "proc-macro2/proc-macro" ]; + }; + resolvedDefaultFeatures = [ "default" "proc-macro" ]; + }; + "rand" = rec { + crateName = "rand"; + version = "0.8.4"; + edition = "2018"; + sha256 = "1n5wska2fbfj4dsfz8mc0pd0dgjlrb6c9anpk5mwym345rip6x9f"; + authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + optional = true; + usesDefaultFeatures = false; + target = { target, features }: (target."unix" or false); + } + { + name = "rand_chacha"; + packageId = "rand_chacha"; + optional = true; + usesDefaultFeatures = false; + target = { target, features }: (!(target."os" == "emscripten")); + } + { + 
name = "rand_core"; + packageId = "rand_core"; + } + { + name = "rand_hc"; + packageId = "rand_hc"; + optional = true; + target = { target, features }: (target."os" == "emscripten"); + } + ]; + devDependencies = [ + { + name = "rand_hc"; + packageId = "rand_hc"; + } + ]; + features = { + "alloc" = [ "rand_core/alloc" ]; + "default" = [ "std" "std_rng" ]; + "getrandom" = [ "rand_core/getrandom" ]; + "libc" = [ "dep:libc" ]; + "log" = [ "dep:log" ]; + "packed_simd" = [ "dep:packed_simd" ]; + "rand_chacha" = [ "dep:rand_chacha" ]; + "rand_hc" = [ "dep:rand_hc" ]; + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" "rand_core/serde1" ]; + "simd_support" = [ "packed_simd" ]; + "std" = [ "rand_core/std" "rand_chacha/std" "alloc" "getrandom" "libc" ]; + "std_rng" = [ "rand_chacha" "rand_hc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "getrandom" "libc" "rand_chacha" "rand_hc" "std" "std_rng" ]; + }; + "rand_chacha" = rec { + crateName = "rand_chacha"; + version = "0.3.1"; + edition = "2018"; + sha256 = "123x2adin558xbhvqb8w4f6syjsdkmqff8cxwhmjacpsl1ihmhg6"; + authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + "The CryptoCorrosion Contributors" + ]; + dependencies = [ + { + name = "ppv-lite86"; + packageId = "ppv-lite86"; + usesDefaultFeatures = false; + features = [ "simd" ]; + } + { + name = "rand_core"; + packageId = "rand_core"; + } + ]; + features = { + "default" = [ "std" ]; + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" ]; + "std" = [ "ppv-lite86/std" ]; + }; + resolvedDefaultFeatures = [ "std" ]; + }; + "rand_core" = rec { + crateName = "rand_core"; + version = "0.6.3"; + edition = "2018"; + sha256 = "1rxlxc3bpzgwphcg9c9yasvv9idipcg2z2y4j0vlb52jyl418kyk"; + authors = [ + "The Rand Project Developers" + "The Rust Project Developers" + ]; + dependencies = [ + { + name = "getrandom"; + packageId = "getrandom"; + optional = true; + } + ]; + features = { + "getrandom" = [ "dep:getrandom" ]; + "serde" = [ "dep:serde" ]; + "serde1" = [ "serde" ]; + "std" = [ "alloc" "getrandom" "getrandom/std" ]; + }; + resolvedDefaultFeatures = [ "alloc" "getrandom" "std" ]; + }; + "rand_hc" = rec { + crateName = "rand_hc"; + version = "0.3.1"; + edition = "2018"; + sha256 = "1rwpykyvhkxs4jvqdja3mzp9dqaqamzn113cxaigs9z2dmcry7nm"; + authors = [ + "The Rand Project Developers" + ]; + dependencies = [ + { + name = "rand_core"; + packageId = "rand_core"; + } + ]; + + }; + "rcgen" = rec { + crateName = "rcgen"; + version = "0.9.2"; + edition = "2018"; + crateBin = [ + { name = "rcgen"; path = "src/main.rs"; } + ]; + sha256 = "0ppwfl9g504x2qwk7m7mag8c3l70w9mcfha93013nlzqdlw2vynp"; + authors = [ + "est31 <MTest31@outlook.com>" + ]; + dependencies = [ + { + name = "pem"; + packageId = "pem"; + optional = true; + } + { + name = "ring"; + packageId = "ring"; + } + { + name = "time"; + packageId = "time"; + usesDefaultFeatures = false; + } + { + name = "x509-parser"; + packageId = "x509-parser"; + optional = true; + features = [ "verify" ]; + } + { + name = "yasna"; + packageId = "yasna"; + features = [ "time" "std" ]; + } + { + name = "zeroize"; + packageId = "zeroize"; + optional = true; + } + ]; + devDependencies = [ + { + name = "botan"; + packageId = "botan"; + features = [ "vendored" ]; + } + { + name = "openssl"; + packageId = "openssl"; + } + { + name = "rand"; + packageId = "rand"; + } + { + name = "rsa"; + packageId = "rsa"; + } + { + name = "webpki"; + packageId = "webpki"; + features = [ "std" ]; + } + { + name = "x509-parser"; + packageId = "x509-parser"; + features 
= [ "verify" ]; + } + ]; + features = { + "default" = [ "pem" ]; + "pem" = [ "dep:pem" ]; + "x509-parser" = [ "dep:x509-parser" ]; + "zeroize" = [ "dep:zeroize" ]; + }; + resolvedDefaultFeatures = [ "default" "pem" "x509-parser" "zeroize" ]; + }; + "ring" = rec { + crateName = "ring"; + version = "0.16.20"; + edition = "2018"; + sha256 = "1z682xp7v38ayq9g9nkbhhfpj6ygralmlx7wdmsfv8rnw99cylrh"; + authors = [ + "Brian Smith <brian@briansmith.org>" + ]; + dependencies = [ + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + target = { target, features }: ((target."os" == "android") || (target."os" == "linux")); + } + { + name = "once_cell"; + packageId = "once_cell"; + optional = true; + usesDefaultFeatures = false; + target = { target, features }: ((target."os" == "android") || (target."os" == "linux")); + features = [ "std" ]; + } + { + name = "once_cell"; + packageId = "once_cell"; + usesDefaultFeatures = false; + target = { target, features }: ((target."os" == "dragonfly") || (target."os" == "freebsd") || (target."os" == "illumos") || (target."os" == "netbsd") || (target."os" == "openbsd") || (target."os" == "solaris")); + features = [ "std" ]; + } + { + name = "spin"; + packageId = "spin"; + usesDefaultFeatures = false; + target = { target, features }: ((target."arch" == "x86") || (target."arch" == "x86_64") || (((target."arch" == "aarch64") || (target."arch" == "arm")) && ((target."os" == "android") || (target."os" == "fuchsia") || (target."os" == "linux")))); + } + { + name = "untrusted"; + packageId = "untrusted"; + } + { + name = "web-sys"; + packageId = "web-sys"; + usesDefaultFeatures = false; + target = { target, features }: ((target."arch" == "wasm32") && (target."vendor" == "unknown") && (target."os" == "unknown") && (target."env" == "")); + features = [ "Crypto" "Window" ]; + } + { + name = "winapi"; + packageId = "winapi"; + usesDefaultFeatures = false; + target = { target, features }: (target."os" == "windows"); + features = [ "ntsecapi" "wtypesbase" ]; + } + ]; + buildDependencies = [ + { + name = "cc"; + packageId = "cc"; + usesDefaultFeatures = false; + } + ]; + devDependencies = [ + { + name = "libc"; + packageId = "libc"; + usesDefaultFeatures = false; + target = {target, features}: ((target."unix" or false) || (target."windows" or false)); + } + ]; + features = { + "default" = [ "alloc" "dev_urandom_fallback" ]; + "dev_urandom_fallback" = [ "once_cell" ]; + "once_cell" = [ "dep:once_cell" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "dev_urandom_fallback" "once_cell" ]; + }; + "rsa" = rec { + crateName = "rsa"; + version = "0.5.0"; + edition = "2018"; + sha256 = "039676a4mj0875phdi7vc0bd37hv84dh0dql6fmk8dl2w81jcp70"; + authors = [ + "RustCrypto Developers" + "dignifiedquire <dignifiedquire@gmail.com>" + ]; + dependencies = [ + { + name = "byteorder"; + packageId = "byteorder"; + usesDefaultFeatures = false; + } + { + name = "digest"; + packageId = "digest"; + usesDefaultFeatures = false; + } + { + name = "lazy_static"; + packageId = "lazy_static"; + features = [ "spin_no_std" ]; + } + { + name = "num-bigint-dig"; + packageId = "num-bigint-dig"; + rename = "num-bigint"; + usesDefaultFeatures = false; + features = [ "i128" "u64_digit" "prime" "zeroize" ]; + } + { + name = "num-integer"; + packageId = "num-integer"; + usesDefaultFeatures = false; + } + { + name = "num-iter"; + packageId = "num-iter"; + usesDefaultFeatures = false; + } + { + name = "num-traits"; + packageId = "num-traits"; + usesDefaultFeatures = 
false; + features = [ "libm" ]; + } + { + name = "pkcs1"; + packageId = "pkcs1"; + usesDefaultFeatures = false; + } + { + name = "pkcs8"; + packageId = "pkcs8"; + usesDefaultFeatures = false; + } + { + name = "rand"; + packageId = "rand"; + usesDefaultFeatures = false; + features = [ "std_rng" ]; + } + { + name = "subtle"; + packageId = "subtle"; + usesDefaultFeatures = false; + } + { + name = "zeroize"; + packageId = "zeroize"; + features = [ "alloc" "zeroize_derive" ]; + } + ]; + features = { + "alloc" = [ "digest/alloc" "pkcs1/alloc" "pkcs8/alloc" "pkcs8/pkcs1" ]; + "default" = [ "std" "pem" ]; + "nightly" = [ "subtle/nightly" "num-bigint/nightly" ]; + "pem" = [ "alloc" "pkcs1/pem" "pkcs8/pem" ]; + "pkcs5" = [ "pkcs8/encryption" ]; + "serde" = [ "num-bigint/serde" "serde_crate" ]; + "serde_crate" = [ "dep:serde_crate" ]; + "std" = [ "alloc" "digest/std" "pkcs1/std" "pkcs8/std" "rand/std" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "pem" "std" ]; + }; + "rusticata-macros" = rec { + crateName = "rusticata-macros"; + version = "4.0.0"; + edition = "2018"; + sha256 = "03dmfxhgwzpm1360iwcpcg3y18ddgya0i0hc599am212pdvj7ib5"; + authors = [ + "Pierre Chifflier <chifflier@wzdftpd.net>" + ]; + dependencies = [ + { + name = "nom"; + packageId = "nom"; + usesDefaultFeatures = false; + features = [ "std" ]; + } + ]; + + }; + "smallvec" = rec { + crateName = "smallvec"; + version = "1.8.0"; + edition = "2018"; + sha256 = "10zf4fn63p2d6sx8qap3jvyarcfw563308x3431hd4c34r35gpgj"; + authors = [ + "The Servo Project Developers" + ]; + features = { + "arbitrary" = [ "dep:arbitrary" ]; + "const_new" = [ "const_generics" ]; + "serde" = [ "dep:serde" ]; + }; + }; + "spin" = rec { + crateName = "spin"; + version = "0.5.2"; + edition = "2015"; + sha256 = "0b84m6dbzrwf2kxylnw82d3dr8w06av7rfkr8s85fb5f43rwyqvf"; + authors = [ + "Mathijs van de Nes <git@mathijs.vd-nes.nl>" + "John Ericson <git@JohnEricson.me>" + ]; + + }; + "spki" = rec { + crateName = "spki"; + version = "0.4.1"; + edition = "2018"; + sha256 = "0ckgkcg6db5y94dqhmyikgn8yrsah6pyf4j197hv1c51bp0s00aw"; + authors = [ + "RustCrypto Developers" + ]; + dependencies = [ + { + name = "der"; + packageId = "der"; + features = [ "oid" ]; + } + ]; + features = { + "std" = [ "der/std" ]; + }; + }; + "subtle" = rec { + crateName = "subtle"; + version = "2.4.1"; + edition = "2015"; + sha256 = "00b6jzh9gzb0h9n25g06nqr90z3xzqppfhhb260s1hjhh4pg7pkb"; + authors = [ + "Isis Lovecruft <isis@patternsinthevoid.net>" + "Henry de Valence <hdevalence@hdevalence.ca>" + ]; + features = { + "default" = [ "std" "i128" ]; + }; + }; + "syn" = rec { + crateName = "syn"; + version = "1.0.86"; + edition = "2018"; + sha256 = "0sqwa4nqxzm89nj8xd8sk4iz0hbrw3mb17b6hyc2w2d0zzsb6rca"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + usesDefaultFeatures = false; + } + { + name = "quote"; + packageId = "quote"; + optional = true; + usesDefaultFeatures = false; + } + { + name = "unicode-xid"; + packageId = "unicode-xid"; + } + ]; + features = { + "default" = [ "derive" "parsing" "printing" "clone-impls" "proc-macro" ]; + "printing" = [ "quote" ]; + "proc-macro" = [ "proc-macro2/proc-macro" "quote/proc-macro" ]; + "quote" = [ "dep:quote" ]; + "test" = [ "syn-test-suite/all-features" ]; + }; + resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "extra-traits" "full" "parsing" "printing" "proc-macro" "quote" "visit" ]; + }; + "synstructure" = rec { + crateName = "synstructure"; + version = 
"0.12.6"; + edition = "2018"; + sha256 = "03r1lydbf3japnlpc4wka7y90pmz1i0danaj3f9a7b431akdlszk"; + authors = [ + "Nika Layzell <nika@thelayzells.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + usesDefaultFeatures = false; + } + { + name = "quote"; + packageId = "quote"; + usesDefaultFeatures = false; + } + { + name = "syn"; + packageId = "syn"; + usesDefaultFeatures = false; + features = [ "derive" "parsing" "printing" "clone-impls" "visit" "extra-traits" ]; + } + { + name = "unicode-xid"; + packageId = "unicode-xid"; + } + ]; + features = { + "default" = [ "proc-macro" ]; + "proc-macro" = [ "proc-macro2/proc-macro" "syn/proc-macro" "quote/proc-macro" ]; + }; + resolvedDefaultFeatures = [ "default" "proc-macro" ]; + }; + "thiserror" = rec { + crateName = "thiserror"; + version = "1.0.30"; + edition = "2018"; + sha256 = "05y4wm29ck8flwq5k1q6nhwh00a3b30cz3xr0qvnbwad5vjsnjw5"; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "thiserror-impl"; + packageId = "thiserror-impl"; + } + ]; + + }; + "thiserror-impl" = rec { + crateName = "thiserror-impl"; + version = "1.0.30"; + edition = "2018"; + sha256 = "0jviwmvx6wzawsj6c9msic7h419wmsbjagl9dzhpydkzc8zzscma"; + procMacro = true; + authors = [ + "David Tolnay <dtolnay@gmail.com>" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + } + { + name = "quote"; + packageId = "quote"; + } + { + name = "syn"; + packageId = "syn"; + } + ]; + + }; + "time" = rec { + crateName = "time"; + version = "0.3.7"; + edition = "2018"; + sha256 = "0gbmwlkj15dfhbqvxlzji1ffc1lidblpgg1q3b3378hgyfcbqk00"; + authors = [ + "Jacob Pratt <open-source@jhpratt.dev>" + "Time contributors" + ]; + dependencies = [ + { + name = "itoa"; + packageId = "itoa"; + optional = true; + } + { + name = "libc"; + packageId = "libc"; + target = { target, features }: (target."family" == "unix"); + } + { + name = "num_threads"; + packageId = "num_threads"; + target = { target, features }: (target."family" == "unix"); + } + { + name = "time-macros"; + packageId = "time-macros"; + optional = true; + } + ]; + features = { + "default" = [ "std" ]; + "formatting" = [ "itoa" "std" ]; + "itoa" = [ "dep:itoa" ]; + "large-dates" = [ "time-macros/large-dates" ]; + "local-offset" = [ "std" ]; + "macros" = [ "time-macros" ]; + "quickcheck" = [ "quickcheck-dep" "alloc" ]; + "quickcheck-dep" = [ "dep:quickcheck-dep" ]; + "rand" = [ "dep:rand" ]; + "serde" = [ "dep:serde" ]; + "serde-human-readable" = [ "serde" "formatting" "parsing" ]; + "serde-well-known" = [ "serde/alloc" "formatting" "parsing" ]; + "std" = [ "alloc" ]; + "time-macros" = [ "dep:time-macros" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "formatting" "itoa" "macros" "parsing" "std" "time-macros" ]; + }; + "time-macros" = rec { + crateName = "time-macros"; + version = "0.2.3"; + edition = "2018"; + sha256 = "1mj7pv8y9j2csrh1l8aabras36pgysbnfy18330srh4g8sihrsr5"; + procMacro = true; + authors = [ + "Jacob Pratt <open-source@jhpratt.dev>" + "Time contributors" + ]; + features = { + }; + }; + "typenum" = rec { + crateName = "typenum"; + version = "1.15.0"; + edition = "2018"; + sha256 = "11yrvz1vd43gqv738yw1v75rzngjbs7iwcgzjy3cq5ywkv2imy6w"; + build = "build/main.rs"; + authors = [ + "Paho Lurie-Gregg <paho@paholg.com>" + "Andre Bogus <bogusandre@gmail.com>" + ]; + features = { + "scale-info" = [ "dep:scale-info" ]; + "scale_info" = [ "scale-info/derive" ]; + }; + }; + "unicode-xid" = rec { + crateName = "unicode-xid"; + version 
= "0.2.2"; + edition = "2015"; + sha256 = "1wrkgcw557v311dkdb6n2hrix9dm2qdsb1zpw7pn79l03zb85jwc"; + authors = [ + "erick.tryzelaar <erick.tryzelaar@gmail.com>" + "kwantam <kwantam@gmail.com>" + "Manish Goregaokar <manishsmail@gmail.com>" + ]; + features = { + }; + resolvedDefaultFeatures = [ "default" ]; + }; + "untrusted" = rec { + crateName = "untrusted"; + version = "0.7.1"; + edition = "2018"; + sha256 = "0jkbqaj9d3v5a91pp3wp9mffvng1nhycx6sh4qkdd9qyr62ccmm1"; + libPath = "src/untrusted.rs"; + authors = [ + "Brian Smith <brian@briansmith.org>" + ]; + + }; + "vcpkg" = rec { + crateName = "vcpkg"; + version = "0.2.15"; + edition = "2015"; + sha256 = "09i4nf5y8lig6xgj3f7fyrvzd3nlaw4znrihw8psidvv5yk4xkdc"; + authors = [ + "Jim McGrath <jimmc2@gmail.com>" + ]; + + }; + "version_check" = rec { + crateName = "version_check"; + version = "0.9.4"; + edition = "2015"; + sha256 = "0gs8grwdlgh0xq660d7wr80x14vxbizmd8dbp29p2pdncx8lp1s9"; + authors = [ + "Sergio Benitez <sb@sergio.bz>" + ]; + + }; + "wasi" = rec { + crateName = "wasi"; + version = "0.10.2+wasi-snapshot-preview1"; + edition = "2018"; + sha256 = "1ii7nff4y1mpcrxzzvbpgxm7a1nn3szjf1n21jnx37c2g6dbsvzx"; + authors = [ + "The Cranelift Project Developers" + ]; + features = { + "compiler_builtins" = [ "dep:compiler_builtins" ]; + "core" = [ "dep:core" ]; + "default" = [ "std" ]; + "rustc-dep-of-std" = [ "compiler_builtins" "core" "rustc-std-workspace-alloc" ]; + "rustc-std-workspace-alloc" = [ "dep:rustc-std-workspace-alloc" ]; + }; + resolvedDefaultFeatures = [ "default" "std" ]; + }; + "wasm-bindgen" = rec { + crateName = "wasm-bindgen"; + version = "0.2.79"; + edition = "2018"; + sha256 = "01kc4lj2vlf0ra2w63izrgdlv8p6f8p15086hhyqln6q4dsazw95"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "cfg-if"; + packageId = "cfg-if"; + } + { + name = "wasm-bindgen-macro"; + packageId = "wasm-bindgen-macro"; + } + ]; + features = { + "default" = [ "spans" "std" ]; + "enable-interning" = [ "std" ]; + "serde" = [ "dep:serde" ]; + "serde-serialize" = [ "serde" "serde_json" "std" ]; + "serde_json" = [ "dep:serde_json" ]; + "spans" = [ "wasm-bindgen-macro/spans" ]; + "strict-macro" = [ "wasm-bindgen-macro/strict-macro" ]; + "xxx_debug_only_print_generated_code" = [ "wasm-bindgen-macro/xxx_debug_only_print_generated_code" ]; + }; + resolvedDefaultFeatures = [ "default" "spans" "std" ]; + }; + "wasm-bindgen-backend" = rec { + crateName = "wasm-bindgen-backend"; + version = "0.2.79"; + edition = "2018"; + sha256 = "1jpdrl5jj01961jxhmvj7v25ws928fyfj8ms7izifnhg0ggw08cb"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "bumpalo"; + packageId = "bumpalo"; + } + { + name = "lazy_static"; + packageId = "lazy_static"; + } + { + name = "log"; + packageId = "log"; + } + { + name = "proc-macro2"; + packageId = "proc-macro2"; + } + { + name = "quote"; + packageId = "quote"; + } + { + name = "syn"; + packageId = "syn"; + features = [ "full" ]; + } + { + name = "wasm-bindgen-shared"; + packageId = "wasm-bindgen-shared"; + } + ]; + features = { + "extra-traits" = [ "syn/extra-traits" ]; + }; + resolvedDefaultFeatures = [ "spans" ]; + }; + "wasm-bindgen-macro" = rec { + crateName = "wasm-bindgen-macro"; + version = "0.2.79"; + edition = "2018"; + sha256 = "00gdh0dlf2r77mxwh08q0z01vz2z7mvrllmj4gjjx9a0kvb06hig"; + procMacro = true; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "quote"; + packageId = "quote"; + } + { + name = "wasm-bindgen-macro-support"; + packageId = 
"wasm-bindgen-macro-support"; + } + ]; + features = { + "spans" = [ "wasm-bindgen-macro-support/spans" ]; + "strict-macro" = [ "wasm-bindgen-macro-support/strict-macro" ]; + }; + resolvedDefaultFeatures = [ "spans" ]; + }; + "wasm-bindgen-macro-support" = rec { + crateName = "wasm-bindgen-macro-support"; + version = "0.2.79"; + edition = "2018"; + sha256 = "1g1fjqvrkrf3j20z8nxsf60cypxg9dfvpbachl2b53908q6s7a5z"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + } + { + name = "quote"; + packageId = "quote"; + } + { + name = "syn"; + packageId = "syn"; + features = [ "visit" "full" ]; + } + { + name = "wasm-bindgen-backend"; + packageId = "wasm-bindgen-backend"; + } + { + name = "wasm-bindgen-shared"; + packageId = "wasm-bindgen-shared"; + } + ]; + features = { + "extra-traits" = [ "syn/extra-traits" ]; + "spans" = [ "wasm-bindgen-backend/spans" ]; + }; + resolvedDefaultFeatures = [ "spans" ]; + }; + "wasm-bindgen-shared" = rec { + crateName = "wasm-bindgen-shared"; + version = "0.2.79"; + edition = "2018"; + sha256 = "18h67l9b9jn06iw9r2p7bh9i0brh24lilcp4f26f4f24bh1qv59x"; + authors = [ + "The wasm-bindgen Developers" + ]; + + }; + "web-sys" = rec { + crateName = "web-sys"; + version = "0.3.56"; + edition = "2018"; + sha256 = "1sxqmwq773ss5m6vz7z95fdm6bqlix0s2awsy0j5gllxy8cv6q60"; + authors = [ + "The wasm-bindgen Developers" + ]; + dependencies = [ + { + name = "js-sys"; + packageId = "js-sys"; + } + { + name = "wasm-bindgen"; + packageId = "wasm-bindgen"; + } + ]; + features = { + "AbortSignal" = [ "EventTarget" ]; + "AnalyserNode" = [ "AudioNode" "EventTarget" ]; + "Animation" = [ "EventTarget" ]; + "AnimationEvent" = [ "Event" ]; + "AnimationPlaybackEvent" = [ "Event" ]; + "Attr" = [ "EventTarget" "Node" ]; + "AudioBufferSourceNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ]; + "AudioContext" = [ "BaseAudioContext" "EventTarget" ]; + "AudioDestinationNode" = [ "AudioNode" "EventTarget" ]; + "AudioNode" = [ "EventTarget" ]; + "AudioProcessingEvent" = [ "Event" ]; + "AudioScheduledSourceNode" = [ "AudioNode" "EventTarget" ]; + "AudioStreamTrack" = [ "EventTarget" "MediaStreamTrack" ]; + "AudioTrackList" = [ "EventTarget" ]; + "AudioWorklet" = [ "Worklet" ]; + "AudioWorkletGlobalScope" = [ "WorkletGlobalScope" ]; + "AudioWorkletNode" = [ "AudioNode" "EventTarget" ]; + "AuthenticatorAssertionResponse" = [ "AuthenticatorResponse" ]; + "AuthenticatorAttestationResponse" = [ "AuthenticatorResponse" ]; + "BaseAudioContext" = [ "EventTarget" ]; + "BatteryManager" = [ "EventTarget" ]; + "BeforeUnloadEvent" = [ "Event" ]; + "BiquadFilterNode" = [ "AudioNode" "EventTarget" ]; + "BlobEvent" = [ "Event" ]; + "Bluetooth" = [ "EventTarget" ]; + "BluetoothAdvertisingEvent" = [ "Event" ]; + "BluetoothDevice" = [ "EventTarget" ]; + "BluetoothPermissionResult" = [ "EventTarget" "PermissionStatus" ]; + "BluetoothRemoteGattCharacteristic" = [ "EventTarget" ]; + "BluetoothRemoteGattService" = [ "EventTarget" ]; + "BroadcastChannel" = [ "EventTarget" ]; + "CanvasCaptureMediaStream" = [ "EventTarget" "MediaStream" ]; + "CdataSection" = [ "CharacterData" "EventTarget" "Node" "Text" ]; + "ChannelMergerNode" = [ "AudioNode" "EventTarget" ]; + "ChannelSplitterNode" = [ "AudioNode" "EventTarget" ]; + "CharacterData" = [ "EventTarget" "Node" ]; + "ChromeWorker" = [ "EventTarget" "Worker" ]; + "Clipboard" = [ "EventTarget" ]; + "ClipboardEvent" = [ "Event" ]; + "CloseEvent" = [ "Event" ]; + "Comment" = [ "CharacterData" 
"EventTarget" "Node" ]; + "CompositionEvent" = [ "Event" "UiEvent" ]; + "ConstantSourceNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ]; + "ConvolverNode" = [ "AudioNode" "EventTarget" ]; + "CssAnimation" = [ "Animation" "EventTarget" ]; + "CssConditionRule" = [ "CssGroupingRule" "CssRule" ]; + "CssCounterStyleRule" = [ "CssRule" ]; + "CssFontFaceRule" = [ "CssRule" ]; + "CssFontFeatureValuesRule" = [ "CssRule" ]; + "CssGroupingRule" = [ "CssRule" ]; + "CssImportRule" = [ "CssRule" ]; + "CssKeyframeRule" = [ "CssRule" ]; + "CssKeyframesRule" = [ "CssRule" ]; + "CssMediaRule" = [ "CssConditionRule" "CssGroupingRule" "CssRule" ]; + "CssNamespaceRule" = [ "CssRule" ]; + "CssPageRule" = [ "CssRule" ]; + "CssStyleRule" = [ "CssRule" ]; + "CssStyleSheet" = [ "StyleSheet" ]; + "CssSupportsRule" = [ "CssConditionRule" "CssGroupingRule" "CssRule" ]; + "CssTransition" = [ "Animation" "EventTarget" ]; + "CustomEvent" = [ "Event" ]; + "DedicatedWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ]; + "DelayNode" = [ "AudioNode" "EventTarget" ]; + "DeviceLightEvent" = [ "Event" ]; + "DeviceMotionEvent" = [ "Event" ]; + "DeviceOrientationEvent" = [ "Event" ]; + "DeviceProximityEvent" = [ "Event" ]; + "Document" = [ "EventTarget" "Node" ]; + "DocumentFragment" = [ "EventTarget" "Node" ]; + "DocumentTimeline" = [ "AnimationTimeline" ]; + "DocumentType" = [ "EventTarget" "Node" ]; + "DomMatrix" = [ "DomMatrixReadOnly" ]; + "DomPoint" = [ "DomPointReadOnly" ]; + "DomRect" = [ "DomRectReadOnly" ]; + "DomRequest" = [ "EventTarget" ]; + "DragEvent" = [ "Event" "MouseEvent" "UiEvent" ]; + "DynamicsCompressorNode" = [ "AudioNode" "EventTarget" ]; + "Element" = [ "EventTarget" "Node" ]; + "ErrorEvent" = [ "Event" ]; + "EventSource" = [ "EventTarget" ]; + "ExtendableEvent" = [ "Event" ]; + "ExtendableMessageEvent" = [ "Event" "ExtendableEvent" ]; + "FetchEvent" = [ "Event" "ExtendableEvent" ]; + "FetchObserver" = [ "EventTarget" ]; + "File" = [ "Blob" ]; + "FileReader" = [ "EventTarget" ]; + "FileSystemDirectoryEntry" = [ "FileSystemEntry" ]; + "FileSystemFileEntry" = [ "FileSystemEntry" ]; + "FocusEvent" = [ "Event" "UiEvent" ]; + "FontFaceSet" = [ "EventTarget" ]; + "FontFaceSetLoadEvent" = [ "Event" ]; + "GainNode" = [ "AudioNode" "EventTarget" ]; + "GamepadAxisMoveEvent" = [ "Event" "GamepadEvent" ]; + "GamepadButtonEvent" = [ "Event" "GamepadEvent" ]; + "GamepadEvent" = [ "Event" ]; + "GpuDevice" = [ "EventTarget" ]; + "GpuUncapturedErrorEvent" = [ "Event" ]; + "HashChangeEvent" = [ "Event" ]; + "Hid" = [ "EventTarget" ]; + "HidConnectionEvent" = [ "Event" ]; + "HidDevice" = [ "EventTarget" ]; + "HidInputReportEvent" = [ "Event" ]; + "HtmlAnchorElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlAreaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlAudioElement" = [ "Element" "EventTarget" "HtmlElement" "HtmlMediaElement" "Node" ]; + "HtmlBaseElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlBodyElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlBrElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlButtonElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlCanvasElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDataElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDataListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDetailsElement" = [ "Element" "EventTarget" 
"HtmlElement" "Node" ]; + "HtmlDialogElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDirectoryElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDivElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlDocument" = [ "Document" "EventTarget" "Node" ]; + "HtmlElement" = [ "Element" "EventTarget" "Node" ]; + "HtmlEmbedElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFieldSetElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFontElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFormControlsCollection" = [ "HtmlCollection" ]; + "HtmlFormElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFrameElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlFrameSetElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlHeadElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlHeadingElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlHrElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlHtmlElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlIFrameElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlImageElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlInputElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlLabelElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlLegendElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlLiElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlLinkElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMapElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMediaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMenuElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMenuItemElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMetaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlMeterElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlModElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlOListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlObjectElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlOptGroupElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlOptionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlOptionsCollection" = [ "HtmlCollection" ]; + "HtmlOutputElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlParagraphElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlParamElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlPictureElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlPreElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlProgressElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlQuoteElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlScriptElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlSelectElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlSlotElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlSourceElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlSpanElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlStyleElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableCaptionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + 
"HtmlTableCellElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableColElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableRowElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTableSectionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTemplateElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTextAreaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTimeElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTitleElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlTrackElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlUListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlUnknownElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ]; + "HtmlVideoElement" = [ "Element" "EventTarget" "HtmlElement" "HtmlMediaElement" "Node" ]; + "IdbCursorWithValue" = [ "IdbCursor" ]; + "IdbDatabase" = [ "EventTarget" ]; + "IdbFileHandle" = [ "EventTarget" ]; + "IdbFileRequest" = [ "DomRequest" "EventTarget" ]; + "IdbLocaleAwareKeyRange" = [ "IdbKeyRange" ]; + "IdbMutableFile" = [ "EventTarget" ]; + "IdbOpenDbRequest" = [ "EventTarget" "IdbRequest" ]; + "IdbRequest" = [ "EventTarget" ]; + "IdbTransaction" = [ "EventTarget" ]; + "IdbVersionChangeEvent" = [ "Event" ]; + "IirFilterNode" = [ "AudioNode" "EventTarget" ]; + "ImageCaptureErrorEvent" = [ "Event" ]; + "InputEvent" = [ "Event" "UiEvent" ]; + "KeyboardEvent" = [ "Event" "UiEvent" ]; + "KeyframeEffect" = [ "AnimationEffect" ]; + "LocalMediaStream" = [ "EventTarget" "MediaStream" ]; + "MediaDevices" = [ "EventTarget" ]; + "MediaElementAudioSourceNode" = [ "AudioNode" "EventTarget" ]; + "MediaEncryptedEvent" = [ "Event" ]; + "MediaKeyError" = [ "Event" ]; + "MediaKeyMessageEvent" = [ "Event" ]; + "MediaKeySession" = [ "EventTarget" ]; + "MediaQueryList" = [ "EventTarget" ]; + "MediaQueryListEvent" = [ "Event" ]; + "MediaRecorder" = [ "EventTarget" ]; + "MediaRecorderErrorEvent" = [ "Event" ]; + "MediaSource" = [ "EventTarget" ]; + "MediaStream" = [ "EventTarget" ]; + "MediaStreamAudioDestinationNode" = [ "AudioNode" "EventTarget" ]; + "MediaStreamAudioSourceNode" = [ "AudioNode" "EventTarget" ]; + "MediaStreamEvent" = [ "Event" ]; + "MediaStreamTrack" = [ "EventTarget" ]; + "MediaStreamTrackEvent" = [ "Event" ]; + "MessageEvent" = [ "Event" ]; + "MessagePort" = [ "EventTarget" ]; + "MidiAccess" = [ "EventTarget" ]; + "MidiConnectionEvent" = [ "Event" ]; + "MidiInput" = [ "EventTarget" "MidiPort" ]; + "MidiMessageEvent" = [ "Event" ]; + "MidiOutput" = [ "EventTarget" "MidiPort" ]; + "MidiPort" = [ "EventTarget" ]; + "MouseEvent" = [ "Event" "UiEvent" ]; + "MouseScrollEvent" = [ "Event" "MouseEvent" "UiEvent" ]; + "MutationEvent" = [ "Event" ]; + "NetworkInformation" = [ "EventTarget" ]; + "Node" = [ "EventTarget" ]; + "Notification" = [ "EventTarget" ]; + "NotificationEvent" = [ "Event" "ExtendableEvent" ]; + "OfflineAudioCompletionEvent" = [ "Event" ]; + "OfflineAudioContext" = [ "BaseAudioContext" "EventTarget" ]; + "OfflineResourceList" = [ "EventTarget" ]; + "OffscreenCanvas" = [ "EventTarget" ]; + "OscillatorNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ]; + "PageTransitionEvent" = [ "Event" ]; + "PaintWorkletGlobalScope" = [ "WorkletGlobalScope" ]; + "PannerNode" = [ "AudioNode" "EventTarget" ]; + "PaymentMethodChangeEvent" = [ "Event" "PaymentRequestUpdateEvent" ]; + 
"PaymentRequestUpdateEvent" = [ "Event" ]; + "Performance" = [ "EventTarget" ]; + "PerformanceMark" = [ "PerformanceEntry" ]; + "PerformanceMeasure" = [ "PerformanceEntry" ]; + "PerformanceNavigationTiming" = [ "PerformanceEntry" "PerformanceResourceTiming" ]; + "PerformanceResourceTiming" = [ "PerformanceEntry" ]; + "PermissionStatus" = [ "EventTarget" ]; + "PointerEvent" = [ "Event" "MouseEvent" "UiEvent" ]; + "PopStateEvent" = [ "Event" ]; + "PopupBlockedEvent" = [ "Event" ]; + "PresentationAvailability" = [ "EventTarget" ]; + "PresentationConnection" = [ "EventTarget" ]; + "PresentationConnectionAvailableEvent" = [ "Event" ]; + "PresentationConnectionCloseEvent" = [ "Event" ]; + "PresentationConnectionList" = [ "EventTarget" ]; + "PresentationRequest" = [ "EventTarget" ]; + "ProcessingInstruction" = [ "CharacterData" "EventTarget" "Node" ]; + "ProgressEvent" = [ "Event" ]; + "PromiseRejectionEvent" = [ "Event" ]; + "PublicKeyCredential" = [ "Credential" ]; + "PushEvent" = [ "Event" "ExtendableEvent" ]; + "RadioNodeList" = [ "NodeList" ]; + "RtcDataChannel" = [ "EventTarget" ]; + "RtcDataChannelEvent" = [ "Event" ]; + "RtcPeerConnection" = [ "EventTarget" ]; + "RtcPeerConnectionIceEvent" = [ "Event" ]; + "RtcTrackEvent" = [ "Event" ]; + "RtcdtmfSender" = [ "EventTarget" ]; + "RtcdtmfToneChangeEvent" = [ "Event" ]; + "Screen" = [ "EventTarget" ]; + "ScreenOrientation" = [ "EventTarget" ]; + "ScriptProcessorNode" = [ "AudioNode" "EventTarget" ]; + "ScrollAreaEvent" = [ "Event" "UiEvent" ]; + "SecurityPolicyViolationEvent" = [ "Event" ]; + "ServiceWorker" = [ "EventTarget" ]; + "ServiceWorkerContainer" = [ "EventTarget" ]; + "ServiceWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ]; + "ServiceWorkerRegistration" = [ "EventTarget" ]; + "ShadowRoot" = [ "DocumentFragment" "EventTarget" "Node" ]; + "SharedWorker" = [ "EventTarget" ]; + "SharedWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ]; + "SourceBuffer" = [ "EventTarget" ]; + "SourceBufferList" = [ "EventTarget" ]; + "SpeechRecognition" = [ "EventTarget" ]; + "SpeechRecognitionError" = [ "Event" ]; + "SpeechRecognitionEvent" = [ "Event" ]; + "SpeechSynthesis" = [ "EventTarget" ]; + "SpeechSynthesisErrorEvent" = [ "Event" "SpeechSynthesisEvent" ]; + "SpeechSynthesisEvent" = [ "Event" ]; + "SpeechSynthesisUtterance" = [ "EventTarget" ]; + "StereoPannerNode" = [ "AudioNode" "EventTarget" ]; + "StorageEvent" = [ "Event" ]; + "SvgAnimateElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ]; + "SvgAnimateMotionElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ]; + "SvgAnimateTransformElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ]; + "SvgAnimationElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgCircleElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgClipPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgComponentTransferFunctionElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgDefsElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgDescElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgElement" = [ "Element" "EventTarget" "Node" ]; + "SvgEllipseElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgFilterElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgForeignObjectElement" = [ "Element" "EventTarget" "Node" 
"SvgElement" "SvgGraphicsElement" ]; + "SvgGeometryElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgGraphicsElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgImageElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgLineElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgLinearGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGradientElement" ]; + "SvgMarkerElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgMaskElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgMetadataElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgPathSegArcAbs" = [ "SvgPathSeg" ]; + "SvgPathSegArcRel" = [ "SvgPathSeg" ]; + "SvgPathSegClosePath" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoCubicAbs" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoCubicRel" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoCubicSmoothAbs" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoCubicSmoothRel" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoQuadraticAbs" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoQuadraticRel" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoQuadraticSmoothAbs" = [ "SvgPathSeg" ]; + "SvgPathSegCurvetoQuadraticSmoothRel" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoAbs" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoHorizontalAbs" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoHorizontalRel" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoRel" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoVerticalAbs" = [ "SvgPathSeg" ]; + "SvgPathSegLinetoVerticalRel" = [ "SvgPathSeg" ]; + "SvgPathSegMovetoAbs" = [ "SvgPathSeg" ]; + "SvgPathSegMovetoRel" = [ "SvgPathSeg" ]; + "SvgPatternElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgPolygonElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgPolylineElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgRadialGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGradientElement" ]; + "SvgRectElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ]; + "SvgScriptElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgSetElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ]; + "SvgStopElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgStyleElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgSwitchElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgSymbolElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgTextContentElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgTextElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" "SvgTextPositioningElement" ]; + "SvgTextPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" ]; + "SvgTextPositioningElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" ]; + "SvgTitleElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgUseElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgViewElement" = [ "Element" "EventTarget" "Node" 
"SvgElement" ]; + "SvgaElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgfeBlendElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeColorMatrixElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeComponentTransferElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeCompositeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeConvolveMatrixElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeDiffuseLightingElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeDisplacementMapElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeDistantLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeDropShadowElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeFloodElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeFuncAElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ]; + "SvgfeFuncBElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ]; + "SvgfeFuncGElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ]; + "SvgfeFuncRElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ]; + "SvgfeGaussianBlurElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeImageElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeMergeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeMergeNodeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeMorphologyElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeOffsetElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfePointLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeSpecularLightingElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeSpotLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeTileElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgfeTurbulenceElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvggElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgmPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" ]; + "SvgsvgElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ]; + "SvgtSpanElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" "SvgTextPositioningElement" ]; + "TcpServerSocket" = [ "EventTarget" ]; + "TcpServerSocketEvent" = [ "Event" ]; + "TcpSocket" = [ "EventTarget" ]; + "TcpSocketErrorEvent" = [ "Event" ]; + "TcpSocketEvent" = [ "Event" ]; + "Text" = [ "CharacterData" "EventTarget" "Node" ]; + "TextTrack" = [ "EventTarget" ]; + "TextTrackCue" = [ "EventTarget" ]; + "TextTrackList" = [ "EventTarget" ]; + "TimeEvent" = [ "Event" ]; + "TouchEvent" = [ "Event" "UiEvent" ]; + "TrackEvent" = [ "Event" ]; + "TransitionEvent" = [ "Event" ]; + "UiEvent" = [ "Event" ]; + "Usb" = [ "EventTarget" ]; + "UsbConnectionEvent" = [ "Event" ]; + "UsbPermissionResult" = [ "EventTarget" "PermissionStatus" ]; + "UserProximityEvent" = [ "Event" ]; + "ValueEvent" = [ "Event" ]; + "VideoStreamTrack" = [ "EventTarget" "MediaStreamTrack" ]; + "VideoTrackList" = [ "EventTarget" ]; + "VrDisplay" = [ "EventTarget" ]; + "VttCue" = [ "EventTarget" "TextTrackCue" ]; + "WakeLockSentinel" = [ "EventTarget" ]; + "WaveShaperNode" = [ "AudioNode" 
"EventTarget" ]; + "WebGlContextEvent" = [ "Event" ]; + "WebKitCssMatrix" = [ "DomMatrix" "DomMatrixReadOnly" ]; + "WebSocket" = [ "EventTarget" ]; + "WheelEvent" = [ "Event" "MouseEvent" "UiEvent" ]; + "Window" = [ "EventTarget" ]; + "WindowClient" = [ "Client" ]; + "Worker" = [ "EventTarget" ]; + "WorkerDebuggerGlobalScope" = [ "EventTarget" ]; + "WorkerGlobalScope" = [ "EventTarget" ]; + "XmlDocument" = [ "Document" "EventTarget" "Node" ]; + "XmlHttpRequest" = [ "EventTarget" "XmlHttpRequestEventTarget" ]; + "XmlHttpRequestEventTarget" = [ "EventTarget" ]; + "XmlHttpRequestUpload" = [ "EventTarget" "XmlHttpRequestEventTarget" ]; + "Xr" = [ "EventTarget" ]; + "XrBoundedReferenceSpace" = [ "EventTarget" "XrReferenceSpace" "XrSpace" ]; + "XrInputSourceEvent" = [ "Event" ]; + "XrInputSourcesChangeEvent" = [ "Event" ]; + "XrReferenceSpace" = [ "EventTarget" "XrSpace" ]; + "XrReferenceSpaceEvent" = [ "Event" ]; + "XrSession" = [ "EventTarget" ]; + "XrSessionEvent" = [ "Event" ]; + "XrSpace" = [ "EventTarget" ]; + "XrViewerPose" = [ "XrPose" ]; + }; + resolvedDefaultFeatures = [ "Crypto" "EventTarget" "Window" ]; + }; + "webpki" = rec { + crateName = "webpki"; + version = "0.22.0"; + edition = "2018"; + sha256 = "1gd1gxip5kgdwmrvhj5gjxij2mgg2mavq1ych4q1h272ja0xg5gh"; + authors = [ + "Brian Smith <brian@briansmith.org>" + ]; + dependencies = [ + { + name = "ring"; + packageId = "ring"; + usesDefaultFeatures = false; + } + { + name = "untrusted"; + packageId = "untrusted"; + } + ]; + features = { + "alloc" = [ "ring/alloc" ]; + "std" = [ "alloc" ]; + }; + resolvedDefaultFeatures = [ "alloc" "std" ]; + }; + "winapi" = rec { + crateName = "winapi"; + version = "0.3.9"; + edition = "2015"; + sha256 = "06gl025x418lchw1wxj64ycr7gha83m44cjr5sarhynd9xkrm0sw"; + authors = [ + "Peter Atashian <retep998@gmail.com>" + ]; + dependencies = [ + { + name = "winapi-i686-pc-windows-gnu"; + packageId = "winapi-i686-pc-windows-gnu"; + target = { target, features }: (stdenv.hostPlatform.config == "i686-pc-windows-gnu"); + } + { + name = "winapi-x86_64-pc-windows-gnu"; + packageId = "winapi-x86_64-pc-windows-gnu"; + target = { target, features }: (stdenv.hostPlatform.config == "x86_64-pc-windows-gnu"); + } + ]; + features = { + "debug" = [ "impl-debug" ]; + }; + resolvedDefaultFeatures = [ "ntsecapi" "wtypesbase" ]; + }; + "winapi-i686-pc-windows-gnu" = rec { + crateName = "winapi-i686-pc-windows-gnu"; + version = "0.4.0"; + edition = "2015"; + sha256 = "1dmpa6mvcvzz16zg6d5vrfy4bxgg541wxrcip7cnshi06v38ffxc"; + authors = [ + "Peter Atashian <retep998@gmail.com>" + ]; + + }; + "winapi-x86_64-pc-windows-gnu" = rec { + crateName = "winapi-x86_64-pc-windows-gnu"; + version = "0.4.0"; + edition = "2015"; + sha256 = "0gqq64czqb64kskjryj8isp62m2sgvx25yyj3kpc2myh85w24bki"; + authors = [ + "Peter Atashian <retep998@gmail.com>" + ]; + + }; + "x509-parser" = rec { + crateName = "x509-parser"; + version = "0.13.0"; + edition = "2018"; + sha256 = "0f3fqbv92q3a3s51md94sw3vgzs934agl4ii5a6ym364mkdlpwg5"; + authors = [ + "Pierre Chifflier <chifflier@wzdftpd.net>" + ]; + dependencies = [ + { + name = "asn1-rs"; + packageId = "asn1-rs"; + features = [ "datetime" ]; + } + { + name = "base64"; + packageId = "base64"; + } + { + name = "data-encoding"; + packageId = "data-encoding"; + } + { + name = "der-parser"; + packageId = "der-parser"; + features = [ "bigint" ]; + } + { + name = "lazy_static"; + packageId = "lazy_static"; + } + { + name = "nom"; + packageId = "nom"; + } + { + name = "oid-registry"; + packageId = "oid-registry"; + 
features = [ "crypto" "x509" ]; + } + { + name = "ring"; + packageId = "ring"; + optional = true; + } + { + name = "rusticata-macros"; + packageId = "rusticata-macros"; + } + { + name = "thiserror"; + packageId = "thiserror"; + } + { + name = "time"; + packageId = "time"; + features = [ "formatting" ]; + } + ]; + features = { + "ring" = [ "dep:ring" ]; + "verify" = [ "ring" ]; + }; + resolvedDefaultFeatures = [ "default" "ring" "verify" ]; + }; + "yasna" = rec { + crateName = "yasna"; + version = "0.5.0"; + edition = "2018"; + sha256 = "0k1gk11hq4rwlppv9f50bz8bnmgr73r66idpp7rybly96si38v9l"; + authors = [ + "Masaki Hara <ackie.h.gmai@gmail.com>" + ]; + dependencies = [ + { + name = "time"; + packageId = "time"; + optional = true; + usesDefaultFeatures = false; + features = [ "std" ]; + } + ]; + features = { + "bit-vec" = [ "dep:bit-vec" ]; + "num-bigint" = [ "dep:num-bigint" ]; + "time" = [ "dep:time" ]; + }; + resolvedDefaultFeatures = [ "default" "std" "time" ]; + }; + "zeroize" = rec { + crateName = "zeroize"; + version = "1.4.3"; + edition = "2018"; + sha256 = "068nvl3n5hk6lfn5y24grf2c7anzzqfzjjccscq3md7rqp79v3fn"; + authors = [ + "The RustCrypto Project Developers" + ]; + dependencies = [ + { + name = "zeroize_derive"; + packageId = "zeroize_derive"; + optional = true; + } + ]; + features = { + "default" = [ "alloc" ]; + "zeroize_derive" = [ "dep:zeroize_derive" ]; + }; + resolvedDefaultFeatures = [ "alloc" "default" "zeroize_derive" ]; + }; + "zeroize_derive" = rec { + crateName = "zeroize_derive"; + version = "1.3.1"; + edition = "2018"; + sha256 = "1nzdqyryjnqcrqz0vhddpkd8sybhn0bd8rbd6l33rdhhxwzz3s41"; + procMacro = true; + authors = [ + "The RustCrypto Project Developers" + ]; + dependencies = [ + { + name = "proc-macro2"; + packageId = "proc-macro2"; + } + { + name = "quote"; + packageId = "quote"; + } + { + name = "syn"; + packageId = "syn"; + } + { + name = "synstructure"; + packageId = "synstructure"; + } + ]; + + }; + }; + + # +# crate2nix/default.nix (excerpt start) +# + + /* Target (platform) data for conditional dependencies. + This corresponds roughly to what buildRustCrate is setting. + */ + defaultTarget = { + unix = true; + windows = false; + fuchsia = true; + test = false; + + # This doesn't appear to be officially documented anywhere yet. + # See https://github.com/rust-lang-nursery/rust-forge/issues/101. + os = + if stdenv.hostPlatform.isDarwin + then "macos" + else stdenv.hostPlatform.parsed.kernel.name; + arch = stdenv.hostPlatform.parsed.cpu.name; + family = "unix"; + env = "gnu"; + endian = + if stdenv.hostPlatform.parsed.cpu.significantByte.name == "littleEndian" + then "little" else "big"; + pointer_width = toString stdenv.hostPlatform.parsed.cpu.bits; + vendor = stdenv.hostPlatform.parsed.vendor.name; + debug_assertions = false; + }; + + /* Filters common temp files and build files. */ + # TODO(pkolloch): Substitute with gitignore filter + sourceFilter = name: type: + let + baseName = builtins.baseNameOf (builtins.toString name); + in + ! 
( + # Filter out git + baseName == ".gitignore" + || (type == "directory" && baseName == ".git") + + # Filter out build results + || ( + type == "directory" && ( + baseName == "target" + || baseName == "_site" + || baseName == ".sass-cache" + || baseName == ".jekyll-metadata" + || baseName == "build-artifacts" + ) + ) + + # Filter out nix-build result symlinks + || ( + type == "symlink" && lib.hasPrefix "result" baseName + ) + + # Filter out IDE config + || ( + type == "directory" && ( + baseName == ".idea" || baseName == ".vscode" + ) + ) || lib.hasSuffix ".iml" baseName + + # Filter out nix build files + || baseName == "Cargo.nix" + + # Filter out editor backup / swap files. + || lib.hasSuffix "~" baseName + || builtins.match "^\\.sw[a-z]$$" baseName != null + || builtins.match "^\\..*\\.sw[a-z]$$" baseName != null + || lib.hasSuffix ".tmp" baseName + || lib.hasSuffix ".bak" baseName + || baseName == "tests.nix" + ); + + /* Returns a crate which depends on successful test execution + of crate given as the second argument. + + testCrateFlags: list of flags to pass to the test executable + testInputs: list of packages that should be available during test execution + */ + crateWithTest = { crate, testCrate, testCrateFlags, testInputs, testPreRun, testPostRun }: + assert builtins.typeOf testCrateFlags == "list"; + assert builtins.typeOf testInputs == "list"; + assert builtins.typeOf testPreRun == "string"; + assert builtins.typeOf testPostRun == "string"; + let + # override the `crate` so that it will build and execute tests instead of + # building the actual lib and bin targets We just have to pass `--test` + # to rustc and it will do the right thing. We execute the tests and copy + # their log and the test executables to $out for later inspection. + test = + let + drv = testCrate.override + ( + _: { + buildTests = true; + } + ); + # If the user hasn't set any pre/post commands, we don't want to + # insert empty lines. This means that any existing users of crate2nix + # don't get a spurious rebuild unless they set these explicitly. + testCommand = pkgs.lib.concatStringsSep "\n" + (pkgs.lib.filter (s: s != "") [ + testPreRun + "$f $testCrateFlags 2>&1 | tee -a $out" + testPostRun + ]); + in + pkgs.runCommand "run-tests-${testCrate.name}" + { + inherit testCrateFlags; + buildInputs = testInputs; + } '' + set -ex + + export RUST_BACKTRACE=1 + + # recreate a file hierarchy as when running tests with cargo + + # the source for test data + ${pkgs.xorg.lndir}/bin/lndir ${crate.src} + + # build outputs + testRoot=target/debug + mkdir -p $testRoot + + # executables of the crate + # we copy to prevent std::env::current_exe() to resolve to a store location + for i in ${crate}/bin/*; do + cp "$i" "$testRoot" + done + chmod +w -R . + + # test harness executables are suffixed with a hash, like cargo does + # this allows to prevent name collision with the main + # executables of the crate + hash=$(basename $out) + for file in ${drv}/tests/*; do + f=$testRoot/$(basename $file)-$hash + cp $file $f + ${testCommand} + done + ''; + in + pkgs.runCommand "${crate.name}-linked" + { + inherit (crate) outputs crateName; + passthru = (crate.passthru or { }) // { + inherit test; + }; + } '' + echo tested by ${test} + ${lib.concatMapStringsSep "\n" (output: "ln -s ${crate.${output}} ${"$"}${output}") crate.outputs} + ''; + + /* A restricted overridable version of builtRustCratesWithFeatures. */ + buildRustCrateWithFeatures = + { packageId + , features ? rootFeatures + , crateOverrides ? 
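      # crateOverrides can extend pkgs.defaultCrateOverrides, for example with a
      # hypothetical native-dependency crate (foo-sys and pkgs.foo are placeholders):
      #   crateOverrides = pkgs.defaultCrateOverrides // {
      #     foo-sys = attrs: { nativeBuildInputs = [ pkgs.pkg-config ]; buildInputs = [ pkgs.foo ]; };
      #   };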
defaultCrateOverrides + , buildRustCrateForPkgsFunc ? null + , runTests ? false + , testCrateFlags ? [ ] + , testInputs ? [ ] + # Any command to run immediatelly before a test is executed. + , testPreRun ? "" + # Any command run immediatelly after a test is executed. + , testPostRun ? "" + }: + lib.makeOverridable + ( + { features + , crateOverrides + , runTests + , testCrateFlags + , testInputs + , testPreRun + , testPostRun + }: + let + buildRustCrateForPkgsFuncOverriden = + if buildRustCrateForPkgsFunc != null + then buildRustCrateForPkgsFunc + else + ( + if crateOverrides == pkgs.defaultCrateOverrides + then buildRustCrateForPkgs + else + pkgs: (buildRustCrateForPkgs pkgs).override { + defaultCrateOverrides = crateOverrides; + } + ); + builtRustCrates = builtRustCratesWithFeatures { + inherit packageId features; + buildRustCrateForPkgsFunc = buildRustCrateForPkgsFuncOverriden; + runTests = false; + }; + builtTestRustCrates = builtRustCratesWithFeatures { + inherit packageId features; + buildRustCrateForPkgsFunc = buildRustCrateForPkgsFuncOverriden; + runTests = true; + }; + drv = builtRustCrates.crates.${packageId}; + testDrv = builtTestRustCrates.crates.${packageId}; + derivation = + if runTests then + crateWithTest + { + crate = drv; + testCrate = testDrv; + inherit testCrateFlags testInputs testPreRun testPostRun; + } + else drv; + in + derivation + ) + { inherit features crateOverrides runTests testCrateFlags testInputs testPreRun testPostRun; }; + + /* Returns an attr set with packageId mapped to the result of buildRustCrateForPkgsFunc + for the corresponding crate. + */ + builtRustCratesWithFeatures = + { packageId + , features + , crateConfigs ? crates + , buildRustCrateForPkgsFunc + , runTests + , target ? defaultTarget + } @ args: + assert (builtins.isAttrs crateConfigs); + assert (builtins.isString packageId); + assert (builtins.isList features); + assert (builtins.isAttrs target); + assert (builtins.isBool runTests); + let + rootPackageId = packageId; + mergedFeatures = mergePackageFeatures + ( + args // { + inherit rootPackageId; + target = target // { test = runTests; }; + } + ); + # Memoize built packages so that reappearing packages are only built once. 
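      # Sketch of the idea: each package set (pkgs and, recursively,
      # pkgs.buildPackages) gets one attrset of built crates, so a crate reached
      # via several dependency paths is only instantiated once per platform. The
      # nested `build` attribute is what lets proc-macro and build dependencies
      # come from the build platform during cross-compilation; e.g. the
      # proc-macro crate "thiserror-impl" is taken from self.build.crates even
      # when "thiserror" itself is being built for the host platform.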
+ builtByPackageIdByPkgs = mkBuiltByPackageIdByPkgs pkgs; + mkBuiltByPackageIdByPkgs = pkgs: + let + self = { + crates = lib.mapAttrs (packageId: value: buildByPackageIdForPkgsImpl self pkgs packageId) crateConfigs; + build = mkBuiltByPackageIdByPkgs pkgs.buildPackages; + }; + in + self; + buildByPackageIdForPkgsImpl = self: pkgs: packageId: + let + features = mergedFeatures."${packageId}" or [ ]; + crateConfig' = crateConfigs."${packageId}"; + crateConfig = + builtins.removeAttrs crateConfig' [ "resolvedDefaultFeatures" "devDependencies" ]; + devDependencies = + lib.optionals + (runTests && packageId == rootPackageId) + (crateConfig'.devDependencies or [ ]); + dependencies = + dependencyDerivations { + inherit features target; + buildByPackageId = depPackageId: + # proc_macro crates must be compiled for the build architecture + if crateConfigs.${depPackageId}.procMacro or false + then self.build.crates.${depPackageId} + else self.crates.${depPackageId}; + dependencies = + (crateConfig.dependencies or [ ]) + ++ devDependencies; + }; + buildDependencies = + dependencyDerivations { + inherit features target; + buildByPackageId = depPackageId: + self.build.crates.${depPackageId}; + dependencies = crateConfig.buildDependencies or [ ]; + }; + filterEnabledDependenciesForThis = dependencies: filterEnabledDependencies { + inherit dependencies features target; + }; + dependenciesWithRenames = + lib.filter (d: d ? "rename") + ( + filterEnabledDependenciesForThis + ( + (crateConfig.buildDependencies or [ ]) + ++ (crateConfig.dependencies or [ ]) + ++ devDependencies + ) + ); + # Crate renames have the form: + # + # { + # crate_name = [ + # { version = "1.2.3"; rename = "crate_name01"; } + # ]; + # # ... + # } + crateRenames = + let + grouped = + lib.groupBy + (dependency: dependency.name) + dependenciesWithRenames; + versionAndRename = dep: + let + package = crateConfigs."${dep.packageId}"; + in + { inherit (dep) rename; version = package.version; }; + in + lib.mapAttrs (name: choices: builtins.map versionAndRename choices) grouped; + in + buildRustCrateForPkgsFunc pkgs + ( + crateConfig // { + src = crateConfig.src or ( + pkgs.fetchurl rec { + name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz"; + # https://www.pietroalbini.org/blog/downloading-crates-io/ + # Not rate-limited, CDN URL. + url = "https://static.crates.io/crates/${crateConfig.crateName}/${crateConfig.crateName}-${crateConfig.version}.crate"; + sha256 = + assert (lib.assertMsg (crateConfig ? sha256) "Missing sha256 for ${name}"); + crateConfig.sha256; + } + ); + extraRustcOpts = lib.lists.optional (targetFeatures != [ ]) "-C target-feature=${lib.concatMapStringsSep "," (x: "+${x}") targetFeatures}"; + inherit features dependencies buildDependencies crateRenames release; + } + ); + in + builtByPackageIdByPkgs; + + /* Returns the actual derivations for the given dependencies. */ + dependencyDerivations = + { buildByPackageId + , features + , dependencies + , target + }: + assert (builtins.isList features); + assert (builtins.isList dependencies); + assert (builtins.isAttrs target); + let + enabledDependencies = filterEnabledDependencies { + inherit dependencies features target; + }; + depDerivation = dependency: buildByPackageId dependency.packageId; + in + map depDerivation enabledDependencies; + + /* Returns a sanitized version of val with all values substituted that cannot + be serialized as JSON. 
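     For example (illustrative):
       sanitizeForJson { n = 1; f = x: x; }  ==  { n = 1; f = "function"; }
     so the result can safely be passed to builtins.toJSON for debugging output.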
+ */ + sanitizeForJson = val: + if builtins.isAttrs val + then lib.mapAttrs (n: v: sanitizeForJson v) val + else if builtins.isList val + then builtins.map sanitizeForJson val + else if builtins.isFunction val + then "function" + else val; + + /* Returns various tools to debug a crate. */ + debugCrate = { packageId, target ? defaultTarget }: + assert (builtins.isString packageId); + let + debug = rec { + # The built tree as passed to buildRustCrate. + buildTree = buildRustCrateWithFeatures { + buildRustCrateForPkgsFunc = _: lib.id; + inherit packageId; + }; + sanitizedBuildTree = sanitizeForJson buildTree; + dependencyTree = sanitizeForJson + ( + buildRustCrateWithFeatures { + buildRustCrateForPkgsFunc = _: crate: { + "01_crateName" = crate.crateName or false; + "02_features" = crate.features or [ ]; + "03_dependencies" = crate.dependencies or [ ]; + }; + inherit packageId; + } + ); + mergedPackageFeatures = mergePackageFeatures { + features = rootFeatures; + inherit packageId target; + }; + diffedDefaultPackageFeatures = diffDefaultPackageFeatures { + inherit packageId target; + }; + }; + in + { internal = debug; }; + + /* Returns differences between cargo default features and crate2nix default + features. + + This is useful for verifying the feature resolution in crate2nix. + */ + diffDefaultPackageFeatures = + { crateConfigs ? crates + , packageId + , target + }: + assert (builtins.isAttrs crateConfigs); + let + prefixValues = prefix: lib.mapAttrs (n: v: { "${prefix}" = v; }); + mergedFeatures = + prefixValues + "crate2nix" + (mergePackageFeatures { inherit crateConfigs packageId target; features = [ "default" ]; }); + configs = prefixValues "cargo" crateConfigs; + combined = lib.foldAttrs (a: b: a // b) { } [ mergedFeatures configs ]; + onlyInCargo = + builtins.attrNames + (lib.filterAttrs (n: v: !(v ? "crate2nix") && (v ? "cargo")) combined); + onlyInCrate2Nix = + builtins.attrNames + (lib.filterAttrs (n: v: (v ? "crate2nix") && !(v ? "cargo")) combined); + differentFeatures = lib.filterAttrs + ( + n: v: + (v ? "crate2nix") + && (v ? "cargo") + && (v.crate2nix.features or [ ]) != (v."cargo".resolved_default_features or [ ]) + ) + combined; + in + builtins.toJSON { + inherit onlyInCargo onlyInCrate2Nix differentFeatures; + }; + + /* Returns an attrset mapping packageId to the list of enabled features. + + If multiple paths to a dependency enable different features, the + corresponding feature sets are merged. Features in rust are additive. + */ + mergePackageFeatures = + { crateConfigs ? crates + , packageId + , rootPackageId ? packageId + , features ? rootFeatures + , dependencyPath ? [ crates.${packageId}.crateName ] + , featuresByPackageId ? { } + , target + # Adds devDependencies to the crate with rootPackageId. + , runTests ? false + , ... 
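      # The result maps packageId -> sorted list of enabled features, e.g.
      # (illustrative, cf. the resolvedDefaultFeatures entries in the crate data above):
      #   { "time" = [ "alloc" "default" "formatting" ... ]; "unicode-xid" = [ "default" ]; ... }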
+ } @ args: + assert (builtins.isAttrs crateConfigs); + assert (builtins.isString packageId); + assert (builtins.isString rootPackageId); + assert (builtins.isList features); + assert (builtins.isList dependencyPath); + assert (builtins.isAttrs featuresByPackageId); + assert (builtins.isAttrs target); + assert (builtins.isBool runTests); + let + crateConfig = crateConfigs."${packageId}" or (builtins.throw "Package not found: ${packageId}"); + expandedFeatures = expandFeatures (crateConfig.features or { }) features; + enabledFeatures = enableFeatures (crateConfig.dependencies or [ ]) expandedFeatures; + depWithResolvedFeatures = dependency: + let + packageId = dependency.packageId; + features = dependencyFeatures enabledFeatures dependency; + in + { inherit packageId features; }; + resolveDependencies = cache: path: dependencies: + assert (builtins.isAttrs cache); + assert (builtins.isList dependencies); + let + enabledDependencies = filterEnabledDependencies { + inherit dependencies target; + features = enabledFeatures; + }; + directDependencies = map depWithResolvedFeatures enabledDependencies; + foldOverCache = op: lib.foldl op cache directDependencies; + in + foldOverCache + ( + cache: { packageId, features }: + let + cacheFeatures = cache.${packageId} or [ ]; + combinedFeatures = sortedUnique (cacheFeatures ++ features); + in + if cache ? ${packageId} && cache.${packageId} == combinedFeatures + then cache + else + mergePackageFeatures { + features = combinedFeatures; + featuresByPackageId = cache; + inherit crateConfigs packageId target runTests rootPackageId; + } + ); + cacheWithSelf = + let + cacheFeatures = featuresByPackageId.${packageId} or [ ]; + combinedFeatures = sortedUnique (cacheFeatures ++ enabledFeatures); + in + featuresByPackageId // { + "${packageId}" = combinedFeatures; + }; + cacheWithDependencies = + resolveDependencies cacheWithSelf "dep" + ( + crateConfig.dependencies or [ ] + ++ lib.optionals + (runTests && packageId == rootPackageId) + (crateConfig.devDependencies or [ ]) + ); + cacheWithAll = + resolveDependencies + cacheWithDependencies "build" + (crateConfig.buildDependencies or [ ]); + in + cacheWithAll; + + /* Returns the enabled dependencies given the enabled features. */ + filterEnabledDependencies = { dependencies, features, target }: + assert (builtins.isList dependencies); + assert (builtins.isList features); + assert (builtins.isAttrs target); + + lib.filter + ( + dep: + let + targetFunc = dep.target or (features: true); + in + targetFunc { inherit features target; } + && ( + !(dep.optional or false) + || builtins.any (doesFeatureEnableDependency dep) features + ) + ) + dependencies; + + /* Returns whether the given feature should enable the given dependency. */ + doesFeatureEnableDependency = dependency: feature: + let + name = dependency.rename or dependency.name; + prefix = "${name}/"; + len = builtins.stringLength prefix; + startsWithPrefix = builtins.substring 0 len feature == prefix; + in + feature == name || startsWithPrefix; + + /* Returns the expanded features for the given inputFeatures by applying the + rules in featureMap. + + featureMap is an attribute set which maps feature names to lists of further + feature names to enable in case this feature is selected. 
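     For example, with
       featureMap = { default = [ "std" ]; std = [ "alloc" ]; }
     the call  expandFeatures featureMap [ "default" ]  evaluates to
       [ "alloc" "default" "std" ]
     (transitively expanded, then sorted and deduplicated).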
+ */ + expandFeatures = featureMap: inputFeatures: + assert (builtins.isAttrs featureMap); + assert (builtins.isList inputFeatures); + let + expandFeature = feature: + assert (builtins.isString feature); + [ feature ] ++ (expandFeatures featureMap (featureMap."${feature}" or [ ])); + outFeatures = lib.concatMap expandFeature inputFeatures; + in + sortedUnique outFeatures; + + /* This function adds optional dependencies as features if they are enabled + indirectly by dependency features. This function mimics Cargo's behavior + described in a note at: + https://doc.rust-lang.org/nightly/cargo/reference/features.html#dependency-features + */ + enableFeatures = dependencies: features: + assert (builtins.isList features); + assert (builtins.isList dependencies); + let + additionalFeatures = lib.concatMap + ( + dependency: + assert (builtins.isAttrs dependency); + let + enabled = builtins.any (doesFeatureEnableDependency dependency) features; + in + if (dependency.optional or false) && enabled + then [ (dependency.rename or dependency.name) ] + else [ ] + ) + dependencies; + in + sortedUnique (features ++ additionalFeatures); + + /* + Returns the actual features for the given dependency. + + features: The features of the crate that refers this dependency. + */ + dependencyFeatures = features: dependency: + assert (builtins.isList features); + assert (builtins.isAttrs dependency); + let + defaultOrNil = + if dependency.usesDefaultFeatures or true + then [ "default" ] + else [ ]; + explicitFeatures = dependency.features or [ ]; + additionalDependencyFeatures = + let + dependencyPrefix = (dependency.rename or dependency.name) + "/"; + dependencyFeatures = + builtins.filter (f: lib.hasPrefix dependencyPrefix f) features; + in + builtins.map (lib.removePrefix dependencyPrefix) dependencyFeatures; + in + defaultOrNil ++ explicitFeatures ++ additionalDependencyFeatures; + + /* Sorts and removes duplicates from a list of strings. */ + sortedUnique = features: + assert (builtins.isList features); + assert (builtins.all builtins.isString features); + let + outFeaturesSet = lib.foldl (set: feature: set // { "${feature}" = 1; }) { } features; + outFeaturesUnique = builtins.attrNames outFeaturesSet; + in + builtins.sort (a: b: a < b) outFeaturesUnique; + + deprecationWarning = message: value: + if strictDeprecation + then builtins.throw "strictDeprecation enabled, aborting: ${message}" + else builtins.trace message value; + + # + # crate2nix/default.nix (excerpt end) + # + }; +} + diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-package/default.nix b/nixpkgs/pkgs/build-support/rust/build-rust-package/default.nix new file mode 100644 index 000000000000..8b87bcbee3af --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-package/default.nix @@ -0,0 +1,174 @@ +{ lib +, importCargoLock +, fetchCargoTarball +, rust +, stdenv +, callPackage +, cargoBuildHook +, cargoCheckHook +, cargoInstallHook +, cargoNextestHook +, cargoSetupHook +, cargo +, cargo-auditable +, buildPackages +, rustc +, libiconv +, windows +}: + +let + buildRustPackage = + { name ? "${args.pname}-${args.version}" + + # Name for the vendored dependencies tarball + , cargoDepsName ? name + + , src ? null + , srcs ? null + , preUnpack ? null + , unpackPhase ? null + , postUnpack ? null + , cargoPatches ? [] + , patches ? [] + , sourceRoot ? null + , logLevel ? "" + , buildInputs ? [] + , nativeBuildInputs ? [] + , cargoUpdateHook ? "" + , cargoDepsHook ? "" + , buildType ? "release" + , meta ? {} + , cargoLock ? 
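      # When set, dependencies are vendored from a checked-in lock file via
      # importCargoLock, e.g.  cargoLock = { lockFile = ./Cargo.lock; };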
null + , cargoVendorDir ? null + , checkType ? buildType + , buildNoDefaultFeatures ? false + , checkNoDefaultFeatures ? buildNoDefaultFeatures + , buildFeatures ? [ ] + , checkFeatures ? buildFeatures + , useNextest ? false + , auditable ? !cargo-auditable.meta.broken + + , depsExtraArgs ? {} + + # Toggles whether a custom sysroot is created when the target is a .json file. + , __internal_dontAddSysroot ? false + + # Needed to `pushd`/`popd` into a subdir of a tarball if this subdir + # contains a Cargo.toml, but isn't part of a workspace (which is e.g. the + # case for `rustfmt`/etc from the `rust-sources). + # Otherwise, everything from the tarball would've been built/tested. + , buildAndTestSubdir ? null + , ... } @ args: + + assert cargoVendorDir == null && cargoLock == null + -> !(args ? cargoSha256 && args.cargoSha256 != null) && !(args ? cargoHash && args.cargoHash != null) + -> throw "cargoSha256, cargoHash, cargoVendorDir, or cargoLock must be set"; + + let + + cargoDeps = + if cargoVendorDir != null then null + else if cargoLock != null then importCargoLock cargoLock + else fetchCargoTarball ({ + inherit src srcs sourceRoot preUnpack unpackPhase postUnpack cargoUpdateHook; + name = cargoDepsName; + patches = cargoPatches; + } // lib.optionalAttrs (args ? cargoHash) { + hash = args.cargoHash; + } // lib.optionalAttrs (args ? cargoSha256) { + sha256 = args.cargoSha256; + } // depsExtraArgs); + + target = rust.toRustTargetSpec stdenv.hostPlatform; + targetIsJSON = lib.hasSuffix ".json" target; + useSysroot = targetIsJSON && !__internal_dontAddSysroot; + + sysroot = callPackage ./sysroot { } { + inherit target; + shortTarget = rust.lib.toRustTargetSpecShort stdenv.hostPlatform; + RUSTFLAGS = args.RUSTFLAGS or ""; + originalCargoToml = src + /Cargo.toml; # profile info is later extracted + }; + + in + + # Tests don't currently work for `no_std`, and all custom sysroots are currently built without `std`. + # See https://os.phil-opp.com/testing/ for more information. 
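  # A minimal call of this builder, assuming the source and the vendored-deps
  # hash are already known (names below are placeholders), might look like:
  #   rustPlatform.buildRustPackage {
  #     pname = "some-tool";
  #     version = "0.1.0";
  #     src = fetchFromGitHub { owner = "..."; repo = "..."; rev = "..."; hash = "..."; };
  #     cargoHash = "sha256-...";
  #   }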
+ assert useSysroot -> !(args.doCheck or true); + + stdenv.mkDerivation ((removeAttrs args [ "depsExtraArgs" "cargoUpdateHook" "cargoLock" ]) // lib.optionalAttrs useSysroot { + RUSTFLAGS = "--sysroot ${sysroot} " + (args.RUSTFLAGS or ""); + } // { + inherit buildAndTestSubdir cargoDeps; + + cargoBuildType = buildType; + + cargoCheckType = checkType; + + cargoBuildNoDefaultFeatures = buildNoDefaultFeatures; + + cargoCheckNoDefaultFeatures = checkNoDefaultFeatures; + + cargoBuildFeatures = buildFeatures; + + cargoCheckFeatures = checkFeatures; + + patchRegistryDeps = ./patch-registry-deps; + + nativeBuildInputs = nativeBuildInputs ++ lib.optionals auditable [ + (buildPackages.cargo-auditable-cargo-wrapper.override { + inherit cargo cargo-auditable; + }) + ] ++ [ + cargoBuildHook + (if useNextest then cargoNextestHook else cargoCheckHook) + cargoInstallHook + cargoSetupHook + rustc + ]; + + buildInputs = buildInputs + ++ lib.optionals stdenv.hostPlatform.isDarwin [ libiconv ] + ++ lib.optionals stdenv.hostPlatform.isMinGW [ windows.pthreads ]; + + patches = cargoPatches ++ patches; + + PKG_CONFIG_ALLOW_CROSS = + if stdenv.buildPlatform != stdenv.hostPlatform then 1 else 0; + + postUnpack = '' + eval "$cargoDepsHook" + + export RUST_LOG=${logLevel} + '' + (args.postUnpack or ""); + + configurePhase = args.configurePhase or '' + runHook preConfigure + runHook postConfigure + ''; + + doCheck = args.doCheck or true; + + strictDeps = true; + + meta = { + # default to Rust's platforms + platforms = rustc.meta.platforms ++ [ + # Platforms without host tools from + # https://doc.rust-lang.org/nightly/rustc/platform-support.html + "armv7a-darwin" + "armv5tel-linux" "armv7a-linux" "m68k-linux" "riscv32-linux" + "armv6l-netbsd" + "x86_64-redox" + "wasm32-wasi" + ]; + badPlatforms = [ + # Rust is currently unable to target the n32 ABI + lib.systems.inspect.patterns.isMips64n32 + ]; + } // meta; + }) // { + overrideRustAttrs = f: buildRustPackage (args // (f args)); + }; +in buildRustPackage diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-package/patch-registry-deps/pkg-config b/nixpkgs/pkgs/build-support/rust/build-rust-package/patch-registry-deps/pkg-config new file mode 100644 index 000000000000..fbb094304587 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-package/patch-registry-deps/pkg-config @@ -0,0 +1,8 @@ +for dir in pkg-config-*; do + [ -d "$dir" ] || continue + + echo "Patching pkg-config registry dep" + + substituteInPlace "$dir/src/lib.rs" \ + --replace '"/usr"' '"'"$NIX_STORE"'/"' +done diff --git a/nixpkgs/pkgs/build-support/rust/build-rust-package/sysroot/default.nix b/nixpkgs/pkgs/build-support/rust/build-rust-package/sysroot/default.nix new file mode 100644 index 000000000000..a6d53056d9c7 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/build-rust-package/sysroot/default.nix @@ -0,0 +1,35 @@ +{ lib, stdenv, rust, rustPlatform, buildPackages }: + +{ shortTarget, originalCargoToml, target, RUSTFLAGS }: + +let + cargoSrc = import ../../sysroot/src.nix { + inherit lib stdenv rustPlatform buildPackages originalCargoToml; + }; +in rustPlatform.buildRustPackage { + inherit target RUSTFLAGS; + + name = "custom-sysroot"; + src = cargoSrc; + + RUSTC_BOOTSTRAP = 1; + __internal_dontAddSysroot = true; + cargoSha256 = "sha256-zgkwevitxsu1C4OgGTsqNSc0gDxaNXYK1WPbfER48d0="; + + doCheck = false; + + installPhase = '' + export LIBS_DIR=$out/lib/rustlib/${shortTarget}/lib + mkdir -p $LIBS_DIR + for f in target/${shortTarget}/release/deps/*.{rlib,rmeta}; do + cp $f $LIBS_DIR + 
done + + export RUST_SYSROOT=$(rustc --print=sysroot) + host=${rust.toRustTarget stdenv.buildPlatform} + cp -r $RUST_SYSROOT/lib/rustlib/$host $out + ''; + + # allows support for cross-compilation + meta.platforms = lib.platforms.all; +} diff --git a/nixpkgs/pkgs/build-support/rust/default-crate-overrides.nix b/nixpkgs/pkgs/build-support/rust/default-crate-overrides.nix new file mode 100644 index 000000000000..92c71dfc059c --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/default-crate-overrides.nix @@ -0,0 +1,311 @@ +{ lib +, stdenv +, atk +, pkg-config +, curl +, darwin +, libgit2 +, gtk3 +, libssh2 +, openssl +, sqlite +, zlib +, dbus +, dbus-glib +, gdk-pixbuf +, cairo +, python3 +, libsodium +, postgresql +, gmp +, gobject-introspection +, foundationdb +, capnproto +, nettle +, gtk4 +, clang +, llvmPackages +, linux-pam +, pango +, cmake +, glib +, freetype +, fontconfig +, rdkafka +, udev +, libevdev +, alsa-lib +, graphene +, protobuf +, autoconf +, automake +, libtool +, seatd # =libseat +, ... +}: + +let + inherit (darwin.apple_sdk.frameworks) CoreFoundation Security; +in +{ + alsa-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ alsa-lib ]; + }; + + cairo-rs = attrs: { + buildInputs = [ cairo ]; + }; + + cairo-sys-rs = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ cairo ]; + }; + + capnp-rpc = attrs: { + nativeBuildInputs = [ capnproto ]; + }; + + cargo = attrs: { + buildInputs = [ openssl zlib curl ] + ++ lib.optionals stdenv.isDarwin [ CoreFoundation Security ]; + }; + + libz-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ zlib ]; + extraLinkFlags = [ "-L${zlib.out}/lib" ]; + }; + + curl-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ zlib curl ]; + propagatedBuildInputs = [ curl zlib ]; + extraLinkFlags = [ "-L${zlib.out}/lib" ]; + }; + + dbus = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ dbus ]; + }; + + evdev-sys = attrs: { + nativeBuildInputs = [ + pkg-config + ] ++ lib.optionals (stdenv.buildPlatform.config != stdenv.hostPlatform.config) [ + python3 autoconf automake libtool + ]; + buildInputs = [ libevdev ]; + + # This prevents libevdev's build.rs from trying to `git fetch` when HOST!=TARGET + prePatch = '' + touch libevdev/.git + ''; + }; + + expat-sys = attrs: { + nativeBuildInputs = [ cmake ]; + }; + + foundationdb-sys = attrs: { + buildInputs = [ foundationdb ]; + # needed for 0.4+ release, when the FFI bindings are auto-generated + # + # patchPhase = '' + # substituteInPlace ./foundationdb-sys/build.rs \ + # --replace /usr/local/include ${foundationdb.dev}/include + # ''; + }; + + foundationdb = attrs: { + buildInputs = [ foundationdb ]; + }; + + freetype-sys = attrs: { + nativeBuildInputs = [ cmake ]; + buildInputs = [ freetype ]; + }; + + glib-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ glib ]; + }; + + gobject-sys = attrs: { + buildInputs = [ dbus-glib ]; + }; + + gio-sys = attrs: { + buildInputs = [ dbus-glib ]; + }; + + gdk-pixbuf-sys = attrs: { + buildInputs = [ dbus-glib ]; + }; + + gdk-pixbuf = attrs: { + buildInputs = [ gdk-pixbuf ]; + }; + + gtk-sys = attrs: { + buildInputs = [ gtk3 ]; + nativeBuildInputs = [ pkg-config ]; + }; + + gtk4-sys = attrs: { + buildInputs = [ gtk4 ]; + nativeBuildInputs = [ pkg-config ]; + }; + + gdk4-sys = attrs: { + buildInputs = [ gtk4 ]; + nativeBuildInputs = [ pkg-config ]; + }; + + gsk4-sys = attrs: { + buildInputs = [ gtk4 ]; + nativeBuildInputs = [ pkg-config ]; + }; + + libgit2-sys = 
attrs: { + LIBGIT2_SYS_USE_PKG_CONFIG = true; + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ openssl zlib libgit2 ]; + }; + + libseat-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ seatd ]; + }; + + libsqlite3-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ sqlite ]; + }; + + libssh2-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ openssl zlib libssh2 ]; + }; + + libdbus-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ dbus ]; + }; + + libudev-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ udev ]; + }; + + graphene-sys = attrs: { + nativeBuildInputs = [ pkg-config gobject-introspection ]; + buildInputs = [ graphene ]; + }; + + nettle-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ nettle clang ]; + LIBCLANG_PATH = "${llvmPackages.libclang.lib}/lib"; + }; + + openssl = attrs: { + buildInputs = [ openssl ]; + }; + + openssl-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ openssl ]; + }; + + pam-sys = attr: { + buildInputs = [ linux-pam ]; + }; + + pango-sys = attr: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ pango ]; + }; + + pq-sys = attr: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ postgresql ]; + }; + + prost-build = attr: { + nativeBuildInputs = [ protobuf ]; + }; + + rdkafka-sys = attr: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ rdkafka ]; + }; + + rink = attrs: { + buildInputs = [ gmp ]; + crateBin = [{ name = "rink"; path = "src/bin/rink.rs"; }]; + }; + + security-framework-sys = attr: { + propagatedBuildInputs = lib.optional stdenv.isDarwin Security; + }; + + sequoia-openpgp = attrs: { + buildInputs = [ gmp ]; + }; + + sequoia-openpgp-ffi = attrs: { + buildInputs = [ gmp ]; + }; + + sequoia-ipc = attrs: { + buildInputs = [ gmp ]; + }; + + sequoia-guide = attrs: { + buildInputs = [ gmp ]; + }; + + pangocairo-sys = attr: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ pango ]; + }; + + sequoia-store = attrs: { + nativeBuildInputs = [ capnproto ]; + buildInputs = [ sqlite gmp ]; + }; + + sequoia-sq = attrs: { + buildInputs = [ sqlite gmp ]; + }; + + sequoia-tool = attrs: { + nativeBuildInputs = [ capnproto ]; + buildInputs = [ sqlite gmp ]; + }; + + serde_derive = attrs: { + buildInputs = lib.optional stdenv.isDarwin Security; + }; + + servo-fontconfig-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ freetype fontconfig ]; + }; + + thrussh-libsodium = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ libsodium ]; + }; + + xcb = attrs: { + buildInputs = [ python3 ]; + }; + + atk-sys = attrs: { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ atk ]; + }; + +} diff --git a/nixpkgs/pkgs/build-support/rust/fetch-cargo-tarball/cargo-vendor-normalise.py b/nixpkgs/pkgs/build-support/rust/fetch-cargo-tarball/cargo-vendor-normalise.py new file mode 100755 index 000000000000..90933b089c92 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/fetch-cargo-tarball/cargo-vendor-normalise.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +import sys + +import toml + + +def quote(s: str) -> str: + escaped = s.replace('"', r"\"").replace("\n", r"\n").replace("\\", "\\\\") + return '"{}"'.format(escaped) + + +def main() -> None: + data = toml.load(sys.stdin) + + # There is no dependency to vendor in this project. 
+ if not list(data.keys()) == ["source"]: + return + + # this value is non deterministic + data["source"]["vendored-sources"]["directory"] = "@vendor@" + + lines = [] + inner = data["source"] + for source, attrs in sorted(inner.items()): + lines.append("[source.{}]".format(quote(source))) + if source == "vendored-sources": + lines.append('"directory" = "@vendor@"\n') + else: + for key, value in sorted(attrs.items()): + attr = "{} = {}".format(quote(key), quote(value)) + lines.append(attr) + lines.append("") + + result = "\n".join(lines) + real = toml.loads(result) + assert real == data, "output = {} while input = {}".format(real, data) + + print(result) + + +if __name__ == "__main__": + main() diff --git a/nixpkgs/pkgs/build-support/rust/fetch-cargo-tarball/default.nix b/nixpkgs/pkgs/build-support/rust/fetch-cargo-tarball/default.nix new file mode 100644 index 000000000000..adbfe98d8103 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/fetch-cargo-tarball/default.nix @@ -0,0 +1,116 @@ +{ lib, stdenv, cacert, git, cargo, python3 }: +let cargo-vendor-normalise = stdenv.mkDerivation { + name = "cargo-vendor-normalise"; + src = ./cargo-vendor-normalise.py; + nativeBuildInputs = [ python3.pkgs.wrapPython ]; + dontUnpack = true; + installPhase = "install -D $src $out/bin/cargo-vendor-normalise"; + pythonPath = [ python3.pkgs.toml ]; + postFixup = "wrapPythonPrograms"; + doInstallCheck = true; + installCheckPhase = '' + # check that ../fetchcargo-default-config.toml is a fix point + reference=${../fetchcargo-default-config.toml} + < $reference $out/bin/cargo-vendor-normalise > test; + cmp test $reference + ''; + preferLocalBuild = true; +}; +in +{ name ? "cargo-deps" +, src ? null +, srcs ? [] +, patches ? [] +, sourceRoot ? "" +, cargoUpdateHook ? "" +, nativeBuildInputs ? [] +, ... +} @ args: + +let hash_ = + if args ? hash then + { + outputHashAlgo = if args.hash == "" then "sha256" else null; + outputHash = args.hash; + } + else if args ? sha256 then { outputHashAlgo = "sha256"; outputHash = args.sha256; } + else throw "fetchCargoTarball requires a hash for ${name}"; +in stdenv.mkDerivation ({ + name = "${name}-vendor.tar.gz"; + nativeBuildInputs = [ cacert git cargo-vendor-normalise cargo ] ++ nativeBuildInputs; + + buildPhase = '' + runHook preBuild + + # Ensure deterministic Cargo vendor builds + export SOURCE_DATE_EPOCH=1 + + if [[ ! -f Cargo.lock ]]; then + echo + echo "ERROR: The Cargo.lock file doesn't exist" + echo + echo "Cargo.lock is needed to make sure that cargoHash/cargoSha256 doesn't change" + echo "when the registry is updated." + echo + + exit 1 + fi + + # Keep the original around for copyLockfile + cp Cargo.lock Cargo.lock.orig + + export CARGO_HOME=$(mktemp -d cargo-home.XXX) + CARGO_CONFIG=$(mktemp cargo-config.XXXX) + + if [[ -n "$NIX_CRATES_INDEX" ]]; then + cat >$CARGO_HOME/config.toml <<EOF + [source.crates-io] + replace-with = 'mirror' + [source.mirror] + registry = "$NIX_CRATES_INDEX" + EOF + fi + + ${cargoUpdateHook} + + # Override the `http.cainfo` option usually specified in `.cargo/config`. + export CARGO_HTTP_CAINFO=${cacert}/etc/ssl/certs/ca-bundle.crt + + if grep '^source = "git' Cargo.lock; then + echo + echo "ERROR: The Cargo.lock contains git dependencies" + echo + echo "This is currently not supported in the fixed-output derivation fetcher." + echo "Use cargoLock.lockFile / importCargoLock instead." 
+ echo + + exit 1 + fi + + cargo vendor $name --respect-source-config | cargo-vendor-normalise > $CARGO_CONFIG + + # Create an empty vendor directory when there is no dependency to vendor + mkdir -p $name + # Add the Cargo.lock to allow hash invalidation + cp Cargo.lock.orig $name/Cargo.lock + + # Packages with git dependencies generate non-default cargo configs, so + # always install it rather than trying to write a standard default template. + install -D $CARGO_CONFIG $name/.cargo/config; + + runHook postBuild + ''; + + # Build a reproducible tar, per instructions at https://reproducible-builds.org/docs/archives/ + installPhase = '' + tar --owner=0 --group=0 --numeric-owner --format=gnu \ + --sort=name --mtime="@$SOURCE_DATE_EPOCH" \ + -czf $out $name + ''; + + inherit (hash_) outputHashAlgo outputHash; + + impureEnvVars = lib.fetchers.proxyImpureEnvVars ++ [ "NIX_CRATES_INDEX" ]; +} // (builtins.removeAttrs args [ + "name" "sha256" "cargoUpdateHook" "nativeBuildInputs" +])) diff --git a/nixpkgs/pkgs/build-support/rust/fetchcargo-default-config.toml b/nixpkgs/pkgs/build-support/rust/fetchcargo-default-config.toml new file mode 100755 index 000000000000..dd8ebbc32d31 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/fetchcargo-default-config.toml @@ -0,0 +1,7 @@ +[source."crates-io"] +"replace-with" = "vendored-sources" + +[source."vendored-sources"] +"directory" = "@vendor@" + + diff --git a/nixpkgs/pkgs/build-support/rust/fetchcrate.nix b/nixpkgs/pkgs/build-support/rust/fetchcrate.nix new file mode 100644 index 000000000000..423f4d786fde --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/fetchcrate.nix @@ -0,0 +1,20 @@ +{ lib, fetchzip, fetchurl }: + +{ crateName ? args.pname +, pname ? null + # The `dl` field of the registry's index configuration + # https://doc.rust-lang.org/cargo/reference/registry-index.html#index-configuration +, registryDl ? "https://crates.io/api/v1/crates" +, version +, unpack ? true +, ... +} @ args: + +assert pname == null || pname == crateName; + +(if unpack then fetchzip else fetchurl) ({ + name = "${crateName}-${version}.tar.gz"; + url = "${registryDl}/${crateName}/${version}/download"; +} // lib.optionalAttrs unpack { + extension = "tar.gz"; +} // removeAttrs args [ "crateName" "pname" "registryDl" "version" "unpack" ]) diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-build-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-build-hook.sh new file mode 100644 index 000000000000..ed982c7ff30a --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-build-hook.sh @@ -0,0 +1,53 @@ +declare -a cargoBuildFlags + +cargoBuildHook() { + echo "Executing cargoBuildHook" + + runHook preBuild + + # Let stdenv handle stripping, for consistency and to not break + # separateDebugInfo. + export "CARGO_PROFILE_${cargoBuildType@U}_STRIP"=false + + if [ ! 
-z "${buildAndTestSubdir-}" ]; then + # ensure the output doesn't end up in the subdirectory + export CARGO_TARGET_DIR="$(pwd)/target" + + pushd "${buildAndTestSubdir}" + fi + + if [ "${cargoBuildType}" != "debug" ]; then + cargoBuildProfileFlag="--profile ${cargoBuildType}" + fi + + if [ -n "${cargoBuildNoDefaultFeatures-}" ]; then + cargoBuildNoDefaultFeaturesFlag=--no-default-features + fi + + if [ -n "${cargoBuildFeatures-}" ]; then + cargoBuildFeaturesFlag="--features=${cargoBuildFeatures// /,}" + fi + + ( + set -x + @setEnv@ cargo build -j $NIX_BUILD_CORES \ + --target @rustHostPlatformSpec@ \ + --frozen \ + ${cargoBuildProfileFlag} \ + ${cargoBuildNoDefaultFeaturesFlag} \ + ${cargoBuildFeaturesFlag} \ + ${cargoBuildFlags} + ) + + if [ ! -z "${buildAndTestSubdir-}" ]; then + popd + fi + + runHook postBuild + + echo "Finished cargoBuildHook" +} + +if [ -z "${dontCargoBuild-}" ] && [ -z "${buildPhase-}" ]; then + buildPhase=cargoBuildHook +fi diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-check-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-check-hook.sh new file mode 100644 index 000000000000..971a140ec178 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-check-hook.sh @@ -0,0 +1,55 @@ +declare -a checkFlags +declare -a cargoTestFlags + +cargoCheckHook() { + echo "Executing cargoCheckHook" + + runHook preCheck + + if [[ -n "${buildAndTestSubdir-}" ]]; then + pushd "${buildAndTestSubdir}" + fi + + if [[ -z ${dontUseCargoParallelTests-} ]]; then + threads=$NIX_BUILD_CORES + else + threads=1 + fi + + if [ "${cargoCheckType}" != "debug" ]; then + cargoCheckProfileFlag="--profile ${cargoCheckType}" + fi + + if [ -n "${cargoCheckNoDefaultFeatures-}" ]; then + cargoCheckNoDefaultFeaturesFlag=--no-default-features + fi + + if [ -n "${cargoCheckFeatures-}" ]; then + cargoCheckFeaturesFlag="--features=${cargoCheckFeatures// /,}" + fi + + argstr="${cargoCheckProfileFlag} ${cargoCheckNoDefaultFeaturesFlag} ${cargoCheckFeaturesFlag} + --target @rustHostPlatformSpec@ --frozen ${cargoTestFlags}" + + ( + set -x + cargo test \ + -j $NIX_BUILD_CORES \ + ${argstr} -- \ + --test-threads=${threads} \ + ${checkFlags} \ + ${checkFlagsArray+"${checkFlagsArray[@]}"} + ) + + if [[ -n "${buildAndTestSubdir-}" ]]; then + popd + fi + + echo "Finished cargoCheckHook" + + runHook postCheck +} + +if [ -z "${dontCargoCheck-}" ] && [ -z "${checkPhase-}" ]; then + checkPhase=cargoCheckHook +fi diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-install-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-install-hook.sh new file mode 100644 index 000000000000..24a6e6fa9eb3 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-install-hook.sh @@ -0,0 +1,49 @@ +cargoInstallPostBuildHook() { + echo "Executing cargoInstallPostBuildHook" + + releaseDir=target/@targetSubdirectory@/$cargoBuildType + tmpDir="${releaseDir}-tmp"; + + mkdir -p $tmpDir + cp -r ${releaseDir}/* $tmpDir/ + bins=$(find $tmpDir \ + -maxdepth 1 \ + -type f \ + -executable ! 
\( -regex ".*\.\(so.[0-9.]+\|so\|a\|dylib\)" \)) + + echo "Finished cargoInstallPostBuildHook" +} + +cargoInstallHook() { + echo "Executing cargoInstallHook" + + runHook preInstall + + # rename the output dir to a architecture independent one + + releaseDir=target/@targetSubdirectory@/$cargoBuildType + tmpDir="${releaseDir}-tmp"; + + mapfile -t targets < <(find "$NIX_BUILD_TOP" -type d | grep "${tmpDir}$") + for target in "${targets[@]}"; do + rm -rf "$target/../../${cargoBuildType}" + ln -srf "$target" "$target/../../" + done + mkdir -p $out/bin $out/lib + + xargs -r cp -t $out/bin <<< $bins + find $tmpDir \ + -maxdepth 1 \ + -regex ".*\.\(so.[0-9.]+\|so\|a\|dylib\)" \ + -print0 | xargs -r -0 cp -t $out/lib + rmdir --ignore-fail-on-non-empty $out/lib $out/bin + runHook postInstall + + echo "Finished cargoInstallHook" +} + + +if [ -z "${dontCargoInstall-}" ] && [ -z "${installPhase-}" ]; then + installPhase=cargoInstallHook + postBuildHooks+=(cargoInstallPostBuildHook) +fi diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-nextest-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-nextest-hook.sh new file mode 100644 index 000000000000..29ba18a6a1e3 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-nextest-hook.sh @@ -0,0 +1,54 @@ +declare -a checkFlags +declare -a cargoTestFlags + +cargoNextestHook() { + echo "Executing cargoNextestHook" + + runHook preCheck + + if [[ -n "${buildAndTestSubdir-}" ]]; then + pushd "${buildAndTestSubdir}" + fi + + if [[ -z ${dontUseCargoParallelTests-} ]]; then + threads=$NIX_BUILD_CORES + else + threads=1 + fi + + if [ "${cargoCheckType}" != "debug" ]; then + cargoCheckProfileFlag="--cargo-profile ${cargoCheckType}" + fi + + if [ -n "${cargoCheckNoDefaultFeatures-}" ]; then + cargoCheckNoDefaultFeaturesFlag=--no-default-features + fi + + if [ -n "${cargoCheckFeatures-}" ]; then + cargoCheckFeaturesFlag="--features=${cargoCheckFeatures// /,}" + fi + + argstr="${cargoCheckProfileFlag} ${cargoCheckNoDefaultFeaturesFlag} ${cargoCheckFeaturesFlag} + --target @rustHostPlatformSpec@ --frozen ${cargoTestFlags}" + + ( + set -x + cargo nextest run \ + -j ${threads} \ + ${argstr} -- \ + ${checkFlags} \ + ${checkFlagsArray+"${checkFlagsArray[@]}"} + ) + + if [[ -n "${buildAndTestSubdir-}" ]]; then + popd + fi + + echo "Finished cargoNextestHook" + + runHook postCheck +} + +if [ -z "${dontCargoCheck-}" ] && [ -z "${checkPhase-}" ]; then + checkPhase=cargoNextestHook +fi diff --git a/nixpkgs/pkgs/build-support/rust/hooks/cargo-setup-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/cargo-setup-hook.sh new file mode 100644 index 000000000000..693c0b08759e --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/hooks/cargo-setup-hook.sh @@ -0,0 +1,93 @@ +cargoSetupPostUnpackHook() { + echo "Executing cargoSetupPostUnpackHook" + + # Some cargo builds include build hooks that modify their own vendor + # dependencies. This copies the vendor directory into the build tree and makes + # it writable. If we're using a tarball, the unpackFile hook already handles + # this for us automatically. + if [ -z $cargoVendorDir ]; then + if [ -d "$cargoDeps" ]; then + local dest=$(stripHash "$cargoDeps") + cp -Lr --reflink=auto -- "$cargoDeps" "$dest" + chmod -R +644 -- "$dest" + else + unpackFile "$cargoDeps" + fi + export cargoDepsCopy="$(realpath "$(stripHash $cargoDeps)")" + else + cargoDepsCopy="$(realpath "$(pwd)/$sourceRoot/${cargoRoot:+$cargoRoot/}${cargoVendorDir}")" + fi + + if [ ! 
-d .cargo ]; then + mkdir .cargo + fi + + config="$cargoDepsCopy/.cargo/config"; + if [[ ! -e $config ]]; then + config=@defaultConfig@ + fi; + + tmp_config=$(mktemp) + substitute $config $tmp_config \ + --subst-var-by vendor "$cargoDepsCopy" + cat ${tmp_config} >> .cargo/config + + cat >> .cargo/config <<'EOF' + @cargoConfig@ +EOF + + echo "Finished cargoSetupPostUnpackHook" +} + +# After unpacking and applying patches, check that the Cargo.lock matches our +# src package. Note that we do this after the patchPhase, because the +# patchPhase may create the Cargo.lock if upstream has not shipped one. +cargoSetupPostPatchHook() { + echo "Executing cargoSetupPostPatchHook" + + cargoDepsLockfile="$cargoDepsCopy/Cargo.lock" + srcLockfile="$(pwd)/${cargoRoot:+$cargoRoot/}Cargo.lock" + + echo "Validating consistency between $srcLockfile and $cargoDepsLockfile" + if ! @diff@ $srcLockfile $cargoDepsLockfile; then + + # If the diff failed, first double-check that the file exists, so we can + # give a friendlier error msg. + if ! [ -e $srcLockfile ]; then + echo "ERROR: Missing Cargo.lock from src. Expected to find it at: $srcLockfile" + echo "Hint: You can use the cargoPatches attribute to add a Cargo.lock manually to the build." + exit 1 + fi + + if ! [ -e $cargoDepsLockfile ]; then + echo "ERROR: Missing lockfile from cargo vendor. Expected to find it at: $cargoDepsLockfile" + exit 1 + fi + + echo + echo "ERROR: cargoHash or cargoSha256 is out of date" + echo + echo "Cargo.lock is not the same in $cargoDepsCopy" + echo + echo "To fix the issue:" + echo '1. Set cargoHash/cargoSha256 to an empty string: `cargoHash = "";`' + echo '2. Build the derivation and wait for it to fail with a hash mismatch' + echo '3. Copy the "got: sha256-..." value back into the cargoHash field' + echo ' You should have: cargoHash = "sha256-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX=";' + echo + + exit 1 + fi + + unset cargoDepsCopy + + echo "Finished cargoSetupPostPatchHook" +} + +if [ -z "${dontCargoSetupPostUnpack-}" ]; then + postUnpackHooks+=(cargoSetupPostUnpackHook) +fi + +if [ -z ${cargoVendorDir-} ]; then + postPatchHooks+=(cargoSetupPostPatchHook) +fi diff --git a/nixpkgs/pkgs/build-support/rust/hooks/default.nix b/nixpkgs/pkgs/build-support/rust/hooks/default.nix new file mode 100644 index 000000000000..205d085d3507 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/hooks/default.nix @@ -0,0 +1,100 @@ +{ buildPackages +, callPackage +, cargo +, cargo-nextest +, clang +, lib +, makeSetupHook +, maturin +, rust +, rustc +, stdenv + +# This confusingly-named parameter indicates the *subdirectory of +# `target/` from which to copy the build artifacts. It is derived +# from a stdenv platform (or a JSON file). +, target ? 
rust.lib.toRustTargetSpecShort stdenv.hostPlatform +}: + +{ + cargoBuildHook = callPackage ({ }: + makeSetupHook { + name = "cargo-build-hook.sh"; + propagatedBuildInputs = [ cargo ]; + substitutions = { + inherit (rust.envVars) rustHostPlatformSpec setEnv; + }; + } ./cargo-build-hook.sh) {}; + + cargoCheckHook = callPackage ({ }: + makeSetupHook { + name = "cargo-check-hook.sh"; + propagatedBuildInputs = [ cargo ]; + substitutions = { + inherit (rust.envVars) rustHostPlatformSpec; + }; + } ./cargo-check-hook.sh) {}; + + cargoInstallHook = callPackage ({ }: + makeSetupHook { + name = "cargo-install-hook.sh"; + propagatedBuildInputs = [ ]; + substitutions = { + targetSubdirectory = target; + }; + } ./cargo-install-hook.sh) {}; + + cargoNextestHook = callPackage ({ }: + makeSetupHook { + name = "cargo-nextest-hook.sh"; + propagatedBuildInputs = [ cargo cargo-nextest ]; + substitutions = { + inherit (rust.envVars) rustHostPlatformSpec; + }; + } ./cargo-nextest-hook.sh) {}; + + cargoSetupHook = callPackage ({ }: + makeSetupHook { + name = "cargo-setup-hook.sh"; + propagatedBuildInputs = [ ]; + substitutions = { + defaultConfig = ../fetchcargo-default-config.toml; + + # Specify the stdenv's `diff` by abspath to ensure that the user's build + # inputs do not cause us to find the wrong `diff`. + diff = "${lib.getBin buildPackages.diffutils}/bin/diff"; + + cargoConfig = '' + [target."${rust.toRustTarget stdenv.buildPlatform}"] + "linker" = "${rust.envVars.ccForBuild}" + ${lib.optionalString (stdenv.buildPlatform.config != stdenv.hostPlatform.config) '' + [target."${rust.toRustTarget stdenv.hostPlatform}"] + "linker" = "${rust.envVars.ccForHost}" + ''} + "rustflags" = [ "-C", "target-feature=${if stdenv.hostPlatform.isStatic then "+" else "-"}crt-static" ] + ''; + }; + } ./cargo-setup-hook.sh) {}; + + maturinBuildHook = callPackage ({ pkgsHostTarget }: + makeSetupHook { + name = "maturin-build-hook.sh"; + propagatedBuildInputs = [ + pkgsHostTarget.maturin + pkgsHostTarget.cargo + pkgsHostTarget.rustc + ]; + substitutions = { + inherit (rust.envVars) rustTargetPlatformSpec setEnv; + }; + } ./maturin-build-hook.sh) {}; + + bindgenHook = callPackage ({}: makeSetupHook { + name = "rust-bindgen-hook"; + substitutions = { + libclang = clang.cc.lib; + inherit clang; + }; + } + ./rust-bindgen-hook.sh) {}; +} diff --git a/nixpkgs/pkgs/build-support/rust/hooks/maturin-build-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/maturin-build-hook.sh new file mode 100644 index 000000000000..d5ff069290ba --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/hooks/maturin-build-hook.sh @@ -0,0 +1,36 @@ +maturinBuildHook() { + echo "Executing maturinBuildHook" + + runHook preBuild + + if [ ! -z "${buildAndTestSubdir-}" ]; then + pushd "${buildAndTestSubdir}" + fi + + ( + set -x + @setEnv@ maturin build \ + --jobs=$NIX_BUILD_CORES \ + --frozen \ + --target @rustTargetPlatformSpec@ \ + --manylinux off \ + --strip \ + --release \ + ${maturinBuildFlags-} + ) + + if [ ! -z "${buildAndTestSubdir-}" ]; then + popd + fi + + # Move the wheel to dist/ so that regular Python tooling can find it. 
+ mkdir -p dist + mv target/wheels/*.whl dist/ + + # These are python build hooks and may depend on ./dist + runHook postBuild + + echo "Finished maturinBuildHook" +} + +buildPhase=maturinBuildHook diff --git a/nixpkgs/pkgs/build-support/rust/hooks/rust-bindgen-hook.sh b/nixpkgs/pkgs/build-support/rust/hooks/rust-bindgen-hook.sh new file mode 100644 index 000000000000..53624b124f2b --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/hooks/rust-bindgen-hook.sh @@ -0,0 +1,13 @@ +# populates LIBCLANG_PATH and BINDGEN_EXTRA_CLANG_ARGS for rust projects that +# depend on the bindgen crate + +# if you modify this, you probably also need to modify the wrapper for the cli +# of bindgen in pkgs/development/tools/rust/bindgen/wrapper.sh + +populateBindgenEnv () { + export LIBCLANG_PATH=@libclang@/lib + BINDGEN_EXTRA_CLANG_ARGS="$(< @clang@/nix-support/cc-cflags) $(< @clang@/nix-support/libc-cflags) $(< @clang@/nix-support/libcxx-cxxflags) $NIX_CFLAGS_COMPILE" + export BINDGEN_EXTRA_CLANG_ARGS +} + +postHook="${postHook:-}"$'\n'"populateBindgenEnv"$'\n' diff --git a/nixpkgs/pkgs/build-support/rust/import-cargo-lock.nix b/nixpkgs/pkgs/build-support/rust/import-cargo-lock.nix new file mode 100644 index 000000000000..c17b0e41cca8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/import-cargo-lock.nix @@ -0,0 +1,262 @@ +{ fetchgit, fetchurl, lib, writers, python3Packages, runCommand, cargo, jq }: + +{ + # Cargo lock file + lockFile ? null + + # Cargo lock file contents as string +, lockFileContents ? null + + # Allow `builtins.fetchGit` to be used to not require hashes for git dependencies +, allowBuiltinFetchGit ? false + + # Additional registries to pull sources from + # { "https://<registry index URL>" = "https://<registry download URL>"; } + # where: + # - "index URL" is the "index" value of the configuration entry for that registry + # https://doc.rust-lang.org/cargo/reference/registries.html#using-an-alternate-registry + # - "download URL" is the "dl" value of its associated index configuration + # https://doc.rust-lang.org/cargo/reference/registry-index.html#index-configuration +, extraRegistries ? {} + + # Hashes for git dependencies. +, outputHashes ? {} +} @ args: + +assert (lockFile == null) != (lockFileContents == null); + +let + # Parse a git source into different components. + parseGit = src: + let + parts = builtins.match ''git\+([^?]+)(\?(rev|tag|branch)=(.*))?#(.*)'' src; + type = builtins.elemAt parts 2; # rev, tag or branch + value = builtins.elemAt parts 3; + in + if parts == null then null + else { + url = builtins.elemAt parts 0; + sha = builtins.elemAt parts 4; + } // lib.optionalAttrs (type != null) { inherit type value; }; + + # shadows args.lockFileContents + lockFileContents = + if lockFile != null + then builtins.readFile lockFile + else args.lockFileContents; + + parsedLockFile = builtins.fromTOML lockFileContents; + + packages = parsedLockFile.package; + + # There is no source attribute for the source package itself. But + # since we do not want to vendor the source package anyway, we can + # safely skip it. + depPackages = builtins.filter (p: p ? "source") packages; + + # Create dependent crates from packages. + # + # Force evaluation of the git SHA -> hash mapping, so that an error is + # thrown if there are stale hashes. We cannot rely on gitShaOutputHash + # being evaluated otherwise, since there could be no git dependencies. 
+ depCrates = builtins.deepSeq gitShaOutputHash (builtins.map mkCrate depPackages); + + # Map package name + version to git commit SHA for packages with a git source. + namesGitShas = builtins.listToAttrs ( + builtins.map nameGitSha (builtins.filter (pkg: lib.hasPrefix "git+" pkg.source) depPackages) + ); + + nameGitSha = pkg: let gitParts = parseGit pkg.source; in { + name = "${pkg.name}-${pkg.version}"; + value = gitParts.sha; + }; + + # Convert the attrset provided through the `outputHashes` argument to a + # a mapping from git commit SHA -> output hash. + # + # There may be multiple different packages with different names + # originating from the same git repository (typically a Cargo + # workspace). By using the git commit SHA as a universal identifier, + # the user does not have to specify the output hash for every package + # individually. + gitShaOutputHash = lib.mapAttrs' (nameVer: hash: + let + unusedHash = throw "A hash was specified for ${nameVer}, but there is no corresponding git dependency."; + rev = namesGitShas.${nameVer} or unusedHash; in { + name = rev; + value = hash; + }) outputHashes; + + # We can't use the existing fetchCrate function, since it uses a + # recursive hash of the unpacked crate. + fetchCrate = pkg: downloadUrl: + let + checksum = pkg.checksum or parsedLockFile.metadata."checksum ${pkg.name} ${pkg.version} (${pkg.source})"; + in + assert lib.assertMsg (checksum != null) '' + Package ${pkg.name} does not have a checksum. + ''; + fetchurl { + name = "crate-${pkg.name}-${pkg.version}.tar.gz"; + url = "${downloadUrl}/${pkg.name}/${pkg.version}/download"; + sha256 = checksum; + }; + + registries = { + "https://github.com/rust-lang/crates.io-index" = "https://crates.io/api/v1/crates"; + } // extraRegistries; + + # Replaces values inherited by workspace members. + replaceWorkspaceValues = writers.writePython3 "replace-workspace-values" + { libraries = with python3Packages; [ tomli tomli-w ]; flakeIgnore = [ "E501" "W503" ]; } + (builtins.readFile ./replace-workspace-values.py); + + # Fetch and unpack a crate. + mkCrate = pkg: + let + gitParts = parseGit pkg.source; + registryIndexUrl = lib.removePrefix "registry+" pkg.source; + in + if lib.hasPrefix "registry+" pkg.source && builtins.hasAttr registryIndexUrl registries then + let + crateTarball = fetchCrate pkg registries.${registryIndexUrl}; + in runCommand "${pkg.name}-${pkg.version}" {} '' + mkdir $out + tar xf "${crateTarball}" -C $out --strip-components=1 + + # Cargo is happy with largely empty metadata. + printf '{"files":{},"package":"${crateTarball.outputHash}"}' > "$out/.cargo-checksum.json" + '' + else if gitParts != null then + let + missingHash = throw '' + No hash was found while vendoring the git dependency ${pkg.name}-${pkg.version}. You can add + a hash through the `outputHashes` argument of `importCargoLock`: + + outputHashes = { + "${pkg.name}-${pkg.version}" = "<hash>"; + }; + + If you use `buildRustPackage`, you can add this attribute to the `cargoLock` + attribute set. + ''; + tree = + if gitShaOutputHash ? ${gitParts.sha} then + fetchgit { + inherit (gitParts) url; + rev = gitParts.sha; # The commit SHA is always available. 
+ sha256 = gitShaOutputHash.${gitParts.sha}; + } + else if allowBuiltinFetchGit then + builtins.fetchGit { + inherit (gitParts) url; + rev = gitParts.sha; + allRefs = true; + submodules = true; + } + else + missingHash; + in runCommand "${pkg.name}-${pkg.version}" {} '' + tree=${tree} + + # If the target package is in a workspace, or if it's the top-level + # crate, we should find the crate path using `cargo metadata`. + # Some packages do not have a Cargo.toml at the top-level, + # but only in nested directories. + # Only check the top-level Cargo.toml, if it actually exists + if [[ -f $tree/Cargo.toml ]]; then + crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $tree/Cargo.toml | \ + ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path') + fi + + # If the repository is not a workspace the package might be in a subdirectory. + if [[ -z $crateCargoTOML ]]; then + for manifest in $(find $tree -name "Cargo.toml"); do + echo Looking at $manifest + crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path "$manifest" | ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path' || :) + if [[ ! -z $crateCargoTOML ]]; then + break + fi + done + + if [[ -z $crateCargoTOML ]]; then + >&2 echo "Cannot find path for crate '${pkg.name}-${pkg.version}' in the tree in: $tree" + exit 1 + fi + fi + + echo Found crate ${pkg.name} at $crateCargoTOML + tree=$(dirname $crateCargoTOML) + + cp -prvL "$tree/" $out + chmod u+w $out + + if grep -q workspace "$out/Cargo.toml"; then + chmod u+w "$out/Cargo.toml" + ${replaceWorkspaceValues} "$out/Cargo.toml" "${tree}/Cargo.toml" + fi + + # Cargo is happy with empty metadata. + printf '{"files":{},"package":null}' > "$out/.cargo-checksum.json" + + # Set up configuration for the vendor directory. + cat > $out/.cargo-config <<EOF + [source."${gitParts.url}${lib.optionalString (gitParts ? type) "?${gitParts.type}=${gitParts.value}"}"] + git = "${gitParts.url}" + ${lib.optionalString (gitParts ? type) "${gitParts.type} = \"${gitParts.value}\""} + replace-with = "vendored-sources" + EOF + '' + else throw "Cannot handle crate source: ${pkg.source}"; + + vendorDir = runCommand "cargo-vendor-dir" + (if lockFile == null then { + inherit lockFileContents; + passAsFile = [ "lockFileContents" ]; + } else { + passthru = { + inherit lockFile; + }; + }) '' + mkdir -p $out/.cargo + + ${ + if lockFile != null + then "ln -s ${lockFile} $out/Cargo.lock" + else "cp $lockFileContentsPath $out/Cargo.lock" + } + + cat > $out/.cargo/config <<EOF +[source.crates-io] +replace-with = "vendored-sources" + +[source.vendored-sources] +directory = "cargo-vendor-dir" +EOF + + declare -A keysSeen + + for registry in ${toString (builtins.attrNames extraRegistries)}; do + cat >> $out/.cargo/config <<EOF + +[source."$registry"] +registry = "$registry" +replace-with = "vendored-sources" +EOF + done + + for crate in ${toString depCrates}; do + # Link the crate directory, removing the output path hash from the destination. 
+ ln -s "$crate" $out/$(basename "$crate" | cut -c 34-) + + if [ -e "$crate/.cargo-config" ]; then + key=$(sed 's/\[source\."\(.*\)"\]/\1/; t; d' < "$crate/.cargo-config") + if [[ -z ''${keysSeen[$key]} ]]; then + keysSeen[$key]=1 + cat "$crate/.cargo-config" >> $out/.cargo/config + fi + fi + done + ''; +in + vendorDir diff --git a/nixpkgs/pkgs/build-support/rust/lib/default.nix b/nixpkgs/pkgs/build-support/rust/lib/default.nix new file mode 100644 index 000000000000..ceca7323176c --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/lib/default.nix @@ -0,0 +1,150 @@ +{ lib +, stdenv +, buildPackages +, targetPackages +}: + +rec { + # https://doc.rust-lang.org/reference/conditional-compilation.html#target_arch + toTargetArch = platform: + /**/ if platform ? rustc.platform then platform.rustc.platform.arch + else if platform.isAarch32 then "arm" + else if platform.isMips64 then "mips64" # never add "el" suffix + else if platform.isPower64 then "powerpc64" # never add "le" suffix + else platform.parsed.cpu.name; + + # https://doc.rust-lang.org/reference/conditional-compilation.html#target_os + toTargetOs = platform: + /**/ if platform ? rustc.platform then platform.rustc.platform.os or "none" + else if platform.isDarwin then "macos" + else platform.parsed.kernel.name; + + # https://doc.rust-lang.org/reference/conditional-compilation.html#target_family + toTargetFamily = platform: + if platform ? rustc.platform.target-family + then + ( + # Since https://github.com/rust-lang/rust/pull/84072 + # `target-family` is a list instead of single value. + let + f = platform.rustc.platform.target-family; + in + if builtins.isList f then f else [ f ] + ) + else lib.optional platform.isUnix "unix" + ++ lib.optional platform.isWindows "windows"; + + # https://doc.rust-lang.org/reference/conditional-compilation.html#target_vendor + toTargetVendor = platform: let + inherit (platform.parsed) vendor; + in platform.rustc.platform.vendor or { + "w64" = "pc"; + }.${vendor.name} or vendor.name; + + # Returns the name of the rust target, even if it is custom. Adjustments are + # because rust has slightly different naming conventions than we do. + toRustTarget = platform: let + inherit (platform.parsed) cpu kernel abi; + cpu_ = platform.rustc.platform.arch or { + "armv7a" = "armv7"; + "armv7l" = "armv7"; + "armv6l" = "arm"; + "armv5tel" = "armv5te"; + "riscv64" = "riscv64gc"; + }.${cpu.name} or cpu.name; + vendor_ = toTargetVendor platform; + in platform.rustc.config + or "${cpu_}-${vendor_}-${kernel.name}${lib.optionalString (abi.name != "unknown") "-${abi.name}"}"; + + # Returns the name of the rust target if it is standard, or the json file + # containing the custom target spec. + toRustTargetSpec = platform: + if platform ? rustc.platform + then builtins.toFile (toRustTarget platform + ".json") (builtins.toJSON platform.rustc.platform) + else toRustTarget platform; + + # Returns the name of the rust target if it is standard, or the + # basename of the file containing the custom target spec, without + # the .json extension. + # + # This is the name used by Cargo for target subdirectories. 
+ toRustTargetSpecShort = platform: + lib.removeSuffix ".json" + (baseNameOf "${toRustTargetSpec platform}"); + + # When used as part of an environment variable name, triples are + # uppercased and have all hyphens replaced by underscores: + # + # https://github.com/rust-lang/cargo/pull/9169 + # https://github.com/rust-lang/cargo/issues/8285#issuecomment-634202431 + # + toRustTargetForUseInEnvVars = platform: + lib.strings.replaceStrings ["-"] ["_"] + (lib.strings.toUpper + (toRustTargetSpecShort platform)); + + # Returns true if the target is no_std + # https://github.com/rust-lang/rust/blob/2e44c17c12cec45b6a682b1e53a04ac5b5fcc9d2/src/bootstrap/config.rs#L415-L421 + IsNoStdTarget = platform: let rustTarget = toRustTarget platform; in + builtins.any (t: lib.hasInfix t rustTarget) ["-none" "nvptx" "switch" "-uefi"]; + + # These environment variables must be set when using `cargo-c` and + # several other tools which do not deal well with cross + # compilation. The symptom of the problem they fix is errors due + # to buildPlatform CFLAGS being passed to the + # hostPlatform-targeted compiler -- for example, `-m64` being + # passed on a build=x86_64/host=aarch64 compilation. + envVars = let + ccForBuild = "${buildPackages.stdenv.cc}/bin/${buildPackages.stdenv.cc.targetPrefix}cc"; + cxxForBuild = "${buildPackages.stdenv.cc}/bin/${buildPackages.stdenv.cc.targetPrefix}c++"; + ccForHost = "${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc"; + cxxForHost = "${stdenv.cc}/bin/${stdenv.cc.targetPrefix}c++"; + + # Unfortunately we must use the dangerous `targetPackages` here + # because hooks are artificially phase-shifted one slot earlier + # (they go in nativeBuildInputs, so the hostPlatform looks like + # a targetPlatform to them). + ccForTarget = "${targetPackages.stdenv.cc}/bin/${targetPackages.stdenv.cc.targetPrefix}cc"; + cxxForTarget = "${targetPackages.stdenv.cc}/bin/${targetPackages.stdenv.cc.targetPrefix}c++"; + + rustBuildPlatform = toRustTarget stdenv.buildPlatform; + rustBuildPlatformSpec = toRustTargetSpec stdenv.buildPlatform; + rustHostPlatform = toRustTarget stdenv.hostPlatform; + rustHostPlatformSpec = toRustTargetSpec stdenv.hostPlatform; + rustTargetPlatform = toRustTarget stdenv.targetPlatform; + rustTargetPlatformSpec = toRustTargetSpec stdenv.targetPlatform; + in { + inherit + ccForBuild cxxForBuild rustBuildPlatform rustBuildPlatformSpec + ccForHost cxxForHost rustHostPlatform rustHostPlatformSpec + ccForTarget cxxForTarget rustTargetPlatform rustTargetPlatformSpec; + + # Prefix this onto a command invocation in order to set the + # variables needed by cargo. + # + setEnv = '' + env \ + '' + # Due to a bug in how splicing and targetPackages works, in + # situations where targetPackages is irrelevant + # targetPackages.stdenv.cc is often simply wrong. We must omit + # the following lines when rustTargetPlatform collides with + # rustHostPlatform. 
+ + lib.optionalString (rustTargetPlatform != rustHostPlatform) '' + "CC_${toRustTargetForUseInEnvVars stdenv.targetPlatform}=${ccForTarget}" \ + "CXX_${toRustTargetForUseInEnvVars stdenv.targetPlatform}=${cxxForTarget}" \ + "CARGO_TARGET_${toRustTargetForUseInEnvVars stdenv.targetPlatform}_LINKER=${ccForTarget}" \ + '' + '' + "CC_${toRustTargetForUseInEnvVars stdenv.hostPlatform}=${ccForHost}" \ + "CXX_${toRustTargetForUseInEnvVars stdenv.hostPlatform}=${cxxForHost}" \ + "CARGO_TARGET_${toRustTargetForUseInEnvVars stdenv.hostPlatform}_LINKER=${ccForHost}" \ + '' + '' + "CC_${toRustTargetForUseInEnvVars stdenv.buildPlatform}=${ccForBuild}" \ + "CXX_${toRustTargetForUseInEnvVars stdenv.buildPlatform}=${cxxForBuild}" \ + "CARGO_TARGET_${toRustTargetForUseInEnvVars stdenv.buildPlatform}_LINKER=${ccForBuild}" \ + "CARGO_BUILD_TARGET=${rustBuildPlatform}" \ + "HOST_CC=${buildPackages.stdenv.cc}/bin/cc" \ + "HOST_CXX=${buildPackages.stdenv.cc}/bin/c++" \ + ''; + }; +} diff --git a/nixpkgs/pkgs/build-support/rust/replace-workspace-values.py b/nixpkgs/pkgs/build-support/rust/replace-workspace-values.py new file mode 100644 index 000000000000..acbc38c8ae39 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/replace-workspace-values.py @@ -0,0 +1,107 @@ +# This script implements the workspace inheritance mechanism described +# here: https://doc.rust-lang.org/cargo/reference/workspaces.html#the-package-table +# +# Please run `mypy --strict`, `black`, and `isort --profile black` on this after editing, thanks! + +import sys +from typing import Any + +import tomli +import tomli_w + + +def load_file(path: str) -> dict[str, Any]: + with open(path, "rb") as f: + return tomli.load(f) + + +def replace_key( + workspace_manifest: dict[str, Any], table: dict[str, Any], section: str, key: str +) -> bool: + if ( + isinstance(table[key], dict) + and "workspace" in table[key] + and table[key]["workspace"] is True + ): + print("replacing " + key) + + replaced = table[key] + del replaced["workspace"] + + workspace_copy = workspace_manifest[section][key] + + if section == "dependencies": + crate_features = replaced.get("features") + + if type(workspace_copy) is str: + replaced["version"] = workspace_copy + else: + replaced.update(workspace_copy) + + merged_features = (crate_features or []) + ( + workspace_copy.get("features") or [] + ) + + if len(merged_features) > 0: + # Dictionaries are guaranteed to be ordered (https://stackoverflow.com/a/7961425) + replaced["features"] = list(dict.fromkeys(merged_features)) + elif section == "package": + table[key] = replaced = workspace_copy + + return True + + return False + + +def replace_dependencies( + workspace_manifest: dict[str, Any], root: dict[str, Any] +) -> bool: + changed = False + + for key in ["dependencies", "dev-dependencies", "build-dependencies"]: + if key in root: + for k in root[key].keys(): + changed |= replace_key(workspace_manifest, root[key], "dependencies", k) + + return changed + + +def main() -> None: + top_cargo_toml = load_file(sys.argv[2]) + + if "workspace" not in top_cargo_toml: + # If top_cargo_toml is not a workspace manifest, then this script was probably + # ran on something that does not actually use workspace dependencies + print(f"{sys.argv[2]} is not a workspace manifest, doing nothing.") + return + + crate_manifest = load_file(sys.argv[1]) + workspace_manifest = top_cargo_toml["workspace"] + + if "workspace" in crate_manifest: + return + + changed = False + + for key in crate_manifest["package"].keys(): + changed |= replace_key( + 
workspace_manifest, crate_manifest["package"], "package", key + ) + + changed |= replace_dependencies(workspace_manifest, crate_manifest) + + if "target" in crate_manifest: + for key in crate_manifest["target"].keys(): + changed |= replace_dependencies( + workspace_manifest, crate_manifest["target"][key] + ) + + if not changed: + return + + with open(sys.argv[1], "wb") as f: + tomli_w.dump(crate_manifest, f) + + +if __name__ == "__main__": + main() diff --git a/nixpkgs/pkgs/build-support/rust/sysroot/Cargo.lock b/nixpkgs/pkgs/build-support/rust/sysroot/Cargo.lock new file mode 100644 index 000000000000..d9b0c25d02f2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/sysroot/Cargo.lock @@ -0,0 +1,44 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "alloc" +version = "0.0.0" +dependencies = [ + "compiler_builtins", + "core", +] + +[[package]] +name = "compiler_builtins" +version = "0.1.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f867ce54c09855ccd135ad4a50c777182a0c7af5ff20a8f537617bd648b10d50" +dependencies = [ + "rustc-std-workspace-core", +] + +[[package]] +name = "core" +version = "0.0.0" + +[[package]] +name = "nixpkgs-sysroot-stub-crate" +version = "0.0.0" +dependencies = [ + "alloc", + "compiler_builtins", + "core", +] + +[[package]] +name = "rustc-std-workspace-core" +version = "1.99.0" +dependencies = [ + "core", +] + +[[patch.unused]] +name = "rustc-std-workspace-alloc" +version = "1.99.0" diff --git a/nixpkgs/pkgs/build-support/rust/sysroot/cargo.py b/nixpkgs/pkgs/build-support/rust/sysroot/cargo.py new file mode 100644 index 000000000000..9d970eff79e8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/sysroot/cargo.py @@ -0,0 +1,47 @@ +import os +import toml + +rust_src = os.environ['RUSTC_SRC'] +orig_cargo = os.environ['ORIG_CARGO'] if 'ORIG_CARGO' in os.environ else None + +base = { + 'package': { + 'name': 'nixpkgs-sysroot-stub-crate', + 'version': '0.0.0', + 'authors': ['The Rust Project Developers'], + 'edition': '2018', + }, + 'dependencies': { + 'compiler_builtins': { + 'version': '0.1.0', + 'features': ['rustc-dep-of-std', 'mem'], + }, + 'core': { + 'path': os.path.join(rust_src, 'core'), + }, + 'alloc': { + 'path': os.path.join(rust_src, 'alloc'), + }, + }, + 'patch': { + 'crates-io': { + 'rustc-std-workspace-core': { + 'path': os.path.join(rust_src, 'rustc-std-workspace-core'), + }, + 'rustc-std-workspace-alloc': { + 'path': os.path.join(rust_src, 'rustc-std-workspace-alloc'), + }, + }, + }, +} + +if orig_cargo is not None: + with open(orig_cargo, 'r') as f: + src = toml.loads(f.read()) + if 'profile' in src: + base['profile'] = src['profile'] + +out = toml.dumps(base) + +with open('Cargo.toml', 'x') as f: + f.write(out) diff --git a/nixpkgs/pkgs/build-support/rust/sysroot/src.nix b/nixpkgs/pkgs/build-support/rust/sysroot/src.nix new file mode 100644 index 000000000000..664702e82c31 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/sysroot/src.nix @@ -0,0 +1,26 @@ +{ lib, stdenv, rustPlatform, buildPackages +, originalCargoToml ? 
null +}: + +stdenv.mkDerivation { + name = "cargo-src"; + preferLocalBuild = true; + + unpackPhase = "true"; + dontConfigure = true; + dontBuild = true; + + installPhase = '' + export RUSTC_SRC=${rustPlatform.rustLibSrc.override { }} + '' + + lib.optionalString (originalCargoToml != null) '' + export ORIG_CARGO=${originalCargoToml} + '' + + '' + ${buildPackages.python3.withPackages (ps: with ps; [ toml ])}/bin/python3 ${./cargo.py} + mkdir -p $out/src + echo '#![no_std]' > $out/src/lib.rs + cp Cargo.toml $out/Cargo.toml + cp ${./Cargo.lock} $out/Cargo.lock + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/sysroot/update-lockfile.sh b/nixpkgs/pkgs/build-support/rust/sysroot/update-lockfile.sh new file mode 100755 index 000000000000..d0596d1e5a62 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/sysroot/update-lockfile.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env nix-shell +#!nix-shell -i bash -p python3 python3.pkgs.toml cargo + +set -eu pipefile + +HERE=$(readlink -e $(dirname "${BASH_SOURCE[0]}")) +NIXPKGS_ROOT="$HERE/../../../.." + +# https://unix.stackexchange.com/a/84980/390173 +tempdir=$(mktemp -d 2>/dev/null || mktemp -d -t 'update-lockfile') +cd "$tempdir" +mkdir -p src +touch src/lib.rs + +RUSTC_SRC=$(nix-build "${NIXPKGS_ROOT}" -A pkgs.rustPlatform.rustLibSrc --no-out-link) + +ln -s $RUSTC_SRC/{core,alloc} ./ + +export RUSTC_SRC +python3 "$HERE/cargo.py" + +export RUSTC_BOOTSTRAP=1 +cargo generate-lockfile + +cp Cargo.lock "$HERE" + +rm -rf "$tempdir" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/Cargo.lock new file mode 100644 index 000000000000..522f9c260fa3 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/Cargo.lock @@ -0,0 +1,83 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "basic-dynamic" +version = "0.1.0" +dependencies = [ + "rand", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "getrandom" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "libc" +version = "0.2.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2a5ac8f984bfcf3a823267e5fde638acc3325f6496633a5da6bb6eb2171e103" + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "rand" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" +dependencies = [ + "rand_core", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/Cargo.toml new file mode 100644 index 000000000000..851024c82e94 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "basic-dynamic" +version = "0.1.0" +authors = ["Daniël de Kok <me@danieldk.eu>"] +edition = "2018" + +[dependencies] +rand = "0.8" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/default.nix new file mode 100644 index 000000000000..eea2c3760599 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/default.nix @@ -0,0 +1,16 @@ +{ rustPlatform }: + +rustPlatform.buildRustPackage { + pname = "basic-dynamic"; + version = "0.1.0"; + + src = ./.; + + cargoLock.lockFileContents = builtins.readFile ./Cargo.lock; + + doInstallCheck = true; + + installCheckPhase = '' + $out/bin/basic-dynamic + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/src/main.rs new file mode 100644 index 000000000000..50b4ed799e43 --- /dev/null +++ 
b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic-dynamic/src/main.rs @@ -0,0 +1,9 @@ +use rand::Rng; + +fn main() { + let mut rng = rand::thread_rng(); + + // Always draw zero :). + let roll: u8 = rng.gen_range(0..1); + assert_eq!(roll, 0); +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.lock new file mode 100644 index 000000000000..fd1b5e42ad30 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.lock @@ -0,0 +1,83 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "basic" +version = "0.1.0" +dependencies = [ + "rand", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "getrandom" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "libc" +version = "0.2.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e" + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "rand" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73" +dependencies = [ + "rand_core", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.toml new file mode 100644 index 000000000000..f555bb0de62e --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "basic" +version = "0.1.0" +authors = ["Daniël de Kok <me@danieldk.eu>"] +edition = "2018" + +[dependencies] +rand = "0.8" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/default.nix new file mode 100644 index 000000000000..d595b58109ad --- /dev/null +++ 
b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/default.nix @@ -0,0 +1,18 @@ +{ rustPlatform }: + +rustPlatform.buildRustPackage { + pname = "basic"; + version = "0.1.0"; + + src = ./.; + + cargoLock = { + lockFile = ./Cargo.lock; + }; + + doInstallCheck = true; + + installCheckPhase = '' + $out/bin/basic + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/src/main.rs new file mode 100644 index 000000000000..50b4ed799e43 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/basic/src/main.rs @@ -0,0 +1,9 @@ +use rand::Rng; + +fn main() { + let mut rng = rand::thread_rng(); + + // Always draw zero :). + let roll: u8 = rng.gen_range(0..1); + assert_eq!(roll, 0); +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/default.nix new file mode 100644 index 000000000000..26e6487989c4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/default.nix @@ -0,0 +1,20 @@ +{ callPackage, maturin, writers, python3Packages }: + +# Build like this from nixpkgs root: +# $ nix-build -A tests.importCargoLock +{ + basic = callPackage ./basic { }; + basicDynamic = callPackage ./basic-dynamic { }; + gitDependency = callPackage ./git-dependency { }; + gitDependencyRev = callPackage ./git-dependency-rev { }; + gitDependencyRevNonWorkspaceNestedCrate = callPackage ./git-dependency-rev-non-workspace-nested-crate { }; + gitDependencyTag = callPackage ./git-dependency-tag { }; + gitDependencyBranch = callPackage ./git-dependency-branch { }; + maturin = maturin.tests.pyo3; + v1 = callPackage ./v1 { }; + gitDependencyWorkspaceInheritance = callPackage ./git-dependency-workspace-inheritance { + replaceWorkspaceValues = writers.writePython3 "replace-workspace-values" + { libraries = with python3Packages; [ tomli tomli-w ]; flakeIgnore = [ "E501" "W503" ]; } + (builtins.readFile ../../replace-workspace-values.py); + }; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/Cargo.lock new file mode 100644 index 000000000000..e832b2e5ba4f --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/Cargo.lock @@ -0,0 +1,72 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "getrandom" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "git-dependency-branch" +version = "0.1.0" +dependencies = [ + "rand", +] + +[[package]] +name = "libc" +version = "0.2.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e" + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "rand" +version = "0.8.4" +source = "git+https://github.com/rust-random/rand.git?branch=master#fcc5baf31565a94f63dce41c2e739e6f182475f4" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "git+https://github.com/rust-random/rand.git?branch=master#fcc5baf31565a94f63dce41c2e739e6f182475f4" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.3" +source = "git+https://github.com/rust-random/rand.git?branch=master#fcc5baf31565a94f63dce41c2e739e6f182475f4" +dependencies = [ + "getrandom", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/Cargo.toml new file mode 100644 index 000000000000..0702c5ad8a07 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "git-dependency-branch" +version = "0.1.0" +authors = ["Daniël de Kok <me@danieldk.eu>"] +edition = "2018" + +[dependencies] +rand = { git = "https://github.com/rust-random/rand.git", branch = "master" } diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/default.nix new file mode 100644 index 000000000000..f274d86c52b8 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/default.nix @@ -0,0 +1,21 @@ +{ rustPlatform }: + +rustPlatform.buildRustPackage { + pname = "git-dependency-branch"; + version = "0.1.0"; + + src = ./.; + + cargoLock = { + lockFile = ./Cargo.lock; + outputHashes = { + "rand-0.8.4" = "1ilk9wvfw3mdm57g199ys8f5nrgdrh0n3a4c8b7nz6lgnqvfrv6z"; + }; + }; + + doInstallCheck = true; + + installCheckPhase = '' + $out/bin/git-dependency-branch + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/src/main.rs new file mode 100644 index 000000000000..50b4ed799e43 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-branch/src/main.rs @@ -0,0 +1,9 @@ +use rand::Rng; + +fn main() { + 
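// This fixture pulls rand from a git branch (see Cargo.toml), so default.nix has to pin it through cargoLock.outputHashes. +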
let mut rng = rand::thread_rng(); + + // Always draw zero :). + let roll: u8 = rng.gen_range(0..1); + assert_eq!(roll, 0); +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/Cargo.lock new file mode 100644 index 000000000000..63ff1b3c113c --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/Cargo.lock @@ -0,0 +1,638 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "anyhow" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61604a8f862e1d5c3229fdd78f8b02c68dcf73a4c4b05fd636d12240aaa242c1" + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "cargo-test-macro" +version = "0.1.0" +source = "git+https://github.com/rust-lang/cargo?branch=rust-1.53.0#4369396ce7d270972955d876eaa4954bea56bcd9" + +[[package]] +name = "cargo-test-support" +version = "0.1.0" +source = "git+https://github.com/rust-lang/cargo?branch=rust-1.53.0#4369396ce7d270972955d876eaa4954bea56bcd9" +dependencies = [ + "anyhow", + "cargo-test-macro", + "cargo-util", + "filetime", + "flate2", + "git2", + "glob", + "lazy_static", + "remove_dir_all", + "serde_json", + "tar", + "toml", + "url", +] + +[[package]] +name = "cargo-util" +version = "0.1.0" +source = "git+https://github.com/rust-lang/cargo?branch=rust-1.53.0#4369396ce7d270972955d876eaa4954bea56bcd9" +dependencies = [ + "anyhow", + "core-foundation", + "crypto-hash", + "filetime", + "hex 0.4.3", + "jobserver", + "libc", + "log", + "miow", + "same-file", + "shell-escape", + "tempfile", + "walkdir", + "winapi", +] + +[[package]] +name = "cc" +version = "1.0.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd" +dependencies = [ + "jobserver", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "commoncrypto" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007" +dependencies = [ + "commoncrypto-sys", +] + +[[package]] +name = "commoncrypto-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fed34f46747aa73dfaa578069fd8279d2818ade2b55f38f22a9401c7f4083e2" +dependencies = [ + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" +dependencies = [ + "core-foundation-sys", + "libc", 
+] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "crc32fast" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-hash" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a77162240fd97248d19a564a565eb563a3f592b386e4136fb300909e67dddca" +dependencies = [ + "commoncrypto", + "hex 0.3.2", + "openssl", + "winapi", +] + +[[package]] +name = "filetime" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "winapi", +] + +[[package]] +name = "flate2" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" +dependencies = [ + "cfg-if", + "crc32fast", + "libc", + "libz-sys", + "miniz_oxide", +] + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +dependencies = [ + "matches", + "percent-encoding", +] + +[[package]] +name = "getrandom" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "git-dependency-rev-non-workspace-nested-crate" +version = "0.1.0" +dependencies = [ + "cargo-test-support", +] + +[[package]] +name = "git2" +version = "0.13.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a8057932925d3a9d9e4434ea016570d37420ddb1ceed45a174d577f24ed6700" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log", + "openssl-probe", + "openssl-sys", + "url", +] + +[[package]] +name = "glob" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" + +[[package]] +name = "hex" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", 
+] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "jobserver" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" +dependencies = [ + "libc", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "869d572136620d55835903746bcb5cdc54cb2851fd0aeec53220b4bb65ef3013" + +[[package]] +name = "libgit2-sys" +version = "0.12.24+1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddbd6021eef06fb289a8f54b3c2acfdd85ff2a585dfbb24b8576325373d2152c" +dependencies = [ + "cc", + "libc", + "libssh2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", +] + +[[package]] +name = "libssh2-sys" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b094a36eb4b8b8c8a7b4b8ae43b2944502be3e59cd87687595cf6b0a71b3f4ca" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de5435b8549c16d423ed0c03dbaafe57cf6c3344744f1242520d59c9d8ecec66" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "matches" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" + +[[package]] +name = "miniz_oxide" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +dependencies = [ + "adler", + "autocfg", +] + +[[package]] +name = "miow" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" +dependencies = [ + "winapi", +] + +[[package]] +name = "once_cell" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" + +[[package]] +name = "openssl" +version = "0.10.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9facdb76fec0b73c406f125d44d86fdad818d66fef0531eec9233ca425ff4a" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-sys", +] + +[[package]] +name = "openssl-probe" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" + +[[package]] +name = "openssl-sys" +version = "0.9.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69df2d8dfc6ce3aaf44b40dec6f487d5a886516cf6879c49e98e0710f310a058" +dependencies = [ + "autocfg", + "cc", + 
"libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "percent-encoding" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" + +[[package]] +name = "pkg-config" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10e2fcbb64ecbe64c8e040a386c3104d384583af58b956d870aaaf229df6e66d" + +[[package]] +name = "ppv-lite86" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ca011bd0129ff4ae15cd04c4eef202cadf6c51c21e47aba319b4e0501db741" + +[[package]] +name = "rand" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" +dependencies = [ + "rand_core", +] + +[[package]] +name = "redox_syscall" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +dependencies = [ + "bitflags", +] + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "ryu" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "serde" +version = "1.0.130" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" + +[[package]] +name = "serde_json" +version = "1.0.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "shell-escape" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f" + +[[package]] +name = "tar" +version = "0.4.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6f5515d3add52e0bbdcad7b83c388bb36ba7b754dda3b5f5bc2d38640cdba5c" +dependencies = [ + "filetime", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" +dependencies = [ + "cfg-if", + "libc", + "rand", + "redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "tinyvec" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83b2a3d4d9091d0abd7eba4dc2710b1718583bd4d8992e2190720ea38f391f7" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "toml" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +dependencies = [ + "serde", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" + +[[package]] +name = "unicode-normalization" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "url" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +dependencies = [ + "form_urlencoded", + "idna", + "matches", + "percent-encoding", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "walkdir" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +dependencies = [ + "same-file", + "winapi", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/Cargo.toml new file mode 100644 index 000000000000..61f1a08dbe4f --- /dev/null +++ 
b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "git-dependency-rev-non-workspace-nested-crate" +version = "0.1.0" +authors = ["Stefan Junker <mail@stefanjunker.de>"] +edition = "2018" + +[dependencies] +cargo-test-support = { git = "https://github.com/rust-lang/cargo", branch = "rust-1.53.0" } diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/default.nix new file mode 100644 index 000000000000..dcf1e601c31c --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/default.nix @@ -0,0 +1,31 @@ +{ rustPlatform, pkg-config, openssl, lib, darwin, stdenv }: + +rustPlatform.buildRustPackage { + pname = "git-dependency-rev-non-workspace-nested-crate"; + version = "0.1.0"; + + src = ./.; + + nativeBuildInputs = [ + pkg-config + ]; + + buildInputs = [ + openssl + ] ++ lib.optionals stdenv.isDarwin [ + darwin.apple_sdk.frameworks.Security + ]; + + cargoLock = { + lockFile = ./Cargo.lock; + outputHashes = { + "cargo-test-macro-0.1.0" = "1yy1y1d523xdzwg1gc77pigbcwsbawmy4b7vw8v21m7q957sk0c4"; + }; + }; + + doInstallCheck = true; + + installCheckPhase = '' + $out/bin/git-dependency-rev-non-workspace-nested-crate + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/src/main.rs new file mode 100644 index 000000000000..cb4bfb5d928b --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev-non-workspace-nested-crate/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("{}", cargo_test_support::t!(Result::<&str, &str>::Ok("msg"))); +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/Cargo.lock new file mode 100644 index 000000000000..684d9419479d --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/Cargo.lock @@ -0,0 +1,81 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "getrandom" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "git-dependency-rev" +version = "0.1.0" +dependencies = [ + "rand", +] + +[[package]] +name = "libc" +version = "0.2.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e" + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "rand" +version = "0.8.3" +source = "git+https://github.com/rust-random/rand.git?rev=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.0" +source = "git+https://github.com/rust-random/rand.git?rev=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.1" +source = "git+https://github.com/rust-random/rand.git?rev=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.0" +source = "git+https://github.com/rust-random/rand.git?rev=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7" +dependencies = [ + "rand_core", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/Cargo.toml new file mode 100644 index 000000000000..3500325ae579 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "git-dependency-rev" +version = "0.1.0" +authors = ["Daniël de Kok <me@danieldk.eu>"] +edition = "2018" + +[dependencies] +rand = { git = "https://github.com/rust-random/rand.git", rev = "0.8.3" } diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/default.nix new file mode 100644 index 000000000000..40487d6829a2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/default.nix @@ -0,0 +1,21 @@ +{ rustPlatform }: + +rustPlatform.buildRustPackage { + pname = "git-dependency-rev"; + version = "0.1.0"; + + src = ./.; + + cargoLock = { + lockFile = ./Cargo.lock; + outputHashes = { + "rand-0.8.3" = "0l3p174bpwia61vcvxz5mw65a13ri3wy94z04xrnyy5lzciykz4f"; + }; + }; + + doInstallCheck = true; + + installCheckPhase = '' + $out/bin/git-dependency-rev + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/src/main.rs new file mode 100644 index 000000000000..50b4ed799e43 --- 
/dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-rev/src/main.rs @@ -0,0 +1,9 @@ +use rand::Rng; + +fn main() { + let mut rng = rand::thread_rng(); + + // Always draw zero :). + let roll: u8 = rng.gen_range(0..1); + assert_eq!(roll, 0); +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/Cargo.lock new file mode 100644 index 000000000000..9f8ec19a366a --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/Cargo.lock @@ -0,0 +1,81 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "getrandom" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "git-dependency-tag" +version = "0.1.0" +dependencies = [ + "rand", +] + +[[package]] +name = "libc" +version = "0.2.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e" + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "rand" +version = "0.8.3" +source = "git+https://github.com/rust-random/rand.git?tag=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.0" +source = "git+https://github.com/rust-random/rand.git?tag=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.1" +source = "git+https://github.com/rust-random/rand.git?tag=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.0" +source = "git+https://github.com/rust-random/rand.git?tag=0.8.3#6ecbe2626b2cc6110a25c97b1702b347574febc7" +dependencies = [ + "rand_core", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/Cargo.toml new file mode 100644 index 000000000000..1bda7336c263 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "git-dependency-tag" +version = "0.1.0" +authors = ["Daniël de Kok <me@danieldk.eu>"] +edition = "2018" + +[dependencies] +rand = { git = "https://github.com/rust-random/rand.git", tag = "0.8.3" } diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/default.nix new file mode 100644 index 
000000000000..2a2f860ac8a1 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/default.nix @@ -0,0 +1,21 @@ +{ rustPlatform }: + +rustPlatform.buildRustPackage { + pname = "git-dependency-tag"; + version = "0.1.0"; + + src = ./.; + + cargoLock = { + lockFile = ./Cargo.lock; + outputHashes = { + "rand-0.8.3" = "0l3p174bpwia61vcvxz5mw65a13ri3wy94z04xrnyy5lzciykz4f"; + }; + }; + + doInstallCheck = true; + + installCheckPhase = '' + $out/bin/git-dependency-tag + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/src/main.rs new file mode 100644 index 000000000000..50b4ed799e43 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-tag/src/main.rs @@ -0,0 +1,9 @@ +use rand::Rng; + +fn main() { + let mut rng = rand::thread_rng(); + + // Always draw zero :). + let roll: u8 = rng.gen_range(0..1); + assert_eq!(roll, 0); +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/crate.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/crate.toml new file mode 100644 index 000000000000..f7b62aed3514 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/crate.toml @@ -0,0 +1,13 @@ +[package] +name = "im_using_workspaces" +version = { workspace = true } +publish = false +keywords = [ + "workspace", + "other_thing", + "third_thing", +] + +[dependencies] +foo = { workspace = true, features = ["cat"] } +bar = "1.0.0" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/default.nix new file mode 100644 index 000000000000..138b7179b95f --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/default.nix @@ -0,0 +1,7 @@ +{ replaceWorkspaceValues, runCommand }: + +runCommand "git-dependency-workspace-inheritance-test" { } '' + cp --no-preserve=mode ${./crate.toml} "$out" + ${replaceWorkspaceValues} "$out" ${./workspace.toml} + diff -u "$out" ${./want.toml} +'' diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/want.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/want.toml new file mode 100644 index 000000000000..ec1331455bec --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/want.toml @@ -0,0 +1,19 @@ +[package] +name = "im_using_workspaces" +version = "1.0.0" +publish = false +keywords = [ + "workspace", + "other_thing", + "third_thing", +] + +[dependencies] +bar = "1.0.0" + +[dependencies.foo] +features = [ + "cat", + "meow", +] +version = "1.0.0" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/workspace.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/workspace.toml new file mode 100644 index 000000000000..c58112a782d0 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency-workspace-inheritance/workspace.toml @@ -0,0 +1,5 @@ +[workspace.package] +version = "1.0.0" + +[workspace.dependencies] +foo = { version = "1.0.0", features = ["meow"] 
} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.lock new file mode 100644 index 000000000000..fa71865b3e5f --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.lock @@ -0,0 +1,81 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "getrandom" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "git-dependency" +version = "0.1.0" +dependencies = [ + "rand", +] + +[[package]] +name = "libc" +version = "0.2.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e" + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "rand" +version = "0.8.3" +source = "git+https://github.com/rust-random/rand.git#f0e01ee0a7257753cc51b291f62666f4765923ef" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.0" +source = "git+https://github.com/rust-random/rand.git#f0e01ee0a7257753cc51b291f62666f4765923ef" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.2" +source = "git+https://github.com/rust-random/rand.git#f0e01ee0a7257753cc51b291f62666f4765923ef" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.0" +source = "git+https://github.com/rust-random/rand.git#f0e01ee0a7257753cc51b291f62666f4765923ef" +dependencies = [ + "rand_core", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.toml new file mode 100644 index 000000000000..a902dea9fcd0 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "git-dependency" +version = "0.1.0" +authors = ["Daniël de Kok <me@danieldk.eu>"] +edition = "2018" + +[dependencies] +rand = { git = "https://github.com/rust-random/rand.git" } diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/default.nix new file mode 100644 index 000000000000..944de9b45cb5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/default.nix @@ -0,0 +1,21 @@ +{ rustPlatform }: + +rustPlatform.buildRustPackage { + pname = "git-dependency"; + version = "0.1.0"; + + src = ./.; + + cargoLock = { + lockFile = ./Cargo.lock; + outputHashes = { + "rand-0.8.3" = 
"0ya2hia3cn31qa8894s3av2s8j5bjwb6yq92k0jsnlx7jid0jwqa"; + }; + }; + + doInstallCheck = true; + + installCheckPhase = '' + $out/bin/git-dependency + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/src/main.rs new file mode 100644 index 000000000000..50b4ed799e43 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/git-dependency/src/main.rs @@ -0,0 +1,9 @@ +use rand::Rng; + +fn main() { + let mut rng = rand::thread_rng(); + + // Always draw zero :). + let roll: u8 = rng.gen_range(0..1); + assert_eq!(roll, 0); +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/Cargo.lock b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/Cargo.lock new file mode 100644 index 000000000000..fe976f090aac --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/Cargo.lock @@ -0,0 +1,85 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "getrandom" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.94 (registry+https://github.com/rust-lang/crates.io-index)", + "wasi 0.10.2+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "libc" +version = "0.2.94" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rand" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.94 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_hc 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_chacha" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "ppv-lite86 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "getrandom 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_hc" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "v1" +version = "0.1.0" +dependencies = [ + "rand 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +"checksum getrandom 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +"checksum libc 0.2.94 (registry+https://github.com/rust-lang/crates.io-index)" = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e" +"checksum ppv-lite86 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" +"checksum rand 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e" +"checksum rand_chacha 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d" +"checksum rand_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7" +"checksum rand_hc 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73" +"checksum wasi 0.10.2+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/Cargo.toml b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/Cargo.toml new file mode 100644 index 000000000000..4b825c45cadc --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "v1" +version = "0.1.0" +authors = ["Daniël de Kok <me@danieldk.eu>"] +edition = "2018" + +[dependencies] +rand = "0.8" diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/default.nix b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/default.nix new file mode 100644 index 000000000000..d13d468ae7f9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/default.nix @@ -0,0 +1,18 @@ +{ rustPlatform }: + +rustPlatform.buildRustPackage { + pname = "v1"; + version = "0.1.0"; + + src = ./.; + + cargoLock = { + lockFile = ./Cargo.lock; + }; + + doInstallCheck = true; + + installCheckPhase = '' + $out/bin/v1 + ''; +} diff --git a/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/src/main.rs b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/src/main.rs new file mode 100644 index 000000000000..50b4ed799e43 --- /dev/null +++ b/nixpkgs/pkgs/build-support/rust/test/import-cargo-lock/v1/src/main.rs @@ -0,0 +1,9 @@ +use rand::Rng; + +fn main() { + let mut rng = rand::thread_rng(); + + // Always draw zero :). + let roll: u8 = rng.gen_range(0..1); + assert_eq!(roll, 0); +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh b/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh new file mode 100644 index 000000000000..6a40073fb234 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh @@ -0,0 +1,37 @@ +# Ensure that we are always linking against “libblas.so.3” and +# “liblapack.so.3”. + +auditBlas() { + local dir="$prefix" + [ -e "$dir" ] || return 0 + + local i + while IFS= read -r -d $'\0' i; do + if ! isELF "$i"; then continue; fi + + if $OBJDUMP -p "$i" | grep 'NEEDED' | awk '{ print $2; }' | grep -q '\(libmkl_rt.so\|libopenblas.so.0\)'; then + echo "$i refers to a specific implementation of BLAS or LAPACK." + echo "This prevents users from switching BLAS/LAPACK implementations." + echo "Add \`blas' or \`lapack' to buildInputs instead of \`mkl' or \`openblas'." 
+ exit 1 + fi + + (IFS=: + for dir in "$(patchelf --print-rpath "$i")"; do + if [ -f "$dir/libblas.so.3" ] || [ -f "$dir/libblas.so" ]; then + if [ "$dir" != "@blas@/lib" ]; then + echo "$dir is not allowed to contain a library named libblas.so.3" + exit 1 + fi + fi + if [ -f "$dir/liblapack.so.3" ] || [ -f "$dir/liblapack.so" ]; then + if [ "$dir" != "@lapack@/lib" ]; then + echo "$dir is not allowed to contain a library named liblapack.so.3" + exit 1 + fi + fi + done) + done < <(find "$dir" -type f -print0) +} + +fixupOutputHooks+=(auditBlas) diff --git a/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh b/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh new file mode 100644 index 000000000000..36714178156b --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh @@ -0,0 +1,39 @@ +# Check whether RPATHs or wrapper scripts contain references to +# $TMPDIR. This is a serious security bug because it allows any user +# to inject files into search paths of other users' processes. +# +# It might be better to have Nix scan build output for any occurrence +# of $TMPDIR (which would also be good for reproducibility), but at +# the moment that would produce too many spurious errors (e.g. debug +# info or assertion messages that refer to $TMPDIR). + +fixupOutputHooks+=('if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi') + +auditTmpdir() { + local dir="$1" + [ -e "$dir" ] || return 0 + + echo "checking for references to $TMPDIR/ in $dir..." + + local i + find "$dir" -type f -print0 | while IFS= read -r -d $'\0' i; do + if [[ "$i" =~ .build-id ]]; then continue; fi + + if isELF "$i"; then + if { printf :; patchelf --print-rpath "$i"; } | grep -q -F ":$TMPDIR/"; then + echo "RPATH of binary $i contains a forbidden reference to $TMPDIR/" + exit 1 + fi + fi + + if isScript "$i"; then + if [ -e "$(dirname "$i")/.$(basename "$i")-wrapped" ]; then + if grep -q -F "$TMPDIR/" "$i"; then + echo "wrapper script $i contains a forbidden reference to $TMPDIR/" + exit 1 + fi + fi + fi + + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.py b/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.py new file mode 100644 index 000000000000..965384b876fc --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.py @@ -0,0 +1,367 @@ +#!/usr/bin/env python3 + +import argparse +import os +import pprint +import subprocess +import sys +from fnmatch import fnmatch +from collections import defaultdict +from contextlib import contextmanager +from dataclasses import dataclass +from itertools import chain +from pathlib import Path, PurePath +from typing import DefaultDict, Iterator, List, Optional, Set, Tuple + +from elftools.common.exceptions import ELFError # type: ignore +from elftools.elf.dynamic import DynamicSection # type: ignore +from elftools.elf.elffile import ELFFile # type: ignore +from elftools.elf.enums import ENUM_E_TYPE, ENUM_EI_OSABI # type: ignore + + +@contextmanager +def open_elf(path: Path) -> Iterator[ELFFile]: + with path.open('rb') as stream: + yield ELFFile(stream) + + +def is_static_executable(elf: ELFFile) -> bool: + # Statically linked executables have an ELF type of EXEC but no INTERP. + return (elf.header["e_type"] == 'ET_EXEC' + and not elf.get_section_by_name(".interp")) + + +def is_dynamic_executable(elf: ELFFile) -> bool: + # We do not require an ELF type of EXEC. This also catches + # position-independent executables, as they typically have an INTERP + # section but their ELF type is DYN. 
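+ # (static-pie binaries carry no .interp either, so they are not treated as dynamic executables here)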
+ return bool(elf.get_section_by_name(".interp")) + + +def get_dependencies(elf: ELFFile) -> List[str]: + dependencies = [] + # This convoluted code is here on purpose. For some reason, using + # elf.get_section_by_name(".dynamic") does not always return an + # instance of DynamicSection, but that is required to call iter_tags + for section in elf.iter_sections(): + if isinstance(section, DynamicSection): + for tag in section.iter_tags('DT_NEEDED'): + dependencies.append(tag.needed) + break # There is only one dynamic section + + return dependencies + + +def get_rpath(elf: ELFFile) -> List[str]: + # This convoluted code is here on purpose. For some reason, using + # elf.get_section_by_name(".dynamic") does not always return an + # instance of DynamicSection, but that is required to call iter_tags + for section in elf.iter_sections(): + if isinstance(section, DynamicSection): + for tag in section.iter_tags('DT_RUNPATH'): + return tag.runpath.split(':') + + for tag in section.iter_tags('DT_RPATH'): + return tag.rpath.split(':') + + break # There is only one dynamic section + + return [] + + +def get_arch(elf: ELFFile) -> str: + return elf.get_machine_arch() + + +def get_osabi(elf: ELFFile) -> str: + return elf.header["e_ident"]["EI_OSABI"] + + +def osabi_are_compatible(wanted: str, got: str) -> bool: + """ + Tests whether two OS ABIs are compatible, taking into account the + generally accepted compatibility of SVR4 ABI with other ABIs. + """ + if not wanted or not got: + # One of the types couldn't be detected, so as a fallback we'll + # assume they're compatible. + return True + + # Generally speaking, the base ABI (0x00), which is represented by + # readelf(1) as "UNIX - System V", indicates broad compatibility + # with other ABIs. + # + # TODO: This isn't always true. For example, some OSes embed ABI + # compatibility into SHT_NOTE sections like .note.tag and + # .note.ABI-tag. It would be prudent to add these to the detection + # logic to produce better ABI information. + if wanted == 'ELFOSABI_SYSV': + return True + + # Similarly here, we should be able to link against a superset of + # features, so even if the target has another ABI, this should be + # fine. + if got == 'ELFOSABI_SYSV': + return True + + # Otherwise, we simply return whether the ABIs are identical. + return wanted == got + + +def glob(path: Path, pattern: str, recursive: bool) -> Iterator[Path]: + if path.is_dir(): + return path.rglob(pattern) if recursive else path.glob(pattern) + else: + # path.glob won't return anything if the path is not a directory. + # We extend that behavior by matching the file name against the pattern. + # This allows to pass single files instead of dirs to auto_patchelf, + # for greater control on the files to consider. + return [path] if path.match(pattern) else [] + + +cached_paths: Set[Path] = set() +soname_cache: DefaultDict[Tuple[str, str], List[Tuple[Path, str]]] = defaultdict(list) + + +def populate_cache(initial: List[Path], recursive: bool =False) -> None: + lib_dirs = list(initial) + + while lib_dirs: + lib_dir = lib_dirs.pop(0) + + if lib_dir in cached_paths: + continue + + cached_paths.add(lib_dir) + + for path in glob(lib_dir, "*.so*", recursive): + if not path.is_file(): + continue + + # As an optimisation, resolve the symlinks here, as the target is unique + # XXX: (layus, 2022-07-25) is this really an optimisation in all cases ? + # It could make the rpath bigger or break the fragile precedence of $out. 
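+ # Symlinks whose basename differs from their target (e.g. libfoo.so -> libfoo.so.1.2) keep the original path; see the check just below.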
+ resolved = path.resolve() + # Do not use resolved paths when names do not match + if resolved.name != path.name: + resolved = path + + try: + with open_elf(path) as elf: + osabi = get_osabi(elf) + arch = get_arch(elf) + rpath = [Path(p) for p in get_rpath(elf) + if p and '$ORIGIN' not in p] + lib_dirs += rpath + soname_cache[(path.name, arch)].append((resolved.parent, osabi)) + + except ELFError: + # Not an ELF file in the right format + pass + + +def find_dependency(soname: str, soarch: str, soabi: str) -> Optional[Path]: + for lib, libabi in soname_cache[(soname, soarch)]: + if osabi_are_compatible(soabi, libabi): + return lib + return None + + +@dataclass +class Dependency: + file: Path # The file that contains the dependency + name: Path # The name of the dependency + found: bool = False # Whether it was found somewhere + + +def auto_patchelf_file(path: Path, runtime_deps: list[Path], append_rpaths: List[Path] = []) -> list[Dependency]: + try: + with open_elf(path) as elf: + + if is_static_executable(elf): + # No point patching these + print(f"skipping {path} because it is statically linked") + return [] + + if elf.num_segments() == 0: + # no segment (e.g. object file) + print(f"skipping {path} because it contains no segment") + return [] + + file_arch = get_arch(elf) + if interpreter_arch != file_arch: + # Our target architecture is different than this file's + # architecture, so skip it. + print(f"skipping {path} because its architecture ({file_arch})" + f" differs from target ({interpreter_arch})") + return [] + + file_osabi = get_osabi(elf) + if not osabi_are_compatible(interpreter_osabi, file_osabi): + print(f"skipping {path} because its OS ABI ({file_osabi}) is" + f" not compatible with target ({interpreter_osabi})") + return [] + + file_is_dynamic_executable = is_dynamic_executable(elf) + + file_dependencies = map(Path, get_dependencies(elf)) + + except ELFError: + return [] + + rpath = [] + if file_is_dynamic_executable: + print("setting interpreter of", path) + subprocess.run( + ["patchelf", "--set-interpreter", interpreter_path.as_posix(), path.as_posix()], + check=True) + rpath += runtime_deps + + print("searching for dependencies of", path) + dependencies = [] + # Be sure to get the output of all missing dependencies instead of + # failing at the first one, because it's more useful when working + # on a new package where you don't yet know the dependencies. + for dep in file_dependencies: + if dep.is_absolute() and dep.is_file(): + # This is an absolute path. If it exists, just use it. + # Otherwise, we probably want this to produce an error when + # checked (because just updating the rpath won't satisfy + # it). + continue + elif (libc_lib / dep).is_file(): + # This library exists in libc, and will be correctly + # resolved by the linker. 
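+ # No extra rpath entry is needed, so the loop simply moves on.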
+ continue + + if found_dependency := find_dependency(dep.name, file_arch, file_osabi): + rpath.append(found_dependency) + dependencies.append(Dependency(path, dep, True)) + print(f" {dep} -> found: {found_dependency}") + else: + dependencies.append(Dependency(path, dep, False)) + print(f" {dep} -> not found!") + + rpath.extend(append_rpaths) + + # Dedup the rpath + rpath_str = ":".join(dict.fromkeys(map(Path.as_posix, rpath))) + + if rpath: + print("setting RPATH to:", rpath_str) + subprocess.run( + ["patchelf", "--set-rpath", rpath_str, path.as_posix()], + check=True) + + return dependencies + + +def auto_patchelf( + paths_to_patch: List[Path], + lib_dirs: List[Path], + runtime_deps: List[Path], + recursive: bool = True, + ignore_missing: List[str] = [], + append_rpaths: List[Path] = []) -> None: + + if not paths_to_patch: + sys.exit("No paths to patch, stopping.") + + # Add all shared objects of the current output path to the cache, + # before lib_dirs, so that they are chosen first in find_dependency. + populate_cache(paths_to_patch, recursive) + populate_cache(lib_dirs) + + dependencies = [] + for path in chain.from_iterable(glob(p, '*', recursive) for p in paths_to_patch): + if not path.is_symlink() and path.is_file(): + dependencies += auto_patchelf_file(path, runtime_deps, append_rpaths) + + missing = [dep for dep in dependencies if not dep.found] + + # Print a summary of the missing dependencies at the end + print(f"auto-patchelf: {len(missing)} dependencies could not be satisfied") + failure = False + for dep in missing: + for pattern in ignore_missing: + if fnmatch(dep.name.name, pattern): + print(f"warn: auto-patchelf ignoring missing {dep.name} wanted by {dep.file}") + break + else: + print(f"error: auto-patchelf could not satisfy dependency {dep.name} wanted by {dep.file}") + failure = True + + if failure: + sys.exit('auto-patchelf failed to find all the required dependencies.\n' + 'Add the missing dependencies to --libs or use ' + '`--ignore-missing="foo.so.1 bar.so etc.so"`.') + + +def main() -> None: + parser = argparse.ArgumentParser( + prog="auto-patchelf", + description='auto-patchelf tries as hard as possible to patch the' + ' provided binary files by looking for compatible' + 'libraries in the provided paths.') + parser.add_argument( + "--ignore-missing", + nargs="*", + type=str, + help="Do not fail when some dependencies are not found.") + parser.add_argument( + "--no-recurse", + dest="recursive", + action="store_false", + help="Disable the recursive traversal of paths to patch.") + parser.add_argument( + "--paths", nargs="*", type=Path, + help="Paths whose content needs to be patched." + " Single files and directories are accepted." + " Directories are traversed recursively by default.") + parser.add_argument( + "--libs", nargs="*", type=Path, + help="Paths where libraries are searched for." + " Single files and directories are accepted." + " Directories are not searched recursively.") + parser.add_argument( + "--runtime-dependencies", nargs="*", type=Path, + help="Paths to prepend to the runtime path of executable binaries." 
+ " Subject to deduplication, which may imply some reordering.") + parser.add_argument( + "--append-rpaths", + nargs="*", + type=Path, + help="Paths to append to all runtime paths unconditionally", + ) + + print("automatically fixing dependencies for ELF files") + args = parser.parse_args() + pprint.pprint(vars(args)) + + auto_patchelf( + args.paths, + args.libs, + args.runtime_dependencies, + args.recursive, + args.ignore_missing, + append_rpaths=args.append_rpaths) + + +interpreter_path: Path = None # type: ignore +interpreter_osabi: str = None # type: ignore +interpreter_arch: str = None # type: ignore +libc_lib: Path = None # type: ignore + +if __name__ == "__main__": + nix_support = Path(os.environ['NIX_BINTOOLS']) / 'nix-support' + interpreter_path = Path((nix_support / 'dynamic-linker').read_text().strip()) + libc_lib = Path((nix_support / 'orig-libc').read_text().strip()) / 'lib' + + with open_elf(interpreter_path) as interpreter: + interpreter_osabi = get_osabi(interpreter) + interpreter_arch = get_arch(interpreter) + + if interpreter_arch and interpreter_osabi and interpreter_path and libc_lib: + main() + else: + sys.exit("Failed to parse dynamic linker (ld) properties.") diff --git a/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh b/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh new file mode 100644 index 000000000000..0625565606f3 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh @@ -0,0 +1,94 @@ +# shellcheck shell=bash + +declare -a autoPatchelfLibs +declare -a extraAutoPatchelfLibs + +gatherLibraries() { + autoPatchelfLibs+=("$1/lib") +} + +# shellcheck disable=SC2154 +# (targetOffset is referenced but not assigned.) +addEnvHooks "$targetOffset" gatherLibraries + +# Can be used to manually add additional directories with shared object files +# to be included for the next autoPatchelf invocation. +addAutoPatchelfSearchPath() { + local -a findOpts=() + + while [ $# -gt 0 ]; do + case "$1" in + --) shift; break;; + --no-recurse) shift; findOpts+=("-maxdepth" 1);; + --*) + echo "addAutoPatchelfSearchPath: ERROR: Invalid command line" \ + "argument: $1" >&2 + return 1;; + *) break;; + esac + done + + local dir= + while IFS= read -r -d '' dir; do + extraAutoPatchelfLibs+=("$dir") + done < <(find "$@" "${findOpts[@]}" \! -type d \ + \( -name '*.so' -o -name '*.so.*' \) -print0 \ + | sed -z 's#/[^/]*$##' \ + | uniq -z + ) +} + + +autoPatchelf() { + local norecurse= + while [ $# -gt 0 ]; do + case "$1" in + --) shift; break;; + --no-recurse) shift; norecurse=1;; + --*) + echo "autoPatchelf: ERROR: Invalid command line" \ + "argument: $1" >&2 + return 1;; + *) break;; + esac + done + + readarray -td' ' ignoreMissingDepsArray < <(echo -n "$autoPatchelfIgnoreMissingDeps") + if [ "$autoPatchelfIgnoreMissingDeps" == "1" ]; then + echo "autoPatchelf: WARNING: setting 'autoPatchelfIgnoreMissingDeps" \ + "= true;' is deprecated and will be removed in a future release." \ + "Use 'autoPatchelfIgnoreMissingDeps = [ \"*\" ];' instead." 
>&2 + ignoreMissingDepsArray=( "*" ) + fi + + local appendRunpathsArray=($appendRunpaths) + local runtimeDependenciesArray=($runtimeDependencies) + @pythonInterpreter@ @autoPatchelfScript@ \ + ${norecurse:+--no-recurse} \ + --ignore-missing "${ignoreMissingDepsArray[@]}" \ + --paths "$@" \ + --libs "${autoPatchelfLibs[@]}" \ + "${extraAutoPatchelfLibs[@]}" \ + --runtime-dependencies "${runtimeDependenciesArray[@]/%//lib}" \ + --append-rpaths "${appendRunpathsArray[@]}" +} + +# XXX: This should ultimately use fixupOutputHooks but we currently don't have +# a way to enforce the order. If we have $runtimeDependencies set, the setup +# hook of patchelf is going to ruin everything and strip out those additional +# RPATHs. +# +# So what we do here is basically run in postFixup and emulate the same +# behaviour as fixupOutputHooks because the setup hook for patchelf is run in +# fixupOutput and the postFixup hook runs later. +# +# shellcheck disable=SC2016 +# (Expressions don't expand in single quotes, use double quotes for that.) +postFixupHooks+=(' + if [ -z "${dontAutoPatchelf-}" ]; then + autoPatchelf -- $(for output in $(getAllOutputNames); do + [ -e "${!output}" ] || continue + echo "${!output}" + done) + fi +') diff --git a/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh b/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh new file mode 100644 index 000000000000..6ce879ac092d --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh @@ -0,0 +1,7 @@ +preConfigurePhases="${preConfigurePhases:-} autoreconfPhase" + +autoreconfPhase() { + runHook preAutoreconf + autoreconf ${autoreconfFlags:---install --force --verbose} + runHook postAutoreconf +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh new file mode 100644 index 000000000000..6bef786ac3ac --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh @@ -0,0 +1,9 @@ +breakpointHook() { + local red='\033[0;31m' + local no_color='\033[0m' + + echo -e "${red}build failed in ${curPhase} with exit code ${exitCode}${no_color}" + printf "To attach install cntr and run the following command as root:\n\n" + sh -c "echo ' cntr attach -t command cntr-${out}'; while true; do sleep 99999999; done" +} +failureHooks+=(breakpointHook) diff --git a/nixpkgs/pkgs/build-support/setup-hooks/canonicalize-jars.sh b/nixpkgs/pkgs/build-support/setup-hooks/canonicalize-jars.sh new file mode 100644 index 000000000000..5137bfc94b01 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/canonicalize-jars.sh @@ -0,0 +1,16 @@ +# This setup hook causes the fixup phase to repack all JAR files in a +# canonical & deterministic fashion, e.g. resetting mtimes (like with normal +# store files) and avoiding impure metadata. 
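+# The actual repacking is done by the canonicalize-jar script sourced at the
+# bottom of this file; the hook below merely locates '*.jar' files in each
+# output and hands them to canonicalizeJar.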
+ +fixupOutputHooks+=('if [ -z "$dontCanonicalizeJars" -a -e "$prefix" ]; then canonicalizeJarsIn "$prefix"; fi') + +canonicalizeJarsIn() { + local dir="$1" + echo "canonicalizing jars in $dir" + dir="$(realpath -sm -- "$dir")" + while IFS= read -rd '' f; do + canonicalizeJar "$f" + done < <(find -- "$dir" -type f -name '*.jar' -print0) +} + +source @canonicalize_jar@ diff --git a/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh b/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh new file mode 100644 index 000000000000..0d8a76558026 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh @@ -0,0 +1,33 @@ +fixupOutputHooks+=('if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi') + +compressManPages() { + local dir="$1" + + if [ -L "$dir"/share ] || [ -L "$dir"/share/man ] || [ ! -d "$dir/share/man" ] + then return + fi + echo "gzipping man pages under $dir/share/man/" + + # Compress all uncompressed manpages. Don't follow symlinks, etc. + find "$dir"/share/man/ -type f -a '!' -regex '.*\.\(bz2\|gz\|xz\)$' -print0 \ + | while IFS= read -r -d $'\0' f + do + if gzip -c -n "$f" > "$f".gz; then + rm "$f" + else + rm "$f".gz + fi + done + + # Point symlinks to compressed manpages. + find "$dir"/share/man/ -type l -a '!' -regex '.*\.\(bz2\|gz\|xz\)$' -print0 \ + | sort -z \ + | while IFS= read -r -d $'\0' f + do + local target + target="$(readlink -f "$f")" + if [ -f "$target".gz ]; then + ln -sf "$target".gz "$f".gz && rm "$f" + fi + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh b/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh new file mode 100644 index 000000000000..313ebc980344 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh @@ -0,0 +1,43 @@ +# shellcheck shell=bash + +# Setup hook that installs specified desktop items. +# +# Example usage in a derivation: +# +# { …, makeDesktopItem, copyDesktopItems, … }: +# +# let desktopItem = makeDesktopItem { … }; in +# stdenv.mkDerivation { +# … +# nativeBuildInputs = [ copyDesktopItems ]; +# +# desktopItems = [ desktopItem ]; +# … +# } +# +# This hook will copy files which are either given by full path +# or all '*.desktop' files placed inside the 'share/applications' +# folder of each `desktopItems` argument. + +postInstallHooks+=(copyDesktopItems) + +copyDesktopItems() { + if [ "${dontCopyDesktopItems-}" = 1 ]; then return; fi + + if [ -z "$desktopItems" ]; then + return + fi + + applications="${!outputBin}/share/applications" + for desktopItem in $desktopItems; do + if [[ -f "$desktopItem" ]]; then + echo "Copying '$desktopItem' into '${applications}'" + install -D -m 444 -t "${applications}" "$desktopItem" + else + for f in "$desktopItem"/share/applications/*.desktop; do + echo "Copying '$f' into '${applications}'" + install -D -m 444 -t "${applications}" "$f" + done + fi + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/copy-pkgconfig-items.sh b/nixpkgs/pkgs/build-support/setup-hooks/copy-pkgconfig-items.sh new file mode 100644 index 000000000000..8c04ec9b5f0e --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/copy-pkgconfig-items.sh @@ -0,0 +1,46 @@ +# shellcheck shell=bash + +# Setup hook that installs specified pkgconfig items. 
+# +# Example usage in a derivation: +# +# { …, makePkgconfigItem, copyPkgconfigItems, … }: +# +# let pkgconfigItem = makePkgconfigItem { … }; in +# stdenv.mkDerivation { +# … +# nativeBuildInputs = [ copyPkgconfigItems ]; +# +# pkgconfigItems = [ pkgconfigItem ]; +# … +# } +# +# This hook will copy files which are either given by full path +# or all '*.pc' files placed inside the 'lib/pkgconfig' +# folder of each `pkgconfigItems` argument. + +postInstallHooks+=(copyPkgconfigItems) + +copyPkgconfigItems() { + if [ "${dontCopyPkgconfigItems-}" = 1 ]; then return; fi + + if [ -z "$pkgconfigItems" ]; then + return + fi + + pkgconfigdir="${!outputDev}/lib/pkgconfig" + for pkgconfigItem in $pkgconfigItems; do + if [[ -f "$pkgconfigItem" ]]; then + substituteAllInPlace "$pkgconfigItem" + echo "Copying '$pkgconfigItem' into '${pkgconfigdir}'" + install -D -m 444 -t "${pkgconfigdir}" "$pkgconfigItem" + substituteAllInPlace "${pkgconfigdir}"/* + else + for f in "$pkgconfigItem"/lib/pkgconfig/*.pc; do + echo "Copying '$f' into '${pkgconfigdir}'" + install -D -m 444 -t "${pkgconfigdir}" "$f" + substituteAllInPlace "${pkgconfigdir}"/* + done + fi + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/desktop-to-darwin-bundle.sh b/nixpkgs/pkgs/build-support/setup-hooks/desktop-to-darwin-bundle.sh new file mode 100644 index 000000000000..5b38f4376070 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/desktop-to-darwin-bundle.sh @@ -0,0 +1,245 @@ +# shellcheck shell=bash +fixupOutputHooks+=('convertDesktopFiles $prefix') + +# Get a param out of a desktop file. First parameter is the file and the second +# is the key who's value we should fetch. +getDesktopParam() { + local file="$1" + local key="$2" + local line k v + + while read -r line; do + if [[ "$line" = *=* ]]; then + k="${line%%=*}" + v="${line#*=}" + + if [[ "$k" = "$key" ]]; then + echo "$v" + return + fi + fi + done < "$file" + + return 1 +} + +# Convert a freedesktop.org icon theme for a given app to a .icns file. When possible, missing +# icons are synthesized from SVG or rescaled from existing ones (when within the size threshold). +convertIconTheme() { + local -r out=$1 + local -r sharePath=$2 + local -r iconName=$3 + local -r theme=${4:-hicolor} + + # Sizes based on archived Apple documentation: + # https://developer.apple.com/design/human-interface-guidelines/macos/icons-and-images/app-icon#app-icon-sizes + local -ra iconSizes=(16 32 128 256 512) + local -ra scales=([1]="" [2]="@2") + + # Based loosely on the algorithm at: + # https://specifications.freedesktop.org/icon-theme-spec/icon-theme-spec-latest.html#icon_lookup + # Assumes threshold = 2 for ease of implementation. 
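+ # findIcon prints a kind, optionally followed by a path:
+ #   fixed     - an icon directory matching the requested size exactly
+ #   threshold - an icon within 2px of the requested size
+ #   fallback  - an existing icon of some other size, used only as a last
+ #               resort (the caller rescales it)
+ #   scalable  - nothing matched; the caller synthesizes from the SVG, if any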
+ function findIcon() { + local -r iconSize=$1 + local -r scale=$2 + + local scaleSuffix=${scales[$scale]} + local exactSize=${iconSize}x${iconSize}${scaleSuffix} + + local -a validSizes=( + ${exactSize} + $((iconSize + 1))x$((iconSize + 1))${scaleSuffix} + $((iconSize + 2))x$((iconSize + 2))${scaleSuffix} + $((iconSize - 1))x$((iconSize - 1))${scaleSuffix} + $((iconSize - 2))x$((iconSize - 2))${scaleSuffix} + ) + + local fallbackIcon= + + for iconIndex in "${!candidateIcons[@]}"; do + for maybeSize in "${validSizes[@]}"; do + icon=${candidateIcons[$iconIndex]} + if [[ $icon = */$maybeSize/* ]]; then + if [[ $maybeSize = $exactSize ]]; then + echo "fixed $icon" + return 0 + else + echo "threshold $icon" + return 0 + fi + elif [[ -a $icon && -z "$fallbackIcon" ]]; then + fallbackIcon="$icon" + fi + done + done + + if [[ -n "$fallbackIcon" ]]; then + echo "fallback $fallbackIcon" + return 0 + fi + + echo "scalable" + } + + function resizeIcon() { + local -r in=$1 + local -r out=$2 + local -r iconSize=$3 + local -r scale=$4 + + local density=$((72 * scale))x$((72 * scale)) + local dim=$((iconSize * scale)) + + echo "desktopToDarwinBundle: resizing icon $in to $out, size $dim" >&2 + magick convert -scale "${dim}x${dim}" -density "$density" -units PixelsPerInch "$in" "$out" + } + + function synthesizeIcon() { + local -r in=$1 + local -r out=$2 + local -r iconSize=$3 + local -r scale=$4 + + if [[ $in != '-' ]]; then + local density=$((72 * scale))x$((72 * scale)) + local dim=$((iconSize * scale)) + + echo "desktopToDarwinBundle: rasterizing svg $in to $out, size $dim" >&2 + rsvg-convert --keep-aspect-ratio --width "$dim" --height "$dim" "$in" --output "$out" + magick convert -density "$density" -units PixelsPerInch "$out" "$out" + else + return 1 + fi + } + + function getIcons() { + local -r sharePath=$1 + local -r iconname=$2 + local -r theme=$3 + local -r resultdir=$(mktemp -d) + + local -ar candidateIcons=( + "${sharePath}/icons/${theme}/"*"/${iconname}.png" + "${sharePath}/icons/${theme}/"*"/${iconname}.xpm" + ) + + local -a scalableIcon=("${sharePath}/icons/${theme}/scalable/${iconname}.svg"*) + if [[ ${#scalableIcon[@]} = 0 ]]; then + scalableIcon=('-') + fi + + # Tri-state variable, NONE means no icons have been found, an empty + # icns file will be generated, not sure that's necessary because macOS + # will default to a generic icon if no icon can be found. + # + # OTHER means an appropriate icon was found. + # + # Any other value is a path to an icon file that isn't scalable or + # within the threshold. This is used as a fallback in case no better + # icon can be found and will be scaled as much as + # necessary to result in appropriate icon sizes. + local foundIcon=NONE + for iconSize in "${iconSizes[@]}"; do + for scale in "${!scales[@]}"; do + local iconResult=$(findIcon $iconSize $scale) + local type=${iconResult%% *} + local icon=${iconResult#* } + local scaleSuffix=${scales[$scale]} + local result=${resultdir}/${iconSize}x${iconSize}${scales[$scale]}${scaleSuffix:+x}.png + echo "desktopToDarwinBundle: using $type icon $icon for size $iconSize$scaleSuffix" >&2 + case $type in + fixed) + local density=$((72 * scale))x$((72 * scale)) + magick convert -density "$density" -units PixelsPerInch "$icon" "$result" + foundIcon=OTHER + ;; + threshold) + # Synthesize an icon of the exact size if a scalable icon is available + # instead of scaling one and ending up with a fuzzy icon. + if ! 
synthesizeIcon "${scalableIcon[0]}" "$result" "$iconSize" "$scale"; then + resizeIcon "$icon" "$result" "$iconSize" "$scale" + fi + foundIcon=OTHER + ;; + scalable) + synthesizeIcon "${scalableIcon[0]}" "$result" "$iconSize" "$scale" || true + foundIcon=OTHER + ;; + fallback) + # Use the largest size available to scale to + # appropriate sizes. + if [[ $foundIcon != OTHER ]]; then + foundIcon=$icon + fi + ;; + *) + ;; + esac + done + done + if [[ $foundIcon != NONE && $foundIcon != OTHER ]]; then + # Ideally we'd only resize to whatever the closest sizes are, + # starting from whatever icon sizes are available. + for iconSize in 16 32 128 256 512; do + local result=${resultdir}/${iconSize}x${iconSize}.png + resizeIcon "$foundIcon" "$result" "$iconSize" 1 + done + fi + echo "$resultdir" + } + + iconsdir=$(getIcons "$sharePath" "apps/${iconName}" "$theme") + if [[ -n "$(ls -A1 "$iconsdir")" ]]; then + icnsutil compose --toc "$out/${iconName}.icns" "$iconsdir/"* + else + echo "Warning: no icons were found. Creating an empty icon for ${iconName}.icns." + touch "$out/${iconName}.icns" + fi +} + +processExecFieldCodes() { + local -r file=$1 + local -r execRaw=$(getDesktopParam "${file}" "Exec") + local -r execNoK="${execRaw/\%k/${file}}" + local -r execNoKC="${execNoK/\%c/$(getDesktopParam "${file}" "Name")}" + local -r icon=$(getDesktopParam "${file}" "Icon") + local -r execNoKCI="${execNoKC/\%i/${icon:+--icon }${icon}}" + local -r execNoKCIfu="${execNoKCI/ \%[fu]/}" + local -r exec="${execNoKCIfu/ \%[FU]/}" + if [[ "$exec" != "$execRaw" ]]; then + echo 1>&2 "desktopToDarwinBundle: Application bundles do not understand desktop entry field codes. Changed '$execRaw' to '$exec'." + fi + echo "$exec" +} + +# For a given .desktop file, generate a darwin '.app' bundle for it. +convertDesktopFile() { + local -r file=$1 + local -r sharePath=$(dirname "$(dirname "$file")") + local -r name=$(getDesktopParam "${file}" "Name") + local -r macOSExec=$(getDesktopParam "${file}" "X-macOS-Exec") + if [[ "$macOSExec" ]]; then + local -r exec="$macOSExec" + else + local -r exec=$(processExecFieldCodes "${file}") + fi + local -r iconName=$(getDesktopParam "${file}" "Icon") + local -r squircle=$(getDesktopParam "${file}" "X-macOS-SquircleIcon") + + mkdir -p "${!outputBin}/Applications/${name}.app/Contents/MacOS" + mkdir -p "${!outputBin}/Applications/${name}.app/Contents/Resources" + + convertIconTheme "${!outputBin}/Applications/${name}.app/Contents/Resources" "$sharePath" "$iconName" + + write-darwin-bundle "${!outputBin}" "$name" "$exec" "$iconName" "$squircle" +} + +convertDesktopFiles() { + local dir="$1/share/applications/" + + if [ -d "${dir}" ]; then + for desktopFile in $(find "$dir" -iname "*.desktop"); do + convertDesktopFile "$desktopFile"; + done + fi +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/die.sh b/nixpkgs/pkgs/build-support/setup-hooks/die.sh new file mode 100644 index 000000000000..0db41e030f4c --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/die.sh @@ -0,0 +1,21 @@ +# Exit with backtrace and error message +# +# Usage: die "Error message" +die() { + # Let us be a little sloppy with errors, because otherwise the final + # invocation of `caller` below will cause the script to exit. + set +e + + # Print our error message + printf "\nBuilder called die: %b\n" "$*" + printf "Backtrace:\n" + + # Print a backtrace. 
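+ # 'caller N' prints "<line> <function> <file>" for stack frame N and returns
+ # non-zero once N walks past the outermost frame, which ends the loop.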
+ local frame=0 + while caller $frame; do + ((frame++)); + done + printf "\n" + + exit 1 +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh b/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh new file mode 100644 index 000000000000..2b48fea4ff0b --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh @@ -0,0 +1,20 @@ +postPhases+=" cleanupBuildDir" + +# Force GCC to build with coverage instrumentation. Also disable +# optimisation, since it may confuse things. +export NIX_CFLAGS_COMPILE="${NIX_CFLAGS_COMPILE:-} -O0 --coverage" + +# Get rid of everything that isn't a gcno file or a C source file. +# Also strip the `.tmp_' prefix from gcno files. (The Linux kernel +# creates these.) +cleanupBuildDir() { + if ! [ -e $out/.build ]; then return; fi + + find $out/.build/ -type f -a ! \ + \( -name "*.c" -o -name "*.cc" -o -name "*.cpp" -o -name "*.h" -o -name "*.hh" -o -name "*.y" -o -name "*.l" -o -name "*.gcno" \) \ + | xargs rm -f -- + + for i in $(find $out/.build/ -name ".tmp_*.gcno"); do + mv "$i" "$(echo $i | sed s/.tmp_//)" + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh b/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh new file mode 100644 index 000000000000..f446a6f27fd9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh @@ -0,0 +1,22 @@ +addXMLCatalogs () { + local d i + # ‘xml/dtd’ and ‘xml/xsl’ are deprecated. Catalogs should be + # installed underneath ‘share/xml’. + for d in $1/share/xml $1/xml/dtd $1/xml/xsl; do + if [ -d $d ]; then + for i in $(find $d -name catalog.xml); do + XML_CATALOG_FILES+=" $i" + done + fi + done +} + +if [ -z "${libxmlHookDone-}" ]; then + libxmlHookDone=1 + + # Set up XML_CATALOG_FILES. An empty initial value prevents + # xmllint and xsltproc from looking in /etc/xml/catalog. + export XML_CATALOG_FILES='' + if [ -z "$XML_CATALOG_FILES" ]; then XML_CATALOG_FILES=" "; fi + addEnvHooks "$hostOffset" addXMLCatalogs +fi diff --git a/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh b/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh new file mode 100644 index 000000000000..55e196e654df --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh @@ -0,0 +1,40 @@ +# On macOS, binaries refer to dynamic library dependencies using +# either relative paths (e.g. "libicudata.dylib", searched relative to +# $DYLD_LIBRARY_PATH) or absolute paths +# (e.g. "/nix/store/.../lib/libicudata.dylib"). In Nix, the latter is +# preferred since it allows programs to just work. When linking +# against a library (e.g. "-licudata"), the linker uses the install +# name embedded in the dylib (which can be shown using "otool -D"). +# Most packages create dylibs with absolute install names, but some do +# not. This setup hook fixes dylibs by setting their install names to +# their absolute path (using "install_name_tool -id"). It also +# rewrites references in other dylibs to absolute paths. + +fixupOutputHooks+=('fixDarwinDylibNamesIn $prefix') + +fixDarwinDylibNames() { + local flags=() + local old_id + + for fn in "$@"; do + flags+=(-change "$(basename "$fn")" "$fn") + done + + for fn in "$@"; do + if [ -L "$fn" ]; then continue; fi + echo "$fn: fixing dylib" + int_out=$(@targetPrefix@install_name_tool -id "$fn" "${flags[@]}" "$fn" 2>&1) + result=$? + if [ "$result" -ne 0 ] && + ! 
grep "shared library stub file and can't be changed" <<< "$out" + then + echo "$int_out" >&2 + exit "$result" + fi + done +} + +fixDarwinDylibNamesIn() { + local dir="$1" + fixDarwinDylibNames $(find "$dir" -name "*.dylib") +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh b/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh new file mode 100644 index 000000000000..559b543fadfc --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh @@ -0,0 +1,11 @@ +unpackPhase="unpackGog" + +unpackGog() { + runHook preUnpackGog + + innoextract --silent --extract --exclude-temp "${src}" + + find . -depth -print -execdir rename -f 'y/A-Z/a-z/' '{}' \; + + runHook postUnpackGog +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh b/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh new file mode 100644 index 000000000000..194b408b1050 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh @@ -0,0 +1,230 @@ +# shellcheck shell=bash +# Setup hook for the `installShellFiles` package. +# +# Example usage in a derivation: +# +# { …, installShellFiles, … }: +# stdenv.mkDerivation { +# … +# nativeBuildInputs = [ installShellFiles ]; +# postInstall = '' +# installManPage share/doc/foobar.1 +# installShellCompletion share/completions/foobar.{bash,fish,zsh} +# ''; +# … +# } +# +# See comments on each function for more details. + +# installManPage <path> [...<path>] +# +# Each argument is checked for its man section suffix and installed into the appropriate +# share/man/man<n>/ directory. The function returns an error if any paths don't have the man +# section suffix (with optional .gz compression). +installManPage() { + local path + for path in "$@"; do + if (( "${NIX_DEBUG:-0}" >= 1 )); then + echo "installManPage: installing $path" + fi + if test -z "$path"; then + echo "installManPage: error: path cannot be empty" >&2 + return 1 + fi + local basename + basename=$(stripHash "$path") # use stripHash in case it's a nix store path + local trimmed=${basename%.gz} # don't get fooled by compressed manpages + local suffix=${trimmed##*.} + if test -z "$suffix" -o "$suffix" = "$trimmed"; then + echo "installManPage: error: path missing manpage section suffix: $path" >&2 + return 1 + fi + local outRoot + if test "$suffix" = 3; then + outRoot=${!outputDevman:?} + else + outRoot=${!outputMan:?} + fi + install -Dm644 -T "$path" "${outRoot}/share/man/man$suffix/$basename" || return + done +} + +# installShellCompletion [--cmd <name>] ([--bash|--fish|--zsh] [--name <name>] <path>)... +# +# Each path is installed into the appropriate directory for shell completions for the given shell. +# If one of `--bash`, `--fish`, or `--zsh` is given the path is assumed to belong to that shell. +# Otherwise the file extension will be examined to pick a shell. If the shell is unknown a warning +# will be logged and the command will return a non-zero status code after processing any remaining +# paths. Any of the shell flags will affect all subsequent paths (unless another shell flag is +# given). +# +# If the shell completion needs to be renamed before installing the optional `--name <name>` flag +# may be given. Any name provided with this flag only applies to the next path. +# +# If all shell completions need to be renamed before installing the optional `--cmd <name>` flag +# may be given. This will synthesize a name for each file, unless overridden with an explicit +# `--name` flag. 
For example, `--cmd foobar` will synthesize the name `_foobar` for zsh and +# `foobar.bash` for bash. +# +# For zsh completions, if the `--name` flag is not given, the path will be automatically renamed +# such that `foobar.zsh` becomes `_foobar`. +# +# A path may be a named fd, such as produced by the bash construct `<(cmd)`. When using a named fd, +# the shell type flag must be provided, and either the `--name` or `--cmd` flag must be provided. +# This might look something like: +# +# installShellCompletion --zsh --name _foobar <($out/bin/foobar --zsh-completion) +# +# This command accepts multiple shell flags in conjunction with multiple paths if you wish to +# install them all in one command: +# +# installShellCompletion share/completions/foobar.{bash,fish} --zsh share/completions/_foobar +# +# However it may be easier to read if each shell is split into its own invocation, especially when +# renaming is involved: +# +# installShellCompletion --bash --name foobar.bash share/completions.bash +# installShellCompletion --fish --name foobar.fish share/completions.fish +# installShellCompletion --zsh --name _foobar share/completions.zsh +# +# Or to use shell newline escaping to split a single invocation across multiple lines: +# +# installShellCompletion --cmd foobar \ +# --bash <($out/bin/foobar --bash-completion) \ +# --fish <($out/bin/foobar --fish-completion) \ +# --zsh <($out/bin/foobar --zsh-completion) +# +# If any argument is `--` the remaining arguments will be treated as paths. +installShellCompletion() { + local shell='' name='' cmdname='' retval=0 parseArgs=1 arg + while { arg=$1; shift; }; do + # Parse arguments + if (( parseArgs )); then + case "$arg" in + --bash|--fish|--zsh) + shell=${arg#--} + continue;; + --name) + name=$1 + shift || { + echo 'installShellCompletion: error: --name flag expected an argument' >&2 + return 1 + } + continue;; + --name=*) + # treat `--name=foo` the same as `--name foo` + name=${arg#--name=} + continue;; + --cmd) + cmdname=$1 + shift || { + echo 'installShellCompletion: error: --cmd flag expected an argument' >&2 + return 1 + } + continue;; + --cmd=*) + # treat `--cmd=foo` the same as `--cmd foo` + cmdname=${arg#--cmd=} + continue;; + --?*) + echo "installShellCompletion: warning: unknown flag ${arg%%=*}" >&2 + retval=2 + continue;; + --) + # treat remaining args as paths + parseArgs=0 + continue;; + esac + fi + if (( "${NIX_DEBUG:-0}" >= 1 )); then + echo "installShellCompletion: installing $arg${name:+ as $name}" + fi + # if we get here, this is a path or named pipe + # Identify shell and output name + local curShell=$shell + local outName='' + if [[ -z "$arg" ]]; then + echo "installShellCompletion: error: empty path is not allowed" >&2 + return 1 + elif [[ -p "$arg" ]]; then + # this is a named fd or fifo + if [[ -z "$curShell" ]]; then + echo "installShellCompletion: error: named pipe requires one of --bash, --fish, or --zsh" >&2 + return 1 + elif [[ -z "$name" && -z "$cmdname" ]]; then + echo "installShellCompletion: error: named pipe requires one of --cmd or --name" >&2 + return 1 + fi + else + # this is a path + local argbase + argbase=$(stripHash "$arg") + if [[ -z "$curShell" ]]; then + # auto-detect the shell + case "$argbase" in + ?*.bash) curShell=bash;; + ?*.fish) curShell=fish;; + ?*.zsh) curShell=zsh;; + *) + if [[ "$argbase" = _* && "$argbase" != *.* ]]; then + # probably zsh + echo "installShellCompletion: warning: assuming path \`$arg' is zsh; please specify with --zsh" >&2 + curShell=zsh + else + echo "installShellCompletion: 
warning: unknown shell for path: $arg" >&2 + retval=2 + continue + fi;; + esac + fi + outName=$argbase + fi + # Identify output path + if [[ -n "$name" ]]; then + outName=$name + elif [[ -n "$cmdname" ]]; then + case "$curShell" in + bash|fish) outName=$cmdname.$curShell;; + zsh) outName=_$cmdname;; + *) + # Our list of shells is out of sync with the flags we accept or extensions we detect. + echo 'installShellCompletion: internal error' >&2 + return 1;; + esac + fi + local sharePath + case "$curShell" in + bash) sharePath=bash-completion/completions;; + fish) sharePath=fish/vendor_completions.d;; + zsh) + sharePath=zsh/site-functions + # only apply automatic renaming if we didn't have a manual rename + if [[ -z "$name" && -z "$cmdname" ]]; then + # convert a name like `foo.zsh` into `_foo` + outName=${outName%.zsh} + outName=_${outName#_} + fi;; + *) + # Our list of shells is out of sync with the flags we accept or extensions we detect. + echo 'installShellCompletion: internal error' >&2 + return 1;; + esac + # Install file + local outDir="${!outputBin:?}/share/$sharePath" + local outPath="$outDir/$outName" + if [[ -p "$arg" ]]; then + # install handles named pipes on NixOS but not on macOS + mkdir -p "$outDir" \ + && cat "$arg" > "$outPath" + else + install -Dm644 -T "$arg" "$outPath" + fi || return + # Clear the per-path flags + name= + done + if [[ -n "$name" ]]; then + echo 'installShellCompletion: error: --name flag given with no path' >&2 + return 1 + fi + return $retval +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh b/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh new file mode 100644 index 000000000000..754900bfc337 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh @@ -0,0 +1,6 @@ +prePhases+=" moveBuildDir" + +moveBuildDir() { + mkdir -p $out/.build + cd $out/.build +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh new file mode 100644 index 000000000000..b53e184b0956 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh @@ -0,0 +1,5 @@ +ld-is-cc-hook() { + LD=$CC +} + +preConfigureHooks+=(ld-is-cc-hook) diff --git a/nixpkgs/pkgs/build-support/setup-hooks/make-binary-wrapper/default.nix b/nixpkgs/pkgs/build-support/setup-hooks/make-binary-wrapper/default.nix new file mode 100644 index 000000000000..f364dd5de753 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/make-binary-wrapper/default.nix @@ -0,0 +1,27 @@ +{ targetPackages +, lib +, makeSetupHook +, dieHook +, writeShellScript +, tests +, cc ? targetPackages.stdenv.cc +, sanitizers ? 
[] +}: + +makeSetupHook { + name = "make-binary-wrapper-hook"; + propagatedBuildInputs = [ dieHook ]; + + substitutions = { + cc = "${cc}/bin/${cc.targetPrefix}cc ${lib.escapeShellArgs (map (s: "-fsanitize=${s}") sanitizers)}"; + }; + + passthru = { + # Extract the function call used to create a binary wrapper from its embedded docstring + extractCmd = writeShellScript "extract-binary-wrapper-cmd" '' + ${cc.bintools.targetPrefix}strings -dw "$1" | sed -n '/^makeCWrapper/,/^$/ p' + ''; + + tests = tests.makeBinaryWrapper; + }; +} ./make-binary-wrapper.sh diff --git a/nixpkgs/pkgs/build-support/setup-hooks/make-binary-wrapper/make-binary-wrapper.sh b/nixpkgs/pkgs/build-support/setup-hooks/make-binary-wrapper/make-binary-wrapper.sh new file mode 100644 index 000000000000..6cd01f6bf630 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/make-binary-wrapper/make-binary-wrapper.sh @@ -0,0 +1,425 @@ + +set -euo pipefail + +# Assert that FILE exists and is executable +# +# assertExecutable FILE +assertExecutable() { + local file="$1" + [[ -f "$file" && -x "$file" ]] || \ + die "Cannot wrap '$file' because it is not an executable file" +} + +# Generate a binary executable wrapper for wrapping an executable. +# The binary is compiled from generated C-code using gcc. +# makeWrapper EXECUTABLE OUT_PATH ARGS + +# ARGS: +# --argv0 NAME : set the name of the executed process to NAME +# (if unset or empty, defaults to EXECUTABLE) +# --inherit-argv0 : the executable inherits argv0 from the wrapper. +# (use instead of --argv0 '$0') +# --set VAR VAL : add VAR with value VAL to the executable's environment +# --set-default VAR VAL : like --set, but only adds VAR if not already set in +# the environment +# --unset VAR : remove VAR from the environment +# --chdir DIR : change working directory (use instead of --run "cd DIR") +# --add-flags ARGS : prepend ARGS to the invocation of the executable +# (that is, *before* any arguments passed on the command line) +# --append-flags ARGS : append ARGS to the invocation of the executable +# (that is, *after* any arguments passed on the command line) + +# --prefix ENV SEP VAL : suffix/prefix ENV with VAL, separated by SEP +# --suffix + +# To troubleshoot a binary wrapper after you compiled it, +# use the `strings` command or open the binary file in a text editor. 
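+#
+# A purely illustrative invocation (the program name and all flag values here
+# are hypothetical, not taken from any real package):
+#
+#   makeBinaryWrapper "$out/bin/.hello-wrapped" "$out/bin/hello" \
+#     --inherit-argv0 \
+#     --set-default LANG C.UTF-8 \
+#     --prefix PATH : "$out/libexec" \
+#     --add-flags "--config $out/share/hello/hello.conf"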
+makeWrapper() { makeBinaryWrapper "$@"; } +makeBinaryWrapper() { + local NIX_CFLAGS_COMPILE= NIX_CFLAGS_LINK= + local original="$1" + local wrapper="$2" + shift 2 + + assertExecutable "$original" + + mkdir -p "$(dirname "$wrapper")" + + makeDocumentedCWrapper "$original" "$@" | \ + @cc@ \ + -Wall -Werror -Wpedantic \ + -Wno-overlength-strings \ + -Os \ + -x c \ + -o "$wrapper" - +} + +# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...> +wrapProgram() { wrapProgramBinary "$@"; } +wrapProgramBinary() { + local prog="$1" + local hidden + + assertExecutable "$prog" + + hidden="$(dirname "$prog")/.$(basename "$prog")"-wrapped + while [ -e "$hidden" ]; do + hidden="${hidden}_" + done + mv "$prog" "$hidden" + makeBinaryWrapper "$hidden" "$prog" --inherit-argv0 "${@:2}" +} + +# Generate source code for the wrapper in such a way that the wrapper inputs +# will still be readable even after compilation +# makeDocumentedCWrapper EXECUTABLE ARGS +# ARGS: same as makeWrapper +makeDocumentedCWrapper() { + local src docs + src=$(makeCWrapper "$@") + docs=$(docstring "$@") + printf '%s\n\n' "$src" + printf '%s\n' "$docs" +} + +# makeCWrapper EXECUTABLE ARGS +# ARGS: same as makeWrapper +makeCWrapper() { + local argv0 inherit_argv0 n params cmd main flagsBefore flagsAfter flags executable length + local uses_prefix uses_suffix uses_assert uses_assert_success uses_stdio uses_asprintf + executable=$(escapeStringLiteral "$1") + params=("$@") + length=${#params[*]} + for ((n = 1; n < length; n += 1)); do + p="${params[n]}" + case $p in + --set) + cmd=$(setEnv "${params[n + 1]}" "${params[n + 2]}") + main="$main$cmd"$'\n' + n=$((n + 2)) + [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 2 arguments"$'\n' + ;; + --set-default) + cmd=$(setDefaultEnv "${params[n + 1]}" "${params[n + 2]}") + main="$main$cmd"$'\n' + uses_stdio=1 + uses_assert_success=1 + n=$((n + 2)) + [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 2 arguments"$'\n' + ;; + --unset) + cmd=$(unsetEnv "${params[n + 1]}") + main="$main$cmd"$'\n' + uses_stdio=1 + uses_assert_success=1 + n=$((n + 1)) + [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 1 argument"$'\n' + ;; + --prefix) + cmd=$(setEnvPrefix "${params[n + 1]}" "${params[n + 2]}" "${params[n + 3]}") + main="$main$cmd"$'\n' + uses_prefix=1 + uses_asprintf=1 + uses_stdio=1 + uses_assert_success=1 + uses_assert=1 + n=$((n + 3)) + [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 3 arguments"$'\n' + ;; + --suffix) + cmd=$(setEnvSuffix "${params[n + 1]}" "${params[n + 2]}" "${params[n + 3]}") + main="$main$cmd"$'\n' + uses_suffix=1 + uses_asprintf=1 + uses_stdio=1 + uses_assert_success=1 + uses_assert=1 + n=$((n + 3)) + [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 3 arguments"$'\n' + ;; + --chdir) + cmd=$(changeDir "${params[n + 1]}") + main="$main$cmd"$'\n' + uses_stdio=1 + uses_assert_success=1 + n=$((n + 1)) + [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 1 argument"$'\n' + ;; + --add-flags) + flags="${params[n + 1]}" + flagsBefore="$flagsBefore $flags" + uses_assert=1 + n=$((n + 1)) + [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 1 argument"$'\n' + ;; + --append-flags) + flags="${params[n + 1]}" + flagsAfter="$flagsAfter $flags" + uses_assert=1 + n=$((n + 1)) + [ $n -ge "$length" ] && main="$main#error makeCWrapper: $p takes 1 argument"$'\n' + ;; + --argv0) + argv0=$(escapeStringLiteral "${params[n + 1]}") + inherit_argv0= + n=$((n + 1)) + [ $n -ge "$length" ] && 
main="$main#error makeCWrapper: $p takes 1 argument"$'\n' + ;; + --inherit-argv0) + # Whichever comes last of --argv0 and --inherit-argv0 wins + inherit_argv0=1 + ;; + *) # Using an error macro, we will make sure the compiler gives an understandable error message + main="$main#error makeCWrapper: Unknown argument ${p}"$'\n' + ;; + esac + done + [[ -z "$flagsBefore" && -z "$flagsAfter" ]] || main="$main"${main:+$'\n'}$(addFlags "$flagsBefore" "$flagsAfter")$'\n'$'\n' + [ -z "$inherit_argv0" ] && main="${main}argv[0] = \"${argv0:-${executable}}\";"$'\n' + main="${main}return execv(\"${executable}\", argv);"$'\n' + + [ -z "$uses_asprintf" ] || printf '%s\n' "#define _GNU_SOURCE /* See feature_test_macros(7) */" + printf '%s\n' "#include <unistd.h>" + printf '%s\n' "#include <stdlib.h>" + [ -z "$uses_assert" ] || printf '%s\n' "#include <assert.h>" + [ -z "$uses_stdio" ] || printf '%s\n' "#include <stdio.h>" + [ -z "$uses_assert_success" ] || printf '\n%s\n' "#define assert_success(e) do { if ((e) < 0) { perror(#e); abort(); } } while (0)" + [ -z "$uses_prefix" ] || printf '\n%s\n' "$(setEnvPrefixFn)" + [ -z "$uses_suffix" ] || printf '\n%s\n' "$(setEnvSuffixFn)" + printf '\n%s' "int main(int argc, char **argv) {" + printf '\n%s' "$(indent4 "$main")" + printf '\n%s\n' "}" +} + +addFlags() { + local n flag before after var + + # Disable file globbing, since bash will otherwise try to find + # filenames matching the the value to be prefixed/suffixed if + # it contains characters considered wildcards, such as `?` and + # `*`. We want the value as is, except we also want to split + # it on on the separator; hence we can't quote it. + local reenableGlob=0 + if [[ ! -o noglob ]]; then + reenableGlob=1 + fi + set -o noglob + # shellcheck disable=SC2086 + before=($1) after=($2) + if (( reenableGlob )); then + set +o noglob + fi + + var="argv_tmp" + printf '%s\n' "char **$var = calloc(${#before[@]} + argc + ${#after[@]} + 1, sizeof(*$var));" + printf '%s\n' "assert($var != NULL);" + printf '%s\n' "${var}[0] = argv[0];" + for ((n = 0; n < ${#before[@]}; n += 1)); do + flag=$(escapeStringLiteral "${before[n]}") + printf '%s\n' "${var}[$((n + 1))] = \"$flag\";" + done + printf '%s\n' "for (int i = 1; i < argc; ++i) {" + printf '%s\n' " ${var}[${#before[@]} + i] = argv[i];" + printf '%s\n' "}" + for ((n = 0; n < ${#after[@]}; n += 1)); do + flag=$(escapeStringLiteral "${after[n]}") + printf '%s\n' "${var}[${#before[@]} + argc + $n] = \"$flag\";" + done + printf '%s\n' "${var}[${#before[@]} + argc + ${#after[@]}] = NULL;" + printf '%s\n' "argv = $var;" +} + +# chdir DIR +changeDir() { + local dir + dir=$(escapeStringLiteral "$1") + printf '%s' "assert_success(chdir(\"$dir\"));" +} + +# prefix ENV SEP VAL +setEnvPrefix() { + local env sep val + env=$(escapeStringLiteral "$1") + sep=$(escapeStringLiteral "$2") + val=$(escapeStringLiteral "$3") + printf '%s' "set_env_prefix(\"$env\", \"$sep\", \"$val\");" + assertValidEnvName "$1" +} + +# suffix ENV SEP VAL +setEnvSuffix() { + local env sep val + env=$(escapeStringLiteral "$1") + sep=$(escapeStringLiteral "$2") + val=$(escapeStringLiteral "$3") + printf '%s' "set_env_suffix(\"$env\", \"$sep\", \"$val\");" + assertValidEnvName "$1" +} + +# setEnv KEY VALUE +setEnv() { + local key value + key=$(escapeStringLiteral "$1") + value=$(escapeStringLiteral "$2") + printf '%s' "putenv(\"$key=$value\");" + assertValidEnvName "$1" +} + +# setDefaultEnv KEY VALUE +setDefaultEnv() { + local key value + key=$(escapeStringLiteral "$1") + value=$(escapeStringLiteral "$2") + 
printf '%s' "assert_success(setenv(\"$key\", \"$value\", 0));" + assertValidEnvName "$1" +} + +# unsetEnv KEY +unsetEnv() { + local key + key=$(escapeStringLiteral "$1") + printf '%s' "assert_success(unsetenv(\"$key\"));" + assertValidEnvName "$1" +} + +# Makes it safe to insert STRING within quotes in a C String Literal. +# escapeStringLiteral STRING +escapeStringLiteral() { + local result + result=${1//$'\\'/$'\\\\'} + result=${result//\"/'\"'} + result=${result//$'\n'/"\n"} + result=${result//$'\r'/"\r"} + printf '%s' "$result" +} + +# Indents every non-empty line by 4 spaces. To avoid trailing whitespace, we don't indent empty lines +# indent4 TEXT_BLOCK +indent4() { + printf '%s' "$1" | awk '{ if ($0 != "") { print " "$0 } else { print $0 }}' +} + +assertValidEnvName() { + case "$1" in + *=*) printf '\n%s\n' "#error Illegal environment variable name \`$1\` (cannot contain \`=\`)";; + "") printf '\n%s\n' "#error Environment variable name can't be empty.";; + esac +} + +setEnvPrefixFn() { + printf '%s' "\ +void set_env_prefix(char *env, char *sep, char *prefix) { + char *existing = getenv(env); + if (existing) { + char *val; + assert_success(asprintf(&val, \"%s%s%s\", prefix, sep, existing)); + assert_success(setenv(env, val, 1)); + free(val); + } else { + assert_success(setenv(env, prefix, 1)); + } +} +" +} + +setEnvSuffixFn() { + printf '%s' "\ +void set_env_suffix(char *env, char *sep, char *suffix) { + char *existing = getenv(env); + if (existing) { + char *val; + assert_success(asprintf(&val, \"%s%s%s\", existing, sep, suffix)); + assert_success(setenv(env, val, 1)); + free(val); + } else { + assert_success(setenv(env, suffix, 1)); + } +} +" +} + +# Embed a C string which shows up as readable text in the compiled binary wrapper, +# giving instructions for recreating the wrapper. 
+# Keep in sync with makeBinaryWrapper.extractCmd +docstring() { + printf '%s' "const char * DOCSTRING = \"$(escapeStringLiteral " + + +# ------------------------------------------------------------------------------------ +# The C-code for this binary wrapper has been generated using the following command: + + +makeCWrapper $(formatArgs "$@") + + +# (Use \`nix-shell -p makeBinaryWrapper\` to get access to makeCWrapper in your shell) +# ------------------------------------------------------------------------------------ + + +")\";" +} + +# formatArgs EXECUTABLE ARGS +formatArgs() { + printf '%s' "${1@Q}" + shift + while [ $# -gt 0 ]; do + case "$1" in + --set) + formatArgsLine 2 "$@" + shift 2 + ;; + --set-default) + formatArgsLine 2 "$@" + shift 2 + ;; + --unset) + formatArgsLine 1 "$@" + shift 1 + ;; + --prefix) + formatArgsLine 3 "$@" + shift 3 + ;; + --suffix) + formatArgsLine 3 "$@" + shift 3 + ;; + --chdir) + formatArgsLine 1 "$@" + shift 1 + ;; + --add-flags) + formatArgsLine 1 "$@" + shift 1 + ;; + --append-flags) + formatArgsLine 1 "$@" + shift 1 + ;; + --argv0) + formatArgsLine 1 "$@" + shift 1 + ;; + --inherit-argv0) + formatArgsLine 0 "$@" + ;; + esac + shift + done + printf '%s\n' "" +} + +# formatArgsLine ARG_COUNT ARGS +formatArgsLine() { + local ARG_COUNT LENGTH + ARG_COUNT=$1 + LENGTH=$# + shift + printf '%s' $' \\\n '"$1" + shift + while [ "$ARG_COUNT" -gt $((LENGTH - $# - 2)) ]; do + printf ' %s' "${1@Q}" + shift + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh b/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh new file mode 100644 index 000000000000..9108b4c50355 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh @@ -0,0 +1,25 @@ +postPhases+=" coverageReportPhase" + +coverageReportPhase() { + lcov --directory . --capture --output-file app.info + set -o noglob + lcov --remove app.info ${lcovFilter:-"/nix/store/*"} > app2.info + set +o noglob + mv app2.info app.info + + mkdir -p $out/coverage + genhtml app.info $lcovExtraTraceFiles -o $out/coverage > log + + # Grab the overall coverage percentage so that Hydra can plot it over time. + mkdir -p $out/nix-support + lineCoverage="$(sed 's/.*lines\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)" + functionCoverage="$(sed 's/.*functions\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)" + if [ -z "$lineCoverage" -o -z "$functionCoverage" ]; then + echo "failed to get coverage statistics" + exit 1 + fi + echo "lineCoverage $lineCoverage %" >> $out/nix-support/hydra-metrics + echo "functionCoverage $functionCoverage %" >> $out/nix-support/hydra-metrics + + echo "report coverage $out/coverage" >> $out/nix-support/hydra-build-products +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh b/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh new file mode 100644 index 000000000000..b07b0c5ae804 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh @@ -0,0 +1,37 @@ +# symlinks are often created in postFixup +# don't use fixupOutputHooks, it is before postFixup +postFixupHooks+=(_makeSymlinksRelativeInAllOutputs) + +# For every symlink in $output that refers to another file in $output +# ensure that the symlink is relative. This removes references to the output +# has from the resulting store paths and thus the NAR files. +_makeSymlinksRelative() { + local symlinkTarget + + if [ "${dontRewriteSymlinks-}" ] || [ ! 
-e "$prefix" ]; then + return + fi + + while IFS= read -r -d $'\0' f; do + symlinkTarget=$(readlink "$f") + if [[ "$symlinkTarget"/ != "$prefix"/* ]]; then + # skip this symlink as it doesn't point to $prefix + continue + fi + + if [ ! -e "$symlinkTarget" ]; then + echo "the symlink $f is broken, it points to $symlinkTarget (which is missing)" + fi + + echo "rewriting symlink $f to be relative to $prefix" + ln -snrf "$symlinkTarget" "$f" + + done < <(find $prefix -type l -print0) +} + +_makeSymlinksRelativeInAllOutputs() { + local output + for output in $(getAllOutputNames); do + prefix="${!output}" _makeSymlinksRelative + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh b/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh new file mode 100644 index 000000000000..11b332bfc3eb --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh @@ -0,0 +1,221 @@ +# Assert that FILE exists and is executable +# +# assertExecutable FILE +assertExecutable() { + local file="$1" + [[ -f "$file" && -x "$file" ]] || \ + die "Cannot wrap '$file' because it is not an executable file" +} + +# construct an executable file that wraps the actual executable +# makeWrapper EXECUTABLE OUT_PATH ARGS + +# ARGS: +# --argv0 NAME : set the name of the executed process to NAME +# (if unset or empty, defaults to EXECUTABLE) +# --inherit-argv0 : the executable inherits argv0 from the wrapper. +# (use instead of --argv0 '$0') +# --set VAR VAL : add VAR with value VAL to the executable's environment +# --set-default VAR VAL : like --set, but only adds VAR if not already set in +# the environment +# --unset VAR : remove VAR from the environment +# --chdir DIR : change working directory (use instead of --run "cd DIR") +# --run COMMAND : run command before the executable +# --add-flags ARGS : prepend ARGS to the invocation of the executable +# (that is, *before* any arguments passed on the command line) +# --append-flags ARGS : append ARGS to the invocation of the executable +# (that is, *after* any arguments passed on the command line) + +# --prefix ENV SEP VAL : suffix/prefix ENV with VAL, separated by SEP +# --suffix +# --prefix-each ENV SEP VALS : like --prefix, but VALS is a list +# --suffix-each ENV SEP VALS : like --suffix, but VALS is a list +# --prefix-contents ENV SEP FILES : like --suffix-each, but contents of FILES +# are read first and used as VALS +# --suffix-contents +makeWrapper() { makeShellWrapper "$@"; } +makeShellWrapper() { + local original="$1" + local wrapper="$2" + local params varName value command separator n fileNames + local argv0 flagsBefore flagsAfter flags + + assertExecutable "$original" + + # Write wrapper code which adds `value` to the beginning or end of + # the list variable named by `varName`, depending on the `mode` + # specified. + # + # A value which is already part of the list will not be added + # again. If this is the case and the `suffix` mode is used, the + # list won't be touched at all. The `prefix` mode will however + # move the last matching instance of the value to the beginning + # of the list. Any remaining duplicates of the value will be left + # as-is. 
+ addValue() { + local mode="$1" # `prefix` or `suffix` to add to the beginning or end respectively + local varName="$2" # name of list variable to add to + local separator="$3" # character used to separate elements of list + local value="$4" # one value, or multiple values separated by `separator`, to add to list + + # Disable file globbing, since bash will otherwise try to find + # filenames matching the the value to be prefixed/suffixed if + # it contains characters considered wildcards, such as `?` and + # `*`. We want the value as is, except we also want to split + # it on on the separator; hence we can't quote it. + local reenableGlob=0 + if [[ ! -o noglob ]]; then + reenableGlob=1 + fi + set -o noglob + + if [[ -n "$value" ]]; then + local old_ifs=$IFS + IFS=$separator + + if [[ "$mode" == '--prefix'* ]]; then + # Keep the order of the components as written when + # prefixing; normally, they would be added in the + # reverse order. + local tmp= + for v in $value; do + tmp=$v${tmp:+$separator}$tmp + done + value="$tmp" + fi + for v in $value; do + { + echo "$varName=\${$varName:+${separator@Q}\$$varName${separator@Q}}" # add separators on both ends unless empty + if [[ "$mode" == '--prefix'* ]]; then # -- in prefix mode -- + echo "$varName=\${$varName/${separator@Q}${v@Q}${separator@Q}/${separator@Q}}" # remove the first instance of the value (if any) + echo "$varName=${v@Q}\$$varName" # prepend the value + elif [[ "$mode" == '--suffix'* ]]; then # -- in suffix mode -- + echo "if [[ \$$varName != *${separator@Q}${v@Q}${separator@Q}* ]]; then" # if the value isn't already in the list + echo " $varName=\$$varName${v@Q}" # append the value + echo "fi" + else + echo "unknown mode $mode!" 1>&2 + exit 1 + fi + echo "$varName=\${$varName#${separator@Q}}" # remove leading separator + echo "$varName=\${$varName%${separator@Q}}" # remove trailing separator + echo "export $varName" + } >> "$wrapper" + done + IFS=$old_ifs + fi + + if (( reenableGlob )); then + set +o noglob + fi + } + + mkdir -p "$(dirname "$wrapper")" + + echo "#! 
@shell@ -e" > "$wrapper" + + params=("$@") + for ((n = 2; n < ${#params[*]}; n += 1)); do + p="${params[$n]}" + + if [[ "$p" == "--set" ]]; then + varName="${params[$((n + 1))]}" + value="${params[$((n + 2))]}" + n=$((n + 2)) + echo "export $varName=${value@Q}" >> "$wrapper" + elif [[ "$p" == "--set-default" ]]; then + varName="${params[$((n + 1))]}" + value="${params[$((n + 2))]}" + n=$((n + 2)) + echo "export $varName=\${$varName-${value@Q}}" >> "$wrapper" + elif [[ "$p" == "--unset" ]]; then + varName="${params[$((n + 1))]}" + n=$((n + 1)) + echo "unset $varName" >> "$wrapper" + elif [[ "$p" == "--chdir" ]]; then + dir="${params[$((n + 1))]}" + n=$((n + 1)) + echo "cd ${dir@Q}" >> "$wrapper" + elif [[ "$p" == "--run" ]]; then + command="${params[$((n + 1))]}" + n=$((n + 1)) + echo "$command" >> "$wrapper" + elif [[ ("$p" == "--suffix") || ("$p" == "--prefix") ]]; then + varName="${params[$((n + 1))]}" + separator="${params[$((n + 2))]}" + value="${params[$((n + 3))]}" + n=$((n + 3)) + addValue "$p" "$varName" "$separator" "$value" + elif [[ ("$p" == "--suffix-each") || ("$p" == "--prefix-each") ]]; then + varName="${params[$((n + 1))]}" + separator="${params[$((n + 2))]}" + values="${params[$((n + 3))]}" + n=$((n + 3)) + for value in $values; do + addValue "$p" "$varName" "$separator" "$value" + done + elif [[ ("$p" == "--suffix-contents") || ("$p" == "--prefix-contents") ]]; then + varName="${params[$((n + 1))]}" + separator="${params[$((n + 2))]}" + fileNames="${params[$((n + 3))]}" + n=$((n + 3)) + for fileName in $fileNames; do + contents="$(cat "$fileName")" + addValue "$p" "$varName" "$separator" "$contents" + done + elif [[ "$p" == "--add-flags" ]]; then + flags="${params[$((n + 1))]}" + n=$((n + 1)) + flagsBefore="${flagsBefore-} $flags" + elif [[ "$p" == "--append-flags" ]]; then + flags="${params[$((n + 1))]}" + n=$((n + 1)) + flagsAfter="${flagsAfter-} $flags" + elif [[ "$p" == "--argv0" ]]; then + argv0="${params[$((n + 1))]}" + n=$((n + 1)) + elif [[ "$p" == "--inherit-argv0" ]]; then + # Whichever comes last of --argv0 and --inherit-argv0 wins + argv0='$0' + else + die "makeWrapper doesn't understand the arg $p" + fi + done + + echo exec ${argv0:+-a \"$argv0\"} \""$original"\" \ + "${flagsBefore-}" '"$@"' "${flagsAfter-}" >> "$wrapper" + + chmod +x "$wrapper" +} + +addSuffix() { + suffix="$1" + shift + for name in "$@"; do + echo "$name$suffix" + done +} + +filterExisting() { + for fn in "$@"; do + if test -e "$fn"; then + echo "$fn" + fi + done +} + +# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...> +wrapProgram() { wrapProgramShell "$@"; } +wrapProgramShell() { + local prog="$1" + local hidden + + assertExecutable "$prog" + + hidden="$(dirname "$prog")/.$(basename "$prog")"-wrapped + while [ -e "$hidden" ]; do + hidden="${hidden}_" + done + mv "$prog" "$hidden" + makeShellWrapper "$hidden" "$prog" --inherit-argv0 "${@:2}" +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-build-tree.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-build-tree.sh new file mode 100644 index 000000000000..2718070f3933 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/move-build-tree.sh @@ -0,0 +1,12 @@ +prePhases+=" moveBuildDir" + +moveBuildDir() { + mkdir -p $out/.build + cd $out/.build +} + +postPhases+=" removeBuildDir" + +removeBuildDir() { + rm -rf $out/.build +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh new file mode 100644 index 000000000000..3f961155d201 --- /dev/null +++ 
b/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh @@ -0,0 +1,27 @@ +# This setup hook moves $out/{man,doc,info} to $out/share. + +preFixupHooks+=(_moveToShare) + +_moveToShare() { + if [ -n "$__structuredAttrs" ]; then + if [ -z "${forceShare-}" ]; then + forceShare=( man doc info ) + fi + else + forceShare=( ${forceShare:-man doc info} ) + fi + + if [[ -z "$out" ]]; then return; fi + + for d in "${forceShare[@]}"; do + if [ -d "$out/$d" ]; then + if [ -d "$out/share/$d" ]; then + echo "both $d/ and share/$d/ exist!" + else + echo "moving $out/$d to $out/share/$d" + mkdir -p $out/share + mv $out/$d $out/share/ + fi + fi + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh new file mode 100644 index 000000000000..9517af797323 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh @@ -0,0 +1,22 @@ +# This setup hook, for each output, moves everything in $output/lib64 +# to $output/lib, and replaces $output/lib64 with a symlink to +# $output/lib. The rationale is that lib64 directories are unnecessary +# in Nix (since 32-bit and 64-bit builds of a package are in different +# store paths anyway). +# If the move would overwrite anything, it should fail on rmdir. + +fixupOutputHooks+=(_moveLib64) + +_moveLib64() { + if [ "${dontMoveLib64-}" = 1 ]; then return; fi + if [ ! -e "$prefix/lib64" -o -L "$prefix/lib64" ]; then return; fi + echo "moving $prefix/lib64/* to $prefix/lib" + mkdir -p $prefix/lib + shopt -s dotglob + for i in $prefix/lib64/*; do + mv --no-clobber "$i" $prefix/lib + done + shopt -u dotglob + rmdir $prefix/lib64 + ln -s lib $prefix/lib64 +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh new file mode 100644 index 000000000000..1c0c4dc9f2d9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh @@ -0,0 +1,19 @@ +# This setup hook, for each output, moves everything in $output/sbin +# to $output/bin, and replaces $output/sbin with a symlink to +# $output/bin. + +fixupOutputHooks+=(_moveSbin) + +_moveSbin() { + if [ "${dontMoveSbin-}" = 1 ]; then return; fi + if [ ! -e "$prefix/sbin" -o -L "$prefix/sbin" ]; then return; fi + echo "moving $prefix/sbin/* to $prefix/bin" + mkdir -p $prefix/bin + shopt -s dotglob + for i in $prefix/sbin/*; do + mv "$i" $prefix/bin + done + shopt -u dotglob + rmdir $prefix/sbin + ln -s bin $prefix/sbin +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh b/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh new file mode 100755 index 000000000000..33e89898262f --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh @@ -0,0 +1,25 @@ +# shellcheck shell=bash + +# This setup hook, for each output, moves everything in +# $output/lib/systemd/user to $output/share/systemd/user, and replaces +# $output/lib/systemd/user with a symlink to +# $output/share/systemd/user. + +fixupOutputHooks+=(_moveSystemdUserUnits) + +_moveSystemdUserUnits() { + if [ "${dontMoveSystemdUserUnits:-0}" = 1 ]; then return; fi + if [ ! 
-e "${prefix:?}/lib/systemd/user" ]; then return; fi + local source="$prefix/lib/systemd/user" + local target="$prefix/share/systemd/user" + echo "moving $source/* to $target" + mkdir -p "$target" + ( + shopt -s dotglob + for i in "$source"/*; do + mv "$i" "$target" + done + ) + rmdir "$source" + ln -s "$target" "$source" +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/mpi-check-hook/default.nix b/nixpkgs/pkgs/build-support/setup-hooks/mpi-check-hook/default.nix new file mode 100644 index 000000000000..2834cfcc44ff --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/mpi-check-hook/default.nix @@ -0,0 +1,5 @@ +{ callPackage, makeSetupHook }: + +makeSetupHook { + name = "mpi-checkPhase-hook"; +} ./mpi-check-hook.sh diff --git a/nixpkgs/pkgs/build-support/setup-hooks/mpi-check-hook/mpi-check-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/mpi-check-hook/mpi-check-hook.sh new file mode 100644 index 000000000000..fca1f7b7f932 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/mpi-check-hook/mpi-check-hook.sh @@ -0,0 +1,54 @@ +preCheckHooks+=('setupMpiCheck') +preInstallCheckHooks+=('setupMpiCheck') + + +setupMpiCheck() { + # Find out which MPI implementation we are using + # and set safe defaults that are guaranteed to run + # on any build machine + + mpiType="NONE" + + # OpenMPI signature + if command ompi_info &> /dev/null; then + mpiType="openmpi" + fi + + # MPICH based implementations + if command mpichversion &> /dev/null; then + if [ "$mpiType" != "NONE" ]; then + echo "WARNING: found OpenMPI and MPICH/MVAPICH executables" + fi + + version=$(mpichversion) + if [[ "$version" == *"MPICH"* ]]; then + mpiType="MPICH" + fi + if [[ "$version" == *"MVAPICH"* ]]; then + mpiType="MVAPICH" + fi + fi + + echo "Found MPI implementation: $mpiType" + + case $mpiType in + openmpi) + # make sure the test starts even if we have less than the requested amount of cores + export OMPI_MCA_rmaps_base_oversubscribe=1 + # Disable CPU pinning + export OMPI_MCA_hwloc_base_binding_policy=none + ;; + MPICH) + # Fix to make mpich run in a sandbox + export HYDRA_IFACE=lo + ;; + MVAPICH) + # Disable CPU pinning + export MV2_ENABLE_AFFINITY=0 + ;; + esac + + # Limit number of OpenMP threads. Default is "all cores". + export OMP_NUM_THREADS=1 +} + diff --git a/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh b/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh new file mode 100644 index 000000000000..45096d833b42 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh @@ -0,0 +1,214 @@ +# The base package for automatic multiple-output splitting. Used in stdenv as well. +preConfigureHooks+=(_multioutConfig) +preFixupHooks+=(_multioutDocs) +preFixupHooks+=(_multioutDevs) +postFixupHooks+=(_multioutPropagateDev) + +# _assignFirst varName otherVarNames* +# +# Set the value of the variable named $varName to the first of otherVarNames +# that refers to a non-empty variable name. +# +# If none of otherVarNames refers to a non-empty variable, the error message is +# specific to this function's use case, which is setting up the output variables. +_assignFirst() { + local varName="$1" + local _var + local REMOVE=REMOVE # slightly hacky - we allow REMOVE (i.e. not a variable name) + shift + for _var in "$@"; do + if [ -n "${!_var-}" ]; then eval "${varName}"="${_var}"; return; fi + done + echo + echo "error: _assignFirst: could not find a non-empty variable whose name to assign to ${varName}." 
+ echo " The following variables were all unset or empty:" + echo " $*" + if [ -z "${out:-}" ]; then + echo ' If you do not want an "out" output in your derivation, make sure to define' + echo ' the other specific required outputs. This can be achieved by picking one' + echo " of the above as an output." + echo ' You do not have to remove "out" if you want to have a different default' + echo ' output, because the first output is taken as a default.' + echo + fi + return 1 # none found +} + +# Same as _assignFirst, but only if "$1" = "" +_overrideFirst() { + if [ -z "${!1-}" ]; then + _assignFirst "$@" + fi +} + + +# Setup chains of sane default values with easy overridability. +# The variables are global to be usable anywhere during the build. +# Typical usage in package is defining outputBin = "dev"; + +_overrideFirst outputDev "dev" "out" +_overrideFirst outputBin "bin" "out" + +_overrideFirst outputInclude "$outputDev" + +# so-libs are often among the main things to keep, and so go to $out +_overrideFirst outputLib "lib" "out" + +_overrideFirst outputDoc "doc" "out" +_overrideFirst outputDevdoc "devdoc" REMOVE # documentation for developers +# man and info pages are small and often useful to distribute with binaries +_overrideFirst outputMan "man" "$outputBin" +_overrideFirst outputDevman "devman" "devdoc" "$outputMan" +_overrideFirst outputInfo "info" "$outputBin" + + +# Add standard flags to put files into the desired outputs. +_multioutConfig() { + if [ "$(getAllOutputNames)" = "out" ] || [ -z "${setOutputFlags-1}" ]; then return; fi; + + # try to detect share/doc/${shareDocName} + # Note: sadly, $configureScript detection comes later in configurePhase, + # and reordering would cause more trouble than worth. + if [ -z "${shareDocName:-}" ]; then + local confScript="${configureScript:-}" + if [ -z "$confScript" ] && [ -x ./configure ]; then + confScript=./configure + fi + if [ -f "$confScript" ]; then + local shareDocName="$(sed -n "s/^PACKAGE_TARNAME='\(.*\)'$/\1/p" < "$confScript")" + fi + # PACKAGE_TARNAME sometimes contains garbage. + if [ -z "$shareDocName" ] || echo "$shareDocName" | grep -q '[^a-zA-Z0-9_-]'; then + shareDocName="$(echo "$name" | sed 's/-[^a-zA-Z].*//')" + fi + fi + + prependToVar configureFlags \ + --bindir="${!outputBin}"/bin --sbindir="${!outputBin}"/sbin \ + --includedir="${!outputInclude}"/include --oldincludedir="${!outputInclude}"/include \ + --mandir="${!outputMan}"/share/man --infodir="${!outputInfo}"/share/info \ + --docdir="${!outputDoc}"/share/doc/"${shareDocName}" \ + --libdir="${!outputLib}"/lib --libexecdir="${!outputLib}"/libexec \ + --localedir="${!outputLib}"/share/locale + + prependToVar installFlags \ + pkgconfigdir="${!outputDev}"/lib/pkgconfig \ + m4datadir="${!outputDev}"/share/aclocal aclocaldir="${!outputDev}"/share/aclocal +} + + +# Add rpath prefixes to library paths, and avoid stdenv doing it for $out. +_addRpathPrefix "${!outputLib}" +NIX_NO_SELF_RPATH=1 + + +# Move subpaths that match pattern $1 from under any output/ to the $2 output/ +# Beware: only globbing patterns are accepted, e.g.: * ? [abc] +# A special target "REMOVE" is allowed: moveToOutput foo REMOVE +moveToOutput() { + local patt="$1" + local dstOut="$2" + local output + for output in $(getAllOutputNames); do + if [ "${!output}" = "$dstOut" ]; then continue; fi + local srcPath + for srcPath in "${!output}"/$patt; do + # apply to existing files/dirs, *including* broken symlinks + if [ ! -e "$srcPath" ] && [ ! 
-L "$srcPath" ]; then continue; fi + + if [ "$dstOut" = REMOVE ]; then + echo "Removing $srcPath" + rm -r "$srcPath" + else + local dstPath="$dstOut${srcPath#${!output}}" + echo "Moving $srcPath to $dstPath" + + if [ -d "$dstPath" ] && [ -d "$srcPath" ] + then # attempt directory merge + # check the case of trying to move an empty directory + rmdir "$srcPath" --ignore-fail-on-non-empty + if [ -d "$srcPath" ]; then + mv -t "$dstPath" "$srcPath"/* + rmdir "$srcPath" + fi + else # usual move + mkdir -p "$(readlink -m "$dstPath/..")" + mv "$srcPath" "$dstPath" + fi + fi + + # remove empty directories, printing iff at least one gets removed + local srcParent="$(readlink -m "$srcPath/..")" + if [ -n "$(find "$srcParent" -maxdepth 0 -type d -empty 2>/dev/null)" ]; then + echo "Removing empty $srcParent/ and (possibly) its parents" + rmdir -p --ignore-fail-on-non-empty "$srcParent" \ + 2> /dev/null || true # doesn't ignore failure for some reason + fi + done + done +} + +# Move documentation to the desired outputs. +_multioutDocs() { + local REMOVE=REMOVE # slightly hacky - we expand ${!outputFoo} + + moveToOutput share/info "${!outputInfo}" + moveToOutput share/doc "${!outputDoc}" + moveToOutput share/gtk-doc "${!outputDevdoc}" + moveToOutput share/devhelp/books "${!outputDevdoc}" + + # the default outputMan is in $bin + moveToOutput share/man "${!outputMan}" + moveToOutput share/man/man3 "${!outputDevman}" +} + +# Move development-only stuff to the desired outputs. +_multioutDevs() { + if [ "$(getAllOutputNames)" = "out" ] || [ -z "${moveToDev-1}" ]; then return; fi; + moveToOutput include "${!outputInclude}" + # these files are sometimes provided even without using the corresponding tool + moveToOutput lib/pkgconfig "${!outputDev}" + moveToOutput share/pkgconfig "${!outputDev}" + moveToOutput lib/cmake "${!outputDev}" + moveToOutput share/aclocal "${!outputDev}" + # don't move *.la, as libtool needs them in the directory of the library + + for f in "${!outputDev}"/{lib,share}/pkgconfig/*.pc; do + echo "Patching '$f' includedir to output ${!outputInclude}" + sed -i "/^includedir=/s,=\${prefix},=${!outputInclude}," "$f" + done +} + +# Make the "dev" propagate other outputs needed for development. 
+_multioutPropagateDev() { + if [ "$(getAllOutputNames)" = "out" ]; then return; fi; + + local outputFirst + for outputFirst in $(getAllOutputNames); do + break + done + local propagaterOutput="$outputDev" + if [ -z "$propagaterOutput" ]; then + propagaterOutput="$outputFirst" + fi + + # Default value: propagate binaries, includes and libraries + if [ -z "${propagatedBuildOutputs+1}" ]; then + local po_dirty="$outputBin $outputInclude $outputLib" + set +o pipefail + propagatedBuildOutputs=`echo "$po_dirty" \ + | tr -s ' ' '\n' | grep -v -F "$propagaterOutput" \ + | sort -u | tr '\n' ' ' ` + set -o pipefail + fi + + # The variable was explicitly set to empty or we resolved it so + if [ -z "$propagatedBuildOutputs" ]; then + return + fi + + mkdir -p "${!propagaterOutput}"/nix-support + for output in $propagatedBuildOutputs; do + echo -n " ${!output}" >> "${!propagaterOutput}"/nix-support/propagated-build-inputs + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/default.nix b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/default.nix new file mode 100644 index 000000000000..854f857020aa --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/default.nix @@ -0,0 +1,18 @@ +{ lib +, makeSetupHook +, which +, callPackage +}: + +makeSetupHook { + name = "patch-ppd-files"; + substitutions = { + which = lib.getBin which; + awkscript = ./patch-ppd-lines.awk; + }; + passthru.tests.test = callPackage ./test.nix {}; + meta = { + description = "setup hook to patch executable paths in ppd files"; + maintainers = [ lib.maintainers.yarny ]; + }; +} ./patch-ppd-hook.sh diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/patch-ppd-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/patch-ppd-hook.sh new file mode 100644 index 000000000000..77322b245b27 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/patch-ppd-hook.sh @@ -0,0 +1,183 @@ +fixupOutputHooks+=(_patchPpdFileCommands4fixupOutputHooks) + + + +# Install a hook for the `fixupPhase`: +# If the variable `ppdFileCommands` contains a list of +# executable names, the hook calls `patchPpdFileCommands` +# on each output's `/share/cups/model` and `/share/ppds` +# directories in order to replace calls to those executables. + +_patchPpdFileCommands4fixupOutputHooks () { + [[ -n $ppdFileCommands ]] || return 0 + if [[ -d $prefix/share/cups/model ]]; then + patchPpdFileCommands "$prefix/share/cups/model" $ppdFileCommands + fi + if [[ -d $prefix/share/ppds ]]; then + patchPpdFileCommands "$prefix/share/ppds" $ppdFileCommands + fi +} + + + +# patchPpdFileCommands PPD-ROOT PROGNAME... +# +# Look for ppd files in the directory PPD-ROOT. +# Descend into subdirectories, even if they are symlinks. +# However, ignore ppd files that don't belong to the same +# prefix ($NIX_STORE/$package_name) as PPD-ROOT-DIR does, +# to avoid stepping into other package's directories. +# ppd files may be gzipped; if the are, +# uncompress them, later recompress them. +# Skip symlinks to ppd files. +# PPD-ROOT may also be a single ppd file. +# +# Look for the PROGNAME executable in outputs and `buildInputs`, +# then look for PROGNAME invocations in the ppd files, +# without path or with common paths like `/usr/bin/$PROGNAME`. +# Replace those invocations with an absolute path to the +# corresponding executable from the outputs or `buildInputs`. +# Executables are searched where CUPS would search them, +# i.e., in `/bin` and `/lib/cups/filter`. 
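# Illustrative usage sketch; the executable names below (gs, foomatic-rip) are
# placeholders. A printer-driver package can either set the hook variable and
# let the fixup hook patch the standard ppd directories:
#
#   ppdFileCommands="gs foomatic-rip"
#
# or call the function directly on a custom location:
#
#   patchPpdFileCommands "$out/share/cups/model" gs foomatic-rip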
+# +# As soon as an executable's path is replaced as +# described above, the package containing the binary +# is added to the list of propagated build inputs. +# This ensures the executable's package is still +# recognized as runtime dependency of the ppd file +# even if the ppd file is compressed lateron. +# +# PROGNAME may not contain spaces or tabs. +# The function will also likely fail or produce +# broken results if PROGNAME contains characters that +# require shell or regex escaping (e.g. a backslash). + +patchPpdFileCommands () { + + local bin binnew binold binoldgrep cupspath path ppdroot ppdrootprefix + + # we will store some temporary data here + pushd "$(mktemp -d --tmpdir patch-ppd-file-commands.XXXX)" + + # remember the ppd root path + [[ "$1" == $NIX_STORE/* ]] # ensure it's a store directory + ppdroot=$1 + shift # now "$@" is the list of binaries + ppdrootprefix=${ppdroot%"/${ppdroot#"$NIX_STORE"/*/}"} + + # create `cupspath` (where we should look for binaries), + # with these priorities + # * outputs of current build before buildInputs + # * `/lib/cups/filter' before `/bin` + # * add HOST_PATH at end, so we don't miss anything + for path in $(getAllOutputNames); do + addToSearchPath cupspath "${!path}/lib/cups/filter" + addToSearchPath cupspath "${!path}/bin" + done + for path in ${pkgsHostTarget+"${pkgsHostTarget[@]}"}; do + addToSearchPath cupspath "$path/lib/cups/filter" + addToSearchPath cupspath "$path/bin" + done + while read -r -d : path; do + addToSearchPath cupspath "$path" + done <<< "${HOST_PATH:+"${HOST_PATH}:"}" + + # create list of compressed ppd files + # so we can recompress them later + find -L "$ppdroot" -type f -iname '*.ppd.gz' '!' -xtype l -print0 > gzipped + + # decompress gzipped ppd files + echo "patchPpdFileCommands: decompressing $(grep -cz '^' < gzipped) gzipped ppd file(s) in $ppdroot" + xargs -0r -n 64 -P "$NIX_BUILD_CORES" gunzip < gzipped + + # create list of all ppd files to be checked + find -L "$ppdroot" -type f -iname '*.ppd' '!' 
-xtype l -print0 > ppds + + for bin in "$@"; do + + # discover new path + binnew=$(PATH=$cupspath '@which@/bin/which' "$bin") + echo "patchPpdFileCommands: located binary $binnew" + + # for each binary, we look for the name itself, but + # also for a couple of common paths that might be used + for binold in {/usr,}/{lib/cups/filter,sbin,bin}/"$bin" "$bin"; do + + # escape regex characters in the old command string + binoldgrep=$(sed 's,[]$.*[\^],\\&,g' <<< "$binold") + # ...and surround old command with some regex + # that singles out shell command invocations + # to avoid replacing other strings that might contain the + # command name by accident (like "perl" in "perl-script") + binoldgrep='\(^\|[;&| '$'\t''"`(]\)'"$binoldgrep"'\($\|[);&| '$'\t''"`<>]\)' + # this string is used to *quickly* filter out + # unaffected files before the (slower) awk script runs; + # note that a similar regex is build in the awk script; + # if `binoldgrep` is changed, the awk script should also be checked + + # create list of likely affected files + # (might yield exit status != 0 if there are no matches) + xargs -0r grep -lZ "$binoldgrep" < ppds > ppds-to-patch || true + + echo "patchPpdFileCommands: $(grep -cz '^' < ppds-to-patch) ppd file(s) contain $binold" + + # actually patch affected ppd files with awk; + # this takes some time but can be parallelized; + # speed up with LC_ALL=C, https://stackoverflow.com/a/33850386 + LC_ALL=C xargs -0r -n 64 -P "$NIX_BUILD_CORES" \ + awk -i inplace -v old="${binold//\\/\\\\}" -v new="${binnew//\\/\\\\}" -f "@awkscript@" \ + < ppds-to-patch + + done + + # create list of affected files + xargs -0r grep -lZF "$binnew" < ppds > patched-ppds || true + + echo "patchPpdFileCommands: $(grep -cz '^' < patched-ppds) ppd file(s) patched with $binnew" + + # if the new command is contained in a file, + # remember the new path so we can add it to + # the list of propagated dependencies later + if [[ -s patched-ppds ]]; then + printf '%s\0' "${binnew%"/${binnew#"${NIX_STORE}"/*/}"}" >> dependencies + fi + + done + + # recompress ppd files that have been decompressed before + echo "patchPpdFileCommands: recompressing $(grep -cz '^' < gzipped) gzipped ppd file(s)" + # we can't just hand over the paths of the uncompressed files + # to gzip as it would add the lower-cased extension ".gz" + # even for files where the original was named ".GZ" + xargs -0r -n 1 -P "$NIX_BUILD_CORES" \ + "$SHELL" -c 'gzip -9nS ".${0##*.}" "${0%.*}"' \ + < gzipped + + # enlist dependencies for propagation; + # this is needed in case ppd files are compressed later + # (Nix won't find dependency paths in compressed files) + if [[ -s dependencies ]]; then + + # weed out duplicates from the dependency list first + sort -zu dependencies > sorted-dependencies + + mkdir -p "$ppdrootprefix/nix-support" + while IFS= read -r -d '' path; do + printWords "$path" >> "$ppdrootprefix/nix-support/propagated-build-inputs" + # stdenv writes it's own `propagated-build-inputs`, + # based on the variable `propagatedBuildInputs`, + # but only to one output (`outputDev`). + # So we also add our dependencies to that variable. + # If our file survives as written above, great! + # If stdenv overwrits it, + # our dependencies will still be added to the file. + # The end result might contain too many + # propagated dependencies for multi-output packages, + # but never a broken package. 
+ propagatedBuildInputs+=("$path") + done < sorted-dependencies + fi + + popd + +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/patch-ppd-lines.awk b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/patch-ppd-lines.awk new file mode 100644 index 000000000000..ddb9171fff32 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/patch-ppd-lines.awk @@ -0,0 +1,50 @@ +BEGIN { + + # ppd file keys are separated from their values by a colon, + # but "options" may reside between the key name and the colon; + # options are separated from the key by spaces + # (we also permit tabs to be on the safe side) + FS = "[: \t]"; + + # escape regex characters in the old and new command strings + gsub(/[]\\.^$(){}|*+?[]/, "\\\\&", old); + gsub(/\\/, "\\\\&", new); + # ...and surround old command with some regex + # that singles out shell command invocations + # to avoid replacing other strings that might contain the + # command name by accident (like "perl" in "perl-script") + new = "\\1" new "\\2"; + old = "(^|[;&| \\t\"`(])" old "($|[);&| \\t\"`<>])"; + # note that a similar regex is build in the shell script to + # filter out unaffected files before this awk script is called; + # if the regex here is changed, the shell script should also be checked + + # list of PPD keys that contain executable names or scripts, see + # https://refspecs.linuxfoundation.org/LSB_4.0.0/LSB-Printing/LSB-Printing/ppdext.html + # https://www.cups.org/doc/spec-ppd.html + cmds["*APAutoSetupTool"] = ""; + cmds["*APPrinterLowInkTool"] = ""; + cmds["*FoomaticRIPCommandLine"] = ""; + cmds["*FoomaticRIPPostPipe"] = ""; + cmds["*cupsFilter"] = ""; + cmds["*cupsFilter2"] = ""; + cmds["*cupsPreFilter"] = ""; + +} + +# since comments always start with "*%", +# this mechanism also properly recognizes (and ignores) them + +{ + + # if the current line starts a new key, + # check if it is a command-containing key; + # also reset the `isCmd` flag if a new file begins + if ($0 ~ /^\*/ || FNR == 1) { isCmd = ($1 in cmds) } + + # replace commands if the current keys might contain commands + if (isCmd) { $0 = gensub(old, new, "g") } + + print + +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/test.nix b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/test.nix new file mode 100644 index 000000000000..4f2996b23510 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/test.nix @@ -0,0 +1,40 @@ +{ substituteAll +, diffutils +, stdenv +, patchPpdFilesHook +}: + +let + input = substituteAll { + src = ./test.ppd; + keep = "cmp"; + patch = "cmp"; + pathkeep = "/bin/cmp"; + pathpatch = "/bin/cmp"; + }; + + output = substituteAll { + src = ./test.ppd; + keep = "cmp"; + patch = "${diffutils}/bin/cmp"; + pathkeep = "/bin/cmp"; + pathpatch = "${diffutils}/bin/cmp"; + }; +in + +stdenv.mkDerivation { + name = "${patchPpdFilesHook.name}-test"; + buildInputs = [ diffutils ]; + nativeBuildInputs = [ diffutils patchPpdFilesHook ]; + dontUnpack = true; + dontInstall = true; + ppdFileCommands = [ "cmp" ]; + preFixup = '' + install -D "${input}" "${placeholder "out"}/share/cups/model/test.ppd" + install -D "${input}" "${placeholder "out"}/share/ppds/test.ppd" + ''; + postFixup = '' + diff --color --report-identical-files "${output}" "${placeholder "out"}/share/cups/model/test.ppd" + diff --color --report-identical-files "${output}" "${placeholder "out"}/share/ppds/test.ppd" + ''; +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/test.ppd 
b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/test.ppd new file mode 100644 index 000000000000..d0ca11ccfe6d --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-ppd-files/test.ppd @@ -0,0 +1,22 @@ +*% This comment: might look like a command @keep@ +*% but it should be left untouched +*SomeKey: do not replace this @keep@ +*APAutoSetupTool: do replace this @patch@ +*FoomaticRIPCommandLine: "patch also @patch@ +in a multi-line command @patch@ +and another line @patch@ +*SomeKey: "stop patching on new non-command key @keep@ +and remember the key in the next line @keep@" +*cupsFilter option: recognize keys with options @patch@ +*cupsFilter : handle strange spacing;@patch@ +*cupsFilter : handle tabulator @patch@ +*cupsFilter: patch common paths @pathpatch@ +*cupsFilter: patch quoted commands "@patch@" +*cupsFilter: patch commands in subshell (@patch@) +*cupsFilter: patch commands in subshell `@pathpatch@` +*cupsFilter: keep uncommon paths /fancy/@pathkeep@ +*cupsFilter: keep entangled commands-@keep@ +*cupsFilter: keep entangled commands\@keep@ +*cupsFilter: keep entangled commands @keep@() +*cupsFilter: keep entangled commands @pathkeep@-cmd +*%cupsFilter: This comment should also be left as is @pathkeep@ diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/default.nix b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/default.nix new file mode 100644 index 000000000000..f16644528f00 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/default.nix @@ -0,0 +1,60 @@ +{ lib +, callPackage +, makeSetupHook +, gnused +}: +let + tests = import ./test { inherit callPackage; }; +in +{ + patchRcPathBash = makeSetupHook + { + name = "patch-rc-path-bash"; + meta = with lib; { + description = "Setup-hook to inject source-time PATH prefix to a Bash/Ksh/Zsh script"; + maintainers = with maintainers; [ ShamrockLee ]; + }; + passthru.tests = { + inherit (tests) test-bash; + }; + } ./patch-rc-path-bash.sh; + patchRcPathCsh = makeSetupHook + { + name = "patch-rc-path-csh"; + substitutions = { + sed = "${gnused}/bin/sed"; + }; + meta = with lib; { + description = "Setup-hook to inject source-time PATH prefix to a Csh script"; + maintainers = with maintainers; [ ShamrockLee ]; + }; + passthru.tests = { + inherit (tests) test-csh; + }; + } ./patch-rc-path-csh.sh; + patchRcPathFish = makeSetupHook + { + name = "patch-rc-path-fish"; + meta = with lib; { + description = "Setup-hook to inject source-time PATH prefix to a Fish script"; + maintainers = with maintainers; [ ShamrockLee ]; + }; + passthru.tests = { + inherit (tests) test-fish; + }; + } ./patch-rc-path-fish.sh; + patchRcPathPosix = makeSetupHook + { + name = "patch-rc-path-posix"; + substitutions = { + sed = "${gnused}/bin/sed"; + }; + meta = with lib; { + description = "Setup-hook to inject source-time PATH prefix to a POSIX shell script"; + maintainers = with maintainers; [ ShamrockLee ]; + }; + passthru.tests = { + inherit (tests) test-posix; + }; + } ./patch-rc-path-posix.sh; +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-bash.sh b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-bash.sh new file mode 100644 index 000000000000..b98b983861b0 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-bash.sh @@ -0,0 +1,50 @@ +patchRcPathBash(){ + local FILE_TO_PATCH="$1" + local SOURCETIME_PATH="$2" + local FILE_TO_WORK_ON="$(mktemp "$(basename "$FILE_TO_PATCH").XXXXXX.tmp")" + 
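# Illustrative usage sketch for the patchRcPath* hooks; the script path is a
# placeholder. Each function takes the file to patch and the colon-separated
# PATH prefix that should be visible while the script is sourced, e.g.:
#
#   patchRcPathBash "$out/etc/profile.d/my-tool.sh" "$out/bin"
#
# patchRcPathCsh, patchRcPathFish and patchRcPathPosix take the same two arguments.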
cat <<EOF >> "$FILE_TO_WORK_ON" +# Lines to add to PATH the source-time utilities for Nixpkgs packaging +if [[ -n "\${NIXPKGS_SOURCETIME_PATH-}" ]]; then + NIXPKGS_SOURCETIME_PATH_OLD="\$NIXPKGS_SOURCETIME_PATH;\${NIXPKGS_SOURCETIME_PATH_OLD-}" +fi +NIXPKGS_SOURCETIME_PATH="$SOURCETIME_PATH" +if [[ -n "\$PATH" ]]; then + PATH="\$NIXPKGS_SOURCETIME_PATH:\$PATH" +else + PATH="\$NIXPKGS_SOURCETIME_PATH" +fi +export PATH +# End of lines to add to PATH source-time utilities for Nixpkgs packaging +EOF + cat "$FILE_TO_PATCH" >> "$FILE_TO_WORK_ON" + cat <<EOF >> "$FILE_TO_WORK_ON" +# Lines to clean up inside PATH the source-time utilities for Nixpkgs packaging +if [[ -n "\${PATH-}" ]]; then + # Remove the inserted section + PATH="\${PATH/\$NIXPKGS_SOURCETIME_PATH}" + # Remove the duplicated colons + PATH="\${PATH//::/:}" + # Remove the prefixing colon + if [[ -n "\$PATH" && "\${PATH:0:1}" == ":" ]]; then + PATH="\${PATH:1}" + fi + # Remove the trailing colon + if [[ -n "\$PATH" && "\${PATH:\${#PATH}-1}" == ":" ]]; then + PATH="\${PATH::}" + fi + export PATH +fi +if [[ -n "\${NIXPKGS_SOURCETIME_PATH_OLD-}" ]]; then + IFS="" read -r -d ";" NIXPKGS_SOURCETIME_PATH <<< "\$NIXPKGS_SOURCETIME_PATH_OLD" + NIXPKGS_SOURCETIME_PATH_OLD="\${NIXPKGS_SOURCETIME_PATH_OLD:\${#NIXPKGS_SOURCETIME_PATH}+1}" +else + unset NIXPKGS_SOURCETIME_PATH +fi +if [[ -z "\${NIXPKGS_SOURCETIME_PATH_OLD-}" ]]; then + unset NIXPKGS_SOURCETIME_PATH_OLD +fi +# End of lines to clean up inside PATH the source-time utilities for Nixpkgs packaging +EOF + cat "$FILE_TO_WORK_ON" > "$FILE_TO_PATCH" + rm "$FILE_TO_WORK_ON" +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-csh.sh b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-csh.sh new file mode 100644 index 000000000000..5e2367003ade --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-csh.sh @@ -0,0 +1,57 @@ +patchRcPathCsh(){ + local FILE_TO_PATCH="$1" + local SOURCETIME_PATH="$2" + local FILE_TO_WORK_ON="$(mktemp "$(basename "$FILE_TO_PATCH").XXXXXX.tmp")" + cat <<EOF >> "$FILE_TO_WORK_ON" +# Lines to add to PATH the source-time utilities for Nixpkgs packaging +if (\$?NIXPKGS_SOURCETIME_PATH) then + if ("\$NIXPKGS_SOURCETIME_PATH" != "") then + if (\$?NIXPKGS_SOURCETIME_PATH_OLD) then + if ("\$NIXPKGS_SOURCETIME_PATH_OLD" != "") + set NIXPKGS_SOURCETIME_PATH_OLD = (\$NIXPKGS_SOURCETIME_PATH \$NIXPKGS_SOURCETIME_PATH_OLD) + else + set NIXPKGS_SOURCETIME_PATH_OLD = \$NIXPKGS_SOURCETIME_PATH + endif + else + set NIXPKGS_SOURCETIME_PATH_OLD = \$NIXPKGS_SOURCETIME_PATH + endif + endif +endif +set NIXPKGS_SOURCETIME_PATH = "$SOURCETIME_PATH" +if (! 
\$?PATH) then + setenv PATH "" +endif +if ("\$PATH" != "") then + setenv PATH "\${NIXPKGS_SOURCETIME_PATH}:\$PATH" +else + setenv PATH "\$NIXPKGS_SOURCETIME_PATH" +endif +# End of lines to add to PATH source-time utilities for Nixpkgs packaging +EOF + cat "$FILE_TO_PATCH" >> "$FILE_TO_WORK_ON" + cat <<EOF >> "$FILE_TO_WORK_ON" +# Lines to clean up inside PATH the source-time utilities for Nixpkgs packaging +if (\$?PATH) then + if ("\$PATH" != "") then + # Remove the inserted section, the duplicated colons, and the leading and trailing colon + setenv PATH \`echo "\$PATH" | @sed@ "s#\${NIXPKGS_SOURCETIME_PATH}##" | @sed@ "s#::#:#g" | @sed@ "s#^:##" | @sed@ 's#:\$##'\` + endif +endif +if (\$?NIXPKGS_SOURCETIME_PATH_OLD) then + if ("\$NIXPKGS_SOURCETIME_PATH_OLD" != "") then + set NIXPKGS_SOURCETIME_PATH = \$NIXPKGS_SOURCETIME_PATH_OLD[1] + set NIXPKGS_SOURCETIME_PATH_OLD = \$NIXPKGS_SOURCETIME_PATH_OLD[2-] + else + unset NIXPKGS_SOURCETIME_PATH + endif + if (NIXPKGS_SOURCETIME_PATH_OLD == "") then + unset NIXPKGS_SOURCETIME_PATH_OLD + endif +else + unset NIXPKGS_SOURCETIME_PATH +endif +# End of lines to clean up inside PATH the source-time utilities for Nixpkgs packaging +EOF + cat "$FILE_TO_WORK_ON" > "$FILE_TO_PATCH" + rm "$FILE_TO_WORK_ON" +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-fish.sh b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-fish.sh new file mode 100644 index 000000000000..3d3e08c57a11 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-fish.sh @@ -0,0 +1,50 @@ +patchRcPathFish(){ + local FILE_TO_PATCH="$1" + local SOURCETIME_PATH="$2" + local FILE_TO_WORK_ON="$(mktemp "$(basename "$FILE_TO_PATCH").XXXXXX.tmp")" + cat <<EOF >> "$FILE_TO_WORK_ON" +# Lines to add to PATH the source-time utilities for Nixpkgs packaging +if set -q NIXPKGS_SOURCETIME_PATH && test (count \$NIXPKGS_SOURCETIME_PATH) -gt 0 + set --unpath NIXPKGS_SOURCETIME_PATH_OLD "\$NIXPKGS_SOURCETIME_PATH" \$NIXPKGS_SOURCETIME_PATH_OLD +end +set --path NIXPKGS_SOURCETIME_PATH $SOURCETIME_PATH +set -g --path PATH \$NIXPKGS_SOURCETIME_PATH \$PATH +# End of lines to add to PATH source-time utilities for Nixpkgs packaging +EOF + cat "$FILE_TO_PATCH" >> "$FILE_TO_WORK_ON" + cat <<EOF >> "$FILE_TO_WORK_ON" +# Lines to clean up inside PATH the source-time utilities for Nixpkgs packaging +if set -q PATH && test "\$PATH" != "" && test (count \$PATH) -ge (count \$NIXPKGS_SOURCETIME_PATH) + # Remove the inserted section + for i in (seq 0 (math (count \$PATH) - (count \$NIXPKGS_SOURCETIME_PATH))) + for j in (seq 1 (count \$NIXPKGS_SOURCETIME_PATH)) + if test \$PATH[(math \$i + \$j)] != \$NIXPKGS_SOURCETIME_PATH[\$j] + set i -1 + break + end + end + if test \$i -eq -1 + continue + end + if test \$i -eq 0 + set -g --path PATH \$PATH[(math (count \$NIXPKGS_SOURCETIME_PATH) + 1)..] + else + set -g --path PATH \$PATH[..\$i] \$PATH[(math (count \$NIXPKGS_SOURCETIME_PATH) + 1 + \$i)..] + end + break + end +end +if set -q NIXPKGS_SOURCETIME_PATH_OLD && test (count \$NIXPKGS_SOURCETIME_PATH_OLD) -gt 0 + set --path NIXPKGS_SOURCETIME_PATH \$NIXPKGS_SOURCETIME_PATH_OLD[1] + set --unpath NIXPKGS_SOURCETIME_PATH_OLD \$NIXPKGS_SOURCETIME_PATH_OLD[2..] 
+else + set -e NIXPKGS_SOURCETIME_PATH +end +if set -q NIXPKGS_SOURCETIME_PATH_OLD && test (count \$NIXPKGS_SOURCETIME_PATH_OLD) -eq 0 + set -e NIXPKGS_SOURCETIME_PATH_OLD +end +# End of lines to clean up inside PATH the source-time utilities for Nixpkgs packaging +EOF + cat "$FILE_TO_WORK_ON" > "$FILE_TO_PATCH" + rm "$FILE_TO_WORK_ON" +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-posix.sh b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-posix.sh new file mode 100644 index 000000000000..a3740d4436d9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/patch-rc-path-posix.sh @@ -0,0 +1,39 @@ +patchRcPathPosix(){ + local FILE_TO_PATCH="$1" + local SOURCETIME_PATH="$2" + local FILE_TO_WORK_ON="$(mktemp "$(basename "$FILE_TO_PATCH").XXXXXX.tmp")" + cat <<EOF >> "$FILE_TO_WORK_ON" +# Lines to add to PATH the source-time utilities for Nixpkgs packaging +if [ -n "\${NIXPKGS_SOURCETIME_PATH-}" ]; then + NIXPKGS_SOURCETIME_PATH_OLD="\$NIXPKGS_SOURCETIME_PATH;\${NIXPKGS_SOURCETIME_PATH_OLD-}" +fi +NIXPKGS_SOURCETIME_PATH="$SOURCETIME_PATH" +if [ -n "\$PATH" ]; then + PATH="\$NIXPKGS_SOURCETIME_PATH:\$PATH"; +else + PATH="\$NIXPKGS_SOURCETIME_PATH" +fi +export PATH +# End of lines to add to PATH source-time utilities for Nixpkgs packaging +EOF + cat "$FILE_TO_PATCH" >> "$FILE_TO_WORK_ON" + cat <<EOF >> "$FILE_TO_WORK_ON" +# Lines to clean up inside PATH the source-time utilities for Nixpkgs packaging +if [ -n "\${PATH-}" ]; then + PATH="\$(echo "\$PATH" | @sed@ "s#\$NIXPKGS_SOURCETIME_PATH##" | @sed@ "s#::#:#g" | @sed@ "s#^:##" | @sed@ "s#:\\\$##")" + export PATH +fi +if [ -n "\${NIXPKGS_SOURCETIME_PATH_OLD-}" ]; then + NIXPKGS_SOURCETIME_PATH="\$(echo "\$NIXPKGS_SOURCETIME_PATH_OLD" | @sed@ "s#\\([^;]\\);.*#\\1#")" + NIXPKGS_SOURCETIME_PATH_OLD="\$(echo "\$NIXPKGS_SOURCETIME_PATH_OLD" | @sed@ "s#[^;];\\(.*\\)#\\1#")" +else + unset NIXPKGS_SOURCETIME_PATH +fi +if [ -z "\${NIXPKGS_SOURCETIME_PATH_OLD-}" ]; then + unset NIXPKGS_SOURCETIME_PATH_OLD +fi +# End of lines to clean up inside PATH the source-time utilities for Nixpkgs packaging +EOF + cat "$FILE_TO_WORK_ON" > "$FILE_TO_PATCH" + rm "$FILE_TO_WORK_ON" +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/default.nix b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/default.nix new file mode 100644 index 000000000000..82bc160387ee --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/default.nix @@ -0,0 +1,442 @@ +{ callPackage }: + +{ + test-bash = callPackage + ( + { lib + , runCommandLocal + , bash + , hello + , ksh + , patchRcPathBash + , shellcheck + , zsh + }: + runCommandLocal "patch-rc-path-bash-test" + { + nativeBuildInputs = [ + bash + ksh + patchRcPathBash + shellcheck + zsh + ]; + meta = { + description = "Package test of patchActivateBash"; + inherit (patchRcPathBash.meta) maintainers; + }; + } + '' + set -eu -o pipefail + + + # Check the setup hook script + + echo "Running shellcheck against ${./test-sourcing-bash}" + shellcheck -s bash --exclude SC1090 ${./test-sourcing-bash} + shellcheck -s ksh --exclude SC1090 ${./test-sourcing-bash} + + + # Test patching a blank file + + echo > blank.bash + + echo "Generating blank_patched.bash from blank.bash" + cp blank.bash blank_patched.bash + patchRcPathBash blank_patched.bash "$PWD/delta:$PWD/foxtrot" + + echo "Running shellcheck against blank_patched.bash" + shellcheck -s bash blank_patched.bash + shellcheck -s ksh 
blank_patched.bash + + echo "Testing in Bash if blank.bash and blank_patched.bash modifies PATH the same way" + bash ${./test-sourcing-bash} ./blank.bash ./blank_patched.bash + + echo "Testing in Ksh if blank.bash and blank_patched.bash modifies PATH the same way" + ksh ${./test-sourcing-bash} "$PWD/blank.bash" "$PWD/blank_patched.bash" + + echo "Testing in Zsh if blank.bash and blank_patched.bash modifies PATH the same way" + zsh ${./test-sourcing-bash} ./blank.bash ./blank_patched.bash + + + # Test patching silent_hello + + echo "hello > /dev/null" > silent_hello.bash + + echo "Generating silent_hello_patched.bash from silent_hello.bash" + cp silent_hello.bash silent_hello_patched.bash + patchRcPathBash silent_hello_patched.bash "${hello}/bin" + + echo "Running shellcheck against silent_hello_patched.bash" + shellcheck -s bash silent_hello_patched.bash + + echo "Testing in Bash if silent_hello_patched.bash get sourced without error" + bash -eu -o pipefail -c ". ./silent_hello_patched.bash" + + echo "Testing in Ksh if silent_hello_patched.bash get sourced without error" + ksh -eu -o pipefail -c ". ./silent_hello_patched.bash" + + echo "Testing in Zsh if silent_hello_patched.bash get sourced without error" + zsh -eu -o pipefail -c ". ./silent_hello_patched.bash" + + + # Check the sample source + + echo "Running shellcheck against sample_source.bash" + shellcheck -s bash ${./sample_source.bash} + shellcheck -s ksh ${./sample_source.bash} + + + # Test patching the sample source + + cp ${./sample_source.bash} sample_source_patched.bash + chmod u+w sample_source_patched.bash + + echo "Generating sample_source_patched.bash from ./sample_source.bash" + patchRcPathBash sample_source_patched.bash "$PWD/delta:$PWD/foxtrot" + + echo "Running shellcheck against sample_source_patched.bash" + shellcheck -s bash sample_source_patched.bash + + echo "Testing in Bash if sample_source.bash and sample_source_patched.bash modifies PATH the same way" + bash ${./test-sourcing-bash} ${./sample_source.bash} ./sample_source_patched.bash + + echo "Testing in Ksh if sample_source.bash and sample_source_patched.bash modifies PATH the same way" + ksh ${./test-sourcing-bash} ${./sample_source.bash} "$PWD/sample_source_patched.bash" + + echo "Testing in Zsh if sample_source.bash and sample_source_patched.bash modifies PATH the same way" + zsh ${./test-sourcing-bash} ${./sample_source.bash} ./sample_source_patched.bash + + + # Test double-patching the sample source + + echo "Patching again sample_source_patched.bash" + patchRcPathBash sample_source_patched.bash "$PWD/foxtrot:$PWD/golf" + + echo "Running shellcheck against sample_source_patched.bash" + shellcheck -s bash sample_source_patched.bash + shellcheck -s ksh sample_source_patched.bash + + echo "Testing in Bash if sample_source.bash and sample_source_patched.bash modifies PATH the same way" + bash ${./test-sourcing-bash} ${./sample_source.bash} ./sample_source_patched.bash + + echo "Testing in Ksh if sample_source.bash and sample_source_patched.bash modifies PATH the same way" + ksh ${./test-sourcing-bash} ${./sample_source.bash} "$PWD/sample_source_patched.bash" + + echo "Testing in Zsh if sample_source.bash and sample_source_patched.bash modifies PATH the same way" + zsh ${./test-sourcing-bash} ${./sample_source.bash} ./sample_source_patched.bash + + + # Create a dummy output + touch "$out" + '' + ) + { }; + + + + test-csh = callPackage + ( + { lib + , runCommandLocal + , gnused + , hello + , patchRcPathCsh + , tcsh + }: + runCommandLocal 
"patch-rc-path-csh-test" + { + nativeBuildInputs = [ + patchRcPathCsh + tcsh + ]; + meta = { + description = "Package test of patchActivateCsh"; + inherit (patchRcPathCsh.meta) maintainers; + }; + } + '' + set -eu -o pipefail + + + # Test patching a blank file + + echo > blank.csh + + echo "Generating blank_patched.csh from blank.csh" + cp blank.csh blank_patched.csh + patchRcPathCsh blank_patched.csh "$PWD/delta:$PWD/foxtrot" + + echo "Testing in Csh if blank.csh and blank_patched.csh modifies PATH the same way" + tcsh -e ${./test-sourcing-csh} blank.csh blank_patched.csh + + + # Test patching silent_hello file + + echo "hello > /dev/null" > silent_hello.csh + + echo "Generating silent_hello_patched.csh from silent_hello.csh" + cp silent_hello.csh silent_hello_patched.csh + patchRcPathCsh silent_hello_patched.csh "${hello}/bin" + + echo "Testing in Csh if silent_hello_patched.csh get sourced without errer" + tcsh -e -c "source silent_hello_patched.csh" + + + # Generate the sample source + + substitute ${./sample_source.csh.in} sample_source.csh --replace @sed@ ${gnused}/bin/sed + chmod u+rw sample_source.csh + + + # Test patching the sample source + + echo "Generating sample_source_patched.csh from sample_source.csh" + cp sample_source.csh sample_source_patched.csh + chmod u+w sample_source_patched.csh + patchRcPathCsh sample_source_patched.csh "$PWD/delta:$PWD/foxtrot" + + echo "Testing in Csh if sample_source.csh and sample_source_patched.csh modifies PATH the same way" + tcsh -e ${./test-sourcing-csh} sample_source.csh sample_source_patched.csh + + + # Test double-patching the sample source + + echo "Patching again sample_source_patched.csh from sample_source.csh" + patchRcPathCsh sample_source_patched.csh "$PWD/foxtrot:$PWD/golf" + + echo "Testing in Csh if sample_source.csh and sample_source_patched.csh modifies PATH the same way" + tcsh -e ${./test-sourcing-csh} sample_source.csh sample_source_patched.csh + + + # Create a dummy output + touch "$out" + '' + ) + { }; + + + + test-fish = callPackage + ( + { lib + , runCommandLocal + , fish + , hello + , patchRcPathFish + }: + runCommandLocal "patch-rc-path-fish-test" + { + nativeBuildInputs = [ + fish + patchRcPathFish + ]; + meta = { + description = "Package test of patchActivateFish"; + inherit (patchRcPathFish.meta) maintainers; + }; + } + '' + set -eu -o pipefail + + + # Test patching a blank file + + echo > blank.fish + + echo "Generating blank_patched.fish from blank.fish" + cp blank.fish blank_patched.fish + patchRcPathFish blank_patched.fish "$PWD/delta:$PWD/foxtrot" + + echo "Testing in Fish if blank.fish and blank_patched.fish modifies PATH the same way" + HOME_TEMP="$(mktemp -d temporary_home_XXXXXX)" + HOME="$HOME_TEMP" fish ${./test-sourcing-fish} blank.fish blank_patched.fish + rm -r "$HOME_TEMP" + + + # Test patching silent_hello file + + echo "hello > /dev/null" > silent_hello.fish + + echo "Generating silent_hello_patched.fish from silent_hello.fish" + cp silent_hello.fish silent_hello_patched.fish + patchRcPathFish silent_hello_patched.fish "${hello}/bin" + + echo "Testing in Fish if silent_hello_patched.fish get sourced without error" + HOME_TEMP="$(mktemp -d temporary_home_XXXXXX)" + HOME="$HOME_TEMP" fish -c "source silent_hello_patched.fish" + rm -r "$HOME_TEMP" + + + # Test patching the sample source + + cp ${./sample_source.fish} sample_source_patched.fish + chmod u+w sample_source_patched.fish + + echo "Generating sample_source_patched.fish from ${./sample_source.fish}" + patchRcPathFish 
sample_source_patched.fish "$PWD/delta:$PWD/foxtrot" + echo "Testing in Fish if sample_source.fish and sample_source_patched.fish modifies PATH the same way" + HOME_TEMP="$(mktemp -d temporary_home_XXXXXX)" + HOME="$HOME_TEMP" fish ${./test-sourcing-fish} ${./sample_source.fish} sample_source_patched.fish + rm -r "$HOME_TEMP" + + + # Test double-patching the sample source + + echo "Patching again sample_source_patched.fish from ${./sample_source.fish}" + patchRcPathFish sample_source_patched.fish "$PWD/foxtrot:$PWD/golf" + + echo "Testing in Fish if sample_source.fish and sample_source_patched.fish modifies PATH the same way" + HOME_TEMP="$(mktemp -d temporary_home_XXXXXX)" + HOME="$HOME_TEMP" fish ${./test-sourcing-fish} ${./sample_source.fish} sample_source_patched.fish + rm -r "$HOME_TEMP" + + + # Create a dummy output + touch "$out" + '' + ) + { }; + + + + test-posix = callPackage + ( + { lib + , runCommandLocal + , bash + , dash + , gnused + , hello + , ksh + , patchRcPathPosix + , shellcheck + }: + runCommandLocal "patch-rc-path-posix-test" + { + nativeBuildInputs = [ + bash + dash + ksh + patchRcPathPosix + shellcheck + ]; + meta = { + description = "Package test of patchActivatePosix"; + inherit (patchRcPathPosix.meta) maintainers; + }; + } + '' + set -eu -o pipefail + + + # Check the setup hook script + + echo "Running shellcheck against ${./test-sourcing-posix}" + shellcheck -s sh --exclude SC1090 ${./test-sourcing-posix} + shellcheck -s dash --exclude SC1090 ${./test-sourcing-posix} + + + # Test patching a blank file + + echo > blank.sh + + echo "Generating blank_patched.sh from blank.sh" + cp blank.sh blank_patched.sh + patchRcPathPosix blank_patched.sh "$PWD/delta:$PWD/foxtrot" + + echo "Running shellcheck against blank_patched.sh" + shellcheck -s sh blank_patched.sh + shellcheck -s dash blank_patched.sh + + echo "Testing in Bash if blank.sh and blank_patched.sh modifies PATH the same way" + bash --posix ${./test-sourcing-posix} ./blank.sh ./blank_patched.sh + + echo "Testing in Dash if blank.sh and blank_patched.sh modifies PATH the same way" + dash ${./test-sourcing-posix} ./blank.sh ./blank_patched.sh + + echo "Testing in Ksh if ./blank.sh and ./blank_patched.sh modifies PATH the same way" + ksh ${./test-sourcing-posix} "$PWD/blank.sh" "$PWD/blank_patched.sh" + + + # Test patching silent_hello file + + echo "hello > /dev/null" > silent_hello.sh + + echo "Generating silent_hello_patched.sh from silent_hello.sh" + cp silent_hello.sh silent_hello_patched.sh + patchRcPathPosix silent_hello_patched.sh "${hello}/bin" + + echo "Running shellcheck against silent_hello_patched.sh" + shellcheck -s sh silent_hello_patched.sh + shellcheck -s dash silent_hello_patched.sh + + echo "Testing in Bash if silent_hello_patched.sh get sourced without error" + bash --posix -eu -c ". ./silent_hello_patched.sh" + + echo "Testing in Dash if silent_hello_patched.sh get sourced without error" + dash -eu -c ". ./silent_hello_patched.sh" + + echo "Testing in Ksh if silent_hello_patched.sh get sourced without error" + ksh -eu -c ". 
$PWD/silent_hello_patched.sh" + + + # Generate the sample source "$PWD/delta:$PWD/foxtrot" "$PWD/delta:$PWD/foxtrot" + + substitute ${./sample_source.sh.in} sample_source.sh --replace @sed@ ${gnused}/bin/sed + chmod u+rw sample_source.sh + + + # Check the sample source + + echo "Running shellcheck against sample_source.sh" + shellcheck -s sh sample_source.sh + shellcheck -s dash sample_source.sh + + + # Test patching the sample source + + echo "Generating sample_source_patched.sh from sample_source.sh" + cp sample_source.sh sample_source_patched.sh + chmod u+w sample_source_patched.sh + patchRcPathPosix sample_source_patched.sh "$PWD/delta:$PWD/foxtrot" + + echo "Running shellcheck against sample_source_patched.sh" + shellcheck -s sh sample_source_patched.sh + shellcheck -s dash sample_source_patched.sh + + echo "Testing in Bash if sample_source.bash and sample_source_patched.bash modifies PATH the same way" + bash --posix ${./test-sourcing-posix} "./sample_source.sh" "./sample_source_patched.sh" + + echo "Testing in Dash if sample_source.sh and sample_source_patched.sh modifies PATH the same way" + dash ${./test-sourcing-posix} "./sample_source.sh" "./sample_source_patched.sh" + + echo "Testing in Ksh if sample_source.sh and sample_source_patched.sh modifies PATH the same way" + ksh ${./test-sourcing-posix} "$PWD/sample_source.sh" "$PWD/sample_source_patched.sh" + + + # Test double-patching the sample source + + echo "Patching again sample_source_patched.sh" + patchRcPathPosix sample_source_patched.sh "$PWD/foxtrot:$PWD/golf" + + echo "Running shellcheck against sample_source_patched.sh" + shellcheck -s sh sample_source_patched.sh + shellcheck -s dash sample_source_patched.sh + + echo "Testing in Bash if sample_source.bash and sample_source_patched.bash modifies PATH the same way" + bash --posix ${./test-sourcing-posix} "./sample_source.sh" "./sample_source_patched.sh" + + echo "Testing in Dash if sample_source.sh and sample_source_patched.sh modifies PATH the same way" + dash ${./test-sourcing-posix} "./sample_source.sh" "./sample_source_patched.sh" + + echo "Testing in Ksh if sample_source.sh and sample_source_patched.sh modifies PATH the same way" + ksh ${./test-sourcing-posix} "$PWD/sample_source.sh" "$PWD/sample_source_patched.sh" + + + # Create a dummy output + touch "$out" + '' + ) + { }; +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.bash b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.bash new file mode 100644 index 000000000000..6cb043e4e70c --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.bash @@ -0,0 +1,2 @@ +PATH="$PWD/charlie:${PATH/:$PWD\/bravo}" +export PATH diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.csh.in b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.csh.in new file mode 100644 index 000000000000..9606458c037e --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.csh.in @@ -0,0 +1 @@ +setenv PATH $PWD/charlie:`echo "$PATH" | @sed@ "s#:$PWD/bravo##"` diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.fish b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.fish new file mode 100644 index 000000000000..f638fe5e24d1 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.fish @@ -0,0 +1,9 @@ +begin + for p in $PATH + 
if test $p != "$PWD/bravo" + set TEMPORARY_PATH $TEMPORARY_PATH $p + end + end + set -g PATH $TEMPORARY_PATH +end +set PATH "$PWD/charlie" $PATH diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.sh.in b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.sh.in new file mode 100644 index 000000000000..42e64a1ffc08 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/sample_source.sh.in @@ -0,0 +1,2 @@ +PATH="$PWD/charlie:$(echo "$PATH" | @sed@ "s#:$PWD/bravo##")" +export PATH diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-bash b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-bash new file mode 100644 index 000000000000..1b6cc54d8f93 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-bash @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +set -eu -o pipefail + +UNPATCHED_SOURCE_FILE="$1" +PATCHED_SOURCE_FILE="$2" +ORIG_PATH="$PWD/alfa:$PWD/bravo" +RESULT_PATH_FROM_UNPATCHED="$( + PATH="$ORIG_PATH"; export PATH + . "$UNPATCHED_SOURCE_FILE" + echo "$PATH" +)" +RESULT_PATH_FROM_PATCHED="$( + PATH="$ORIG_PATH"; export PATH + . "$PATCHED_SOURCE_FILE" + echo "$PATH" +)" +if [[ "$RESULT_PATH_FROM_UNPATCHED" != "$RESULT_PATH_FROM_PATCHED" ]]; then + echo "Result path mismatched: $UNPATCHED_SOURCE_FILE ($RESULT_PATH_FROM_UNPATCHED) and $PATCHED_SOURCE_FILE ($RESULT_PATH_FROM_PATCHED)" >&2 + exit 1 +fi diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-csh b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-csh new file mode 100644 index 000000000000..7ddb2ddc1bdc --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-csh @@ -0,0 +1,13 @@ +#/usr/bin/env tcsh + +set UNPATCHED_SOURCE_FILE = "$1" +set PATCHED_SOURCE_FILE = "$2" +set ORIG_PATH = "${PWD}/alfa:${PWD}/bravo" + +set RESULT_PATH_FROM_UNPATCHED = `setenv PATH "$ORIG_PATH"; source $UNPATCHED_SOURCE_FILE; echo $PATH` +set RESULT_PATH_FROM_PATCHED = `setenv PATH "$ORIG_PATH"; source $PATCHED_SOURCE_FILE; echo $PATH` + +if ($RESULT_PATH_FROM_UNPATCHED != $RESULT_PATH_FROM_PATCHED) then + echo "Result path mismatched: $UNPATCHED_SOURCE_FILE ($RESULT_PATH_FROM_UNPATCHED) and $PATCHED_SOURCE_FILE ($RESULT_PATH_FROM_PATCHED)" > /dev/stderr + exit 1 +endif diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-fish b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-fish new file mode 100644 index 000000000000..fcce014331e5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-fish @@ -0,0 +1,13 @@ +#/usr/bin/env fish + +set UNPATCHED_SOURCE_FILE $argv[1] +set PATCHED_SOURCE_FILE $argv[2] +set ORIG_PATH "$PWD/alfa:$PWD/bravo" + +set RESULT_PATH_FROM_UNPATCHED (fish -c "set -g PATH \"$ORIG_PATH\"; source $UNPATCHED_SOURCE_FILE; echo \"\$PATH\"") +set RESULT_PATH_FROM_PATCHED (fish -c "set -g PATH \"$ORIG_PATH\"; source $PATCHED_SOURCE_FILE; echo \"\$PATH\"") + +if test "$RESULT_PATH_FROM_UNPATCHED" != "$RESULT_PATH_FROM_PATCHED" + echo "Result path mismatched: $UNPATCHED_SOURCE_FILE ($RESULT_PATH_FROM_UNPATCHED) and $PATCHED_SOURCE_FILE ($RESULT_PATH_FROM_PATCHED)" >&2 + exit 1 +end diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-posix 
b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-posix new file mode 100644 index 000000000000..6039b4dcf097 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-rc-path-hooks/test/test-sourcing-posix @@ -0,0 +1,21 @@ +#!/bin/sh + +set -eu + +UNPATCHED_SOURCE_FILE="$1" +PATCHED_SOURCE_FILE="$2" +ORIG_PATH="$PWD/alfa:$PWD/bravo" +RESULT_PATH_FROM_UNPATCHED="$( + PATH="$ORIG_PATH"; export PATH + . "$UNPATCHED_SOURCE_FILE" + echo "$PATH" +)" +RESULT_PATH_FROM_PATCHED="$( + PATH="$ORIG_PATH"; export PATH + . "$PATCHED_SOURCE_FILE" + echo "$PATH" +)" +if [ "$RESULT_PATH_FROM_UNPATCHED" != "$RESULT_PATH_FROM_PATCHED" ]; then + echo "Result path mismatched: $UNPATCHED_SOURCE_FILE ($RESULT_PATH_FROM_UNPATCHED) and $PATCHED_SOURCE_FILE ($RESULT_PATH_FROM_PATCHED)" > /dev/stderr + exit 1 +fi diff --git a/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh b/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh new file mode 100644 index 000000000000..a22f989362c4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh @@ -0,0 +1,145 @@ +# This setup hook causes the fixup phase to rewrite all script +# interpreter file names (`#! /path') to paths found in $PATH. E.g., +# /bin/sh will be rewritten to /nix/store/<hash>-some-bash/bin/sh. +# /usr/bin/env gets special treatment so that ".../bin/env python" is +# rewritten to /nix/store/<hash>/bin/python. Interpreters that are +# already in the store are left untouched. +# A script file must be marked as executable, otherwise it will not be +# considered. + +fixupOutputHooks+=(patchShebangsAuto) + +# Run patch shebangs on a directory or file. +# Can take multiple paths as arguments. +# patchShebangs [--build | --host | --update] [--] PATH... + +# Flags: +# --build : Lookup commands available at build-time +# --host : Lookup commands available at runtime +# --update : Update shebang paths that are in Nix store + +# Example use cases, +# $ patchShebangs --host /nix/store/...-hello-1.0/bin +# $ patchShebangs --build configure + +patchShebangs() { + local pathName + local update + + while [[ $# -gt 0 ]]; do + case "$1" in + --host) + pathName=HOST_PATH + shift + ;; + --build) + pathName=PATH + shift + ;; + --update) + update=true + shift + ;; + --) + shift + break + ;; + -*|--*) + echo "Unknown option $1 supplied to patchShebangs" >&2 + return 1 + ;; + *) + break + ;; + esac + done + + echo "patching script interpreter paths in $@" + local f + local oldPath + local newPath + local arg0 + local args + local oldInterpreterLine + local newInterpreterLine + + if [[ $# -eq 0 ]]; then + echo "No arguments supplied to patchShebangs" >&2 + return 0 + fi + + local f + while IFS= read -r -d $'\0' f; do + isScript "$f" || continue + + read -r oldInterpreterLine < "$f" + read -r oldPath arg0 args <<< "${oldInterpreterLine:2}" + + if [[ -z "${pathName:-}" ]]; then + if [[ -n $strictDeps && $f == "$NIX_STORE"* ]]; then + pathName=HOST_PATH + else + pathName=PATH + fi + fi + + if [[ "$oldPath" == *"/bin/env" ]]; then + if [[ $arg0 == "-S" ]]; then + arg0=${args%% *} + args=${args#* } + newPath="$(PATH="${!pathName}" command -v "env" || true)" + args="-S $(PATH="${!pathName}" command -v "$arg0" || true) $args" + + # Check for unsupported 'env' functionality: + # - options: something starting with a '-' besides '-S' + # - environment variables: foo=bar + elif [[ $arg0 == "-"* || $arg0 == *"="* ]]; then + echo "$f: unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle 
shebang patching yourself)" >&2 + exit 1 + else + newPath="$(PATH="${!pathName}" command -v "$arg0" || true)" + fi + else + if [[ -z $oldPath ]]; then + # If no interpreter is specified linux will use /bin/sh. Set + # oldpath="/bin/sh" so that we get /nix/store/.../sh. + oldPath="/bin/sh" + fi + + newPath="$(PATH="${!pathName}" command -v "$(basename "$oldPath")" || true)" + + args="$arg0 $args" + fi + + # Strip trailing whitespace introduced when no arguments are present + newInterpreterLine="$newPath $args" + newInterpreterLine=${newInterpreterLine%${newInterpreterLine##*[![:space:]]}} + + if [[ -n "$oldPath" && ( "$update" == true || "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ) ]]; then + if [[ -n "$newPath" && "$newPath" != "$oldPath" ]]; then + echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\"" + # escape the escape chars so that sed doesn't interpret them + escapedInterpreterLine=${newInterpreterLine//\\/\\\\} + + # Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281 + timestamp=$(stat --printf "%y" "$f") + sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f" + touch --date "$timestamp" "$f" + fi + fi + done < <(find "$@" -type f -perm -0100 -print0) +} + +patchShebangsAuto () { + if [[ -z "${dontPatchShebangs-}" && -e "$prefix" ]]; then + + # Dev output will end up being run on the build platform. An + # example case of this is sdl2-config. Otherwise, we can just + # use the runtime path (--host). + if [[ "$output" != out && "$output" = "$outputDev" ]]; then + patchShebangs --build "$prefix" + else + patchShebangs --host "$prefix" + fi + fi +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/default.nix b/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/default.nix new file mode 100644 index 000000000000..e9e77b0bbe6f --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/default.nix @@ -0,0 +1,8 @@ +{ callPackage, makeSetupHook }: + +makeSetupHook { + name = "postgresql-test-hook"; + passthru.tests = { + simple = callPackage ./test.nix { }; + }; +} ./postgresql-test-hook.sh diff --git a/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/postgresql-test-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/postgresql-test-hook.sh new file mode 100644 index 000000000000..3eec67d60feb --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/postgresql-test-hook.sh @@ -0,0 +1,81 @@ +preCheckHooks+=('postgresqlStart') +postCheckHooks+=('postgresqlStop') + + +postgresqlStart() { + + # Add default environment variable values + # + # Client variables: + # - https://www.postgresql.org/docs/current/libpq-envars.html + # + # Server variables: + # - only PGDATA: https://www.postgresql.org/docs/current/creating-cluster.html + + if [[ "${PGDATA:-}" == "" ]]; then + PGDATA="$NIX_BUILD_TOP/postgresql" + fi + export PGDATA + + if [[ "${PGHOST:-}" == "" ]]; then + mkdir -p "$NIX_BUILD_TOP/run/postgresql" + PGHOST="$NIX_BUILD_TOP/run/postgresql" + fi + export PGHOST + + if [[ "${PGUSER:-}" == "" ]]; then + PGUSER="test_user" + fi + export PGUSER + + if [[ "${PGDATABASE:-}" == "" ]]; then + PGDATABASE="test_db" + fi + export PGDATABASE + + if [[ "${postgresqlTestUserOptions:-}" == "" ]]; then + postgresqlTestUserOptions="LOGIN" + fi + + if [[ "${postgresqlTestSetupSQL:-}" == "" ]]; then + postgresqlTestSetupSQL="$(cat <<EOF + CREATE ROLE "$PGUSER" $postgresqlTestUserOptions; + CREATE DATABASE "$PGDATABASE" OWNER '$PGUSER'; +EOF + )" + 
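# Illustrative usage sketch (the query is a placeholder): with the hook in
# buildInputs and postgresql in nativeCheckInputs, a checkPhase can talk to
# the throw-away server through the PGHOST/PGUSER/PGDATABASE defaults
# exported above:
#
#   checkPhase:
#     runHook preCheck
#     psql -c 'SELECT 1;'
#     runHook postCheck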
fi + + if [[ "${postgresqlTestSetupCommands:-}" == "" ]]; then + postgresqlTestSetupCommands='echo "$postgresqlTestSetupSQL" | PGUSER=postgres psql postgres' + fi + + if ! type initdb >/dev/null; then + echo >&2 'initdb not found. Did you add postgresql to the nativeCheckInputs?' + false + fi + echo 'initializing postgresql' + initdb -U postgres + + # Move the socket + echo "unix_socket_directories = '$NIX_BUILD_TOP/run/postgresql'" >>"$PGDATA/postgresql.conf" + + # TCP ports can be a problem in some sandboxes, + # so we disable tcp listening by default + if ! [[ "${postgresqlEnableTCP:-}" = 1 ]]; then + echo "listen_addresses = ''" >>"$PGDATA/postgresql.conf" + fi + + echo 'starting postgresql' + eval "${postgresqlStartCommands:-pg_ctl start}" + + echo 'setting up postgresql' + eval "$postgresqlTestSetupCommands" + + runHook postgresqlTestSetupPost + +} + +postgresqlStop() { + echo 'stopping postgresql' + pg_ctl stop +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/test.nix b/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/test.nix new file mode 100644 index 000000000000..9881ed1016cc --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/postgresql-test-hook/test.nix @@ -0,0 +1,30 @@ +{ postgresql, postgresqlTestHook, stdenv }: + +stdenv.mkDerivation { + name = "postgresql-test-hook-test"; + buildInputs = [ postgresqlTestHook ]; + nativeCheckInputs = [ postgresql ]; + dontUnpack = true; + doCheck = true; + passAsFile = ["sql"]; + sql = '' + CREATE TABLE hello ( + message text + ); + INSERT INTO hello VALUES ('it '||'worked'); + SELECT * FROM hello; + ''; + postgresqlTestSetupPost = '' + TEST_POST_HOOK_RAN=1 + ''; + checkPhase = '' + runHook preCheck + psql <$sqlPath | grep 'it worked' + TEST_RAN=1 + runHook postCheck + ''; + installPhase = '' + [[ $TEST_RAN == 1 && $TEST_POST_HOOK_RAN == 1 ]] + touch $out + ''; +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh b/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh new file mode 100644 index 000000000000..0ec56549645c --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh @@ -0,0 +1,22 @@ +# Clear dependency_libs in libtool files for shared libraries. + +# Shared libraries already encode their dependencies with locations. .la +# files do not always encode those locations, and sometimes encode the +# locations in the wrong Nix output. .la files are not needed for shared +# libraries, but without dependency_libs they do not hurt either. + +fixupOutputHooks+=(_pruneLibtoolFiles) + +_pruneLibtoolFiles() { + if [ "${dontPruneLibtoolFiles-}" ] || [ ! -e "$prefix" ]; then + return + fi + + # Libtool uses "dlname" and "library_names" fields for shared libraries and + # the "old_library" field for static libraries. We are processing only + # those .la files that do not describe static libraries. + find "$prefix" -type f -name '*.la' \ + -exec grep -q '^# Generated by .*libtool' {} \; \ + -exec grep -q "^old_library=''" {} \; \ + -exec sed -i {} -e "/^dependency_libs='[^']/ c dependency_libs='' #pruned" \; +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh b/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh new file mode 100644 index 000000000000..5e27ce8a25fe --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh @@ -0,0 +1,11 @@ +# Use the last part of the out path as hash input for the build. 
+# This should ensure that it is deterministic across rebuilds of the same +# derivation and not easily collide with other builds. +# We also truncate the hash so that it cannot cause reference cycles. +NIX_CFLAGS_COMPILE="${NIX_CFLAGS_COMPILE:-} -frandom-seed=$( + randSeed=${NIX_OUTPATH_USED_AS_RANDOM_SEED:-$out} + outbase="${randSeed##*/}" + randomseed="${outbase:0:10}" + echo $randomseed +)" +export NIX_CFLAGS_COMPILE diff --git a/nixpkgs/pkgs/build-support/setup-hooks/role.bash b/nixpkgs/pkgs/build-support/setup-hooks/role.bash new file mode 100644 index 000000000000..bfd6b61f0aed --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/role.bash @@ -0,0 +1,71 @@ +# Since the same derivation can be depended on in multiple ways, we need to +# accumulate *each* role (i.e. host and target platforms relative to the depending +# derivation) in which the derivation is used. +# +# The role is intended to be used as part of other variable names like +# - $NIX_SOMETHING${role_post} + +function getRole() { + case $1 in + -1) + role_post='_FOR_BUILD' + ;; + 0) + role_post='' + ;; + 1) + role_post='_FOR_TARGET' + ;; + *) + echo "@name@: used as improper sort of dependency" >&2 + return 1 + ;; + esac +} + +# `hostOffset` describes how the host platform of the package is slid relative +# to the depending package. `targetOffset` likewise describes the target +# platform of the package. Both are brought into scope of the setup hook defined +# for the dependency whose setup hook is being processed, relative to the package +# being built. + +function getHostRole() { + getRole "$hostOffset" +} +function getTargetRole() { + getRole "$targetOffset" +} + +# `depHostOffset` describes how the host platforms of the dependencies are slid +# relative to the depending package. `depTargetOffset` likewise describes the +# target platforms of the dependencies. Both are brought into scope of the +# environment hook defined for the dependency being applied, relative to the +# package being built. + +function getHostRoleEnvHook() { + getRole "$depHostOffset" +} +function getTargetRoleEnvHook() { + getRole "$depTargetOffset" +} + +# This variant is intended specifically for code-producing tool wrapper scripts. +# `NIX_@wrapperName@_TARGET_*_@suffixSalt@` tracks this (needs to be an exported +# env var so can't use fancier data structures). +function getTargetRoleWrapper() { + case $targetOffset in + -1) + export NIX_@wrapperName@_TARGET_BUILD_@suffixSalt@=1 + ;; + 0) + export NIX_@wrapperName@_TARGET_HOST_@suffixSalt@=1 + ;; + 1) + export NIX_@wrapperName@_TARGET_TARGET_@suffixSalt@=1 + ;; + *) + echo "@name@: used as improper sort of dependency" >&2 + return 1 + ;; + esac +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh b/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh new file mode 100644 index 000000000000..3a16ac4fee90 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh @@ -0,0 +1,52 @@ +export NIX_SET_BUILD_ID=1 +export NIX_LDFLAGS+=" --compress-debug-sections=zlib" +export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections" +export RUSTFLAGS+=" -g" + +fixupOutputHooks+=(_separateDebugInfo) + +_separateDebugInfo() { + [ -e "$prefix" ] || return 0 + + local dst="${debug:-$out}" + if [ "$prefix" = "$dst" ]; then return 0; fi + + # in case there is nothing to strip, don't fail the build + mkdir -p "$dst" + + dst="$dst/lib/debug/.build-id" + + # Find executables and dynamic libraries. + local i + while IFS= read -r -d $'\0' i; do + if ! 
isELF "$i"; then continue; fi + + [ -z "${READELF:-}" ] && echo "_separateDebugInfo: '\$READELF' variable is empty, skipping." 1>&2 && break + [ -z "${OBJCOPY:-}" ] && echo "_separateDebugInfo: '\$OBJCOPY' variable is empty, skipping." 1>&2 && break + + # Extract the Build ID. FIXME: there's probably a cleaner way. + local id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')" + if [ "${#id}" != 40 ]; then + echo "could not find build ID of $i, skipping" >&2 + continue + fi + + # Extract the debug info. + echo "separating debug info from $i (build ID $id)" + mkdir -p "$dst/${id:0:2}" + + # This may fail, e.g. if the binary is for a different + # architecture than we're building for. (This happens with + # firmware blobs in QEMU.) + ( + if [ -f "$dst/${id:0:2}/${id:2}.debug" ] + then + echo "separate-debug-info: warning: multiple files with build id $id found, overwriting" + fi + $OBJCOPY --only-keep-debug "$i" "$dst/${id:0:2}/${id:2}.debug" + + # Also a create a symlink <original-name>.debug. + ln -sfn ".build-id/${id:0:2}/${id:2}.debug" "$dst/../$(basename "$i")" + ) || rmdir -p "$dst/${id:0:2}" + done < <(find "$prefix" -type f -print0 | sort -z) +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh b/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh new file mode 100644 index 000000000000..445fa56d61de --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh @@ -0,0 +1,13 @@ +# This setup hook adds every JAR in the share/java subdirectories of +# the build inputs to $CLASSPATH. + +export CLASSPATH + +addPkgToClassPath () { + local jar + for jar in $1/share/java/*.jar; do + export CLASSPATH=''${CLASSPATH-}''${CLASSPATH:+:}''${jar} + done +} + +addEnvHooks "$targetOffset" addPkgToClassPath diff --git a/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh b/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh new file mode 100644 index 000000000000..ae34ffec4854 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh @@ -0,0 +1,34 @@ +updateSourceDateEpoch() { + local path="$1" + + # Get the last modification time of all regular files, sort them, + # and get the most recent. Maybe we should use + # https://github.com/0-wiz-0/findnewest here. + local -a res=($(find "$path" -type f -not -newer "$NIX_BUILD_TOP/.." -printf '%T@ %p\0' \ + | sort -n --zero-terminated | tail -n1 --zero-terminated | head -c -1)) + local time="${res[0]//\.[0-9]*/}" # remove the fraction part + local newestFile="${res[1]}" + + # Update $SOURCE_DATE_EPOCH if the most recent file we found is newer. + if [ "${time:-0}" -gt "$SOURCE_DATE_EPOCH" ]; then + echo "setting SOURCE_DATE_EPOCH to timestamp $time of file $newestFile" + export SOURCE_DATE_EPOCH="$time" + + # Warn if the new timestamp is too close to the present. This + # may indicate that we were being applied to a file generated + # during the build, or that an unpacker didn't restore + # timestamps properly. 
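+        # ("Too close" here means within 60 seconds of the current time; see the check below.)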
+ local now="$(date +%s)" + if [ "$time" -gt $((now - 60)) ]; then + echo "warning: file $newestFile may be generated; SOURCE_DATE_EPOCH may be non-deterministic" + fi + fi +} + +postUnpackHooks+=(_updateSourceDateEpochFromSourceRoot) + +_updateSourceDateEpochFromSourceRoot() { + if [ -n "$sourceRoot" ]; then + updateSourceDateEpoch "$sourceRoot" + fi +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh b/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh new file mode 100644 index 000000000000..96bf48cf123a --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh @@ -0,0 +1,5 @@ +setupDebugInfoDirs () { + addToSearchPath NIX_DEBUG_INFO_DIRS $1/lib/debug +} + +addEnvHooks "$targetOffset" setupDebugInfoDirs diff --git a/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh b/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh new file mode 100644 index 000000000000..825da1bde962 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh @@ -0,0 +1,88 @@ +# This setup hook modifies a Perl script so that any "-I" flags in its shebang +# line are rewritten into a "use lib ..." statement on the next line. This gets +# around a limitation in Darwin, which will not properly handle a script whose +# shebang line exceeds 511 characters. +# +# Each occurrence of "-I /path/to/lib1" or "-I/path/to/lib2" is removed from +# the shebang line, along with the single space that preceded it. These library +# paths are placed into a new line of the form +# +# use lib "/path/to/lib1", "/path/to/lib2"; +# +# immediately following the shebang line. If a library appeared in the original +# list more than once, only its first occurrence will appear in the output +# list. In other words, the libraries are deduplicated, but the ordering of the +# first appearance of each one is preserved. +# +# Any flags other than "-I" in the shebang line are left as-is, and the +# interpreter is also left alone (although the script will abort if the +# interpreter does not seem to be either "perl" or else "env" with "perl" as +# its argument). Each line after the shebang line is left unchanged. Each file +# is modified in place. +# +# Usage: +# shortenPerlShebang SCRIPT... + +shortenPerlShebang() { + while [ $# -gt 0 ]; do + _shortenPerlShebang "$1" + shift + done +} + +_shortenPerlShebang() { + local program="$1" + + echo "shortenPerlShebang: rewriting shebang line in $program" + + if ! isScript "$program"; then + die "shortenPerlShebang: refusing to modify $program because it is not a script" + fi + + local temp="$(mktemp)" + + gawk ' + (NR == 1) { + if (!($0 ~ /\/(perl|env +perl)\>/)) { + print "shortenPerlShebang: script does not seem to be a Perl script" > "/dev/stderr" + exit 1 + } + idx = 0 + while (match($0, / -I ?([^ ]+)/, pieces)) { + matches[idx] = pieces[1] + idx++ + $0 = gensub(/ -I ?[^ ]+/, "", 1, $0) + } + print $0 + if (idx > 0) { + prefix = "use lib " + for (idx in matches) { + path = matches[idx] + if (!(path in seen)) { + printf "%s\"%s\"", prefix, path + seen[path] = 1 + prefix = ", " + } + } + print ";" + } + } + (NR > 1 ) { + print + } + ' "$program" > "$temp" || die + # Preserve the mode of the original file + cp --preserve=mode --attributes-only "$program" "$temp" + mv "$temp" "$program" + + # Measure the new shebang line length and make sure it's okay. We subtract + # one to account for the trailing newline that "head" included in its + # output. 
+ local new_length=$(( $(head -n 1 "$program" | wc -c) - 1 )) + + # Darwin is okay when the shebang line contains 511 characters, but not + # when it contains 512 characters. + if [ $new_length -ge 512 ]; then + die "shortenPerlShebang: shebang line is $new_length characters--still too long for Darwin!" + fi +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/strip.sh b/nixpkgs/pkgs/build-support/setup-hooks/strip.sh new file mode 100644 index 000000000000..ce41e6ea0562 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/strip.sh @@ -0,0 +1,98 @@ +# This setup hook strips libraries and executables in the fixup phase. + +fixupOutputHooks+=(_doStrip) + +_doStrip() { + # We don't bother to strip build platform code because it shouldn't make it + # to $out anyways---if it does, that's a bigger problem that a lack of + # stripping will help catch. + local -ra flags=(dontStripHost dontStripTarget) + local -ra debugDirs=(stripDebugList stripDebugListTarget) + local -ra allDirs=(stripAllList stripAllListTarget) + local -ra stripCmds=(STRIP STRIP_FOR_TARGET) + local -ra ranlibCmds=(RANLIB RANLIB_FOR_TARGET) + + # TODO(structured-attrs): This doesn't work correctly if one of + # the items in strip*List or strip*Flags contains a space, + # even with structured attrs enabled. This is OK for now + # because very few packages set any of these, and it doesn't + # affect any of them. + # + # After __structuredAttrs = true is universal, come back and + # push arrays all the way through this logic. + + # Strip only host paths by default. Leave targets as is. + stripDebugList=${stripDebugList[*]:-lib lib32 lib64 libexec bin sbin} + stripDebugListTarget=${stripDebugListTarget[*]:-} + stripAllList=${stripAllList[*]:-} + stripAllListTarget=${stripAllListTarget[*]:-} + + local i + for i in ${!stripCmds[@]}; do + local -n flag="${flags[$i]}" + local -n debugDirList="${debugDirs[$i]}" + local -n allDirList="${allDirs[$i]}" + local -n stripCmd="${stripCmds[$i]}" + local -n ranlibCmd="${ranlibCmds[$i]}" + + # `dontStrip` disables them all + if [[ "${dontStrip-}" || "${flag-}" ]] || ! type -f "${stripCmd-}" 2>/dev/null 1>&2 + then continue; fi + + stripDirs "$stripCmd" "$ranlibCmd" "$debugDirList" "${stripDebugFlags[*]:--S -p}" + stripDirs "$stripCmd" "$ranlibCmd" "$allDirList" "${stripAllFlags[*]:--s -p}" + done +} + +stripDirs() { + local cmd="$1" + local ranlibCmd="$2" + local paths="$3" + local stripFlags="$4" + local excludeFlags=() + local pathsNew= + + [ -z "$cmd" ] && echo "stripDirs: Strip command is empty" 1>&2 && exit 1 + [ -z "$ranlibCmd" ] && echo "stripDirs: Ranlib command is empty" 1>&2 && exit 1 + + local pattern + if [ -n "${stripExclude:-}" ]; then + for pattern in "${stripExclude[@]}"; do + excludeFlags+=(-a '!' '(' -name "$pattern" -o -wholename "$prefix/$pattern" ')' ) + done + fi + + local p + for p in ${paths}; do + if [ -e "$prefix/$p" ]; then + pathsNew="${pathsNew} $prefix/$p" + fi + done + paths=${pathsNew} + + if [ -n "${paths}" ]; then + echo "stripping (with command $cmd and flags $stripFlags) in $paths" + local striperr + striperr="$(mktemp --tmpdir="$TMPDIR" 'striperr.XXXXXX')" + # Do not strip lib/debug. This is a directory used by setup-hooks/separate-debug-info.sh. + find $paths -type f "${excludeFlags[@]}" -a '!' -path "$prefix/lib/debug/*" -print0 | + # Make sure we process files under symlinks only once. 
Otherwise + # 'strip' can corrupt files when writing to them in parallel: + # https://github.com/NixOS/nixpkgs/issues/246147#issuecomment-1657072039 + xargs -r -0 -n1 -- realpath -z | sort -u -z | + + xargs -r -0 -n1 -P "$NIX_BUILD_CORES" -- $cmd $stripFlags 2>"$striperr" || exit_code=$? + # xargs exits with status code 123 if some but not all of the + # processes fail. We don't care if some of the files couldn't + # be stripped, so ignore specifically this code. + [[ "$exit_code" = 123 || -z "$exit_code" ]] || (cat "$striperr" 1>&2 && exit 1) + + rm "$striperr" + # 'strip' does not normally preserve the archive index in .a files. + # This usually causes linking failures against static libs like: + # ld: ...-i686-w64-mingw32-stage-final-gcc-13.0.0-lib/i686-w64-mingw32/lib/libstdc++.dll.a: + # error adding symbols: archive has no index; run ranlib to add one + # Restore the index by running 'ranlib'. + find $paths -name '*.a' -type f -exec $ranlibCmd '{}' \; 2>/dev/null + fi +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh b/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh new file mode 100644 index 000000000000..ebd3afa05d94 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh @@ -0,0 +1,12 @@ +preConfigurePhases+=" updateAutotoolsGnuConfigScriptsPhase" + +updateAutotoolsGnuConfigScriptsPhase() { + if [ -n "${dontUpdateAutotoolsGnuConfigScripts-}" ]; then return; fi + + for script in config.sub config.guess; do + for f in $(find . -type f -name "$script"); do + echo "Updating Autotools / GNU config script to a newer upstream version: $f" + cp -f "@gnu_config@/$script" "$f" + done + done +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh b/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh new file mode 100644 index 000000000000..53335d7a9a7a --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh @@ -0,0 +1 @@ +export NIX_CFLAGS_COMPILE+=" -D_GLIBCXX_USE_CXX11_ABI=0" diff --git a/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh b/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh new file mode 100644 index 000000000000..c212a1f5301a --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh @@ -0,0 +1,18 @@ +# This setup hook validates each pkgconfig file in each output. + +fixupOutputHooks+=(_validatePkgConfig) + +_validatePkgConfig() { + local bail=0 + for pc in $(find "$prefix" -name '*.pc'); do + # Do not fail immediately. It's nice to see all errors when + # there are multiple pkgconfig files. + if ! $PKG_CONFIG --validate "$pc"; then + bail=1 + fi + done + + if [ $bail -eq 1 ]; then + exit 1 + fi +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh b/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh new file mode 100644 index 000000000000..14594bcba937 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh @@ -0,0 +1,89 @@ +fixupOutputHooks+=(_linkDLLs) + +addEnvHooks "$targetOffset" linkDLLGetFolders + +linkDLLGetFolders() { + addToSearchPath "LINK_DLL_FOLDERS" "$1/lib" + addToSearchPath "LINK_DLL_FOLDERS" "$1/bin" +} + +_linkDLLs() { + linkDLLsInfolder "$prefix/bin" +} + +# Try to link every known dependency of the exe/dll files in the folder given as the first input +# into said folder, so they are found on invocation. +# (DLLs are first searched in the directory of the running exe file.) 
+# The links are relative, so relocating the whole /nix/store won't break them. +linkDLLsInfolder() { + ( + local folder + folder="$1" + if [ ! -d "$folder" ]; then + echo "Not linking DLLs in the non-existent folder $folder" + return + fi + cd "$folder" || exit + + # Use associative arrays as sets + local filesToChecks + local filesDone + declare -A filesToChecks # files that still need to have their dependencies checked + declare -A filesDone # files that have had their dependencies checked and which are copied to the bin folder if found + + markFileAsDone() { + if [ ! "${filesDone[$1]+a}" ]; then filesDone[$1]=a; fi + if [ "${filesToChecks[$1]+a}" ]; then unset 'filesToChecks[$1]'; fi + } + + addFileToLink() { + if [ "${filesDone[$1]+a}" ]; then return; fi + if [ ! "${filesToChecks[$1]+a}" ]; then filesToChecks[$1]=a; fi + } + + # Compose path list where DLLs should be located: + # prefix $PATH by currently-built outputs + local DLLPATH="" + local outName + for outName in $(getAllOutputNames); do + addToSearchPath DLLPATH "${!outName}/bin" + done + DLLPATH="$DLLPATH:$LINK_DLL_FOLDERS" + + echo DLLPATH="'$DLLPATH'" + + for peFile in *.{exe,dll}; do + if [ -e "./$peFile" ]; then + addFileToLink "$peFile" + fi + done + + local searchPaths + readarray -td: searchPaths < <(printf -- "%s" "$DLLPATH") + + local linkCount=0 + while [ ${#filesToChecks[*]} -gt 0 ]; do + local listOfDlls=("${!filesToChecks[@]}") + local file=${listOfDlls[0]} + markFileAsDone "$file" + if [ ! -e "./$file" ]; then + local pathsFound + readarray -d '' pathsFound < <(find "${searchPaths[@]}" -name "$file" -type f -print0) + if [ ${#pathsFound[@]} -eq 0 ]; then continue; fi + local dllPath + dllPath="${pathsFound[0]}" + CYGWIN+=" winsymlinks:nativestrict" ln -sr "$dllPath" . + echo "linking $dllPath" + file="$dllPath" + linkCount=$((linkCount + 1)) + fi + # local dep_file + # Look at the file’s dependencies + for dep_file in $($OBJDUMP -p "$file" | sed -n 's/.*DLL Name: \(.*\)/\1/p' | sort -u); do + addFileToLink "$dep_file" + done + done + + echo "Created $linkCount DLL link(s) in $folder" + ) +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix new file mode 100644 index 000000000000..3c5199be3132 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix @@ -0,0 +1,203 @@ +{ stdenv +, lib +, makeSetupHook +, makeWrapper +, gobject-introspection +, isGraphical ? true +, gtk3 +, librsvg +, dconf +, callPackage +, wrapGAppsHook +, targetPackages +}: + +makeSetupHook { + name = "wrap-gapps-hook"; + propagatedBuildInputs = [ + # We use the wrapProgram function. + makeWrapper + ] ++ lib.optionals isGraphical [ + # TODO: remove this, packages should depend on GTK explicitly. + gtk3 + + librsvg + ]; + + # depsTargetTargetPropagated will essentially be buildInputs when wrapGAppsHook is placed into nativeBuildInputs + # the librsvg and gtk3 above should be removed but are kept to not break anything that implicitly depended on their binaries + depsTargetTargetPropagated = assert (lib.assertMsg (!targetPackages ? raw) "wrapGAppsHook must be in nativeBuildInputs"); lib.optionals isGraphical [ + # librsvg provides a module for gdk-pixbuf to allow rendering + # SVG icons. Most icon themes are SVG-based and so are some + # graphics in GTK (e.g. the cross for closing a window in the window title bar), + # so it is pretty much required for applications using GTK. 
+ librsvg + + # TODO: remove this, packages should depend on GTK explicitly. + gtk3 + ] ++ lib.optionals (!stdenv.isDarwin) [ + # It is highly probable that a program will use GSettings, + # at minimum through GTK file chooser dialogue. + # Let’s add a GIO module for “dconf” GSettings backend + # to avoid falling back to “memory” backend. This is + # required for GSettings-based settings to be persisted. + # Unfortunately, it also requires the user to have dconf + # D-Bus service enabled globally (e.g. through a NixOS module). + dconf.lib + ]; + passthru = { + tests = let + sample-project = ./tests/sample-project; + + testLib = callPackage ./tests/lib.nix { }; + inherit (testLib) expectSomeLineContainingYInFileXToMentionZ; + in rec { + # Simple derivation containing a program and a daemon. + basic = stdenv.mkDerivation { + name = "basic"; + + src = sample-project; + + strictDeps = true; + nativeBuildInputs = [ wrapGAppsHook ]; + + installFlags = [ "bin-foo" "libexec-bar" ]; + }; + + # The wrapper for executable files should add path to dconf GIO module. + basic-contains-dconf = let + tested = basic; + in testLib.runTest "basic-contains-dconf" ( + testLib.skip stdenv.isDarwin '' + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GIO_EXTRA_MODULES" "${dconf.lib}/lib/gio/modules"} + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GIO_EXTRA_MODULES" "${dconf.lib}/lib/gio/modules"} + '' + ); + + basic-contains-gdk-pixbuf = let + tested = basic; + in testLib.runTest "basic-contains-gdk-pixbuf" ( + testLib.skip stdenv.isDarwin '' + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GDK_PIXBUF_MODULE_FILE" "${lib.getLib librsvg}/lib/gdk-pixbuf-2.0/2.10.0/loaders.cache"} + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GDK_PIXBUF_MODULE_FILE" "${lib.getLib librsvg}/lib/gdk-pixbuf-2.0/2.10.0/loaders.cache"} + '' + ); + + # Simple derivation containing a gobject-introspection typelib. + typelib-Mahjong = stdenv.mkDerivation { + name = "typelib-Mahjong"; + + src = sample-project; + + strictDeps = true; + + installFlags = [ "typelib-Mahjong" ]; + }; + + # Simple derivation using a typelib. + typelib-user = stdenv.mkDerivation { + name = "typelib-user"; + + src = sample-project; + + strictDeps = true; + nativeBuildInputs = [ + gobject-introspection + wrapGAppsHook + ]; + + buildInputs = [ + typelib-Mahjong + ]; + + installFlags = [ "bin-foo" "libexec-bar" ]; + }; + + # Testing cooperation with gobject-introspection setup hook, + # which should populate GI_TYPELIB_PATH variable with paths + # to typelibs among the derivation’s dependencies. + # The resulting GI_TYPELIB_PATH should be picked up by the wrapper. + typelib-user-has-gi-typelib-path = let + tested = typelib-user; + in testLib.runTest "typelib-user-has-gi-typelib-path" '' + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH" "${typelib-Mahjong}/lib/girepository-1.0"} + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH" "${typelib-Mahjong}/lib/girepository-1.0"} + ''; + + # Simple derivation containing a gobject-introspection typelib in lib output. + typelib-Bechamel = stdenv.mkDerivation { + name = "typelib-Bechamel"; + + outputs = [ "out" "lib" ]; + + src = sample-project; + + strictDeps = true; + + makeFlags = [ + "LIBDIR=${placeholder "lib"}/lib" + ]; + + installFlags = [ "typelib-Bechamel" ]; + }; + + # Simple derivation using a typelib from non-default output. 
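+      # (Like typelib-user above, but the typelib is consumed from the lib output of typelib-Bechamel.)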
+ typelib-multiout-user = stdenv.mkDerivation { + name = "typelib-multiout-user"; + + src = sample-project; + + strictDeps = true; + nativeBuildInputs = [ + gobject-introspection + wrapGAppsHook + ]; + + buildInputs = [ + typelib-Bechamel + ]; + + installFlags = [ "bin-foo" "libexec-bar" ]; + }; + + # Testing cooperation with gobject-introspection setup hook, + # which should populate GI_TYPELIB_PATH variable with paths + # to typelibs among the derivation’s dependencies, + # even when they are not in default output. + # The resulting GI_TYPELIB_PATH should be picked up by the wrapper. + typelib-multiout-user-has-gi-typelib-path = let + tested = typelib-multiout-user; + in testLib.runTest "typelib-multiout-user-has-gi-typelib-path" '' + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH" "${typelib-Bechamel.lib}/lib/girepository-1.0"} + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH" "${typelib-Bechamel.lib}/lib/girepository-1.0"} + ''; + + # Simple derivation that contains a typelib as well as a program using it. + typelib-self-user = stdenv.mkDerivation { + name = "typelib-self-user"; + + src = sample-project; + + strictDeps = true; + nativeBuildInputs = [ + gobject-introspection + wrapGAppsHook + ]; + + installFlags = [ "typelib-Cow" "bin-foo" "libexec-bar" ]; + }; + + # Testing cooperation with gobject-introspection setup hook, + # which should add the path to derivation’s own typelibs + # to GI_TYPELIB_PATH variable. + # The resulting GI_TYPELIB_PATH should be picked up by the wrapper. + # https://github.com/NixOS/nixpkgs/issues/85515 + typelib-self-user-has-gi-typelib-path = let + tested = typelib-self-user; + in testLib.runTest "typelib-self-user-has-gi-typelib-path" '' + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH" "${typelib-self-user}/lib/girepository-1.0"} + ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH" "${typelib-self-user}/lib/girepository-1.0"} + ''; + }; + }; +} ./wrap-gapps-hook.sh diff --git a/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix new file mode 100644 index 000000000000..59fa9de24f9d --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix @@ -0,0 +1,31 @@ +{ lib, runCommand }: + +rec { + runTest = name: body: runCommand name { strictDeps = true; } '' + set -o errexit + ${body} + touch $out + ''; + + skip = cond: text: + if cond then '' + echo "Skipping test $name" > /dev/stderr + '' else text; + + fail = text: '' + echo "FAIL: $name: ${text}" > /dev/stderr + exit 1 + ''; + + expectSomeLineContainingYInFileXToMentionZ = file: filter: expected: '' + file=${lib.escapeShellArg file} filter=${lib.escapeShellArg filter} expected=${lib.escapeShellArg expected} + + if ! grep --text --quiet "$filter" "$file"; then + ${fail "The file “$file” should include a line containing “$filter”."} + fi + + if ! 
grep --text "$filter" "$file" | grep --text --quiet "$expected"; then + ${fail "The file “$file” should include a line containing “$filter” that also contains “$expected”."} + fi + ''; +} diff --git a/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile new file mode 100644 index 000000000000..5d234db11a0b --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile @@ -0,0 +1,30 @@ +PREFIX = $(out) +BINDIR = $(PREFIX)/bin +LIBEXECDIR = $(PREFIX)/libexec +LIBDIR = $(PREFIX)/lib +TYPELIBDIR = $(LIBDIR)/girepository-1.0 + +all: + echo "Compiling…" +install: + echo "Installing…" + +bin: + mkdir -p $(BINDIR) +# Adds `bin-${foo}` targets, that install `${foo}` executable to `$(BINDIR)`. +bin-%: bin + touch $(BINDIR)/$(@:bin-%=%) + chmod +x $(BINDIR)/$(@:bin-%=%) + +libexec: + mkdir -p $(LIBEXECDIR) +# Adds `libexec-${foo}` targets, that install `${foo}` executable to `$(LIBEXECDIR)`. +libexec-%: libexec + touch $(LIBEXECDIR)/$(@:libexec-%=%) + chmod +x $(LIBEXECDIR)/$(@:libexec-%=%) + +typelib: + mkdir -p $(TYPELIBDIR) +# Adds `typelib-${foo}` targets, that install `${foo}-1.0.typelib` file to `$(TYPELIBDIR)`. +typelib-%: typelib + touch $(TYPELIBDIR)/$(@:typelib-%=%)-1.0.typelib diff --git a/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh new file mode 100644 index 000000000000..0acf4a8e6f8d --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh @@ -0,0 +1,89 @@ +# shellcheck shell=bash +gappsWrapperArgs=() + +find_gio_modules() { + if [ -d "$1/lib/gio/modules" ] && [ -n "$(ls -A "$1/lib/gio/modules")" ] ; then + gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$1/lib/gio/modules") + fi +} + +addEnvHooks "${targetOffset:?}" find_gio_modules + +gappsWrapperArgsHook() { + if [ -n "$GDK_PIXBUF_MODULE_FILE" ]; then + gappsWrapperArgs+=(--set GDK_PIXBUF_MODULE_FILE "$GDK_PIXBUF_MODULE_FILE") + fi + + if [ -n "$GSETTINGS_SCHEMAS_PATH" ]; then + gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$GSETTINGS_SCHEMAS_PATH") + fi + + # Check for prefix as well + if [ -d "${prefix:?}/share" ]; then + gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$prefix/share") + fi + + if [ -d "$prefix/lib/gio/modules" ] && [ -n "$(ls -A "$prefix/lib/gio/modules")" ]; then + gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$prefix/lib/gio/modules") + fi + + for v in ${wrapPrefixVariables:-} GST_PLUGIN_SYSTEM_PATH_1_0 GI_TYPELIB_PATH GRL_PLUGIN_PATH; do + if [ -n "${!v}" ]; then + gappsWrapperArgs+=(--prefix "$v" : "${!v}") + fi + done +} + +preFixupPhases+=" gappsWrapperArgsHook" + +wrapGApp() { + local program="$1" + shift 1 + wrapProgram "$program" "${gappsWrapperArgs[@]}" "$@" +} + +# Note: $gappsWrapperArgs still gets defined even if ${dontWrapGApps-} is set. +wrapGAppsHook() { + # guard against running multiple times (e.g. 
due to propagation) + [ -z "$wrapGAppsHookHasRun" ] || return 0 + wrapGAppsHookHasRun=1 + + if [[ -z "${dontWrapGApps:-}" ]]; then + targetDirsThatExist=() + targetDirsRealPath=() + + # wrap binaries + targetDirs=("${prefix}/bin" "${prefix}/libexec") + for targetDir in "${targetDirs[@]}"; do + if [[ -d "${targetDir}" ]]; then + targetDirsThatExist+=("${targetDir}") + targetDirsRealPath+=("$(realpath "${targetDir}")/") + find "${targetDir}" -type f -executable -print0 | + while IFS= read -r -d '' file; do + echo "Wrapping program '${file}'" + wrapGApp "${file}" + done + fi + done + + # wrap links to binaries that point outside targetDirs + # Note: links to binaries within targetDirs do not need + # to be wrapped as the binaries have already been wrapped + if [[ ${#targetDirsThatExist[@]} -ne 0 ]]; then + find "${targetDirsThatExist[@]}" -type l -xtype f -executable -print0 | + while IFS= read -r -d '' linkPath; do + linkPathReal=$(realpath "${linkPath}") + for targetPath in "${targetDirsRealPath[@]}"; do + if [[ "$linkPathReal" == "$targetPath"* ]]; then + echo "Not wrapping link: '$linkPath' (already wrapped)" + continue 2 + fi + done + echo "Wrapping link: '$linkPath'" + wrapGApp "${linkPath}" + done + fi + fi +} + +fixupOutputHooks+=(wrapGAppsHook) diff --git a/nixpkgs/pkgs/build-support/setup-systemd-units.nix b/nixpkgs/pkgs/build-support/setup-systemd-units.nix new file mode 100644 index 000000000000..4c7ee86669f5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/setup-systemd-units.nix @@ -0,0 +1,83 @@ +# | Build a script to install and start a set of systemd units on any +# systemd-based system. +# +# Creates a symlink at /etc/systemd-static/${namespace} for slightly +# improved atomicity. +{ writeScriptBin +, bash +, coreutils +, systemd +, runCommand +, lib +}: + { units # : AttrSet String (Either Path { path : Path, wanted-by : [ String ] }) + # ^ A set whose names are unit names and values are + # either paths to the corresponding unit files or a set + # containing the path and the list of units this unit + # should be wanted-by (none by default). + # + # The names should include the unit suffix + # (e.g. ".service") + , namespace # : String + # The namespace for the unit files, to allow for + # multiple independent unit sets managed by + # `setupSystemdUnits`. + }: + let static = runCommand "systemd-static" {} + '' + mkdir -p $out + ${lib.concatStringsSep "\n" (lib.mapAttrsToList (nm: file: + "ln -sv ${file.path or file} $out/${nm}" + ) units)} + ''; + add-unit-snippet = name: file: + '' + oldUnit=$(readlink -f "$unitDir/${name}" || echo "$unitDir/${name}") + if [ -f "$oldUnit" -a "$oldUnit" != "${file.path or file}" ]; then + unitsToStop+=("${name}") + fi + ln -sf "/etc/systemd-static/${namespace}/${name}" \ + "$unitDir/.${name}.tmp" + mv -T "$unitDir/.${name}.tmp" "$unitDir/${name}" + ${lib.concatStringsSep "\n" (map (unit: + '' + mkdir -p "$unitDir/${unit}.wants" + ln -sf "../${name}" \ + "$unitDir/${unit}.wants/.${name}.tmp" + mv -T "$unitDir/${unit}.wants/.${name}.tmp" \ + "$unitDir/${unit}.wants/${name}" + '' + ) file.wanted-by or [])} + unitsToStart+=("${name}") + ''; + in + writeScriptBin "setup-systemd-units" + '' + #!${bash}/bin/bash -e + export PATH=${coreutils}/bin:${systemd}/bin + + unitDir=/etc/systemd/system + if [ ! 
-w "$unitDir" ]; then + unitDir=/nix/var/nix/profiles/default/lib/systemd/system + mkdir -p "$unitDir" + fi + declare -a unitsToStop unitsToStart + + oldStatic=$(readlink -f /etc/systemd-static/${namespace} || true) + if [ "$oldStatic" != "${static}" ]; then + ${lib.concatStringsSep "\n" + (lib.mapAttrsToList add-unit-snippet units)} + if [ ''${#unitsToStop[@]} -ne 0 ]; then + echo "Stopping unit(s) ''${unitsToStop[@]}" >&2 + systemctl stop "''${unitsToStop[@]}" + fi + mkdir -p /etc/systemd-static + ln -sfT ${static} /etc/systemd-static/.${namespace}.tmp + mv -T /etc/systemd-static/.${namespace}.tmp /etc/systemd-static/${namespace} + systemctl daemon-reload + echo "Starting unit(s) ''${unitsToStart[@]}" >&2 + systemctl start "''${unitsToStart[@]}" + else + echo "Units unchanged, doing nothing" >&2 + fi + '' diff --git a/nixpkgs/pkgs/build-support/singularity-tools/default.nix b/nixpkgs/pkgs/build-support/singularity-tools/default.nix new file mode 100644 index 000000000000..9689e4124590 --- /dev/null +++ b/nixpkgs/pkgs/build-support/singularity-tools/default.nix @@ -0,0 +1,122 @@ +{ runCommand +, lib +, stdenv +, storeDir ? builtins.storeDir +, writeScript +, singularity +, writeReferencesToFile +, bash +, vmTools +, gawk +, util-linux +, runtimeShell +, e2fsprogs +}: +rec { + shellScript = name: text: + writeScript name '' + #!${runtimeShell} + set -e + ${text} + ''; + + mkLayer = + { name + , contents ? [ ] + # May be "apptainer" instead of "singularity" + , projectName ? (singularity.projectName or "singularity") + }: + runCommand "${projectName}-layer-${name}" + { + inherit contents; + } '' + mkdir $out + for f in $contents ; do + cp -ra $f $out/ + done + ''; + + buildImage = + let + defaultSingularity = singularity; + in + { name + , contents ? [ ] + , diskSize ? 1024 + , runScript ? "#!${stdenv.shell}\nexec /bin/sh" + , runAsRoot ? null + , memSize ? 512 + , singularity ? defaultSingularity + }: + let + projectName = singularity.projectName or "singularity"; + layer = mkLayer { + inherit name; + contents = contents ++ [ bash runScriptFile ]; + inherit projectName; + }; + runAsRootFile = shellScript "run-as-root.sh" runAsRoot; + runScriptFile = shellScript "run-script.sh" runScript; + result = vmTools.runInLinuxVM ( + runCommand "${projectName}-image-${name}.img" + { + buildInputs = [ singularity e2fsprogs util-linux gawk ]; + layerClosure = writeReferencesToFile layer; + preVM = vmTools.createEmptyImage { + size = diskSize; + fullName = "${projectName}-run-disk"; + }; + inherit memSize; + } + '' + rm -rf $out + mkdir disk + mkfs -t ext3 -b 4096 /dev/${vmTools.hd} + mount /dev/${vmTools.hd} disk + mkdir -p disk/img + cd disk/img + mkdir proc sys dev + + # Run root script + ${lib.optionalString (runAsRoot != null) '' + mkdir -p ./${storeDir} + mount --rbind ${storeDir} ./${storeDir} + unshare -imnpuf --mount-proc chroot ./ ${runAsRootFile} + umount -R ./${storeDir} + ''} + + # Build /bin and copy across closure + mkdir -p bin ./${builtins.storeDir} + for f in $(cat $layerClosure) ; do + cp -ar $f ./$f + done + + for c in ${toString contents} ; do + for f in $c/bin/* ; do + if [ ! -e bin/$(basename $f) ] ; then + ln -s $f bin/ + fi + done + done + + # Create runScript and link shell + if [ ! -e bin/sh ]; then + ln -s ${runtimeShell} bin/sh + fi + mkdir -p .${projectName}.d + ln -s ${runScriptFile} .${projectName}.d/runscript + + # Fill out .${projectName}.d + mkdir -p .${projectName}.d/env + touch .${projectName}.d/env/94-appsbase.sh + + cd .. 
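+        # Back in the disk directory: create the runtime mount directories, a minimal /etc/passwd and /etc/resolv.conf, then build the final image from ./img.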
+ mkdir -p /var/lib/${projectName}/mnt/{container,final,overlay,session,source} + echo "root:x:0:0:System administrator:/root:/bin/sh" > /etc/passwd + echo > /etc/resolv.conf + TMPDIR=$(pwd -P) ${projectName} build $out ./img + ''); + + in + result; +} diff --git a/nixpkgs/pkgs/build-support/snap/default.nix b/nixpkgs/pkgs/build-support/snap/default.nix new file mode 100644 index 000000000000..ba5271868911 --- /dev/null +++ b/nixpkgs/pkgs/build-support/snap/default.nix @@ -0,0 +1,4 @@ +{ callPackage, hello }: +{ + makeSnap = callPackage ./make-snap.nix { }; +} diff --git a/nixpkgs/pkgs/build-support/snap/make-snap.nix b/nixpkgs/pkgs/build-support/snap/make-snap.nix new file mode 100644 index 000000000000..cef7500bcbaf --- /dev/null +++ b/nixpkgs/pkgs/build-support/snap/make-snap.nix @@ -0,0 +1,84 @@ +{ + runCommand, squashfsTools, closureInfo, lib, jq, writeText +}: + +{ + # The meta parameter is the contents of the `snap.yaml`, NOT the + # `snapcraft.yaml`. + # + # - `snap.yaml` is what is inside of the final Snap, + # - `snapcraft.yaml` is used by `snapcraft` to build snaps + # + # Since we skip the `snapcraft` tool, we skip the `snapcraft.yaml` + # file. For more information: + # + # https://docs.snapcraft.io/snap-format + # + # Note: unsquashfs'ing an existing snap from the store can be helpful + # for determining what you you're missing. + # + meta +}: let + snap_yaml = let + # Validate the snap's meta contains a name. + # Also: automatically set the `base` parameter and the layout for + # the `/nix` bind. + validate = { name, ... } @ args: + args // { + # Combine the provided arguments with the required options. + + # base: built from https://github.com/NixOS/snapd-nix-base + # and published as The NixOS Foundation on the Snapcraft store. + base = "nix-base"; + layout = (args.layout or {}) // { + # Bind mount the Snap's root nix directory to `/nix` in the + # execution environment's filesystem namespace. + "/nix".bind = "$SNAP/nix"; + }; + }; + in writeText "snap.yaml" + (builtins.toJSON (validate meta)); + + # These are specifically required by snapd, so don't change them + # unless you've verified snapcraft / snapd can handle them. Best bet + # is to just mirror this list against how snapcraft creates images. + # from: https://github.com/snapcore/snapcraft/blob/b88e378148134383ffecf3658e3a940b67c9bcc9/snapcraft/internal/lifecycle/_packer.py#L96-L98 + mksquashfs_args = [ + "-noappend" "-comp" "xz" "-no-xattrs" "-no-fragments" + + # Note: We want -all-root every time, since all the files are + # owned by root anyway. This is true for Nix, but not true for + # other builds. + # from: https://github.com/snapcore/snapcraft/blob/b88e378148134383ffecf3658e3a940b67c9bcc9/snapcraft/internal/lifecycle/_packer.py#L100 + "-all-root" + ]; + +in runCommand "squashfs.img" { + nativeBuildInputs = [ squashfsTools jq ]; + + closureInfo = closureInfo { + rootPaths = [ snap_yaml ]; + }; +} '' + root=$PWD/root + mkdir $root + + ( + # Put the snap.yaml in to `/meta/snap.yaml`, setting the version + # to the hash part of the store path + mkdir $root/meta + version=$(echo $out | cut -d/ -f4 | cut -d- -f1) + cat ${snap_yaml} | jq ". + { version: \"$version\" }" \ + > $root/meta/snap.yaml + ) + + ( + # Copy the store closure in to the root + mkdir -p $root/nix/store + cat $closureInfo/store-paths | xargs -I{} cp -r {} $root/nix/store/ + ) + + # Generate the squashfs image. 
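+  # (With mksquashfs_args as defined above, this is roughly: mksquashfs $root $out -noappend -comp xz -no-xattrs -no-fragments -all-root.)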
+ mksquashfs $root $out \ + ${lib.concatStringsSep " " mksquashfs_args} +'' diff --git a/nixpkgs/pkgs/build-support/source-from-head-fun.nix b/nixpkgs/pkgs/build-support/source-from-head-fun.nix new file mode 100644 index 000000000000..938df1efd18a --- /dev/null +++ b/nixpkgs/pkgs/build-support/source-from-head-fun.nix @@ -0,0 +1,16 @@ +/* + purpose: maintain bleeding-edge HEAD sources. + + you run + app --update + app --publish + to create source snapshots + + The documentation is available at https://github.com/MarcWeber/nix-repository-manager/raw/master/README + +*/ +{ config }: + localTarName: publishedSrcSnapshot: + if config.sourceFromHead.useLocalRepos or false then + "${config.sourceFromHead.managedRepoDir or "/set/sourceFromHead.managedRepoDir/please"}/dist/${localTarName}" + else publishedSrcSnapshot diff --git a/nixpkgs/pkgs/build-support/src-only/default.nix b/nixpkgs/pkgs/build-support/src-only/default.nix new file mode 100644 index 000000000000..2b0db0e267aa --- /dev/null +++ b/nixpkgs/pkgs/build-support/src-only/default.nix @@ -0,0 +1,22 @@ +{ stdenv }: +# srcOnly is a utility builder that only fetches and unpacks the given `src`, +# optionally patching it with `patches` or adding build inputs. +# +# It can be invoked directly, or be used to wrap an existing derivation. E.g.: +# +# > srcOnly pkgs.hello +# +attrs: +let + args = if builtins.hasAttr "drvAttrs" attrs then attrs.drvAttrs else attrs; + name = if builtins.hasAttr "name" args then args.name else "${args.pname}-${args.version}"; +in +stdenv.mkDerivation (args // { + name = "${name}-source"; + installPhase = "cp -r . $out"; + outputs = [ "out" ]; + separateDebugInfo = false; + dontUnpack = false; + dontInstall = false; + phases = ["unpackPhase" "patchPhase" "installPhase"]; +}) diff --git a/nixpkgs/pkgs/build-support/substitute-files/substitute-all-files.nix b/nixpkgs/pkgs/build-support/substitute-files/substitute-all-files.nix new file mode 100644 index 000000000000..682e976dcfe5 --- /dev/null +++ b/nixpkgs/pkgs/build-support/substitute-files/substitute-all-files.nix @@ -0,0 +1,26 @@ +{ lib, stdenv }: + +args: + +stdenv.mkDerivation ({ + name = if args ? name then args.name else baseNameOf (toString args.src); + builder = builtins.toFile "builder.sh" '' + source $stdenv/setup + set -o pipefail + + eval "$preInstall" + + args= + + pushd "$src" + echo -ne "${lib.concatStringsSep "\\0" args.files}" | xargs -0 -n1 -I {} -- find {} -type f -print0 | while read -d "" line; do + mkdir -p "$out/$(dirname "$line")" + substituteAll "$line" "$out/$line" + done + popd + + eval "$postInstall" + ''; + preferLocalBuild = true; + allowSubstitutes = false; +} // args) diff --git a/nixpkgs/pkgs/build-support/substitute/substitute-all.nix b/nixpkgs/pkgs/build-support/substitute/substitute-all.nix new file mode 100644 index 000000000000..57b160bbe901 --- /dev/null +++ b/nixpkgs/pkgs/build-support/substitute/substitute-all.nix @@ -0,0 +1,12 @@ +{ stdenvNoCC }: + +args: + +# see the substituteAll in the nixpkgs documentation for usage and constraints +stdenvNoCC.mkDerivation ({ + name = if args ? 
name then args.name else baseNameOf (toString args.src); + builder = ./substitute-all.sh; + inherit (args) src; + preferLocalBuild = true; + allowSubstitutes = false; +} // args) diff --git a/nixpkgs/pkgs/build-support/substitute/substitute-all.sh b/nixpkgs/pkgs/build-support/substitute/substitute-all.sh new file mode 100644 index 000000000000..ec220481fcc0 --- /dev/null +++ b/nixpkgs/pkgs/build-support/substitute/substitute-all.sh @@ -0,0 +1,19 @@ +source $stdenv/setup + +eval "$preInstall" + +args= + +target=$out +if test -n "$dir"; then + target=$out/$dir/$name + mkdir -p $out/$dir +fi + +substituteAll $src $target + +if test -n "$isExecutable"; then + chmod +x $target +fi + +eval "$postInstall" diff --git a/nixpkgs/pkgs/build-support/substitute/substitute.nix b/nixpkgs/pkgs/build-support/substitute/substitute.nix new file mode 100644 index 000000000000..7f0332334585 --- /dev/null +++ b/nixpkgs/pkgs/build-support/substitute/substitute.nix @@ -0,0 +1,14 @@ +{ stdenvNoCC }: + +args: + +# This is a wrapper around `substitute` in the stdenv. +# The `replacements` attribute should be a list of list of arguments +# to `substitute`, such as `[ "--replace" "sourcetext" "replacementtext" ]` +stdenvNoCC.mkDerivation ({ + name = if args ? name then args.name else baseNameOf (toString args.src); + builder = ./substitute.sh; + inherit (args) src; + preferLocalBuild = true; + allowSubstitutes = false; +} // args // { replacements = args.replacements; }) diff --git a/nixpkgs/pkgs/build-support/substitute/substitute.sh b/nixpkgs/pkgs/build-support/substitute/substitute.sh new file mode 100644 index 000000000000..dbac275a80ed --- /dev/null +++ b/nixpkgs/pkgs/build-support/substitute/substitute.sh @@ -0,0 +1,18 @@ +source $stdenv/setup + +args= + +target=$out +if test -n "$dir"; then + target=$out/$dir/$name + mkdir -p $out/$dir +fi + +substitute $src $target $replacements + +if test -n "$isExecutable"; then + chmod +x $target +fi + +eval "$postInstall" + diff --git a/nixpkgs/pkgs/build-support/templaterpm/default.nix b/nixpkgs/pkgs/build-support/templaterpm/default.nix new file mode 100644 index 000000000000..56c543e8a930 --- /dev/null +++ b/nixpkgs/pkgs/build-support/templaterpm/default.nix @@ -0,0 +1,25 @@ +{lib, stdenv, makeWrapper, python, toposort, rpm}: + +stdenv.mkDerivation { + pname = "nix-template-rpm"; + version = "0.1"; + + nativeBuildInputs = [ makeWrapper ]; + buildInputs = [ python toposort rpm ]; + + dontUnpack = true; + + installPhase = '' + mkdir -p $out/bin + cp ${./nix-template-rpm.py} $out/bin/nix-template-rpm + wrapProgram $out/bin/nix-template-rpm \ + --set PYTHONPATH "${rpm}/lib/${python.libPrefix}/site-packages":"${toposort}/lib/${python.libPrefix}/site-packages" + ''; + + meta = with lib; { + description = "Create templates of nix expressions from RPM .spec files"; + maintainers = with maintainers; [ ]; + platforms = platforms.unix; + hydraPlatforms = []; + }; +} diff --git a/nixpkgs/pkgs/build-support/templaterpm/nix-template-rpm.py b/nixpkgs/pkgs/build-support/templaterpm/nix-template-rpm.py new file mode 100755 index 000000000000..db8c0f2064c2 --- /dev/null +++ b/nixpkgs/pkgs/build-support/templaterpm/nix-template-rpm.py @@ -0,0 +1,518 @@ +#!/bin/env python + +import sys +import os +import subprocess +import argparse +import re +import shutil +import rpm +import urlparse +import traceback +import toposort + + + + + +class SPECTemplate(object): + def __init__(self, specFilename, outputDir, inputDir=None, buildRootInclude=None, translateTable=None, repositoryDir=None, 
allPackagesDir=None, maintainer="MAINTAINER"): + rpm.addMacro("buildroot","$out") + rpm.addMacro("_libdir","lib") + rpm.addMacro("_libexecdir","libexec") + rpm.addMacro("_sbindir","sbin") + rpm.addMacro("_sysconfdir","etc") + rpm.addMacro("_topdir","SPACER_DIR_FOR_REMOVAL") + rpm.addMacro("_sourcedir","SOURCE_DIR_SPACER") + + self.packageGroups = [ "ocaml", "python" ] + + ts = rpm.TransactionSet() + + self.specFilename = specFilename + self.spec = ts.parseSpec(specFilename) + + self.inputDir = inputDir + self.buildRootInclude = buildRootInclude + self.repositoryDir = repositoryDir + self.allPackagesDir = allPackagesDir + self.maintainer = maintainer + + self.translateTable = translateTable + + self.facts = self.getFacts() + self.key = self.getSelfKey() + + tmpDir = os.path.join(outputDir, self.rewriteName(self.spec.sourceHeader['name'])) + if self.translateTable is not None: + self.relOutputDir = self.translateTable.path(self.key,tmpDir) + else: + self.relOutputDir = tmpDir + + self.final_output_dir = os.path.normpath( self.relOutputDir ) + + if self.repositoryDir is not None: + self.potential_repository_dir = os.path.normpath( os.path.join(self.repositoryDir,self.relOutputDir) ) + + + + def rewriteCommands(self, string): + string = string.replace('SPACER_DIR_FOR_REMOVAL/','') + string = string.replace('SPACER_DIR_FOR_REMOVAL','') + string = '\n'.join(map(lambda line: ' '.join(map(lambda x: x.replace('SOURCE_DIR_SPACER/',('${./' if (self.buildRootInclude is None) else '${buildRoot}/usr/share/buildroot/SOURCES/'))+('}' if (self.buildRootInclude is None) else '') if x.startswith('SOURCE_DIR_SPACER/') else x, line.split(' '))), string.split('\n'))) + string = string.replace('\n','\n ') + string = string.rstrip() + return string + + + def rewriteName(self, string): + parts = string.split('-') + parts = filter(lambda x: not x == "devel", parts) + parts = filter(lambda x: not x == "doc", parts) + if len(parts) > 1 and parts[0] in self.packageGroups: + return parts[0] + '-' + ''.join(parts[1:2] + map(lambda x: x.capitalize(), parts[2:])) + else: + return ''.join(parts[:1] + map(lambda x: x.capitalize(), parts[1:])) + + + def rewriteInputs(self,target,inputs): + camelcase = lambda l: l[:1] + map(lambda x: x.capitalize(), l[1:]) + filterDevel = lambda l: filter(lambda x: not x == "devel", l) + filterDoc = lambda l: filter(lambda x: not x == "doc", l) + rewrite = lambda l: ''.join(camelcase(filterDoc(filterDevel(l)))) + + def filterPackageGroup(target): + if target is None: + return [ rewrite(x.split('-')) for x in inputs if (not x.split('-')[0] in self.packageGroups) or (len(x.split('-')) == 1) ] + elif target in self.packageGroups: + return [ target + '_' + rewrite(x.split('-')[1:]) for x in inputs if (x.split('-')[0] == target) and (len(x.split('-')) > 1)] + else: + raise Exception("Unknown target") + return [] + + if target is None: + packages = filterPackageGroup(None) + packages.sort() + elif target in self.packageGroups: + packages = filterPackageGroup(target) + packages.sort() + elif target == "ALL": + packages = [] + for t in [None] + self.packageGroups: + tmp = filterPackageGroup(t) + tmp.sort() + packages += tmp + else: + raise Exception("Unknown target") + packages = [] + + return packages + + + def getBuildInputs(self,target=None): + inputs = self.rewriteInputs(target,self.spec.sourceHeader['requires']) + if self.translateTable is not None: + return map(lambda x: self.translateTable.name(x), inputs) + else: + return inputs + + def getSelfKey(self): + name = 
self.spec.sourceHeader['name'] + if len(name.split('-')) > 1 and name.split('-')[0] in self.packageGroups: + key = self.rewriteInputs(name.split('-')[0], [self.spec.sourceHeader['name']])[0] + else: + key = self.rewriteInputs(None, [self.spec.sourceHeader['name']])[0] + return key + + def getSelf(self): + if self.translateTable is not None: + return self.translateTable.name(self.key) + else: + return self.key + + + + + def copyPatches(self, input_dir, output_dir): + patches = [source for (source, _, flag) in self.spec.sources if flag==2] + for filename in patches: + shutil.copyfile(os.path.join(input_dir, filename), os.path.join(output_dir, filename)) + + + def copySources(self, input_dir, output_dir): + filenames = [source for (source, _, flag) in self.spec.sources if flag==1 if not urlparse.urlparse(source).scheme in ["http", "https"] ] + for filename in filenames: + shutil.copyfile(os.path.join(input_dir, filename), os.path.join(output_dir, filename)) + + + def getFacts(self): + facts = {} + facts["name"] = self.rewriteName(self.spec.sourceHeader['name']) + facts["version"] = self.spec.sourceHeader['version'] + + facts["url"] = [] + facts["sha256"] = [] + sources = [source for (source, _, flag) in self.spec.sources if flag==1 if urlparse.urlparse(source).scheme in ["http", "https"] ] + for url in sources: + p = subprocess.Popen(['nix-prefetch-url', url], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + output, err = p.communicate() + sha256 = output[:-1] #remove new line + facts["url"].append(url) + facts["sha256"].append(sha256) + + patches = [source for (source, _, flag) in self.spec.sources if flag==2] + if self.buildRootInclude is None: + facts["patches"] = map(lambda x: './'+x, patches) + else: + facts["patches"] = map(lambda x: '"${buildRoot}/usr/share/buildroot/SOURCES/'+x+'"', reversed(patches)) + + return facts + + + @property + def name(self): + out = ' name = "' + self.facts["name"] + '-' + self.facts["version"] + '";\n' + out += ' version = "' + self.facts['version'] + '";\n' + return out + + + @property + def src(self): + sources = [source for (source, _, flag) in self.spec.sources if flag==1 if urlparse.urlparse(source).scheme in ["http", "https"] ] + out = '' + for (url,sha256) in zip(self.facts['url'],self.facts['sha256']): + out += ' src = fetchurl {\n' + out += ' url = "' + url + '";\n' + out += ' sha256 = "' + sha256 + '";\n' + out += ' };\n' + return out + + + @property + def patch(self): + out = ' patches = [ ' + ' '.join(self.facts['patches']) + ' ];\n' + return out + + + @property + def buildInputs(self): + out = ' buildInputs = [ ' + out += ' '.join(self.getBuildInputs("ALL")) + out += ' ];\n' + return out + + + @property + def configure(self): + out = ' configurePhase = \'\'\n ' + self.rewriteCommands(self.spec.prep) + '\n \'\';\n'; + return out + + + @property + def build(self): + out = ' buildPhase = \'\'\n ' + self.rewriteCommands(self.spec.build) + '\n \'\';\n'; + return out + + + @property + def install(self): + out = ' installPhase = \'\'\n ' + self.rewriteCommands(self.spec.install) + '\n \'\';\n'; + return out + + @property + def ocamlExtra(self): + if "ocaml" in self.getBuildInputs("ALL"): + return ' createFindlibDestdir = true;\n' + else: + return '' + + + @property + def meta(self): + out = ' meta = with lib; {\n' + out += ' homepage = ' + self.spec.sourceHeader['url'] + ';\n' + out += ' description = "' + self.spec.sourceHeader['summary'] + '";\n' + out += ' license = lib.licenses.' 
+ self.spec.sourceHeader['license'] + ';\n' + out += ' platforms = [ "i686-linux" "x86_64-linux" ];\n' + out += ' maintainers = with lib.maintainers; [ ' + self.maintainer + ' ];\n' + out += ' };\n' + out += '}\n' + return out + + + def __str__(self): + head = '{lib, stdenv, fetchurl, ' + ', '.join(self.getBuildInputs("ALL")) + '}:\n\n' + head += 'stdenv.mkDerivation {\n' + body = [ self.name, self.src, self.patch, self.buildInputs, self.configure, self.build, self.ocamlExtra, self.install, self.meta ] + return head + '\n'.join(body) + + + def getTemplate(self): + head = '{lib, stdenv, buildRoot, fetchurl, ' + ', '.join(self.getBuildInputs("ALL")) + '}:\n\n' + head += 'let\n' + head += ' buildRootInput = (import "${buildRoot}/usr/share/buildroot/buildRootInput.nix") { fetchurl=fetchurl; buildRoot=buildRoot; };\n' + head += 'in\n\n' + head += 'stdenv.mkDerivation {\n' + head += ' inherit (buildRootInput.'+self.rewriteName(self.spec.sourceHeader['name'])+') name version src;\n' + head += ' patches = buildRootInput.'+self.rewriteName(self.spec.sourceHeader['name'])+'.patches ++ [];\n\n' + body = [ self.buildInputs, self.configure, self.build, self.ocamlExtra, self.install, self.meta ] + return head + '\n'.join(body) + + + def getInclude(self): + head = self.rewriteName(self.spec.sourceHeader['name']) + ' = {\n' + body = [ self.name, self.src, self.patch ] + return head + '\n'.join(body) + '};\n' + + + def __cmp__(self,other): + if self.getSelf() in other.getBuildInputs("ALL"): + return 1 + else: + return -1 + + + def callPackage(self): + callPackage = ' ' + self.getSelf() + ' = callPackage ' + os.path.relpath(self.final_output_dir, self.allPackagesDir) + ' {' + newline = False; + for target in self.packageGroups: + tmp = self.getBuildInputs(target) + if len(tmp) > 0: + newline = True; + callPackage += '\n ' + 'inherit (' + target + 'Packages) ' + ' '.join(tmp) + ';' + if newline: + callPackage += '\n };' + else: + callPackage += ' };' + return callPackage + + + + def generateCombined(self): + if not os.path.exists(self.final_output_dir): + os.makedirs(self.final_output_dir) + + if self.inputDir is not None: + self.copySources(self.inputDir, self.final_output_dir) + self.copyPatches(self.inputDir, self.final_output_dir) + + nixfile = open(os.path.join(self.final_output_dir,'default.nix'), 'w') + nixfile.write(str(self)) + nixfile.close() + + shutil.copyfile(self.specFilename, os.path.join(self.final_output_dir, os.path.basename(self.specFilename))) + + + + def generateSplit(self): + if not os.path.exists(self.final_output_dir): + os.makedirs(self.final_output_dir) + + nixfile = open(os.path.join(self.final_output_dir,'default.nix'), 'w') + nixfile.write(self.getTemplate()) + nixfile.close() + + return self.getInclude() + + + + + + +class NixTemplate(object): + def __init__(self, nixfile): + self.nixfile = nixfile + self.original = { "name":None, "version":None, "url":None, "sha256":None, "patches":None } + self.update = { "name":None, "version":None, "url":None, "sha256":None, "patches":None } + self.matchedLines = {} + + if os.path.isfile(nixfile): + with file(nixfile, 'r') as infile: + for (n,line) in enumerate(infile): + name = re.match(r'^\s*name\s*=\s*"(.*?)"\s*;\s*$', line) + version = re.match(r'^\s*version\s*=\s*"(.*?)"\s*;\s*$', line) + url = re.match(r'^\s*url\s*=\s*"?(.*?)"?\s*;\s*$', line) + sha256 = re.match(r'^\s*sha256\s*=\s*"(.*?)"\s*;\s*$', line) + patches = re.match(r'^\s*patches\s*=\s*(\[.*?\])\s*;\s*$', line) + if name is not None and self.original["name"] is None: + 
self.original["name"] = name.group(1) + self.matchedLines[n] = "name" + if version is not None and self.original["version"] is None: + self.original["version"] = version.group(1) + self.matchedLines[n] = "version" + if url is not None and self.original["url"] is None: + self.original["url"] = url.group(1) + self.matchedLines[n] = "url" + if sha256 is not None and self.original["sha256"] is None: + self.original["sha256"] = sha256.group(1) + self.matchedLines[n] = "sha256" + if patches is not None and self.original["patches"] is None: + self.original["patches"] = patches.group(1) + self.matchedLines[n] = "patches" + + + def generateUpdated(self, nixOut): + nixTemplateFile = open(os.path.normpath(self.nixfile),'r') + nixOutFile = open(os.path.normpath(nixOut),'w') + for (n,line) in enumerate(nixTemplateFile): + if self.matchedLines.has_key(n) and self.update[self.matchedLines[n]] is not None: + nixOutFile.write(line.replace(self.original[self.matchedLines[n]], self.update[self.matchedLines[n]], 1)) + else: + nixOutFile.write(line) + nixTemplateFile.close() + nixOutFile.close() + + + def loadUpdate(self,orig): + if orig.has_key("name") and orig.has_key("version"): + self.update["name"] = orig["name"] + '-' + orig["version"] + self.update["version"] = orig["version"] + if orig.has_key("url") and orig.has_key("sha256") and len(orig["url"])>0: + self.update["url"] = orig["url"][0] + self.update["sha256"] = orig["sha256"][0] + for url in orig["url"][1:-1]: + sys.stderr.write("WARNING: URL has been dropped: %s\n" % url) + if orig.has_key("patches"): + self.update["patches"] = '[ ' + ' '.join(orig['patches']) + ' ]' + + +class TranslationTable(object): + def __init__(self): + self.tablePath = {} + self.tableName = {} + + def update(self, key, path, name=None): + self.tablePath[key] = path + if name is not None: + self.tableName[key] = name + + def readTable(self, tableFile): + with file(tableFile, 'r') as infile: + for line in infile: + match = re.match(r'^(.+?)\s+(.+?)\s+(.+?)\s*$', line) + if match is not None: + if not self.tablePath.has_key(match.group(1)): + self.tablePath[match.group(1)] = match.group(2) + if not self.tableName.has_key(match.group(1)): + self.tableName[match.group(1)] = match.group(3) + else: + match = re.match(r'^(.+?)\s+(.+?)\s*$', line) + if not self.tablePath.has_key(match.group(1)): + self.tablePath[match.group(1)] = match.group(2) + + def writeTable(self, tableFile): + outFile = open(os.path.normpath(tableFile),'w') + keys = self.tablePath.keys() + keys.sort() + for k in keys: + if self.tableName.has_key(k): + outFile.write( k + " " + self.tablePath[k] + " " + self.tableName[k] + "\n" ) + else: + outFile.write( k + " " + self.tablePath[k] + "\n" ) + outFile.close() + + def name(self, key): + if self.tableName.has_key(key): + return self.tableName[key] + else: + return key + + def path(self, key, orig): + if self.tablePath.has_key(key): + return self.tablePath[key] + else: + return orig + + + + + +if __name__ == "__main__": + #Parse command line options + parser = argparse.ArgumentParser(description="Generate .nix templates from RPM spec files") + parser.add_argument("specs", metavar="SPEC", nargs="+", help="spec file") + parser.add_argument("-o", "--output", metavar="OUT_DIR", required=True, help="output directory") + parser.add_argument("-b", "--buildRoot", metavar="BUILDROOT_DIR", default=None, help="buildroot output directory") + parser.add_argument("-i", "--inputSources", metavar="IN_DIR", default=None, help="sources input directory") + parser.add_argument("-m", 
"--maintainer", metavar="MAINTAINER", default="__NIX_MAINTAINER__", help="package maintainer") + parser.add_argument("-r", "--repository", metavar="REP_DIR", default=None, help="nix repository to compare output against") + parser.add_argument("-t", "--translate", metavar="TRANSLATE_TABLE", default=None, help="path of translation table for name and path") + parser.add_argument("-u", "--translateOut", metavar="TRANSLATE_OUT", default=None, help="output path for updated translation table") + parser.add_argument("-a", "--allPackages", metavar="ALL_PACKAGES", default=None, help="top level dir to call packages from") + args = parser.parse_args() + + allPackagesDir = os.path.normpath( os.path.dirname(args.allPackages) ) + if not os.path.exists(allPackagesDir): + os.makedirs(allPackagesDir) + + buildRootContent = {} + nameMap = {} + + newTable = TranslationTable() + if args.translate is not None: + table = TranslationTable() + table.readTable(args.translate) + newTable.readTable(args.translate) + else: + table = None + + for specPath in args.specs: + try: + sys.stderr.write("INFO: generate nix file from: %s\n" % specPath) + + spec = SPECTemplate(specPath, args.output, args.inputSources, args.buildRoot, table, args.repository, allPackagesDir, args.maintainer) + if args.repository is not None: + if os.path.exists(os.path.join(spec.potential_repository_dir,'default.nix')): + nixTemplate = NixTemplate(os.path.join(spec.potential_repository_dir,'default.nix')) + nixTemplate.loadUpdate(spec.facts) + if not os.path.exists(spec.final_output_dir): + os.makedirs(spec.final_output_dir) + nixTemplate.generateUpdated(os.path.join(spec.final_output_dir,'default.nix')) + else: + sys.stderr.write("WARNING: Repository does not contain template: %s\n" % os.path.join(spec.potential_repository_dir,'default.nix')) + if args.buildRoot is None: + spec.generateCombined() + else: + buildRootContent[spec.key] = spec.generateSplit() + else: + if args.buildRoot is None: + spec.generateCombined() + else: + buildRootContent[spec.key] = spec.generateSplit() + + newTable.update(spec.key,spec.relOutputDir,spec.getSelf()) + nameMap[spec.getSelf()] = spec + + except Exception, e: + sys.stderr.write("ERROR: %s failed with:\n%s\n%s\n" % (specPath,e.message,traceback.format_exc())) + + if args.translateOut is not None: + if not os.path.exists(os.path.dirname(os.path.normpath(args.translateOut))): + os.makedirs(os.path.dirname(os.path.normpath(args.translateOut))) + newTable.writeTable(args.translateOut) + + graph = {} + for k, v in nameMap.items(): + graph[k] = set(v.getBuildInputs("ALL")) + + sortedSpecs = toposort.toposort_flatten(graph) + sortedSpecs = filter( lambda x: x in nameMap.keys(), sortedSpecs) + + allPackagesFile = open(os.path.normpath( args.allPackages ), 'w') + allPackagesFile.write( '\n\n'.join(map(lambda x: x.callPackage(), map(lambda x: nameMap[x], sortedSpecs))) ) + allPackagesFile.close() + + if args.buildRoot is not None: + buildRootFilename = os.path.normpath( args.buildRoot ) + if not os.path.exists(os.path.dirname(buildRootFilename)): + os.makedirs(os.path.dirname(buildRootFilename)) + buildRootFile = open(buildRootFilename, 'w') + buildRootFile.write( "{ fetchurl, buildRoot }: {\n\n" ) + keys = buildRootContent.keys() + keys.sort() + for k in keys: + buildRootFile.write( buildRootContent[k] + '\n' ) + buildRootFile.write( "}\n" ) + buildRootFile.close() + + diff --git a/nixpkgs/pkgs/build-support/testers/default.nix b/nixpkgs/pkgs/build-support/testers/default.nix new file mode 100644 index 
000000000000..fc10597e3e12 --- /dev/null +++ b/nixpkgs/pkgs/build-support/testers/default.nix @@ -0,0 +1,150 @@ +{ pkgs, buildPackages, lib, callPackage, runCommand, stdenv, substituteAll, testers }: +# Documentation is in doc/builders/testers.chapter.md +{ + # See https://nixos.org/manual/nixpkgs/unstable/#tester-testBuildFailure + # or doc/builders/testers.chapter.md + testBuildFailure = drv: drv.overrideAttrs (orig: { + builder = buildPackages.bash; + args = [ + (substituteAll { coreutils = buildPackages.coreutils; src = ./expect-failure.sh; }) + orig.realBuilder or stdenv.shell + ] ++ orig.args or ["-e" (orig.builder or ../../stdenv/generic/default-builder.sh)]; + }); + + # See https://nixos.org/manual/nixpkgs/unstable/#tester-testEqualDerivation + # or doc/builders/testers.chapter.md + testEqualDerivation = callPackage ./test-equal-derivation.nix { }; + + # See https://nixos.org/manual/nixpkgs/unstable/#tester-testEqualContents + # or doc/builders/testers.chapter.md + testEqualContents = { + assertion, + actual, + expected, + }: runCommand "equal-contents-${lib.strings.toLower assertion}" { + inherit assertion actual expected; + } '' + echo "Checking:" + echo "$assertion" + if ! diff -U5 -r "$actual" "$expected" --color=always + then + echo + echo 'Contents must be equal, but were not!' + echo + echo "+: expected, at $expected" + echo "-: unexpected, at $actual" + exit 1 + else + find "$expected" -type f -executable > expected-executables | sort + find "$actual" -type f -executable > actual-executables | sort + if ! diff -U0 actual-executables expected-executables --color=always + then + echo + echo "Contents must be equal, but some files' executable bits don't match" + echo + echo "+: make this file executable in the actual contents" + echo "-: make this file non-executable in the actual contents" + exit 1 + else + echo "expected $expected and actual $actual match." + echo 'OK' + touch $out + fi + fi + ''; + + # See https://nixos.org/manual/nixpkgs/unstable/#tester-testVersion + # or doc/builders/testers.chapter.md + testVersion = + { package, + command ? "${package.meta.mainProgram or package.pname or package.name} --version", + version ? package.version, + }: runCommand "${package.name}-test-version" { nativeBuildInputs = [ package ]; meta.timeout = 60; } '' + if output=$(${command} 2>&1); then + if grep -Fw -- "${version}" - <<< "$output"; then + touch $out + else + echo "Version string '${version}' not found!" >&2 + echo "The output was:" >&2 + echo "$output" >&2 + exit 1 + fi + else + echo -n ${lib.escapeShellArg command} >&2 + echo " returned a non-zero exit code." >&2 + echo "$output" >&2 + exit 1 + fi + ''; + + # See doc/builders/testers.chapter.md or + # https://nixos.org/manual/nixpkgs/unstable/#tester-invalidateFetcherByDrvHash + invalidateFetcherByDrvHash = f: args: + let + drvPath = (f args).drvPath; + # It's safe to discard the context, because we don't access the path. + salt = builtins.unsafeDiscardStringContext (lib.substring 0 12 (baseNameOf drvPath)); + # New derivation incorporating the original drv hash in the name + salted = f (args // { name = "${args.name or "source"}-salted-${salt}"; }); + # Make sure we did change the derivation. If the fetcher ignores `name`, + # `invalidateFetcherByDrvHash` doesn't work. + checked = + if salted.drvPath == drvPath + then throw "invalidateFetcherByDrvHash: Adding the derivation hash to the fixed-output derivation name had no effect. Make sure the fetcher's name argument ends up in the derivation name. 
Otherwise, the fetcher will not be re-run when its implementation changes. This is important for testing." + else salted; + in checked; + + # See doc/builders/testers.chapter.md or + # https://nixos.org/manual/nixpkgs/unstable/#tester-runNixOSTest + runNixOSTest = + let nixos = import ../../../nixos/lib { + inherit lib; + }; + in testModule: + nixos.runTest { + _file = "pkgs.runNixOSTest implementation"; + imports = [ + (lib.setDefaultModuleLocation "the argument that was passed to pkgs.runNixOSTest" testModule) + ]; + hostPkgs = pkgs; + node.pkgs = pkgs; + }; + + # See doc/builders/testers.chapter.md or + # https://nixos.org/manual/nixpkgs/unstable/#tester-invalidateFetcherByDrvHash + nixosTest = + let + /* The nixos/lib/testing-python.nix module, preapplied with arguments that + * make sense for this evaluation of Nixpkgs. + */ + nixosTesting = + (import ../../../nixos/lib/testing-python.nix { + inherit (stdenv.hostPlatform) system; + inherit pkgs; + extraConfigurations = [( + { lib, ... }: { + config.nixpkgs.pkgs = lib.mkDefault pkgs; + } + )]; + }); + in + test: + let + loadedTest = if builtins.typeOf test == "path" + then import test + else test; + calledTest = lib.toFunction loadedTest pkgs; + in + nixosTesting.simpleTest calledTest; + + hasPkgConfigModule = + { moduleName, ... }@args: + lib.warn "testers.hasPkgConfigModule has been deprecated in favor of testers.hasPkgConfigModules. It accepts a list of strings via the moduleNames argument instead of a single moduleName." ( + testers.hasPkgConfigModules (builtins.removeAttrs args [ "moduleName" ] // { + moduleNames = [ moduleName ]; + }) + ); + hasPkgConfigModules = callPackage ./hasPkgConfigModules/tester.nix { }; + + testMetaPkgConfig = callPackage ./testMetaPkgConfig/tester.nix { }; +} diff --git a/nixpkgs/pkgs/build-support/testers/expect-failure.sh b/nixpkgs/pkgs/build-support/testers/expect-failure.sh new file mode 100644 index 000000000000..9c725d48bf34 --- /dev/null +++ b/nixpkgs/pkgs/build-support/testers/expect-failure.sh @@ -0,0 +1,71 @@ +# Run a builder, flip exit code, save log and fix outputs +# +# Sub-goals: +# - Delegate to another original builder passed via args +# - Save the build log to output for further checks +# - Make the derivation succeed if the original builder fails +# - Make the derivation fail if the original builder returns exit code 0 +# +# Requirements: +# This runs before, without and after stdenv. Do not modify the environment; +# especially not before invoking the original builder. For example, use +# "@" substitutions instead of PATH. +# Do not export any variables. + +# Stricter bash +set -eu + +# ------------------------ +# Run the original builder + +echo "testBuildFailure: Expecting non-zero exit from builder and args: ${*@Q}" + +("$@" 2>&1) | @coreutils@/bin/tee $TMPDIR/testBuildFailure.log \ + | while IFS= read -r ln; do + echo "original builder: $ln" + done + +r=${PIPESTATUS[0]} +if [[ $r = 0 ]]; then + echo "testBuildFailure: The builder did not fail, but a failure was expected!" + exit 1 +fi +echo "testBuildFailure: Original builder produced exit code: $r" + +# ----------------------------------------- +# Write the build log to the default output +# +# # from stdenv setup.sh +getAllOutputNames() { + if [ -n "$__structuredAttrs" ]; then + echo "${!outputs[*]}" + else + echo "$outputs" + fi +} + +outs=( $(getAllOutputNames) ) +defOut=${outs[0]} +defOutPath=${!defOut} + +if [[ ! 
-d $defOutPath ]]; then + if [[ -e $defOutPath ]]; then + @coreutils@/bin/mv $defOutPath $TMPDIR/out-node + @coreutils@/bin/mkdir $defOutPath + @coreutils@/bin/mv $TMPDIR/out-node $defOutPath/result + fi +fi + +@coreutils@/bin/mkdir -p $defOutPath +@coreutils@/bin/mv $TMPDIR/testBuildFailure.log $defOutPath/testBuildFailure.log +echo $r >$defOutPath/testBuildFailure.exit + +# ------------------------------------------------------ +# Put empty directories in place for any missing outputs + +for outputName in ${outputs:-out}; do + outputPath="${!outputName}" + if [[ ! -e "${outputPath}" ]]; then + @coreutils@/bin/mkdir "${outputPath}"; + fi +done diff --git a/nixpkgs/pkgs/build-support/testers/hasPkgConfigModules/tester.nix b/nixpkgs/pkgs/build-support/testers/hasPkgConfigModules/tester.nix new file mode 100644 index 000000000000..755559038271 --- /dev/null +++ b/nixpkgs/pkgs/build-support/testers/hasPkgConfigModules/tester.nix @@ -0,0 +1,49 @@ +# Static arguments +{ lib, runCommand, pkg-config }: + +# Tester arguments +{ package, + moduleNames ? package.meta.pkgConfigModules, + testName ? "check-pkg-config-${lib.concatStringsSep "-" moduleNames}", +}: + +runCommand testName { + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ package ]; + inherit moduleNames; + meta = { + description = "Test whether ${package.name} exposes pkg-config modules ${lib.concatStringsSep ", " moduleNames}."; + } + # Make sure licensing info etc is preserved, as this is a concern for e.g. cache.nixos.org, + # as hydra can't check this meta info in dependencies. + # The test itself is just Nixpkgs, with MIT license. + // builtins.intersectAttrs + { + available = throw "unused"; + broken = throw "unused"; + insecure = throw "unused"; + license = throw "unused"; + maintainers = throw "unused"; + platforms = throw "unused"; + unfree = throw "unused"; + unsupported = throw "unused"; + } + package.meta; + } '' + for moduleName in $moduleNames; do + echo "checking pkg-config module $moduleName in $buildInputs" + set +e + version="$(pkg-config --modversion $moduleName)" + r=$? 
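+    # the pkg-config exit status is captured in $r; errexit is re-enabled below before $r is inspected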
+ set -e + if [[ $r = 0 ]]; then + echo "✅ pkg-config module $moduleName exists and has version $version" + printf '%s\t%s\n' "$moduleName" "$version" >> "$out" + else + echo "These modules were available in the input propagation closure:" + pkg-config --list-all + echo "❌ pkg-config module $moduleName was not found" + false + fi + done + '' diff --git a/nixpkgs/pkgs/build-support/testers/hasPkgConfigModules/tests.nix b/nixpkgs/pkgs/build-support/testers/hasPkgConfigModules/tests.nix new file mode 100644 index 000000000000..96569498fb15 --- /dev/null +++ b/nixpkgs/pkgs/build-support/testers/hasPkgConfigModules/tests.nix @@ -0,0 +1,49 @@ +# cd nixpkgs +# nix-build -A tests.testers.hasPkgConfigModule +{ lib, testers, zlib, openssl, runCommand }: + +lib.recurseIntoAttrs { + + zlib-has-zlib = testers.hasPkgConfigModules { + package = zlib; + moduleNames = [ "zlib" ]; + }; + + zlib-has-meta-pkgConfigModules = testers.hasPkgConfigModules { + package = zlib; + }; + + openssl-has-openssl = testers.hasPkgConfigModules { + package = openssl; + moduleNames = [ "openssl" ]; + }; + + openssl-has-all-meta-pkgConfigModules = testers.hasPkgConfigModules { + package = openssl; + }; + + zlib-does-not-have-ylib = runCommand "zlib-does-not-have-ylib" { + failed = testers.testBuildFailure ( + testers.hasPkgConfigModules { + package = zlib; + moduleNames = [ "ylib" ]; + } + ); + } '' + echo 'it logs a relevant error message' + { + grep -F "pkg-config module ylib was not found" $failed/testBuildFailure.log + } + + echo 'it logs which pkg-config modules are available, to be helpful' + { + # grep -v: the string zlib does also occur in a store path in an earlier message, which isn't particularly helpful + grep -v "checking pkg-config module" < $failed/testBuildFailure.log \ + | grep -F "zlib" + } + + # done + touch $out + ''; + +} diff --git a/nixpkgs/pkgs/build-support/testers/test-equal-derivation.nix b/nixpkgs/pkgs/build-support/testers/test-equal-derivation.nix new file mode 100644 index 000000000000..610d5f585576 --- /dev/null +++ b/nixpkgs/pkgs/build-support/testers/test-equal-derivation.nix @@ -0,0 +1,26 @@ +{ lib, runCommand, emptyFile, nix-diff }: + +assertion: a: b: +let + drvA = builtins.unsafeDiscardOutputDependency a.drvPath or (throw "testEqualDerivation second argument must be a package"); + drvB = builtins.unsafeDiscardOutputDependency b.drvPath or (throw "testEqualDerivation third argument must be a package"); + name = + if a?name + then "testEqualDerivation-${a.name}" + else "testEqualDerivation"; +in +if drvA == drvB then + emptyFile +else + runCommand name + { + inherit assertion drvA drvB; + nativeBuildInputs = [ nix-diff ]; + } '' + echo "$assertion" + echo "However, the derivations differ:" + echo + echo nix-diff $drvA $drvB + nix-diff $drvA $drvB + exit 1 + '' diff --git a/nixpkgs/pkgs/build-support/testers/test/README.md b/nixpkgs/pkgs/build-support/testers/test/README.md new file mode 100644 index 000000000000..2d6b4bdc43fe --- /dev/null +++ b/nixpkgs/pkgs/build-support/testers/test/README.md @@ -0,0 +1,8 @@ +# Tests _for the testers_ + + cd nixpkgs + nix-build -A tests.testers + +Tests generally derive their own correctness from simplicity, which in the +case of testers (themselves functions) does not always work out. +Hence the need for tests that test the testers. 
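+
+A recurring pattern in these tests (see `zlib-does-not-have-ylib` in
+`../hasPkgConfigModules/tests.nix`) is to wrap a tester invocation in
+`testers.testBuildFailure` and then grep the captured log. A minimal sketch of
+that pattern, assuming `runCommand`, `testers`, and `zlib` are in scope (e.g.
+via `callPackage`); the bogus module name is only illustrative:
+
+    runCommand "tester-fails-as-expected" {
+      failed = testers.testBuildFailure (testers.hasPkgConfigModules {
+        package = zlib;
+        moduleNames = [ "not-a-real-module" ];
+      });
+    } ''
+      grep -F "pkg-config module not-a-real-module was not found" $failed/testBuildFailure.log
+      touch $out
+    ''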
diff --git a/nixpkgs/pkgs/build-support/testers/test/default.nix b/nixpkgs/pkgs/build-support/testers/test/default.nix new file mode 100644 index 000000000000..c48c9f299ebf --- /dev/null +++ b/nixpkgs/pkgs/build-support/testers/test/default.nix @@ -0,0 +1,213 @@ +{ testers, lib, pkgs, hello, runCommand, ... }: +let + pkgs-with-overlay = pkgs.extend(final: prev: { + proof-of-overlay-hello = prev.hello; + }); + + dummyVersioning = { + revision = "test"; + versionSuffix = "test"; + label = "test"; + }; + +in +lib.recurseIntoAttrs { + hasPkgConfigModules = pkgs.callPackage ../hasPkgConfigModules/tests.nix { }; + + runNixOSTest-example = pkgs-with-overlay.testers.runNixOSTest ({ lib, ... }: { + name = "runNixOSTest-test"; + nodes.machine = { pkgs, ... }: { + system.nixos = dummyVersioning; + environment.systemPackages = [ pkgs.proof-of-overlay-hello pkgs.figlet ]; + }; + testScript = '' + machine.succeed("hello | figlet >/dev/console") + ''; + }); + + # Check that the wiring of nixosTest is correct. + # Correct operation of the NixOS test driver should be asserted elsewhere. + nixosTest-example = pkgs-with-overlay.testers.nixosTest ({ lib, pkgs, figlet, ... }: { + name = "nixosTest-test"; + nodes.machine = { pkgs, ... }: { + system.nixos = dummyVersioning; + environment.systemPackages = [ pkgs.proof-of-overlay-hello figlet ]; + }; + testScript = '' + machine.succeed("hello | figlet >/dev/console") + ''; + }); + + testBuildFailure = lib.recurseIntoAttrs { + happy = runCommand "testBuildFailure-happy" { + failed = testers.testBuildFailure (runCommand "fail" {} '' + echo ok-ish >$out + + echo failing though + echo also stderr 1>&2 + echo 'line\nwith-\bbackslashes' + printf "incomplete line - no newline" + + exit 3 + ''); + } '' + grep -F 'ok-ish' $failed/result + + grep -F 'failing though' $failed/testBuildFailure.log + grep -F 'also stderr' $failed/testBuildFailure.log + grep -F 'line\nwith-\bbackslashes' $failed/testBuildFailure.log + grep -F 'incomplete line - no newline' $failed/testBuildFailure.log + + [[ 3 = $(cat $failed/testBuildFailure.exit) ]] + + touch $out + ''; + + helloDoesNotFail = runCommand "testBuildFailure-helloDoesNotFail" { + failed = testers.testBuildFailure (testers.testBuildFailure hello); + + # Add hello itself as a prerequisite, so we don't try to run this test if + # there's an actual failure in hello. + inherit hello; + } '' + echo "Checking $failed/testBuildFailure.log" + grep -F 'testBuildFailure: The builder did not fail, but a failure was expected' $failed/testBuildFailure.log >/dev/null + [[ 1 = $(cat $failed/testBuildFailure.exit) ]] + touch $out + echo 'All good.' + ''; + + multiOutput = runCommand "testBuildFailure-multiOutput" { + failed = testers.testBuildFailure (runCommand "fail" { + # dev will be the default output + outputs = ["dev" "doc" "out"]; + } '' + echo i am failing + exit 1 + ''); + } '' + grep -F 'i am failing' $failed/testBuildFailure.log >/dev/null + [[ 1 = $(cat $failed/testBuildFailure.exit) ]] + + # Checking our note that dev is the default output + echo $failed/_ | grep -- '-dev/_' >/dev/null + echo 'All good.' 
+ touch $out + ''; + }; + + testEqualContents = lib.recurseIntoAttrs { + happy = testers.testEqualContents { + assertion = "The same directory contents at different paths are recognized as equal"; + expected = runCommand "expected" {} '' + mkdir -p $out/c + echo a >$out/a + echo b >$out/b + echo d >$out/c/d + ''; + actual = runCommand "actual" {} '' + mkdir -p $out/c + echo a >$out/a + echo b >$out/b + echo d >$out/c/d + ''; + }; + + unequalExe = + runCommand "testEqualContents-unequalExe" { + log = testers.testBuildFailure (testers.testEqualContents { + assertion = "The same directory contents at different paths are recognized as equal"; + expected = runCommand "expected" {} '' + mkdir -p $out/c + echo a >$out/a + chmod a+x $out/a + echo b >$out/b + echo d >$out/c/d + ''; + actual = runCommand "actual" {} '' + mkdir -p $out/c + echo a >$out/a + echo b >$out/b + chmod a+x $out/b + echo d >$out/c/d + ''; + }); + } '' + ( + set -x + grep -F -- "executable bits don't match" $log/testBuildFailure.log + grep -E -- '+.*-actual/a' $log/testBuildFailure.log + grep -E -- '-.*-actual/b' $log/testBuildFailure.log + grep -F -- "--- actual-executables" $log/testBuildFailure.log + grep -F -- "+++ expected-executables" $log/testBuildFailure.log + ) || { + echo "Test failed: could not find pattern in build log $log" + exit 1 + } + echo 'All good.' + touch $out + ''; + + fileDiff = + runCommand "testEqualContents-fileDiff" { + log = testers.testBuildFailure (testers.testEqualContents { + assertion = "The same directory contents at different paths are recognized as equal"; + expected = runCommand "expected" {} '' + mkdir -p $out/c + echo a >$out/a + echo b >$out/b + echo d >$out/c/d + ''; + actual = runCommand "actual" {} '' + mkdir -p $out/c + echo a >$out/a + echo B >$out/b + echo d >$out/c/d + ''; + }); + } '' + ( + set -x + grep -F -- "Contents must be equal but were not" $log/testBuildFailure.log + grep -E -- '+++ .*-actual/b' $log/testBuildFailure.log + grep -E -- '--- .*-actual/b' $log/testBuildFailure.log + grep -F -- "-B" $log/testBuildFailure.log + grep -F -- "+b" $log/testBuildFailure.log + ) || { + echo "Test failed: could not find pattern in build log $log" + exit 1 + } + echo 'All good.' + touch $out + ''; + + fileMissing = + runCommand "testEqualContents-fileMissing" { + log = testers.testBuildFailure (testers.testEqualContents { + assertion = "The same directory contents at different paths are recognized as equal"; + expected = runCommand "expected" {} '' + mkdir -p $out/c + echo a >$out/a + echo b >$out/b + echo d >$out/c/d + ''; + actual = runCommand "actual" {} '' + mkdir -p $out/c + echo a >$out/a + echo d >$out/c/d + ''; + }); + } '' + ( + set -x + grep -F -- "Contents must be equal but were not" $log/testBuildFailure.log + grep -E -- 'Only in .*-expected: b' $log/testBuildFailure.log + ) || { + echo "Test failed: could not find pattern in build log $log" + exit 1 + } + echo 'All good.' 
+ touch $out + ''; + }; +} diff --git a/nixpkgs/pkgs/build-support/testers/testMetaPkgConfig/tester.nix b/nixpkgs/pkgs/build-support/testers/testMetaPkgConfig/tester.nix new file mode 100644 index 000000000000..7892a29e4c28 --- /dev/null +++ b/nixpkgs/pkgs/build-support/testers/testMetaPkgConfig/tester.nix @@ -0,0 +1,12 @@ +{ lib, runCommand, testers }: + +package: + +runCommand "check-meta-pkg-config-modules-for-${package.name}" { + meta = { + description = "Test whether ${package.name} exposes all pkg-config modules ${toString package.meta.pkgConfigModules}"; + }; + dependsOn = testers.hasPkgConfigModules { inherit package; }; +} '' + echo "found all of ${toString package.meta.pkgConfigModules}" > "$out" +'' diff --git a/nixpkgs/pkgs/build-support/trivial-builders/default.nix b/nixpkgs/pkgs/build-support/trivial-builders/default.nix new file mode 100644 index 000000000000..f7adfad455bd --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/default.nix @@ -0,0 +1,940 @@ +{ lib, stdenv, stdenvNoCC, lndir, runtimeShell, shellcheck, haskell }: + +let + inherit (lib) + optionalAttrs + warn + ; +in + +rec { + + /* Run the shell command `buildCommand' to produce a store path named + `name'. The attributes in `env' are added to the environment + prior to running the command. By default `runCommand` runs in a + stdenv with no compiler environment. `runCommandCC` uses the default + stdenv, `pkgs.stdenv`. + + Example: + + + runCommand "name" {envVariable = true;} ''echo hello > $out'' + runCommandCC "name" {} ''gcc -o myfile myfile.c; cp myfile $out''; + + + The `*Local` variants force a derivation to be built locally, + it is not substituted. + + This is intended for very cheap commands (<1s execution time). + It saves on the network roundrip and can speed up a build. + + It is the same as adding the special fields + + `preferLocalBuild = true;` + `allowSubstitutes = false;` + + to a derivation’s attributes. + */ + runCommand = name: env: runCommandWith { + stdenv = stdenvNoCC; + runLocal = false; + inherit name; + derivationArgs = env; + }; + runCommandLocal = name: env: runCommandWith { + stdenv = stdenvNoCC; + runLocal = true; + inherit name; + derivationArgs = env; + }; + + runCommandCC = name: env: runCommandWith { + stdenv = stdenv; + runLocal = false; + inherit name; + derivationArgs = env; + }; + # `runCommandCCLocal` left out on purpose. + # We shouldn’t force the user to have a cc in scope. + + /* Generalized version of the `runCommand`-variants + which does customized behavior via a single + attribute set passed as the first argument + instead of having a lot of variants like + `runCommand*`. Additionally it allows changing + the used `stdenv` freely and has a more explicit + approach to changing the arguments passed to + `stdenv.mkDerivation`. + */ + runCommandWith = + let + # prevent infinite recursion for the default stdenv value + defaultStdenv = stdenv; + in + { + # which stdenv to use, defaults to a stdenv with a C compiler, pkgs.stdenv + stdenv ? defaultStdenv + # whether to build this derivation locally instead of substituting + , runLocal ? false + # extra arguments to pass to stdenv.mkDerivation + , derivationArgs ? {} + # name of the resulting derivation + , name + # TODO(@Artturin): enable strictDeps always + }: buildCommand: + stdenv.mkDerivation ({ + enableParallelBuilding = true; + inherit buildCommand name; + passAsFile = [ "buildCommand" ] + ++ (derivationArgs.passAsFile or []); + } + // lib.optionalAttrs (! 
derivationArgs?meta) { + pos = let args = builtins.attrNames derivationArgs; in + if builtins.length args > 0 + then builtins.unsafeGetAttrPos (builtins.head args) derivationArgs + else null; + } + // (lib.optionalAttrs runLocal { + preferLocalBuild = true; + allowSubstitutes = false; + }) + // builtins.removeAttrs derivationArgs [ "passAsFile" ]); + + + /* Writes a text file to the nix store. + The contents of text is added to the file in the store. + + Example: + + + # Writes my-file to /nix/store/<store path> + writeTextFile { + name = "my-file"; + text = '' + Contents of File + ''; + } + + + See also the `writeText` helper function below. + + + # Writes executable my-file to /nix/store/<store path>/bin/my-file + writeTextFile { + name = "my-file"; + text = '' + Contents of File + ''; + executable = true; + destination = "/bin/my-file"; + } + + + */ + writeTextFile = + { name # the name of the derivation + , text + , executable ? false # run chmod +x ? + , destination ? "" # relative path appended to $out eg "/bin/foo" + , checkPhase ? "" # syntax checks, e.g. for scripts + , meta ? { } + , allowSubstitutes ? false + , preferLocalBuild ? true + }: + let + matches = builtins.match "/bin/([^/]+)" destination; + in + runCommand name + { inherit text executable checkPhase allowSubstitutes preferLocalBuild; + passAsFile = [ "text" ]; + meta = lib.optionalAttrs (executable && matches != null) { + mainProgram = lib.head matches; + } // meta; + } + '' + target=$out${lib.escapeShellArg destination} + mkdir -p "$(dirname "$target")" + + if [ -e "$textPath" ]; then + mv "$textPath" "$target" + else + echo -n "$text" > "$target" + fi + + if [ -n "$executable" ]; then + chmod +x "$target" + fi + + eval "$checkPhase" + ''; + + /* + Writes a text file to nix store with no optional parameters available. + + Example: + + + # Writes contents of file to /nix/store/<store path> + writeText "my-file" + '' + Contents of File + ''; + + + */ + writeText = name: text: writeTextFile {inherit name text;}; + + /* + Writes a text file to nix store in a specific directory with no + optional parameters available. + + Example: + + + # Writes contents of file to /nix/store/<store path>/share/my-file + writeTextDir "share/my-file" + '' + Contents of File + ''; + + + */ + writeTextDir = path: text: writeTextFile { + inherit text; + name = builtins.baseNameOf path; + destination = "/${path}"; + }; + + /* + Writes a text file to /nix/store/<store path> and marks the file as + executable. + + If passed as a build input, will be used as a setup hook. This makes setup + hooks more efficient to create: you don't need a derivation that copies + them to $out/nix-support/setup-hook, instead you can use the file as is. + + Example: + + + # Writes my-file to /nix/store/<store path> and makes executable + writeScript "my-file" + '' + Contents of File + ''; + + + */ + writeScript = name: text: writeTextFile {inherit name text; executable = true;}; + + /* + Writes a text file to /nix/store/<store path>/bin/<name> and + marks the file as executable. + + Example: + + + + # Writes my-file to /nix/store/<store path>/bin/my-file and makes executable. + writeScriptBin "my-file" + '' + Contents of File + ''; + + + */ + writeScriptBin = name: text: writeTextFile { + inherit name text; + executable = true; + destination = "/bin/${name}"; + }; + + /* + Similar to writeScript. Writes a Shell script and checks its syntax. + Automatically includes interpreter above the contents passed. 
+ + Example: + + + # Writes my-file to /nix/store/<store path> and makes executable. + writeShellScript "my-file" + '' + Contents of File + ''; + + + */ + writeShellScript = name: text: + writeTextFile { + inherit name; + executable = true; + text = '' + #!${runtimeShell} + ${text} + ''; + checkPhase = '' + ${stdenv.shellDryRun} "$target" + ''; + }; + + /* + Similar to writeShellScript and writeScriptBin. + Writes an executable Shell script to /nix/store/<store path>/bin/<name> and checks its syntax. + Automatically includes interpreter above the contents passed. + + Example: + + + # Writes my-file to /nix/store/<store path>/bin/my-file and makes executable. + writeShellScriptBin "my-file" + '' + Contents of File + ''; + + + */ + writeShellScriptBin = name : text : + writeTextFile { + inherit name; + executable = true; + destination = "/bin/${name}"; + text = '' + #!${runtimeShell} + ${text} + ''; + checkPhase = '' + ${stdenv.shellDryRun} "$target" + ''; + meta.mainProgram = name; + }; + + /* + Similar to writeShellScriptBin and writeScriptBin. + Writes an executable Shell script to /nix/store/<store path>/bin/<name> and + checks its syntax with shellcheck and the shell's -n option. + Individual checks can be foregone by putting them in the excludeShellChecks + list, e.g. [ "SC2016" ]. + Automatically includes sane set of shellopts (errexit, nounset, pipefail) + and handles creation of PATH based on runtimeInputs + + Note that the checkPhase uses stdenv.shell for the test run of the script, + while the generated shebang uses runtimeShell. If, for whatever reason, + those were to mismatch you might lose fidelity in the default checks. + + Example: + + Writes my-file to /nix/store/<store path>/bin/my-file and makes executable. + + + writeShellApplication { + name = "my-file"; + runtimeInputs = [ curl w3m ]; + text = '' + curl -s 'https://nixos.org' | w3m -dump -T text/html + ''; + } + + */ + writeShellApplication = + { name + , text + , runtimeInputs ? [ ] + , meta ? { } + , checkPhase ? null + , excludeShellChecks ? [ ] + }: + writeTextFile { + inherit name meta; + executable = true; + destination = "/bin/${name}"; + allowSubstitutes = true; + preferLocalBuild = false; + text = '' + #!${runtimeShell} + set -o errexit + set -o nounset + set -o pipefail + '' + lib.optionalString (runtimeInputs != [ ]) '' + + export PATH="${lib.makeBinPath runtimeInputs}:$PATH" + '' + '' + + ${text} + ''; + + checkPhase = + # GHC (=> shellcheck) isn't supported on some platforms (such as risc-v) + # but we still want to use writeShellApplication on those platforms + let + shellcheckSupported = lib.meta.availableOn stdenv.buildPlatform shellcheck.compiler; + excludeOption = lib.optionalString (excludeShellChecks != [ ]) "--exclude '${lib.concatStringsSep "," excludeShellChecks}'"; + shellcheckCommand = lib.optionalString shellcheckSupported '' + # use shellcheck which does not include docs + # pandoc takes long to build and documentation isn't needed for just running the cli + ${lib.getExe (haskell.lib.compose.justStaticExecutables shellcheck.unwrapped)} ${excludeOption} "$target" + ''; + in + if checkPhase == null then '' + runHook preCheck + ${stdenv.shellDryRun} "$target" + ${shellcheckCommand} + runHook postCheck + '' + else checkPhase; + }; + + # Create a C binary + writeCBin = pname: code: + runCommandCC pname + { + inherit pname code; + executable = true; + passAsFile = ["code"]; + # Pointless to do this on a remote machine. 
+ preferLocalBuild = true; + allowSubstitutes = false; + meta = { + mainProgram = pname; + }; + } + '' + n=$out/bin/${pname} + mkdir -p "$(dirname "$n")" + mv "$codePath" code.c + $CC -x c code.c -o "$n" + ''; + + + /* concat a list of files to the nix store. + The contents of files are added to the file in the store. + + Example: + + + # Writes my-file to /nix/store/<store path> + concatTextFile { + name = "my-file"; + files = [ drv1 "${drv2}/path/to/file" ]; + } + + + See also the `concatText` helper function below. + + + # Writes executable my-file to /nix/store/<store path>/bin/my-file + concatTextFile { + name = "my-file"; + files = [ drv1 "${drv2}/path/to/file" ]; + executable = true; + destination = "/bin/my-file"; + } + + + */ + concatTextFile = + { name # the name of the derivation + , files + , executable ? false # run chmod +x ? + , destination ? "" # relative path appended to $out eg "/bin/foo" + , checkPhase ? "" # syntax checks, e.g. for scripts + , meta ? { } + }: + runCommandLocal name + { inherit files executable checkPhase meta destination; } + '' + file=$out$destination + mkdir -p "$(dirname "$file")" + cat $files > "$file" + + if [ -n "$executable" ]; then + chmod +x "$file" + fi + + eval "$checkPhase" + ''; + + + /* + Writes a text file to nix store with no optional parameters available. + + Example: + + + # Writes contents of files to /nix/store/<store path> + concatText "my-file" [ file1 file2 ] + + + */ + concatText = name: files: concatTextFile { inherit name files; }; + + /* + Writes a text file to nix store with and mark it as executable. + + Example: + # Writes contents of files to /nix/store/<store path> + concatScript "my-file" [ file1 file2 ] + + */ + concatScript = name: files: concatTextFile { inherit name files; executable = true; }; + + + /* + Create a forest of symlinks to the files in `paths'. + + This creates a single derivation that replicates the directory structure + of all the input paths. + + BEWARE: it may not "work right" when the passed paths contain symlinks to directories. + + Example: + + + # adds symlinks of hello to current build. + symlinkJoin { name = "myhello"; paths = [ pkgs.hello ]; } + + + + + # adds symlinks of hello and stack to current build and prints "links added" + symlinkJoin { name = "myexample"; paths = [ pkgs.hello pkgs.stack ]; postBuild = "echo links added"; } + + + This creates a derivation with a directory structure like the following: + + + /nix/store/sglsr5g079a5235hy29da3mq3hv8sjmm-myexample + |-- bin + | |-- hello -> /nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10/bin/hello + | `-- stack -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/bin/stack + `-- share + |-- bash-completion + | `-- completions + | `-- stack -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/share/bash-completion/completions/stack + |-- fish + | `-- vendor_completions.d + | `-- stack.fish -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1/share/fish/vendor_completions.d/stack.fish + ... + + + symlinkJoin and linkFarm are similar functions, but they output + derivations with different structure. + + symlinkJoin is used to create a derivation with a familiar directory + structure (top-level bin/, share/, etc), but with all actual files being symlinks to + the files in the input derivations. + + symlinkJoin is used many places in nixpkgs to create a single derivation + that appears to contain binaries, libraries, documentation, etc from + multiple input derivations. 
+ + linkFarm is instead used to create a simple derivation with symlinks to + other derivations. A derivation created with linkFarm is often used in CI + as a easy way to build multiple derivations at once. + */ + symlinkJoin = + args_@{ name + , paths + , preferLocalBuild ? true + , allowSubstitutes ? false + , postBuild ? "" + , ... + }: + let + args = removeAttrs args_ [ "name" "postBuild" ] + // { + inherit preferLocalBuild allowSubstitutes; + passAsFile = [ "paths" ]; + }; # pass the defaults + in runCommand name args + '' + mkdir -p $out + for i in $(cat $pathsPath); do + ${lndir}/bin/lndir -silent $i $out + done + ${postBuild} + ''; + + /* + Quickly create a set of symlinks to derivations. + + This creates a simple derivation with symlinks to all inputs. + + entries can be a list of attribute sets like + + [ { name = "name" ; path = "/nix/store/..."; } ] + + + or an attribute set name -> path like: + + { name = "/nix/store/..."; other = "/nix/store/..."; } + + + Example: + + # Symlinks hello and stack paths in store to current $out/hello-test and + # $out/foobar. + linkFarm "myexample" [ { name = "hello-test"; path = pkgs.hello; } { name = "foobar"; path = pkgs.stack; } ] + + This creates a derivation with a directory structure like the following: + + /nix/store/qc5728m4sa344mbks99r3q05mymwm4rw-myexample + |-- foobar -> /nix/store/6lzdpxshx78281vy056lbk553ijsdr44-stack-2.1.3.1 + `-- hello-test -> /nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10 + + + See the note on symlinkJoin for the difference between linkFarm and symlinkJoin. + */ + linkFarm = name: entries: + let + entries' = + if (lib.isAttrs entries) then entries + # We do this foldl to have last-wins semantics in case of repeated entries + else if (lib.isList entries) then lib.foldl (a: b: a // { "${b.name}" = b.path; }) { } entries + else throw "linkFarm entries must be either attrs or a list!"; + + linkCommands = lib.mapAttrsToList (name: path: '' + mkdir -p "$(dirname ${lib.escapeShellArg "${name}"})" + ln -s ${lib.escapeShellArg "${path}"} ${lib.escapeShellArg "${name}"} + '') entries'; + in + runCommand name { + preferLocalBuild = true; + allowSubstitutes = false; + passthru.entries = entries'; + } '' + mkdir -p $out + cd $out + ${lib.concatStrings linkCommands} + ''; + + /* + Easily create a linkFarm from a set of derivations. + + This calls linkFarm with a list of entries created from the list of input + derivations. It turns each input derivation into an attribute set + like { name = drv.name ; path = drv }, and passes this to linkFarm. + + Example: + + # Symlinks the hello, gcc, and ghc derivations in $out + linkFarmFromDrvs "myexample" [ pkgs.hello pkgs.gcc pkgs.ghc ] + + This creates a derivation with a directory structure like the following: + + + /nix/store/m3s6wkjy9c3wy830201bqsb91nk2yj8c-myexample + |-- gcc-wrapper-9.2.0 -> /nix/store/fqhjxf9ii4w4gqcsx59fyw2vvj91486a-gcc-wrapper-9.2.0 + |-- ghc-8.6.5 -> /nix/store/gnf3s07bglhbbk4y6m76sbh42siym0s6-ghc-8.6.5 + `-- hello-2.10 -> /nix/store/k0ll91c4npk4lg8lqhx00glg2m735g74-hello-2.10 + + */ + linkFarmFromDrvs = name: drvs: + let mkEntryFromDrv = drv: { name = drv.name; path = drv; }; + in linkFarm name (map mkEntryFromDrv drvs); + + + # docs in doc/builders/special/makesetuphook.section.md + makeSetupHook = + { name ? lib.warn "calling makeSetupHook without passing a name is deprecated." "hook" + , deps ? [ ] + # hooks go in nativeBuildInput so these will be nativeBuildInput + , propagatedBuildInputs ? 
[ ] + # these will be buildInputs + , depsTargetTargetPropagated ? [ ] + , meta ? { } + , passthru ? { } + , substitutions ? { } + }: + script: + runCommand name + (substitutions // { + # TODO(@Artturin:) substitutions should be inside the env attrset + # but users are likely passing non-substitution arguments through substitutions + # turn off __structuredAttrs to unbreak substituteAll + __structuredAttrs = false; + inherit meta; + inherit depsTargetTargetPropagated; + propagatedBuildInputs = + # remove list conditionals before 23.11 + lib.warnIf (!lib.isList deps) "'deps' argument to makeSetupHook must be a list. content of deps: ${toString deps}" + (lib.warnIf (deps != [ ]) "'deps' argument to makeSetupHook is deprecated and will be removed in release 23.11., Please use propagatedBuildInputs instead. content of deps: ${toString deps}" + propagatedBuildInputs ++ (if lib.isList deps then deps else [ deps ])); + strictDeps = true; + # TODO 2023-01, no backport: simplify to inherit passthru; + passthru = passthru + // optionalAttrs (substitutions?passthru) + (warn "makeSetupHook (name = ${lib.strings.escapeNixString name}): `substitutions.passthru` is deprecated. Please set `passthru` directly." + substitutions.passthru); + }) + ('' + mkdir -p $out/nix-support + cp ${script} $out/nix-support/setup-hook + recordPropagatedDependencies + '' + lib.optionalString (substitutions != {}) '' + substituteAll ${script} $out/nix-support/setup-hook + ''); + + + # Write the references (i.e. the runtime dependencies in the Nix store) of `path' to a file. + + writeReferencesToFile = path: runCommand "runtime-deps" + { + exportReferencesGraph = ["graph" path]; + } + '' + touch $out + while read path; do + echo $path >> $out + read dummy + read nrRefs + for ((i = 0; i < nrRefs; i++)); do read ref; done + done < graph + ''; + + /* + Write the set of references to a file, that is, their immediate dependencies. + + This produces the equivalent of `nix-store -q --references`. + */ + writeDirectReferencesToFile = path: runCommand "runtime-references" + { + exportReferencesGraph = ["graph" path]; + inherit path; + } + '' + touch ./references + while read p; do + read dummy + read nrRefs + if [[ $p == $path ]]; then + for ((i = 0; i < nrRefs; i++)); do + read ref; + echo $ref >>./references + done + else + for ((i = 0; i < nrRefs; i++)); do + read ref; + done + fi + done < graph + sort ./references >$out + ''; + + + /* + Extract a string's references to derivations and paths (its + context) and write them to a text file, removing the input string + itself from the dependency graph. This is useful when you want to + make a derivation depend on the string's references, but not its + contents (to avoid unnecessary rebuilds, for example). + + Note that this only works as intended on Nix >= 2.3. + */ + writeStringReferencesToFile = string: + /* + The basic operation this performs is to copy the string context + from `string' to a second string and wrap that string in a + derivation. However, that alone is not enough, since nothing in the + string refers to the output paths of the derivations/paths in its + context, meaning they'll be considered build-time dependencies and + removed from the wrapper derivation's closure. Putting the + necessary output paths in the new string is however not very + straightforward - the attrset returned by `getContext' contains + only references to derivations' .drv-paths, not their output + paths. 
In order to "convert" them, we try to extract the + corresponding paths from the original string using regex. + */ + let + # Taken from https://github.com/NixOS/nix/blob/130284b8508dad3c70e8160b15f3d62042fc730a/src/libutil/hash.cc#L84 + nixHashChars = "0123456789abcdfghijklmnpqrsvwxyz"; + context = builtins.getContext string; + derivations = lib.filterAttrs (n: v: v ? outputs) context; + # Objects copied from outside of the store, such as paths and + # `builtins.fetch*`ed ones + sources = lib.attrNames (lib.filterAttrs (n: v: v ? path) context); + packages = + lib.mapAttrs' + (name: value: + { + inherit value; + name = lib.head (builtins.match "${builtins.storeDir}/[${nixHashChars}]+-(.*)\.drv" name); + }) + derivations; + # The syntax of output paths differs between outputs named `out` + # and other, explicitly named ones. For explicitly named ones, + # the output name is suffixed as `-name`, but `out` outputs + # aren't suffixed at all, and thus aren't easily distinguished + # from named output paths. Therefore, we find all the named ones + # first so we can use them to remove false matches when looking + # for `out` outputs (see the definition of `outputPaths`). + namedOutputPaths = + lib.flatten + (lib.mapAttrsToList + (name: value: + (map + (output: + lib.filter + lib.isList + (builtins.split "(${builtins.storeDir}/[${nixHashChars}]+-${name}-${output})" string)) + (lib.remove "out" value.outputs))) + packages); + # Only `out` outputs + outputPaths = + lib.flatten + (lib.mapAttrsToList + (name: value: + if lib.elem "out" value.outputs then + lib.filter + (x: lib.isList x && + # If the matched path is in `namedOutputPaths`, + # it's a partial match of an output path where + # the output name isn't `out` + lib.all (o: !lib.hasPrefix (lib.head x) o) namedOutputPaths) + (builtins.split "(${builtins.storeDir}/[${nixHashChars}]+-${name})" string) + else + []) + packages); + allPaths = lib.concatStringsSep "\n" (lib.unique (sources ++ namedOutputPaths ++ outputPaths)); + allPathsWithContext = builtins.appendContext allPaths context; + in + if builtins ? getContext then + writeText "string-references" allPathsWithContext + else + writeDirectReferencesToFile (writeText "string-file" string); + + + /* Print an error message if the file with the specified name and + hash doesn't exist in the Nix store. This function should only + be used by non-redistributable software with an unfree license + that we need to require the user to download manually. It produces + packages that cannot be built automatically. + + Example: + + requireFile { + name = "my-file"; + url = "http://example.com/download/"; + sha256 = "ffffffffffffffffffffffffffffffffffffffffffffffffffff"; + } + + */ + requireFile = { name ? null + , sha256 ? null + , sha1 ? null + , hash ? null + , url ? null + , message ? null + , hashMode ? "flat" + } : + assert (message != null) || (url != null); + assert (sha256 != null) || (sha1 != null) || (hash != null); + assert (name != null) || (url != null); + let msg = + if message != null then message + else '' + Unfortunately, we cannot download file ${name_} automatically. 
+ Please go to ${url} to download it yourself, and add it to the Nix store + using either + nix-store --add-fixed ${hashAlgo} ${name_} + or + nix-prefetch-url --type ${hashAlgo} file:///path/to/${name_} + ''; + hashAlgo = if hash != null then (builtins.head (lib.strings.splitString "-" hash)) + else if sha256 != null then "sha256" + else "sha1"; + hashAlgo_ = if hash != null then "" else hashAlgo; + hash_ = if hash != null then hash + else if sha256 != null then sha256 + else sha1; + name_ = if name == null then baseNameOf (toString url) else name; + in + stdenvNoCC.mkDerivation { + name = name_; + outputHashMode = hashMode; + outputHashAlgo = hashAlgo_; + outputHash = hash_; + preferLocalBuild = true; + allowSubstitutes = false; + builder = writeScript "restrict-message" '' + source ${stdenvNoCC}/setup + cat <<_EOF_ + + *** + ${msg} + *** + + _EOF_ + exit 1 + ''; + }; + + + /* + Copy a path to the Nix store. + Nix automatically copies files to the store before stringifying paths. + If you need the store path of a file, ${copyPathToStore <path>} can be + shortened to ${<path>}. + */ + copyPathToStore = builtins.filterSource (p: t: true); + + + /* + Copy a list of paths to the Nix store. + */ + copyPathsToStore = builtins.map copyPathToStore; + + /* Applies a list of patches to a source directory. + + Example: + + # Patching nixpkgs: + + applyPatches { + src = pkgs.path; + patches = [ + (pkgs.fetchpatch { + url = "https://github.com/NixOS/nixpkgs/commit/1f770d20550a413e508e081ddc08464e9d08ba3d.patch"; + sha256 = "1nlzx171y3r3jbk0qhvnl711kmdk57jlq4na8f8bs8wz2pbffymr"; + }) + ]; + } + + */ + applyPatches = + { src + , name ? (if builtins.typeOf src == "path" + then builtins.baseNameOf src + else + if builtins.isAttrs src && builtins.hasAttr "name" src + then src.name + else throw "applyPatches: please supply a `name` argument because a default name can only be computed when the `src` is a path or is an attribute set with a `name` attribute." + ) + "-patched" + , patches ? [] + , postPatch ? "" + , ... + }@args: stdenvNoCC.mkDerivation { + inherit name src patches postPatch; + preferLocalBuild = true; + allowSubstitutes = false; + phases = "unpackPhase patchPhase installPhase"; + installPhase = "cp -R ./ $out"; + } + # Carry `meta` information from the underlying `src` if present. + // (optionalAttrs (src?meta) { inherit (src) meta; }) + // (removeAttrs args [ "src" "name" "patches" "postPatch" ]); + + /* An immutable file in the store with a length of 0 bytes. */ + emptyFile = runCommand "empty-file" { + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = "0ip26j2h11n1kgkz36rl4akv694yz65hr72q4kv4b3lxcbi65b3p"; + preferLocalBuild = true; + } "touch $out"; + + /* An immutable empty directory in the store. */ + emptyDirectory = runCommand "empty-directory" { + outputHashAlgo = "sha256"; + outputHashMode = "recursive"; + outputHash = "0sjjj9z1dhilhpc8pq4154czrb79z9cm044jvn75kxcjv6v5l2m5"; + preferLocalBuild = true; + } "mkdir $out"; +} diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test-overriding.nix b/nixpkgs/pkgs/build-support/trivial-builders/test-overriding.nix new file mode 100644 index 000000000000..a16bbbee1b1b --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test-overriding.nix @@ -0,0 +1,110 @@ +# Check that overriding works for trivial-builders like +# `writeShellScript` via `overrideAttrs`. This is useful +# to override the `checkPhase`, e. g. if you want +# to disable extglob in `writeShellScript`. 
+# +# Run using `nix-build -A tests.trivial-builders.overriding`. +{ lib +, stdenv +, runtimeShell +, runCommand +, callPackage +, writeShellScript +, writeTextFile +, writeShellScriptBin +}: + +let + extglobScript = '' + shopt -s extglob + touch success + echo @(success|failure) + rm success + ''; + + simpleCase = case: + writeShellScript "test-trivial-overriding-${case}" extglobScript; + + callPackageCase = case: callPackage ( + { writeShellScript }: + writeShellScript "test-trivial-callpackage-overriding-${case}" extglobScript + ) { }; + + binCase = case: + writeShellScriptBin "test-trivial-overriding-bin-${case}" extglobScript; + + # building this derivation would fail without overriding + textFileCase = writeTextFile { + name = "test-trivial-overriding-text-file"; + checkPhase = "false"; + text = '' + #!${runtimeShell} + echo success + ''; + executable = true; + }; + + disallowExtglob = x: x.overrideAttrs (_: { + checkPhase = '' + ${stdenv.shell} -n "$target" + ''; + }); + + # Run old checkPhase, but only succeed if it fails. + # This HACK is required because we can't introspect build failures + # in nix: With `assertFail` we want to make sure that the default + # `checkPhase` would fail if extglob was used in the script. + assertFail = x: x.overrideAttrs (old: { + checkPhase = '' + if + ${old.checkPhase} + then exit 1; fi + ''; + }); + + mkCase = case: outcome: isBin: + let + drv = lib.pipe outcome ([ case ] ++ lib.optionals (outcome == "fail") [ disallowExtglob assertFail ]); + in if isBin then "${drv}/bin/${drv.name}" else drv; + + writeTextOverrides = { + # Make sure extglob works by default + simpleSucc = mkCase simpleCase "succ" false; + # Ensure it's possible to fail; in this case extglob is not enabled + simpleFail = mkCase simpleCase "fail" false; + # Do the same checks after wrapping with callPackage + # to make sure callPackage doesn't mess with the override + callpSucc = mkCase callPackageCase "succ" false; + callpFail = mkCase callPackageCase "fail" false; + # Do the same check using `writeShellScriptBin` + binSucc = mkCase binCase "succ" true; + binFail = mkCase binCase "fail" true; + # Check that we can also override plain writeTextFile + textFileSuccess = textFileCase.overrideAttrs (_: { + checkPhase = "true"; + }); + }; + + # `runTest` forces nix to build the script of our test case and + # run its `checkPhase` which is our main interest. Additionally + # it executes the script and thus makes sure that extglob also + # works at run time. 
+ runTest = script: + let + name = script.name or (builtins.baseNameOf script); + in writeShellScript "run-${name}" '' + if [ "$(${script})" != "success" ]; then + echo "Failed in ${name}" + exit 1 + fi + ''; +in + +runCommand "test-writeShellScript-overriding" { + passthru = { inherit writeTextOverrides; }; +} '' + ${lib.concatMapStrings (test: '' + ${runTest test} + '') (lib.attrValues writeTextOverrides)} + touch "$out" +'' diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/concat-test.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/concat-test.nix new file mode 100644 index 000000000000..5ce435619069 --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/concat-test.nix @@ -0,0 +1,12 @@ +{ callPackage, lib, pkgs, runCommand, concatText, writeText, hello, emptyFile }: +let + stri = writeText "pathToTest"; + txt1 = stri "abc"; + txt2 = stri hello; + res = concatText "textToTest" [ txt1 txt2 ]; +in +runCommand "test-concatPaths" { } '' + diff -U3 <(cat ${txt1} ${txt2}) ${res} + diff -U3 ${concatText "void" []} ${emptyFile} + touch $out +'' diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/default.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/default.nix new file mode 100644 index 000000000000..cbd1b388ef66 --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/default.nix @@ -0,0 +1,34 @@ +/* + Run all tests with: + + cd nixpkgs + nix-build -A tests.trivial-builders + + or run a specific test with: + + cd nixpkgs + nix-build -A tests.trivial-builders.foo + +*/ + +{ callPackage, lib, stdenv }: +let + inherit (lib) recurseIntoAttrs; +in +recurseIntoAttrs { + concat = callPackage ./concat-test.nix {}; + linkFarm = callPackage ./link-farm.nix {}; + overriding = callPackage ../test-overriding.nix {}; + references = + # VM test not supported beyond linux yet + if stdenv.hostPlatform.isLinux + then callPackage ./references.nix {} + else null; + writeCBin = callPackage ./writeCBin.nix {}; + writeShellApplication = callPackage ./writeShellApplication.nix {}; + writeScriptBin = callPackage ./writeScriptBin.nix {}; + writeShellScript = callPackage ./write-shell-script.nix {}; + writeShellScriptBin = callPackage ./writeShellScriptBin.nix {}; + writeStringReferencesToFile = callPackage ./writeStringReferencesToFile.nix {}; + writeTextFile = callPackage ./write-text-file.nix {}; +} diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeDirectReferencesToFile.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeDirectReferencesToFile.nix new file mode 100644 index 000000000000..ead3f7a2f571 --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeDirectReferencesToFile.nix @@ -0,0 +1,4 @@ +{ pkgs ? import ../../../.. { config = {}; overlays = []; } }: +pkgs.lib.mapAttrs + (k: v: pkgs.writeDirectReferencesToFile v) + (import ./sample.nix { inherit pkgs; }) diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeReferencesToFile.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeReferencesToFile.nix new file mode 100644 index 000000000000..99c6c2f7dcc4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/invoke-writeReferencesToFile.nix @@ -0,0 +1,4 @@ +{ pkgs ? import ../../../.. 
{ config = {}; overlays = []; } }: +pkgs.lib.mapAttrs + (k: v: pkgs.writeReferencesToFile v) + (import ./sample.nix { inherit pkgs; }) diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/link-farm.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/link-farm.nix new file mode 100644 index 000000000000..1ebfc707632f --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/link-farm.nix @@ -0,0 +1,45 @@ +{ linkFarm, hello, writeTextFile, runCommand }: +let + foo = writeTextFile { + name = "foo"; + text = "foo"; + }; + + linkFarmFromList = linkFarm "linkFarmFromList" [ + { name = "foo"; path = foo; } + { name = "hello"; path = hello; } + ]; + + linkFarmWithRepeats = linkFarm "linkFarmWithRepeats" [ + { name = "foo"; path = foo; } + { name = "hello"; path = hello; } + { name = "foo"; path = hello; } + ]; + + linkFarmFromAttrs = linkFarm "linkFarmFromAttrs" { + inherit foo hello; + }; +in +runCommand "test-linkFarm" { } '' + function assertPathEquals() { + local a b; + a="$(realpath "$1")" + b="$(realpath "$2")" + if [ "$a" != "$b" ]; then + echo "path mismatch!" + echo "a: $1 -> $a" + echo "b: $2 -> $b" + exit 1 + fi + } + + assertPathEquals "${linkFarmFromList}/foo" "${foo}" + assertPathEquals "${linkFarmFromList}/hello" "${hello}" + + assertPathEquals "${linkFarmWithRepeats}/foo" "${hello}" + assertPathEquals "${linkFarmWithRepeats}/hello" "${hello}" + + assertPathEquals "${linkFarmFromAttrs}/foo" "${foo}" + assertPathEquals "${linkFarmFromAttrs}/hello" "${hello}" + touch $out +'' diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/references-test.sh b/nixpkgs/pkgs/build-support/trivial-builders/test/references-test.sh new file mode 100755 index 000000000000..473ca6e10769 --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/references-test.sh @@ -0,0 +1,62 @@ +#!/usr/bin/env bash + +# -------------------------------------------------------------------------- # +# +# trivial-builders test +# +# -------------------------------------------------------------------------- # +# +# This file can be run independently (quick): +# +# $ pkgs/build-support/trivial-builders/references-test.sh +# +# or in the build sandbox with a ~20s VM overhead +# +# $ nix-build -A tests.trivial-builders.references +# +# -------------------------------------------------------------------------- # + +# strict bash +set -euo pipefail + +# debug +# set -x +# PS4='+(${BASH_SOURCE}:${LINENO}): ${FUNCNAME[0]:+${FUNCNAME[0]}(): }' + +cd "$(dirname ${BASH_SOURCE[0]})" # nixpkgs root + +if [[ -z ${SAMPLE:-} ]]; then + echo "Running the script directly is currently not supported." + echo "If you need to iterate, remove the raw path, which is not returned by nix-build." + exit 1 +# sample=( `nix-build --no-out-link sample.nix` ) +# directRefs=( `nix-build --no-out-link invoke-writeDirectReferencesToFile.nix` ) +# references=( `nix-build --no-out-link invoke-writeReferencesToFile.nix` ) +# echo "sample: ${#sample[@]}" +# echo "direct: ${#directRefs[@]}" +# echo "indirect: ${#references[@]}" +else + # Injected by Nix (to avoid evaluating in a derivation) + # turn them into arrays + sample=($SAMPLE) + directRefs=($DIRECT_REFS) + references=($REFERENCES) +fi + +echo >&2 Testing direct references... +for i in "${!sample[@]}"; do + echo >&2 Checking '#'$i ${sample[$i]} ${directRefs[$i]} + diff -U3 \ + <(sort <${directRefs[$i]}) \ + <(nix-store -q --references ${sample[$i]} | sort) +done + +echo >&2 Testing closure... 
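+# writeReferencesToFile is expected to list the full runtime closure, so this
+# time we compare against `nix-store -q --requisites` rather than the direct
+# references (`--references`) checked above.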
+for i in "${!sample[@]}"; do + echo >&2 Checking '#'$i ${sample[$i]} ${references[$i]} + diff -U3 \ + <(sort <${references[$i]}) \ + <(nix-store -q --requisites ${sample[$i]} | sort) +done + +echo 'OK!' diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/references.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/references.nix new file mode 100644 index 000000000000..7c8ea83f3c8b --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/references.nix @@ -0,0 +1,52 @@ +{ lib, testers, pkgs, writeText, hello, figlet, stdenvNoCC }: + +# -------------------------------------------------------------------------- # +# +# trivial-builders test +# +# -------------------------------------------------------------------------- # +# +# This file can be run independently (quick): +# +# $ pkgs/build-support/trivial-builders/references-test.sh +# +# or in the build sandbox with a ~20s VM overhead +# +# $ nix-build -A tests.trivial-builders.references +# +# -------------------------------------------------------------------------- # + +let + invokeSamples = file: + lib.concatStringsSep " " ( + lib.attrValues (import file { inherit pkgs; }) + ); +in +testers.nixosTest { + name = "nixpkgs-trivial-builders"; + nodes.machine = { ... }: { + virtualisation.writableStore = true; + + # Test runs without network, so we don't substitute and prepare our deps + nix.settings.substituters = lib.mkForce []; + environment.etc."pre-built-paths".source = writeText "pre-built-paths" ( + builtins.toJSON [hello figlet stdenvNoCC] + ); + environment.variables = { + SAMPLE = invokeSamples ./sample.nix; + REFERENCES = invokeSamples ./invoke-writeReferencesToFile.nix; + DIRECT_REFS = invokeSamples ./invoke-writeDirectReferencesToFile.nix; + }; + }; + testScript = + '' + machine.succeed(""" + ${./references-test.sh} 2>/dev/console + """) + ''; + meta = { + maintainers = with lib.maintainers; [ + roberth + ]; + }; +} diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/sample.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/sample.nix new file mode 100644 index 000000000000..a4eedce8417e --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/sample.nix @@ -0,0 +1,29 @@ +{ pkgs ? import ../../../.. 
{ config = { }; overlays = [ ]; } }: +let + inherit (pkgs) + figlet + zlib + hello + writeText + runCommand + ; +in +{ + hello = hello; + figlet = figlet; + zlib = zlib; + zlib-dev = zlib.dev; + norefs = writeText "hi" "hello"; + norefsDup = writeText "hi" "hello"; + helloRef = writeText "hi" "hello ${hello}"; + helloRefDup = writeText "hi" "hello ${hello}"; + path = ./invoke-writeReferencesToFile.nix; + pathLike.outPath = ./invoke-writeReferencesToFile.nix; + helloFigletRef = writeText "hi" "hello ${hello} ${figlet}"; + selfRef = runCommand "self-ref-1" {} "echo $out >$out"; + selfRef2 = runCommand "self-ref-2" {} ''echo "${figlet}, $out" >$out''; + inherit (pkgs) + emptyFile + emptyDirectory + ; +} diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/write-shell-script.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/write-shell-script.nix new file mode 100644 index 000000000000..a5c9f1fae42f --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/write-shell-script.nix @@ -0,0 +1,14 @@ +{ lib, writeShellScript }: let + output = "hello"; +in (writeShellScript "test-script" '' + echo ${lib.escapeShellArg output} +'').overrideAttrs (old: { + checkPhase = old.checkPhase or "" + '' + expected=${lib.escapeShellArg output} + got=$("$target") + if [[ "$got" != "$expected" ]]; then + echo "wrong output: expected $expected, got $got" + exit 1 + fi + ''; +}) diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/write-text-file.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/write-text-file.nix new file mode 100644 index 000000000000..2e6685c1980b --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/write-text-file.nix @@ -0,0 +1,71 @@ +/* + To run: + + cd nixpkgs + nix-build -A tests.trivial-builders.writeTextFile + + or to run an individual test case + + cd nixpkgs + nix-build -A tests.trivial-builders.writeTextFile.foo +*/ +{ lib, runCommand, runtimeShell, writeTextFile }: +let + veryWeirdName = ''here's a name with some "bad" characters, like spaces and quotes''; +in +lib.recurseIntoAttrs { + + different-exe-name = + let + pkg = writeTextFile { + name = "bar"; + destination = "/bin/foo"; + executable = true; + text = '' + #!${runtimeShell} + echo hi + ''; + }; + in + assert pkg.meta.mainProgram == "foo"; + assert baseNameOf (lib.getExe pkg) == "foo"; + assert pkg.name == "bar"; + runCommand "test-writeTextFile-different-exe-name" {} '' + PATH="${lib.makeBinPath [ pkg ]}:$PATH" + x=$(foo) + [[ "$x" == hi ]] + touch $out + ''; + + weird-name = writeTextFile { + name = "weird-names"; + destination = "/etc/${veryWeirdName}"; + text = ''passed!''; + checkPhase = '' + # intentionally hardcode everything here, to make sure + # Nix does not mess with file paths + + name="here's a name with some \"bad\" characters, like spaces and quotes" + fullPath="$out/etc/$name" + + if [ -f "$fullPath" ]; then + echo "[PASS] File exists!" + else + echo "[FAIL] File was not created at expected path!" + exit 1 + fi + + content=$(<"$fullPath") + expected="passed!" + + if [ "$content" = "$expected" ]; then + echo "[PASS] Contents match!" + else + echo "[FAIL] File contents don't match!" 
+ echo " Expected: $expected" + echo " Got: $content" + exit 2 + fi + ''; + }; +} diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/writeCBin.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/writeCBin.nix new file mode 100644 index 000000000000..56cab45b3801 --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/writeCBin.nix @@ -0,0 +1,43 @@ +/* + Run with: + + cd nixpkgs + nix-build -A tests.trivial-builders.writeCBin +*/ + +{ lib, writeCBin, runCommand }: +let + output = "hello"; + pkg = writeCBin "test-script" '' + #include <stdio.h> + int main () { + printf("hello\n"); + return 0; + } + ''; +in + assert pkg.meta.mainProgram == "test-script"; + runCommand "test-writeCBin" { } '' + + echo Testing with getExe... + + target=${lib.getExe pkg} + expected=${lib.escapeShellArg output} + got=$("$target") + if [[ "$got" != "$expected" ]]; then + echo "wrong output: expected $expected, got $got" + exit 1 + fi + + echo Testing with makeBinPath... + + PATH="${lib.makeBinPath [ pkg ]}:$PATH" + got=$(test-script) + if [[ "$got" != "$expected" ]]; then + echo "wrong output: expected $expected, got $got" + exit 1 + fi + + touch $out + '' + diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/writeScriptBin.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/writeScriptBin.nix new file mode 100644 index 000000000000..1487443130da --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/writeScriptBin.nix @@ -0,0 +1,39 @@ +/* + Run with: + + cd nixpkgs + nix-build -A tests.trivial-builders.writeShellScriptBin +*/ + +{ lib, writeScriptBin, runCommand }: +let + output = "hello"; + pkg = writeScriptBin "test-script" '' + echo ${lib.escapeShellArg output} + ''; +in + assert pkg.meta.mainProgram == "test-script"; + runCommand "test-writeScriptBin" { } '' + + echo Testing with getExe... + + target=${lib.getExe pkg} + expected=${lib.escapeShellArg output} + got=$("$target") + if [[ "$got" != "$expected" ]]; then + echo "wrong output: expected $expected, got $got" + exit 1 + fi + + echo Testing with makeBinPath... + + PATH="${lib.makeBinPath [ pkg ]}:$PATH" + got=$(test-script) + if [[ "$got" != "$expected" ]]; then + echo "wrong output: expected $expected, got $got" + exit 1 + fi + + touch $out + '' + diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/writeShellApplication.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/writeShellApplication.nix new file mode 100644 index 000000000000..6ce6f0720fcf --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/writeShellApplication.nix @@ -0,0 +1,29 @@ +/* + Run with: + + cd nixpkgs + nix-build -A tests.trivial-builders.writeShellApplication +*/ + +{ lib, writeShellApplication, runCommand }: +let + pkg = writeShellApplication { + name = "test-script"; + excludeShellChecks = [ "SC2016" ]; + text = '' + echo -e '#!/usr/bin/env bash\n' \ + 'echo "$SHELL"' > /tmp/something.sh # this line would normally + # ...cause shellcheck error + ''; + }; +in + assert pkg.meta.mainProgram == "test-script"; + runCommand "test-writeShellApplication" { } '' + + echo Testing if writeShellApplication builds without shellcheck error... 
+ + target=${lib.getExe pkg} + + touch $out + '' + diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/writeShellScriptBin.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/writeShellScriptBin.nix new file mode 100644 index 000000000000..e93410e25bcb --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/writeShellScriptBin.nix @@ -0,0 +1,39 @@ +/* + Run with: + + cd nixpkgs + nix-build -A tests.trivial-builders.writeShellScriptBin +*/ + +{ lib, writeShellScriptBin, runCommand }: +let + output = "hello"; + pkg = writeShellScriptBin "test-script" '' + echo ${lib.escapeShellArg output} + ''; +in + assert pkg.meta.mainProgram == "test-script"; + runCommand "test-writeShellScriptBin" { } '' + + echo Testing with getExe... + + target=${lib.getExe pkg} + expected=${lib.escapeShellArg output} + got=$("$target") + if [[ "$got" != "$expected" ]]; then + echo "wrong output: expected $expected, got $got" + exit 1 + fi + + echo Testing with makeBinPath... + + PATH="${lib.makeBinPath [ pkg ]}:$PATH" + got=$(test-script) + if [[ "$got" != "$expected" ]]; then + echo "wrong output: expected $expected, got $got" + exit 1 + fi + + touch $out + '' + diff --git a/nixpkgs/pkgs/build-support/trivial-builders/test/writeStringReferencesToFile.nix b/nixpkgs/pkgs/build-support/trivial-builders/test/writeStringReferencesToFile.nix new file mode 100644 index 000000000000..b93b43b74aa4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/trivial-builders/test/writeStringReferencesToFile.nix @@ -0,0 +1,18 @@ +{ callPackage, lib, pkgs, runCommand, writeText, writeStringReferencesToFile }: +let + sample = import ./sample.nix { inherit pkgs; }; + samplePaths = lib.unique (lib.attrValues sample); + stri = x: "${x}"; + sampleText = writeText "sample-text" (lib.concatStringsSep "\n" (lib.unique (map stri samplePaths))); + stringReferencesText = + writeStringReferencesToFile + ((lib.concatMapStringsSep "fillertext" + stri + (lib.attrValues sample)) + '' + STORE=${builtins.storeDir};\nsystemctl start bar-foo.service + ''); +in +runCommand "test-writeStringReferencesToFile" { } '' + diff -U3 <(sort ${stringReferencesText}) <(sort ${sampleText}) + touch $out +'' diff --git a/nixpkgs/pkgs/build-support/vm/deb/deb-closure.pl b/nixpkgs/pkgs/build-support/vm/deb/deb-closure.pl new file mode 100644 index 000000000000..2d331e18dfeb --- /dev/null +++ b/nixpkgs/pkgs/build-support/vm/deb/deb-closure.pl @@ -0,0 +1,180 @@ +use strict; +use Dpkg::Control; +use Dpkg::Deps; +use File::Basename; + +my $packagesFile = shift @ARGV; +my $urlPrefix = shift @ARGV; +my @toplevelPkgs = @ARGV; + + +my %packages; + + +# Parse the Packages file. +open PACKAGES, "<$packagesFile" or die; + +while (1) { + my $cdata = Dpkg::Control->new(type => CTRL_INFO_PKG); + last if not $cdata->parse(\*PACKAGES, $packagesFile); + die unless defined $cdata->{Package}; + #print STDERR $cdata->{Package}, "\n"; + $packages{$cdata->{Package}} = $cdata; +} + +close PACKAGES; + + +# Flatten a Dpkg::Deps dependency value into a list of package names. +sub getDeps { + my $deps = shift; + #print "$deps\n"; + if ($deps->isa('Dpkg::Deps::AND')) { + my @res = (); + foreach my $dep ($deps->get_deps()) { + push @res, getDeps($dep); + } + return @res; + } elsif ($deps->isa('Dpkg::Deps::OR')) { + # Arbitrarily pick the first alternative. 
+ return getDeps(($deps->get_deps())[0]); + } elsif ($deps->isa('Dpkg::Deps::Simple')) { + return ($deps->{package}); + } else { + die "unknown dep type"; + } +} + + +# Process the "Provides" and "Replaces" fields to be able to resolve +# virtual dependencies. +my %provides; + +foreach my $cdata (sort {$a->{Package} cmp $b->{Package}} (values %packages)) { + if (defined $cdata->{Provides}) { + my @provides = getDeps(Dpkg::Deps::deps_parse($cdata->{Provides})); + foreach my $name (@provides) { + #die "conflicting provide: $name\n" if defined $provides{$name}; + #warn "provide by $cdata->{Package} conflicts with package with the same name: $name\n"; + next if defined $packages{$name}; + $provides{$name} = $cdata->{Package}; + } + } + # Treat "Replaces" like "Provides". + if (defined $cdata->{Replaces}) { + my @replaces = getDeps(Dpkg::Deps::deps_parse($cdata->{Replaces})); + foreach my $name (@replaces) { + next if defined $packages{$name}; + $provides{$name} = $cdata->{Package}; + } + } +} + + +# Determine the closure of a package. +my %donePkgs; +my %depsUsed; +my @order = (); + +sub closePackage { + my $pkgName = shift; + print STDERR ">>> $pkgName\n"; + my $cdata = $packages{$pkgName}; + + if (!defined $cdata) { + die "unknown (virtual) package $pkgName" + unless defined $provides{$pkgName}; + print STDERR "virtual $pkgName: using $provides{$pkgName}\n"; + $pkgName = $provides{$pkgName}; + $cdata = $packages{$pkgName}; + } + + die "unknown package $pkgName" unless defined $cdata; + return if defined $donePkgs{$pkgName}; + $donePkgs{$pkgName} = 1; + + if (defined $cdata->{Provides}) { + foreach my $name (getDeps(Dpkg::Deps::deps_parse($cdata->{Provides}))) { + $provides{$name} = $cdata->{Package}; + } + } + + my @depNames = (); + + if (defined $cdata->{Depends}) { + print STDERR " $pkgName: $cdata->{Depends}\n"; + my $deps = Dpkg::Deps::deps_parse($cdata->{Depends}); + die unless defined $deps; + push @depNames, getDeps($deps); + } + + if (defined $cdata->{'Pre-Depends'}) { + print STDERR " $pkgName: $cdata->{'Pre-Depends'}\n"; + my $deps = Dpkg::Deps::deps_parse($cdata->{'Pre-Depends'}); + die unless defined $deps; + push @depNames, getDeps($deps); + } + + foreach my $depName (@depNames) { + closePackage($depName); + } + + push @order, $pkgName; + $depsUsed{$pkgName} = \@depNames; +} + +foreach my $pkgName (@toplevelPkgs) { + closePackage $pkgName; +} + + +# Generate the output Nix expression. +print "# This is a generated file. Do not modify!\n"; +print "# Following are the Debian packages constituting the closure of: @toplevelPkgs\n\n"; +print "{fetchurl}:\n\n"; +print "[\n\n"; + +# Output the packages in strongly connected components. 
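+# A component is printed as a nested "[ ... ]" list: packages from @order are
+# appended to the current component until none of their dependencies point
+# forward to a package that has not been emitted yet (%forward is empty), at
+# which point the component is closed and a new one begins.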
+my %done; +my %forward; +my $newComponent = 1; +foreach my $pkgName (@order) { + $done{$pkgName} = 1; + my $cdata = $packages{$pkgName}; + my @deps = @{$depsUsed{$pkgName}}; + foreach my $dep (@deps) { + $dep = $provides{$dep} if defined $provides{$dep}; + $forward{$dep} = 1 unless defined $done{$dep}; + } + delete $forward{$pkgName}; + + print " [\n\n" if $newComponent; + $newComponent = 0; + + my $origName = basename $cdata->{Filename}; + my $cleanedName = $origName; + $cleanedName =~ s/~//g; + + print " (fetchurl {\n"; + print " url = \"$urlPrefix/$cdata->{Filename}\";\n"; + print " sha256 = \"$cdata->{SHA256}\";\n"; + print " name = \"$cleanedName\";\n" if $cleanedName ne $origName; + print " })\n"; + print "\n"; + + if (keys %forward == 0) { + print " ]\n\n"; + $newComponent = 1; + } +} + +foreach my $pkgName (@order) { + my $cdata = $packages{$pkgName}; +} + +print "]\n"; + +if ($newComponent != 1) { + print STDERR "argh: ", keys %forward, "\n"; + exit 1; +} diff --git a/nixpkgs/pkgs/build-support/vm/default.nix b/nixpkgs/pkgs/build-support/vm/default.nix new file mode 100644 index 000000000000..4ec5531192dc --- /dev/null +++ b/nixpkgs/pkgs/build-support/vm/default.nix @@ -0,0 +1,1212 @@ +{ lib +, pkgs +, kernel ? pkgs.linux +, img ? pkgs.stdenv.hostPlatform.linux-kernel.target +, storeDir ? builtins.storeDir +, rootModules ? + [ "virtio_pci" "virtio_mmio" "virtio_blk" "virtio_balloon" "virtio_rng" "ext4" "unix" "9p" "9pnet_virtio" "crc32c_generic" ] + ++ pkgs.lib.optional pkgs.stdenv.hostPlatform.isx86 "rtc_cmos" +}: + +let + inherit (pkgs) bash bashInteractive busybox cpio coreutils e2fsprogs fetchurl kmod rpm + stdenv util-linux + buildPackages writeScript writeText runCommand; +in +rec { + qemu-common = import ../../../nixos/lib/qemu-common.nix { inherit lib pkgs; }; + + qemu = buildPackages.qemu_kvm; + + modulesClosure = pkgs.makeModulesClosure { + inherit kernel rootModules; + firmware = kernel; + }; + + + hd = "vda"; # either "sda" or "vda" + + initrdUtils = runCommand "initrd-utils" + { nativeBuildInputs = [ buildPackages.nukeReferences ]; + allowedReferences = [ "out" modulesClosure ]; # prevent accidents like glibc being included in the initrd + } + '' + mkdir -p $out/bin + mkdir -p $out/lib + + # Copy what we need from Glibc. + cp -p \ + ${pkgs.stdenv.cc.libc}/lib/ld-*.so.? \ + ${pkgs.stdenv.cc.libc}/lib/libc.so.* \ + ${pkgs.stdenv.cc.libc}/lib/libm.so.* \ + ${pkgs.stdenv.cc.libc}/lib/libresolv.so.* \ + $out/lib + + # Copy BusyBox. + cp -pd ${pkgs.busybox}/bin/* $out/bin + + # Run patchelf to make the programs refer to the copied libraries. + for i in $out/bin/* $out/lib/*; do if ! test -L $i; then nuke-refs $i; fi; done + + for i in $out/bin/*; do + if [ -f "$i" -a ! -L "$i" ]; then + echo "patching $i..." + patchelf --set-interpreter $out/lib/ld-*.so.? --set-rpath $out/lib $i || true + fi + done + ''; # */ + + + stage1Init = writeScript "vm-run-stage1" '' + #! ${initrdUtils}/bin/ash -e + + export PATH=${initrdUtils}/bin + + mkdir /etc + echo -n > /etc/fstab + + mount -t proc none /proc + mount -t sysfs none /sys + + echo 2 > /proc/sys/vm/panic_on_oom + + for o in $(cat /proc/cmdline); do + case $o in + mountDisk=1) + mountDisk=1 + ;; + command=*) + set -- $(IFS==; echo $o) + command=$2 + ;; + out=*) + set -- $(IFS==; echo $o) + export out=$2 + ;; + esac + done + + echo "loading kernel modules..." 
+ for i in $(cat ${modulesClosure}/insmod-list); do + insmod $i || echo "warning: unable to load $i" + done + + mount -t devtmpfs devtmpfs /dev + ln -s /proc/self/fd /dev/fd + ln -s /proc/self/fd/0 /dev/stdin + ln -s /proc/self/fd/1 /dev/stdout + ln -s /proc/self/fd/2 /dev/stderr + + ifconfig lo up + + mkdir /fs + + if test -z "$mountDisk"; then + mount -t tmpfs none /fs + else + mount /dev/${hd} /fs + fi + + mkdir -p /fs/dev + mount -o bind /dev /fs/dev + + mkdir -p /fs/dev/shm /fs/dev/pts + mount -t tmpfs -o "mode=1777" none /fs/dev/shm + mount -t devpts none /fs/dev/pts + + echo "mounting Nix store..." + mkdir -p /fs${storeDir} + mount -t 9p store /fs${storeDir} -o trans=virtio,version=9p2000.L,cache=loose,msize=131072 + + mkdir -p /fs/tmp /fs/run /fs/var + mount -t tmpfs -o "mode=1777" none /fs/tmp + mount -t tmpfs -o "mode=755" none /fs/run + ln -sfn /run /fs/var/run + + echo "mounting host's temporary directory..." + mkdir -p /fs/tmp/xchg + mount -t 9p xchg /fs/tmp/xchg -o trans=virtio,version=9p2000.L,msize=131072 + + mkdir -p /fs/proc + mount -t proc none /fs/proc + + mkdir -p /fs/sys + mount -t sysfs none /fs/sys + + mkdir -p /fs/etc + ln -sf /proc/mounts /fs/etc/mtab + echo "127.0.0.1 localhost" > /fs/etc/hosts + # Ensures tools requiring /etc/passwd will work (e.g. nix) + if [ ! -e /fs/etc/passwd ]; then + echo "root:x:0:0:System administrator:/root:/bin/sh" > /fs/etc/passwd + fi + + echo "starting stage 2 ($command)" + exec switch_root /fs $command $out + ''; + + + initrd = pkgs.makeInitrd { + contents = [ + { object = stage1Init; + symlink = "/init"; + } + ]; + }; + + + stage2Init = writeScript "vm-run-stage2" '' + #! ${bash}/bin/sh + source /tmp/xchg/saved-env + + # Set the system time from the hardware clock. Works around an + # apparent KVM > 1.5.2 bug. + ${util-linux}/bin/hwclock -s + + export NIX_STORE=${storeDir} + export NIX_BUILD_TOP=/tmp + export TMPDIR=/tmp + export PATH=/empty + out="$1" + cd "$NIX_BUILD_TOP" + + if ! test -e /bin/sh; then + ${coreutils}/bin/mkdir -p /bin + ${coreutils}/bin/ln -s ${bash}/bin/sh /bin/sh + fi + + # Set up automatic kernel module loading. + export MODULE_DIR=${kernel}/lib/modules/ + ${coreutils}/bin/cat <<EOF > /run/modprobe + #! ${bash}/bin/sh + export MODULE_DIR=$MODULE_DIR + exec ${kmod}/bin/modprobe "\$@" + EOF + ${coreutils}/bin/chmod 755 /run/modprobe + echo /run/modprobe > /proc/sys/kernel/modprobe + + # For debugging: if this is the second time this image is run, + # then don't start the build again, but instead drop the user into + # an interactive shell. + if test -n "$origBuilder" -a ! -e /.debug; then + exec < /dev/null + ${coreutils}/bin/touch /.debug + $origBuilder $origArgs + echo $? > /tmp/xchg/in-vm-exit + + ${busybox}/bin/mount -o remount,ro dummy / + + ${busybox}/bin/poweroff -f + else + export PATH=/bin:/usr/bin:${coreutils}/bin + echo "Starting interactive shell..." 
+ echo "(To run the original builder: \$origBuilder \$origArgs)" + exec ${busybox}/bin/setsid ${bashInteractive}/bin/bash < /dev/${qemu-common.qemuSerialDevice} &> /dev/${qemu-common.qemuSerialDevice} + fi + ''; + + + qemuCommandLinux = '' + ${qemu-common.qemuBinary qemu} \ + -nographic -no-reboot \ + -device virtio-rng-pci \ + -virtfs local,path=${storeDir},security_model=none,mount_tag=store \ + -virtfs local,path=$TMPDIR/xchg,security_model=none,mount_tag=xchg \ + ''${diskImage:+-drive file=$diskImage,if=virtio,cache=unsafe,werror=report} \ + -kernel ${kernel}/${img} \ + -initrd ${initrd}/initrd \ + -append "console=${qemu-common.qemuSerialDevice} panic=1 command=${stage2Init} out=$out mountDisk=$mountDisk loglevel=4" \ + $QEMU_OPTS + ''; + + + vmRunCommand = qemuCommand: writeText "vm-run" '' + export > saved-env + + PATH=${coreutils}/bin + mkdir xchg + mv saved-env xchg/ + + eval "$preVM" + + if [ "$enableParallelBuilding" = 1 ]; then + if [ ''${NIX_BUILD_CORES:-0} = 0 ]; then + QEMU_OPTS+=" -smp cpus=$(nproc)" + else + QEMU_OPTS+=" -smp cpus=$NIX_BUILD_CORES" + fi + fi + + # Write the command to start the VM to a file so that the user can + # debug inside the VM if the build fails (when Nix is called with + # the -K option to preserve the temporary build directory). + cat > ./run-vm <<EOF + #! ${bash}/bin/sh + ''${diskImage:+diskImage=$diskImage} + TMPDIR=$TMPDIR + cd $TMPDIR + ${qemuCommand} + EOF + + mkdir -p -m 0700 $out + + chmod +x ./run-vm + source ./run-vm + + if ! test -e xchg/in-vm-exit; then + echo "Virtual machine didn't produce an exit code." + exit 1 + fi + + exitCode="$(cat xchg/in-vm-exit)" + if [ "$exitCode" != "0" ]; then + exit "$exitCode" + fi + + eval "$postVM" + ''; + + /* + A bash script fragment that produces a disk image at `destination`. + */ + createEmptyImage = { + # Disk image size in MiB + size, + # Name that will be written to ${destination}/nix-support/full-name + fullName, + # Where to write the image files, defaulting to $out + destination ? "$out" + }: '' + mkdir -p ${destination} + diskImage=${destination}/disk-image.qcow2 + ${qemu}/bin/qemu-img create -f qcow2 $diskImage "${toString size}M" + + mkdir ${destination}/nix-support + echo "${fullName}" > ${destination}/nix-support/full-name + ''; + + + defaultCreateRootFS = '' + mkdir /mnt + ${e2fsprogs}/bin/mkfs.ext4 /dev/${hd} + ${util-linux}/bin/mount -t ext4 /dev/${hd} /mnt + + if test -e /mnt/.debug; then + exec ${bash}/bin/sh + fi + touch /mnt/.debug + + mkdir /mnt/proc /mnt/dev /mnt/sys + ''; + + + /* Run a derivation in a Linux virtual machine (using Qemu/KVM). By + default, there is no disk image; the root filesystem is a tmpfs, + and the nix store is shared with the host (via the 9P protocol). + Thus, any pure Nix derivation should run unmodified, e.g. the + call + + runInLinuxVM patchelf + + will build the derivation `patchelf' inside a VM. The attribute + `preVM' can optionally contain a shell command to be evaluated + *before* the VM is started (i.e., on the host). The attribute + `memSize' specifies the memory size of the VM in megabytes, + defaulting to 512. The attribute `diskImage' can optionally + specify a file system image to be attached to /dev/sda. (Note + that currently we expect the image to contain a filesystem, not a + full disk image with a partition table etc.) + + If the build fails and Nix is run with the `-K' option, a script + `run-vm' will be left behind in the temporary build directory + that allows you to boot into the VM and debug it interactively. 
*/ + + runInLinuxVM = drv: lib.overrideDerivation drv ({ memSize ? 512, QEMU_OPTS ? "", args, builder, ... }: { + requiredSystemFeatures = [ "kvm" ]; + builder = "${bash}/bin/sh"; + args = ["-e" (vmRunCommand qemuCommandLinux)]; + origArgs = args; + origBuilder = builder; + QEMU_OPTS = "${QEMU_OPTS} -m ${toString memSize}"; + passAsFile = []; # HACK fix - see https://github.com/NixOS/nixpkgs/issues/16742 + }); + + + extractFs = {file, fs ? null} : + runInLinuxVM ( + stdenv.mkDerivation { + name = "extract-file"; + buildInputs = [ util-linux ]; + buildCommand = '' + ln -s ${kernel}/lib /lib + ${kmod}/bin/modprobe loop + ${kmod}/bin/modprobe ext4 + ${kmod}/bin/modprobe hfs + ${kmod}/bin/modprobe hfsplus + ${kmod}/bin/modprobe squashfs + ${kmod}/bin/modprobe iso9660 + ${kmod}/bin/modprobe ufs + ${kmod}/bin/modprobe cramfs + + mkdir -p $out + mkdir -p tmp + mount -o loop,ro,ufstype=44bsd ${lib.optionalString (fs != null) "-t ${fs} "}${file} tmp || + mount -o loop,ro ${lib.optionalString (fs != null) "-t ${fs} "}${file} tmp + cp -Rv tmp/* $out/ || exit 0 + ''; + }); + + + extractMTDfs = {file, fs ? null} : + runInLinuxVM ( + stdenv.mkDerivation { + name = "extract-file-mtd"; + buildInputs = [ pkgs.util-linux pkgs.mtdutils ]; + buildCommand = '' + ln -s ${kernel}/lib /lib + ${kmod}/bin/modprobe mtd + ${kmod}/bin/modprobe mtdram total_size=131072 + ${kmod}/bin/modprobe mtdchar + ${kmod}/bin/modprobe mtdblock + ${kmod}/bin/modprobe jffs2 + ${kmod}/bin/modprobe zlib + + mkdir -p $out + mkdir -p tmp + + dd if=${file} of=/dev/mtd0 + mount ${lib.optionalString (fs != null) "-t ${fs} "}/dev/mtdblock0 tmp + + cp -R tmp/* $out/ + ''; + }); + + + /* Like runInLinuxVM, but run the build not using the stdenv from + the Nix store, but using the tools provided by /bin, /usr/bin + etc. from the specified filesystem image, which typically is a + filesystem containing a non-NixOS Linux distribution. */ + + runInLinuxImage = drv: runInLinuxVM (lib.overrideDerivation drv (attrs: { + mountDisk = true; + + /* Mount `image' as the root FS, but use a temporary copy-on-write + image since we don't want to (and can't) write to `image'. */ + preVM = '' + diskImage=$(pwd)/disk-image.qcow2 + origImage=${attrs.diskImage} + if test -d "$origImage"; then origImage="$origImage/disk-image.qcow2"; fi + ${qemu}/bin/qemu-img create -F ${attrs.diskImageFormat} -b "$origImage" -f qcow2 $diskImage + ''; + + /* Inside the VM, run the stdenv setup script normally, but at the + very end set $PATH and $SHELL to the `native' paths for the + distribution inside the VM. */ + postHook = '' + PATH=/usr/bin:/bin:/usr/sbin:/sbin + SHELL=/bin/sh + eval "$origPostHook" + ''; + + origPostHook = lib.optionalString (attrs ? postHook) attrs.postHook; + + /* Don't run Nix-specific build steps like patchelf. */ + fixupPhase = "true"; + })); + + + /* Create a filesystem image of the specified size and fill it with + a set of RPM packages. */ + + fillDiskWithRPMs = + { size ? 4096, rpms, name, fullName, preInstall ? "", postInstall ? "" + , runScripts ? true, createRootFS ? defaultCreateRootFS + , QEMU_OPTS ? "", memSize ? 512 + , unifiedSystemDir ? false + }: + + runInLinuxVM (stdenv.mkDerivation { + inherit name preInstall postInstall rpms QEMU_OPTS memSize; + preVM = createEmptyImage {inherit size fullName;}; + + buildCommand = '' + ${createRootFS} + + chroot=$(type -tP chroot) + + # Make the Nix store available in /mnt, because that's where the RPMs live. 
+ mkdir -p /mnt${storeDir} + ${util-linux}/bin/mount -o bind ${storeDir} /mnt${storeDir} + + # Newer distributions like Fedora 18 require /lib etc. to be + # symlinked to /usr. + ${lib.optionalString unifiedSystemDir '' + mkdir -p /mnt/usr/bin /mnt/usr/sbin /mnt/usr/lib /mnt/usr/lib64 + ln -s /usr/bin /mnt/bin + ln -s /usr/sbin /mnt/sbin + ln -s /usr/lib /mnt/lib + ln -s /usr/lib64 /mnt/lib64 + ${util-linux}/bin/mount -t proc none /mnt/proc + ''} + + echo "unpacking RPMs..." + set +o pipefail + for i in $rpms; do + echo "$i..." + ${rpm}/bin/rpm2cpio "$i" | chroot /mnt ${cpio}/bin/cpio -i --make-directories --unconditional + done + + eval "$preInstall" + + echo "initialising RPM DB..." + PATH=/usr/bin:/bin:/usr/sbin:/sbin $chroot /mnt \ + ldconfig -v || true + PATH=/usr/bin:/bin:/usr/sbin:/sbin $chroot /mnt \ + rpm --initdb + + ${util-linux}/bin/mount -o bind /tmp /mnt/tmp + + echo "installing RPMs..." + PATH=/usr/bin:/bin:/usr/sbin:/sbin $chroot /mnt \ + rpm -iv --nosignature ${lib.optionalString (!runScripts) "--noscripts"} $rpms + + echo "running post-install script..." + eval "$postInstall" + + rm /mnt/.debug + + ${util-linux}/bin/umount /mnt${storeDir} /mnt/tmp ${lib.optionalString unifiedSystemDir "/mnt/proc"} + ${util-linux}/bin/umount /mnt + ''; + + passthru = { inherit fullName; }; + }); + + + /* Generate a script that can be used to run an interactive session + in the given image. */ + + makeImageTestScript = image: writeScript "image-test" '' + #! ${bash}/bin/sh + if test -z "$1"; then + echo "Syntax: $0 <copy-on-write-temp-file>" + exit 1 + fi + diskImage="$1" + if ! test -e "$diskImage"; then + ${qemu}/bin/qemu-img create -b ${image}/disk-image.qcow2 -f qcow2 -F qcow2 "$diskImage" + fi + export TMPDIR=$(mktemp -d) + export out=/dummy + export origBuilder= + export origArgs= + mkdir $TMPDIR/xchg + export > $TMPDIR/xchg/saved-env + mountDisk=1 + ${qemuCommandLinux} + ''; + + + /* Build RPM packages from the tarball `src' in the Linux + distribution installed in the filesystem `diskImage'. The + tarball must contain an RPM specfile. 
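+
+     For example (mirroring the call in vm/test.nix):
+
+       buildRPM {
+         name = "patchelf-rpm";
+         src = patchelf.src;
+         diskImage = diskImages.fedora27x86_64;
+         diskImageFormat = "qcow2";
+       }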
*/ + + buildRPM = attrs: runInLinuxImage (stdenv.mkDerivation ({ + prePhases = [ "prepareImagePhase" "sysInfoPhase" ]; + dontConfigure = true; + + outDir = "rpms/${attrs.diskImage.name}"; + + prepareImagePhase = '' + if test -n "$extraRPMs"; then + for rpmdir in $extraRPMs ; do + rpm -iv $(ls $rpmdir/rpms/*/*.rpm | grep -v 'src\.rpm' | sort | head -1) + done + fi + ''; + + sysInfoPhase = '' + echo "System/kernel: $(uname -a)" + if test -e /etc/fedora-release; then echo "Fedora release: $(cat /etc/fedora-release)"; fi + if test -e /etc/SuSE-release; then echo "SUSE release: $(cat /etc/SuSE-release)"; fi + echo "installed RPM packages" + rpm -qa --qf "%{Name}-%{Version}-%{Release} (%{Arch}; %{Distribution}; %{Vendor})\n" + ''; + + buildPhase = '' + eval "$preBuild" + + srcName="$(rpmspec --srpm -q --qf '%{source}' *.spec)" + cp "$src" "$srcName" # `ln' doesn't work always work: RPM requires that the file is owned by root + + export HOME=/tmp/home + mkdir $HOME + + rpmout=/tmp/rpmout + mkdir $rpmout $rpmout/SPECS $rpmout/BUILD $rpmout/RPMS $rpmout/SRPMS + + echo "%_topdir $rpmout" >> $HOME/.rpmmacros + + if [ `uname -m` = i686 ]; then extra="--target i686-linux"; fi + rpmbuild -vv $extra -ta "$srcName" + + eval "$postBuild" + ''; + + installPhase = '' + eval "$preInstall" + + mkdir -p $out/$outDir + find $rpmout -name "*.rpm" -exec cp {} $out/$outDir \; + + for i in $out/$outDir/*.rpm; do + echo "Generated RPM/SRPM: $i" + rpm -qip $i + done + + eval "$postInstall" + ''; # */ + } // attrs)); + + + /* Create a filesystem image of the specified size and fill it with + a set of Debian packages. `debs' must be a list of list of + .deb files, namely, the Debian packages grouped together into + strongly connected components. See deb/deb-closure.nix. */ + + fillDiskWithDebs = + { size ? 4096, debs, name, fullName, postInstall ? null, createRootFS ? defaultCreateRootFS + , QEMU_OPTS ? "", memSize ? 512 }: + + runInLinuxVM (stdenv.mkDerivation { + inherit name postInstall QEMU_OPTS memSize; + + debs = (lib.intersperse "|" debs); + + preVM = createEmptyImage {inherit size fullName;}; + + buildCommand = '' + ${createRootFS} + + PATH=$PATH:${lib.makeBinPath [ pkgs.dpkg pkgs.glibc pkgs.xz ]} + + # Unpack the .debs. We do this to prevent pre-install scripts + # (which have lots of circular dependencies) from barfing. + echo "unpacking Debs..." + + for deb in $debs; do + if test "$deb" != "|"; then + echo "$deb..." + dpkg-deb --extract "$deb" /mnt + fi + done + + # Make the Nix store available in /mnt, because that's where the .debs live. + mkdir -p /mnt/inst${storeDir} + ${util-linux}/bin/mount -o bind ${storeDir} /mnt/inst${storeDir} + ${util-linux}/bin/mount -o bind /proc /mnt/proc + ${util-linux}/bin/mount -o bind /dev /mnt/dev + + # Misc. files/directories assumed by various packages. + echo "initialising Dpkg DB..." + touch /mnt/etc/shells + touch /mnt/var/lib/dpkg/status + touch /mnt/var/lib/dpkg/available + touch /mnt/var/lib/dpkg/diversions + + # Now install the .debs. This is basically just to register + # them with dpkg and to make their pre/post-install scripts + # run. + echo "installing Debs..." + + export DEBIAN_FRONTEND=noninteractive + + oldIFS="$IFS" + IFS="|" + for component in $debs; do + IFS="$oldIFS" + echo + echo ">>> INSTALLING COMPONENT: $component" + debs= + for i in $component; do + debs="$debs /inst/$i"; + done + chroot=$(type -tP chroot) + + # Create a fake start-stop-daemon script, as done in debootstrap. 
+ mv "/mnt/sbin/start-stop-daemon" "/mnt/sbin/start-stop-daemon.REAL" + echo "#!/bin/true" > "/mnt/sbin/start-stop-daemon" + chmod 755 "/mnt/sbin/start-stop-daemon" + + PATH=/usr/bin:/bin:/usr/sbin:/sbin $chroot /mnt \ + /usr/bin/dpkg --install --force-all $debs < /dev/null || true + + # Move the real start-stop-daemon back into its place. + mv "/mnt/sbin/start-stop-daemon.REAL" "/mnt/sbin/start-stop-daemon" + done + + echo "running post-install script..." + eval "$postInstall" + ln -sf dash /mnt/bin/sh + + rm /mnt/.debug + + ${util-linux}/bin/umount /mnt/inst${storeDir} + ${util-linux}/bin/umount /mnt/proc + ${util-linux}/bin/umount /mnt/dev + ${util-linux}/bin/umount /mnt + ''; + + passthru = { inherit fullName; }; + }); + + + /* Generate a Nix expression containing fetchurl calls for the + closure of a set of top-level RPM packages from the + `primary.xml.gz' file of a Fedora or openSUSE distribution. */ + + rpmClosureGenerator = + {name, packagesLists, urlPrefixes, packages, archs ? []}: + assert (builtins.length packagesLists) == (builtins.length urlPrefixes); + runCommand "${name}.nix" { + nativeBuildInputs = [ buildPackages.perl buildPackages.perlPackages.XMLSimple ]; + inherit archs; + } '' + ${lib.concatImapStrings (i: pl: '' + gunzip < ${pl} > ./packages_${toString i}.xml + '') packagesLists} + perl -w ${rpm/rpm-closure.pl} \ + ${lib.concatImapStrings (i: pl: "./packages_${toString i}.xml ${pl.snd} " ) (lib.zipLists packagesLists urlPrefixes)} \ + ${toString packages} > $out + ''; + + + /* Helper function that combines rpmClosureGenerator and + fillDiskWithRPMs to generate a disk image from a set of package + names. */ + + makeImageFromRPMDist = + { name, fullName, size ? 4096 + , urlPrefix ? "", urlPrefixes ? [urlPrefix] + , packagesList ? "", packagesLists ? [packagesList] + , packages, extraPackages ? [] + , preInstall ? "", postInstall ? "", archs ? ["noarch" "i386"] + , runScripts ? true, createRootFS ? defaultCreateRootFS + , QEMU_OPTS ? "", memSize ? 512 + , unifiedSystemDir ? false }: + + fillDiskWithRPMs { + inherit name fullName size preInstall postInstall runScripts createRootFS unifiedSystemDir QEMU_OPTS memSize; + rpms = import (rpmClosureGenerator { + inherit name packagesLists urlPrefixes archs; + packages = packages ++ extraPackages; + }) { inherit fetchurl; }; + }; + + + /* Like `rpmClosureGenerator', but now for Debian/Ubuntu releases + (i.e. generate a closure from a Packages.bz2 file). */ + + debClosureGenerator = + {name, packagesLists, urlPrefix, packages}: + + runCommand "${name}.nix" + { nativeBuildInputs = [ buildPackages.perl buildPackages.dpkg ]; } '' + for i in ${toString packagesLists}; do + echo "adding $i..." + case $i in + *.xz | *.lzma) + xz -d < $i >> ./Packages + ;; + *.bz2) + bunzip2 < $i >> ./Packages + ;; + *.gz) + gzip -dc < $i >> ./Packages + ;; + esac + done + + # Work around this bug: http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=452279 + sed -i ./Packages -e s/x86_64-linux-gnu/x86-64-linux-gnu/g + + perl -w ${deb/deb-closure.pl} \ + ./Packages ${urlPrefix} ${toString packages} > $out + ''; + + + /* Helper function that combines debClosureGenerator and + fillDiskWithDebs to generate a disk image from a set of package + names. */ + + makeImageFromDebDist = + { name, fullName, size ? 4096, urlPrefix + , packagesList ? "", packagesLists ? [packagesList] + , packages, extraPackages ? [], postInstall ? "" + , extraDebs ? [], createRootFS ? defaultCreateRootFS + , QEMU_OPTS ? "", memSize ? 
512 }: + + let + expr = debClosureGenerator { + inherit name packagesLists urlPrefix; + packages = packages ++ extraPackages; + }; + in + (fillDiskWithDebs { + inherit name fullName size postInstall createRootFS QEMU_OPTS memSize; + debs = import expr {inherit fetchurl;} ++ extraDebs; + }) // {inherit expr;}; + + + /* The set of supported RPM-based distributions. */ + + rpmDistros = { + + # Note: no i386 release for Fedora >= 26 + fedora26x86_64 = + let version = "26"; + in { + name = "fedora-${version}-x86_64"; + fullName = "Fedora ${version} (x86_64)"; + packagesList = fetchurl rec { + url = "mirror://fedora/linux/releases/${version}/Everything/x86_64/os/repodata/${sha256}-primary.xml.gz"; + sha256 = "880055a50c05b20641530d09b23f64501a000b2f92fe252417c530178730a95e"; + }; + urlPrefix = "mirror://fedora/linux/releases/${version}/Everything/x86_64/os"; + archs = ["noarch" "x86_64"]; + packages = commonFedoraPackages ++ [ "cronie" "util-linux" ]; + unifiedSystemDir = true; + }; + + fedora27x86_64 = + let version = "27"; + in { + name = "fedora-${version}-x86_64"; + fullName = "Fedora ${version} (x86_64)"; + packagesList = fetchurl rec { + url = "mirror://fedora/linux/releases/${version}/Everything/x86_64/os/repodata/${sha256}-primary.xml.gz"; + sha256 = "48986ce4583cd09825c6d437150314446f0f49fa1a1bd62dcfa1085295030fe9"; + }; + urlPrefix = "mirror://fedora/linux/releases/${version}/Everything/x86_64/os"; + archs = ["noarch" "x86_64"]; + packages = commonFedoraPackages ++ [ "cronie" "util-linux" ]; + unifiedSystemDir = true; + }; + + centos6i386 = + let version = "6.9"; + in rec { + name = "centos-${version}-i386"; + fullName = "CentOS ${version} (i386)"; + urlPrefix = "mirror://centos/${version}/os/i386"; + packagesList = fetchurl rec { + url = "${urlPrefix}/repodata/${sha256}-primary.xml.gz"; + sha256 = "b826a45082ef68340325c0855f3d2e5d5a4d0f77d28ba3b871791d6f14a97aeb"; + }; + archs = ["noarch" "i386"]; + packages = commonCentOSPackages ++ [ "procps" ]; + }; + + centos6x86_64 = + let version = "6.9"; + in rec { + name = "centos-${version}-x86_64"; + fullName = "CentOS ${version} (x86_64)"; + urlPrefix = "mirror://centos/${version}/os/x86_64"; + packagesList = fetchurl rec { + url = "${urlPrefix}/repodata/${sha256}-primary.xml.gz"; + sha256 = "ed2b2d4ac98d774d4cd3e91467e1532f7e8b0275cfc91a0d214b532dcaf1e979"; + }; + archs = ["noarch" "x86_64"]; + packages = commonCentOSPackages ++ [ "procps" ]; + }; + + # Note: no i386 release for 7.x + centos7x86_64 = + let version = "7.4.1708"; + in rec { + name = "centos-${version}-x86_64"; + fullName = "CentOS ${version} (x86_64)"; + urlPrefix = "mirror://centos/${version}/os/x86_64"; + packagesList = fetchurl rec { + url = "${urlPrefix}/repodata/${sha256}-primary.xml.gz"; + sha256 = "b686d3a0f337323e656d9387b9a76ce6808b26255fc3a138b1a87d3b1cb95ed5"; + }; + archs = ["noarch" "x86_64"]; + packages = commonCentOSPackages ++ [ "procps-ng" ]; + }; + }; + + + /* The set of supported Dpkg-based distributions. 
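+     Each entry is turned into an image-building function by `diskImageFuns'
+     below; e.g. `diskImageFuns.ubuntu2204x86_64 { }' builds a Jammy image
+     from these package lists via `makeImageFromDebDist'.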
*/ + + debDistros = { + ubuntu1404i386 = { + name = "ubuntu-14.04-trusty-i386"; + fullName = "Ubuntu 14.04 Trusty (i386)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/trusty/main/binary-i386/Packages.bz2"; + sha256 = "1d5y3v3v079gdq45hc07ja0bjlmzqfwdwwlq0brwxi8m75k3iz7x"; + }) + (fetchurl { + url = "mirror://ubuntu/dists/trusty/universe/binary-i386/Packages.bz2"; + sha256 = "03x9w92by320rfklrqhcl3qpwmnxds9c8ijl5zhcb21d6dcz5z1a"; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu1404x86_64 = { + name = "ubuntu-14.04-trusty-amd64"; + fullName = "Ubuntu 14.04 Trusty (amd64)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/trusty/main/binary-amd64/Packages.bz2"; + sha256 = "1hhzbyqfr5i0swahwnl5gfp5l9p9hspywb1vpihr3b74p1z935bh"; + }) + (fetchurl { + url = "mirror://ubuntu/dists/trusty/universe/binary-amd64/Packages.bz2"; + sha256 = "04560ba8s4z4v5iawknagrkn9q1nzvpn081ycmqvhh73p3p3g1jm"; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu1604i386 = { + name = "ubuntu-16.04-xenial-i386"; + fullName = "Ubuntu 16.04 Xenial (i386)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/xenial/main/binary-i386/Packages.xz"; + sha256 = "13r75sp4slqy8w32y5dnr7pp7p3cfvavyr1g7gwnlkyrq4zx4ahy"; + }) + (fetchurl { + url = "mirror://ubuntu/dists/xenial/universe/binary-i386/Packages.xz"; + sha256 = "14fid1rqm3sc0wlygcvn0yx5aljf51c2jpd4x0zxij4019316hsh"; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu1604x86_64 = { + name = "ubuntu-16.04-xenial-amd64"; + fullName = "Ubuntu 16.04 Xenial (amd64)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/xenial/main/binary-amd64/Packages.xz"; + sha256 = "110qnkhjkkwm316fbig3aivm2595ydz6zskc4ld5cr8ngcrqm1bn"; + }) + (fetchurl { + url = "mirror://ubuntu/dists/xenial/universe/binary-amd64/Packages.xz"; + sha256 = "0mm7gj491yi6q4v0n4qkbsm94s59bvqir6fk60j73w7y4la8rg68"; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu1804i386 = { + name = "ubuntu-18.04-bionic-i386"; + fullName = "Ubuntu 18.04 Bionic (i386)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/bionic/main/binary-i386/Packages.xz"; + sha256 = "0f0v4131kwf7m7f8j3288rlqdxk1k3vqy74b7fcfd6jz9j8d840i"; + }) + (fetchurl { + url = "mirror://ubuntu/dists/bionic/universe/binary-i386/Packages.xz"; + sha256 = "1v75c0dqr0wp0dqd4hnci92qqs4hll8frqdbpswadgxm5chn91bw"; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu1804x86_64 = { + name = "ubuntu-18.04-bionic-amd64"; + fullName = "Ubuntu 18.04 Bionic (amd64)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/bionic/main/binary-amd64/Packages.xz"; + sha256 = "1ls81bjyvmfz6i919kszl7xks1ibrh1xqhsk6698ackndkm0wp39"; + }) + (fetchurl { + url = "mirror://ubuntu/dists/bionic/universe/binary-amd64/Packages.xz"; + sha256 = "1832nqpn4ap95b3sj870xqayrza9in4kih9jkmjax27pq6x15v1r"; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu2004i386 = { + name = "ubuntu-20.04-focal-i386"; + fullName = "Ubuntu 20.04 Focal (i386)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/focal/main/binary-i386/Packages.xz"; + sha256 = 
"sha256-7RAYURoN3RKYQAHpwBS9TIV6vCmpURpphyMJQmV4wLc="; + }) + (fetchurl { + url = "mirror://ubuntu/dists/focal/universe/binary-i386/Packages.xz"; + sha256 = "sha256-oA551xVE80volUPgkMyvzpQ1d+GhuZd4DAe7dXZnULM="; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu2004x86_64 = { + name = "ubuntu-20.04-focal-amd64"; + fullName = "Ubuntu 20.04 Focal (amd64)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/focal/main/binary-amd64/Packages.xz"; + sha256 = "sha256-d1eSH/j+7Zw5NKDJk21EG6SiOL7j6myMHfXLzUP8mGE="; + }) + (fetchurl { + url = "mirror://ubuntu/dists/focal/universe/binary-amd64/Packages.xz"; + sha256 = "sha256-RqdG2seJvZU3rKVNsWgLnf9RwkgVMRE1A4IZnX2WudE="; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu2204i386 = { + name = "ubuntu-22.04-jammy-i386"; + fullName = "Ubuntu 22.04 Jammy (i386)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/jammy/main/binary-i386/Packages.xz"; + sha256 = "sha256-iZBmwT0ep4v+V3sayybbOgZBOFFZwPGpOKtmuLMMVPQ="; + }) + (fetchurl { + url = "mirror://ubuntu/dists/jammy/universe/binary-i386/Packages.xz"; + sha256 = "sha256-DO2LdpZ9rDDBhWj2gvDWd0TJJVZHxKsYTKTi6GXjm1E="; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + ubuntu2204x86_64 = { + name = "ubuntu-22.04-jammy-amd64"; + fullName = "Ubuntu 22.04 Jammy (amd64)"; + packagesLists = + [ (fetchurl { + url = "mirror://ubuntu/dists/jammy/main/binary-amd64/Packages.xz"; + sha256 = "sha256-N8tX8VVMv6ccWinun/7hipqMF4K7BWjgh0t/9M6PnBE="; + }) + (fetchurl { + url = "mirror://ubuntu/dists/jammy/universe/binary-amd64/Packages.xz"; + sha256 = "sha256-0pyyTJP+xfQyVXBrzn60bUd5lSA52MaKwbsUpvNlXOI="; + }) + ]; + urlPrefix = "mirror://ubuntu"; + packages = commonDebPackages ++ [ "diffutils" "libc-bin" ]; + }; + + debian10i386 = { + name = "debian-10.13-buster-i386"; + fullName = "Debian 10.13 Buster (i386)"; + packagesList = fetchurl { + url = "https://snapshot.debian.org/archive/debian/20221126T084953Z/dists/buster/main/binary-i386/Packages.xz"; + hash = "sha256-n9JquhtZgxw3qr9BX0MQoY3ZTIHN0dit+iru3DC31UY="; + }; + urlPrefix = "https://snapshot.debian.org/archive/debian/20221126T084953Z"; + packages = commonDebianPackages; + }; + + debian10x86_64 = { + name = "debian-10.13-buster-amd64"; + fullName = "Debian 10.13 Buster (amd64)"; + packagesList = fetchurl { + url = "https://snapshot.debian.org/archive/debian/20221126T084953Z/dists/buster/main/binary-amd64/Packages.xz"; + hash = "sha256-YukIIB3u87jgp9oudwklsxyKVKjSL618wFgDSXiFmjU="; + }; + urlPrefix = "https://snapshot.debian.org/archive/debian/20221126T084953Z"; + packages = commonDebianPackages; + }; + + debian11i386 = { + name = "debian-11.6-bullseye-i386"; + fullName = "Debian 11.6 Bullseye (i386)"; + packagesList = fetchurl { + url = "https://snapshot.debian.org/archive/debian/20230131T034648Z/dists/bullseye/main/binary-i386/Packages.xz"; + hash = "sha256-z9eG7RlvelEnZAaeCfIO+XxTZVL3d+zTA7ShU43l/pw="; + }; + urlPrefix = "https://snapshot.debian.org/archive/debian/20230131T034648Z"; + packages = commonDebianPackages; + }; + + debian11x86_64 = { + name = "debian-11.6-bullseye-amd64"; + fullName = "Debian 11.6 Bullseye (amd64)"; + packagesList = fetchurl { + url = "https://snapshot.debian.org/archive/debian/20230131T034648Z/dists/bullseye/main/binary-amd64/Packages.xz"; + hash = "sha256-mz0eCWdn6uWt40OxsSPheHzEnMeLE52yR/vpb48/VF0="; + }; 
+ urlPrefix = "https://snapshot.debian.org/archive/debian/20230131T034648Z"; + packages = commonDebianPackages; + }; + }; + + + /* Common packages for Fedora images. */ + commonFedoraPackages = [ + "autoconf" + "automake" + "basesystem" + "bzip2" + "curl" + "diffutils" + "fedora-release" + "findutils" + "gawk" + "gcc-c++" + "gzip" + "make" + "patch" + "perl" + "pkgconf-pkg-config" + "rpm" + "rpm-build" + "tar" + "unzip" + ]; + + commonCentOSPackages = [ + "autoconf" + "automake" + "basesystem" + "bzip2" + "curl" + "diffutils" + "centos-release" + "findutils" + "gawk" + "gcc-c++" + "gzip" + "make" + "patch" + "perl" + "pkgconfig" + "rpm" + "rpm-build" + "tar" + "unzip" + ]; + + commonRHELPackages = [ + "autoconf" + "automake" + "basesystem" + "bzip2" + "curl" + "diffutils" + "findutils" + "gawk" + "gcc-c++" + "gzip" + "make" + "patch" + "perl" + "pkgconfig" + "procps-ng" + "rpm" + "rpm-build" + "tar" + "unzip" + ]; + + /* Common packages for openSUSE images. */ + commonOpenSUSEPackages = [ + "aaa_base" + "autoconf" + "automake" + "bzip2" + "curl" + "diffutils" + "findutils" + "gawk" + "gcc-c++" + "gzip" + "make" + "patch" + "perl" + "pkg-config" + "rpm" + "tar" + "unzip" + "util-linux" + "gnu-getopt" + ]; + + + /* Common packages for Debian/Ubuntu images. */ + commonDebPackages = [ + "base-passwd" + "dpkg" + "libc6-dev" + "perl" + "bash" + "dash" + "gzip" + "bzip2" + "tar" + "grep" + "mawk" + "sed" + "findutils" + "g++" + "make" + "curl" + "patch" + "locales" + "coreutils" + # Needed by checkinstall: + "util-linux" + "file" + "dpkg-dev" + "pkg-config" + # Needed because it provides /etc/login.defs, whose absence causes + # the "passwd" post-installs script to fail. + "login" + "passwd" + ]; + + commonDebianPackages = commonDebPackages ++ [ "sysvinit" "diff" ]; + + + /* A set of functions that build the Linux distributions specified + in `rpmDistros' and `debDistros'. For instance, + `diskImageFuns.ubuntu1004x86_64 { }' builds an Ubuntu 10.04 disk + image containing the default packages specified above. Overrides + of the default image parameters can be given. In particular, + `extraPackages' specifies the names of additional packages from + the distribution that should be included in the image; `packages' + allows the entire set of packages to be overridden; and `size' + sets the size of the disk in megabytes. E.g., + `diskImageFuns.ubuntu1004x86_64 { extraPackages = ["firefox"]; + size = 8192; }' builds an 8 GiB image containing Firefox in + addition to the default packages. */ + diskImageFuns = + (lib.mapAttrs (name: as: as2: makeImageFromRPMDist (as // as2)) rpmDistros) // + (lib.mapAttrs (name: as: as2: makeImageFromDebDist (as // as2)) debDistros); + + + /* Shorthand for `diskImageFuns.<attr> { extraPackages = ... }'. */ + diskImageExtraFuns = + lib.mapAttrs (name: f: extraPackages: f { inherit extraPackages; }) diskImageFuns; + + + /* Default disk images generated from the `rpmDistros' and + `debDistros' sets. 
*/ + diskImages = lib.mapAttrs (name: f: f {}) diskImageFuns; + +} diff --git a/nixpkgs/pkgs/build-support/vm/rpm/rpm-closure.pl b/nixpkgs/pkgs/build-support/vm/rpm/rpm-closure.pl new file mode 100644 index 000000000000..6442cd91a957 --- /dev/null +++ b/nixpkgs/pkgs/build-support/vm/rpm/rpm-closure.pl @@ -0,0 +1,184 @@ +use strict; +use XML::Simple; +use List::Util qw(min); + +my @packagesFiles = (); +my @urlPrefixes = (); + +# rpm-closure.pl (<package-file> <url-prefix>)+ <toplevel-pkg>+ + +while(-f $ARGV[0]) { + my $packagesFile = shift @ARGV; + my $urlPrefix = shift @ARGV; + push(@packagesFiles, $packagesFile); + push(@urlPrefixes, $urlPrefix); +} + + +sub rpmvercmp { + my ($version1, $version2) = @_; + my @vercmps1 = split /\./, $version1; + my @vercmps2 = split /\./, $version2; + my $l1 = scalar(@vercmps1); + my $l2 = scalar(@vercmps2); + my $l = min($l1, $l2); + + for(my $i=0; $i<$l; $i++) { + my $v1 = $vercmps1[$i]; + my $v2 = $vercmps2[$i]; + + if($v1 =~ /^[0-9]*$/ && $v2 =~ /^[0-9]*$/) { + if ( int($v1) > int($v2) ) { + return 1; + } + elsif ( int($v1) < int($v2) ) { + return -1; + } + } else { + if ( $v1 gt $v2 ) { + return 1; + } + elsif ( $v1 lt $v2 ) { + return -1; + } + } + } + if($l1 == $l2) { + return 0; + } elsif ($l1 > $l2) { + return 1; + } elsif ($l1 < $l2) { + return -1; + } +} + +my @toplevelPkgs = @ARGV; + +my @archs = split ' ', ($ENV{'archs'} or ""); + +my %pkgs; +for (my $i = 0; $i < scalar(@packagesFiles); $i++) { + my $packagesFile = $packagesFiles[$i]; + print STDERR "parsing packages in $packagesFile...\n"; + + my $xml = XMLin($packagesFile, ForceArray => ['package', 'rpm:entry', 'file'], KeyAttr => []) or die; + + print STDERR "$packagesFile contains $xml->{packages} packages\n"; + + foreach my $pkg (@{$xml->{'package'}}) { + if (scalar @archs > 0) { + my $arch = $pkg->{arch}; + my $found = 0; + foreach my $a (@archs) { $found = 1 if $arch eq $a; } + next if !$found; + } + if (defined $pkgs{$pkg->{name}}) { + my $earlierPkg = $pkgs{$pkg->{name}}; + print STDERR "WARNING: duplicate occurrence of package $pkg->{name}\n"; + # <version epoch="0" ver="1.28.0" rel="2.el6"/> + my $cmp = rpmvercmp($pkg->{'version'}->{ver}, $earlierPkg->{'version'}->{ver}); + if ($cmp > 0 || ($cmp == 0 && rpmvercmp($pkg->{'version'}->{rel}, $earlierPkg->{'version'}->{rel})>0)) { + print STDERR "WARNING: replaced package $pkg->{name} (".$earlierPkg->{'version'}->{ver}." ".$earlierPkg->{'version'}->{rel}.") with newer one (".$pkg->{'version'}->{ver}." ".$pkg->{'version'}->{rel}.")\n"; + $pkg->{urlPrefix} = $urlPrefixes[$i]; + $pkgs{$pkg->{name}} = $pkg; + } + next; + } + $pkg->{urlPrefix} = $urlPrefixes[$i]; + $pkgs{$pkg->{name}} = $pkg; + } +} + +my %provides; +PKG: foreach my $pkgName (sort(keys %pkgs)) { + #print STDERR "looking at $pkgName\n"; + my $pkg = $pkgs{$pkgName}; + + # Skip packages that conflict with a required package. 
+ my $conflicts = $pkg->{format}->{'rpm:conflicts'}->{'rpm:entry'} // []; + foreach my $conflict (@{$conflicts}) { + next if $conflict->{flags} // "" eq "LT" || $conflict->{flags} // "" eq "LE"; + #print STDERR " $pkgName conflicts with $conflict->{name}\n"; + if (grep { $_ eq $conflict->{name} } @toplevelPkgs) { + print STDERR "skipping package $pkgName because it conflicts with a required package\n"; + next PKG; + } + } + + my $provides = $pkg->{format}->{'rpm:provides'}->{'rpm:entry'} or die; + foreach my $req (@{$provides}) { + #print STDERR " $pkgName provides $req->{name}\n"; + #die "multiple provides for $req->{name}" if defined $provides{$req->{name}}; + $provides{$req->{name}} = $pkgName; + } + + if (defined $pkg->{format}->{file}) { + foreach my $file (@{$pkg->{format}->{file}}) { + #print STDERR " provides file $file\n"; + $provides{$file} = $pkgName; + } + } +} + + +my %donePkgs; +my @needed = (); + +sub closePackage { + my $pkgName = shift; + + return if defined $donePkgs{$pkgName}; + $donePkgs{$pkgName} = 1; + + print STDERR ">>> $pkgName\n"; + + my $pkg = $pkgs{$pkgName} or die "package $pkgName doesn't exist"; + + my $requires = $pkg->{format}->{'rpm:requires'}->{'rpm:entry'} || []; + + my @deps = (); + foreach my $req (@{$requires}) { + next if $req->{name} =~ /^rpmlib\(/; + #print STDERR " needs $req->{name}\n"; + my $provider = $provides{$req->{name}}; + if (!defined $provider) { + print STDERR " WARNING: no provider for $req->{name}\n"; + next; + } + #print STDERR " satisfied by $provider\n"; + push @deps, $provider; + } + + closePackage($_) foreach @deps; + + push @needed, $pkgName; +} + + +foreach my $pkgName (@toplevelPkgs) { + closePackage $pkgName; +} + + +# Generate the output Nix expression. +print "# This is a generated file. Do not modify!\n"; +print "# Following are the RPM packages constituting the closure of: @toplevelPkgs\n\n"; +print "{fetchurl}:\n\n"; +print "[\n\n"; + +foreach my $pkgName (@needed) { + my $pkg = $pkgs{$pkgName}; + print " (fetchurl {\n"; + print " url = $pkg->{urlPrefix}/$pkg->{location}->{href};\n"; + if ($pkg->{checksum}->{type} eq "sha") { + print " sha1 = \"$pkg->{checksum}->{content}\";\n"; + } elsif ($pkg->{checksum}->{type} eq "sha256") { + print " sha256 = \"$pkg->{checksum}->{content}\";\n"; + } else { + die "unsupported hash type"; + } + print " })\n"; + print "\n"; +} + +print "]\n"; diff --git a/nixpkgs/pkgs/build-support/vm/test.nix b/nixpkgs/pkgs/build-support/vm/test.nix new file mode 100644 index 000000000000..ae6a10dea3b9 --- /dev/null +++ b/nixpkgs/pkgs/build-support/vm/test.nix @@ -0,0 +1,42 @@ +with import ../../.. { }; +with vmTools; + +{ + + + # Run the PatchELF derivation in a VM. 
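+  # (More generally, runInLinuxVM can wrap any derivation.  A hypothetical
+  # variant, runInLinuxVM (patchelf.overrideAttrs (old: { memSize = 1024; })),
+  # would give the QEMU guest 1 GiB of memory instead of the default.)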
+ buildPatchelfInVM = runInLinuxVM patchelf; + + buildHelloInVM = runInLinuxVM hello; + + buildPcmanrmInVM = runInLinuxVM (pcmanfm.overrideAttrs (old: { + # goes out-of-memory with many cores + enableParallelBuilding = false; + })); + + testRPMImage = makeImageTestScript diskImages.fedora27x86_64; + + + buildPatchelfRPM = buildRPM { + name = "patchelf-rpm"; + src = patchelf.src; + diskImage = diskImages.fedora27x86_64; + diskImageFormat = "qcow2"; + }; + + + testUbuntuImage = makeImageTestScript diskImages.ubuntu1804i386; + + + buildInDebian = runInLinuxImage (stdenv.mkDerivation { + name = "deb-compile"; + src = patchelf.src; + diskImage = diskImages.ubuntu1804i386; + diskImageFormat = "qcow2"; + memSize = 512; + postHook = '' + dpkg-query --list + ''; + }); + +} diff --git a/nixpkgs/pkgs/build-support/wrapper-common/utils.bash b/nixpkgs/pkgs/build-support/wrapper-common/utils.bash new file mode 100644 index 000000000000..2faf96df15b4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/wrapper-common/utils.bash @@ -0,0 +1,171 @@ +# Accumulate suffixes for taking in the right input parameters with the `mangle*` +# functions below. See setup-hook for details. +accumulateRoles() { + declare -ga role_suffixes=() + if [ "${NIX_@wrapperName@_TARGET_BUILD_@suffixSalt@:-}" ]; then + role_suffixes+=('_FOR_BUILD') + fi + if [ "${NIX_@wrapperName@_TARGET_HOST_@suffixSalt@:-}" ]; then + role_suffixes+=('') + fi + if [ "${NIX_@wrapperName@_TARGET_TARGET_@suffixSalt@:-}" ]; then + role_suffixes+=('_FOR_TARGET') + fi +} + +mangleVarListGeneric() { + local sep="$1" + shift + local var="$1" + shift + local -a role_suffixes=("$@") + + local outputVar="${var}_@suffixSalt@" + declare -gx "$outputVar"+='' + # For each role we serve, we accumulate the input parameters into our own + # cc-wrapper-derivation-specific environment variables. + for suffix in "${role_suffixes[@]}"; do + local inputVar="${var}${suffix}" + if [ -v "$inputVar" ]; then + export "${outputVar}+=${!outputVar:+$sep}${!inputVar}" + fi + done +} + +mangleVarList() { + mangleVarListGeneric " " "$@" +} + +mangleVarBool() { + local var="$1" + shift + local -a role_suffixes=("$@") + + local outputVar="${var}_@suffixSalt@" + declare -gxi "${outputVar}+=0" + for suffix in "${role_suffixes[@]}"; do + local inputVar="${var}${suffix}" + if [ -v "$inputVar" ]; then + # "1" in the end makes `let` return success error code when + # expression itself evaluates to zero. + # We don't use `|| true` because that would silence actual + # syntax errors from bad variable values. + let "${outputVar} |= ${!inputVar:-0}" "1" + fi + done +} + +# Combine a singular value from all roles. If multiple roles are being served, +# and the value differs in these roles then the request is impossible to +# satisfy and we abort immediately. 
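+# (Hypothetical example: with both the '' and '_FOR_BUILD' role suffixes
+# active, FOO=1 together with FOO_FOR_BUILD=2 cannot be collapsed into a
+# single FOO_@suffixSalt@, so the wrapper exits with an error; FOO=1 with
+# FOO_FOR_BUILD=1 merges cleanly.)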
+mangleVarSingle() { + local var="$1" + shift + local -a role_suffixes=("$@") + + local outputVar="${var}_@suffixSalt@" + for suffix in "${role_suffixes[@]}"; do + local inputVar="${var}${suffix}" + if [ -v "$inputVar" ]; then + if [ -v "$outputVar" ]; then + if [ "${!outputVar}" != "${!inputVar}" ]; then + { + echo "Multiple conflicting values defined for $outputVar" + echo "Existing value is ${!outputVar}" + echo "Attempting to set to ${!inputVar} via $inputVar" + } >&2 + + exit 1 + fi + else + declare -gx ${outputVar}="${!inputVar}" + fi + fi + done +} + +skip() { + if (( "${NIX_DEBUG:-0}" >= 1 )); then + echo "skipping impure path $1" >&2 + fi +} + +reject() { + echo "impure path \`$1' used in link" >&2 + exit 1 +} + + +# Checks whether a path is impure. E.g., `/lib/foo.so' is impure, but +# `/nix/store/.../lib/foo.so' isn't. +badPath() { + local p=$1 + + # Relative paths are okay (since they're presumably relative to + # the temporary build directory). + if [ "${p:0:1}" != / ]; then return 1; fi + + # Otherwise, the path should refer to the store or some temporary + # directory (including the build directory). + test \ + "$p" != "/dev/null" -a \ + "${p#"${NIX_STORE}"}" = "$p" -a \ + "${p#"${NIX_BUILD_TOP}"}" = "$p" -a \ + "${p#/tmp}" = "$p" -a \ + "${p#"${TMP:-/tmp}"}" = "$p" -a \ + "${p#"${TMPDIR:-/tmp}"}" = "$p" -a \ + "${p#"${TEMP:-/tmp}"}" = "$p" -a \ + "${p#"${TEMPDIR:-/tmp}"}" = "$p" +} + +expandResponseParams() { + declare -ga params=("$@") + local arg + for arg in "$@"; do + if [[ "$arg" == @* ]]; then + # phase separation makes this look useless + # shellcheck disable=SC2157 + if [ -x "@expandResponseParams@" ]; then + # params is used by caller + #shellcheck disable=SC2034 + readarray -d '' params < <("@expandResponseParams@" "$@") + return 0 + fi + fi + done +} + +checkLinkType() { + local arg + type="dynamic" + for arg in "$@"; do + if [[ "$arg" = -static ]]; then + type="static" + elif [[ "$arg" = -static-pie ]]; then + type="static-pie" + fi + done + echo "$type" +} + +# When building static-pie executables we cannot have rpath +# set. At least glibc requires rpath to be empty +filterRpathFlags() { + local linkType=$1 ret i + shift + + if [[ "$linkType" == "static-pie" ]]; then + while [[ "$#" -gt 0 ]]; do + i="$1"; shift 1 + if [[ "$i" == -rpath ]]; then + # also skip its argument + shift + else + ret+=("$i") + fi + done + else + ret=("$@") + fi + echo "${ret[@]}" +} diff --git a/nixpkgs/pkgs/build-support/writers/aliases.nix b/nixpkgs/pkgs/build-support/writers/aliases.nix new file mode 100644 index 000000000000..fb108a6fd857 --- /dev/null +++ b/nixpkgs/pkgs/build-support/writers/aliases.nix @@ -0,0 +1,35 @@ +lib: prev: + +let + # Removing recurseForDerivation prevents derivations of aliased attribute + # set to appear while listing all the packages available. + removeRecurseForDerivations = alias: with lib; + if alias.recurseForDerivations or false then + removeAttrs alias ["recurseForDerivations"] + else alias; + + # Disabling distribution prevents top-level aliases for non-recursed package + # sets from building on Hydra. + removeDistribute = alias: with lib; + if isDerivation alias then + dontDistribute alias + else alias; + + # Make sure that we are not shadowing something from + # writers. 
+ checkInPkgs = n: alias: if builtins.hasAttr n prev + then throw "Alias ${n} is still in writers" + else alias; + + mapAliases = aliases: + lib.mapAttrs (n: alias: removeDistribute + (removeRecurseForDerivations + (checkInPkgs n alias))) + aliases; + +in +mapAliases ({ + /* Cleanup before 22.05, Added 2021-12-11 */ + writePython2 = "Python 2 is EOL and the use of writers.writePython2 is deprecated."; + writePython2Bin = "Python 2 is EOL and the use of writers.writePython2Bin is deprecated."; +}) diff --git a/nixpkgs/pkgs/build-support/writers/data.nix b/nixpkgs/pkgs/build-support/writers/data.nix new file mode 100644 index 000000000000..45ed5360eaeb --- /dev/null +++ b/nixpkgs/pkgs/build-support/writers/data.nix @@ -0,0 +1,64 @@ +{ lib, pkgs, formats, runCommand, dasel }: +let + daselBin = lib.getExe dasel; + + inherit (lib) + last + optionalString + types + ; +in +rec { + # Creates a transformer function that writes input data to disk, transformed + # by both the `input` and `output` arguments. + # + # Type: makeDataWriter :: input -> output -> nameOrPath -> data -> (any -> string) -> string -> string -> any -> derivation + # + # input :: T -> string: function that takes the nix data and returns a string + # output :: string: script that takes the $inputFile and write the result into $out + # nameOrPath :: string: if the name contains a / the files gets written to a sub-folder of $out. The derivation name is the basename of this argument. + # data :: T: the data that will be converted. + # + # Example: + # writeJSON = makeDataWriter { input = builtins.toJSON; output = "cp $inputPath $out"; }; + # myConfig = writeJSON "config.json" { hello = "world"; } + # + makeDataWriter = lib.warn "pkgs.writers.makeDataWriter is deprecated. Use pkgs.writeTextFile." ({ input ? lib.id, output ? "cp $inputPath $out" }: nameOrPath: data: + assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null); + let + name = last (builtins.split "/" nameOrPath); + in + runCommand name + { + input = input data; + passAsFile = [ "input" ]; + } '' + ${output} + + ${optionalString (types.path.check nameOrPath) '' + mv $out tmp + mkdir -p $out/$(dirname "${nameOrPath}") + mv tmp $out/${nameOrPath} + ''} + ''); + + inherit (pkgs) writeText; + + # Writes the content to a JSON file. + # + # Example: + # writeJSON "data.json" { hello = "world"; } + writeJSON = (pkgs.formats.json {}).generate; + + # Writes the content to a TOML file. + # + # Example: + # writeTOML "data.toml" { hello = "world"; } + writeTOML = (pkgs.formats.toml {}).generate; + + # Writes the content to a YAML file. 
+ # + # Example: + # writeYAML "data.yaml" { hello = "world"; } + writeYAML = (pkgs.formats.yaml {}).generate; +} diff --git a/nixpkgs/pkgs/build-support/writers/default.nix b/nixpkgs/pkgs/build-support/writers/default.nix new file mode 100644 index 000000000000..a161322cd35b --- /dev/null +++ b/nixpkgs/pkgs/build-support/writers/default.nix @@ -0,0 +1,14 @@ +{ config, lib, callPackages }: + +let + aliases = if config.allowAliases then (import ./aliases.nix lib) else prev: {}; + + # Writers for JSON-like data structures + dataWriters = callPackages ./data.nix { }; + + # Writers for scripts + scriptWriters = callPackages ./scripts.nix { }; + + writers = scriptWriters // dataWriters; +in +writers // (aliases writers) diff --git a/nixpkgs/pkgs/build-support/writers/scripts.nix b/nixpkgs/pkgs/build-support/writers/scripts.nix new file mode 100644 index 000000000000..184ecee68777 --- /dev/null +++ b/nixpkgs/pkgs/build-support/writers/scripts.nix @@ -0,0 +1,379 @@ +{ pkgs, buildPackages, lib, stdenv, libiconv, mkNugetDeps, mkNugetSource, gixy }: +let + inherit (lib) + concatMapStringsSep + elem + escapeShellArg + last + optionalString + stringLength + strings + types + ; +in +rec { + # Base implementation for non-compiled executables. + # Takes an interpreter, for example `${pkgs.bash}/bin/bash` + # + # Examples: + # writeBash = makeScriptWriter { interpreter = "${pkgs.bash}/bin/bash"; } + # makeScriptWriter { interpreter = "${pkgs.dash}/bin/dash"; } "hello" "echo hello world" + makeScriptWriter = { interpreter, check ? "" }: nameOrPath: content: + assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null); + assert lib.or (types.path.check content) (types.str.check content); + let + name = last (builtins.split "/" nameOrPath); + in + + pkgs.runCommandLocal name ( + lib.optionalAttrs (nameOrPath == "/bin/${name}") { + meta.mainProgram = name; + } + // ( + if (types.str.check content) then { + inherit content interpreter; + passAsFile = [ "content" ]; + } else { + inherit interpreter; + contentPath = content; + } + ) + ) + '' + # On darwin a script cannot be used as an interpreter in a shebang but + # there doesn't seem to be a limit to the size of shebang and multiple + # arguments to the interpreter are allowed. + if [[ -n "${toString pkgs.stdenvNoCC.isDarwin}" ]] && isScript $interpreter + then + wrapperInterpreterLine=$(head -1 "$interpreter" | tail -c+3) + # Get first word from the line (note: xargs echo remove leading spaces) + wrapperInterpreter=$(echo "$wrapperInterpreterLine" | xargs echo | cut -d " " -f1) + + if isScript $wrapperInterpreter + then + echo "error: passed interpreter ($interpreter) is a script which has another script ($wrapperInterpreter) as an interpreter, which is not supported." + exit 1 + fi + + # This should work as long as wrapperInterpreter is a shell, which is + # the case for programs wrapped with makeWrapper, like + # python3.withPackages etc. + interpreterLine="$wrapperInterpreterLine $interpreter" + else + interpreterLine=$interpreter + fi + + echo "#! $interpreterLine" > $out + cat "$contentPath" >> $out + ${optionalString (check != "") '' + ${check} $out + ''} + chmod +x $out + ${optionalString (types.path.check nameOrPath) '' + mv $out tmp + mkdir -p $out/$(dirname "${nameOrPath}") + mv tmp $out/${nameOrPath} + ''} + ''; + + # Base implementation for compiled executables. + # Takes a compile script, which in turn takes the name as an argument. 
+ # + # Examples: + # writeSimpleC = makeBinWriter { compileScript = name: "gcc -o $out $contentPath"; } + makeBinWriter = { compileScript, strip ? true }: nameOrPath: content: + assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null); + assert lib.or (types.path.check content) (types.str.check content); + let + name = last (builtins.split "/" nameOrPath); + in + pkgs.runCommand name ((if (types.str.check content) then { + inherit content; + passAsFile = [ "content" ]; + } else { + contentPath = content; + }) // lib.optionalAttrs (nameOrPath == "/bin/${name}") { + meta.mainProgram = name; + }) '' + ${compileScript} + ${lib.optionalString strip + "${lib.getBin buildPackages.bintools-unwrapped}/bin/${buildPackages.bintools-unwrapped.targetPrefix}strip -S $out"} + # Sometimes binaries produced for darwin (e. g. by GHC) won't be valid + # mach-o executables from the get-go, but need to be corrected somehow + # which is done by fixupPhase. + ${lib.optionalString pkgs.stdenvNoCC.hostPlatform.isDarwin "fixupPhase"} + ${optionalString (types.path.check nameOrPath) '' + mv $out tmp + mkdir -p $out/$(dirname "${nameOrPath}") + mv tmp $out/${nameOrPath} + ''} + ''; + + # Like writeScript but the first line is a shebang to bash + # + # Example: + # writeBash "example" '' + # echo hello world + # '' + writeBash = makeScriptWriter { + interpreter = "${pkgs.bash}/bin/bash"; + }; + + # Like writeScriptBin but the first line is a shebang to bash + writeBashBin = name: + writeBash "/bin/${name}"; + + # Like writeScript but the first line is a shebang to dash + # + # Example: + # writeDash "example" '' + # echo hello world + # '' + writeDash = makeScriptWriter { + interpreter = "${pkgs.dash}/bin/dash"; + }; + + # Like writeScriptBin but the first line is a shebang to dash + writeDashBin = name: + writeDash "/bin/${name}"; + + # Like writeScript but the first line is a shebang to fish + # + # Example: + # writeFish "example" '' + # echo hello world + # '' + writeFish = makeScriptWriter { + interpreter = "${pkgs.fish}/bin/fish --no-config"; + check = "${pkgs.fish}/bin/fish --no-config --no-execute"; # syntax check only + }; + + # Like writeScriptBin but the first line is a shebang to fish + writeFishBin = name: + writeFish "/bin/${name}"; + + # writeHaskell takes a name, an attrset with libraries and haskell version (both optional) + # and some haskell source code and returns an executable. + # + # Example: + # writeHaskell "missiles" { libraries = [ pkgs.haskellPackages.acme-missiles ]; } '' + # import Acme.Missiles + # + # main = launchMissiles + # ''; + writeHaskell = name: { + libraries ? [], + ghc ? pkgs.ghc, + ghcArgs ? [], + threadedRuntime ? true, + strip ? true + }: + let + appendIfNotSet = el: list: if elem el list then list else list ++ [ el ]; + ghcArgs' = if threadedRuntime then appendIfNotSet "-threaded" ghcArgs else ghcArgs; + + in makeBinWriter { + compileScript = '' + cp $contentPath tmp.hs + ${ghc.withPackages (_: libraries )}/bin/ghc ${lib.escapeShellArgs ghcArgs'} tmp.hs + mv tmp $out + ''; + inherit strip; + } name; + + # writeHaskellBin takes the same arguments as writeHaskell but outputs a directory (like writeScriptBin) + writeHaskellBin = name: + writeHaskell "/bin/${name}"; + + writeRust = name: { + rustc ? pkgs.rustc, + rustcArgs ? [], + strip ? 
true + }: + let + darwinArgs = lib.optionals stdenv.isDarwin [ "-L${lib.getLib libiconv}/lib" ]; + in + makeBinWriter { + compileScript = '' + cp "$contentPath" tmp.rs + PATH=${lib.makeBinPath [pkgs.gcc]} ${lib.getBin rustc}/bin/rustc ${lib.escapeShellArgs rustcArgs} ${lib.escapeShellArgs darwinArgs} -o "$out" tmp.rs + ''; + inherit strip; + } name; + + writeRustBin = name: + writeRust "/bin/${name}"; + + # writeJS takes a name an attributeset with libraries and some JavaScript sourcecode and + # returns an executable + # + # Example: + # writeJS "example" { libraries = [ pkgs.nodePackages.uglify-js ]; } '' + # var UglifyJS = require("uglify-js"); + # var code = "function add(first, second) { return first + second; }"; + # var result = UglifyJS.minify(code); + # console.log(result.code); + # '' + writeJS = name: { libraries ? [] }: content: + let + node-env = pkgs.buildEnv { + name = "node"; + paths = libraries; + pathsToLink = [ + "/lib/node_modules" + ]; + }; + in writeDash name '' + export NODE_PATH=${node-env}/lib/node_modules + exec ${pkgs.nodejs}/bin/node ${pkgs.writeText "js" content} "$@" + ''; + + # writeJSBin takes the same arguments as writeJS but outputs a directory (like writeScriptBin) + writeJSBin = name: + writeJS "/bin/${name}"; + + awkFormatNginx = builtins.toFile "awkFormat-nginx.awk" '' + awk -f + {sub(/^[ \t]+/,"");idx=0} + /\{/{ctx++;idx=1} + /\}/{ctx--} + {id="";for(i=idx;i<ctx;i++)id=sprintf("%s%s", id, "\t");printf "%s%s\n", id, $0} + ''; + + writeNginxConfig = name: text: pkgs.runCommandLocal name { + inherit text; + passAsFile = [ "text" ]; + nativeBuildInputs = [ gixy ]; + } /* sh */ '' + # nginx-config-formatter has an error - https://github.com/1connect/nginx-config-formatter/issues/16 + awk -f ${awkFormatNginx} "$textPath" | sed '/^\s*$/d' > $out + gixy $out + ''; + + # writePerl takes a name an attributeset with libraries and some perl sourcecode and + # returns an executable + # + # Example: + # writePerl "example" { libraries = [ pkgs.perlPackages.boolean ]; } '' + # use boolean; + # print "Howdy!\n" if true; + # '' + writePerl = name: { libraries ? [] }: + makeScriptWriter { + interpreter = "${pkgs.perl.withPackages (p: libraries)}/bin/perl"; + } name; + + # writePerlBin takes the same arguments as writePerl but outputs a directory (like writeScriptBin) + writePerlBin = name: + writePerl "/bin/${name}"; + + # makePythonWriter takes python and compatible pythonPackages and produces python script writer, + # which validates the script with flake8 at build time. If any libraries are specified, + # python.withPackages is used as interpreter, otherwise the "bare" python is used. + makePythonWriter = python: pythonPackages: buildPythonPackages: name: { libraries ? [], flakeIgnore ? 
[] }: + let + ignoreAttribute = optionalString (flakeIgnore != []) "--ignore ${concatMapStringsSep "," escapeShellArg flakeIgnore}"; + in + makeScriptWriter { + interpreter = + if libraries == [] + then python.interpreter + else (python.withPackages (ps: libraries)).interpreter + ; + check = optionalString python.isPy3k (writeDash "pythoncheck.sh" '' + exec ${buildPythonPackages.flake8}/bin/flake8 --show-source ${ignoreAttribute} "$1" + ''); + } name; + + # writePyPy2 takes a name an attributeset with libraries and some pypy2 sourcecode and + # returns an executable + # + # Example: + # writePyPy2 "test_pypy2" { libraries = [ pkgs.pypy2Packages.enum ]; } '' + # from enum import Enum + # + # class Test(Enum): + # a = "success" + # + # print Test.a + # '' + writePyPy2 = makePythonWriter pkgs.pypy2 pkgs.pypy2Packages buildPackages.pypy2Packages; + + # writePyPy2Bin takes the same arguments as writePyPy2 but outputs a directory (like writeScriptBin) + writePyPy2Bin = name: + writePyPy2 "/bin/${name}"; + + # writePython3 takes a name an attributeset with libraries and some python3 sourcecode and + # returns an executable + # + # Example: + # writePython3 "test_python3" { libraries = [ pkgs.python3Packages.pyyaml ]; } '' + # import yaml + # + # y = yaml.load(""" + # - test: success + # """) + # print(y[0]['test']) + # '' + writePython3 = makePythonWriter pkgs.python3 pkgs.python3Packages buildPackages.python3Packages; + + # writePython3Bin takes the same arguments as writePython3 but outputs a directory (like writeScriptBin) + writePython3Bin = name: + writePython3 "/bin/${name}"; + + # writePyPy3 takes a name an attributeset with libraries and some pypy3 sourcecode and + # returns an executable + # + # Example: + # writePyPy3 "test_pypy3" { libraries = [ pkgs.pypy3Packages.pyyaml ]; } '' + # import yaml + # + # y = yaml.load(""" + # - test: success + # """) + # print(y[0]['test']) + # '' + writePyPy3 = makePythonWriter pkgs.pypy3 pkgs.pypy3Packages buildPackages.pypy3Packages; + + # writePyPy3Bin takes the same arguments as writePyPy3 but outputs a directory (like writeScriptBin) + writePyPy3Bin = name: + writePyPy3 "/bin/${name}"; + + + makeFSharpWriter = { dotnet-sdk ? pkgs.dotnet-sdk, fsi-flags ? "", libraries ? 
_: [] }: nameOrPath: + let + fname = last (builtins.split "/" nameOrPath); + path = if strings.hasSuffix ".fsx" nameOrPath then nameOrPath else "${nameOrPath}.fsx"; + _nugetDeps = mkNugetDeps { name = "${fname}-nuget-deps"; nugetDeps = libraries; }; + + nuget-source = mkNugetSource { + name = "${fname}-nuget-source"; + description = "A Nuget source with the dependencies for ${fname}"; + deps = [ _nugetDeps ]; + }; + + fsi = writeBash "fsi" '' + export HOME=$NIX_BUILD_TOP/.home + export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 + export DOTNET_CLI_TELEMETRY_OPTOUT=1 + export DOTNET_NOLOGO=1 + script="$1"; shift + ${dotnet-sdk}/bin/dotnet fsi --quiet --nologo --readline- ${fsi-flags} "$@" < "$script" + ''; + + in content: makeScriptWriter { + interpreter = fsi; + } path + '' + #i "nuget: ${nuget-source}/lib" + ${ content } + exit 0 + ''; + + writeFSharp = + makeFSharpWriter {}; + + writeFSharpBin = name: + writeFSharp "/bin/${name}"; + +} diff --git a/nixpkgs/pkgs/build-support/writers/test.nix b/nixpkgs/pkgs/build-support/writers/test.nix new file mode 100644 index 000000000000..005daf0be5b4 --- /dev/null +++ b/nixpkgs/pkgs/build-support/writers/test.nix @@ -0,0 +1,268 @@ +{ glib +, haskellPackages +, lib +, nodePackages +, perlPackages +, pypy2Packages +, python3Packages +, pypy3Packages +, runCommand +, testers +, writers +, writeText +}: +with writers; +let + expectSuccess = test: + runCommand "run-${test.name}" {} '' + if [[ "$(${test})" != success ]]; then + echo 'test ${test.name} failed' + exit 1 + fi + + touch $out + ''; + + expectSuccessBin = test: + runCommand "run-${test.name}" {} '' + if [[ "$(${lib.getExe test})" != success ]]; then + echo 'test ${test.name} failed' + exit 1 + fi + + touch $out + ''; + + expectDataEqual = { file, expected }: + let + expectedFile = writeText "${file.name}-expected" expected; + in + testers.testEqualContents { expected = expectedFile; actual = file; assertion = "${file.name} matches"; }; +in +lib.recurseIntoAttrs { + bin = lib.recurseIntoAttrs { + bash = expectSuccessBin (writeBashBin "test-writers-bash-bin" '' + if [[ "test" == "test" ]]; then echo "success"; fi + ''); + + dash = expectSuccessBin (writeDashBin "test-writers-dash-bin" '' + test '~' = '~' && echo 'success' + ''); + + fish = expectSuccessBin (writeFishBin "test-writers-fish-bin" '' + if test "test" = "test" + echo "success" + end + ''); + + rust = expectSuccessBin (writeRustBin "test-writers-rust-bin" {} '' + fn main(){ + println!("success") + } + ''); + + haskell = expectSuccessBin (writeHaskellBin "test-writers-haskell-bin" { libraries = [ haskellPackages.acme-default ]; } '' + import Data.Default + + int :: Int + int = def + + main :: IO () + main = case int of + 18871 -> putStrLn $ id "success" + _ -> print "fail" + ''); + + js = expectSuccessBin (writeJSBin "test-writers-js-bin" { libraries = [ nodePackages.semver ]; } '' + var semver = require('semver'); + + if (semver.valid('1.2.3')) { + console.log('success') + } else { + console.log('fail') + } + ''); + + perl = expectSuccessBin (writePerlBin "test-writers-perl-bin" { libraries = [ perlPackages.boolean ]; } '' + use boolean; + print "success\n" if true; + ''); + + pypy2 = expectSuccessBin (writePyPy2Bin "test-writers-pypy2-bin" { libraries = [ pypy2Packages.enum ]; } '' + from enum import Enum + + class Test(Enum): + a = "success" + + print Test.a + ''); + + python3 = expectSuccessBin (writePython3Bin "test-writers-python3-bin" { libraries = [ python3Packages.pyyaml ]; } '' + import yaml + + y = yaml.safe_load(""" + - test: 
success + """) + print(y[0]['test']) + ''); + + pypy3 = expectSuccessBin (writePyPy3Bin "test-writers-pypy3-bin" { libraries = [ pypy3Packages.pyyaml ]; } '' + import yaml + + y = yaml.safe_load(""" + - test: success + """) + print(y[0]['test']) + ''); + }; + + simple = lib.recurseIntoAttrs { + bash = expectSuccess (writeBash "test-writers-bash" '' + if [[ "test" == "test" ]]; then echo "success"; fi + ''); + + dash = expectSuccess (writeDash "test-writers-dash" '' + test '~' = '~' && echo 'success' + ''); + + fish = expectSuccess (writeFish "test-writers-fish" '' + if test "test" = "test" + echo "success" + end + ''); + + haskell = expectSuccess (writeHaskell "test-writers-haskell" { libraries = [ haskellPackages.acme-default ]; } '' + import Data.Default + + int :: Int + int = def + + main :: IO () + main = case int of + 18871 -> putStrLn $ id "success" + _ -> print "fail" + ''); + + js = expectSuccess (writeJS "test-writers-js" { libraries = [ nodePackages.semver ]; } '' + var semver = require('semver'); + + if (semver.valid('1.2.3')) { + console.log('success') + } else { + console.log('fail') + } + ''); + + perl = expectSuccess (writePerl "test-writers-perl" { libraries = [ perlPackages.boolean ]; } '' + use boolean; + print "success\n" if true; + ''); + + pypy2 = expectSuccess (writePyPy2 "test-writers-pypy2" { libraries = [ pypy2Packages.enum ]; } '' + from enum import Enum + + class Test(Enum): + a = "success" + + print Test.a + ''); + + python3 = expectSuccess (writePython3 "test-writers-python3" { libraries = [ python3Packages.pyyaml ]; } '' + import yaml + + y = yaml.safe_load(""" + - test: success + """) + print(y[0]['test']) + ''); + + pypy3 = expectSuccess (writePyPy3 "test-writers-pypy3" { libraries = [ pypy3Packages.pyyaml ]; } '' + import yaml + + y = yaml.safe_load(""" + - test: success + """) + print(y[0]['test']) + ''); + + fsharp = expectSuccess (makeFSharpWriter { + libraries = { fetchNuGet }: [ + (fetchNuGet { pname = "FSharp.SystemTextJson"; version = "0.17.4"; sha256 = "1bplzc9ybdqspii4q28l8gmfvzpkmgq5l1hlsiyg2h46w881lwg2"; }) + ]; + } "test-writers-fsharp" '' + #r "nuget: FSharp.SystemTextJson, 0.17.4" + + module Json = + open System.Text.Json + open System.Text.Json.Serialization + let options = JsonSerializerOptions() + options.Converters.Add(JsonFSharpConverter()) + let serialize<'a> (o: 'a) = JsonSerializer.Serialize<'a>(o, options) + let deserialize<'a> (str: string) = JsonSerializer.Deserialize<'a>(str, options) + + type Letter = A | B + let a = {| Hello = Some "World"; Letter = A |} + if a |> Json.serialize |> Json.deserialize |> (=) a + then "success" + else "failed" + |> printfn "%s" + ''); + + pypy2NoLibs = expectSuccess (writePyPy2 "test-writers-pypy2-no-libs" {} '' + print("success") + ''); + + python3NoLibs = expectSuccess (writePython3 "test-writers-python3-no-libs" {} '' + print("success") + ''); + + pypy3NoLibs = expectSuccess (writePyPy3 "test-writers-pypy3-no-libs" {} '' + print("success") + ''); + + fsharpNoNugetDeps = expectSuccess (writeFSharp "test-writers-fsharp-no-nuget-deps" '' + printfn "success" + ''); + }; + + path = lib.recurseIntoAttrs { + bash = expectSuccess (writeBash "test-writers-bash-path" (writeText "test" '' + if [[ "test" == "test" ]]; then echo "success"; fi + '')); + + haskell = expectSuccess (writeHaskell "test-writers-haskell-path" { libraries = [ haskellPackages.acme-default ]; } (writeText "test" '' + import Data.Default + + int :: Int + int = def + + main :: IO () + main = case int of + 18871 -> putStrLn $ id "success" 
+ _ -> print "fail" + '')); + }; + + data = { + json = expectDataEqual { + file = writeJSON "data.json" { hello = "world"; }; + expected = '' + { + "hello": "world" + } + ''; + }; + + toml = expectDataEqual { + file = writeTOML "data.toml" { hello = "world"; }; + expected = '' + hello = "world" + ''; + }; + + yaml = expectDataEqual { + file = writeYAML "data.yaml" { hello = "world"; }; + expected = "hello: world\n"; + }; + }; +} |