#!/usr/bin/env nix-shell
#! nix-shell --pure
#! nix-shell -i bash
#! nix-shell -p curl cacert
#! nix-shell -p git
#! nix-shell -p nix
#! nix-shell -p jq

# How the refresher works:
#
# For a given list of targets:
#
# 1. fetch the latest successful '.build' job
# 2. fetch the oldest evaluation that contained that '.build', extract the nixpkgs commit
# 3. fetch all the '.build' artifacts from the '$out/on-server/' directory
# 4. calculate hashes and craft the commit message with the details on
#    how to upload the result to 'tarballs.nixos.org'

usage() {
    cat >&2 <<EOF
Usage:
    $0 [--commit] --targets=<target>[,<target>,...]
    $0 [--commit] --all-targets

The tool must be run from the root directory of the 'nixpkgs' repository.

Synopsis:
    The 'refresh-tarballs.bash' script fetches the latest bootstrapFiles
    built by hydra, registers them in 'nixpkgs' and provides commands to
    upload the seed files to 'tarballs.nixos.org'.

    This is usually done in the following cases:

    1. Single target fix: current bootstrap files for a single target
       are problematic for some reason (target-specific bug). In this
       case we can refresh just that target as:

       \$ $0 --commit --targets=i686-unknown-linux-gnu

    2. Routine refresh: all bootstrap files should be refreshed to avoid
       debugging problems that only occur on very old binaries.

       \$ $0 --commit --all-targets

To get help on uploading the refreshed binaries to 'tarballs.nixos.org'
please have a look at <maintainers/scripts/bootstrap-files/README.md>.
EOF
    exit 1
}

# log helpers

die() {
    echo "ERROR: $*" >&2
    exit 1
}

info() {
    echo "INFO: $*" >&2
}

[[ ${#@} -eq 0 ]] && usage

# known targets

NATIVE_TARGETS=(
    aarch64-unknown-linux-gnu
    aarch64-unknown-linux-musl
    i686-unknown-linux-gnu
    x86_64-unknown-linux-gnu
    x86_64-unknown-linux-musl

    # TODO: add darwin here once a few prerequisites are satisfied:
    # - bootstrap-files are factored out into a separate file
    # - the build artifacts are factored out into an `on-server`
    #   directory. Right now it does not match the `linux` layout.
    #
    #aarch64-apple-darwin
    #x86_64-apple-darwin
)

is_native() {
    local t target=$1
    for t in "${NATIVE_TARGETS[@]}"; do
        [[ $t == $target ]] && return 0
    done
    return 1
}

CROSS_TARGETS=(
    armv5tel-unknown-linux-gnueabi
    armv6l-unknown-linux-gnueabihf
    armv6l-unknown-linux-musleabihf
    armv7l-unknown-linux-gnueabihf
    mips64el-unknown-linux-gnuabi64
    mips64el-unknown-linux-gnuabin32
    mipsel-unknown-linux-gnu
    powerpc64le-unknown-linux-gnu
    riscv64-unknown-linux-gnu
)

is_cross() {
    local t target=$1
    for t in "${CROSS_TARGETS[@]}"; do
        [[ $t == $target ]] && return 0
    done
    return 1
}

# collect passed options

targets=()
commit=no

for arg in "$@"; do
    case "$arg" in
        --all-targets)
            targets+=( "${CROSS_TARGETS[@]}" "${NATIVE_TARGETS[@]}" )
            ;;
        --targets=*)
            # Convert "--targets=a,b,c" to the targets=(a b c) bash array.
            comma_targets=${arg#--targets=}
            targets+=(${comma_targets//,/ })
            ;;
        --commit)
            commit=yes
            ;;
        *)
            usage
            ;;
    esac
done

for target in "${targets[@]}"; do
    # Native and cross jobsets differ a bit. We'll have to pick the
    # one based on the target name:
    if is_native $target; then
        jobset=nixpkgs/trunk
        job="stdenvBootstrapTools.${target}.build"
    elif is_cross $target; then
        jobset=nixpkgs/cross-trunk
        job="bootstrapTools.${target}.build"
    else
        die "'$target' is not present in either of 'NATIVE_TARGETS' or 'CROSS_TARGETS'. Please add one."
    fi

    # 'nixpkgs' prefix where we will write the new tarball hashes
    case "$target" in
        *linux*) nixpkgs_prefix="pkgs/stdenv/linux" ;;
        *darwin*) nixpkgs_prefix="pkgs/stdenv/darwin" ;;
        *) die "don't know where to put '$target'" ;;
    esac
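    # Illustration only (not executed by this script): for a native target
    # such as x86_64-unknown-linux-gnu the pair chosen above is
    # 'jobset=nixpkgs/trunk' and 'job=stdenvBootstrapTools.x86_64-unknown-linux-gnu.build',
    # so the same build metadata could be inspected by hand with something like:
    #
    #   curl -s -H "Content-Type: application/json" -L \
    #     "https://hydra.nixos.org/job/nixpkgs/trunk/stdenvBootstrapTools.x86_64-unknown-linux-gnu.build/latest" \
    #     | jq '{id, jobsetevals, drvpath, timestamp}'
    #
    # Only the 'id', 'jobsetevals', 'drvpath', 'buildoutputs' and 'timestamp'
    # fields are consumed below; the rest of hydra's JSON answer is not
    # relied upon.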
    # We enforce the s3 prefix for all targets here. This slightly differs
    # from manually uploaded targets, whose names were chosen inconsistently.
    s3_prefix="stdenv/$target"

    # resolve the 'latest' build to the build 'id', construct the link.
    latest_build_uri="https://hydra.nixos.org/job/$jobset/$job/latest"
    latest_build="$target.latest-build"
    info "Fetching latest successful build from '${latest_build_uri}'"
    curl -s -H "Content-Type: application/json" -L "$latest_build_uri" > "$latest_build"
    [[ $? -ne 0 ]] && die "Failed to fetch latest successful build"
    latest_build_id=$(jq '.id' < "$latest_build")
    [[ $? -ne 0 ]] && die "Did not find 'id' in latest build"
    build_uri="https://hydra.nixos.org/build/${latest_build_id}"

    # We pick the oldest jobset evaluation and extract the 'nixpkgs' commit.
    #
    # We use the oldest instead of the latest to make the result more stable
    # across unrelated 'nixpkgs' updates. Ideally two subsequent runs of
    # this refresher should produce the same output (provided there are
    # no bootstrapTools updates committed between the two runs).
    oldest_eval_id=$(jq '.jobsetevals|min' < "$latest_build")
    [[ $? -ne 0 ]] && die "Did not find 'jobsetevals' in latest build"
    eval_uri="https://hydra.nixos.org/eval/${oldest_eval_id}"
    eval_meta="$target.eval-meta"
    info "Fetching oldest eval details from '${eval_uri}' (can take a minute)"
    curl -s -H "Content-Type: application/json" -L "${eval_uri}" > "$eval_meta"
    [[ $? -ne 0 ]] && die "Failed to fetch eval metadata"
    nixpkgs_revision=$(jq --raw-output ".jobsetevalinputs.nixpkgs.revision" < "$eval_meta")
    [[ $? -ne 0 ]] && die "Failed to fetch revision"

    # Extract the build paths out of the build metadata
    drvpath=$(jq --raw-output '.drvpath' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'drvpath' in latest build"
    outpath=$(jq --raw-output '.buildoutputs.out.path' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'buildoutputs' in latest build"
    build_timestamp=$(jq --raw-output '.timestamp' < "${latest_build}")
    [[ $? -ne 0 ]] && die "Did not find 'timestamp' in latest build"
    build_time=$(TZ=UTC LANG=C date --date="@${build_timestamp}" --rfc-email)
    [[ $? -ne 0 ]] && die "Failed to format timestamp"

    info "Fetching bootstrap tools to calculate hashes from '${outpath}'"
    nix-store --realize "$outpath"
    [[ $? -ne 0 ]] && die "Failed to fetch '${outpath}' from hydra"

    fnames=()
    target_file="${nixpkgs_prefix}/bootstrap-files/${target}.nix"
    info "Writing '${target_file}'"
    {
        # header
        cat <<EOF
# Autogenerated by maintainers/scripts/bootstrap-files/refresh-tarballs.bash as:
#   \$ ./refresh-tarballs.bash --targets=${target}
#
# Metadata:
# - nixpkgs revision: ${nixpkgs_revision}
# - hydra build: ${latest_build_uri}
# - resolved hydra build: ${build_uri}
# - instantiated derivation: ${drvpath}
# - output directory: ${outpath}
# - build time: ${build_time}
{
EOF

        # one attribute per artifact in the build's 'on-server/' directory
        for p in "${outpath}/on-server"/*; do
            fname=$(basename "$p")
            fnames+=("$fname")

            # Map the artifact file name to the attribute name expected
            # by the stdenv bootstrap expressions.
            case "$fname" in
                bootstrap-tools.tar.xz) attr=bootstrapTools ;;
                busybox) attr=$fname ;;
                *) die "Don't know how to map '$fname' to a nixpkgs attribute. Please update the script." ;;
            esac

            # Standalone binaries (like 'busybox') have to be fetched
            # as executables.
            executable_arg=
            executable_nix=
            if [[ -x "$p" ]]; then
                executable_arg=--executable
                executable_nix="    executable = true;"
            fi

            # Hash the local copy of the artifact. The very same file is
            # expected to be uploaded to 'tarballs.nixos.org'.
            sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p")
            [[ $? -ne 0 ]] && die "Failed to hash '$p'"
            sri=$(nix-hash --to-sri "sha256:$sha256")
            [[ $? -ne 0 ]] && die "Failed to convert '$sha256' to an SRI hash"

            cat <<EOF
  $attr = import <nix/fetchurl.nix> {
    url = "http://tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/$fname";
    hash = "${sri}";$(printf "\n%s" "${executable_nix}")
  };
EOF
        done

        # footer
        cat <<EOF
}
EOF
    } > "${target_file}"

    # Craft the commit message with the details on how to upload the
    # result to 'tarballs.nixos.org' (step 4. from the description above).
    target_file_commit_msg=${target}.commit_message
    cat > "$target_file_commit_msg" <<EOF
${nixpkgs_prefix}: update ${target} bootstrap-files

Built from ${latest_build_uri}
(nixpkgs commit ${nixpkgs_revision}).

sha256sums of the files to be uploaded to
'tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/':

$(cd "${outpath}/on-server" && sha256sum "${fnames[@]}")

To upload, someone with write access to the bucket behind
'tarballs.nixos.org' should run something like:

    \$ nix-store --realize ${outpath}
    \$ aws s3 cp --recursive ${outpath}/on-server/ s3://nixpkgs-tarballs/${s3_prefix}/${nixpkgs_revision}/
EOF

    if [[ $commit == yes ]]; then
        info "Committing '${target_file}'"
        git add -- "${target_file}"
        git commit -F "$target_file_commit_msg" -- "${target_file}"
        [[ $? -ne 0 ]] && die "Failed to commit '${target_file}'"
        rm -- "$target_file_commit_msg"
    else
        info "Not committing: '--commit' was not passed."
        info "The commit message is left in '${target_file_commit_msg}'."
    fi

    # clean up the temporary hydra metadata
    rm -- "$latest_build" "$eval_meta"
done
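# For reference, the generated bootstrap-files/<target>.nix is expected to
# look roughly like the sketch below (illustration only: the revision, the
# hashes and the exact attribute set depend on the hydra build artifacts):
#
#   {
#     bootstrapTools = import <nix/fetchurl.nix> {
#       url = "http://tarballs.nixos.org/stdenv/x86_64-unknown-linux-gnu/<revision>/bootstrap-tools.tar.xz";
#       hash = "sha256-...";
#     };
#     busybox = import <nix/fetchurl.nix> {
#       url = "http://tarballs.nixos.org/stdenv/x86_64-unknown-linux-gnu/<revision>/busybox";
#       hash = "sha256-...";
#       executable = true;
#     };
#   }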