about summary refs log tree commit diff
path: root/nixpkgs/pkgs/development/tools/build-managers/bazel/bazel_4/update-srcDeps.py
diff options
context:
space:
mode:
Diffstat (limited to 'nixpkgs/pkgs/development/tools/build-managers/bazel/bazel_4/update-srcDeps.py')
-rwxr-xr-xnixpkgs/pkgs/development/tools/build-managers/bazel/bazel_4/update-srcDeps.py54
1 file changed, 0 insertions, 54 deletions
diff --git a/nixpkgs/pkgs/development/tools/build-managers/bazel/bazel_4/update-srcDeps.py b/nixpkgs/pkgs/development/tools/build-managers/bazel/bazel_4/update-srcDeps.py
deleted file mode 100755
index 9e998e05875e..000000000000
--- a/nixpkgs/pkgs/development/tools/build-managers/bazel/bazel_4/update-srcDeps.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python3
-import sys
-import json
-
# Refuse to run unless exactly one argument (the output json path) was given;
# the WORKSPACE itself is expected on stdin.
if len(sys.argv) != 2:
    usage = (
        "usage: ./this-script src-deps.json < WORKSPACE",
        "Takes the bazel WORKSPACE file and reads all archives into a json dict (by evaling it as python code)",
        "Hail Eris.",
    )
    for line in usage:
        print(line, file=sys.stderr)
    sys.exit(1)
-
# Every archive-like stanza encountered while evaluating the WORKSPACE ends
# up in this list, one kwargs-dict per stanza (keys like name, sha256, urls).
http_archives = []


def http_archive(**kw):
    """Stub for bazel's http_archive rule; records its kwargs verbatim."""
    http_archives.append(kw)


def http_file(**kw):
    """Stub for bazel's http_file rule; recorded exactly like http_archive."""
    http_archives.append(kw)
-
def distdir_tar(**kw):
    """Stub for bazel's distdir_tar rule.

    Unlike http_archive/http_file, one stanza bundles several archives:
    'archives' lists the names, while 'sha256' and 'urls' are per-name
    tables.  Flatten each named archive into its own entry.
    """
    http_archives.extend(
        {
            "name": archive_name,
            "sha256": kw['sha256'][archive_name],
            "urls": kw['urls'][archive_name]
        }
        for archive_name in kw['archives']
    )
-
def git_repository(**kw):
    """Stub for bazel's git_repository rule — not supported.

    TODO: handle git dependencies instead of bailing out.  For now, dump
    the offending stanza to stderr and abort so it can't be missed.
    """
    pretty = json.dumps(kw, sort_keys=True, indent=4)
    print(pretty, file=sys.stderr)
    sys.exit(1)
-
# Evaluate the WORKSPACE text from stdin as python in this module's
# namespace, so the stub functions above capture every dependency stanza.
# NOTE: exec of untrusted input is unsafe in general; tolerated here because
# the input is the bazel WORKSPACE of the source tree being packaged.
workspace_source = sys.stdin.read()
exec(workspace_source)
-
# Index the collected entries by archive name (a later entry with the same
# name silently replaces an earlier one).
d = {entry['name']: entry for entry in http_archives}
-
def has_urls(el):
    """Return True iff *el* carries a non-empty 'url' or 'urls' field."""
    # .get avoids the double lookup of `'url' in el and el['url']`, and
    # bool() normalises the truthy field value to an actual boolean (the
    # original leaked the raw field value, e.g. the url string itself).
    return bool(el.get('url') or el.get('urls'))
def has_sha256(el):
    """Return True iff *el* carries a non-empty 'sha256' field."""
    # .get + bool() instead of the check-then-index double lookup; also
    # normalises the return value to a real boolean.
    return bool(el.get('sha256'))
# Entries lacking either a download location or a checksum are unusable for
# a reproducible fetch; collect them and fail loudly if any exist.
bad_archives = [el for el in d.values() if not (has_urls(el) and has_sha256(el))]
if bad_archives:
    print('Following bazel dependencies are missing url or sha256', file=sys.stderr)
    print('Check bazel sources for master or non-checksummed dependencies', file=sys.stderr)
    for el in bad_archives:
        print(json.dumps(el, sort_keys=True, indent=4), file=sys.stderr)
    sys.exit(1)
-
# Persist the name-keyed dependency dict to the path given on the command
# line, pretty-printed with stable key order (ends with a trailing newline,
# matching what print() emitted).
with open(sys.argv[1], "w") as f:
    f.write(json.dumps(d, sort_keys=True, indent=4) + "\n")