author     Allen Nelson <anelson@narrativescience.com>  2015-11-03 16:26:29 -0600
committer  Allen Nelson <anelson@narrativescience.com>  2015-11-03 16:26:29 -0600
commit     b8c784f061e5153f61332ce65c335a42c0ab7702 (patch)
tree       b2a11138717e4616f8d047d55c2ecf9db4ce9224
parent     58b862b750dbfe00bc3e935f5ac8857b2fd6a591 (diff)
patch to remove large file test in numpy
 pkgs/development/python-modules/numpy-no-large-files.patch | 35 +++++++++++
 pkgs/top-level/python-packages.nix                         |  6 ++
 2 files changed, 41 insertions(+), 0 deletions(-)
diff --git a/pkgs/development/python-modules/numpy-no-large-files.patch b/pkgs/development/python-modules/numpy-no-large-files.patch
new file mode 100644
index 000000000000..0eb415606d3e
--- /dev/null
+++ b/pkgs/development/python-modules/numpy-no-large-files.patch
@@ -0,0 +1,35 @@
+--- numpy/lib/tests/test_format.py	2015-08-11 12:03:43.000000000 -0500
++++ numpy/lib/tests/test_format_no_large_files.py	2015-11-03 16:03:30.328084827 -0600
+@@ -810,32 +810,5 @@
+     format.write_array_header_1_0(s, d)
+     assert_raises(ValueError, format.read_array_header_1_0, s)
+ 
+-
+-def test_large_file_support():
+-    from nose import SkipTest
+-    if (sys.platform == 'win32' or sys.platform == 'cygwin'):
+-        raise SkipTest("Unknown if Windows has sparse filesystems")
+-    # try creating a large sparse file
+-    tf_name = os.path.join(tempdir, 'sparse_file')
+-    try:
+-        # seek past end would work too, but linux truncate somewhat
+-        # increases the chances that we have a sparse filesystem and can
+-        # avoid actually writing 5GB
+-        import subprocess as sp
+-        sp.check_call(["truncate", "-s", "5368709120", tf_name])
+-    except:
+-        raise SkipTest("Could not create 5GB large file")
+-    # write a small array to the end
+-    with open(tf_name, "wb") as f:
+-        f.seek(5368709120)
+-        d = np.arange(5)
+-        np.save(f, d)
+-    # read it back
+-    with open(tf_name, "rb") as f:
+-        f.seek(5368709120)
+-        r = np.load(f)
+-    assert_array_equal(r, d)
+-
+-
+ if __name__ == "__main__":
+     run_module_suite()
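For reference, the deleted test exercised numpy's large-file path by creating a sparse 5 GiB file and round-tripping a small array past the 5 GiB offset. Below is a minimal, self-contained sketch of the same technique, not numpy's code: it uses Python's file.truncate instead of shelling out to the truncate binary as the test did, targets Python 3, and the FIVE_GIB name is illustrative.

    # Sketch of the sparse-file trick the removed test depended on: on a
    # filesystem with sparse-file support, truncating a file out to 5 GiB
    # reserves the size without allocating data blocks, so the round-trip
    # below does not actually consume 5 GiB of disk.
    import os
    import tempfile

    import numpy as np

    FIVE_GIB = 5 * 1024 ** 3  # 5368709120, the offset the test used

    with tempfile.TemporaryDirectory() as tempdir:
        tf_name = os.path.join(tempdir, 'sparse_file')

        # Extend the file to 5 GiB without writing any data.
        with open(tf_name, 'wb') as f:
            f.truncate(FIVE_GIB)

        # st_blocks counts 512-byte blocks actually allocated; for a truly
        # sparse file this is tiny compared to the apparent st_size.
        st = os.stat(tf_name)
        print('apparent:', st.st_size, 'allocated:', st.st_blocks * 512)

        # Write a small array at the 5 GiB offset and read it back.
        d = np.arange(5)
        with open(tf_name, 'r+b') as f:
            f.seek(FIVE_GIB)
            np.save(f, d)
        with open(tf_name, 'rb') as f:
            f.seek(FIVE_GIB)
            r = np.load(f)
        assert (r == d).all()

On a filesystem without sparse-file support (the reason the test skipped on Windows/Cygwin), the truncate step really does allocate the full 5 GiB, which is exactly the disk-space hazard this commit's patch avoids.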
diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix
index f6ad5d2a8974..5550b09645eb 100644
--- a/pkgs/top-level/python-packages.nix
+++ b/pkgs/top-level/python-packages.nix
@@ -10202,6 +10202,12 @@ let
     buildInputs = [ pkgs.gfortran self.nose ];
     propagatedBuildInputs = [ support.openblas ];
 
+    # This patch removes the large file support test, which is very slow
+    # and can cause the machine to run out of disk space when run.
+    patchPhase = ''
+      patch -p0 < ${../development/python-modules/numpy-no-large-files.patch}
+    '';
+
     meta = {
       description = "Scientific tools for Python";
       homepage = "http://numpy.scipy.org/";
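A note on the -p0 above: the patch's own headers reference bare paths (numpy/lib/tests/test_format.py) with no a/ or b/ prefix, so no leading path component needs stripping. Had the patch instead been listed in the derivation's patches attribute, nixpkgs' stdenv would, by convention, apply it in its own patchPhase with -p1, which assumes one leading component to strip.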