summary refs log tree commit diff
path: root/pkgs/development
diff options
context:
space:
mode:
authorDomen Kožar <domen@dev.si>2015-11-16 12:55:13 +0100
committerDomen Kožar <domen@dev.si>2015-11-16 12:55:13 +0100
commit5ca01c71d1c18e70cd4034ca74eaa90de6655a22 (patch)
treee2d624bb60e331da6d84e5d3570f0e0de7689e4f /pkgs/development
parent5900949ef2f6ab0cda133f4eb915176570b946c2 (diff)
parentb8c784f061e5153f61332ce65c335a42c0ab7702 (diff)
downloadnixlib-5ca01c71d1c18e70cd4034ca74eaa90de6655a22.tar
nixlib-5ca01c71d1c18e70cd4034ca74eaa90de6655a22.tar.gz
nixlib-5ca01c71d1c18e70cd4034ca74eaa90de6655a22.tar.bz2
nixlib-5ca01c71d1c18e70cd4034ca74eaa90de6655a22.tar.lz
nixlib-5ca01c71d1c18e70cd4034ca74eaa90de6655a22.tar.xz
nixlib-5ca01c71d1c18e70cd4034ca74eaa90de6655a22.tar.zst
nixlib-5ca01c71d1c18e70cd4034ca74eaa90de6655a22.zip
Merge pull request #10814 from NarrativeScience/no_large_file_test
numpy: patch to remove large file test
Diffstat (limited to 'pkgs/development')
-rw-r--r--pkgs/development/python-modules/numpy-no-large-files.patch35
1 file changed, 35 insertions, 0 deletions
diff --git a/pkgs/development/python-modules/numpy-no-large-files.patch b/pkgs/development/python-modules/numpy-no-large-files.patch
new file mode 100644
index 000000000000..0eb415606d3e
--- /dev/null
+++ b/pkgs/development/python-modules/numpy-no-large-files.patch
@@ -0,0 +1,35 @@
+--- numpy/lib/tests/test_format.py	2015-08-11 12:03:43.000000000 -0500
++++ numpy/lib/tests/test_format_no_large_files.py	2015-11-03 16:03:30.328084827 -0600
+@@ -810,32 +810,5 @@
+     format.write_array_header_1_0(s, d)
+     assert_raises(ValueError, format.read_array_header_1_0, s)
+ 
+-
+-def test_large_file_support():
+-    from nose import SkipTest
+-    if (sys.platform == 'win32' or sys.platform == 'cygwin'):
+-        raise SkipTest("Unknown if Windows has sparse filesystems")
+-    # try creating a large sparse file
+-    tf_name = os.path.join(tempdir, 'sparse_file')
+-    try:
+-        # seek past end would work too, but linux truncate somewhat
+-        # increases the chances that we have a sparse filesystem and can
+-        # avoid actually writing 5GB
+-        import subprocess as sp
+-        sp.check_call(["truncate", "-s", "5368709120", tf_name])
+-    except:
+-        raise SkipTest("Could not create 5GB large file")
+-    # write a small array to the end
+-    with open(tf_name, "wb") as f:
+-        f.seek(5368709120)
+-        d = np.arange(5)
+-        np.save(f, d)
+-    # read it back
+-    with open(tf_name, "rb") as f:
+-        f.seek(5368709120)
+-        r = np.load(f)
+-    assert_array_equal(r, d)
+-
+-
+ if __name__ == "__main__":
+     run_module_suite()