From d64d62b76df62e8b20eea060db05ef23c472354d Mon Sep 17 00:00:00 2001
From: Julian Taylor
Date: Sat, 11 Jan 2014 13:21:45 +0100
Subject: BUG: fix large file support on 32 bit systems

On linux large file support must be enabled and ftello used to avoid
overflows. The result must not be converted to a size_t, but a long
long.
---
 numpy/lib/tests/test_format.py | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)

(limited to 'numpy/lib/tests/test_format.py')

diff --git a/numpy/lib/tests/test_format.py b/numpy/lib/tests/test_format.py
index b9be643c8..73b1e7c12 100644
--- a/numpy/lib/tests/test_format.py
+++ b/numpy/lib/tests/test_format.py
@@ -620,5 +620,29 @@ def test_bad_header():
     format.write_array_header_1_0(s, d)
     assert_raises(ValueError, format.read_array_header_1_0, s)
 
+
+def test_large_file_support():
+    from nose import SkipTest
+    # try creating a large sparse file
+    with tempfile.NamedTemporaryFile() as tf:
+        try:
+            import subprocess as sp
+            sp.check_call(["truncate", "-s", "5368709120", tf.name])
+        except:
+            raise SkipTest("Could not create 5GB large file")
+        # write a small array to the end
+        f = open(tf.name, "wb")
+        f.seek(5368709120)
+        d = np.arange(5)
+        np.save(f, d)
+        f.close()
+        # read it back
+        f = open(tf.name, "rb")
+        f.seek(5368709120)
+        r = np.load(f)
+        f.close()
+        assert_array_equal(r, d)
+
+
 if __name__ == "__main__":
     run_module_suite()
-- 
cgit v1.2.1