(https://github.com/Blosc/bloscpack/issues/99). OBS-URL: https://build.opensuse.org/package/show/devel:languages:python:numeric/python-bloscpack?expand=0&rev=13
---
bloscpack/testutil.py | 3 +--
test/test_headers.py | 2 +-
2 files changed, 2 insertions(+), 3 deletions(-)
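Every hunk in this patch applies one of two mechanical conversions: nose assertion helpers (nt.assert_equal, nt.assert_raises, ...) become plain assert statements, and expected-exception checks become pytest.raises context managers. A minimal, self-contained sketch of both patterns follows; the clamp helper is hypothetical and only illustrates the before/after shape of functions like check_range below:

    import pytest


    def clamp(value, low, high):
        # hypothetical stand-in for helpers like check_range in the hunks below
        if not isinstance(value, int):
            raise TypeError('value must be an int')
        if not low <= value <= high:
            raise ValueError('value out of range')
        return value


    def test_clamp():
        # nose:  nt.assert_equal(1, clamp(1, 0, 1))
        assert 1 == clamp(1, 0, 1)
        # nose:  nt.assert_raises(ValueError, clamp, 2, 0, 1)
        with pytest.raises(ValueError):
            clamp(2, 0, 1)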
--- a/bloscpack/testutil.py
+++ b/bloscpack/testutil.py
@@ -62,7 +62,7 @@ def create_tmp_files():
dcmp_file = path.join(tdir, 'file.dcmp')
# register the temp dir remover, safeguard against abort
atexit.register(atexit_tmpremover, tdir)
- yield tdir, in_file, out_file, dcmp_file
+ yield tdir, in_file, out_file, dcmp_file
# context manager remover
shutil.rmtree(tdir)

@@ -74,7 +74,6 @@ def cmp_file(file1, file2):


def cmp_fp(fp1, fp2):
- import nose.tools as nt # nose is a testing dependency
chunk_size = reverse_pretty(DEFAULT_CHUNK_SIZE)
while True:
a = fp1.read(chunk_size)
@@ -82,4 +81,4 @@ def cmp_fp(fp1, fp2):
if a == b'' and b == b'':
return True
else:
- nt.assert_equal(a, b)
+ assert a == b
--- a/test/test_headers.py
+++ b/test/test_headers.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:

@@ -6,9 +5,8 @@
import struct
import sys

-
-import nose.tools as nt
-from nose.plugins.skip import SkipTest
+import pytest
+from unittest import SkipTest
import blosc
import numpy as np

@@ -39,9 +37,12 @@ from bloscpack.headers import (Bloscpack


def test_check_range():
- nt.assert_raises(TypeError, check_range, 'test', 'a', 0, 1)
- nt.assert_raises(ValueError, check_range, 'test', -1, 0, 1)
- nt.assert_raises(ValueError, check_range, 'test', 2, 0, 1)
+ with pytest.raises(TypeError):
+ check_range('test', 'a', 0, 1)
+ with pytest.raises(ValueError):
+ check_range('test', -1, 0, 1)
+ with pytest.raises(ValueError):
+ check_range('test', 2, 0, 1)


def test_create_options():
@@ -57,7 +58,7 @@ def test_create_options():
('00000001', {'offsets': True, 'metadata': False}),
('00000011', {'offsets': True, 'metadata': True}),
]:
- yield nt.assert_equal, expected_options, create_options(**kwargs)
+ assert expected_options == create_options(**kwargs)


def test_decode_options():
@@ -67,7 +68,7 @@ def test_decode_options():
({'metadata': True, 'offsets': False}, '00000010'),
({'metadata': True, 'offsets': True}, '00000011'),
]:
- yield nt.assert_equal, expected, decode_options(input)
+ assert expected == decode_options(input)


def test_decode_options_exceptions():
@@ -81,7 +82,8 @@ def test_decode_options_exceptions():
'00001100',
'11111100',
]:
- yield nt.assert_raises, ValueError, decode_options, broken_input
+ with pytest.raises(ValueError):
+ decode_options(broken_input)


def test_check_options_exceptions():
@@ -90,7 +92,9 @@ def test_check_options_exceptions():
0,
1,
]:
- yield nt.assert_raises, TypeError, check_options, broken_input
+ with pytest.raises(TypeError):
+ check_options(broken_input)
+
for broken_input in [
# check for lengths too small and too large
'0',
@@ -103,15 +107,16 @@ def test_check_options_exceptions():
'0000000a',
'aaaaaaaa',
]:
- yield nt.assert_raises, ValueError, check_options, broken_input
+ with pytest.raises(ValueError):
+ check_options(broken_input)


def test_create_metadata_options():
- nt.assert_equal('00000000', create_metadata_options())
+ assert '00000000' == create_metadata_options()


def test_decode_metadata_options():
- nt.assert_equal({}, decode_metadata_options('00000000'))
+ assert {} == decode_metadata_options('00000000')


def test_decode_metadata_options_exceptions():
@@ -125,7 +130,8 @@ def test_decode_metadata_options_excepti
'00001111',
'11111111',
]:
- yield nt.assert_raises, ValueError, decode_metadata_options, broken_input
+ with pytest.raises(ValueError):
+ decode_metadata_options(broken_input)


def test_decode_blosc_header_basic():
@@ -139,7 +145,7 @@ def test_decode_blosc_header_basic():
'nbytes': len(array_),
'typesize': blosc_args.typesize}
header_slice = dict((k, header[k]) for k in expected.keys())
- nt.assert_equal(expected, header_slice)
+ assert expected == header_slice


def test_decode_blosc_header_deactivate_shuffle():
@@ -154,7 +160,7 @@ def test_decode_blosc_header_deactivate_
'nbytes': len(array_),
'typesize': blosc_args.typesize}
header_slice = dict((k, header[k]) for k in expected.keys())
- nt.assert_equal(expected, header_slice)
+ assert expected == header_slice


def test_decode_blosc_header_uncompressible_data():
@@ -171,7 +177,7 @@ def test_decode_blosc_header_uncompressi
'flags': 0x13, # 1 for shuffle 2 for non-compressed 4 for small blocksize
'nbytes': len(array_),
'typesize': blosc_args.typesize}
- nt.assert_equal(expected, header)
+ assert expected == header


def test_decode_blosc_header_uncompressible_data_dont_split_false():
@@ -190,7 +196,7 @@ def test_decode_blosc_header_uncompressi
'nbytes': len(array_),
'typesize': blosc_args.typesize
}
- nt.assert_equal(expected, header)
+ assert expected == header


def test_decode_blosc_flags():
@@ -216,16 +222,13 @@ def test_decode_blosc_flags():
(0b01100000, {'codec': 'zlib'}),
(0b10000000, {'codec': 'zstd'}),
]:
- yield (nt.assert_equal,
- decode_blosc_flags(input_byte),
- gen_expected(new_params))
+ assert decode_blosc_flags(input_byte) == gen_expected(new_params)


def test_BloscPackHeader_constructor_exceptions():
- # uses nose test generators
-
def check(error_type, args_dict):
- nt.assert_raises(error_type, BloscpackHeader, **args_dict)
+ with pytest.raises(error_type):
+ BloscpackHeader(**args_dict)

for error_type, args_dict in [
(ValueError, {'format_version': -1}),
@@ -260,7 +263,7 @@ def test_BloscPackHeader_constructor_exc
(ValueError, {'chunk_size': 1,
'last_chunk': 2}),
]:
- yield check, error_type, args_dict
+ check(error_type, args_dict)


def test_BloscPackHeader_total_prospective_entries():
@@ -277,7 +280,7 @@ def test_BloscPackHeader_total_prospecti
]:
header = BloscpackHeader(nchunks=nchunks,
max_app_chunks=max_app_chunks)
- yield nt.assert_equal, expected, header.total_prospective_chunks
+ assert expected == header.total_prospective_chunks


def test_BloscpackHeader_encode():
@@ -294,7 +297,7 @@ def test_BloscpackHeader_encode():
raw[offset+len(replacement):]

# test with no arguments
- yield nt.assert_equal, raw, BloscpackHeader().encode()
+ assert raw == BloscpackHeader().encode()

for offset, replacement, kwargs in [
(4, struct.pack('<B', 23), {'format_version': 23}),
@@ -338,7 +341,7 @@ def test_BloscpackHeader_encode():
(16, b'\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\x7f',
{'nchunks': 1, 'max_app_chunks': MAX_CHUNKS-1}),
]:
- yield nt.assert_equal, mod_raw(offset, replacement), \
+ assert mod_raw(offset, replacement) == \
BloscpackHeader(**kwargs).encode()


@@ -354,7 +357,7 @@ def test_BloscpackHeader_decode():
raw[offset+len(replacement):]

# check with no args
- yield nt.assert_equal, BloscpackHeader(), BloscpackHeader.decode(raw)
+ assert BloscpackHeader() == BloscpackHeader.decode(raw)

for kwargs, offset, replacement in [
# check with format_version
@@ -404,27 +407,21 @@ def test_BloscpackHeader_decode():
({'nchunks': 1, 'max_app_chunks': MAX_CHUNKS-1},
16, b'\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\x7f'),
]:
- yield (nt.assert_equal,
- BloscpackHeader(**kwargs),
- BloscpackHeader.decode(mod_raw(offset, replacement)))
+ assert BloscpackHeader(**kwargs) == \
+ BloscpackHeader.decode(mod_raw(offset, replacement))


def test_BloscpackHeader_accessor_exceptions():
if sys.version_info[0:2] < (2, 7):
raise SkipTest
bloscpack_header = BloscpackHeader()
- nt.assert_raises_regexp(KeyError,
- 'foo not in BloscpackHeader',
- bloscpack_header.__getitem__,
- 'foo')
- nt.assert_raises_regexp(KeyError,
- 'foo not in BloscpackHeader',
- bloscpack_header.__setitem__,
- 'foo', 'bar')
- nt.assert_raises_regexp(NotImplementedError,
- 'BloscpackHeader does not support __delitem__ or derivatives',
- bloscpack_header.__delitem__,
- 'foo',)
+ with pytest.raises(KeyError, match='foo not in BloscpackHeader'):
+ bloscpack_header.__getitem__('foo')
+ with pytest.raises(KeyError, match='foo not in BloscpackHeader'):
+ bloscpack_header.__setitem__('foo', 'bar')
+ with pytest.raises(NotImplementedError,
+ match='BloscpackHeader does not support __delitem__ or derivatives'):
+ bloscpack_header.__delitem__('foo')


def test_MetadataHeader_encode():
@@ -432,7 +429,7 @@ def test_MetadataHeader_encode():
b'\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00'
- yield nt.assert_equal, raw, MetadataHeader().encode()
+ assert raw == MetadataHeader().encode()

def mod_raw(offset, value):
return raw[0:offset] + value + \
@@ -451,7 +448,7 @@ def test_MetadataHeader_encode():
(20, b'\xff\xff\xff\xff', {'meta_comp_size': MAX_META_SIZE}),
(24, b'sesame', {'user_codec': b'sesame'}),
]:
- yield nt.assert_equal, mod_raw(offset, replacement), \
+ assert mod_raw(offset, replacement) == \
MetadataHeader(**kwargs).encode()


@@ -481,7 +478,7 @@ def test_MetadataHeader_decode():
return no_arg_input[0:offset] + value + \
no_arg_input[offset+len(value):]

- yield nt.assert_equal, no_arg_return, MetadataHeader.decode(no_arg_input)
+ assert no_arg_return == MetadataHeader.decode(no_arg_input)

for attribute, value, offset, replacement in [
('magic_format', b'JSON', 0, b'JSON'),
@@ -496,5 +493,5 @@ def test_MetadataHeader_decode():
('meta_comp_size', MAX_META_SIZE, 20, b'\xff\xff\xff\xff'),
('user_codec', b'sesame', 24, b'sesame'),
]:
- yield nt.assert_equal, copy_and_set_return(attribute, value), \
+ assert copy_and_set_return(attribute, value) == \
MetadataHeader.decode(copy_and_set_input(offset, replacement))
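The hunks above fold nose's yield-based test generators into plain loops of assert statements, which keeps the patch minimal. The more idiomatic pytest replacement would be @pytest.mark.parametrize, which reports each input as a separate test case. A hedged sketch reusing the (expected, input) pairs visible in the test_decode_options hunk; the bloscpack.headers import path is an assumption based on the import hunk above, and this sketch is not part of the patch itself:

    import pytest

    from bloscpack.headers import decode_options  # assumed export


    @pytest.mark.parametrize('expected, raw', [
        ({'metadata': True, 'offsets': False}, '00000010'),
        ({'metadata': True, 'offsets': True}, '00000011'),
    ])
    def test_decode_options_parametrized(expected, raw):
        # each pair runs and is reported independently,
        # mirroring what the nose generator used to do
        assert expected == decode_options(raw)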
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,6 @@
# flake8: noqa

from setuptools import setup
-import sys
import io

with io.open('README.rst', encoding='utf-8') as f:
@@ -20,7 +19,7 @@ install_requires = [
]

tests_require = [
- 'nose',
+ 'pytest',
'cram>=0.6',
'mock',
'coverage',
--- a/test/test_append.py
+++ b/test/test_append.py
@@ -1,10 +1,9 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:


import blosc
-import nose.tools as nt
+import pytest
import numpy as np


@@ -105,8 +104,8 @@ def test_append_fp():
max_app_chunks=160,
)
expected_orig_offsets = [1440]
- nt.assert_equal(expected_orig_bloscpack_header, orig_bloscpack_header)
- nt.assert_equal(expected_orig_offsets[0], orig_offsets[0])
+ assert expected_orig_bloscpack_header == orig_bloscpack_header
+ assert expected_orig_offsets[0] == orig_offsets[0]

# perform the append
reset_append_fp(orig, new, new_size)
@@ -126,8 +125,8 @@ def test_append_fp():
'metadata': False
}
expected_app_offsets = [1440]
- nt.assert_equal(expected_app_bloscpack_header, app_bloscpack_header)
- nt.assert_equal(expected_app_offsets[0], app_offsets[0])
+ assert expected_app_bloscpack_header == app_bloscpack_header
+ assert expected_app_offsets[0] == app_offsets[0]

# now check by unpacking
source = CompressedFPSource(orig)
@@ -137,8 +136,8 @@ def test_append_fp():
new.seek(0)
new_str = new.read()
dcmp_str = dcmp.read()
- nt.assert_equal(len(dcmp_str), len(new_str * 2))
- nt.assert_equal(dcmp_str, new_str * 2)
+ assert len(dcmp_str) == len(new_str * 2)
+ assert dcmp_str == new_str * 2

## TODO
# * check additional aspects of file integrity
@@ -154,8 +153,8 @@ def test_append():
unpack_file_from_file(out_file, dcmp_file)
in_content = open(in_file, 'rb').read()
dcmp_content = open(dcmp_file, 'rb').read()
- nt.assert_equal(len(dcmp_content), len(in_content) * 2)
- nt.assert_equal(dcmp_content, in_content * 2)
+ assert len(dcmp_content) == len(in_content) * 2
+ assert dcmp_content == in_content * 2


def test_append_into_last_chunk():
@@ -175,13 +174,13 @@ def test_append_into_last_chunk():
new.seek(0)
nchunks = reset_append_fp(orig, StringIO(new_content[:1023]), 1023)
bloscpack_header = reset_read_beginning(orig)[0]
- nt.assert_equal(nchunks, 1)
- nt.assert_equal(bloscpack_header['last_chunk'], 1023)
+ assert nchunks == 1
+ assert bloscpack_header['last_chunk'] == 1023
# now append into that last chunk
nchunks = reset_append_fp(orig, StringIO(new_content[:1023]), 1023)
bloscpack_header = reset_read_beginning(orig)[0]
- nt.assert_equal(nchunks, 0)
- nt.assert_equal(bloscpack_header['last_chunk'], 2046)
+ assert nchunks == 0
+ assert bloscpack_header['last_chunk'] == 2046

# now check by unpacking
source = CompressedFPSource(orig)
@@ -191,8 +190,8 @@ def test_append_into_last_chunk():
new.seek(0)
new_str = new.read()
dcmp_str = dcmp.read()
- nt.assert_equal(len(dcmp_str), len(new_str) + 2046)
- nt.assert_equal(dcmp_str, new_str + new_str[:1023] * 2)
+ assert len(dcmp_str) == len(new_str) + 2046
+ assert dcmp_str == new_str + new_str[:1023] * 2


def test_append_single_chunk():
@@ -210,25 +209,25 @@ def test_append_single_chunk():
# append a single chunk
reset_append_fp(orig, new, new_size)
bloscpack_header = reset_read_beginning(orig)[0]
- nt.assert_equal(bloscpack_header['nchunks'], 2)
+ assert bloscpack_header['nchunks'] == 2

# append a large content, that amounts to two chunks
new_content = new.read()
new.seek(0)
reset_append_fp(orig, StringIO(new_content * 2), new_size * 2)
bloscpack_header = reset_read_beginning(orig)[0]
- nt.assert_equal(bloscpack_header['nchunks'], 4)
+ assert bloscpack_header['nchunks'] == 4

# append half a chunk
reset_append_fp(orig, StringIO(new_content[:len(new_content)]), new_size//2)
bloscpack_header = reset_read_beginning(orig)[0]
- nt.assert_equal(bloscpack_header['nchunks'], 5)
+ assert bloscpack_header['nchunks'] == 5

# append a few bytes
reset_append_fp(orig, StringIO(new_content[:1023]), 1024)
# make sure it is squashed into the lat chunk
bloscpack_header = reset_read_beginning(orig)[0]
- nt.assert_equal(bloscpack_header['nchunks'], 5)
+ assert bloscpack_header['nchunks'] == 5


def test_double_append():
@@ -241,8 +240,8 @@ def test_double_append():
unpack(source, sink)
dcmp.seek(0)
dcmp_str = dcmp.read()
- nt.assert_equal(len(dcmp_str), len(new_str) * 3)
- nt.assert_equal(dcmp_str, new_str * 3)
+ assert len(dcmp_str) == len(new_str) * 3
+ assert dcmp_str == new_str * 3


def test_append_metadata():
@@ -267,20 +266,22 @@ def test_append_metadata():
new.seek(0)
new_str = new.read()
dcmp_str = dcmp.read()
- nt.assert_equal(len(dcmp_str), len(new_str) * 2)
- nt.assert_equal(dcmp_str, new_str * 2)
+ assert len(dcmp_str) == len(new_str) * 2
+ assert dcmp_str == new_str * 2


def test_append_fp_no_offsets():
bloscpack_args = BloscpackArgs(offsets=False)
orig, new, new_size, dcmp = prep_array_for_append(bloscpack_args=bloscpack_args)
- nt.assert_raises(RuntimeError, append_fp, orig, new, new_size)
+ with pytest.raises(RuntimeError):
+ append_fp(orig, new, new_size)


def test_append_fp_not_enough_space():
bloscpack_args = BloscpackArgs(max_app_chunks=0)
orig, new, new_size, dcmp = prep_array_for_append(bloscpack_args=bloscpack_args)
- nt.assert_raises(NotEnoughSpace, append_fp, orig, new, new_size)
+ with pytest.raises(NotEnoughSpace):
+ append_fp(orig, new, new_size)


def test_mixing_clevel():
@@ -293,7 +294,7 @@ def test_mixing_clevel():
orig.seek(0)
# get a backup of the settings
bloscpack_header, metadata, metadata_header, offsets = \
- reset_read_beginning(orig)
+ reset_read_beginning(orig)
# compressed size of the last chunk, including checksum
last_chunk_compressed_size = orig_size - offsets[-1]

@@ -316,7 +317,7 @@ def test_mixing_clevel():
# * nchunks + 1 times the blosc and checksum overhead
appended_size = new_size + bloscpack_header['last_chunk'] + (nchunks+1) * (16 + 4)
# final size should be original plus appended data
- nt.assert_equal(final_size, appended_size + discounted_orig_size)
+ assert final_size == appended_size + discounted_orig_size

# check by unpacking
source = CompressedFPSource(orig)
@@ -326,8 +327,8 @@ def test_mixing_clevel():
new.seek(0)
new_str = new.read()
dcmp_str = dcmp.read()
- nt.assert_equal(len(dcmp_str), len(new_str * 2))
- nt.assert_equal(dcmp_str, new_str * 2)
+ assert len(dcmp_str) == len(new_str * 2)
+ assert dcmp_str == new_str * 2


def test_append_mix_shuffle():
@@ -360,8 +361,8 @@ def test_append_mix_shuffle():
dcmp_str = dcmp.read()

# now sanity check the length and content of the decompressed
- nt.assert_equal(len(dcmp_str), len(new_str) + to_append_fp_size)
- nt.assert_equal(dcmp_str, new_str + to_append.tostring())
+ assert len(dcmp_str) == len(new_str) + to_append_fp_size
+ assert dcmp_str == new_str + to_append.tostring()

# now get the first and the last chunk and check that the shuffle doesn't
# match
@@ -376,9 +377,9 @@ def test_append_mix_shuffle():
_read_compressed_chunk_fp(orig, checksum_impl)
decompressed_last = blosc.decompress(compressed_last)
# first chunk has shuffle active
- nt.assert_equal(blosc_header_zero['flags'], 1)
+ assert blosc_header_zero['flags'] == 1
# last chunk doesn't
- nt.assert_equal(blosc_header_last['flags'], 0)
+ assert blosc_header_last['flags'] == 0


def test_recreate_metadata():
@@ -393,21 +394,12 @@ def test_recreate_metadata():
user_codec=b'',
)
header_dict = old_meta_header
- nt.assert_raises(NoSuchSerializer,
- _recreate_metadata,
- header_dict,
- '',
- magic_format='NOSUCHSERIALIZER')
- nt.assert_raises(NoSuchCodec,
- _recreate_metadata,
- header_dict,
- '',
- codec='NOSUCHCODEC')
- nt.assert_raises(ChecksumLengthMismatch,
- _recreate_metadata,
- header_dict,
- '',
- checksum='adler32')
+ with pytest.raises(NoSuchSerializer):
+ _recreate_metadata(header_dict, '', magic_format='NOSUCHSERIALIZER')
+ with pytest.raises(NoSuchCodec):
+ _recreate_metadata(header_dict, '', codec='NOSUCHCODEC')
+ with pytest.raises(ChecksumLengthMismatch):
+ _recreate_metadata(header_dict, '', checksum='adler32')


def test_rewrite_metadata():
@@ -426,8 +418,8 @@ def test_rewrite_metadata():
# write the metadata section
_write_metadata(target_fp, test_metadata, metadata_args)
# check that the length is correct
- nt.assert_equal(METADATA_HEADER_LENGTH + metadata_args.max_meta_size,
- len(target_fp.getvalue()))
+ assert METADATA_HEADER_LENGTH + metadata_args.max_meta_size == \
+ len(target_fp.getvalue())

# now add stuff to the metadata
test_metadata['container'] = 'numpy'
@@ -442,20 +434,20 @@ def test_rewrite_metadata():
# correctly
target_fp.seek(0, 0)
result_metadata, result_header = _read_metadata(target_fp)
- nt.assert_equal(test_metadata, result_metadata)
- nt.assert_equal(new_metadata_length, result_header.meta_comp_size)
+ assert test_metadata == result_metadata
+ assert new_metadata_length == result_header.meta_comp_size

# make sure that NoChangeInMetadata is raised
target_fp.seek(0, 0)
- nt.assert_raises(NoChangeInMetadata, _rewrite_metadata_fp,
- target_fp, test_metadata, codec=None, level=None)
+ with pytest.raises(NoChangeInMetadata):
+ _rewrite_metadata_fp(target_fp, test_metadata, codec=None, level=None)

# make sure that ChecksumLengthMismatch is raised, needs modified metadata
target_fp.seek(0, 0)
test_metadata['fluxcompensator'] = 'back to the future'
- nt.assert_raises(ChecksumLengthMismatch, _rewrite_metadata_fp,
- target_fp, test_metadata,
- codec=None, level=None, checksum='sha512')
+ with pytest.raises(ChecksumLengthMismatch):
+ _rewrite_metadata_fp(target_fp, test_metadata, codec=None, level=None,
+ checksum='sha512')

# make sure if level is not None, this works
target_fp.seek(0, 0)
@@ -467,5 +459,5 @@ def test_rewrite_metadata():
for i in range(100):
test_metadata[str(i)] = str(i)
target_fp.seek(0, 0)
- nt.assert_raises(MetadataSectionTooSmall, _rewrite_metadata_fp,
- target_fp, test_metadata, codec=None, level=None)
+ with pytest.raises(MetadataSectionTooSmall):
+ _rewrite_metadata_fp(target_fp, test_metadata, codec=None, level=None)
--- a/test/test_args.py
+++ b/test/test_args.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:

@@ -6,8 +5,7 @@
from unittest import TestCase


-import nose.tools as nt
-
+import pytest

from bloscpack.args import (DEFAULT_BLOSC_ARGS,
DEFAULT_BLOSCPACK_ARGS,
@@ -35,97 +33,96 @@ from bloscpack.pretty import reverse_pre
def test_check_blosc_arguments():
missing = DEFAULT_BLOSC_ARGS.copy()
missing.pop('typesize')
- nt.assert_raises(ValueError, _check_blosc_args, missing)
+ with pytest.raises(ValueError):
+ _check_blosc_args(missing)
extra = DEFAULT_BLOSC_ARGS.copy()
extra['wtf'] = 'wtf'
- nt.assert_raises(ValueError, _check_blosc_args, extra)
+ with pytest.raises(ValueError):
+ _check_blosc_args(extra)


def test_check_bloscpack_arguments():
missing = DEFAULT_BLOSCPACK_ARGS.copy()
missing.pop('offsets')
- nt.assert_raises(ValueError, _check_bloscpack_args, missing)
+ with pytest.raises(ValueError):
+ _check_bloscpack_args(missing)
extra = DEFAULT_BLOSCPACK_ARGS.copy()
extra['wtf'] = 'wtf'
- nt.assert_raises(ValueError, _check_bloscpack_args, extra)
+ with pytest.raises(ValueError):
+ _check_bloscpack_args(extra)


def test_check_bloscpack_arguments_accpets_None_as_checksum():
args = BloscpackArgs(checksum=None)
- nt.assert_equal(args.checksum, 'None')
+ assert args.checksum == 'None'


def test_check_metadata_arguments():
missing = DEFAULT_METADATA_ARGS.copy()
missing.pop('magic_format')
- nt.assert_raises(ValueError, _check_metadata_arguments, missing)
+ with pytest.raises(ValueError):
+ _check_metadata_arguments(missing)
extra = DEFAULT_METADATA_ARGS.copy()
extra['wtf'] = 'wtf'
- nt.assert_raises(ValueError, _check_metadata_arguments, extra)
+ with pytest.raises(ValueError):
+ _check_metadata_arguments(extra)


def test_calculate_nchunks():
# check for zero or negative chunk_size
- nt.assert_raises(ValueError, calculate_nchunks,
- 23, chunk_size=0)
- nt.assert_raises(ValueError, calculate_nchunks,
- 23, chunk_size=-1)
-
- nt.assert_equal((9, 1, 1), calculate_nchunks(9, chunk_size=1))
- nt.assert_equal((5, 2, 1), calculate_nchunks(9, chunk_size=2))
- nt.assert_equal((3, 3, 3), calculate_nchunks(9, chunk_size=3))
- nt.assert_equal((3, 4, 1), calculate_nchunks(9, chunk_size=4))
- nt.assert_equal((2, 5, 4), calculate_nchunks(9, chunk_size=5))
- nt.assert_equal((2, 6, 3), calculate_nchunks(9, chunk_size=6))
- nt.assert_equal((2, 7, 2), calculate_nchunks(9, chunk_size=7))
- nt.assert_equal((2, 8, 1), calculate_nchunks(9, chunk_size=8))
- nt.assert_equal((1, 9, 9), calculate_nchunks(9, chunk_size=9))
+ with pytest.raises(ValueError):
+ calculate_nchunks(23, chunk_size=0)
+ with pytest.raises(ValueError):
+ calculate_nchunks(23, chunk_size=-1)
+
+ assert (9, 1, 1) == calculate_nchunks(9, chunk_size=1)
+ assert (5, 2, 1) == calculate_nchunks(9, chunk_size=2)
+ assert (3, 3, 3) == calculate_nchunks(9, chunk_size=3)
+ assert (3, 4, 1) == calculate_nchunks(9, chunk_size=4)
+ assert (2, 5, 4) == calculate_nchunks(9, chunk_size=5)
+ assert (2, 6, 3) == calculate_nchunks(9, chunk_size=6)
+ assert (2, 7, 2) == calculate_nchunks(9, chunk_size=7)
+ assert (2, 8, 1) == calculate_nchunks(9, chunk_size=8)
+ assert (1, 9, 9) == calculate_nchunks(9, chunk_size=9)

# check downgrade
- nt.assert_equal((1, 23, 23), calculate_nchunks(23, chunk_size=24))
+ assert (1, 23, 23) == calculate_nchunks(23, chunk_size=24)

# single byte file
- nt.assert_equal((1, 1, 1),
- calculate_nchunks(1, chunk_size=1))
+ assert (1, 1, 1) == calculate_nchunks(1, chunk_size=1)

# check that a zero length input is handled correctly
- nt.assert_equal((1, 0, 0),
- calculate_nchunks(0, chunk_size=1))
+ assert (1, 0, 0) == calculate_nchunks(0, chunk_size=1)
# check that the chunk_size is ignored in this case
- nt.assert_equal((1, 0, 0),
- calculate_nchunks(0, chunk_size=512))
+ assert (1, 0, 0) == calculate_nchunks(0, chunk_size=512)
# in_file_size must be strictly positive
- nt.assert_raises(ValueError, calculate_nchunks, -1)
+ with pytest.raises(ValueError):
+ calculate_nchunks(-1)

# check overflow of nchunks due to chunk_size being too small
# and thus stuff not fitting into the header
- nt.assert_raises(ChunkingException, calculate_nchunks,
- MAX_CHUNKS+1, chunk_size=1)
+ with pytest.raises(ChunkingException):
+ calculate_nchunks(MAX_CHUNKS+1, chunk_size=1)

# check that strings are converted correctly
- nt.assert_equal((6, 1048576, 209715),
- calculate_nchunks(reverse_pretty('5.2M')))
- nt.assert_equal((3, 2097152, 1258291),
- calculate_nchunks(reverse_pretty('5.2M'),
- chunk_size='2M'))
+ assert (6, 1048576, 209715) == calculate_nchunks(reverse_pretty('5.2M'))
+ assert (3, 2097152, 1258291) == \
+ calculate_nchunks(reverse_pretty('5.2M'), chunk_size='2M')


def test_handle_max_apps():
- nt.assert_equals(_handle_max_apps(True, 10, 10), 10)
- nt.assert_equals(_handle_max_apps(True, 10, lambda x: x*10), 100)
- nt.assert_equals(_handle_max_apps(True, 1, lambda x: MAX_CHUNKS),
- MAX_CHUNKS-1)
- nt.assert_equals(_handle_max_apps(True, 1, lambda x: MAX_CHUNKS+10),
- MAX_CHUNKS-1)
- nt.assert_equals(_handle_max_apps(True, 1, MAX_CHUNKS),
- MAX_CHUNKS-1)
- nt.assert_equals(_handle_max_apps(True, 10, MAX_CHUNKS),
- MAX_CHUNKS-10)
- nt.assert_raises(TypeError, _handle_max_apps, True, 10, 10.0)
- nt.assert_raises(ValueError, _handle_max_apps,
- True, 10, lambda x: -1)
- nt.assert_raises(ValueError, _handle_max_apps,
- True, 10, lambda x: 1.0)
+ assert _handle_max_apps(True, 10, 10) == 10
+ assert _handle_max_apps(True, 10, lambda x: x*10) == 100
+ assert _handle_max_apps(True, 1, lambda x: MAX_CHUNKS) == MAX_CHUNKS-1
+ assert _handle_max_apps(True, 1, lambda x: MAX_CHUNKS+10) == MAX_CHUNKS-1
+ assert _handle_max_apps(True, 1, MAX_CHUNKS) == MAX_CHUNKS-1
+ assert _handle_max_apps(True, 10, MAX_CHUNKS) == MAX_CHUNKS-10
+ with pytest.raises(TypeError):
+ _handle_max_apps(True, 10, 10.0)
+ with pytest.raises(ValueError):
+ _handle_max_apps(True, 10, lambda x: -1)
+ with pytest.raises(ValueError):
+ _handle_max_apps(True, 10, lambda x: 1.0)


class TestBloscArgs(TestCase):
--- a/test/test_checksums.py
+++ b/test/test_checksums.py
@@ -1,16 +1,11 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:

-
-import nose.tools as nt
-
-
from bloscpack import checksums


def test_checksusm_exist():
- nt.assert_equal(len(checksums.CHECKSUMS), 9)
+ assert len(checksums.CHECKSUMS) == 9
checksums_avail = ['None',
'adler32',
'crc32',
@@ -20,7 +15,7 @@ def test_checksusm_exist():
'sha256',
'sha384',
'sha512']
- nt.assert_equal(checksums.CHECKSUMS_AVAIL, checksums_avail)
+ assert checksums.CHECKSUMS_AVAIL == checksums_avail


def test_checksusm_are_sane():
@@ -43,5 +38,5 @@ def test_checksusm_are_sane():
]
for i, csum in enumerate(checksums.CHECKSUMS):
digest = csum(b"\x23\x42\xbe\xef")
- yield nt.assert_equal, len(digest), csum.size
- yield nt.assert_equal, digest, csum_targets[i]
+ assert len(digest) == csum.size
+ assert digest == csum_targets[i]
--- a/test/test_cli.py
+++ b/test/test_cli.py
@@ -1,39 +1,33 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:


from mock import patch, Mock
-import nose.tools as nt
+import pytest

from bloscpack import cli
from bloscpack.exceptions import FileNotFound

+
def test_parser():
# hmmm I guess we could override the error
- parser = cli.create_parser()
+ cli.create_parser()


@patch('os.path.exists')
def test_non_existing_input_file_raises_exception(mock_exists):
args = Mock(force=False)
mock_exists.return_value = False
- nt.assert_raises(FileNotFound,
- cli.check_files,
- 'nosuchfile',
- 'nosuchfile',
- args)
+ with pytest.raises(FileNotFound):
+ cli.check_files('nosuchfile', 'nosuchfile', args)


@patch('os.path.exists')
def test_existing_output_file_raises_exception(mock_exists):
args = Mock(force=False)
mock_exists.side_effects = [True, True]
- nt.assert_raises(FileNotFound,
- cli.check_files,
- 'anyfile',
- 'anyfile',
- args)
+ with pytest.raises(FileNotFound):
+ cli.check_files('anyfile', 'anyfile', args)


@patch('os.path.exists')
--- a/test/test_file_io.py
+++ b/test/test_file_io.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:

@@ -7,7 +6,7 @@ from __future__ import print_function


import blosc
-import nose.tools as nt
+import pytest
from mock import patch
import numpy as np

@@ -66,8 +65,8 @@ def test_offsets():
# First chunks should start after header and offsets
first = BLOSCPACK_HEADER_LENGTH + 8 * total_entries
# We assume that the others are correct
- nt.assert_equal(offsets[0], first)
- nt.assert_equal(736, offsets[0])
+ assert offsets[0] == first
+ assert 736 == offsets[0]
# try to read the second header
input_fp.seek(offsets[1], 0)
blosc_header_raw = input_fp.read(BLOSC_HEADER_LENGTH)
@@ -78,7 +77,7 @@ def test_offsets():
'typesize': 8}
blosc_header = decode_blosc_header(blosc_header_raw)
blosc_header_slice = dict((k, blosc_header[k]) for k in expected.keys())
- nt.assert_equal(expected, blosc_header_slice)
+ assert expected == blosc_header_slice

# now check the same thing again, but w/o any max_app_chunks
input_fp, output_fp = StringIO(), StringIO()
@@ -95,9 +94,9 @@ def test_offsets():
)
output_fp.seek(0, 0)
bloscpack_header = _read_bloscpack_header(output_fp)
- nt.assert_equal(0, bloscpack_header.max_app_chunks)
+ assert 0 == bloscpack_header.max_app_chunks
offsets = _read_offsets(output_fp, bloscpack_header)
- nt.assert_equal(96, offsets[0])
+ assert 96 == offsets[0]


def test_metadata():
@@ -106,7 +105,7 @@ def test_metadata():
'others': [],
}
received_metadata = pack_unpack_fp(1, metadata=test_metadata)
- nt.assert_equal(test_metadata, received_metadata)
+ assert test_metadata == received_metadata


def test_metadata_opportunisitic_compression():
@@ -117,7 +116,7 @@ def test_metadata_opportunisitic_compres
_write_metadata(target_fp, test_metadata, MetadataArgs())
target_fp.seek(0, 0)
metadata, header = _read_metadata(target_fp)
- nt.assert_equal('zlib', header['meta_codec'])
+ assert 'zlib' == header['meta_codec']

# now do the same thing, but use badly compressible metadata
test_metadata = "abc"
@@ -127,7 +126,7 @@ def test_metadata_opportunisitic_compres
target_fp.seek(0, 0)
metadata, header = _read_metadata(target_fp)
# but it wasn't of any use
- nt.assert_equal('None', header['meta_codec'])
+ assert 'None' == header['meta_codec']


def test_disable_offsets():
@@ -143,8 +142,8 @@ def test_disable_offsets():
bloscpack_args=bloscpack_args)
out_fp.seek(0)
bloscpack_header, metadata, metadata_header, offsets = \
- _read_beginning(out_fp)
- nt.assert_true(len(offsets) == 0)
+ _read_beginning(out_fp)
+ assert len(offsets) == 0


# this will cause a bug if we ever reach 255 format versions
@@ -154,8 +153,8 @@ def test_invalid_format():
with create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
create_array(1, in_file)
pack_file_to_file(in_file, out_file, blosc_args=blosc_args)
- nt.assert_raises(FormatVersionMismatch,
- unpack_file_from_file, out_file, dcmp_file)
+ with pytest.raises(FormatVersionMismatch):
+ unpack_file_from_file(out_file, dcmp_file)


def test_file_corruption():
@@ -180,7 +179,8 @@ def test_file_corruption():
# write the flipped byte
input_fp.write(replace)
# now attempt to unpack it
- nt.assert_raises(ChecksumMismatch, unpack_file_from_file, out_file, dcmp_file)
+ with pytest.raises(ChecksumMismatch):
+ unpack_file_from_file(out_file, dcmp_file)


def pack_unpack(repeats, chunk_size=None, progress=False):
@@ -248,7 +248,7 @@ def test_pack_unpack_bytes_to_from_file(
with create_tmp_files() as (tdir, in_file, out_file, dcmp_file):
pack_bytes_to_file(input_bytes, out_file)
output_bytes, _ = unpack_bytes_from_file(out_file)
- nt.assert_equal(input_bytes, output_bytes)
+ assert input_bytes == output_bytes


def test_pack_unpack_bytes_bytes():
@@ -256,7 +256,7 @@ def test_pack_unpack_bytes_bytes():
b = a.tostring()
c = pack_bytes_to_bytes(b)
d, _ = unpack_bytes_from_bytes(c)
- nt.assert_equal(b, d)
+ assert b == d


def pack_unpack_hard():
--- a/test/test_log.py
+++ b/test/test_log.py
@@ -1,4 +1,4 @@
-import nose.tools as nt
+import pytest
from mock import patch


@@ -6,7 +6,8 @@ from bloscpack import log


def test_verbose():
- nt.assert_raises(TypeError, log.verbose, 'message', 'MAXIMUM')
+ with pytest.raises(TypeError):
+ log.verbose('message', 'MAXIMUM')
log.set_level(log.DEBUG)
# should probably hijack the print statement
log.verbose('notification')
@@ -20,4 +21,5 @@ def test_error(exit_mock):


def test_set_level_exception():
- nt.assert_raises(ValueError, log.set_level, 'NO_SUCH_LEVEL')
+ with pytest.raises(ValueError):
+ log.set_level('NO_SUCH_LEVEL')
--- a/test/test_memory_io.py
+++ b/test/test_memory_io.py
@@ -1,11 +1,6 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:

-
-from nose import tools as nt
-
-
from bloscpack.abstract_io import (pack,
unpack,
)
@@ -42,7 +37,7 @@ def pack_unpack_mem(repeats, chunk_size=
print("Compressing")
in_fp.seek(0)
nchunks, chunk_size, last_chunk_size = \
- calculate_nchunks(in_fp_size, chunk_size)
+ calculate_nchunks(in_fp_size, chunk_size)
# let us play merry go round
source = PlainFPSource(in_fp)
sink = CompressedMemorySink()
@@ -50,7 +45,7 @@ def pack_unpack_mem(repeats, chunk_size=
source = CompressedMemorySource(sink)
sink = PlainMemorySink()
unpack(source, sink)
- nt.assert_equal(metadata, source.metadata)
+ assert metadata == source.metadata
source = PlainMemorySource(sink.chunks)
sink = CompressedFPSink(out_fp)
pack(source, sink, nchunks, chunk_size, last_chunk_size, metadata=metadata)
@@ -58,7 +53,7 @@ def pack_unpack_mem(repeats, chunk_size=
source = CompressedFPSource(out_fp)
sink = PlainFPSink(dcmp_fp)
unpack(source, sink)
- nt.assert_equal(metadata, source.metadata)
+ assert metadata == source.metadata
in_fp.seek(0)
dcmp_fp.seek(0)
cmp_fp(in_fp, dcmp_fp)
--- a/test/test_metacodecs.py
+++ b/test/test_metacodecs.py
@@ -1,11 +1,6 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:

-
-import nose.tools as nt
-
-
from bloscpack.args import (DEFAULT_META_LEVEL,
)
from bloscpack.metacodecs import (CODECS,
@@ -14,8 +9,8 @@ from bloscpack.metacodecs import (CODECS


def test_codecs():
- nt.assert_equal(CODECS_AVAIL, ['None', 'zlib'])
+ assert CODECS_AVAIL == ['None', 'zlib']
random_str = b"4KzGCl7SxTsYLaerommsMWyZg1TXbV6wsR9Xk"
for i, c in enumerate(CODECS):
- nt.assert_equal(random_str, c.decompress(
- c.compress(random_str, DEFAULT_META_LEVEL)))
+ assert random_str == c.decompress(
+ c.compress(random_str, DEFAULT_META_LEVEL))
--- a/test/test_numpy_io.py
+++ b/test/test_numpy_io.py
@@ -1,12 +1,11 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:


import numpy as np
import numpy.testing as npt
-import nose.tools as nt
import mock
+import pytest


from bloscpack.abstract_io import (pack,
@@ -77,7 +76,7 @@ def test_conv():
)
for input_, expected in test_data:
received = _conv(input_)
- yield nt.assert_equal, expected, received
+ assert expected == received


def test_unpack_exception():
@@ -87,14 +86,15 @@ def test_unpack_exception():
source = PlainFPSource(StringIO(a_str))
sink = CompressedFPSink(sio)
pack(source, sink, *calculate_nchunks(len(a_str)))
- nt.assert_raises(NotANumpyArray, unpack_ndarray_from_bytes, sio.getvalue())
+ with pytest.raises(NotANumpyArray):
+ unpack_ndarray_from_bytes(sio.getvalue())


def roundtrip_ndarray(ndarray):
- yield roundtrip_numpy_memory(ndarray)
- yield roundtrip_numpy_str(ndarray)
- yield roundtrip_numpy_file_pointers(ndarray)
- yield roundtrip_numpy_file(ndarray)
+ yield lambda: roundtrip_numpy_memory(ndarray)
+ yield lambda: roundtrip_numpy_str(ndarray)
+ yield lambda: roundtrip_numpy_file_pointers(ndarray)
+ yield lambda: roundtrip_numpy_file(ndarray)


def test_numpy_dtypes_shapes_order():
@@ -102,45 +102,45 @@ def test_numpy_dtypes_shapes_order():
# happy trail
a = np.arange(50)
for case in roundtrip_ndarray(a):
- yield case
+ case()

for dt in np.sctypes['int'] + np.sctypes['uint'] + np.sctypes['float']:
a = np.arange(64, dtype=dt)
for case in roundtrip_ndarray(a):
- yield case
+ case()
a = a.copy().reshape(8, 8)
for case in roundtrip_ndarray(a):
- yield case
+ case()
a = a.copy().reshape(4, 16)
for case in roundtrip_ndarray(a):
- yield case
+ case()
a = a.copy().reshape(4, 4, 4)
for case in roundtrip_ndarray(a):
- yield case
+ case()
a = np.asfortranarray(a)
- nt.assert_true(np.isfortran(a))
+ assert np.isfortran(a)
for case in roundtrip_ndarray(a):
- yield case
+ case()

# Fixed width string arrays
a = np.array(['abc', 'def', 'ghi'])
for case in roundtrip_ndarray(a):
- yield case
+ case()

# This actually get's cast to a fixed width string array
a = np.array([(1, 'abc'), (2, 'def'), (3, 'ghi')])
for case in roundtrip_ndarray(a):
- yield case
+ case()

## object arrays
#a = np.array([(1, 'abc'), (2, 'def'), (3, 'ghi')], dtype='object')
#for case in roundtrip_ndarray(a):
- #    yield case
+ #    case()

# structured array
a = np.array([('a', 1), ('b', 2)], dtype=[('a', 'S1'), ('b', 'f8')])
for case in roundtrip_ndarray(a):
- yield case
+ case()

# record array
a = np.array([(1, 'O', 1)],
@@ -148,7 +148,7 @@ def test_numpy_dtypes_shapes_order():
('symbol', '|S1'),
('index', 'int32')]))
for case in roundtrip_ndarray(a):
- yield case
+ case()

# and a nested record array
dt = [('year', '<i4'),
@@ -162,28 +162,30 @@ def test_numpy_dtypes_shapes_order():
('ARG', 12.)))],
dt)
for case in roundtrip_ndarray(a):
- yield case
+ case()

# what about endianess
a = np.arange(10, dtype='>i8')
for case in roundtrip_ndarray(a):
- yield case
+ case()

# empty array
a = np.array([], dtype='f8')
for case in roundtrip_ndarray(a):
- yield case
+ case()


def test_reject_object_array():
a = np.array([(1, 'abc'), (2, 'def'), (3, 'ghi')], dtype='object')
- nt.assert_raises(ObjectNumpyArrayRejection, roundtrip_numpy_memory, a)
+ with pytest.raises(ObjectNumpyArrayRejection):
+ roundtrip_numpy_memory(a)


def test_reject_nested_object_array():
a = np.array([(1, 'abc'), (2, 'def'), (3, 'ghi')],
- dtype=[('a', int), ('b', 'object')])
- nt.assert_raises(ObjectNumpyArrayRejection, roundtrip_numpy_memory, a)
+ dtype=[('a', int), ('b', 'object')])
+ with pytest.raises(ObjectNumpyArrayRejection):
+ roundtrip_numpy_memory(a)


def test_backwards_compat():
@@ -209,23 +211,22 @@ def test_backwards_compat():
c = pack_ndarray_to_bytes(a)
# should not raise a SyntaxError
d = unpack_ndarray_from_bytes(c)
- yield npt.assert_array_equal, a, d
+ npt.assert_array_equal(a, d)


def test_itemsize_chunk_size_mismatch():
a = np.arange(1000)
# typesize of the array is 8, let's glitch the typesize
for i in [1, 2, 3, 5, 6, 7, 9, 10, 11, 13, 14, 15]:
- yield nt.assert_raises, \
- ChunkSizeTypeSizeMismatch, \
- pack_ndarray_to_bytes, a, i
+ with pytest.raises(ChunkSizeTypeSizeMismatch):
+ pack_ndarray_to_bytes(a, i)


def test_larger_arrays():
for dt in ('uint64', 'int64', 'float64'):
a = np.arange(2e4, dtype=dt)
for case in roundtrip_ndarray(a):
- yield case
+ case()


def huge_arrays():
@@ -233,7 +234,7 @@ def huge_arrays():
# needs plenty of memory
a = np.arange(1e8, dtype=dt)
for case in roundtrip_ndarray(a):
- yield case
+ case()


def test_alternate_cname():
@@ -249,7 +250,7 @@ def test_alternate_cname():
sink = CompressedMemorySink()
pack_ndarray(array_, sink, blosc_args=blosc_args)
blosc_header = decode_blosc_header(sink.chunks[0])
- yield nt.assert_equal, blosc_header['flags'] >> 5, int_id
+ assert blosc_header['flags'] >> 5 == int_id


def test_typesize_is_set_correctly_with_default_blosc_args():
@@ -257,7 +258,7 @@ def test_typesize_is_set_correctly_with_
sink = CompressedMemorySink()
pack_ndarray(a, sink)
expected_args = BloscArgs(typesize=1)
- nt.assert_equal(expected_args, sink.blosc_args)
+ assert expected_args == sink.blosc_args


def test_typesize_is_set_correctly_with_custom_blosc_args():
@@ -266,11 +267,11 @@ def test_typesize_is_set_correctly_with_
input_args = BloscArgs(clevel=9)
pack_ndarray(a, sink, blosc_args=input_args)
expected_args = BloscArgs(clevel=9, typesize=1)
- nt.assert_equal(expected_args, sink.blosc_args)
+ assert expected_args == sink.blosc_args


def test_roundtrip_slice():
a = np.arange(100).reshape((10, 10))
s = a[3:5, 3:5]
for case in roundtrip_ndarray(s):
- yield case
+ case()
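Note on roundtrip_ndarray above: because the call sites keep the "for case in roundtrip_ndarray(a): case()" shape, the helper has to produce zero-argument callables rather than run the roundtrips itself. A minimal, self-contained sketch of that pattern; the checker functions here are placeholders, not bloscpack APIs:

    def roundtrip_cases(value):
        # yield zero-argument callables; each call site invokes them in turn
        yield lambda: check_memory(value)
        yield lambda: check_file(value)


    def check_memory(value):
        assert value == value  # placeholder for a real memory roundtrip check


    def check_file(value):
        assert value == value  # placeholder for a real file roundtrip check


    for case in roundtrip_cases(42):
        case()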
--- a/test/test_pretty.py
+++ b/test/test_pretty.py
@@ -1,9 +1,8 @@
-#!/usr/bin/env nosetests
# -*- coding: utf-8 -*-
# vim :set ft=py:


-import nose.tools as nt
+import pytest


from bloscpack.pretty import (pretty_size,
@@ -13,17 +12,18 @@ from bloscpack.pretty import (pretty_siz

def test_pretty_filesieze():

- nt.assert_equal('0B', pretty_size(0))
- nt.assert_equal('9.0T', pretty_size(9898989898879))
- nt.assert_equal('4.78G', pretty_size(5129898234))
- nt.assert_equal('12.3M', pretty_size(12898234))
- nt.assert_equal('966.7K', pretty_size(989898))
- nt.assert_equal('128.0B', pretty_size(128))
- nt.assert_equal(0, reverse_pretty('0B'))
- nt.assert_equal(8, reverse_pretty('8B'))
- nt.assert_equal(8192, reverse_pretty('8K'))
- nt.assert_equal(134217728, reverse_pretty('128M'))
- nt.assert_equal(2147483648, reverse_pretty('2G'))
- nt.assert_equal(2199023255552, reverse_pretty('2T'))
+ assert '0B' == pretty_size(0)
+ assert '9.0T' == pretty_size(9898989898879)
+ assert '4.78G' == pretty_size(5129898234)
+ assert '12.3M' == pretty_size(12898234)
+ assert '966.7K' == pretty_size(989898)
+ assert '128.0B' == pretty_size(128)
+ assert 0 == reverse_pretty('0B')
+ assert 8 == reverse_pretty('8B')
+ assert 8192 == reverse_pretty('8K')
+ assert 134217728 == reverse_pretty('128M')
+ assert 2147483648 == reverse_pretty('2G')
+ assert 2199023255552 == reverse_pretty('2T')
# can't handle Petabytes, yet
- nt.assert_raises(ValueError, reverse_pretty, '2P')
+ with pytest.raises(ValueError):
+ reverse_pretty('2P')
--- a/test/test_serializers.py
+++ b/test/test_serializers.py
@@ -2,9 +2,6 @@
# -*- coding: utf-8 -*-
# vim :set ft=py:

-import nose.tools as nt
-
-
try:
from collections import OrderedDict
except ImportError: # pragma: no cover
@@ -17,11 +14,11 @@ from bloscpack.serializers import (SERIA


def test_serializers():
- nt.assert_equal(SERIALIZERS_AVAIL, [b'JSON'])
+ assert SERIALIZERS_AVAIL == [b'JSON']
output = '{"dtype":"float64","shape":[1024],"others":[]}'
input_ = OrderedDict([('dtype', "float64"),
('shape', [1024]),
('others', [])])
for s in SERIALIZERS:
- yield nt.assert_equal, output, s.dumps(input_)
- yield nt.assert_equal, input_, s.loads(output)
+ assert output == s.dumps(input_)
+ assert input_ == s.loads(output)
--- a/bloscpack/file_io.py
+++ b/bloscpack/file_io.py
@@ -350,7 +350,7 @@ class PlainFPSource(PlainSource):
for num_bytes in ([self.chunk_size] *
(self.nchunks - 1) +
[self.last_chunk]):
- yield self.input_fp.read(num_bytes)
+ yield self.input_fp.read(num_bytes)


class CompressedFPSource(CompressedSource):
@@ -366,7 +366,7 @@ class CompressedFPSource(CompressedSourc
def __iter__(self):
for i in xrange(self.nchunks):
compressed, header, digest = _read_compressed_chunk_fp(self.input_fp, self.checksum_impl)
- yield compressed, digest
+ yield compressed, digest


class PlainFPSink(PlainSink):
--- a/bloscpack/memory_io.py
+++ b/bloscpack/memory_io.py
@@ -19,7 +19,7 @@ class PlainMemorySource(PlainSource):

def __iter__(self):
for c in self.chunks:
- yield c
+ yield c


class CompressedMemorySource(CompressedSource):
@@ -42,7 +42,7 @@ class CompressedMemorySource(CompressedS
for i in xrange(self.nchunks):
compressed = self.chunks[i]
digest = self.checksums[i] if self.checksum else None
- yield compressed, digest
+ yield compressed, digest


class PlainMemorySink(PlainSink):
--- a/bloscpack/numpy_io.py
+++ b/bloscpack/numpy_io.py
@@ -90,9 +90,9 @@ class PlainNumpySource(PlainSource):
self.nitems = int(self.chunk_size / self.ndarray.itemsize)
offset = self.ptr
for i in xrange(self.nchunks - 1):
- yield offset, self.nitems
+ yield offset, self.nitems
offset += self.chunk_size
- yield offset, int(self.last_chunk / self.ndarray.itemsize)
+ yield offset, int(self.last_chunk / self.ndarray.itemsize)


def _conv(descr):
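A side note on the last three hunks: those yield statements sit in __iter__ methods of the pack/unpack sources, not in test code, so they are part of the chunk-iteration protocol rather than nose test generators and must survive the migration unchanged. A minimal sketch of the pattern; ChunkSource is a simplified stand-in, not the real bloscpack class:

    class ChunkSource(object):
        """Simplified stand-in for bloscpack's Plain/Compressed sources."""

        def __init__(self, chunks):
            self.chunks = chunks

        def __iter__(self):
            # the yield makes __iter__ a generator function; dropping it
            # would make iteration over the source impossible
            for c in self.chunks:
                yield c


    # pack()-style consumers simply iterate over the source
    source = ChunkSource([b'chunk-one', b'chunk-two'])
    assert list(source) == [b'chunk-one', b'chunk-two']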