Fix test #73

Merged: 2 commits, Oct 17, 2023
14 changes: 7 additions & 7 deletions .pre-commit-config.yaml
@@ -1,29 +1,29 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.4.0
+    rev: v4.5.0
     hooks:
       - id: check-merge-conflict
       - id: debug-statements
       - id: mixed-line-ending
       - id: check-case-conflict
       - id: check-yaml
   - repo: https://github.com/asottile/reorder_python_imports
-    rev: v2.4.0
+    rev: v3.12.0
     hooks:
       - id: reorder-python-imports
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.12.0
+    rev: v3.15.0
     hooks:
       - id: pyupgrade
         args: [--py3-plus, --py36-plus]
   - repo: https://github.com/psf/black
-    rev: 20.8b1
+    rev: 23.9.1
     hooks:
       - id: black
         language_version: python3
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.1
+  - repo: https://github.com/pycqa/flake8
+    rev: 6.1.0
     hooks:
       - id: flake8
         args: [--config=.flake8]
-        additional_dependencies: ["flake8-bugbear==20.1.4", "flake8-builtins==1.5.3"]
+        additional_dependencies: ["flake8-bugbear==23.9.16", "flake8-builtins==2.1.0"]
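
These version bumps are plain pre-commit pins; a minimal sketch of how they might be refreshed and exercised locally, assuming pre-commit is installed and the commands are run from the repository root (not part of this PR):

    import subprocess

    # "pre-commit autoupdate" rewrites each rev: in .pre-commit-config.yaml to the
    # hook's latest tag; "pre-commit run --all-files" then runs every hook over the
    # whole working tree, which is how bumps like the ones above are usually checked.
    subprocess.run(["pre-commit", "autoupdate"], check=True)
    subprocess.run(["pre-commit", "run", "--all-files"], check=True)
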
9 changes: 4 additions & 5 deletions tests/test_cli.py
@@ -288,11 +288,10 @@ def test_bad_file_format(self):
         with self.assertRaises(TestException):
             self.run_tszip([str(self.trees_path)])
         mocked_exit.assert_called_once_with(
-            f"Error loading '{self.trees_path}': File not in kastore format. If this"
-            f" file was generated by msprime < 0.6.0 (June 2018) it uses the old"
-            f" HDF5-based format which can no longer be read directly. Please"
-            f" convert to the new kastore format using the ``tskit upgrade``"
-            f" command."
+            f"Error loading '{self.trees_path}': File not in kastore format. Either"
+            f" the file is corrupt or it is not a tskit tree sequence file. It may"
+            f" be a legacy HDF file upgradable with `tskit upgrade` or a compressed"
+            f" tree sequence file that can be decompressed with `tszip`."
         )

     def test_compress_stdout_keep(self):
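
The mocked_exit and TestException names above come from test fixtures outside this hunk. A self-contained sketch of that style of check, in which sys.exit is patched so the command raises instead of terminating; fake_cli and the file name are invented for illustration and are not tszip's actual test setup:

    import sys
    import unittest
    from unittest import mock


    class TestException(Exception):
        """Raised in place of exiting so the test regains control."""


    def fake_cli(path):
        # Stand-in for a CLI entry point that reports a load error via sys.exit().
        sys.exit(f"Error loading '{path}': File not in kastore format.")


    class ExampleCliTest(unittest.TestCase):
        def test_error_message(self):
            with mock.patch("sys.exit", side_effect=TestException) as mocked_exit:
                with self.assertRaises(TestException):
                    fake_cli("bad_file.trees")
                mocked_exit.assert_called_once_with(
                    "Error loading 'bad_file.trees': File not in kastore format."
                )


    if __name__ == "__main__":
        unittest.main()
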
12 changes: 6 additions & 6 deletions tests/test_compression.py
@@ -57,36 +57,36 @@ def test_int16(self):
         self.verify(
             np.int16,
             np.array(
-                [2 ** 15 - 1, -(2 ** 15) + 1, 2 ** 7 + 1, -(2 ** 7) - 1], dtype=np.int64
+                [2**15 - 1, -(2**15) + 1, 2**7 + 1, -(2**7) - 1], dtype=np.int64
             ),
         )

     def test_uint16(self):
-        self.verify(np.uint16, np.array([256, 2 ** 16 - 1], dtype=np.uint64))
+        self.verify(np.uint16, np.array([256, 2**16 - 1], dtype=np.uint64))

     def test_int32(self):
         self.verify(
             np.int32,
             np.array(
-                [2 ** 31 - 1, -(2 ** 31) + 1, 2 ** 15 + 1, -(2 ** 15) - 1],
+                [2**31 - 1, -(2**31) + 1, 2**15 + 1, -(2**15) - 1],
                 dtype=np.int64,
             ),
         )

     def test_uint32(self):
-        self.verify(np.uint32, np.array([2 ** 16 + 1, 2 ** 32 - 1], dtype=np.uint64))
+        self.verify(np.uint32, np.array([2**16 + 1, 2**32 - 1], dtype=np.uint64))

     def test_int64(self):
         self.verify(
             np.int64,
             np.array(
-                [2 ** 63 - 1, -(2 ** 63) + 1, 2 ** 31 + 1, -(2 ** 31) - 1],
+                [2**63 - 1, -(2**63) + 1, 2**31 + 1, -(2**31) - 1],
                 dtype=np.int64,
             ),
         )

     def test_uint64(self):
-        self.verify(np.uint64, np.array([2 ** 32 + 1, 2 ** 64 - 1], dtype=np.uint64))
+        self.verify(np.uint64, np.array([2**32 + 1, 2**64 - 1], dtype=np.uint64))

     def test_float32(self):
         self.verify(np.float32, np.array([0.1, 1e-3], dtype=np.float32))
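
The verify helper these tests call is defined elsewhere in the file. One plausible shape of such a check, shown only as a hypothetical sketch (verify_roundtrip is not tszip's implementation): the 64-bit boundary values must survive a round trip through the narrower target dtype unchanged.

    import numpy as np


    def verify_roundtrip(dtype, values):
        # Cast the 64-bit test values to the narrower target dtype and back,
        # asserting that nothing is lost at the type's boundary values.
        converted = values.astype(dtype)
        assert converted.dtype == np.dtype(dtype)
        assert np.array_equal(converted.astype(values.dtype), values)


    # Mirrors the boundary values exercised by test_uint16 above.
    verify_roundtrip(np.uint16, np.array([256, 2**16 - 1], dtype=np.uint64))
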
13 changes: 6 additions & 7 deletions tszip/compression.py
@@ -107,7 +107,7 @@ def compress(ts, destination, variants_only=False):
         else:
             # Assume that destination is a file-like object open in "wb" mode.
             with open(filename, "rb") as source:
-                chunk_size = 2 ** 10  # 1MiB
+                chunk_size = 2**10  # 1MiB
                 for chunk in iter(functools.partial(source.read, chunk_size), b""):
                     destination.write(chunk)
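
Beyond the black reformatting of 2 ** 10, this hunk uses the two-argument iter(callable, sentinel) idiom: read() is called repeatedly until it returns the empty-bytes sentinel at end of file. A self-contained sketch of the same pattern; copy_in_chunks and its default size are illustrative, not tszip API (2**10 bytes is 1 KiB per read):

    import functools


    def copy_in_chunks(src_path, dst_path, chunk_size=2**10):
        # iter(f, sentinel) keeps calling f() and yielding its result until it
        # returns the sentinel, here b"" when read() hits end of file.
        with open(src_path, "rb") as source, open(dst_path, "wb") as destination:
            for chunk in iter(functools.partial(source.read, chunk_size), b""):
                destination.write(chunk)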

@@ -167,7 +167,6 @@ def compress(self, root, compressor):


 def compress_zarr(ts, root, variants_only=False):
-
     provenance_dict = provenance.get_provenance_dict({"variants_only": variants_only})

     if variants_only:
@@ -345,12 +344,12 @@ def visitor(array):

     arrays.sort(key=lambda x: x.nbytes_stored)
     max_name_len = max(len(array.name) for array in arrays)
-    stored = [
+    storeds = [
         humanize.naturalsize(array.nbytes_stored, binary=True) for array in arrays
     ]
-    max_stored_len = max(len(size) for size in stored)
-    actual = [humanize.naturalsize(array.nbytes, binary=True) for array in arrays]
-    max_actual_len = max(len(size) for size in actual)
+    max_stored_len = max(len(size) for size in storeds)
+    actuals = [humanize.naturalsize(array.nbytes, binary=True) for array in arrays]
+    max_actual_len = max(len(size) for size in actuals)

     line = "File: {}\t{}".format(
         path, humanize.naturalsize(os.path.getsize(path), binary=True)
@@ -372,7 +371,7 @@ def visitor(array):
         "ratio",
     )
     print(line)
-    for array, stored, actual in zip(arrays, stored, actual):
+    for array, stored, actual in zip(arrays, storeds, actuals):
         ratio = 0
         if array.nbytes > 0:
             ratio = array.nbytes_stored / array.nbytes
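
The stored/actual to storeds/actuals rename stops the loop above from rebinding the names of the lists it iterates over, a pattern that is legal but confusing and presumably what the newer flake8-bugbear pin now flags (B020). A standalone illustration with made-up values:

    # zip() captures the two lists before the loop rebinds their names, so the
    # old code worked, but after the loop "stored" and "actual" are single
    # strings rather than the original lists.
    stored = ["1.0 KiB", "2.0 KiB"]
    actual = ["4.0 KiB", "8.0 KiB"]
    for stored, actual in zip(stored, actual):
        print(stored, actual)
    print(stored, actual)  # prints "2.0 KiB 8.0 KiB": the lists are gone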