From 0b81d04dd150b587b1644d6ed7523fc160ce61ab Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Oriol=20Tint=C3=B3?= <oriol.tinto@lmu.de>
Date: Mon, 19 Jun 2023 10:15:23 +0200
Subject: [PATCH] Add consistency tests for SZ.

---
 tests/test_sz.py | 84 +++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 83 insertions(+), 1 deletion(-)

diff --git a/tests/test_sz.py b/tests/test_sz.py
index 420c278..8153790 100644
--- a/tests/test_sz.py
+++ b/tests/test_sz.py
@@ -1,3 +1,5 @@
+import numpy as np
+
 from utils import wrapper, TestClass
 
 folders = None
@@ -13,13 +15,93 @@ class TestSZ(TestClass):
         analyze_files(file_paths=input_path, compressor="sz")
 
     def test_compress_sz_pw_rel(self):
-        compression = "lossy,sz,pw_rel,0.1"
+        compression = "lossy,sz,pw_rel,0.001"
         wrapper(self, compression=compression)
 
+    def test_consistency_sz_pw_rel(self):
+        import enstools.compression.api
+        from enstools.encoding.api import VariableEncoding
+        import enstools.io
+        tolerance = 0.001
+        compression = f"lossy,sz,pw_rel,{tolerance}"
+        # Check that, after a compression/decompression round-trip, every value
+        # stays within the requested point-wise relative tolerance.
+        datasets = ["dataset_%iD.nc" % dimension for dimension in range(1, 4)]
+        for dataset_name in datasets:
+            input_path = self.input_directory_path / dataset_name
+
+            with enstools.io.read(input_path) as ds:
+                for var in ds.data_vars:
+                    data_array = ds[var]
+                    encoding = VariableEncoding(specification=compression)
+                    compressed_da, _ = enstools.compression.api.emulate_compression_on_data_array(
+                        data_array=data_array,
+                        compression_specification=encoding,
+                        in_place=False,
+                    )
+                    diff = compressed_da - data_array
+                    # NOTE(review): zeros in data_array would yield inf/nan here — confirm
+                    # the test datasets contain no exact zeros.
+                    diff /= data_array
+
+                    # diff is already the *relative* error, so it is compared against
+                    # the tolerance itself. Comparing against
+                    # data_array.values * tolerance would apply the normalisation
+                    # twice and spuriously fail for values < 1 or negative values.
+                    assert (np.abs(diff.values) < tolerance).all()
+
     def test_compress_sz_abs(self):
         compression = "lossy,sz,abs,0.01"
         wrapper(self, compression=compression)
 
+    def test_consistency_sz_abs(self):
+        import enstools.compression.api
+        from enstools.encoding.api import VariableEncoding
+        import enstools.io
+        tolerance = 0.01
+        compression = f"lossy,sz,abs,{tolerance}"
+        # Check that, after a compression/decompression round-trip, every value
+        # stays within the requested absolute tolerance.
+        datasets = ["dataset_%iD.nc" % dimension for dimension in range(1, 4)]
+        for dataset_name in datasets:
+            input_path = self.input_directory_path / dataset_name
+
+            with enstools.io.read(input_path) as ds:
+                for var in ds.data_vars:
+                    data_array = ds[var]
+                    encoding = VariableEncoding(specification=compression)
+                    compressed_da, _ = enstools.compression.api.emulate_compression_on_data_array(
+                        data_array=data_array,
+                        compression_specification=encoding,
+                        in_place=False,
+                    )
+                    diff = compressed_da - data_array
+                    assert (np.abs(diff.values) < tolerance).all()
+
     def test_compress_sz_rel(self):
         compression = "lossy,sz,rel,0.001"
         wrapper(self, compression=compression)
+
+    def test_consistency_sz_rel(self):
+        import enstools.compression.api
+        from enstools.encoding.api import VariableEncoding
+        import enstools.io
+        tolerance = 0.01
+        compression = f"lossy,sz,rel,{tolerance}"
+        # Check that, after a compression/decompression round-trip, every value
+        # stays within a tolerance defined relative to the value range of the
+        # variable (SZ's value-range-relative mode).
+        datasets = ["dataset_%iD.nc" % dimension for dimension in range(1, 4)]
+        for dataset_name in datasets:
+            input_path = self.input_directory_path / dataset_name
+
+            with enstools.io.read(input_path) as ds:
+                for var in ds.data_vars:
+                    data_array = ds[var]
+                    encoding = VariableEncoding(specification=compression)
+                    compressed_da, _ = enstools.compression.api.emulate_compression_on_data_array(
+                        data_array=data_array,
+                        compression_specification=encoding,
+                        in_place=False,
+                    )
+                    # "rel" bounds the error relative to the variable's value range,
+                    # so derive the absolute bound from (max - min) * tolerance.
+                    abs_tolerance = float(data_array.max() - data_array.min()) * tolerance
+                    diff = compressed_da - data_array
+                    assert (np.abs(diff.values) < abs_tolerance).all()
-- 
GitLab