From 1d13c236aeaa408a7a165379749f4e015b7e287b Mon Sep 17 00:00:00 2001 From: Andrej Fast Date: Thu, 7 Mar 2024 14:16:04 +0100 Subject: [PATCH 01/18] fixed wrong score for two cf-checks which resulted in appearance of warnings even if there is nothing to warn --- compliance_checker/cf/cf_1_6.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py index dd571c07..cc9b544f 100644 --- a/compliance_checker/cf/cf_1_6.py +++ b/compliance_checker/cf/cf_1_6.py @@ -425,7 +425,7 @@ def check_fill_value_equal_missing_value(self, ds): return Result( BaseCheck.MEDIUM, - (len(fails), total), + (total - len(fails), total), self.section_titles["2.5"], msgs=fails, ) @@ -455,7 +455,7 @@ def check_valid_range_or_valid_min_max_present(self, ds): return Result( BaseCheck.MEDIUM, - (len(fails), total), + (total - len(fails), total), self.section_titles["2.5"], msgs=fails, ) From 9f98bc69beba5f93f7613d23341f37598815500b Mon Sep 17 00:00:00 2001 From: Andrej Fast Date: Thu, 7 Mar 2024 14:16:28 +0100 Subject: [PATCH 02/18] removed a very verbose debug print statement which resulted in a flooded CLI --- compliance_checker/cf/cf_1_7.py | 1 - 1 file changed, 1 deletion(-) diff --git a/compliance_checker/cf/cf_1_7.py b/compliance_checker/cf/cf_1_7.py index e47ca240..c8718e1a 100644 --- a/compliance_checker/cf/cf_1_7.py +++ b/compliance_checker/cf/cf_1_7.py @@ -396,7 +396,6 @@ def check_cell_boundaries_interval(self, ds): reasoning, ) ret_val.append(result) - print(ret_val) return ret_val def check_cell_measures(self, ds): From 74f356bdaa34a3f863cb86019a409f6acf67eb49 Mon Sep 17 00:00:00 2001 From: Rob Cermak Date: Mon, 18 Mar 2024 23:26:03 -0700 Subject: [PATCH 03/18] pytest fixes * base.py: Sometimes cfutil is None * protocols/netcdf.py: - Return False right away if content_type is None - Support extended type strings: "application/x-netcdf;ver=4" * suite.py: If text for xml fails, look for netcdf one more time * Convert suite.run() calls to suite.run_all() calls * Update hyrax url * Update thredds url * Add some development notes for troubleshooting tests with pytest using the vcr feature. --- compliance_checker/base.py | 5 +++-- compliance_checker/protocols/netcdf.py | 6 +++++- compliance_checker/suite.py | 5 +++++ compliance_checker/tests/test_cf.py | 3 ++- .../tests/test_cf_integration.py | 12 +++++++---- compliance_checker/tests/test_protocols.py | 11 ++++++++-- compliance_checker/tests/test_suite.py | 21 ++++++++++++------- docs/source/development.md | 11 ++++++++++ docs/source/faq.md | 6 ++++-- docs/source/index.rst | 1 + 10 files changed, 62 insertions(+), 19 deletions(-) create mode 100644 docs/source/development.md diff --git a/compliance_checker/base.py b/compliance_checker/base.py index c2674e85..dbad8518 100644 --- a/compliance_checker/base.py +++ b/compliance_checker/base.py @@ -192,8 +192,9 @@ def __del__(self): inadvertently mutated by other functions. 
""" - cfutil.get_geophysical_variables.cache_clear() - cfutil.get_time_variables.cache_clear() + if cfutil is not None: + cfutil.get_geophysical_variables.cache_clear() + cfutil.get_time_variables.cache_clear() class BaseNCCheck: diff --git a/compliance_checker/protocols/netcdf.py b/compliance_checker/protocols/netcdf.py index 415a94ec..9bcfa1db 100644 --- a/compliance_checker/protocols/netcdf.py +++ b/compliance_checker/protocols/netcdf.py @@ -90,6 +90,10 @@ def is_remote_netcdf(ds_str): else: content_type = head_req.headers.get("content-type") + if content_type is None: + return False + # if the Content-Type header returned was "application/x-netcdf", # or a netCDF file (not OPeNDAP) we can open this into a Dataset - return content_type == "application/x-netcdf" + # Add support for application/x-netcdf;ver=4 + return content_type.split(";")[0] == "application/x-netcdf" diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index d0e84769..733328df 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -871,6 +871,11 @@ def load_remote_dataset(self, ds_str): content_type = response.headers.get("content-type") if content_type.split(";")[0] == "text/xml": return self.process_doc(response.content) + elif content_type.split(";")[0] == "application/x-netcdf": + return Dataset( + urlparse(response.url).path, + memory=response.content, + ) else: raise ValueError( f"Unknown service with content-type: {content_type}", diff --git a/compliance_checker/tests/test_cf.py b/compliance_checker/tests/test_cf.py index 360683be..6f79053d 100644 --- a/compliance_checker/tests/test_cf.py +++ b/compliance_checker/tests/test_cf.py @@ -1794,7 +1794,8 @@ def test_64bit(self): dataset = self.load_dataset(STATIC_FILES["ints64"]) suite = CheckSuite() suite.checkers = {"cf": CF1_6Check} - suite.run(dataset, "cf") + #suite.run(dataset, "cf") + suite.run_all(dataset, ["cf"], skip_checks=["cf"]) def test_variable_feature_check(self): # non-compliant dataset -- 1/1 fail diff --git a/compliance_checker/tests/test_cf_integration.py b/compliance_checker/tests/test_cf_integration.py index 5d162672..978c9047 100644 --- a/compliance_checker/tests/test_cf_integration.py +++ b/compliance_checker/tests/test_cf_integration.py @@ -247,7 +247,8 @@ def get_results(self, check_results, checksuite): ], # must be specified to load this param at runtime, instead of at collection ) def test_cf_integration(self, loaded_dataset, expected_messages, cs): - check_results = cs.run(loaded_dataset, [], "cf") + #check_results = cs.run(loaded_dataset, [], "cf") + check_results = cs.run_all(loaded_dataset, ["cf"], skip_checks=[]) scored, out_of, messages = self.get_results(check_results, cs) assert scored < out_of @@ -272,14 +273,16 @@ def test_cf_integration(self, loaded_dataset, expected_messages, cs): indirect=["loaded_dataset"], ) def test_no_incorrect_errors(self, cs, loaded_dataset, wrong_message): - check_results = cs.run(loaded_dataset, [], True, "cf") + #check_results = cs.run(loaded_dataset, [], True, "cf") + check_results = cs.run_all(loaded_dataset, ["cf"], skip_checks=[]) messages = self.get_results(check_results, cs)[-1] assert wrong_message not in "".join(messages) @pytest.mark.parametrize("loaded_dataset", ["fvcom"], indirect=True) def test_fvcom(self, cs, loaded_dataset): - check_results = cs.run(loaded_dataset, [], True, "cf") + #check_results = cs.run(loaded_dataset, [], True, "cf") + check_results = cs.run_all(loaded_dataset, ["cf"], skip_checks=[]) scored, out_of, messages = 
self.get_results(check_results, cs) assert scored < out_of @@ -307,6 +310,7 @@ def test_ncei_templates(self, cs, loaded_dataset): Tests some of the NCEI NetCDF templates, which usually should get a perfect score. """ - check_results = cs.run(loaded_dataset, [], "cf") + #check_results = cs.run(loaded_dataset, [], "cf") + check_results = cs.run_all(loaded_dataset, ["cf"], skip_checks=[]) scored, out_of, messages = self.get_results(check_results, cs) assert scored < out_of diff --git a/compliance_checker/tests/test_protocols.py b/compliance_checker/tests/test_protocols.py index f43bce68..faa68d41 100644 --- a/compliance_checker/tests/test_protocols.py +++ b/compliance_checker/tests/test_protocols.py @@ -38,7 +38,10 @@ def test_hyrax(): """ Tests that a connection can be made to Hyrax """ - url = "http://test.opendap.org:8080/opendap/ioos/mday_joinExist.ncml" + # Returns: error 405 + # url = "http://test.opendap.org:8080/opendap/ioos/mday_joinExist.ncml" + # More direct file + url = "http://test.opendap.org:8080/opendap/ioos/mday_joinExist.ncml.dap.nc4" cs = CheckSuite() ds = cs.load_dataset(url) assert ds is not None @@ -48,13 +51,17 @@ def test_thredds(): """ Tests that a connection can be made to a remote THREDDS endpoint """ - url = "http://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p25deg_ana/TP" + # Returns: error 400 + #url = "http://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p25deg_ana/TP" + # Use a smaller dataset + url = "https://thredds.ucar.edu/thredds/ncss/grid/grib/NCEP/GFS/Global_0p25deg_ana/TP?var=Temperature_altitude_above_msl&accept=netcdf3" cs = CheckSuite() ds = cs.load_dataset(url) assert ds is not None +@pytest.mark.skip(reason="The thredds endpoint is no longer serving SOS.") def test_sos(): """ Tests that a connection can be made to an SOS endpoint diff --git a/compliance_checker/tests/test_suite.py b/compliance_checker/tests/test_suite.py index 54f49a95..ad6d525c 100644 --- a/compliance_checker/tests/test_suite.py +++ b/compliance_checker/tests/test_suite.py @@ -63,16 +63,19 @@ def test_suite(self): # BWA: what's the purpose of this test? Just to see if the suite # runs without errors? 
ds = self.cs.load_dataset(static_files["2dim"]) - self.cs.run(ds, [], "acdd") + #self.cs.run(ds, [], "acdd") + self.cs.run_all(ds, ["acdd"], skip_checks=[]) def test_suite_pathlib(self): path_obj = Path(static_files["2dim"]) ds = self.cs.load_dataset(path_obj) - self.cs.run(ds, [], "acdd") + #self.cs.run(ds, [], "acdd") + self.cs.run_all(ds, ["acdd"], skip_checks=[]) def test_unicode_formatting(self): ds = self.cs.load_dataset(static_files["bad_region"]) - score_groups = self.cs.run(ds, [], "cf") + #score_groups = self.cs.run(ds, [], "cf") + score_groups = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 for checker, rpair in score_groups.items(): @@ -163,7 +166,8 @@ def test_group_func(self): # This is checking for issue #183, where group_func results in # IndexError: list index out of range ds = self.cs.load_dataset(static_files["bad_data_type"]) - score_groups = self.cs.run(ds, [], "cf") + #score_groups = self.cs.run(ds, [], "cf") + score_groups = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 for checker, rpair in score_groups.items(): @@ -198,7 +202,8 @@ def test_cdl_file(self): # Testing whether you can run compliance checker on a .cdl file # Load the cdl file ds = self.cs.load_dataset(static_files["test_cdl"]) - vals = self.cs.run(ds, [], "cf") + #vals = self.cs.run(ds, [], "cf") + vals = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 for checker, rpair in vals.items(): @@ -221,7 +226,8 @@ def test_cdl_file(self): # Ok now load the nc file that it came from ds = self.cs.load_dataset(static_files["test_cdl_nc"]) - vals = self.cs.run(ds, [], "cf") + #vals = self.cs.run(ds, [], "cf") + vals = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 for checker, rpair in vals.items(): @@ -259,7 +265,8 @@ def test_standard_output_score_header(self): of potential issues, rather than the weighted score """ ds = self.cs.load_dataset(static_files["bad_region"]) - score_groups = self.cs.run(ds, [], "cf") + #score_groups = self.cs.run(ds, [], "cf") + score_groups = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 groups, errors = score_groups["cf"] score_list, all_passed, out_of = self.cs.standard_output( diff --git a/docs/source/development.md b/docs/source/development.md new file mode 100644 index 00000000..67b35b67 --- /dev/null +++ b/docs/source/development.md @@ -0,0 +1,11 @@ +# Developer Notes + +## pytest + +When running the python test suite, there may be test errors. Certain +tests `record` responses to remote queries for information. If tests +fail, they will appear to continue to fail as the queries are cached. + +To perform tests using fresh queries from remote services, use +`pytest --disable-vcr`. In certain cases, clearing the cache is +also advised, use `pytest --clear-cache`. diff --git a/docs/source/faq.md b/docs/source/faq.md index f461ce88..d95859bb 100644 --- a/docs/source/faq.md +++ b/docs/source/faq.md @@ -45,6 +45,8 @@ The Compliance Checker is completely open-source and available on [GitHub](https ## Disclaimer -The objective of the IOOS Compliance Checker is to check your file against our interpretation of select dataset metadata standards to use as a guideline in generating compliant files. -The compliance checker should not be considered the authoritative source on whether your file is 100% "compliant". +The objective of the IOOS Compliance Checker is to check your file against +our interpretation of select dataset metadata standards to use as a +guideline in generating compliant files. 
The compliance checker should +not be considered the authoritative source on whether your file is 100% "compliant". Instead, we recommend that users use the results as a guide to work towards compliance. diff --git a/docs/source/index.rst b/docs/source/index.rst index 0f67a152..bcf62b59 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -10,6 +10,7 @@ Python tool to check your datasets against compliance standards. quickintro compliance_checker_api faq + development Indices and tables ================== From b06e777e4272ee0a657f8bd14bd5a61173569c83 Mon Sep 17 00:00:00 2001 From: Rob Cermak Date: Mon, 18 Mar 2024 23:49:36 -0700 Subject: [PATCH 04/18] Linter updates --- compliance_checker/tests/test_cf.py | 2 +- compliance_checker/tests/test_cf_integration.py | 8 ++++---- compliance_checker/tests/test_protocols.py | 2 +- compliance_checker/tests/test_suite.py | 14 +++++++------- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/compliance_checker/tests/test_cf.py b/compliance_checker/tests/test_cf.py index 6f79053d..24a58c78 100644 --- a/compliance_checker/tests/test_cf.py +++ b/compliance_checker/tests/test_cf.py @@ -1794,7 +1794,7 @@ def test_64bit(self): dataset = self.load_dataset(STATIC_FILES["ints64"]) suite = CheckSuite() suite.checkers = {"cf": CF1_6Check} - #suite.run(dataset, "cf") + # suite.run(dataset, "cf") suite.run_all(dataset, ["cf"], skip_checks=["cf"]) def test_variable_feature_check(self): diff --git a/compliance_checker/tests/test_cf_integration.py b/compliance_checker/tests/test_cf_integration.py index 978c9047..06e8ddf1 100644 --- a/compliance_checker/tests/test_cf_integration.py +++ b/compliance_checker/tests/test_cf_integration.py @@ -247,7 +247,7 @@ def get_results(self, check_results, checksuite): ], # must be specified to load this param at runtime, instead of at collection ) def test_cf_integration(self, loaded_dataset, expected_messages, cs): - #check_results = cs.run(loaded_dataset, [], "cf") + # check_results = cs.run(loaded_dataset, [], "cf") check_results = cs.run_all(loaded_dataset, ["cf"], skip_checks=[]) scored, out_of, messages = self.get_results(check_results, cs) @@ -273,7 +273,7 @@ def test_cf_integration(self, loaded_dataset, expected_messages, cs): indirect=["loaded_dataset"], ) def test_no_incorrect_errors(self, cs, loaded_dataset, wrong_message): - #check_results = cs.run(loaded_dataset, [], True, "cf") + # check_results = cs.run(loaded_dataset, [], True, "cf") check_results = cs.run_all(loaded_dataset, ["cf"], skip_checks=[]) messages = self.get_results(check_results, cs)[-1] @@ -281,7 +281,7 @@ def test_no_incorrect_errors(self, cs, loaded_dataset, wrong_message): @pytest.mark.parametrize("loaded_dataset", ["fvcom"], indirect=True) def test_fvcom(self, cs, loaded_dataset): - #check_results = cs.run(loaded_dataset, [], True, "cf") + # check_results = cs.run(loaded_dataset, [], True, "cf") check_results = cs.run_all(loaded_dataset, ["cf"], skip_checks=[]) scored, out_of, messages = self.get_results(check_results, cs) assert scored < out_of @@ -310,7 +310,7 @@ def test_ncei_templates(self, cs, loaded_dataset): Tests some of the NCEI NetCDF templates, which usually should get a perfect score. 
""" - #check_results = cs.run(loaded_dataset, [], "cf") + # check_results = cs.run(loaded_dataset, [], "cf") check_results = cs.run_all(loaded_dataset, ["cf"], skip_checks=[]) scored, out_of, messages = self.get_results(check_results, cs) assert scored < out_of diff --git a/compliance_checker/tests/test_protocols.py b/compliance_checker/tests/test_protocols.py index faa68d41..509b055d 100644 --- a/compliance_checker/tests/test_protocols.py +++ b/compliance_checker/tests/test_protocols.py @@ -52,7 +52,7 @@ def test_thredds(): Tests that a connection can be made to a remote THREDDS endpoint """ # Returns: error 400 - #url = "http://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p25deg_ana/TP" + # url = "http://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p25deg_ana/TP" # Use a smaller dataset url = "https://thredds.ucar.edu/thredds/ncss/grid/grib/NCEP/GFS/Global_0p25deg_ana/TP?var=Temperature_altitude_above_msl&accept=netcdf3" diff --git a/compliance_checker/tests/test_suite.py b/compliance_checker/tests/test_suite.py index ad6d525c..f520f399 100644 --- a/compliance_checker/tests/test_suite.py +++ b/compliance_checker/tests/test_suite.py @@ -63,18 +63,18 @@ def test_suite(self): # BWA: what's the purpose of this test? Just to see if the suite # runs without errors? ds = self.cs.load_dataset(static_files["2dim"]) - #self.cs.run(ds, [], "acdd") + # self.cs.run(ds, [], "acdd") self.cs.run_all(ds, ["acdd"], skip_checks=[]) def test_suite_pathlib(self): path_obj = Path(static_files["2dim"]) ds = self.cs.load_dataset(path_obj) - #self.cs.run(ds, [], "acdd") + # self.cs.run(ds, [], "acdd") self.cs.run_all(ds, ["acdd"], skip_checks=[]) def test_unicode_formatting(self): ds = self.cs.load_dataset(static_files["bad_region"]) - #score_groups = self.cs.run(ds, [], "cf") + # score_groups = self.cs.run(ds, [], "cf") score_groups = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 @@ -166,7 +166,7 @@ def test_group_func(self): # This is checking for issue #183, where group_func results in # IndexError: list index out of range ds = self.cs.load_dataset(static_files["bad_data_type"]) - #score_groups = self.cs.run(ds, [], "cf") + # score_groups = self.cs.run(ds, [], "cf") score_groups = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 @@ -202,7 +202,7 @@ def test_cdl_file(self): # Testing whether you can run compliance checker on a .cdl file # Load the cdl file ds = self.cs.load_dataset(static_files["test_cdl"]) - #vals = self.cs.run(ds, [], "cf") + # vals = self.cs.run(ds, [], "cf") vals = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 @@ -226,7 +226,7 @@ def test_cdl_file(self): # Ok now load the nc file that it came from ds = self.cs.load_dataset(static_files["test_cdl_nc"]) - #vals = self.cs.run(ds, [], "cf") + # vals = self.cs.run(ds, [], "cf") vals = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 @@ -265,7 +265,7 @@ def test_standard_output_score_header(self): of potential issues, rather than the weighted score """ ds = self.cs.load_dataset(static_files["bad_region"]) - #score_groups = self.cs.run(ds, [], "cf") + # score_groups = self.cs.run(ds, [], "cf") score_groups = self.cs.run_all(ds, ["cf"], skip_checks=[]) limit = 2 groups, errors = score_groups["cf"] From afd0d4ec20494747b4f22f050eefd36031090e04 Mon Sep 17 00:00:00 2001 From: Chris Barker Date: Thu, 11 Apr 2024 18:40:48 -0700 Subject: [PATCH 05/18] removed last of pkg_resources. 
--- compliance_checker/base.py | 7 ++--- compliance_checker/cf/util.py | 6 ++++- compliance_checker/cfutil.py | 5 +++- compliance_checker/suite.py | 36 +++++++++++++++++--------- compliance_checker/tests/conftest.py | 5 +++- compliance_checker/tests/resources.py | 5 +++- compliance_checker/tests/test_cli.py | 3 ++- compliance_checker/tests/test_suite.py | 8 ++++-- requirements.txt | 1 + 9 files changed, 54 insertions(+), 22 deletions(-) diff --git a/compliance_checker/base.py b/compliance_checker/base.py index c2674e85..64c2685c 100644 --- a/compliance_checker/base.py +++ b/compliance_checker/base.py @@ -191,9 +191,10 @@ def __del__(self): are cleared before the next checker uses it. Some caches were inadvertently mutated by other functions. """ - - cfutil.get_geophysical_variables.cache_clear() - cfutil.get_time_variables.cache_clear() + # odd errors -- module getting deleted before this object? + if cfutil is not None: + cfutil.get_geophysical_variables.cache_clear() + cfutil.get_time_variables.cache_clear() class BaseNCCheck: diff --git a/compliance_checker/cf/util.py b/compliance_checker/cf/util.py index 6f100653..61e44f75 100644 --- a/compliance_checker/cf/util.py +++ b/compliance_checker/cf/util.py @@ -5,7 +5,11 @@ import requests from cf_units import Unit -from importlib_resources import files +try: + from importlib.resources import files +except ImportError: + from importlib_resources import files + from lxml import etree from netCDF4 import Dataset diff --git a/compliance_checker/cfutil.py b/compliance_checker/cfutil.py index 245c6fd3..2f16ac22 100644 --- a/compliance_checker/cfutil.py +++ b/compliance_checker/cfutil.py @@ -9,7 +9,10 @@ from functools import lru_cache, partial from cf_units import Unit -from importlib_resources import files +try: + from importlib.resources import files +except ImportError: + from importlib_resources import files _UNITLESS_DB = None _SEA_NAMES = None diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index 7d922110..fd29af1b 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -13,6 +13,7 @@ import warnings from collections import defaultdict from datetime import datetime, timezone +from packaging import version from operator import itemgetter from pathlib import Path from urllib.parse import urlparse @@ -22,8 +23,10 @@ from netCDF4 import Dataset from owslib.sos import SensorObservationService from owslib.swe.sensor.sml import SensorML -from packaging.version import parse -from pkg_resources import working_set +if sys.version_info >= (3, 9): + import importlib.metadata as impmd +else: + import importlib_metadata as impmd from compliance_checker import __version__, tempnc from compliance_checker.base import BaseCheck, GenericFile, Result, fix_return_value @@ -71,10 +74,11 @@ def _get_generator_plugins(cls): Return a list of classes from external plugins that are used to generate checker classes """ - + # NOTE: updated to not use pkg_resources, but + # not tested -- it is ever used? if not hasattr(cls, "suite_generators"): - gens = working_set.iter_entry_points("compliance_checker.generators") - cls.suite_generators = [x.resolve() for x in gens] + gens = impmd.entry_points(group='compliance_checker.generators') + cls.suite_generators = [x.load() for x in gens] return cls.suite_generators @@ -136,7 +140,9 @@ def load_all_available_checkers(cls): Helper method to retrieve all sub checker classes derived from various base classes. 
""" - cls._load_checkers(working_set.iter_entry_points("compliance_checker.suites")) + checkers = impmd.entry_points(group='compliance_checker.suites') + cls._load_checkers(checkers) + @classmethod def _load_checkers(cls, checkers): @@ -147,7 +153,8 @@ def _load_checkers(cls, checkers): for c in checkers: try: - check_obj = c.resolve() + # check_obj = c.resolve() + check_obj = c.load() if hasattr(check_obj, "_cc_spec") and hasattr( check_obj, "_cc_spec_version", @@ -186,8 +193,8 @@ def _load_checkers(cls, checkers): for spec, versions in itertools.groupby(ver_checkers, itemgetter(0)): version_nums = [v[-1] for v in versions] try: - latest_version = str(max(parse(v) for v in version_nums)) - # if the version can't be parsed, do it according to character collation + latest_version = str(max(version.parse(v) for v in version_nums)) + # if the version can't be parsed, sort according to character collation except ValueError: latest_version = max(version_nums) cls.checkers[spec] = cls.checkers[spec + ":latest"] = cls.checkers[ @@ -764,9 +771,14 @@ def generate_dataset(self, cdl_path): :param str cdl_path: Absolute path to cdl file that is used to generate netCDF file """ - if isinstance(cdl_path, str): - cdl_path = Path(cdl_path) - ds_str = cdl_path.with_suffix(".nc") + # better to update the following code with Path object -- some day + cdl_path = os.fspath(cdl_path) + if ( + ".cdl" in cdl_path + ): # it's possible the filename doesn't have the .cdl extension + ds_str = cdl_path.replace(".cdl", ".nc") + else: + ds_str = cdl_path + ".nc" # generate netCDF-4 file iostat = subprocess.run( diff --git a/compliance_checker/tests/conftest.py b/compliance_checker/tests/conftest.py index 2c662c16..178ab73a 100644 --- a/compliance_checker/tests/conftest.py +++ b/compliance_checker/tests/conftest.py @@ -3,7 +3,10 @@ from itertools import chain import pytest -from importlib_resources import files +try: + from importlib.resources import files +except ImportError: + from importlib_resources import files from netCDF4 import Dataset from compliance_checker.cf import util diff --git a/compliance_checker/tests/resources.py b/compliance_checker/tests/resources.py index 7bbca9d6..e61fa290 100644 --- a/compliance_checker/tests/resources.py +++ b/compliance_checker/tests/resources.py @@ -1,6 +1,9 @@ import subprocess -from importlib_resources import files +try: + from importlib.resources import files +except ImportError: + from importlib_resources import files def get_filename(path): diff --git a/compliance_checker/tests/test_cli.py b/compliance_checker/tests/test_cli.py index fc6b4f94..cb3c21a7 100644 --- a/compliance_checker/tests/test_cli.py +++ b/compliance_checker/tests/test_cli.py @@ -91,7 +91,8 @@ def checker_1(): def checker_2(): return Namespace(_cc_spec="checker_2", _cc_spec_version="2.2") - mock_checkers = [Namespace(resolve=checker_1), Namespace(resolve=checker_2)] + mock_checkers = [Namespace(load=checker_1), + Namespace(load=checker_2)] with pytest.warns(DeprecationWarning): CheckSuite._load_checkers(mock_checkers) diff --git a/compliance_checker/tests/test_suite.py b/compliance_checker/tests/test_suite.py index 8c8987df..7faa2eea 100644 --- a/compliance_checker/tests/test_suite.py +++ b/compliance_checker/tests/test_suite.py @@ -3,7 +3,11 @@ from pathlib import Path import numpy as np -from importlib_resources import files + +try: + from importlib.resources import files +except ImportError: + from importlib_resources import files from compliance_checker.acdd import ACDDBaseCheck from 
compliance_checker.base import BaseCheck, GenericFile, Result @@ -83,7 +87,7 @@ def test_generate_dataset_netCDF4(self): # create netCDF4 file ds_name = self.cs.generate_dataset(static_files["netCDF4"]) # check if correct name is return - assert ds_name == static_files["netCDF4"].with_suffix(".nc") + assert ds_name == str(static_files["netCDF4"].with_suffix(".nc")) # check if netCDF4 file was created assert os.path.isfile(static_files["netCDF4"].with_suffix(".nc")) diff --git a/requirements.txt b/requirements.txt index 6277c633..46bbf078 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ cf-units>=2 cftime>=1.1.0 importlib-resources # drop this when dropping Python 3.8 +importlib-metadata # drop this when dropping Python 3.8 isodate>=0.6.1 jinja2>=2.7.3 lxml>=3.2.1 From f03b3b60ad581157d7e5babaf672fb99470efa71 Mon Sep 17 00:00:00 2001 From: Chris Barker Date: Thu, 11 Apr 2024 18:50:24 -0700 Subject: [PATCH 06/18] using importlib_metadata for 3.10_ only --- compliance_checker/suite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index fd29af1b..ec3631d0 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -23,7 +23,7 @@ from netCDF4 import Dataset from owslib.sos import SensorObservationService from owslib.swe.sensor.sml import SensorML -if sys.version_info >= (3, 9): +if sys.version_info >= (3, 10): import importlib.metadata as impmd else: import importlib_metadata as impmd From b957367322394e7bc450b2bc4eb7064f72d98769 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Sat, 13 Apr 2024 14:41:15 +0200 Subject: [PATCH 07/18] update pre-commits --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f0f1a089..805ce058 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: - test_requirements.txt - repo: https://github.com/psf/black - rev: 24.3.0 + rev: 24.4.0 hooks: - id: black language_version: python3 @@ -31,7 +31,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.5 + rev: v0.3.7 hooks: - id: ruff From ebe9fc8123d973b5c46bb682ba8fa069c1dcf267 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Sat, 13 Apr 2024 14:41:20 +0200 Subject: [PATCH 08/18] only essential pkg_resource fixes --- compliance_checker/base.py | 7 +++-- compliance_checker/cf/util.py | 6 +---- compliance_checker/cfutil.py | 5 +--- compliance_checker/suite.py | 36 ++++++++++---------------- compliance_checker/tests/conftest.py | 5 +--- compliance_checker/tests/resources.py | 5 +--- compliance_checker/tests/test_cli.py | 3 +-- compliance_checker/tests/test_suite.py | 8 ++---- 8 files changed, 24 insertions(+), 51 deletions(-) diff --git a/compliance_checker/base.py b/compliance_checker/base.py index 64c2685c..c2674e85 100644 --- a/compliance_checker/base.py +++ b/compliance_checker/base.py @@ -191,10 +191,9 @@ def __del__(self): are cleared before the next checker uses it. Some caches were inadvertently mutated by other functions. """ - # odd errors -- module getting deleted before this object? 
- if cfutil is not None: - cfutil.get_geophysical_variables.cache_clear() - cfutil.get_time_variables.cache_clear() + + cfutil.get_geophysical_variables.cache_clear() + cfutil.get_time_variables.cache_clear() class BaseNCCheck: diff --git a/compliance_checker/cf/util.py b/compliance_checker/cf/util.py index 61e44f75..6f100653 100644 --- a/compliance_checker/cf/util.py +++ b/compliance_checker/cf/util.py @@ -5,11 +5,7 @@ import requests from cf_units import Unit -try: - from importlib.resources import files -except ImportError: - from importlib_resources import files - +from importlib_resources import files from lxml import etree from netCDF4 import Dataset diff --git a/compliance_checker/cfutil.py b/compliance_checker/cfutil.py index 2f16ac22..245c6fd3 100644 --- a/compliance_checker/cfutil.py +++ b/compliance_checker/cfutil.py @@ -9,10 +9,7 @@ from functools import lru_cache, partial from cf_units import Unit -try: - from importlib.resources import files -except ImportError: - from importlib_resources import files +from importlib_resources import files _UNITLESS_DB = None _SEA_NAMES = None diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index ec3631d0..d261a1cc 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -13,20 +13,17 @@ import warnings from collections import defaultdict from datetime import datetime, timezone -from packaging import version from operator import itemgetter from pathlib import Path from urllib.parse import urlparse +import importlib_metadata import requests from lxml import etree as ET from netCDF4 import Dataset from owslib.sos import SensorObservationService from owslib.swe.sensor.sml import SensorML -if sys.version_info >= (3, 10): - import importlib.metadata as impmd -else: - import importlib_metadata as impmd +from packaging.version import parse from compliance_checker import __version__, tempnc from compliance_checker.base import BaseCheck, GenericFile, Result, fix_return_value @@ -74,10 +71,11 @@ def _get_generator_plugins(cls): Return a list of classes from external plugins that are used to generate checker classes """ - # NOTE: updated to not use pkg_resources, but - # not tested -- it is ever used? + if not hasattr(cls, "suite_generators"): - gens = impmd.entry_points(group='compliance_checker.generators') + gens = importlib_metadata.entry_points( + groups="compliance_checker.generators", + ) cls.suite_generators = [x.load() for x in gens] return cls.suite_generators @@ -140,9 +138,9 @@ def load_all_available_checkers(cls): Helper method to retrieve all sub checker classes derived from various base classes. 
""" - checkers = impmd.entry_points(group='compliance_checker.suites') - cls._load_checkers(checkers) - + cls._load_checkers( + importlib_metadata.entry_points(group="compliance_checker.suites"), + ) @classmethod def _load_checkers(cls, checkers): @@ -153,7 +151,6 @@ def _load_checkers(cls, checkers): for c in checkers: try: - # check_obj = c.resolve() check_obj = c.load() if hasattr(check_obj, "_cc_spec") and hasattr( check_obj, @@ -193,8 +190,8 @@ def _load_checkers(cls, checkers): for spec, versions in itertools.groupby(ver_checkers, itemgetter(0)): version_nums = [v[-1] for v in versions] try: - latest_version = str(max(version.parse(v) for v in version_nums)) - # if the version can't be parsed, sort according to character collation + latest_version = str(max(parse(v) for v in version_nums)) + # if the version can't be parsed, do it according to character collation except ValueError: latest_version = max(version_nums) cls.checkers[spec] = cls.checkers[spec + ":latest"] = cls.checkers[ @@ -771,14 +768,9 @@ def generate_dataset(self, cdl_path): :param str cdl_path: Absolute path to cdl file that is used to generate netCDF file """ - # better to update the following code with Path object -- some day - cdl_path = os.fspath(cdl_path) - if ( - ".cdl" in cdl_path - ): # it's possible the filename doesn't have the .cdl extension - ds_str = cdl_path.replace(".cdl", ".nc") - else: - ds_str = cdl_path + ".nc" + if isinstance(cdl_path, str): + cdl_path = Path(cdl_path) + ds_str = cdl_path.with_suffix(".nc") # generate netCDF-4 file iostat = subprocess.run( diff --git a/compliance_checker/tests/conftest.py b/compliance_checker/tests/conftest.py index 178ab73a..2c662c16 100644 --- a/compliance_checker/tests/conftest.py +++ b/compliance_checker/tests/conftest.py @@ -3,10 +3,7 @@ from itertools import chain import pytest -try: - from importlib.resources import files -except ImportError: - from importlib_resources import files +from importlib_resources import files from netCDF4 import Dataset from compliance_checker.cf import util diff --git a/compliance_checker/tests/resources.py b/compliance_checker/tests/resources.py index e61fa290..7bbca9d6 100644 --- a/compliance_checker/tests/resources.py +++ b/compliance_checker/tests/resources.py @@ -1,9 +1,6 @@ import subprocess -try: - from importlib.resources import files -except ImportError: - from importlib_resources import files +from importlib_resources import files def get_filename(path): diff --git a/compliance_checker/tests/test_cli.py b/compliance_checker/tests/test_cli.py index cb3c21a7..c86cd897 100644 --- a/compliance_checker/tests/test_cli.py +++ b/compliance_checker/tests/test_cli.py @@ -91,8 +91,7 @@ def checker_1(): def checker_2(): return Namespace(_cc_spec="checker_2", _cc_spec_version="2.2") - mock_checkers = [Namespace(load=checker_1), - Namespace(load=checker_2)] + mock_checkers = [Namespace(load=checker_1), Namespace(load=checker_2)] with pytest.warns(DeprecationWarning): CheckSuite._load_checkers(mock_checkers) diff --git a/compliance_checker/tests/test_suite.py b/compliance_checker/tests/test_suite.py index 7faa2eea..8c8987df 100644 --- a/compliance_checker/tests/test_suite.py +++ b/compliance_checker/tests/test_suite.py @@ -3,11 +3,7 @@ from pathlib import Path import numpy as np - -try: - from importlib.resources import files -except ImportError: - from importlib_resources import files +from importlib_resources import files from compliance_checker.acdd import ACDDBaseCheck from compliance_checker.base import BaseCheck, GenericFile, 
Result @@ -87,7 +83,7 @@ def test_generate_dataset_netCDF4(self): # create netCDF4 file ds_name = self.cs.generate_dataset(static_files["netCDF4"]) # check if correct name is return - assert ds_name == str(static_files["netCDF4"].with_suffix(".nc")) + assert ds_name == static_files["netCDF4"].with_suffix(".nc") # check if netCDF4 file was created assert os.path.isfile(static_files["netCDF4"].with_suffix(".nc")) From 064aac92f9287af62950edb93141bd89068f2632 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Sat, 13 Apr 2024 14:41:32 +0200 Subject: [PATCH 09/18] sort --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 46bbf078..fd3bbe9a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ cf-units>=2 cftime>=1.1.0 -importlib-resources # drop this when dropping Python 3.8 importlib-metadata # drop this when dropping Python 3.8 +importlib-resources # drop this when dropping Python 3.8 isodate>=0.6.1 jinja2>=2.7.3 lxml>=3.2.1 From 6aa0f2698e52b018023ae861dcb9bbc1bdcdc77e Mon Sep 17 00:00:00 2001 From: Benjamin Adams Date: Mon, 15 Apr 2024 11:39:34 -0400 Subject: [PATCH 10/18] Fix valid range test after changes in #1053 --- compliance_checker/tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compliance_checker/tests/test_cf.py b/compliance_checker/tests/test_cf.py index 24a58c78..203b2ee1 100644 --- a/compliance_checker/tests/test_cf.py +++ b/compliance_checker/tests/test_cf.py @@ -436,7 +436,7 @@ def test_check_valid_range_or_valid_min_max_present(self): ] assert result.msgs == expected_msgs - assert result.value[0] == result.value[1] + assert result.value[0] < result.value[1] def test_check_fill_value_outside_valid_range(self): """ From b551c2ff5f6e9ad698cc1f039ea16c286ef11790 Mon Sep 17 00:00:00 2001 From: Benjamin Adams Date: Mon, 15 Apr 2024 17:38:18 -0400 Subject: [PATCH 11/18] Fix initial importlib check suite load Fixes loading initial available checkers via importlib. An invalid argument had been passed with kwarg `groups` instead of `group`. --- compliance_checker/suite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index 93cc9356..17002da4 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -74,7 +74,7 @@ def _get_generator_plugins(cls): if not hasattr(cls, "suite_generators"): gens = importlib_metadata.entry_points( - groups="compliance_checker.generators", + group="compliance_checker.generators", ) cls.suite_generators = [x.load() for x in gens] From 584f896a3dcea39155fbc91ff0380524d4526c59 Mon Sep 17 00:00:00 2001 From: Benjamin Adams Date: Mon, 15 Apr 2024 18:09:35 -0400 Subject: [PATCH 12/18] Fix valid range with valid min/max check logic Fixes a logic error in valid_range with valid_min/valid_max which resulted in erroneous results. 
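In simplified form the corrected rule is: a variable is flagged only when
valid_range coexists with valid_min and/or valid_max; any of the three
attributes on its own is compliant. A minimal sketch of just that condition
(illustrative helper name only, not the patched check itself, which also
tallies totals and collects messages as shown in the diff below):

    def violates_cf_2_5_1(variable) -> bool:
        """Return True when valid_range is present together with valid_min and/or valid_max."""
        has_min_or_max = hasattr(variable, "valid_min") or hasattr(variable, "valid_max")
        return has_min_or_max and hasattr(variable, "valid_range")
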
--- compliance_checker/cf/cf_1_6.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py index d56e2aa5..3cc38a1e 100644 --- a/compliance_checker/cf/cf_1_6.py +++ b/compliance_checker/cf/cf_1_6.py @@ -443,15 +443,19 @@ def check_valid_range_or_valid_min_max_present(self, ds): total = 0 for variable in ds.variables.values(): - if hasattr(variable, "valid_max") and ( - hasattr(variable, "valid_min") or hasattr(variable, "valid_range") - ): - total = total + 1 - - fails.append( - f"For the variable {variable.name} the valid_range attribute must not be present " - "if the valid_min and/or valid_max attributes are present", - ) + if (hasattr(variable, "valid_max") or + hasattr(variable, "valid_min")): + total += 1 + # if there's also valid_range in addition to + # valid_min/valid_max, this is not compliant + if hasattr(variable, "valid_range"): + fails.append( + f"For the variable {variable.name} the valid_range attribute must not be present " + "if the valid_min and/or valid_max attributes are present", + ) + # *Just* valid_range should be added to total as well + elif hasattr(variable, "valid_range"): + total += 1 return Result( BaseCheck.MEDIUM, From dcd1edfe6a320e0e3cebc71f0c6d98fa7794c789 Mon Sep 17 00:00:00 2001 From: Benjamin Adams Date: Mon, 15 Apr 2024 18:11:07 -0400 Subject: [PATCH 13/18] Rename function check_valid_range_or_valid_min_max_present Renames check function `check_valid_range_or_valid_min_max_present` to `check_valid_range_and_valid_min_max_present` for clarity. Existing compliance checker plugins checked do not reference this function so this appears to be a safe operation. --- compliance_checker/cf/cf_1_6.py | 2 +- compliance_checker/tests/test_cf.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py index 3cc38a1e..f4ff4710 100644 --- a/compliance_checker/cf/cf_1_6.py +++ b/compliance_checker/cf/cf_1_6.py @@ -430,7 +430,7 @@ def check_fill_value_equal_missing_value(self, ds): msgs=fails, ) - def check_valid_range_or_valid_min_max_present(self, ds): + def check_valid_range_and_valid_min_max_present(self, ds): """ The valid_range attribute must not be present if the valid_min and/or valid_max attributes are present. This according to 2.5.1 Requirements. diff --git a/compliance_checker/tests/test_cf.py b/compliance_checker/tests/test_cf.py index 203b2ee1..292bfd82 100644 --- a/compliance_checker/tests/test_cf.py +++ b/compliance_checker/tests/test_cf.py @@ -396,7 +396,7 @@ def test_check_fill_value_equal_missing_value(self): assert result.msgs == expected_msgs - def test_check_valid_range_or_valid_min_max_present(self): + def test_check_valid_range_and_valid_min_max_present(self): """ 2.5.1 Missing data, valid and actual range of data Requirements: @@ -426,7 +426,7 @@ def test_check_valid_range_or_valid_min_max_present(self): dataset.variables["c"][1] = 2 dataset.variables["c"].setncattr("valid_range", [-10, 10]) - result = self.cf.check_valid_range_or_valid_min_max_present(dataset) + result = self.cf.check_valid_range_and_valid_min_max_present(dataset) # check if the test fails when when variable "a" is checked. 
expected_msgs = [ From 0f684864c09fb74d72fc6b2aca766a49501682fc Mon Sep 17 00:00:00 2001 From: Benjamin Adams Date: Mon, 15 Apr 2024 18:23:01 -0400 Subject: [PATCH 14/18] Black formatting fixes --- compliance_checker/cf/cf_1_6.py | 3 +-- compliance_checker/tests/test_cf.py | 10 ++++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py index f4ff4710..f059e0f7 100644 --- a/compliance_checker/cf/cf_1_6.py +++ b/compliance_checker/cf/cf_1_6.py @@ -443,8 +443,7 @@ def check_valid_range_and_valid_min_max_present(self, ds): total = 0 for variable in ds.variables.values(): - if (hasattr(variable, "valid_max") or - hasattr(variable, "valid_min")): + if hasattr(variable, "valid_max") or hasattr(variable, "valid_min"): total += 1 # if there's also valid_range in addition to # valid_min/valid_max, this is not compliant diff --git a/compliance_checker/tests/test_cf.py b/compliance_checker/tests/test_cf.py index 292bfd82..c86c115c 100644 --- a/compliance_checker/tests/test_cf.py +++ b/compliance_checker/tests/test_cf.py @@ -1300,7 +1300,9 @@ def test_check_time_coordinate(self): # NB: >= 60 seconds is nonstandard, but isn't actually a CF requirement # until CF 1.9 onwards dataset.variables["time"].units = "months since 0-1-1 23:00:60" - dataset.variables["time"].climatology = ( + dataset.variables[ + "time" + ].climatology = ( "nonexistent_variable_reference_only_used_to_test_year_zero_failure" ) results = self.cf.check_time_coordinate(dataset) @@ -2951,9 +2953,9 @@ def test_bad_lsid(self): messages = results[0].msgs assert results[0].value[0] < results[0].value[1] assert len(messages) == 1 - taxon_lsid[0] = ( - "http://www.lsid.info/urn:lsid:marinespecies.org:taxname:99999999999" - ) + taxon_lsid[ + 0 + ] = "http://www.lsid.info/urn:lsid:marinespecies.org:taxname:99999999999" results = self.cf.check_taxa(dataset) assert messages[0].startswith( "Taxon id must match one of the following forms:", From d7d861effde00938947b60036f65947956407bef Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 16 Apr 2024 09:15:28 +0000 Subject: [PATCH 15/18] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- compliance_checker/tests/test_cf.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/compliance_checker/tests/test_cf.py b/compliance_checker/tests/test_cf.py index c86c115c..292bfd82 100644 --- a/compliance_checker/tests/test_cf.py +++ b/compliance_checker/tests/test_cf.py @@ -1300,9 +1300,7 @@ def test_check_time_coordinate(self): # NB: >= 60 seconds is nonstandard, but isn't actually a CF requirement # until CF 1.9 onwards dataset.variables["time"].units = "months since 0-1-1 23:00:60" - dataset.variables[ - "time" - ].climatology = ( + dataset.variables["time"].climatology = ( "nonexistent_variable_reference_only_used_to_test_year_zero_failure" ) results = self.cf.check_time_coordinate(dataset) @@ -2953,9 +2951,9 @@ def test_bad_lsid(self): messages = results[0].msgs assert results[0].value[0] < results[0].value[1] assert len(messages) == 1 - taxon_lsid[ - 0 - ] = "http://www.lsid.info/urn:lsid:marinespecies.org:taxname:99999999999" + taxon_lsid[0] = ( + "http://www.lsid.info/urn:lsid:marinespecies.org:taxname:99999999999" + ) results = self.cf.check_taxa(dataset) assert messages[0].startswith( "Taxon id must match one of the following forms:", From 
2a6196a2d6a933b6137931191fd53f687878797c Mon Sep 17 00:00:00 2001 From: Benjamin Adams Date: Tue, 16 Apr 2024 12:10:44 -0400 Subject: [PATCH 16/18] No-op MockNetCDF __dealloc__ method Overrides `__dealloc__` method in `MockNetCDF` class to prevent suppress exception messages on tests via no-op/`pass` operation. --- compliance_checker/tests/helpers.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/compliance_checker/tests/helpers.py b/compliance_checker/tests/helpers.py index a07c1aa3..40a9ea87 100644 --- a/compliance_checker/tests/helpers.py +++ b/compliance_checker/tests/helpers.py @@ -23,6 +23,10 @@ def __init__(self, filename=None): persist=False, ) + # suppress usual dealloc routine to prevent caught exception messages + # from printing + def __dealloc__(self): + pass class MockTimeSeries(MockNetCDF): """ From 76b4eb67a7c89108fe5a387205be7773b8e12559 Mon Sep 17 00:00:00 2001 From: Benjamin Adams Date: Tue, 16 Apr 2024 12:46:08 -0400 Subject: [PATCH 17/18] Suppress MockNetCDF __dealloc__ exceptions rather than overriding --- compliance_checker/tests/helpers.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/compliance_checker/tests/helpers.py b/compliance_checker/tests/helpers.py index 40a9ea87..78b4649c 100644 --- a/compliance_checker/tests/helpers.py +++ b/compliance_checker/tests/helpers.py @@ -1,6 +1,6 @@ import tempfile -from netCDF4 import Dataset +from netCDF4._netCDF4 import Dataset class MockNetCDF(Dataset): @@ -26,7 +26,10 @@ def __init__(self, filename=None): # suppress usual dealloc routine to prevent caught exception messages # from printing def __dealloc__(self): - pass + try: + super().__dealloc__() + except AttributeError: + pass class MockTimeSeries(MockNetCDF): """ From 64988635f71fa988ceac44dbe8cdb4748d1d1764 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 16 Apr 2024 17:05:40 +0000 Subject: [PATCH 18/18] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- compliance_checker/tests/helpers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/compliance_checker/tests/helpers.py b/compliance_checker/tests/helpers.py index 78b4649c..3642e123 100644 --- a/compliance_checker/tests/helpers.py +++ b/compliance_checker/tests/helpers.py @@ -31,6 +31,7 @@ def __dealloc__(self): except AttributeError: pass + class MockTimeSeries(MockNetCDF): """ Mock time series with time dimension and time, lon, lat, and depth