Skip to content

Commit

Permalink
Merge pull request #1069 from ocefpaf/sys_excepthook
Browse files Browse the repository at this point in the history
Should fix most, but not all, of the "Error in sys.excepthook:" errors
  • Loading branch information
benjwadams authored May 21, 2024
2 parents 651b8c7 + b137db3 commit 1200543
Show file tree
Hide file tree
Showing 14 changed files with 211 additions and 228 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ repos:
- test_requirements.txt

- repo: https://github.com/psf/black
rev: 24.4.0
rev: 24.4.2
hooks:
- id: black
language_version: python3
Expand All @@ -31,12 +31,12 @@ repos:


- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.7
rev: v0.4.4
hooks:
- id: ruff

- repo: https://github.com/tox-dev/pyproject-fmt
rev: 1.7.0
rev: 2.0.4
hooks:
- id: pyproject-fmt

Expand Down
2 changes: 1 addition & 1 deletion cchecker.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,7 @@ def main():
check_suite.load_generated_checkers(args)

if args.version:
print("IOOS compliance checker version %s" % __version__)
print(f"IOOS compliance checker version {__version__}")
sys.exit(0)

options_dict = parse_options(args.option) if args.option else defaultdict(set)
Expand Down
2 changes: 1 addition & 1 deletion compliance_checker/acdd.py
Original file line number Diff line number Diff line change
Expand Up @@ -626,7 +626,7 @@ def check_time_extents(self, ds):
BaseCheck.MEDIUM,
False,
"time_coverage_extents_match",
["Failed to retrieve and convert times for variables %s." % timevar],
[f"Failed to retrieve and convert times for variables {timevar}."],
)

start_dt = abs(time0 - t_min)
Expand Down
12 changes: 0 additions & 12 deletions compliance_checker/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
from owslib.swe.observation.sos100 import SensorObservationService_1_0_0
from owslib.swe.sensor.sml import SensorML

import compliance_checker.cfutil as cfutil
from compliance_checker import __version__
from compliance_checker.util import kvp_convert

Expand Down Expand Up @@ -185,17 +184,6 @@ def get_test_ctx(self, severity, name, variable=None):
)
return self._defined_results[name][variable][severity]

def __del__(self):
    """
    Finalizer. Clears the ``functools.lru_cache`` caches in
    ``compliance_checker.cfutil`` that are shared by multiple checker
    instances, so stale entries (some caches were inadvertently mutated
    by other functions) do not leak into the next checker run.

    NOTE(review): relying on ``__del__`` for this is fragile — CPython
    does not guarantee when (or whether) finalizers run; an explicit
    teardown hook would be more deterministic.
    """

    # Guard against interpreter shutdown: module globals (including the
    # imported ``cfutil`` module) may already have been set to None by
    # the time this finalizer runs.
    if cfutil is not None:
        cfutil.get_geophysical_variables.cache_clear()
        cfutil.get_time_variables.cache_clear()


class BaseNCCheck:
"""
Expand Down
6 changes: 3 additions & 3 deletions compliance_checker/cf/cf_1_6.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,7 +302,7 @@ def check_names_unique(self, ds):
names[k.lower()] += 1

fails = [
"Variables are not case sensitive. Duplicate variables named: %s" % k
f"Variables are not case sensitive. Duplicate variables named: {k}"
for k, v in names.items()
if v > 1
]
Expand Down Expand Up @@ -1822,7 +1822,7 @@ def check_time_coordinate(self, ds):
BaseCheck.HIGH,
False,
self.section_titles["4.4"],
["%s does not have units" % name],
[f"{name} does not have units"],
)
ret_val.append(result)
continue
Expand All @@ -1833,7 +1833,7 @@ def check_time_coordinate(self, ds):
correct_units = util.units_temporal(variable.units)
reasoning = None
if not correct_units:
reasoning = ["%s does not have correct time units" % name]
reasoning = [f"{name} does not have correct time units"]
result = Result(
BaseCheck.HIGH,
correct_units,
Expand Down
63 changes: 34 additions & 29 deletions compliance_checker/cf/cf_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,11 +495,12 @@ def _find_aux_coord_vars(self, ds, refresh=False):
:return: List of variable names (str) that are defined to be auxiliary
coordinate variables.
"""
ds_str = ds.__str__()
if self._aux_coords.get(ds, None) and refresh is False:
return self._aux_coords[ds]
return self._aux_coords[ds_str]

self._aux_coords[ds] = cfutil.get_auxiliary_coordinate_variables(ds)
return self._aux_coords[ds]
self._aux_coords[ds_str] = cfutil.get_auxiliary_coordinate_variables(ds)
return self._aux_coords[ds_str]

def _find_boundary_vars(self, ds, refresh=False):
"""
Expand All @@ -512,12 +513,13 @@ def _find_boundary_vars(self, ds, refresh=False):
:rtype: list
:return: A list containing strings with boundary variable names.
"""
ds_str = ds.__str__()
if self._boundary_vars.get(ds, None) and refresh is False:
return self._boundary_vars[ds]
return self._boundary_vars[ds_str]

self._boundary_vars[ds] = cfutil.get_cell_boundary_variables(ds)
self._boundary_vars[ds_str] = cfutil.get_cell_boundary_variables(ds)

return self._boundary_vars[ds]
return self._boundary_vars[ds_str]

def _find_ancillary_vars(self, ds, refresh=False):
"""
Expand All @@ -541,26 +543,26 @@ def _find_ancillary_vars(self, ds, refresh=False):
:return: List of variable names (str) that are defined as ancillary
variables in the dataset ds.
"""

ds_str = ds.__str__()
# Used the cached version if it exists and is not empty
if self._ancillary_vars.get(ds, None) and refresh is False:
return self._ancillary_vars[ds]
return self._ancillary_vars[ds_str]

# Invalidate the cache at all costs
self._ancillary_vars[ds] = []
self._ancillary_vars[ds_str] = []

for var in ds.variables.values():
if hasattr(var, "ancillary_variables"):
for anc_name in var.ancillary_variables.split(" "):
if anc_name in ds.variables:
self._ancillary_vars[ds].append(anc_name)
self._ancillary_vars[ds_str].append(anc_name)

if hasattr(var, "grid_mapping"):
gm_name = var.grid_mapping
if gm_name in ds.variables:
self._ancillary_vars[ds].append(gm_name)
self._ancillary_vars[ds_str].append(gm_name)

return self._ancillary_vars[ds]
return self._ancillary_vars[ds_str]

def _find_clim_vars(self, ds, refresh=False):
"""
Expand All @@ -573,15 +575,15 @@ def _find_clim_vars(self, ds, refresh=False):
:return: A list containing strings with geophysical variable
names.
"""

ds_str = ds.__str__()
if self._clim_vars.get(ds, None) and refresh is False:
return self._clim_vars[ds]
return self._clim_vars[ds_str]

climatology_variable = cfutil.get_climatology_variable(ds)
if climatology_variable:
self._clim_vars[ds].append(climatology_variable)
self._clim_vars[ds_str].append(climatology_variable)

return self._clim_vars[ds]
return self._clim_vars[ds_str]

def _find_cf_standard_name_table(self, ds):
"""
Expand Down Expand Up @@ -693,12 +695,13 @@ def _find_coord_vars(self, ds, refresh=False):
:return: A list of variables names (str) that are defined as coordinate
variables in the dataset ds.
"""
if ds in self._coord_vars and refresh is False:
return self._coord_vars[ds]
ds_str = ds.__str__()
if ds_str in self._coord_vars and refresh is False:
return self._coord_vars[ds_str]

self._coord_vars[ds] = cfutil.get_coordinate_variables(ds)
self._coord_vars[ds_str] = cfutil.get_coordinate_variables(ds)

return self._coord_vars[ds]
return self._coord_vars[ds_str]

def _find_geophysical_vars(self, ds, refresh=False):
"""
Expand All @@ -712,12 +715,13 @@ def _find_geophysical_vars(self, ds, refresh=False):
:return: A list containing strings with geophysical variable
names.
"""
ds_str = ds.__str__()
if self._geophysical_vars.get(ds, None) and refresh is False:
return self._geophysical_vars[ds]
return self._geophysical_vars[ds_str]

self._geophysical_vars[ds] = cfutil.get_geophysical_variables(ds)
self._geophysical_vars[ds_str] = cfutil.get_geophysical_variables(ds)

return self._geophysical_vars[ds]
return self._geophysical_vars[ds_str]

def _find_metadata_vars(self, ds, refresh=False):
"""
Expand All @@ -731,10 +735,11 @@ def _find_metadata_vars(self, ds, refresh=False):
variable candidates.
"""
ds_str = ds.__str__()
if self._metadata_vars.get(ds, None) and refresh is False:
return self._metadata_vars[ds]
return self._metadata_vars[ds_str]

self._metadata_vars[ds] = []
self._metadata_vars[ds_str] = []
for name, var in ds.variables.items():
if name in self._find_ancillary_vars(ds) or name in self._find_coord_vars(
ds,
Expand All @@ -749,17 +754,17 @@ def _find_metadata_vars(self, ds, refresh=False):
"platform_id",
"surface_altitude",
):
self._metadata_vars[ds].append(name)
self._metadata_vars[ds_str].append(name)

elif getattr(var, "cf_role", "") != "":
self._metadata_vars[ds].append(name)
self._metadata_vars[ds_str].append(name)

elif (
getattr(var, "standard_name", None) is None and len(var.dimensions) == 0
):
self._metadata_vars[ds].append(name)
self._metadata_vars[ds_str].append(name)

return self._metadata_vars[ds]
return self._metadata_vars[ds_str]

def _get_coord_axis_map(self, ds):
"""
Expand Down
11 changes: 5 additions & 6 deletions compliance_checker/cf/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,9 +199,9 @@ def __init__(self, entrynode):
def _get(self, entrynode, attrname, required=False):
vals = entrynode.xpath(attrname)
if len(vals) > 1:
raise Exception("Multiple attrs (%s) found" % attrname)
raise Exception(f"Multiple attrs ({attrname}) found")
elif required and len(vals) == 0:
raise Exception("Required attr (%s) not found" % attrname)
raise Exception(f"Required attr ({attrname}) not found")

return vals[0].text

Expand Down Expand Up @@ -236,22 +236,21 @@ def __len__(self):

def __getitem__(self, key):
if not (key in self._names or key in self._aliases):
raise KeyError("%s not found in standard name table" % key)
raise KeyError(f"{key} not found in standard name table")

if key in self._aliases:
idx = self._aliases.index(key)
entryids = self._root.xpath("alias")[idx].xpath("entry_id")

if len(entryids) != 1:
raise Exception(
"Inconsistency in standard name table, could not lookup alias for %s"
% key,
f"Inconsistency in standard name table, could not lookup alias for {key}",
)

key = entryids[0].text

if key not in self._names:
raise KeyError("%s not found in standard name table" % key)
raise KeyError(f"{key} not found in standard name table")

idx = self._names.index(key)
entry = self.NameEntry(self._root.xpath("entry")[idx])
Expand Down
Loading

0 comments on commit 1200543

Please sign in to comment.