Merge pull request #1039 from ocefpaf/remove_pkg_resources
Remove pkg resources
ocefpaf authored Apr 11, 2024
2 parents dada5a2 + d3ee89b commit ee4f7b6
Showing 14 changed files with 53 additions and 72 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v4.6.0
hooks:
- id: trailing-whitespace
exclude: compliance_checker/tests/data
@@ -19,7 +19,7 @@ repos:
- test_requirements.txt

- repo: https://github.com/psf/black
rev: 24.1.1
rev: 24.3.0
hooks:
- id: black
language_version: python3
@@ -31,7 +31,7 @@


- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.0
rev: v0.3.5
hooks:
- id: ruff

7 changes: 1 addition & 6 deletions compliance_checker/cf/cf_1_6.py
@@ -2764,12 +2764,7 @@ def check_cell_boundaries(self, ds):
):
valid = False
reasoning.append(
"Dimension {} of boundary variable (for {}) must have at least {} elements to form a simplex/closed cell with previous dimensions {}.".format(
boundary_variable.name,
variable.name,
len(variable.dimensions) + 1,
boundary_variable.dimensions[:-1],
),
f"Dimension {boundary_variable.name} of boundary variable (for {variable.name}) must have at least {len(variable.dimensions) + 1} elements to form a simplex/closed cell with previous dimensions {boundary_variable.dimensions[:-1]}.",
)
result = Result(
BaseCheck.MEDIUM,
7 changes: 1 addition & 6 deletions compliance_checker/cf/cf_1_7.py
@@ -283,12 +283,7 @@ def check_cell_boundaries(self, ds):
):
valid = False
reasoning.append(
"Dimension {} of boundary variable (for {}) must have at least {} elements to form a simplex/closed cell with previous dimensions {}.".format(
boundary_variable.name,
variable.name,
len(variable.dimensions) + 1,
boundary_variable.dimensions[:-1],
),
f"Dimension {boundary_variable.name} of boundary variable (for {variable.name}) must have at least {len(variable.dimensions) + 1} elements to form a simplex/closed cell with previous dimensions {boundary_variable.dimensions[:-1]}.",
)

# 7.1 Required 3/5:
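The cf_1_6.py and cf_1_7.py hunks above are the same mechanical cleanup: a multi-line str.format() call collapsed into a single f-string. A minimal sketch of the equivalence, with made-up values:

    name, n = "lat_bnds", 3
    "Dimension {} must have at least {} elements.".format(name, n)
    f"Dimension {name} must have at least {n} elements."  # identical output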
8 changes: 4 additions & 4 deletions compliance_checker/cf/util.py
@@ -1,3 +1,4 @@
import importlib.resources
import itertools
import os
import sys
@@ -7,7 +8,6 @@
from cf_units import Unit
from lxml import etree
from netCDF4 import Dataset
from pkg_resources import resource_filename

# copied from paegan
# paegan may depend on these later
@@ -284,9 +284,9 @@ def download_cf_standard_name_table(version, location=None):
if (
location is None
): # This case occurs when updating the packaged version from command line
location = resource_filename(
"compliance_checker",
"data/cf-standard-name-table.xml",
location = (
importlib.resources.files("compliance_checker")
/ "data/cf-standard-name-table.xml"
)

if version == "latest":
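This is the core substitution of the pull request: pkg_resources (part of setuptools' deprecated runtime API) gives way to the standard-library importlib.resources, whose files() accessor has been available since Python 3.9. A minimal sketch of the before/after; note that resource_filename() returned a str while files() returns a Traversable (a pathlib.Path when the package is installed as a normal directory), which is why several later hunks add str()/Path handling:

    import importlib.resources

    # old, via setuptools:
    # from pkg_resources import resource_filename
    # location = resource_filename("compliance_checker", "data/cf-standard-name-table.xml")

    # new, standard library only:
    location = (
        importlib.resources.files("compliance_checker")
        / "data/cf-standard-name-table.xml"
    )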
4 changes: 2 additions & 2 deletions compliance_checker/cfutil.py
@@ -3,13 +3,13 @@
compliance_checker/cfutil.py
"""
import csv
import importlib.resources
import re
import warnings
from collections import defaultdict
from functools import lru_cache, partial

from cf_units import Unit
from pkg_resources import resource_filename

_UNITLESS_DB = None
_SEA_NAMES = None
@@ -128,7 +128,7 @@ def get_sea_names():
if _SEA_NAMES is None:
buf = {}
with open(
resource_filename("compliance_checker", "data/seanames.csv"),
importlib.resources.files("compliance_checker") / "data/seanames.csv",
) as f:
reader = csv.reader(f)
for code, sea_name in reader:
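Passing the Traversable straight to open(), as this hunk does, works as long as compliance_checker is installed as a regular directory on disk. If zip-safe behaviour mattered, the usual patterns are the Traversable's own open() method or importlib.resources.as_file(); a sketch of both, assuming seanames.csv ships with the package:

    import csv
    import importlib.resources

    ref = importlib.resources.files("compliance_checker") / "data/seanames.csv"

    with ref.open() as f:  # Traversable.open(), works even for zipped packages
        rows = list(csv.reader(f))

    with importlib.resources.as_file(ref) as path:  # materializes a real file if needed
        with open(path) as f:
            rows = list(csv.reader(f))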
1 change: 1 addition & 0 deletions compliance_checker/protocols/netcdf.py
@@ -17,6 +17,7 @@ def is_netcdf(url):
:param str url: Location of file on the file system
"""
# Try an obvious exclusion of remote resources
url = str(url)
if url.startswith("http"):
return False

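A one-line guard: with the fixtures elsewhere in this commit now handing Path/Traversable objects to is_netcdf(), the value is (presumably for that reason) coerced to str before the startswith check, since Path has no startswith method. A minimal sketch:

    from pathlib import Path

    url = Path("tests/data/2dim-grid.nc")
    str(url).startswith("http")   # False
    # url.startswith("http") would raise AttributeError on a Path object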
16 changes: 6 additions & 10 deletions compliance_checker/suite.py
@@ -13,7 +13,6 @@
import warnings
from collections import defaultdict
from datetime import datetime, timezone
from distutils.version import StrictVersion
from operator import itemgetter
from pathlib import Path
from urllib.parse import urlparse
@@ -23,6 +22,7 @@
from netCDF4 import Dataset
from owslib.sos import SensorObservationService
from owslib.swe.sensor.sml import SensorML
from packaging.version import parse
from pkg_resources import working_set

from compliance_checker import __version__, tempnc
@@ -186,9 +186,8 @@ def _load_checkers(cls, checkers):
for spec, versions in itertools.groupby(ver_checkers, itemgetter(0)):
version_nums = [v[-1] for v in versions]
try:
latest_version = str(max(StrictVersion(v) for v in version_nums))
# if the version can't be parsed as a StrictVersion, parse
# according to character collation
latest_version = str(max(parse(v) for v in version_nums))
# if the version can't be parsed, do it according to character collation
except ValueError:
latest_version = max(version_nums)
cls.checkers[spec] = cls.checkers[spec + ":latest"] = cls.checkers[
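distutils, and with it StrictVersion, is slated for removal in Python 3.12, so the newest-checker selection moves to packaging.version.parse. A minimal sketch, assuming plain X.Y checker versions; packaging's InvalidVersion is a ValueError subclass, so the existing except ValueError fallback still applies:

    from packaging.version import InvalidVersion, parse

    version_nums = ["1.6", "1.7", "1.8"]
    latest_version = str(max(parse(v) for v in version_nums))  # '1.8'

    issubclass(InvalidVersion, ValueError)  # True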
@@ -765,12 +764,9 @@ def generate_dataset(self, cdl_path):
:param str cdl_path: Absolute path to cdl file that is used to generate netCDF file
"""
if (
".cdl" in cdl_path
): # it's possible the filename doesn't have the .cdl extension
ds_str = cdl_path.replace(".cdl", ".nc")
else:
ds_str = cdl_path + ".nc"
if isinstance(cdl_path, str):
cdl_path = Path(cdl_path)
ds_str = cdl_path.with_suffix(".nc")

# generate netCDF-4 file
iostat = subprocess.run(
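generate_dataset() now accepts either a str or a Path and swaps string replacement for Path.with_suffix, which still appends ".nc" when there is no extension and avoids rewriting a ".cdl" that happens to appear elsewhere in the path. A minimal sketch:

    from pathlib import Path

    Path("tests/data/test_cdl.cdl").with_suffix(".nc")  # .../test_cdl.nc
    Path("tests/data/test_cdl").with_suffix(".nc")      # suffix appended

    # the old approach replaced every occurrence:
    "my.cdl.dir/test.cdl".replace(".cdl", ".nc")        # 'my.nc.dir/test.nc'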
5 changes: 3 additions & 2 deletions compliance_checker/tests/__init__.py
@@ -1,4 +1,5 @@
import unittest
from pathlib import Path

from netCDF4 import Dataset

@@ -25,8 +26,8 @@ def load_dataset(self, nc_dataset):
"""
Return a loaded NC Dataset for the given path
"""
if not isinstance(nc_dataset, str):
raise ValueError("nc_dataset should be a string")
if not isinstance(nc_dataset, (str, Path)):
raise ValueError("nc_dataset should be a valid path")

nc_dataset = Dataset(nc_dataset, "r")
self.addCleanup(nc_dataset.close)
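load_dataset() now also accepts pathlib.Path, matching the objects the updated fixtures produce. A sketch; the os.PathLike variant is only a hypothetical broader alternative, not what the commit does:

    import os
    from pathlib import Path

    isinstance(Path("tests/data/2dim-grid.nc"), (str, Path))         # True
    isinstance(Path("tests/data/2dim-grid.nc"), (str, os.PathLike))  # also True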
7 changes: 4 additions & 3 deletions compliance_checker/tests/conftest.py
@@ -1,11 +1,10 @@
import importlib.resources
import os
import subprocess
from itertools import chain
from pathlib import Path

import pytest
from netCDF4 import Dataset
from pkg_resources import resource_filename

from compliance_checker.cf import util
from compliance_checker.suite import CheckSuite
@@ -27,7 +26,9 @@ def static_files(cdl_stem):
Returns the Path to a valid nc dataset\n
replaces the old STATIC_FILES dict
"""
datadir = Path(resource_filename("compliance_checker", "tests/data")).resolve()
datadir = (
importlib.resources.files("compliance_checker").joinpath("tests/data").resolve()
)
assert datadir.exists(), f"{datadir} not found"

cdl_paths = glob_down(datadir, f"{cdl_stem}.cdl", 3)
10 changes: 4 additions & 6 deletions compliance_checker/tests/resources.py
@@ -1,16 +1,14 @@
import os
import importlib.resources
import subprocess

from pkg_resources import resource_filename


def get_filename(path):
"""
Returns the path to a valid dataset
"""
filename = resource_filename("compliance_checker", path)
nc_path = filename.replace(".cdl", ".nc")
if not os.path.exists(nc_path):
filename = importlib.resources.files("compliance_checker") / path
nc_path = filename.with_suffix(".nc")
if not nc_path.exists():
generate_dataset(filename, nc_path)
return nc_path

4 changes: 1 addition & 3 deletions compliance_checker/tests/test_cf_integration.py
@@ -45,9 +45,7 @@
'§2.6.1 Conventions global attribute does not contain "CF-1.8"',
f"standard_name visibility is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['visibility_in_air']",
'Standard name modifier "data_quality" for variable visibility_qc is not a valid modifier according to CF Appendix C',
"standard_name wind_direction is not defined in Standard Name Table v{}. Possible close match(es): ['wind_to_direction', 'wind_from_direction', 'wind_gust_from_direction']".format(
std_names._version,
),
f"standard_name wind_direction is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['wind_to_direction', 'wind_from_direction', 'wind_gust_from_direction']",
'Standard name modifier "data_quality" for variable wind_direction_qc is not a valid modifier according to CF Appendix C',
f"standard_name wind_gust is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['y_wind_gust', 'x_wind_gust', 'wind_speed_of_gust']",
'Standard name modifier "data_quality" for variable wind_gust_qc is not a valid modifier according to CF Appendix C',
43 changes: 19 additions & 24 deletions compliance_checker/tests/test_suite.py
@@ -1,35 +1,30 @@
import importlib.resources
import os
import unittest
from pathlib import Path

import numpy as np
from pkg_resources import resource_filename

from compliance_checker.acdd import ACDDBaseCheck
from compliance_checker.base import BaseCheck, GenericFile, Result
from compliance_checker.suite import CheckSuite

static_files = {
"2dim": resource_filename("compliance_checker", "tests/data/2dim-grid.nc"),
"bad_region": resource_filename("compliance_checker", "tests/data/bad_region.nc"),
"bad_data_type": resource_filename(
"compliance_checker",
"tests/data/bad_data_type.nc",
),
"test_cdl": resource_filename("compliance_checker", "tests/data/test_cdl.cdl"),
"test_cdl_nc": resource_filename(
"compliance_checker",
"tests/data/test_cdl_nc_file.nc",
),
"empty": resource_filename("compliance_checker", "tests/data/non-comp/empty.file"),
"ru07": resource_filename(
"compliance_checker",
"tests/data/ru07-20130824T170228_rt0.nc",
),
"netCDF4": resource_filename(
"compliance_checker",
"tests/data/test_cdl_nc4_file.cdl",
),
"2dim": importlib.resources.files("compliance_checker") / "tests/data/2dim-grid.nc",
"bad_region": importlib.resources.files("compliance_checker")
/ "tests/data/bad_region.nc",
"bad_data_type": importlib.resources.files("compliance_checker")
/ "tests/data/bad_data_type.nc",
"test_cdl": importlib.resources.files("compliance_checker")
/ "tests/data/test_cdl.cdl",
"test_cdl_nc": importlib.resources.files("compliance_checker")
/ "tests/data/test_cdl_nc_file.nc",
"empty": importlib.resources.files("compliance_checker")
/ "tests/data/non-comp/empty.file",
"ru07": importlib.resources.files("compliance_checker")
/ "tests/data/ru07-20130824T170228_rt0.nc",
"netCDF4": importlib.resources.files("compliance_checker")
/ "tests/data/test_cdl_nc4_file.cdl",
}


@@ -95,9 +90,9 @@ def test_generate_dataset_netCDF4(self):
# create netCDF4 file
ds_name = self.cs.generate_dataset(static_files["netCDF4"])
# check if correct name is return
assert ds_name == static_files["netCDF4"].replace(".cdl", ".nc")
assert ds_name == static_files["netCDF4"].with_suffix(".nc")
# check if netCDF4 file was created
assert os.path.isfile(static_files["netCDF4"].replace(".cdl", ".nc"))
assert os.path.isfile(static_files["netCDF4"].with_suffix(".nc"))

def test_include_checks(self):
ds = self.cs.load_dataset(static_files["bad_data_type"])
@@ -242,7 +237,7 @@ def test_cdl_file(self):
)
ds.close()

nc_file_path = static_files["test_cdl"].replace(".cdl", ".nc")
nc_file_path = static_files["test_cdl"].with_suffix(".nc")
self.addCleanup(os.remove, nc_file_path)

# Ok the scores should be equal!
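Each entry in the rewritten static_files mapping repeats the same importlib.resources.files("compliance_checker") / "tests/data/..." expression; a small helper could factor that out. A hypothetical sketch (the helper is invented here, not part of the commit):

    import importlib.resources

    def _test_data(relpath):
        """Resolve a file under compliance_checker/tests/data."""
        return importlib.resources.files("compliance_checker") / f"tests/data/{relpath}"

    static_files = {
        "2dim": _test_data("2dim-grid.nc"),
        "test_cdl": _test_data("test_cdl.cdl"),
        # ... remaining entries as in the diff
    }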
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -7,7 +7,7 @@ requires = [
]

[tool.ruff]
select = [
lint.select = [
"A", # flake8-builtins
"B", # flake8-bugbear
"C4", # flake8-comprehensions
@@ -24,11 +24,11 @@ exclude = [
"compliance_checker/cf/cf.py",
]

ignore = [
lint.ignore = [
"E501",
]

[tool.ruff.per-file-ignores]
[tool.ruff.lint.per-file-ignores]
"docs/source/conf.py" = [
"E402",
"A001",
1 change: 1 addition & 0 deletions requirements.txt
@@ -5,6 +5,7 @@ jinja2>=2.7.3
lxml>=3.2.1
netcdf4>=1.6.4
owsLib>=0.8.3
packaging
pendulum>=1.2.4
pygeoif>=0.6
pyproj>=2.2.1
