diff --git a/pyposeidon/dem.py b/pyposeidon/dem.py
index 683febc1..1273e8b9 100644
--- a/pyposeidon/dem.py
+++ b/pyposeidon/dem.py
@@ -21,7 +21,7 @@
 import pyresample
 import xarray as xr
 
-from pyposeidon.utils.fix import fix_dem, fix, resample
+from pyposeidon.dem_tools import fix_dem, fix, resample
 from pyposeidon import tools
 
 NCORES = max(1, multiprocessing.cpu_count() - 1)
diff --git a/pyposeidon/utils/fix.py b/pyposeidon/dem_tools.py
similarity index 71%
rename from pyposeidon/utils/fix.py
rename to pyposeidon/dem_tools.py
index f781f2e9..66c6541f 100644
--- a/pyposeidon/utils/fix.py
+++ b/pyposeidon/dem_tools.py
@@ -22,13 +22,25 @@
 from tqdm.auto import tqdm
 
 from pyposeidon.utils.coastfix import simplify
 
+from pyposeidon.utils.topology import (
+    MakeTriangleFaces_periodic,
+    MakeQuadFaces,
+    tria_to_df,
+    tria_to_df_3d,
+    quads_to_df,
+)
+from pyposeidon.utils.pos import to_st, to_global_pos, to_sq
+import pyposeidon.dem as pdem
+
 # logging setup
 import logging
 
 logger = logging.getLogger(__name__)
 
 
-def get_tiles(data, chunks=None):
+def get_tiles(data, **kwargs):
+
+    chunks = kwargs.get("chunks", None)
     # chuck
     if chunks:
@@ -39,7 +51,9 @@
         ilons = data.elevation.chunk("auto").chunks[1]
 
     if len(ilons) == 1:
-        ilons = (int(ilons[0] / 2), int(ilons[0] / 2))
+        half = int(ilons[0] / 2)
+        rest = ilons[0] - half
+        ilons = (half, rest)
 
     idx = [sum(ilons[:i]) for i in range(len(ilons) + 1)]
     jdx = [sum(ilats[:i]) for i in range(len(ilats) + 1)]
@@ -52,9 +66,9 @@
     return perms
 
 
-def fix_dem(dem, coastline, buffer=0.0, **kwargs):
+def fix_dem(dem, coastline, dbuffer=0.0, **kwargs):
 
-    perms = get_tiles(dem)
+    perms = get_tiles(dem, **kwargs)
 
     i = 0
     check = True
@@ -70,10 +84,10 @@
         lat2 = dem.latitude.data[j1:j2][-1]
 
         # buffer lat/lon
-        blon1 = lon1 - buffer
-        blon2 = lon2 + buffer
-        blat1 = lat1 - buffer
-        blat2 = lat2 + buffer
+        blon1 = lon1 - dbuffer
+        blon2 = lon2 + dbuffer
+        blat1 = lat1 - dbuffer
+        blat2 = lat2 + dbuffer
 
         # de = dem.sel(lon=slice(blon1,blon2)).sel(lat=slice(blat1,blat2))
         de = dem_range(dem, blon1, blon2, blat1, blat2)
@@ -82,7 +96,11 @@
 
         ide = de_.sel(latitude=slice(lat1, lat2)).sel(longitude=slice(lon1, lon2))
 
-        ide.to_netcdf("./fixtmp/ide{:03d}.nc".format(i))
+        # set encoding
+        comp = dict(zlib=True, complevel=1, dtype="float32")
+        encoding = {var: comp for var in ide.data_vars}
+
+        ide.to_netcdf("./fixtmp/ide{:03d}.nc".format(i), encoding=encoding)
         i += 1
 
         check = check and check_
@@ -504,8 +522,6 @@
     # Define points with positive bathymetry
     x, y = np.meshgrid(dem.longitude, dem.latitude)
 
-    print(f"reset_flag={reset_flag}")
-
    if reset_flag:
        flag = 0
 
@@ -615,6 +631,252 @@
         c = np.sign(np.mean([lon_min, lon_max]))
         dem["longitude"] = dem["longitude"] + c * 360.0
 
+        if lon_0 == 0:
+
+            g1 = data.isel(longitude=slice(-4, data.longitude.size - 1), latitude=slice(lat_0, lat_1))
+            g1 = g1.assign_coords({"longitude": g1.longitude.values - 360.0})
+
+            dem = xr.concat([g1, dem], dim="longitude")
+
+        if lon_1 == data.longitude.size:
+
+            g3 = data.isel(longitude=slice(1, 4), latitude=slice(lat_0, lat_1))
+            g3 = g3.assign_coords({"longitude": g3.longitude.values + 360.0})
+
+            dem = xr.concat([dem, g3], dim="longitude")
+
     dem_data = xr.merge([dem])
 
     return dem_data
+
+
+def get_dem_gradient(
+    dem,
+    slope_parameter=20,
+    filter_quotient=50,
+    min_edge_length=None,
+    max_edge_length=None,
+    min_elevation_cutoff=-50.0,
+):
+    try:
+        dd = dem.adjusted.data
+    except:
+        dd = dem.elevation.data
+
+    dd[dd >= min_elevation_cutoff] = min_elevation_cutoff
+
+    dx, dy = np.gradient(dem.longitude)[0], np.gradient(dem.latitude)[0]
+
+    mean_latitude = np.mean(dem.latitude.data)
+
+    # convert to meters
+    meters_per_degree = (
+        111132.92
+        - 559.82 * np.cos(2 * mean_latitude)
+        + 1.175 * np.cos(4 * mean_latitude)
+        - 0.0023 * np.cos(6 * mean_latitude)
+    )
+    dy *= meters_per_degree
+    dx *= meters_per_degree
+
+    by, bx = _earth_gradient(dd, dy, dx)  # get slope in x and y directions
+
+    bs = np.sqrt(bx**2 + by**2)  # get overall slope
+
+    eps = 1e-10  # small number to approximate derivative
+    dp = np.clip(dd, None, -1)
+    gvalues = (2 * np.pi / slope_parameter) * np.abs(dp) / (bs + eps)
+    gvalues /= meters_per_degree
+
+    if max_edge_length:
+        gvalues[gvalues > max_edge_length] = max_edge_length
+
+    if min_edge_length:
+        gvalues[gvalues < min_edge_length] = min_edge_length
+
+    return gvalues
+
+
+def _earth_gradient(F, dx, dy):
+    """
+    earth_gradient(F,HX,HY), where F is 2-D, uses the spacing
+    specified by HX and HY. HX and HY can either be scalars to specify
+    the spacing between coordinates or vectors to specify the
+    coordinates of the points. If HX and HY are vectors, their length
+    must match the corresponding dimension of F.
+    """
+    Fy, Fx = np.zeros(F.shape), np.zeros(F.shape)
+
+    # Forward differences on edges
+    Fx[:, 0] = (F[:, 1] - F[:, 0]) / dx
+    Fx[:, -1] = (F[:, -1] - F[:, -2]) / dx
+    Fy[0, :] = (F[1, :] - F[0, :]) / dy
+    Fy[-1, :] = (F[-1, :] - F[-2, :]) / dy
+
+    # Central Differences on interior
+    Fx[:, 1:-1] = (F[:, 2:] - F[:, :-2]) / (2 * dx)
+    Fy[1:-1, :] = (F[2:, :] - F[:-2, :]) / (2 * dy)
+
+    return Fy, Fx
+
+
+def compute_dem_gradient(
+    dem,
+    slope_parameter=20,
+    filter_quotient=50,
+    min_edge_length=None,
+    max_edge_length=None,
+    min_elevation_cutoff=-50.0,
+    **kwargs,
+):
+
+    i = 0
+
+    if not os.path.exists("./fixtmp/"):
+        os.makedirs("./fixtmp/")
+
+    perms = get_tiles(dem, **kwargs)
+
+    for (i1, i2), (j1, j2) in tqdm(perms, total=len(perms)):
+
+        lon1 = dem.longitude.data[i1:i2][0]
+        lon2 = dem.longitude.data[i1:i2][-1]
+        lat1 = dem.latitude.data[j1:j2][0]
+        lat2 = dem.latitude.data[j1:j2][-1]
+
+        de = dem_range(dem, lon1, lon2, lat1, lat2)
+
+        # de_, check_, flag = fix(de, coastline)
+
+        gvalues = get_dem_gradient(
+            de,
+            slope_parameter=slope_parameter,
+            filter_quotient=filter_quotient,
+            min_edge_length=min_edge_length,
+            max_edge_length=max_edge_length,
+            min_elevation_cutoff=min_elevation_cutoff,
+        )
+
+        de = de.assign({"gradient": (["latitude", "longitude"], gvalues)})
+
+        ide = de.sel(latitude=slice(lat1, lat2)).sel(longitude=slice(lon1, lon2))
+
+        # set encoding
+        comp = dict(zlib=True, complevel=1, dtype="float32")
+        encoding = {var: comp for var in ide.data_vars}
+
+        ide.to_netcdf("./fixtmp/ide{:03d}.nc".format(i), encoding=encoding)
+        i += 1
+
+        # check = check and check_
+
+    ifiles = glob("./fixtmp/ide*")
+
+    fdem = xr.open_mfdataset(ifiles)
+
+    ##cleanup
+    shutil.rmtree("./fixtmp")
+
+    return fdem
+
+
+def scale_dem(b, res_min, res_max, **kwargs):
+    # b.columns = ["z"]
+
+    b.loc[b.z >= -10, "z"] = -1.0e-4  # normalize to only negative values
+
+    b.z = np.sqrt(-b.z) / 0.5  # scale
+
+    # adjust scale
+
+    bg = b.z.values
+
+    a2 = (bg - bg.min()) / (bg.max() - bg.min())
+
+    d2 = res_min + a2 * (res_max - res_min)
+
+    b["d2"] = d2
+
+    nodes = b.reset_index(drop=True)
+
+    nodes["z"] = 0
+
+    # subspace = kwargs.get('subspace', None)
+
+    # if subspace is not None:
+    # mask = ...
+    # hfun[mask] =
+
+    return nodes
+
+
+def make_bgmesh(df, fpos, dem=None, scale=True, **kwargs):
+    lon_min = df.bounds.minx.min()
+    lon_max = df.bounds.maxx.max()
+    lat_min = df.bounds.miny.min()
+    lat_max = df.bounds.maxy.max()
+
+    kwargs_ = kwargs.copy()
+    kwargs_.pop("lon_min", None)
+    kwargs_.pop("lon_max", None)
+    kwargs_.pop("lat_min", None)
+    kwargs_.pop("lat_max", None)
+
+    if not dem:
+        dem = kwargs.get("dem_source", None)
+
+    if not isinstance(dem, xr.Dataset):
+        logger.info("Reading DEM")
+        dem = pdem.Dem(lon_min=lon_min, lon_max=lon_max, lat_min=lat_min, lat_max=lat_max, **kwargs_)
+        dem = dem.Dataset
+
+    res_min = kwargs.get("resolution_min", 0.01)
+    res_max = kwargs.get("resolution_max", 0.5)
+
+    logger.info("Evaluating bgmesh")
+
+    var = kwargs.get("var", "adjusted")
+    logger.info(f"using '{var}' variable data")
+
+    # scale bathymetry
+    try:
+        b = dem[var].to_dataframe()
+    except:
+        b = dem.elevation.to_dataframe()
+        logger.info(f"variable {var} not present switching to 'elevation' data")
+
+    b = b.reset_index()
+
+    dims = b.columns[:2]
+
+    b.columns = [dims[0], dims[1], "z"]
+
+    if scale:
+        nodes = scale_dem(b, res_min, res_max, **kwargs)
+    else:
+        nodes = b
+        nodes["d2"] = b.z.values
+        nodes["z"] = 0
+
+    x = dem[dims[0]].values
+    y = dem[dims[1]].values
+
+    quad = MakeQuadFaces(x.shape[0], y.shape[0])
+    elems = pd.DataFrame(quad, columns=["a", "b", "c", "d"])
+    df = quads_to_df(elems, nodes)
+
+    dh = xr.Dataset(
+        {
+            "h": (
+                [dims[0], dims[1]],
+                nodes.d2.values.flatten().reshape((x.shape[0], y.shape[0])),
+            )
+        },
+        coords={dims[0]: (dims[0], x), dims[1]: (dims[1], y)},
+    )
+
+    logger.info("Saving bgmesh to {}".format(fpos))
+
+    to_sq(df, fpos)  # save bgmesh
+
+    return dh
diff --git a/pyposeidon/utils/bfs.py b/pyposeidon/utils/bfs.py
deleted file mode 100644
index 5f60412a..00000000
--- a/pyposeidon/utils/bfs.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""
-Utility functions
-
-"""
-
-# Copyright 2018 European Union
-# This file is part of pyposeidon.
-# Licensed under the EUPL, Version 1.2 or – as soon they will be approved by the European Commission - subsequent versions of the EUPL (the "Licence").
-# Unless required by applicable law or agreed to in writing, software distributed under the Licence is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the Licence for the specific language governing permissions and limitations under the Licence.
-
-
-from collections import deque
-
-
-class Solution(object):
-    def append_if(self, queue, x, y, island_counter):
-        b = [[0, 1], [1, 0]], [[0, 1], [-1, 0]], [[0, -1], [1, 0]], [[0, -1], [-1, 0]]
-
-        """Append to the queue only if in bounds of the grid and the cell value is 1."""
-        if 0 <= x < len(self.grid) and 0 <= y < len(self.grid[0]):
-            if self.grid[x][y] == "1":
-                # queue.append((x, y))
-                for [i, j], [k, l] in b:
-                    try:
-                        if self.grid[x + i][y + j] in [
-                            "1",
-                            island_counter,
-                        ] and self.grid[
-                            x + k
-                        ][y + l] in [
-                            "1",
-                            island_counter,
-                        ]:
-                            queue.append((x, y))
-                            break
-                    except:
-                        pass
-
-    def mark_neighbors(self, row, col, island_counter):
-        """Mark all the cells in the current island with value = 2. Breadth-first search."""
-        queue = deque()
-
-        queue.append((row, col))
-        while queue:
-            x, y = queue.pop()
-            self.grid[x][y] = island_counter
-
-            self.append_if(queue, x - 1, y, island_counter)
-            self.append_if(queue, x, y - 1, island_counter)
-            self.append_if(queue, x + 1, y, island_counter)
-            self.append_if(queue, x, y + 1, island_counter)
-            self.append_if(queue, x - 1, y - 1, island_counter)
-            self.append_if(queue, x + 1, y - 1, island_counter)
-            self.append_if(queue, x + 1, y + 1, island_counter)
-            self.append_if(queue, x - 1, y + 1, island_counter)
-
-    def numIslands(self, grid):
-        """
-        :type grid: List[List[str]]
-        :rtype: int
-        """
-
-        if not grid or len(grid) == 0 or len(grid[0]) == 0:
-            return 0
-
-        self.grid = grid
-
-        row_length = len(grid)
-        col_length = len(grid[0])
-
-        island_counter = 0
-        for row in range(row_length):
-            for col in range(col_length):
-                if self.grid[row][col] == "1":
-                    # found an island
-                    island_counter += 1
-
-                    self.mark_neighbors(row, col, island_counter)
-
-        return island_counter
diff --git a/pyposeidon/utils/global_bgmesh.py b/pyposeidon/utils/global_bgmesh.py
index f5467df5..e3bfa14f 100644
--- a/pyposeidon/utils/global_bgmesh.py
+++ b/pyposeidon/utils/global_bgmesh.py
@@ -7,9 +7,8 @@
 import pyposeidon.boundary as pb
 
 from pyposeidon.utils.stereo import to_3d, to_lat_lon, stereo_to_3d
-from pyposeidon.utils.pos import to_sq, to_st
+from pyposeidon.utils.pos import to_sq
 from pyposeidon.utils.hfun import to_hfun_mesh, to_hfun_grid
-from pyposeidon.utils.scale import scale_dem
 from pyposeidon.utils.topology import (
     MakeQuadFaces,
     quads_to_df,
@@ -18,13 +17,13 @@
 )
 import pyposeidon.dem as pdem
 import pyposeidon.mesh as pmesh
-from pyposeidon.utils.fix import dem_range, resample
+from pyposeidon.dem_tools import dem_range, resample, get_tiles, scale_dem
 
 import logging
 
 logger = logging.getLogger(__name__)
 
 
-def make_bgmesh_global(dfb, fpos, dem, **kwargs):
+def make_bgmesh_global(dfb, fpos, dem, scale=True, **kwargs):
     mesh_generator = kwargs.get("mesh_generator", None)
     bk = dfb.copy()
@@ -49,10 +48,10 @@
     ci = 4 * R**2 / (ui**2 + vi**2 + 4 * R**2)
 
     # set weight field
-    scale = bgm_res / ci.flatten()
+    scaled = bgm_res / ci.flatten()
 
     # save as bgmesh
-    nodes = pd.DataFrame({"longitude": ui.flatten(), "latitude": vi.flatten(), "z": 0, "d2": scale})
+    nodes = pd.DataFrame({"longitude": ui.flatten(), "latitude": vi.flatten(), "z": 0, "d2": scaled})
 
     # tesselation
     quad = MakeQuadFaces(npu, npu)
@@ -102,28 +101,9 @@
     m["x"].data = clon
     m["y"].data = clat
 
-    # Select DEM
-    # try:
-    #     dm = dem.adjusted.to_dataframe()
-    # except:
-    #     dm = dem.elevation.to_dataframe()
+    m["depth"][:] = np.nan  # reset depth
 
-    # lon = dem.longitude.values
-    # lat = dem.latitude.values
-
-    # X, Y = np.meshgrid(lon, lat)
-    # Stereo -> lat/lon
-    # clon, clat = to_lat_lon(x0, y0)
-
-    # resample bathymetry
-    # gdem = dm.values.flatten()
-
-    # orig = pyresample.geometry.SwathDefinition(lons=X.flatten(), lats=Y.flatten())  # original bathymetry points
-    # targ = pyresample.geometry.SwathDefinition(lons=clon, lats=clat)  # wet points
-
-    # bw = pyresample.kd_tree.resample_nearest(orig, gdem, targ, radius_of_influence=50000, fill_value=0)
-
-    dem_on_mesh(m, dem)
+    dem_on_mesh(m, dem, **kwargs)
 
     bw = m.depth.data
 
@@ -133,7 +113,12 @@
     res_min = kwargs.get("resolution_min", 0.01)
     res_max = kwargs.get("resolution_max", 0.5)
 
-    nodes = scale_dem(bz, res_min, res_max, **kwargs)
+    if scale:
+        nodes = scale_dem(bz, res_min, res_max, **kwargs)
+    else:
+        nodes = bz
+        nodes["d2"] = bz.z.values
+        nodes["z"] = 0
 
     nodes["u"] = x0
     nodes["v"] = y0
@@ -145,7 +130,9 @@
     return nodes, elems
 
 
-def fillv(dem, perms, m, buffer=0.0):
+def fillv(dem, perms, m, **kwargs):
+
+    gbuffer = kwargs.get("gbuffer", 5)
 
     for (i1, i2), (j1, j2) in tqdm(perms, total=len(perms)):
 
@@ -155,46 +142,57 @@
         lat2 = dem.latitude.data[j1:j2][-1]
 
         # buffer lat/lon
-        blon1 = lon1 - buffer
-        blon2 = lon2 + buffer
-        blat1 = lat1 - buffer
-        blat2 = lat2 + buffer
+        blon1 = lon1 - gbuffer
+        blon2 = lon2 + gbuffer
+        blat1 = lat1 - gbuffer
+        blat2 = lat2 + gbuffer
 
-        # de = dem.sel(lon=slice(blon1,blon2)).sel(lat=slice(blat1,blat2))
-        de = dem_range(dem, blon1, blon2, blat1, blat2)
+        de = dem_range(dem, lon1, lon2, lat1, lat2)
 
         # subset mesh
-        indices_of_nodes_in_bbox = np.where(
-            (m.y >= lat1 - buffer / 2)
-            & (m.y <= lat2 + buffer / 2)
-            & (m.x >= lon1 - buffer / 2)
-            & (m.x <= lon2 + buffer / 2)
-        )[0]
+        indices_of_nodes_in_bbox = np.where((m.y >= blat1) & (m.y <= blat2) & (m.x >= blon1) & (m.x <= blon2))[0]
 
         bm = m.isel(nSCHISM_hgrid_node=indices_of_nodes_in_bbox)
 
         ids = np.argwhere(np.isnan(bm.depth.values)).flatten()
-        grid_x, grid_y = bm.x.data, bm.y.data
+        xw, yw = bm.x.data[ids], bm.y.data[ids]
+
+        var = kwargs.get("var", "adjusted")
+        logger.info(f"using '{var}' variable data")
+
+        if var not in dem.data_vars:
+            var = "elevation"
+            logger.info(f"variable {var} not present switching to 'elevation' data")
+
+        function = kwargs.get("function", "nearest")
+        logger.info(f"using {function} resample function")
+
+        # Define points with positive bathymetry
+        x, y = np.meshgrid(de.longitude, de.latitude)
+
+        # fill the nan, if present, with values in order to compute values there if needed.
+        # de[var].data[np.isnan(de[var].values)] = 9999.0
 
-        bd = resample(de, grid_x, grid_y, var="adjusted", wet=True, flag=0, function="gauss")
+        orig = pyresample.geometry.SwathDefinition(lons=x, lats=y)  # original bathymetry points
+        targ = pyresample.geometry.SwathDefinition(lons=xw, lats=yw)  # wet points
 
-        m["depth"].loc[dict(nSCHISM_hgrid_node=indices_of_nodes_in_bbox)] = -bd
+        mdem = de[var].values.astype(float)
 
+        if function == "nearest":
+            bw = pyresample.kd_tree.resample_nearest(orig, mdem, targ, radius_of_influence=100000, fill_value=np.nan)
 
-def dem_on_mesh(mesh, dem):
+        elif function == "gauss":
+            bw = pyresample.kd_tree.resample_gauss(
+                orig, mdem, targ, radius_of_influence=500000, neighbours=10, sigmas=250000, fill_value=np.nan
+            )
 
-    ilats = dem.elevation.chunk("auto").chunks[0]
-    ilons = dem.elevation.chunk("auto").chunks[1]
+        m["depth"].loc[dict(nSCHISM_hgrid_node=indices_of_nodes_in_bbox[ids])] = bw
+
 
-    if len(ilons) == 1:
-        ilons = (int(ilons[0] / 2), int(ilons[0] / 2))
 
-    idx = [sum(ilons[:i]) for i in range(len(ilons) + 1)]
-    jdx = [sum(ilats[:i]) for i in range(len(ilats) + 1)]
+def dem_on_mesh(mesh, dem, **kwargs):
 
-    blon = list(zip(idx[:-1], idx[1:]))
-    blat = list(zip(jdx[:-1], jdx[1:]))
+    logger.info("resample dem on mesh")
 
-    perms = [(x, y) for x in blon for y in blat]
+    perms = get_tiles(dem, **kwargs)
 
-    fillv(dem, perms, mesh, buffer=5)
+    fillv(dem, perms, mesh, **kwargs)
diff --git a/pyposeidon/utils/hfun.py b/pyposeidon/utils/hfun.py
index a10fb41d..4eb60810 100644
--- a/pyposeidon/utils/hfun.py
+++ b/pyposeidon/utils/hfun.py
@@ -6,7 +6,7 @@
 from .limgrad import limgrad2
 from pyposeidon.utils.stereo import to_lat_lon, to_stereo
 from pyposeidon.utils.topology import MakeTriangleFaces, MakeTriangleFaces_periodic
-from pyposeidon.utils.scale import scale_dem
+from pyposeidon.dem_tools import scale_dem
 import pyposeidon
 import math
 
diff --git a/pyposeidon/utils/scale.py b/pyposeidon/utils/scale.py
deleted file mode 100644
index 7222ee35..00000000
--- a/pyposeidon/utils/scale.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import numpy as np
-import xarray as xr
-
-
-def scale_dem(b, res_min, res_max, **kwargs):
-    # b.columns = ["z"]
-
-    b.loc[b.z >= -10, "z"] = -1.0e-4  # normalize to only negative values
-
-    b.z = np.sqrt(-b.z) / 0.5  # scale
-
-    # adjust scale
-
-    bg = b.z.values
-
-    a2 = (bg - bg.min()) / (bg.max() - bg.min())
-
-    d2 = res_min + a2 * (res_max - res_min)
-
-    b["d2"] = d2
-
-    nodes = b.reset_index(drop=True)
-
-    nodes["z"] = 0
-
-    # subspace = kwargs.get('subspace', None)
-
-    # if subspace is not None:
-    # mask = ...
-    # hfun[mask] =
-
-    return nodes
diff --git a/tests/test_dem_tile.py b/tests/test_dem_tile.py
new file mode 100644
index 00000000..50470511
--- /dev/null
+++ b/tests/test_dem_tile.py
@@ -0,0 +1,39 @@
+import pyposeidon.dem as pdem
+import pytest
+import geopandas as gp
+import cartopy.feature as cf
+
+
+from . import DATA_DIR
+
+COAST_FILE = (DATA_DIR / "ocean.zip").as_posix()
+
+DEM_SOURCES = pytest.mark.parametrize(
+    "dem_source",
+    [
+        pytest.param(DATA_DIR / "dem.nc", id="local netcdf"),
+    ],
+)
+
+
+@pytest.fixture(scope="session")
+def coasts():
+    # coast = gp.read_file(COAST_FILE).drop("FID", axis=1)
+    cr = "h"
+    coast = cf.NaturalEarthFeature(
+        category="physical", name="land", scale="{}m".format({"l": 110, "i": 50, "h": 10}[cr])
+    )
+    coast = gp.GeoDataFrame(geometry=[x for x in coast.geometries()])
+    return coast
+
+
+@pytest.mark.slow
+@DEM_SOURCES
+def test_dem_adjust(coasts, dem_source):
+    # Just elevation
+    df1 = pdem.Dem(dem_source=dem_source)  # get dem
+    check1 = df1.adjust(coasts, tiles=False)
+    df2 = pdem.Dem(dem_source=dem_source)
+    check2 = df2.adjust(coasts, tiles=True)
+
+    assert (check1 == check2) and df1.Dataset.equals(df2.Dataset)
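
For reviewers who want to try the relocated helpers, the sketch below shows one possible way to exercise the new pyposeidon.dem_tools entry points. It is illustrative only: the DEM file name is hypothetical, while the imports, signatures and return shapes follow the code added in this diff.

# Illustrative sketch only; the DEM path is hypothetical, the functions and
# keyword names are the ones introduced in pyposeidon/dem_tools.py above.
import pyposeidon.dem as pdem
from pyposeidon.dem_tools import get_tiles, compute_dem_gradient

dem = pdem.Dem(dem_source="data/dem.nc")  # hypothetical local netCDF DEM
ds = dem.Dataset                          # xarray Dataset with an 'elevation' variable

# Tiling is now passed through **kwargs; without a 'chunks' keyword the tiles
# follow the dask "auto" chunking of the elevation array.
tiles = get_tiles(ds)  # list of ((i1, i2), (j1, j2)) index windows

# compute_dem_gradient() walks the same tiles, evaluates a slope-based target
# edge length per cell and returns the merged tiles as a Dataset that carries
# a 'gradient' variable alongside the original data.
sizes = compute_dem_gradient(
    ds,
    slope_parameter=20,
    min_edge_length=0.01,
    max_edge_length=0.5,
)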
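
In the same spirit, a possible call into the relocated make_bgmesh(), which now takes the scale flag. The boundary file and output path are hypothetical; the keyword names come from the function body added above.

# Illustrative sketch only; boundary file and output path are hypothetical.
import geopandas as gp
from pyposeidon.dem_tools import make_bgmesh

boundary = gp.read_file("data/boundary.geojson")  # hypothetical boundary polygons

# scale=True turns the bathymetry into a resolution field between
# resolution_min and resolution_max (via scale_dem); scale=False carries the
# raw values through as the weight column 'd2'.
dh = make_bgmesh(
    boundary,
    "bgmesh_output",  # hypothetical path handed to to_sq()
    dem_source="data/dem.nc",
    resolution_min=0.01,
    resolution_max=0.5,
    scale=True,
)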