From 399af816246b2b6a6db5e3a95754d02451afa907 Mon Sep 17 00:00:00 2001
From: Robert Wilson
Date: Thu, 13 Jun 2024 10:12:37 +0100
Subject: [PATCH] reduce number of warnings in gridded matchups

---
 ecoval/fixers.py  |  11 +
 ecoval/gridded.py | 661 +++++++++++++++++++++++-----------------------
 2 files changed, 344 insertions(+), 328 deletions(-)

diff --git a/ecoval/fixers.py b/ecoval/fixers.py
index e7dc4de..495271a 100644
--- a/ecoval/fixers.py
+++ b/ecoval/fixers.py
@@ -23,6 +23,17 @@ def ignore_warning(x):
     True if the warning should be ignored
     False if the warning should not be ignored
     """
+    if "Adding a time series with the same number of time steps" in x:
+        return True
+    # data did not have valid years
+    if "did not have valid years" in x:
+        return True
+    if "There is only file in the dataset. No need to merge" in x:
+        return True
+    if "time bounds unsupporte" in x:  # substring match, also catches "unsupported"
+        return True
+    if "deflate" in x:
+        return True
     if "None of the points are contained" in x:
         return True
     if "0 as the fill value" in x:

diff --git a/ecoval/gridded.py b/ecoval/gridded.py
index 08cd8e4..195a0c1 100644
--- a/ecoval/gridded.py
+++ b/ecoval/gridded.py
@@ -369,363 +369,368 @@ def gridded_matchup(
     # ds_surface.cdo_command("bottomvalue")
 
     # figure out the start and end year
-    start_year = min(ds_surface.years)
-    end_year = max(ds_surface.years)
-    if vv_source == "woa":
-        # ds_surface = ds.copy()
-        ds_vertical.ensemble_mean(nco = True)
-
-    # Read in the monthly observational data
-    vv_file = nc.create_ensemble(dir_var)
-    vv_file = [x for x in vv_file if "annual" not in x]
-    # except:
-    #     vv_file = nc.create_ensemble(dir_var)
-    ds_obs = nc.open_data(
-        vv_file,
-        checks=False,
-    )
-    if vv_source == "occci":
-        ds_obs.subset(variable="chlor_a")
-    ds_obs.subset(years = range(start_year, end_year + 1))
-
-    # read in the annual observational data for WOA
-    if vv_source == "woa":
-        vv_file = nc.create_ensemble(dir_var)
-        vv_file = [x for x in vv_file if "annual" in x]
-        ds_obs_annual = nc.open_data(
-            vv_file,
-            checks=False,
-        )
-        ds_obs_annual.rename({ds_obs_annual.variables[0]: "observation"})
-        if len(ds_obs_annual.variables) > 1:
-            raise ValueError(f"Please ensure only one variable in {vv}!")
-
-    obs_years = ds_obs.years
+    with warnings.catch_warnings(record=True) as w:
+        start_year = min(ds_surface.years)
+        end_year = max(ds_surface.years)
+        if vv_source == "woa":
+            # ds_surface = ds.copy()
+            ds_vertical.ensemble_mean(nco = True)
+
+        # Read in the monthly observational data
+        vv_file = nc.create_ensemble(dir_var)
+        vv_file = [x for x in vv_file if "annual" not in x]
+        # except:
+        #     vv_file = nc.create_ensemble(dir_var)
+        ds_obs = nc.open_data(
+            vv_file,
+            checks=False,
+        )
+        if vv_source == "occci":
+            ds_obs.subset(variable="chlor_a")
+        ds_obs.subset(years = range(start_year, end_year + 1))
+
+        # read in the annual observational data for WOA
+        if vv_source == "woa":
+            vv_file = nc.create_ensemble(dir_var)
+            vv_file = [x for x in vv_file if "annual" in x]
+            ds_obs_annual = nc.open_data(
+                vv_file,
+                checks=False,
+            )
+            ds_obs_annual.rename({ds_obs_annual.variables[0]: "observation"})
+            if len(ds_obs_annual.variables) > 1:
+                raise ValueError(f"Please ensure only one variable in {vv}!")
+
+        obs_years = ds_obs.years
+
+    tidy_warnings(w)
+
+    with warnings.catch_warnings(record=True) as w:
+        if vv_source != "woa":
+            if len(obs_years) == 1:
+                ds_surface.merge("time")
+                ds_surface.tmean("month")
+            else:
+                ds_surface.merge("time")
+                ds_surface.tmean(["year", "month"])
+
+        amm7 = False
+        if domain == "nws":
+            if max(ds_surface.contents.npoints) == 111375:
+                ds_surface.fix_amm7_grid()
+                amm7 = True
+
+            ds_surface.subset(lon=[-19, 9], lat=[41, 64.3])
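The pattern above is the heart of this patch: each block of nctoolkit calls now runs inside `warnings.catch_warnings(record=True)`, and the recorded warnings are handed to `tidy_warnings`. That helper is presumably implemented along these lines — a minimal sketch, assuming it simply replays anything that `ignore_warning` (extended in ecoval/fixers.py above) does not match:

```python
import warnings

from ecoval.fixers import ignore_warning


def tidy_warnings(w):
    # re-emit only the recorded warnings that are not on the ignore list
    for warning in w:
        message = str(warning.message)
        if not ignore_warning(message):
            warnings.warn(message)
```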
+
+        if vv in ["poc", "doc"]:
+            if strict:
+                ds_obs.subset(years=years)
+            ds_obs.merge("time")
+            ds_obs.tmean("month")
+            ds_surface.tmean("month")
 
-    if vv_source != "woa":
-        if len(obs_years) == 1:
+        if vv in ["temperature"]:
+            if strict:
+                ds_obs.subset(years=years)
+            ds_obs.subset(years=years)
+            ds_obs.tmean(["year", "month"])
+            ds_obs.merge("time")
+            ds_obs.tmean(["year", "month"])
+
+        if vv in ["salinity"] and domain != "nws":
+            if vv_source != "woa":
+                ds_obs.top()
+            sub_years = [x for x in ds_vertical.years if x in ds_obs.years]
+            ds_obs.subset(years=sub_years)
+            ds_surface.subset(years = sub_years)
+            ds_obs.merge("time")
+            ds_obs.tmean("month")
             ds_surface.merge("time")
             ds_surface.tmean("month")
-        else:
+            ds_obs_annual.subset(years = sub_years)
+            ds_obs_annual.tmean()
+        if vv in ["chlorophyll"] and domain != "nws":
+            ds_obs.top()
+            sub_years = [x for x in ds_surface.years if x in ds_obs.years]
+            ds_obs.subset(years=sub_years)
+            ds_surface.subset(years = sub_years)
+            ds_obs.merge("time")
+            ds_obs.tmean("month")
             ds_surface.merge("time")
-            ds_surface.tmean(["year", "month"])
+            ds_surface.tmean("month")
 
-    amm7 = False
-    if domain == "nws":
-        if max(ds_surface.contents.npoints) == 111375:
-            ds_surface.fix_amm7_grid()
-            amm7 = True
-        ds_surface.subset(lon=[-19, 9], lat=[41, 64.3])
 
-    if vv in ["poc", "doc"]:
-        if strict:
-            ds_obs.subset(years=years)
-        ds_obs.merge("time")
-        ds_obs.tmean("month")
-        ds_surface.tmean("month")
+        if vv not in ["poc", "temperature"]:
+            if len(ds_obs.times) > 12:
+                ds_obs.subset(years=years)
+
+        if vv_source == "occci":
+            ds_obs.subset(variable="chlor_a")
+
+        ds_xr = ds_surface.to_xarray()
+        lon_name = [x for x in ds_xr.coords if "lon" in x]
+        lat_name = [x for x in ds_xr.coords if "lat" in x]
+        lon = ds_xr[lon_name[0]].values
+        lat = ds_xr[lat_name[0]].values
+        lon_max = lon.max()
+        lon_min = lon.min()
+        lat_max = lat.max()
+        lat_min = lat.min()
+
+        # figure out the lon/lat extent in the model
+        lons = [lon_min, lon_max]
+        lats = [lat_min, lat_max]
+        # start off with the raw coords
+        # This will not work with nemo, which outputs the grid incorrectly
+        # so we will check if the step between the first lon/lat and the second lon/lat is
+        # far bigger than the rest. If this is the case, the first should be ignored
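The comment above describes a workaround for NEMO-style grids where the first coordinate step is spuriously large. In isolation, the heuristic applied in the diff below looks like this — a sketch assuming a 1D array with at least four unique coordinate values:

```python
import numpy as np


def first_valid_coord(vals):
    # if the first spacing is an order of magnitude larger than the next two,
    # treat the first coordinate as spurious and start the extent at the second
    vals = np.unique(vals).tolist()  # unique, sorted, 1d
    diff_1 = vals[1] - vals[0]
    diff_2 = vals[2] - vals[1]
    diff_3 = vals[3] - vals[2]
    if diff_1 / diff_2 > 10 and diff_1 / diff_3 > 10:
        return vals[1]
    return vals[0]
```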
+        # get the lon/lat values
+        lon_vals = ds_xr[lon_name[0]].values
+        lat_vals = ds_xr[lat_name[0]].values
+        # make them unique and ordered, and 1d
+        lon_vals = np.unique(lon_vals)
+        # make a list
+        lon_vals = lon_vals.tolist()
+        diff_1 = lon_vals[1] - lon_vals[0]
+        diff_2 = lon_vals[2] - lon_vals[1]
+        diff_3 = lon_vals[3] - lon_vals[2]
+        if diff_1 / diff_2 > 10:
+            if diff_1 / diff_3 > 10:
+                lons[0] = lon_vals[1]
+        # do it for lats
+        lat_vals = np.unique(lat_vals)
+        lat_vals = lat_vals.tolist()
+        diff_1 = lat_vals[1] - lat_vals[0]
+        diff_2 = lat_vals[2] - lat_vals[1]
+        diff_3 = lat_vals[3] - lat_vals[2]
+        if diff_1 / diff_2 > 10:
+            if diff_1 / diff_3 > 10:
+                lats[0] = lat_vals[1]
+
+        lon_min_model = lons[0]
+        lon_max_model = lons[1]
+        lat_min_model = lats[0]
+        lat_max_model = lats[1]
+
+        # now do the same for the obs
+        ds_xr = ds_obs.to_xarray()
+        lon_name = [x for x in ds_xr.coords if "lon" in x]
+        lat_name = [x for x in ds_xr.coords if "lat" in x]
+        lon = ds_xr[lon_name[0]].values
+        lat = ds_xr[lat_name[0]].values
+        lon_max = lon.max()
+        lon_min = lon.min()
+        lat_max = lat.max()
+        lat_min = lat.min()
+
+        lon_min = max(lon_min, lon_min_model)
+        lon_max = min(lon_max, lon_max_model)
+        lat_min = max(lat_min, lat_min_model)
+        lat_max = min(lat_max, lat_max_model)
+
+        lons = [lon_min, lon_max]
+        lats = [lat_min, lat_max]
+
+        if domain != "global":
+            ds_surface.subset(lon=lons, lat=lats)
+            ds_obs.subset(lon=lons, lat=lats)
+
+        if domain == "global":
+            model_extent = get_extent(ds_surface[0])
+            obs_extent = get_extent(ds_obs[0])
+            lon_min = max(model_extent[0], obs_extent[0])
+            lon_max = min(model_extent[1], obs_extent[1])
+            lat_min = max(model_extent[2], obs_extent[2])
+            lat_max = min(model_extent[3], obs_extent[3])
+            # clamp the extent to valid lon/lat limits
+            if lon_min < -180:
+                lon_min = -180
+            if lon_max > 180:
+                lon_max = 180
+            if lat_min < -90:
+                lat_min = -90
+            if lat_max > 90:
+                lat_max = 90
+
+            lons = [lon_min, lon_max]
+            lats = [lat_min, lat_max]
+            ds_surface.subset(lon=lons, lat=lats)
+            ds_obs.subset(lon=lons, lat=lats)
+
+        n1 = ds_obs.contents.npoints[0]
+        n2 = ds_surface.contents.npoints[0]
+
+        if n1 >= n2:
+            ds_obs.regrid(ds_surface, method="nn")
+        else:
+            ds_surface.regrid(ds_obs, method="nn")
+
+        ds_obs.rename({ds_obs.variables[0]: "observation"})
+        ds_surface.merge("time")
+        ds_surface.rename({ds_surface.variables[0]: "model"})
+        ds_surface.run()
+        ds_obs.run()
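Two things happen above: the model and observation extents are intersected (max of the minima, min of the maxima), and the denser of the two grids is regridded onto the coarser with nearest-neighbour, so the comparison never invents detail that only one dataset has. A worked example of the intersection, with hypothetical extents:

```python
# hypothetical extents: (lon_min, lon_max, lat_min, lat_max)
model_extent = (-19.0, 9.0, 41.0, 64.3)
obs_extent = (-25.0, 5.0, 35.0, 70.0)

lon_min = max(model_extent[0], obs_extent[0])  # -19.0
lon_max = min(model_extent[1], obs_extent[1])  # 5.0
lat_min = max(model_extent[2], obs_extent[2])  # 41.0
lat_max = min(model_extent[3], obs_extent[3])  # 64.3
# both datasets are then cropped to [-19, 5] x [41, 64.3]
```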
+
+        # it is possible the years do not overlap, e.g. with satellite Chl
+        if len(ds_surface.times) > 12:
+            years1 = ds_surface.years
+            years2 = ds_obs.years
+            all_years = [x for x in years1 if x in years2]
+            if len(all_years) != len(years1):
+                if len(all_years) != len(years2):
+                    ds_obs.subset(years=all_years)
+                    ds_surface.subset(years=all_years)
+                    ds_obs.run()
+                    ds_surface.run()
+        if len(ds_obs) > 1:
+            ds_obs.merge("time")
+
+        ds_obs.run()
+        ds_surface.run()
+
+        if vv == "doc":
+            ds_obs * 12.011
+            ds_surface + (40 * 12.011)
 
-    if vv in ["temperature"]:
-        if strict:
-            ds_obs.subset(years=years)
-        ds_obs.subset(years=years)
-        ds_obs.tmean(["year", "month"])
-        ds_obs.merge("time")
-        ds_obs.tmean(["year", "month"])
-
-    if vv in ["salinity"] and domain != "nws":
         if vv_source != "woa":
             ds_obs.top()
-        sub_years = [x for x in ds_vertical.years if x in ds_obs.years]
-        ds_obs.subset(years=sub_years)
-        ds_surface.subset(years = sub_years)
-        ds_obs.merge("time")
-        ds_obs.tmean("month")
-        ds_surface.merge("time")
-        ds_surface.tmean("month")
-        ds_obs_annual.subset(years = sub_years)
-        ds_obs_annual.tmean()
-    if vv in ["chlorophyll"] and domain != "nws":
+
+        if vv_source == "woa":
+            levels = ds_obs_annual.levels
+            levels = [x for x in levels if x >= np.min(ds_vertical.levels)]
+            ds1 = ds_vertical.copy()
+            ds1.merge("time")
+            ds1.tmean()
+            ds1.rename({ds1.variables[0]: "model"})
+            if ds_thickness is not None:
+                ds1.vertical_interp(levels, thickness = ds_thickness)
+            else:
+                ds1.vertical_interp(levels, fixed = True)
+            if n1 >= n2:
+                ds_obs_annual.regrid(ds1, method="nn")
+            else:
+                ds1.regrid(ds_obs_annual, method="nn")
+                ds_obs_annual.vertical_interp(levels, fixed = True)
+            ds_obs_annual.set_date(year = 2000, month = 1, day = 1)
+            ds1.set_date(year = 2000, month = 1, day = 1)
+            ds_obs_annual.run()
+            ds1.run()
+            ds_obs_annual.append(ds1)
+            ds_obs_annual.merge("variable")
+
+        if surface == "top":
+            ds_surface.top()
+        else:
+            ds_surface.bottom()
         ds_obs.top()
-        sub_years = [x for x in ds_surface.years if x in ds_obs.years]
-        ds_obs.subset(years=sub_years)
-        ds_surface.subset(years = sub_years)
-        ds_obs.merge("time")
-        ds_obs.tmean("month")
-        ds_surface.merge("time")
-        ds_surface.tmean("month")
 
+        if vv_source == "occci":
+            years = [x for x in ds_obs.years if x in ds_surface.years]
+            years = list(set(years))
 
-    if vv not in ["poc", "temperature"]:
-        if len(ds_obs.times) > 12:
             ds_obs.subset(years=years)
+            ds_obs.tmean(["year", "month"])
+            ds_obs.merge("time")
+            ds_obs.tmean(["year", "month"])
+            ds_surface.subset(years=years)
+            ds_surface.tmean(["year", "month"])
 
-    if vv_source == "occci":
-        ds_obs.subset(variable="chlor_a")
-
-    ds_xr = ds_surface.to_xarray()
-    lon_name = [x for x in ds_xr.coords if "lon" in x]
-    lat_name = [x for x in ds_xr.coords if "lat" in x]
-    lon = ds_xr[lon_name[0]].values
-    lat = ds_xr[lat_name[0]].values
-    lon_max = lon.max()
-    lon_min = lon.min()
-    lat_max = lat.max()
-    lat_min = lat.min()
-
-    # figure out the lon/lat extent in the model
-    lons = [lon_min, lon_max]
-    lats = [lat_min, lat_max]
-    # start of with the raw coords
-    # This will not work with nemo, which outputs the grid incorrectly
-    # so we will check if the step between the first lon/lat and the second lon/lat is
-    # far bigger than the rest. If this is the case, the first should be ignored
-    # get the lon/lat values
-    lon_vals = ds_xr[lon_name[0]].values
-    lat_vals = ds_xr[lat_name[0]].values
-    # make them unique and ordered, and 1d
-    lon_vals = np.unique(lon_vals)
-    # make a list
-    lon_vals = lon_vals.tolist()
-    diff_1 = lon_vals[1] - lon_vals[0]
-    diff_2 = lon_vals[2] - lon_vals[1]
-    diff_3 = lon_vals[3] - lon_vals[2]
-    if diff_1 / diff_2 > 10:
-        if diff_1 / diff_3 > 10:
-            lons[0] = lon_vals[1]
-    # do it for lats
-    lat_vals = np.unique(lat_vals)
-    lat_vals = lat_vals.tolist()
-    diff_1 = lat_vals[1] - lat_vals[0]
-    diff_2 = lat_vals[2] - lat_vals[1]
-    diff_3 = lat_vals[2] - lat_vals[1]
-    if diff_1 / diff_2 > 10:
-        if diff_1 / diff_3 > 10:
-            lats[0] = lat_vals[1]
-
-    lon_min_model = lons[0]
-    lon_max_model = lons[1]
-    lat_min_model = lats[0]
-    lat_max_model = lats[1]
-
-    # now do the same for the obs
-    ds_xr = ds_obs.to_xarray()
-    lon_name = [x for x in ds_xr.coords if "lon" in x]
-    lat_name = [x for x in ds_xr.coords if "lat" in x]
-    lon = ds_xr[lon_name[0]].values
-    lat = ds_xr[lat_name[0]].values
-    lon_max = lon.max()
-    lon_min = lon.min()
-    lat_max = lat.max()
-    lat_min = lat.min()
-
-    lon_min = max(lon_min, lon_min_model)
-    lon_max = min(lon_max, lon_max_model)
-    lat_min = max(lat_min, lat_min_model)
-    lat_max = min(lat_max, lat_max_model)
-
-    lons = [lon_min, lon_max]
-    lats = [lat_min, lat_max]
-
-    if domain != "global":
-        ds_surface.subset(lon=lons, lat=lats)
-        ds_obs.subset(lon=lons, lat=lats)
-
-    if domain == "global":
-        model_extent = get_extent(ds_surface[0])
-        obs_extent = get_extent(ds_obs[0])
-        lon_min = max(model_extent[0], obs_extent[0])
-        lon_max = min(model_extent[1], obs_extent[1])
-        lat_min = max(model_extent[2], obs_extent[2])
-        lat_max = min(model_extent[3], obs_extent[3])
-        # make sure lon_min is greater than -180
-        if lon_min < -180:
-            lon_min = -180
-        if lon_max > 180:
-            lon_max = 180
-        if lat_min < -90:
-            lat_min = -90
-        if lat_max > 90:
-            lat_max = 90
-
-        lons = [lon_min, lon_max]
-        lats = [lat_min, lat_max]
-        ds_surface.subset(lon=lons, lat=lats)
-        ds_obs.subset(lon=lons, lat=lats)
-
-    n1 = ds_obs.contents.npoints[0]
-    n2 = ds_surface.contents.npoints[0]
+        ds_obs.run()
+        ds_surface.run()
+        ds2 = ds_surface.copy()
+        if len(ds_surface.times) == 12:
+            ds_surface.set_year(2000)
+
+        if len(ds_surface.times) > 12:
+            # at this point, we need to identify the years that are common to both
+            ds_times = ds_surface.times
+            ds_years = [x.year for x in ds_times]
+            ds_months = [x.month for x in ds_times]
+
+            df_surface = pd.DataFrame({"year": ds_years, "month": ds_months})
+
+            ds_times = ds_obs.times
+            ds_years = [x.year for x in ds_times]
+            ds_months = [x.month for x in ds_times]
+            df_obs = pd.DataFrame({"year": ds_years, "month": ds_months})
+            sel_years = list(
+                df_surface.merge(df_obs).groupby("year").count()
+                # keep only years where all 12 months are present
+                .query("month == 12")
+                .reset_index()
+                .year
+                .values
+            )
+            ds_surface.subset(years=sel_years)
+            ds_obs.subset(years=sel_years)
 
-    if n1 >= n2:
-        ds_obs.regrid(ds_surface, method="nn")
-    else:
-        ds_surface.regrid(ds_obs, method="nn")
-
-    ds_obs.rename({ds_obs.variables[0]: "observation"})
-    ds_surface.merge("time")
-    ds_surface.rename({ds_surface.variables[0]: "model"})
-    ds_surface.run()
-    ds_obs.run()
-
-    # it is possible the years do not overlap, e.g. with satellite Chl
-    if len(ds_surface.times) > 12:
-        years1 = ds_surface.years
-        years2 = ds_obs.years
-        all_years = [x for x in years1 if x in years2]
-        if len(all_years) != len(years1):
-            if len(all_years) != len(years2):
-                ds_obs.subset(years=all_years)
-                ds_surface.subset(years=all_years)
-                ds_obs.run()
-                ds_surface.run()
-    if len(ds_obs) > 1:
-        ds_obs.merge("time")
-
-    ds_obs.run()
-    ds_surface.run()
-
-    if vv == "doc":
-        ds_obs * 12.011
-        ds_surface + (40 * 12.011)
-
-    if vv_source != "woa":
-        ds_obs.top()
-    if vv_source == "woa":
-        levels = ds_obs_annual.levels
-        levels = [x for x in levels if x >= np.min(ds_vertical.levels)]
-        ds1 = ds_vertical.copy()
-        ds1.merge("time")
-        ds1.tmean()
-        ds1.rename({ds1.variables[0]: "model"})
-        if ds_thickness is not None:
-            ds1.vertical_interp(levels, thickness = ds_thickness)
-        else:
-            ds1.vertical_interp(levels, fixed = True)
-        if n1 >= n2:
-            ds_obs_annual.regrid(ds1, method="nn")
-        else:
-            ds1.regrid(ds_obs_annual, method="nn")
-        ds_obs_annual.vertical_interp(levels, fixed = True)
-        ds_obs_annual.set_date(year = 2000, month = 1, day = 1)
-        ds1.set_date(year = 2000, month = 1, day = 1)
-        ds_obs_annual.run()
-        ds1.run()
-        ds_obs_annual.append(ds1)
-        ds_obs_annual.merge("variable")
-
-    if surface == "top":
-        ds_surface.top()
-    else:
-        ds_surface.bottom()
-    ds_obs.top()
-
-    if vv_source == "occci":
-        years = [x for x in ds_obs.years if x in ds_surface.years]
-        years = list(set(years))
-
-        ds_obs.subset(years=years)
-        ds_obs.tmean(["year", "month"])
-        ds_obs.merge("time")
-        ds_obs.tmean(["year", "month"])
-        ds_surface.subset(years=years)
-        ds_surface.tmean(["year", "month"])
-
-    ds_obs.run()
-    ds_surface.run()
-    ds2 = ds_surface.copy()
-    # ds2.to_nc("foo.nc", zip = True)
-    if len(ds_surface.times) == 12:
-        ds_surface.set_year(2000)
-
-    if len(ds_surface.times) > 12:
-        # at this point, we need to identify the years that are common to both
-        ds_times = ds_surface.times
-        ds_years = [x.year for x in ds_times]
-        ds_months = [x.month for x in ds_times]
-
-        df_surface = pd.DataFrame({"year": ds_years, "month": ds_months})
-
-        ds_times = ds_obs.times
-        ds_years = [x.year for x in ds_times]
-        ds_months = [x.month for x in ds_times]
-        df_obs = pd.DataFrame({"year": ds_years, "month": ds_months})
-        sel_years = list(
-            df_surface.merge(df_obs).groupby("year").count()
-            # only 12
-            .query("month == 12")
-            .reset_index()
-            .year
-            .values
-        )
-        ds_surface.subset(years=sel_years)
-        ds_obs.subset(years=sel_years)
+
+        ds_obs.append(ds_surface)
+
+        if len(ds_surface.times) > 12:
+            ds_obs.merge("variable", match=["year", "month"])
+        else:
+            ds_obs.merge("variable", match="month")
+        ds_obs.nco_command(f"ncatted -O -a start_year,global,o,c,{start_year}")
+        ds_obs.nco_command(f"ncatted -O -a end_year,global,o,c,{end_year}")
+        ds_obs.set_fill(-9999)
+        ds_mask = ds_obs.copy()
+        ds_mask.assign(mask_these=lambda x: -1e30 * ((isnan(x.observation) + isnan(x.model)) > 0), drop=True)
+        ds_mask.as_missing([-1e40, -1e20])
+        ds_obs + ds_mask
+
+        # fix the co2 flux units
+        if vv == "co2flux":
+            ds_obs.assign(model=lambda x: x.model * -0.365)
+            ds_obs.set_units({"model": "mol/m2/yr"})
+            ds_obs.set_units({"observation": "mol/m2/yr"})
+
+        # figure out if the temperature is in degrees C
+        if vv == "temperature":
+            if ds_obs.to_xarray().model.max() > 100:
+                ds_obs.assign(model=lambda x: x.model - 273.15)
+            if ds_obs.to_xarray().observation.max() > 100:
+                ds_obs.assign(observation=lambda x: x.observation - 273.15)
+            # set the units
+            ds_obs.set_units({"model": "degrees C"})
+            ds_obs.set_units({"observation": "degrees C"})
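The `> 100` checks above are a unit heuristic: surface temperatures plausibly never exceed 100 degrees C, so larger values are taken to be Kelvin. The same check in isolation — a sketch assuming an xarray dataset holding the matched variable:

```python
import xarray as xr


def to_celsius_if_kelvin(ds: xr.Dataset, var: str) -> xr.Dataset:
    # values above 100 are assumed to be Kelvin and shifted to degrees C
    if float(ds[var].max()) > 100:
        ds[var] = ds[var] - 273.15
    return ds
```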
+            # now, we need to exclude data outside the lon/lat range with data
+
+        out_file = f"matched/gridded/{domain}/{vv}/{vv_source}_{vv}_surface.nc"
+        # check directory exists for out_file
+        if not os.path.exists(os.path.dirname(out_file)):
+            os.makedirs(os.path.dirname(out_file))
+        # remove the file if it exists
+        if os.path.exists(out_file):
+            os.remove(out_file)
+        ds_obs.set_precision("F32")
+        if vv == "salinity" and domain != "nws":
+            ds_obs.tmean("month")
+        ds_surface = ds_obs.copy()
+        if vv_source == "woa":
+            ds_surface.top()
+        if lon_lim is not None and lat_lim is not None:
+            ds_surface.subset(lon=lon_lim, lat=lat_lim)
+        ds_surface.run()
+        regrid_later = False
+        if is_latlon(ds_surface[0]) is False:
+            extent = get_extent(ds_surface[0])
+            lons = [extent[0], extent[1]]
+            lats = [extent[2], extent[3]]
+            resolution = get_resolution(ds_surface[0])
+            lon_res = resolution[0]
+            lat_res = resolution[1]
+            ds_surface.to_latlon(lon=lons, lat=lats, res=[lon_res, lat_res], method = "nn")
+            regrid_later = True
 
-    ds_obs.append(ds_surface)
-
-    if len(ds_surface.times) > 12:
-        ds_obs.merge("variable", match=["year", "month"])
-    else:
-        ds_obs.merge("variable", match="month")
-    ds_obs.nco_command(f"ncatted -O -a start_year,global,o,c,{start_year}")
-    ds_obs.nco_command(f"ncatted -O -a end_year,global,o,c,{end_year}")
-    ds_obs.set_fill(-9999)
-    ds_mask = ds_obs.copy()
-    ds_mask.assign( mask_these=lambda x: -1e30 * ((isnan(x.observation) + isnan(x.model)) > 0), drop=True,)
-    ds_mask.as_missing([-1e40, -1e20])
-    ds_obs + ds_mask
-
-    # fix the co2 flux units
-    if vv == "co2flux":
-        ds_obs.assign(model=lambda x: x.model * -0.365)
-        ds_obs.set_units({"model": "mol/m2/yr"})
-        ds_obs.set_units({"observation": "mol/m2/yr"})
-
-    # figure out if the temperature is in degrees C
-    if vv == "temperature":
-        if ds_obs.to_xarray().model.max() > 100:
-            ds_obs.assign(model=lambda x: x.model - 273.15)
-        if ds_obs.to_xarray().observation.max() > 100:
-            ds_obs.assign(observation=lambda x: x.observation - 273.15)
-        # set the units
-        ds_obs.set_units({"model": "degrees C"})
-        ds_obs.set_units({"observation": "degrees C"})
-    # # now, we need to exclude data outside the lon/lat range with data
-
-    out_file = f"matched/gridded/{domain}/{vv}/{vv_source}_{vv}_surface.nc"
-    # check directory exists for out_file
-    if not os.path.exists(os.path.dirname(out_file)):
-        os.makedirs(os.path.dirname(out_file))
-    # remove the file if it exists
-    if os.path.exists(out_file):
-        os.remove(out_file)
-    ds_obs.set_precision("F32")
-    if vv == "salinity" and domain != "nws":
-        ds_obs.tmean("month")
-    ds_surface = ds_obs.copy()
-    if vv_source == "woa":
-        ds_surface.top()
-    if lon_lim is not None and lat_lim is not None:
-        ds_surface.subset(lon=lon_lim, lat=lat_lim)
-
-
-    ds_surface.run()
-
-    regrid_later = False
-    if is_latlon(ds_surface[0]) is False:
-        extent = get_extent(ds_surface[0])
-        lons = [extent[0], extent[1]]
-        lats = [extent[2], extent[3]]
-        resolution = get_resolution(ds_surface[0])
-        lon_res = resolution[0]
-        lat_res = resolution[1]
-        ds_surface.to_latlon(lon=lons, lat=lats, res=[lon_res, lat_res], method = "nn")
-        regrid_later = True
-
-
-    ds_surface.to_nc(out_file, zip=True, overwrite=True)
+        ds_surface.to_nc(out_file, zip=True, overwrite=True)
+
+    tidy_warnings(w)
 
     # now do the masking etc.
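For orientation, the file written above pairs the model and observation fields on a common grid, with the matched year range recorded as global attributes. A usage sketch with nctoolkit — the variable values here are hypothetical examples; the path pattern comes from `out_file` above:

```python
import nctoolkit as nc

domain, vv, vv_source = "nws", "chlorophyll", "occci"  # hypothetical example values
ds = nc.open_data(f"matched/gridded/{domain}/{vv}/{vv_source}_{vv}_surface.nc")
print(ds.variables)  # expect the paired "model" and "observation" variables
```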