diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index dbebfe8f6e..3f8fe65065 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -33,6 +33,17 @@ # Change characteristics - Is this a breaking change (a change in existing functionality)? YES/NO - Does this change require a documentation update? YES/NO +- Does this change require an update to any of the following submodules? YES/NO (If YES, please add a link to any PRs that are pending.) + - [ ] EMC verif-global + - [ ] GDAS + - [ ] GFS-utils + - [ ] GSI + - [ ] GSI-monitor + - [ ] GSI-utils + - [ ] UFS-utils + - [ ] UFS-weather-model + - [ ] wxflow + # How has this been tested? _dir. For all other directories, the + names will follow --> _dir. + """ + + rel_path_dict = {} + for key, value in self.task_config.items(): + if isinstance(value, str): + if root_path in value: + rel_path = value.replace(root_path, "") + rel_key = (key[4:] if key.startswith("COMIN_") else key).lower() + "_dir" + rel_path_dict[rel_key] = rel_path + + return rel_path_dict + + @staticmethod + @logit(logger) + def _construct_arcdir_set(arcdir_j2yaml, arch_dict) -> Dict: + """Construct the list of files to send to the ARCDIR and Fit2Obs + directories from a template. + + TODO Copying Fit2Obs data doesn't belong in archiving should be + moved elsewhere. + + Parameters + ---------- + arcdir_j2yaml: str + The filename of the ARCDIR jinja template to parse. + + arch_dict: Dict + The context dictionary to parse arcdir_j2yaml with. + + Return + ------ + arcdir_set : Dict + FileHandler dictionary (i.e. with top level "mkdir" and "copy" keys) + containing all directories that need to be created and what data + files need to be copied to the ARCDIR and the Fit2Obs directory. + """ + + # Get the FileHandler dictionary for creating directories and copying + # to the ARCDIR and VFYARC directories. + arcdir_set = parse_j2yaml(arcdir_j2yaml, + arch_dict, + allow_missing=True) + + return arcdir_set + + @staticmethod + @logit(logger) + def _rename_cyclone_expt(arch_dict) -> None: + + # Rename the experiment in the tracker files from "AVNO" to the + # first 4 letters of PSLOT. + pslot4 = arch_dict.PSLOT.upper() + if len(arch_dict.PSLOT) > 4: + pslot4 = arch_dict.PSLOT[0:4].upper() + + track_dir_in = arch_dict.COMIN_ATMOS_TRACK + track_dir_out = arch_dict.COMOUT_ATMOS_TRACK + run = arch_dict.RUN + cycle_HH = strftime(arch_dict.current_cycle, "%H") + + if run == "gfs": + in_track_file = (track_dir_in + "/avno.t" + + cycle_HH + "z.cycle.trackatcfunix") + in_track_p_file = (track_dir_in + "/avnop.t" + + cycle_HH + "z.cycle.trackatcfunixp") + elif run == "gdas": + in_track_file = (track_dir_in + "/gdas.t" + + cycle_HH + "z.cycle.trackatcfunix") + in_track_p_file = (track_dir_in + "/gdasp.t" + + cycle_HH + "z.cycle.trackatcfunixp") + + if not os.path.isfile(in_track_file): + # Do not attempt to archive the outputs + return + + out_track_file = track_dir_out + "/atcfunix." + run + "." + to_YMDH(arch_dict.current_cycle) + out_track_p_file = track_dir_out + "/atcfunixp." + run + "." + to_YMDH(arch_dict.current_cycle) + + def replace_string_from_to_file(filename_in, filename_out, search_str, replace_str): + + """Write a new file from the contents of an input file while searching + and replacing ASCII strings. To prevent partial file creation, a + temporary file is created and moved to the final location only + after the search/replace is finished. 
+ + Parameters + ---------- + filename_in : str + Input filename + + filename_out : str + Output filename + + search_str : str + ASCII string to search for + + replace_str : str + ASCII string to replace the search_str with + """ + with open(filename_in) as old_file: + lines = old_file.readlines() + + out_lines = [line.replace(search_str, replace_str) for line in lines] + + with open("/tmp/track_file", "w") as new_file: + new_file.writelines(out_lines) + + shutil.move("/tmp/track_file", filename_out) + + replace_string_from_to_file(in_track_file, out_track_file, "AVNO", pslot4) + replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4) + + return diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index da41574fc9..4e9d37335c 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -11,7 +11,7 @@ FileHandler, add_to_datetime, to_fv3time, to_timedelta, to_YMDH, chdir, - parse_yamltmpl, parse_j2yaml, save_as_yaml, + parse_j2yaml, save_as_yaml, logit, Executable, WorkflowException) @@ -28,32 +28,35 @@ class AtmAnalysis(Analysis): def __init__(self, config): super().__init__(config) - _res = int(self.config.CASE[1:]) - _res_anl = int(self.config.CASE_ANL[1:]) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") + _res = int(self.task_config.CASE[1:]) + _res_anl = int(self.task_config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) + _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { 'npx_ges': _res + 1, 'npy_ges': _res + 1, - 'npz_ges': self.config.LEVS - 1, - 'npz': self.config.LEVS - 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, 'npx_anl': _res_anl + 1, 'npy_anl': _res_anl + 1, - 'npz_anl': self.config.LEVS - 1, + 'npz_anl': self.task_config.LEVS - 1, 'ATM_WINDOW_BEGIN': _window_begin, - 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", - 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H", + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'jedi_yaml': _jedi_yaml, + 'atm_obsdatain_path': f"{self.task_config.DATA}/obs/", + 'atm_obsdataout_path': f"{self.task_config.DATA}/diags/", + 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) - # task_config is everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def initialize(self: Analysis) -> None: @@ -71,41 +74,38 @@ def initialize(self: Analysis) -> None: super().initialize() # stage CRTM fix
files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage static background error files, otherwise it will assume ID matrix - logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") - FileHandler(self.get_berror_dict(self.task_config)).sync() + logger.info(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") + if self.task_config.STATICB_TYPE != 'identity': + berror_staging_dict = parse_j2yaml(self.task_config.BERROR_STAGING_YAML, self.task_config) + else: + berror_staging_dict = {} + FileHandler(berror_staging_dict).sync() # stage ensemble files for use in hybrid background error if self.task_config.DOHYBVAR: logger.debug(f"Stage ensemble files for DOHYBVAR {self.task_config.DOHYBVAR}") - localconf = AttrDict() - keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', - 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] - for key in keys: - localconf[key] = self.task_config[key] - localconf.RUN = 'enkf' + self.task_config.RUN - localconf.dirname = 'ens' - FileHandler(self.get_fv3ens_dict(localconf)).sync() + fv3ens_staging_dict = parse_j2yaml(self.task_config.FV3ENS_STAGING_YAML, self.task_config) + FileHandler(fv3ens_staging_dict).sync() # stage backgrounds - FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() + logger.info(f"Staging background files from {self.task_config.VAR_BKG_STAGING_YAML}") + bkg_staging_dict = parse_j2yaml(self.task_config.VAR_BKG_STAGING_YAML, self.task_config) + FileHandler(bkg_staging_dict).sync() # generate variational YAML file - logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") - varda_yaml = parse_j2yaml(self.task_config.ATMVARYAML, self.task_config) - save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -116,14 +116,16 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': newdirs}).sync() @logit(logger) - def execute(self: Analysis) -> None: + def variational(self: Analysis) -> None: chdir(self.task_config.DATA) - exec_cmd = Executable(self.task_config.APRUN_ATMANL) - exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') + exec_cmd = Executable(self.task_config.APRUN_ATMANLVAR) + exec_name = os.path.join(self.task_config.DATA, 'gdas.x') exec_cmd.add_default_arg(exec_name) - 
exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('variational') + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -135,6 +137,31 @@ def execute(self: Analysis) -> None: pass + @logit(logger) + def init_fv3_increment(self: Analysis) -> None: + # Setup JEDI YAML file + self.task_config.jedi_yaml = os.path.join(self.task_config.DATA, + f"{self.task_config.JCB_ALGO}.yaml") + save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) + + # Link JEDI executable to run directory + self.task_config.jedi_exe = self.link_jediexe() + + @logit(logger) + def fv3_increment(self: Analysis) -> None: + # Run executable + exec_cmd = Executable(self.task_config.APRUN_ATMANLFV3INC) + exec_cmd.add_default_arg(self.task_config.jedi_exe) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + @logit(logger) def finalize(self: Analysis) -> None: """Finalize a global atm analysis @@ -152,7 +179,7 @@ def finalize(self: Analysis) -> None: atmstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.APREFIX}atmstat") # get list of diag files to put in tarball - diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4')) + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc')) logger.info(f"Compressing {len(diags)} diag files to {atmstat}.gz") @@ -170,9 +197,9 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") - src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") + logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") + src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") logger.debug(f"Copying {src} to {dest}") yaml_copy = { 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS], @@ -212,235 +239,17 @@ def finalize(self: Analysis) -> None: } FileHandler(bias_copy).sync() - # Create UFS model readable atm increment file from UFS-DA atm increment - logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") - self.jedi2fv3inc() + # Copy FV3 atm increment to comrot directory + logger.info("Copy UFS model readable atm increment file") + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc') + logger.debug(f"Copying {src} to {dest}") + inc_copy = { + 'copy': [[src, dest]] + } + FileHandler(inc_copy).sync() def clean(self): super().clean() - - @logit(logger) - def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a 
dictionary of model background files to copy - - This method constructs a dictionary of FV3 restart files (coupler, core, tracer) - that are needed for global atm DA and returns said dictionary for use by the FileHandler class. - - Parameters - ---------- - task_config: Dict - a dictionary containing all of the configuration needed for the task - - Returns - ---------- - bkg_dict: Dict - a dictionary containing the list of model background files to copy for FileHandler - """ - # NOTE for now this is FV3 restart files and just assumed to be fh006 - - # get FV3 restart files, this will be a lot simpler when using history files - rst_dir = os.path.join(task_config.COM_ATMOS_RESTART_PREV) # for now, option later? - run_dir = os.path.join(task_config.DATA, 'bkg') - - # Start accumulating list of background files to copy - bkglist = [] - - # atm DA needs coupler - basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - # atm DA needs core, srf_wnd, tracer, phy_data, sfc_data - for ftype in ['core', 'srf_wnd', 'tracer']: - template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc' - for itile in range(1, task_config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - for ftype in ['phy_data', 'sfc_data']: - template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' - for itile in range(1, task_config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - bkg_dict = { - 'mkdir': [run_dir], - 'copy': bkglist, - } - return bkg_dict - - @logit(logger) - def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of background error files to copy - - This method will construct a dictionary of either bump of gsibec background - error files for global atm DA and return said dictionary for use by the - FileHandler class. - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - - Returns - ---------- - berror_dict: Dict - a dictionary containing the list of atm background error files to copy for FileHandler - """ - SUPPORTED_BERROR_STATIC_MAP = {'identity': self._get_berror_dict_identity, - 'bump': self._get_berror_dict_bump, - 'gsibec': self._get_berror_dict_gsibec} - - try: - berror_dict = SUPPORTED_BERROR_STATIC_MAP[config.STATICB_TYPE](config) - except KeyError: - raise KeyError(f"{config.STATICB_TYPE} is not a supported background error type.\n" + - f"Currently supported background error types are:\n" + - f'{" | ".join(SUPPORTED_BERROR_STATIC_MAP.keys())}') - - return berror_dict - - @staticmethod - @logit(logger) - def _get_berror_dict_identity(config: Dict[str, Any]) -> Dict[str, List[str]]: - """Identity BE does not need any files for staging. - - This is a private method and should not be accessed directly. - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - Returns - ---------- - berror_dict: Dict - Empty dictionary [identity BE needs not files to stage] - """ - logger.info(f"Identity background error does not use staged files. 
Return empty dictionary") - return {} - - @staticmethod - @logit(logger) - def _get_berror_dict_bump(config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of atm bump background error files to copy - - This method will construct a dictionary of atm bump background error - files for global atm DA and return said dictionary to the parent - - This is a private method and should not be accessed directly. - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - - Returns - ---------- - berror_dict: Dict - a dictionary of atm bump background error files to copy for FileHandler - """ - # BUMP atm static-B needs nicas, cor_rh, cor_rv and stddev files. - b_dir = config.BERROR_DATA_DIR - b_datestr = to_fv3time(config.BERROR_DATE) - berror_list = [] - for ftype in ['cor_rh', 'cor_rv', 'stddev']: - coupler = f'{b_datestr}.{ftype}.coupler.res' - berror_list.append([ - os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) - ]) - - template = '{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): - tracer = template.format(tilenum=itile) - berror_list.append([ - os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) - ]) - - nproc = config.ntiles * config.layout_x * config.layout_y - for nn in range(1, nproc + 1): - berror_list.append([ - os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), - os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') - ]) - - # create dictionary of background error files to stage - berror_dict = { - 'mkdir': [os.path.join(config.DATA, 'berror')], - 'copy': berror_list, - } - return berror_dict - - @staticmethod - @logit(logger) - def _get_berror_dict_gsibec(config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of atm gsibec background error files to copy - - This method will construct a dictionary of atm gsibec background error - files for global atm DA and return said dictionary to the parent - - This is a private method and should not be accessed directly. - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - - Returns - ---------- - berror_dict: Dict - a dictionary of atm gsibec background error files to copy for FileHandler - """ - # GSI atm static-B needs namelist and coefficient files. - b_dir = os.path.join(config.HOMEgfs, 'fix', 'gdas', 'gsibec', config.CASE_ANL) - berror_list = [] - for ftype in ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4']: - berror_list.append([ - os.path.join(b_dir, ftype), - os.path.join(config.DATA, 'berror', ftype) - ]) - - # create dictionary of background error files to stage - berror_dict = { - 'mkdir': [os.path.join(config.DATA, 'berror')], - 'copy': berror_list, - } - return berror_dict - - @logit(logger) - def jedi2fv3inc(self: Analysis) -> None: - """Generate UFS model readable analysis increment - - This method writes a UFS DA atm increment in UFS model readable format. - This includes: - - write UFS-DA atm increments using variable names expected by UFS model - - compute and write delp increment - - compute and write hydrostatic delz increment - - Please note that some of these steps are temporary and will be modified - once the modle is able to directly read atm increments. 
- - """ - # Select the atm guess file based on the analysis and background resolutions - # Fields from the atm guess are used to compute the delp and delz increments - case_anl = int(self.task_config.CASE_ANL[1:]) - case = int(self.task_config.CASE[1:]) - - file = f"{self.task_config.GPREFIX}" + "atmf006" + f"{'' if case_anl == case else '.ensres'}" + ".nc" - atmges_fv3 = os.path.join(self.task_config.COM_ATMOS_HISTORY_PREV, file) - - # Set the path/name to the input UFS-DA atm increment file (atminc_jedi) - # and the output UFS model atm increment file (atminc_fv3) - cdate = to_fv3time(self.task_config.current_cycle) - cdate_inc = cdate.replace('.', '_') - atminc_jedi = os.path.join(self.task_config.DATA, 'anl', f'atminc.{cdate_inc}z.nc4') - atminc_fv3 = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") - - # Reference the python script which does the actual work - incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') - - # Execute incpy to create the UFS model atm increment file - cmd = Executable(incpy) - cmd.add_default_arg(atmges_fv3) - cmd.add_default_arg(atminc_jedi) - cmd.add_default_arg(atminc_fv3) - logger.debug(f"Executing {cmd}") - cmd(output='stdout', error='stderr') diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 9cf84c07c7..bd5112050e 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -11,7 +11,7 @@ FileHandler, add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD, chdir, - parse_yamltmpl, parse_j2yaml, save_as_yaml, + parse_j2yaml, save_as_yaml, logit, Executable, WorkflowException, @@ -29,28 +29,31 @@ class AtmEnsAnalysis(Analysis): def __init__(self, config): super().__init__(config) - _res = int(self.config.CASE_ENS[1:]) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + _res = int(self.task_config.CASE_ENS[1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) + _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { 'npx_ges': _res + 1, 'npy_ges': _res + 1, - 'npz_ges': self.config.LEVS - 1, - 'npz': self.config.LEVS - 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, 'ATM_WINDOW_BEGIN': _window_begin, - 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", - 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", - 'fv3jedi_yaml': _fv3jedi_yaml, + 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H", + 'OPREFIX': f"{self.task_config.EUPD_CYC}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'jedi_yaml': _jedi_yaml, + 'atm_obsdatain_path': f"./obs/", + 'atm_obsdataout_path': f"./diags/", + 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) - # task_config is 
everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def initialize(self: Analysis) -> None: @@ -74,54 +77,25 @@ def initialize(self: Analysis) -> None: """ super().initialize() - # Make member directories in DATA for background and in DATA and ROTDIR for analysis files - # create template dictionary for output member analysis directories - template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL - tmpl_inc_dict = { - 'ROTDIR': self.task_config.ROTDIR, - 'RUN': self.task_config.RUN, - 'YMD': to_YMD(self.task_config.current_cycle), - 'HH': self.task_config.current_cycle.strftime('%H') - } - dirlist = [] - for imem in range(1, self.task_config.NMEM_ENS + 1): - dirlist.append(os.path.join(self.task_config.DATA, 'bkg', f'mem{imem:03d}')) - dirlist.append(os.path.join(self.task_config.DATA, 'anl', f'mem{imem:03d}')) - - # create output directory path for member analysis - tmpl_inc_dict['MEMDIR'] = f"mem{imem:03d}" - incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) - dirlist.append(incdir) - - FileHandler({'mkdir': dirlist}).sync() - # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') - logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) + logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") + crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') - logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds - logger.debug(f"Stage ensemble member background files") - localconf = AttrDict() - keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', - 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] - for key in keys: - localconf[key] = self.task_config[key] - localconf.dirname = 'bkg' - FileHandler(self.get_fv3ens_dict(localconf)).sync() + logger.info(f"Stage ensemble member background files") + bkg_staging_dict = parse_j2yaml(self.task_config.LGETKF_BKG_STAGING_YAML, self.task_config) + FileHandler(bkg_staging_dict).sync() # generate ensemble da YAML file - logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") - ensda_yaml = parse_j2yaml(self.task_config.ATMENSYAML, self.task_config) - save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml) - logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}") + logger.debug(f"Generate ensemble da YAML file: {self.task_config.jedi_yaml}") + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote ensemble da YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") @@ -132,7 +106,7 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': newdirs}).sync() @logit(logger) - def 
execute(self: Analysis) -> None: + def letkf(self: Analysis) -> None: """Execute a global atmens analysis This method will execute a global atmens analysis using JEDI. @@ -150,10 +124,13 @@ def execute(self: Analysis) -> None: """ chdir(self.task_config.DATA) - exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) - exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') + exec_cmd = Executable(self.task_config.APRUN_ATMENSANLLETKF) + exec_name = os.path.join(self.task_config.DATA, 'gdas.x') + exec_cmd.add_default_arg(exec_name) - exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('localensembleda') + exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: logger.debug(f"Executing {exec_cmd}") @@ -165,6 +142,31 @@ def execute(self: Analysis) -> None: pass + @logit(logger) + def init_fv3_increment(self: Analysis) -> None: + # Setup JEDI YAML file + self.task_config.jedi_yaml = os.path.join(self.task_config.DATA, + f"{self.task_config.JCB_ALGO}.yaml") + save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) + + # Link JEDI executable to run directory + self.task_config.jedi_exe = self.link_jediexe() + + @logit(logger) + def fv3_increment(self: Analysis) -> None: + # Run executable + exec_cmd = Executable(self.task_config.APRUN_ATMENSANLFV3INC) + exec_cmd.add_default_arg(self.task_config.jedi_exe) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + @logit(logger) def finalize(self: Analysis) -> None: """Finalize a global atmens analysis @@ -188,7 +190,7 @@ def finalize(self: Analysis) -> None: atmensstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.APREFIX}atmensstat") # get list of diag files to put in tarball - diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4')) + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc')) logger.info(f"Compressing {len(diags)} diag files to {atmensstat}.gz") @@ -206,9 +208,9 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") - src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") + logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") + src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") logger.debug(f"Copying {src} to {dest}") yaml_copy = { 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS_ENS], @@ -216,42 +218,6 @@ def finalize(self: Analysis) -> None: } FileHandler(yaml_copy).sync() - # Create UFS model readable atm increment file from UFS-DA atm increment - logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") - self.jedi2fv3inc() - - def clean(self): - super().clean() - - @logit(logger) - def 
jedi2fv3inc(self: Analysis) -> None: - """Generate UFS model readable analysis increment - - This method writes a UFS DA atm increment in UFS model readable format. - This includes: - - write UFS-DA atm increments using variable names expected by UFS model - - compute and write delp increment - - compute and write hydrostatic delz increment - - Please note that some of these steps are temporary and will be modified - once the modle is able to directly read atm increments. - - Parameters - ---------- - Analysis: parent class for GDAS task - - Returns - ---------- - None - """ - # Select the atm guess file based on the analysis and background resolutions - # Fields from the atm guess are used to compute the delp and delz increments - cdate = to_fv3time(self.task_config.current_cycle) - cdate_inc = cdate.replace('.', '_') - - # Reference the python script which does the actual work - incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') - # create template dictionaries template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL tmpl_inc_dict = { @@ -261,14 +227,10 @@ def jedi2fv3inc(self: Analysis) -> None: 'HH': self.task_config.current_cycle.strftime('%H') } - template_ges = self.task_config.COM_ATMOS_HISTORY_TMPL - tmpl_ges_dict = { - 'ROTDIR': self.task_config.ROTDIR, - 'RUN': self.task_config.RUN, - 'YMD': to_YMD(self.task_config.previous_cycle), - 'HH': self.task_config.previous_cycle.strftime('%H') - } - + # copy FV3 atm increment to comrot directory + logger.info("Copy UFS model readable atm increment file") + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') # loop over ensemble members for imem in range(1, self.task_config.NMEM_ENS + 1): memchar = f"mem{imem:03d}" @@ -276,20 +238,15 @@ def jedi2fv3inc(self: Analysis) -> None: # create output path for member analysis increment tmpl_inc_dict['MEMDIR'] = memchar incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) + src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4") + dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc") - # rewrite UFS-DA atmens increments - tmpl_ges_dict['MEMDIR'] = memchar - gesdir = Template.substitute_structure(template_ges, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_ges_dict.get) - atmges_fv3 = os.path.join(gesdir, f"{self.task_config.CDUMP}.t{self.task_config.previous_cycle.hour:02d}z.atmf006.nc") - atminc_jedi = os.path.join(self.task_config.DATA, 'anl', memchar, f'atminc.{cdate_inc}z.nc4') - atminc_fv3 = os.path.join(incdir, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") - - # Execute incpy to create the UFS model atm increment file - # TODO: use MPMD or parallelize with mpi4py - # See https://github.com/NOAA-EMC/global-workflow/pull/1373#discussion_r1173060656 - cmd = Executable(incpy) - cmd.add_default_arg(atmges_fv3) - cmd.add_default_arg(atminc_jedi) - cmd.add_default_arg(atminc_fv3) - logger.debug(f"Executing {cmd}") - cmd(output='stdout', error='stderr') + # copy increment + logger.debug(f"Copying {src} to {dest}") + inc_copy = { + 'copy': [[src, dest]] + } + FileHandler(inc_copy).sync() + + def clean(self): + super().clean() diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py new file mode 100644 index 0000000000..9d64e621c9 --- /dev/null +++ b/ush/python/pygfs/task/marine_bmat.py @@ -0,0 +1,350 @@ +#!/usr/bin/env python3 + +import os +import glob +from logging import 
getLogger +import pygfs.utils.marine_da_utils as mdau + +from wxflow import (AttrDict, + FileHandler, + add_to_datetime, to_timedelta, + chdir, + parse_j2yaml, + logit, + Executable, + Task) + +logger = getLogger(__name__.split('.')[-1]) + + +class MarineBMat(Task): + """ + Class for global marine B-matrix tasks + """ + @logit(logger, name="MarineBMat") + def __init__(self, config): + super().__init__(config) + _home_gdas = os.path.join(self.task_config.HOMEgfs, 'sorc', 'gdas.cd') + _calc_scale_exec = os.path.join(self.task_config.HOMEgfs, 'ush', 'soca', 'calc_scales.py') + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) + _window_end = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.assim_freq}H") / 2) + + # compute the relative path from self.task_config.DATA to self.task_config.DATAenspert + if self.task_config.NMEM_ENS > 0: + _enspert_relpath = os.path.relpath(self.task_config.DATAenspert, self.task_config.DATA) + else: + _enspert_relpath = None + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'HOMEgdas': _home_gdas, + 'MARINE_WINDOW_BEGIN': _window_begin, + 'MARINE_WINDOW_END': _window_end, + 'MARINE_WINDOW_MIDDLE': self.task_config.current_cycle, + 'BERROR_YAML_DIR': os.path.join(_home_gdas, 'parm', 'soca', 'berror'), + 'GRID_GEN_YAML': os.path.join(_home_gdas, 'parm', 'soca', 'gridgen', 'gridgen.yaml'), + 'MARINE_ENSDA_STAGE_BKG_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'ensda', 'stage_ens_mem.yaml.j2'), + 'MARINE_DET_STAGE_BKG_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'soca_det_bkg_stage.yaml.j2'), + 'ENSPERT_RELPATH': _enspert_relpath, + 'CALC_SCALE_EXEC': _calc_scale_exec, + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + } + ) + + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) + + @logit(logger) + def initialize(self: Task) -> None: + """Initialize a global B-matrix + + This method will initialize a global B-Matrix. 
+ This includes: + - staging the deterministic backgrounds (middle of window) + - staging SOCA fix files + - staging static ensemble members (optional) + - staging ensemble members (optional) + - generating the YAML files for the JEDI and GDASApp executables + - creating output directories + """ + super().initialize() + + # stage fix files + logger.info(f"Staging SOCA fix files from {self.task_config.SOCA_INPUT_FIX_DIR}") + soca_fix_list = parse_j2yaml(self.task_config.SOCA_FIX_YAML_TMPL, self.task_config) + FileHandler(soca_fix_list).sync() + + # prepare the MOM6 input.nml + mdau.prep_input_nml(self.task_config) + + # stage backgrounds + # TODO(G): Check ocean backgrounds dates for consistency + bkg_list = parse_j2yaml(self.task_config.MARINE_DET_STAGE_BKG_YAML_TMPL, self.task_config) + FileHandler(bkg_list).sync() + for cice_fname in ['./INPUT/cice.res.nc', './bkg/ice.bkg.f006.nc', './bkg/ice.bkg.f009.nc']: + mdau.cice_hist2fms(cice_fname, cice_fname) + + # stage the grid generation yaml + FileHandler({'copy': [[self.task_config.GRID_GEN_YAML, + os.path.join(self.task_config.DATA, 'gridgen.yaml')]]}).sync() + + # generate the variance partitioning YAML file + logger.debug("Generate variance partitioning YAML file") + diagb_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_diagb.yaml.j2'), + data=self.task_config) + diagb_config.save(os.path.join(self.task_config.DATA, 'soca_diagb.yaml')) + + # generate the vertical decorrelation scale YAML file + logger.debug("Generate the vertical correlation scale YAML file") + vtscales_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_vtscales.yaml.j2'), + data=self.task_config) + vtscales_config.save(os.path.join(self.task_config.DATA, 'soca_vtscales.yaml')) + + # generate vertical diffusion scale YAML file + logger.debug("Generate vertical diffusion YAML file") + diffvz_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_parameters_diffusion_vt.yaml.j2'), + data=self.task_config) + diffvz_config.save(os.path.join(self.task_config.DATA, 'soca_parameters_diffusion_vt.yaml')) + + # generate the horizontal diffusion YAML files + if True: # TODO(G): skip this section once we have optimized the scales + # stage the correlation scale configuration + logger.debug("Generate correlation scale YAML file") + FileHandler({'copy': [[os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_setcorscales.yaml'), + os.path.join(self.task_config.DATA, 'soca_setcorscales.yaml')]]}).sync() + + # generate horizontal diffusion scale YAML file + logger.debug("Generate horizontal diffusion scale YAML file") + diffhz_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_parameters_diffusion_hz.yaml.j2'), + data=self.task_config) + diffhz_config.save(os.path.join(self.task_config.DATA, 'soca_parameters_diffusion_hz.yaml')) + + # hybrid EnVAR case + if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + # stage ensemble membersfiles for use in hybrid background error + logger.debug(f"Stage ensemble members for the hybrid background error") + mdau.stage_ens_mem(self.task_config) + + # generate ensemble recentering/rebalancing YAML file + logger.debug("Generate ensemble recentering YAML file") + ensrecenter_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_ensb.yaml.j2'), + data=self.task_config) + ensrecenter_config.save(os.path.join(self.task_config.DATA, 'soca_ensb.yaml')) + + # generate ensemble weights YAML file + 
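# (these 3D weights set, per variable, how much of the hybrid background error comes from the static B and how much from the ensemble)
+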
logger.debug("Generate ensemble recentering YAML file: {self.task_config.abcd_yaml}") + hybridweights_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_ensweights.yaml.j2'), + data=self.task_config) + hybridweights_config.save(os.path.join(self.task_config.DATA, 'soca_ensweights.yaml')) + + # need output dir for ensemble perturbations and static B-matrix + logger.debug("Create empty diagb directories to receive output from executables") + FileHandler({'mkdir': [os.path.join(self.task_config.DATA, 'diagb')]}).sync() + + @logit(logger) + def gridgen(self: Task) -> None: + # link gdas_soca_gridgen.x + mdau.link_executable(self.task_config, 'gdas_soca_gridgen.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_gridgen.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('gridgen.yaml') + + mdau.run(exec_cmd) + + @logit(logger) + def variance_partitioning(self: Task) -> None: + # link the variance partitioning executable, gdas_soca_diagb.x + mdau.link_executable(self.task_config, 'gdas_soca_diagb.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_diagb.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_diagb.yaml') + + mdau.run(exec_cmd) + + @logit(logger) + def horizontal_diffusion(self: Task) -> None: + """Generate the horizontal diffusion coefficients + """ + # link the executable that computes the correlation scales, gdas_soca_setcorscales.x, + # and prepare the command to run it + mdau.link_executable(self.task_config, 'gdas_soca_setcorscales.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_setcorscales.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_setcorscales.yaml') + + # create a files containing the correlation scales + mdau.run(exec_cmd) + + # link the executable that computes the correlation scales, gdas_soca_error_covariance_toolbox.x, + # and prepare the command to run it + mdau.link_executable(self.task_config, 'gdas_soca_error_covariance_toolbox.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_error_covariance_toolbox.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_parameters_diffusion_hz.yaml') + + # compute the coefficients of the diffusion operator + mdau.run(exec_cmd) + + @logit(logger) + def vertical_diffusion(self: Task) -> None: + """Generate the vertical diffusion coefficients + """ + # compute the vertical correlation scales based on the MLD + FileHandler({'copy': [[os.path.join(self.task_config.CALC_SCALE_EXEC), + os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync() + exec_cmd = Executable("python") + exec_name = os.path.join(self.task_config.DATA, 'calc_scales.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_vtscales.yaml') + mdau.run(exec_cmd) + + # link the executable that computes the correlation scales, gdas_soca_error_covariance_toolbox.x, + # and prepare the command to run it + mdau.link_executable(self.task_config, 'gdas_soca_error_covariance_toolbox.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_error_covariance_toolbox.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_parameters_diffusion_vt.yaml') + + # 
compute the coefficients of the diffusion operator + mdau.run(exec_cmd) + + @logit(logger) + def ensemble_perturbations(self: Task) -> None: + """Generate the 3D ensemble of perturbation for the 3DEnVAR + + This method will generate ensemble perturbations re-balanced w.r.t the + deterministic background. + This includes: + - computing a storing the unbalanced ensemble perturbations' statistics + - recentering the ensemble members around the deterministic background and + accounting for the nonlinear steric recentering + - saving the recentered ensemble statistics + """ + mdau.link_executable(self.task_config, 'gdas_ens_handler.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_ens_handler.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_ensb.yaml') + + # generate the ensemble perturbations + mdau.run(exec_cmd) + + @logit(logger) + def hybrid_weight(self: Task) -> None: + """Generate the hybrid weights for the 3DEnVAR + + This method will generate the 3D fields hybrid weights for the 3DEnVAR for each + variables. + TODO(G): Currently implemented for the specific case of the static ensemble members only + """ + mdau.link_executable(self.task_config, 'gdas_socahybridweights.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_socahybridweights.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_ensweights.yaml') + + # compute the ensemble weights + mdau.run(exec_cmd) + + @logit(logger) + def execute(self: Task) -> None: + """Generate the full B-matrix + + This method will generate the full B-matrix according to the configuration. + """ + chdir(self.task_config.DATA) + self.gridgen() # TODO: This should be optional in case the geometry file was staged + self.variance_partitioning() + self.horizontal_diffusion() # TODO: Make this optional once we've converged on an acceptable set of scales + self.vertical_diffusion() + # hybrid EnVAR case + if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + self.ensemble_perturbations() # TODO: refactor this from the old scripts + self.hybrid_weight() # TODO: refactor this from the old scripts + + @logit(logger) + def finalize(self: Task) -> None: + """Finalize the global B-matrix job + + This method will finalize the global B-matrix job. + This includes: + - copy the generated static, but cycle dependent background error files to the ROTDIR + - copy the generated YAML file from initialize to the ROTDIR + - keep the re-balanced ensemble perturbation files in DATAenspert + - ... 
+ + """ + # Copy the soca grid if it was created + grid_file = os.path.join(self.task_config.DATA, 'soca_gridspec.nc') + if os.path.exists(grid_file): + logger.info(f"Copying the soca grid file to the ROTDIR") + FileHandler({'copy': [[grid_file, + os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, 'soca_gridspec.nc')]]}).sync() + + # Copy the diffusion coefficient files to the ROTDIR + logger.info(f"Copying the diffusion coefficient files to the ROTDIR") + diffusion_coeff_list = [] + for diff_type in ['hz', 'vt']: + src = os.path.join(self.task_config.DATA, f"{diff_type}_ocean.nc") + dest = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, + f"{self.task_config.APREFIX}{diff_type}_ocean.nc") + diffusion_coeff_list.append([src, dest]) + + src = os.path.join(self.task_config.DATA, f"hz_ice.nc") + dest = os.path.join(self.task_config.COMOUT_ICE_BMATRIX, + f"{self.task_config.APREFIX}hz_ice.nc") + diffusion_coeff_list.append([src, dest]) + + FileHandler({'copy': diffusion_coeff_list}).sync() + + # Copy diag B files to ROTDIR + logger.info(f"Copying diag B files to the ROTDIR") + diagb_list = [] + window_end_iso = self.task_config.MARINE_WINDOW_END.strftime('%Y-%m-%dT%H:%M:%SZ') + + # ocean diag B + src = os.path.join(self.task_config.DATA, 'diagb', f"ocn.bkgerr_stddev.incr.{window_end_iso}.nc") + dst = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, + f"{self.task_config.APREFIX}ocean.bkgerr_stddev.nc") + diagb_list.append([src, dst]) + + # ice diag B + src = os.path.join(self.task_config.DATA, 'diagb', f"ice.bkgerr_stddev.incr.{window_end_iso}.nc") + dst = os.path.join(self.task_config.COMOUT_ICE_BMATRIX, + f"{self.task_config.APREFIX}ice.bkgerr_stddev.nc") + diagb_list.append([src, dst]) + + FileHandler({'copy': diagb_list}).sync() + + # Copy the ensemble perturbation diagnostics to the ROTDIR + if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 3: + window_middle_iso = self.task_config.MARINE_WINDOW_MIDDLE.strftime('%Y-%m-%dT%H:%M:%SZ') + weight_list = [] + src = os.path.join(self.task_config.DATA, f"ocn.ens_weights.incr.{window_middle_iso}.nc") + dst = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, + f"{self.task_config.APREFIX}ocean.ens_weights.nc") + weight_list.append([src, dst]) + + src = os.path.join(self.task_config.DATA, f"ice.ens_weights.incr.{window_middle_iso}.nc") + dst = os.path.join(self.task_config.COMOUT_ICE_BMATRIX, + f"{self.task_config.APREFIX}ice.ens_weights.nc") + weight_list.append([src, dst]) + + # TODO(G): missing ssh_steric_stddev, ssh_unbal_stddev, ssh_total_stddev and steric_explained_variance + + FileHandler({'copy': weight_list}).sync() + + # Copy the YAML files to the OCEAN ROTDIR + yamls = glob.glob(os.path.join(self.task_config.DATA, '*.yaml')) + yaml_list = [] + for yaml_file in yamls: + dest = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, + f"{self.task_config.APREFIX}{os.path.basename(yaml_file)}") + yaml_list.append([yaml_file, dest]) + FileHandler({'copy': yaml_list}).sync() diff --git a/ush/python/pygfs/task/marine_letkf.py b/ush/python/pygfs/task/marine_letkf.py new file mode 100644 index 0000000000..36c26d594b --- /dev/null +++ b/ush/python/pygfs/task/marine_letkf.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python3 + +import f90nml +from logging import getLogger +import os +from pygfs.task.analysis import Analysis +from typing import Dict +from wxflow import (AttrDict, + FileHandler, + logit, + parse_j2yaml, + to_timedelta, + to_YMDH) + +logger = getLogger(__name__.split('.')[-1]) + + +class MarineLETKF(Analysis): + """ + 
Class for global ocean and sea ice analysis LETKF task + """ + + @logit(logger, name="MarineLETKF") + def __init__(self, config: Dict) -> None: + """Constructor for ocean and sea ice LETKF task + Parameters: + ------------ + config: Dict + configuration, namely evironment variables + Returns: + -------- + None + """ + + logger.info("init") + super().__init__(config) + + _half_assim_freq = to_timedelta(f"{self.task_config.assim_freq}H") / 2 + _letkf_yaml_file = 'letkf.yaml' + _letkf_exec_args = [self.task_config.MARINE_LETKF_EXEC, + 'soca', + 'localensembleda', + _letkf_yaml_file] + + self.task_config.WINDOW_MIDDLE = self.task_config.current_cycle + self.task_config.WINDOW_BEGIN = self.task_config.current_cycle - _half_assim_freq + self.task_config.letkf_exec_args = _letkf_exec_args + self.task_config.letkf_yaml_file = _letkf_yaml_file + self.task_config.mom_input_nml_tmpl = os.path.join(self.task_config.DATA, 'mom_input.nml.tmpl') + self.task_config.mom_input_nml = os.path.join(self.task_config.DATA, 'mom_input.nml') + self.task_config.obs_dir = os.path.join(self.task_config.DATA, 'obs') + + @logit(logger) + def initialize(self): + """Method initialize for ocean and sea ice LETKF task + Parameters: + ------------ + None + Returns: + -------- + None + """ + + logger.info("initialize") + + # make directories and stage ensemble background files + ensbkgconf = AttrDict() + keys = ['previous_cycle', 'current_cycle', 'DATA', 'NMEM_ENS', + 'PARMgfs', 'ROTDIR', 'COM_OCEAN_HISTORY_TMPL', 'COM_ICE_HISTORY_TMPL'] + for key in keys: + ensbkgconf[key] = self.task_config[key] + ensbkgconf.RUN = 'enkfgdas' + soca_ens_bkg_stage_list = parse_j2yaml(self.task_config.SOCA_ENS_BKG_STAGE_YAML_TMPL, ensbkgconf) + FileHandler(soca_ens_bkg_stage_list).sync() + soca_fix_stage_list = parse_j2yaml(self.task_config.SOCA_FIX_YAML_TMPL, self.task_config) + FileHandler(soca_fix_stage_list).sync() + letkf_stage_list = parse_j2yaml(self.task_config.MARINE_LETKF_STAGE_YAML_TMPL, self.task_config) + FileHandler(letkf_stage_list).sync() + + obs_list = parse_j2yaml(self.task_config.OBS_YAML, self.task_config) + + # get the list of observations + obs_files = [] + for ob in obs_list['observers']: + obs_name = ob['obs space']['name'].lower() + obs_filename = f"{self.task_config.RUN}.t{self.task_config.cyc}z.{obs_name}.{to_YMDH(self.task_config.current_cycle)}.nc" + obs_files.append((obs_filename, ob)) + + obs_files_to_copy = [] + obs_to_use = [] + # copy obs from COMIN_OBS to DATA/obs + for obs_file, ob in obs_files: + obs_src = os.path.join(self.task_config.COMIN_OBS, obs_file) + obs_dst = os.path.join(self.task_config.DATA, self.task_config.obs_dir, obs_file) + if os.path.exists(obs_src): + obs_files_to_copy.append([obs_src, obs_dst]) + obs_to_use.append(ob) + else: + logger.warning(f"{obs_file} is not available in {self.task_config.COMIN_OBS}") + + # stage the desired obs files + FileHandler({'copy': obs_files_to_copy}).sync() + + # make the letkf.yaml + letkfconf = AttrDict() + keys = ['WINDOW_BEGIN', 'WINDOW_MIDDLE', 'RUN', 'gcyc', 'NMEM_ENS'] + for key in keys: + letkfconf[key] = self.task_config[key] + letkfconf.RUN = 'enkfgdas' + letkf_yaml = parse_j2yaml(self.task_config.MARINE_LETKF_YAML_TMPL, letkfconf) + letkf_yaml.observations.observers = obs_to_use + letkf_yaml.save(self.task_config.letkf_yaml_file) + + # swap date and stack size in mom_input.nml + domain_stack_size = self.task_config.DOMAIN_STACK_SIZE + ymdhms = [int(s) for s in self.task_config.WINDOW_BEGIN.strftime('%Y,%m,%d,%H,%M,%S').split(',')] + with 
open(self.task_config.mom_input_nml_tmpl, 'r') as nml_file: + nml = f90nml.read(nml_file) + nml['ocean_solo_nml']['date_init'] = ymdhms + nml['fms_nml']['domains_stack_size'] = int(domain_stack_size) + nml.write(self.task_config.mom_input_nml, force=True) # force to overwrite if necessary + + @logit(logger) + def run(self): + """Method run for ocean and sea ice LETKF task + Parameters: + ------------ + None + Returns: + -------- + None + """ + + logger.info("run") + + @logit(logger) + def finalize(self): + """Method finalize for ocean and sea ice LETKF task + Parameters: + ------------ + None + Returns: + -------- + None + """ + + logger.info("finalize") diff --git a/ush/python/pygfs/task/oceanice_products.py b/ush/python/pygfs/task/oceanice_products.py new file mode 100644 index 0000000000..98b57ae801 --- /dev/null +++ b/ush/python/pygfs/task/oceanice_products.py @@ -0,0 +1,356 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import List, Dict, Any +from pprint import pformat +import xarray as xr + +from wxflow import (AttrDict, + parse_j2yaml, + FileHandler, + Jinja, + logit, + Task, + add_to_datetime, to_timedelta, + WorkflowException, + Executable) + +logger = getLogger(__name__.split('.')[-1]) + + +class OceanIceProducts(Task): + """Ocean Ice Products Task + """ + + VALID_COMPONENTS = ['ocean', 'ice'] + COMPONENT_RES_MAP = {'ocean': 'OCNRES', 'ice': 'ICERES'} + VALID_PRODUCT_GRIDS = {'mx025': ['1p00', '0p25'], + 'mx050': ['1p00', '0p50'], + 'mx100': ['1p00'], + 'mx500': ['5p00']} + + # These could be read from the yaml file + TRIPOLE_DIMS_MAP = {'mx025': [1440, 1080], 'mx050': [720, 526], 'mx100': [360, 320], 'mx500': [72, 35]} + LATLON_DIMS_MAP = {'0p25': [1440, 721], '0p50': [720, 361], '1p00': [360, 181], '5p00': [72, 36]} + + @logit(logger, name="OceanIceProducts") + def __init__(self, config: Dict[str, Any]) -> None: + """Constructor for the Ocean/Ice Productstask + + Parameters + ---------- + config : Dict[str, Any] + Incoming configuration for the task from the environment + + Returns + ------- + None + """ + super().__init__(config) + + if self.task_config.COMPONENT not in self.VALID_COMPONENTS: + raise NotImplementedError(f'{self.task_config.COMPONENT} is not a valid model component.\n' + + 'Valid model components are:\n' + + f'{", ".join(self.VALID_COMPONENTS)}') + + model_grid = f"mx{self.task_config[self.COMPONENT_RES_MAP[self.task_config.COMPONENT]]:03d}" + + valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H")) + + if self.task_config.COMPONENT == 'ice': + offset = int(self.task_config.current_cycle.strftime("%H")) % self.task_config.FHOUT_ICE_GFS + # For CICE cases where offset is not 0, forecast_hour needs to be adjusted based on the offset. + # TODO: Consider FHMIN when calculating offset. 
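+ # For example, with FHOUT_ICE_GFS=6 at the 03z cycle the offset is 3, so FORECAST_HOUR=9 yields forecast_hour=6
+ # with a first interval of 6 - 3 = 3 hours, while later forecast hours fall back to the regular 6-hour interval.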
+ if offset != 0: + forecast_hour = self.task_config.FORECAST_HOUR - int(self.task_config.current_cycle.strftime("%H")) + # For the first forecast hour, the interval may be different from the intervals of subsequent forecast hours + if forecast_hour <= self.task_config.FHOUT_ICE_GFS: + interval = self.task_config.FHOUT_ICE_GFS - int(self.task_config.current_cycle.strftime("%H")) + else: + interval = self.task_config.FHOUT_ICE_GFS + else: + forecast_hour = self.task_config.FORECAST_HOUR + interval = self.task_config.FHOUT_ICE_GFS + if self.task_config.COMPONENT == 'ocean': + forecast_hour = self.task_config.FORECAST_HOUR + interval = self.task_config.FHOUT_OCN_GFS + + # TODO: This is a bit of a hack, but it works for now + # FIXME: find a better way to provide the averaging period + avg_period = f"{forecast_hour-interval:03d}-{forecast_hour:03d}" + + # Extend task_config with localdict + localdict = AttrDict( + {'component': self.task_config.COMPONENT, + 'forecast_hour': forecast_hour, + 'valid_datetime': valid_datetime, + 'avg_period': avg_period, + 'model_grid': model_grid, + 'interval': interval, + 'product_grids': self.VALID_PRODUCT_GRIDS[model_grid]} + ) + self.task_config = AttrDict(**self.task_config, **localdict) + + # Read the oceanice_products.yaml file for common configuration + logger.info(f"Read the ocean ice products configuration yaml file {self.task_config.OCEANICEPRODUCTS_CONFIG}") + self.task_config.oceanice_yaml = parse_j2yaml(self.task_config.OCEANICEPRODUCTS_CONFIG, self.task_config) + logger.debug(f"oceanice_yaml:\n{pformat(self.task_config.oceanice_yaml)}") + + @staticmethod + @logit(logger) + def initialize(config: Dict) -> None: + """Initialize the work directory by copying all the common fix data + + Parameters + ---------- + config : Dict + Configuration dictionary for the task + + Returns + ------- + None + """ + + # Copy static data to run directory + logger.info("Copy static data to run directory") + FileHandler(config.oceanice_yaml.ocnicepost.fix_data).sync() + + # Copy "component" specific model data to run directory (e.g. ocean/ice forecast output) + logger.info(f"Copy {config.component} data to run directory") + FileHandler(config.oceanice_yaml[config.component].data_in).sync() + + @staticmethod + @logit(logger) + def configure(config: Dict, product_grid: str) -> None: + """Configure the namelist for the product_grid in the work directory. + Create namelist 'ocnicepost.nml' from template + + Parameters + ---------- + config : Dict + Configuration dictionary for the task + product_grid : str + Target product grid to process + + Returns + ------- + None + """ + + # Make a localconf with the "component" specific configuration for parsing the namelist + localconf = AttrDict() + localconf.DATA = config.DATA + localconf.component = config.component + + localconf.source_tripole_dims = ', '.join(map(str, OceanIceProducts.TRIPOLE_DIMS_MAP[config.model_grid])) + localconf.target_latlon_dims = ', '.join(map(str, OceanIceProducts.LATLON_DIMS_MAP[product_grid])) + + localconf.maskvar = config.oceanice_yaml[config.component].namelist.maskvar + localconf.sinvar = config.oceanice_yaml[config.component].namelist.sinvar + localconf.cosvar = config.oceanice_yaml[config.component].namelist.cosvar + localconf.angvar = config.oceanice_yaml[config.component].namelist.angvar + localconf.debug = ".true." if config.oceanice_yaml.ocnicepost.namelist.debug else ".false." 
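+ # localconf now holds the values the ocnicepost.nml.jinja2 template is rendered with below: the source/target grid
+ # dimensions, the mask/sin/cos/angle variable names for this component, and the debug switch.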
+ + logger.debug(f"localconf:\n{pformat(localconf)}") + + # Configure the namelist and write to file + logger.info("Create namelist for ocnicepost.x") + nml_template = os.path.join(localconf.DATA, "ocnicepost.nml.jinja2") + nml_data = Jinja(nml_template, localconf).render + logger.debug(f"ocnicepost_nml:\n{nml_data}") + nml_file = os.path.join(localconf.DATA, "ocnicepost.nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + @staticmethod + @logit(logger) + def execute(config: Dict, product_grid: str) -> None: + """Run the ocnicepost.x executable to interpolate and convert to grib2 + + Parameters + ---------- + config : Dict + Configuration dictionary for the task + product_grid : str + Target product grid to process + + Returns + ------- + None + """ + + # Run the ocnicepost.x executable + OceanIceProducts.interp(config.DATA, config.APRUN_OCNICEPOST, exec_name="ocnicepost.x") + + # Convert interpolated netCDF file to grib2 + OceanIceProducts.netCDF_to_grib2(config, product_grid) + + @staticmethod + @logit(logger) + def interp(workdir: str, aprun_cmd: str, exec_name: str = "ocnicepost.x") -> None: + """ + Run the interpolation executable to generate rectilinear netCDF file + + Parameters + ---------- + config : Dict + Configuration dictionary for the task + workdir : str + Working directory for the task + aprun_cmd : str + aprun command to use + exec_name : str + Name of the executable e.g. ocnicepost.x + + Returns + ------- + None + """ + os.chdir(workdir) + logger.debug(f"Current working directory: {os.getcwd()}") + + exec_cmd = Executable(aprun_cmd) + exec_cmd.add_default_arg(os.path.join(workdir, exec_name)) + + OceanIceProducts._call_executable(exec_cmd) + + @staticmethod + @logit(logger) + def netCDF_to_grib2(config: Dict, grid: str) -> None: + """Convert interpolated netCDF file to grib2 + + Parameters + ---------- + config : Dict + Configuration dictionary for the task + grid : str + Target product grid to process + + Returns + ------ + None + """ + + os.chdir(config.DATA) + + exec_cmd = Executable(config.oceanice_yaml.nc2grib2.script) + arguments = [config.component, grid, config.current_cycle.strftime("%Y%m%d%H"), config.avg_period] + if config.component == 'ocean': + levs = config.oceanice_yaml.ocean.namelist.ocean_levels + arguments.append(':'.join(map(str, levs))) + + logger.info(f"Executing {exec_cmd} with arguments {arguments}") + try: + exec_cmd(*arguments) + except OSError: + logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}") + raise OSError(f"{exec_cmd}") + except Exception: + logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}") + raise WorkflowException(f"{exec_cmd}") + + @staticmethod + @logit(logger) + def subset(config: Dict) -> None: + """ + Subset a list of variables from a netcdf file and save to a new netcdf file. 
+ Also save global attributes and history from the old netcdf file into new netcdf file + + Parameters + ---------- + config : Dict + Configuration dictionary for the task + + Returns + ------- + None + """ + + os.chdir(config.DATA) + + input_file = f"{config.component}.nc" + output_file = f"{config.component}_subset.nc" + varlist = config.oceanice_yaml[config.component].subset + + logger.info(f"Subsetting {varlist} from {input_file} to {output_file}") + + try: + # open the netcdf file + ds = xr.open_dataset(input_file) + + # subset the variables + ds_subset = ds[varlist] + + # save global attributes from the old netcdf file into new netcdf file + ds_subset.attrs = ds.attrs + + # save subsetted variables to a new netcdf file + ds_subset.to_netcdf(output_file) + + except FileNotFoundError: + logger.exception(f"FATAL ERROR: Input file not found: {input_file}") + raise FileNotFoundError(f"File not found: {input_file}") + + except IOError as err: + logger.exception(f"FATAL ERROR: IOError occurred during netCDF subset: {input_file}") + raise IOError(f"An I/O error occurred: {err}") + + except Exception as err: + logger.exception(f"FATAL ERROR: Error occurred during netCDF subset: {input_file}") + raise WorkflowException(f"{err}") + + finally: + # close the netcdf files + ds.close() + ds_subset.close() + + @staticmethod + @logit(logger) + def _call_executable(exec_cmd: Executable) -> None: + """Internal method to call executable + + Parameters + ---------- + exec_cmd : Executable + Executable to run + + Raises + ------ + OSError + Failure due to OS issues + WorkflowException + All other exceptions + """ + + logger.info(f"Executing {exec_cmd}") + try: + exec_cmd() + except OSError: + logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}") + raise OSError(f"{exec_cmd}") + except Exception: + logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}") + raise WorkflowException(f"{exec_cmd}") + + @staticmethod + @logit(logger) + def finalize(config: Dict) -> None: + """Perform closing actions of the task. + Copy data back from the DATA/ directory to COM/ + + Parameters + ---------- + config: Dict + Configuration dictionary for the task + + Returns + ------- + None + """ + + # Copy "component" specific generated data to COM/ directory + data_out = config.oceanice_yaml[config.component].data_out + + logger.info(f"Copy processed data to COM/ directory") + FileHandler(data_out).sync() diff --git a/ush/python/pygfs/task/land_analysis.py b/ush/python/pygfs/task/snow_analysis.py similarity index 72% rename from ush/python/pygfs/task/land_analysis.py rename to ush/python/pygfs/task/snow_analysis.py index 307e875183..9656b00a8e 100644 --- a/ush/python/pygfs/task/land_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -11,7 +11,7 @@ FileHandler, to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime, rm_p, - parse_j2yaml, parse_yamltmpl, save_as_yaml, + parse_j2yaml, save_as_yaml, Jinja, logit, Executable, @@ -21,44 +21,44 @@ logger = getLogger(__name__.split('.')[-1]) -class LandAnalysis(Analysis): +class SnowAnalysis(Analysis): """ - Class for global land analysis tasks + Class for global snow analysis tasks """ - NMEM_LANDENS = 2 # The size of the land ensemble is fixed at 2. Does this need to be a variable? 
+ NMEM_SNOWENS = 2 - @logit(logger, name="LandAnalysis") + @logit(logger, name="SnowAnalysis") def __init__(self, config): super().__init__(config) - _res = int(self.config['CASE'][1:]) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) - _letkfoi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.letkfoi.yaml") + _res = int(self.task_config['CASE'][1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + _letkfoi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.letkfoi.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { 'npx_ges': _res + 1, 'npy_ges': _res + 1, - 'npz_ges': self.config.LEVS - 1, - 'npz': self.config.LEVS - 1, - 'LAND_WINDOW_BEGIN': _window_begin, - 'LAND_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", - 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", - 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, + 'SNOW_WINDOW_BEGIN': _window_begin, + 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'jedi_yaml': _letkfoi_yaml } ) - # task_config is everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def prepare_GTS(self) -> None: - """Prepare the GTS data for a global land analysis + """Prepare the GTS data for a global snow analysis - This method will prepare GTS data for a global land analysis using JEDI. + This method will prepare GTS data for a global snow analysis using JEDI. This includes: - processing GTS bufr snow depth observation data to IODA format @@ -74,7 +74,7 @@ def prepare_GTS(self) -> None: # create a temporary dict of all keys needed in this method localconf = AttrDict() keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', - 'OPREFIX', 'CASE', 'ntiles'] + 'OPREFIX', 'CASE', 'OCNRES', 'ntiles'] for key in keys: localconf[key] = self.task_config[key] @@ -99,7 +99,7 @@ def prepare_GTS(self) -> None: def _gtsbufr2iodax(exe, yaml_file): if not os.path.isfile(yaml_file): - logger.exception(f"{yaml_file} not found") + logger.exception(f"FATAL ERROR: {yaml_file} not found") raise FileNotFoundError(yaml_file) logger.info(f"Executing {exe}") @@ -114,7 +114,7 @@ def _gtsbufr2iodax(exe, yaml_file): # 1. generate bufr2ioda YAML files # 2. execute bufr2ioda.x for name in prep_gts_config.bufr2ioda.keys(): - gts_yaml = os.path.join(self.runtime_config.DATA, f"bufr_{name}_snow.yaml") + gts_yaml = os.path.join(self.task_config.DATA, f"bufr_{name}_snow.yaml") logger.info(f"Generate BUFR2IODA YAML file: {gts_yaml}") temp_yaml = parse_j2yaml(prep_gts_config.bufr2ioda[name], localconf) save_as_yaml(temp_yaml, gts_yaml) @@ -133,9 +133,9 @@ def _gtsbufr2iodax(exe, yaml_file): @logit(logger) def prepare_IMS(self) -> None: - """Prepare the IMS data for a global land analysis + """Prepare the IMS data for a global snow analysis - This method will prepare IMS data for a global land analysis using JEDI. 
+ This method will prepare IMS data for a global snow analysis using JEDI. This includes: - staging model backgrounds - processing raw IMS observation data and prepare for conversion to IODA @@ -153,7 +153,7 @@ def prepare_IMS(self) -> None: # create a temporary dict of all keys needed in this method localconf = AttrDict() keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', - 'OPREFIX', 'CASE', 'OCNRES', 'ntiles'] + 'OPREFIX', 'CASE', 'OCNRES', 'ntiles', 'FIXgfs'] for key in keys: localconf[key] = self.task_config[key] @@ -198,7 +198,7 @@ def prepare_IMS(self) -> None: raise WorkflowException(f"An error occured during execution of {exe}") # Ensure the snow depth IMS file is produced by the above executable - input_file = f"IMSscf.{to_YMD(localconf.current_cycle)}.{localconf.CASE}.mx{localconf.OCNRES}_oro_data.nc" + input_file = f"IMSscf.{to_YMD(localconf.current_cycle)}.{localconf.CASE}_oro_data.nc" if not os.path.isfile(f"{os.path.join(localconf.DATA, input_file)}"): logger.exception(f"{self.task_config.CALCFIMSEXE} failed to produce {input_file}") raise FileNotFoundError(f"{os.path.join(localconf.DATA, input_file)}") @@ -232,7 +232,7 @@ def prepare_IMS(self) -> None: @logit(logger) def initialize(self) -> None: - """Initialize method for Land analysis + """Initialize method for snow analysis This method: - creates artifacts in the DATA directory by copying fix files - creates the JEDI LETKF yaml from the template @@ -241,7 +241,7 @@ def initialize(self) -> None: Parameters ---------- self : Analysis - Instance of the LandAnalysis object + Instance of the SnowAnalysis object """ super().initialize() @@ -249,30 +249,27 @@ def initialize(self) -> None: # create a temporary dict of all keys needed in this method localconf = AttrDict() keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', - 'OPREFIX', 'CASE', 'ntiles'] + 'OPREFIX', 'CASE', 'OCNRES', 'ntiles'] for key in keys: localconf[key] = self.task_config[key] # Make member directories in DATA for background dirlist = [] - for imem in range(1, LandAnalysis.NMEM_LANDENS + 1): + for imem in range(1, SnowAnalysis.NMEM_SNOWENS + 1): dirlist.append(os.path.join(localconf.DATA, 'bkg', f'mem{imem:03d}')) FileHandler({'mkdir': dirlist}).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'land_jedi_fix.yaml') - logger.info(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds logger.info("Staging ensemble backgrounds") FileHandler(self.get_ens_bkg_dict(localconf)).sync() - # generate letkfoi YAML file - logger.info(f"Generate JEDI LETKF YAML file: {self.task_config.jedi_yaml}") - letkfoi_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config) - save_as_yaml(letkfoi_yaml, self.task_config.jedi_yaml) + # Write out letkfoi YAML file + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}") # need output dir for diags and anl @@ -294,15 +291,15 @@ def execute(self) -> None: Parameters ---------- self : Analysis - Instance of the LandAnalysis object + Instance of the SnowAnalysis object """ # create a temporary dict of all keys needed in this method localconf = AttrDict() keys = ['HOMEgfs', 'DATA', 
'current_cycle', - 'COM_ATMOS_RESTART_PREV', 'COM_LAND_ANALYSIS', 'APREFIX', - 'SNOWDEPTHVAR', 'BESTDDEV', 'CASE', 'ntiles', - 'APRUN_LANDANL', 'JEDIEXE', 'jedi_yaml', + 'COM_ATMOS_RESTART_PREV', 'COM_SNOW_ANALYSIS', 'APREFIX', + 'SNOWDEPTHVAR', 'BESTDDEV', 'CASE', 'OCNRES', 'ntiles', + 'APRUN_SNOWANL', 'JEDIEXE', 'jedi_yaml', 'DOIAU', 'SNOW_WINDOW_BEGIN', 'APPLY_INCR_NML_TMPL', 'APPLY_INCR_EXE', 'APRUN_APPLY_INCR'] for key in keys: localconf[key] = self.task_config[key] @@ -313,17 +310,27 @@ def execute(self) -> None: AttrDict({key: localconf[key] for key in ['DATA', 'ntiles', 'current_cycle']})) logger.info("Running JEDI LETKF") - self.execute_jediexe(localconf.DATA, - localconf.APRUN_LANDANL, - os.path.basename(localconf.JEDIEXE), - localconf.jedi_yaml) + exec_cmd = Executable(localconf.APRUN_SNOWANL) + exec_name = os.path.join(localconf.DATA, 'gdas.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('localensembleda') + exec_cmd.add_default_arg(localconf.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") logger.info("Creating analysis from backgrounds and increments") self.add_increments(localconf) @logit(logger) def finalize(self) -> None: - """Performs closing actions of the Land analysis task + """Performs closing actions of the Snow analysis task This method: - tar and gzip the output diag files and place in COM/ - copy the generated YAML file from initialize to the COM/ @@ -333,11 +340,11 @@ def finalize(self) -> None: Parameters ---------- self : Analysis - Instance of the LandAnalysis object + Instance of the SnowAnalysis object """ logger.info("Create diagnostic tarball of diag*.nc4 files") - statfile = os.path.join(self.task_config.COM_LAND_ANALYSIS, f"{self.task_config.APREFIX}landstat.tgz") + statfile = os.path.join(self.task_config.COM_SNOW_ANALYSIS, f"{self.task_config.APREFIX}snowstat.tgz") self.tgz_diags(statfile, self.task_config.DATA) logger.info("Copy full YAML to COM") @@ -350,22 +357,28 @@ def finalize(self) -> None: FileHandler(yaml_copy).sync() logger.info("Copy analysis to COM") - template = f'{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + bkgtimes = [] + if self.task_config.DOIAU: + # need both beginning and middle of window + bkgtimes.append(self.task_config.SNOW_WINDOW_BEGIN) + bkgtimes.append(self.task_config.current_cycle) anllist = [] - for itile in range(1, self.task_config.ntiles + 1): - filename = template.format(tilenum=itile) - src = os.path.join(self.task_config.DATA, 'anl', filename) - dest = os.path.join(self.task_config.COM_LAND_ANALYSIS, filename) - anllist.append([src, dest]) + for bkgtime in bkgtimes: + template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + for itile in range(1, self.task_config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', filename) + dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) + anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() logger.info('Copy increments to COM') - template = f'landinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + template = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' inclist = [] for itile in range(1, self.task_config.ntiles + 1): filename = 
template.format(tilenum=itile) src = os.path.join(self.task_config.DATA, 'anl', filename) - dest = os.path.join(self.task_config.COM_LAND_ANALYSIS, filename) + dest = os.path.join(self.task_config.COM_SNOW_ANALYSIS, filename) inclist.append([src, dest]) FileHandler({'copy': inclist}).sync() @@ -375,7 +388,7 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: """Compile a dictionary of model background files to copy This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data) - that are needed for global land DA and returns said dictionary for use by the FileHandler class. + that are needed for global snow DA and returns said dictionary for use by the FileHandler class. Parameters ---------- @@ -401,11 +414,11 @@ def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: # Start accumulating list of background files to copy bkglist = [] - # land DA needs coupler + # snow DA needs coupler basename = f'{to_fv3time(config.current_cycle)}.coupler.res' bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - # land DA only needs sfc_data + # snow DA only needs sfc_data for ftype in ['sfc_data']: template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' for itile in range(1, config.ntiles + 1): @@ -447,17 +460,17 @@ def get_ens_bkg_dict(config: Dict) -> Dict: # get FV3 sfc_data RESTART files; Note an ensemble is being created rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV) - for imem in range(1, LandAnalysis.NMEM_LANDENS + 1): + for imem in range(1, SnowAnalysis.NMEM_SNOWENS + 1): memchar = f"mem{imem:03d}" run_dir = os.path.join(config.DATA, 'bkg', memchar, 'RESTART') dirlist.append(run_dir) - # Land DA needs coupler + # Snow DA needs coupler basename = f'{to_fv3time(config.current_cycle)}.coupler.res' bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - # Land DA only needs sfc_data + # Snow DA only needs sfc_data for ftype in ['sfc_data']: template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' for itile in range(1, config.ntiles + 1): @@ -491,7 +504,7 @@ def create_ensemble(vname: str, bestddev: float, config: Dict) -> None: """ # 2 ens members - offset = bestddev / np.sqrt(LandAnalysis.NMEM_LANDENS) + offset = bestddev / np.sqrt(SnowAnalysis.NMEM_SNOWENS) logger.info(f"Creating ensemble for LETKFOI by offsetting with {offset}") @@ -530,10 +543,13 @@ def add_increments(config: Dict) -> None: DATA current_cycle CASE + OCNRES ntiles APPLY_INCR_NML_TMPL APPLY_INCR_EXE APRUN_APPLY_INCR + DOIAU + SNOW_WINDOW_BEGIN Raises ------ @@ -545,38 +561,67 @@ def add_increments(config: Dict) -> None: # need backgrounds to create analysis from increments after LETKF logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") - template = f'{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + bkgtimes = [] + if config.DOIAU: + # want analysis at beginning and middle of window + bkgtimes.append(config.SNOW_WINDOW_BEGIN) + bkgtimes.append(config.current_cycle) anllist = [] - for itile in range(1, config.ntiles + 1): - filename = template.format(tilenum=itile) - src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename) - dest = os.path.join(config.DATA, "anl", filename) - anllist.append([src, dest]) + for bkgtime in bkgtimes: + template = f'{to_fv3time(bkgtime)}.sfc_data.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename) + dest = 
os.path.join(config.DATA, "anl", filename) + anllist.append([src, dest]) FileHandler({'copy': anllist}).sync() - logger.info("Create namelist for APPLY_INCR_EXE") - nml_template = config.APPLY_INCR_NML_TMPL - nml_data = Jinja(nml_template, config).render - logger.debug(f"apply_incr_nml:\n{nml_data}") - - nml_file = os.path.join(config.DATA, "apply_incr_nml") - with open(nml_file, "w") as fho: - fho.write(nml_data) - - logger.info("Link APPLY_INCR_EXE into DATA/") - exe_src = config.APPLY_INCR_EXE - exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - - # execute APPLY_INCR_EXE to create analysis files - exe = Executable(config.APRUN_APPLY_INCR) - exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) - logger.info(f"Executing {exe}") - try: - exe() - except OSError: - raise OSError(f"Failed to execute {exe}") - except Exception: - raise WorkflowException(f"An error occured during execution of {exe}") + if config.DOIAU: + logger.info("Copying increments to beginning of window") + template_in = f'snowinc.{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + template_out = f'snowinc.{to_fv3time(config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, config.ntiles + 1): + filename_in = template_in.format(tilenum=itile) + filename_out = template_out.format(tilenum=itile) + src = os.path.join(config.DATA, 'anl', filename_in) + dest = os.path.join(config.DATA, 'anl', filename_out) + inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() + + # loop over times to apply increments + for bkgtime in bkgtimes: + logger.info("Processing analysis valid: {bkgtime}") + logger.info("Create namelist for APPLY_INCR_EXE") + nml_template = config.APPLY_INCR_NML_TMPL + nml_config = { + 'current_cycle': bkgtime, + 'CASE': config.CASE, + 'DATA': config.DATA, + 'HOMEgfs': config.HOMEgfs, + 'OCNRES': config.OCNRES, + } + nml_data = Jinja(nml_template, nml_config).render + logger.debug(f"apply_incr_nml:\n{nml_data}") + + nml_file = os.path.join(config.DATA, "apply_incr_nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link APPLY_INCR_EXE into DATA/") + exe_src = config.APPLY_INCR_EXE + exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute APPLY_INCR_EXE to create analysis files + exe = Executable(config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") + try: + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exe}") diff --git a/ush/python/pygfs/task/upp.py b/ush/python/pygfs/task/upp.py index 7db50e1582..7e42e07c64 100644 --- a/ush/python/pygfs/task/upp.py +++ b/ush/python/pygfs/task/upp.py @@ -46,26 +46,27 @@ def __init__(self, config: Dict[str, Any]) -> None: """ super().__init__(config) - if self.config.UPP_RUN not in self.VALID_UPP_RUN: - raise NotImplementedError(f'{self.config.UPP_RUN} is not a valid UPP run type.\n' + + if self.task_config.UPP_RUN not in self.VALID_UPP_RUN: + raise NotImplementedError(f'{self.task_config.UPP_RUN} is not a valid UPP run type.\n' + 'Valid UPP_RUN values are:\n' + f'{", ".join(self.VALID_UPP_RUN)}') - valid_datetime = add_to_datetime(self.runtime_config.current_cycle, 
to_timedelta(f"{self.config.FORECAST_HOUR}H")) + valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H")) + # Extend task_config with localdict localdict = AttrDict( - {'upp_run': self.config.UPP_RUN, - 'forecast_hour': self.config.FORECAST_HOUR, + {'upp_run': self.task_config.UPP_RUN, + 'forecast_hour': self.task_config.FORECAST_HOUR, 'valid_datetime': valid_datetime, 'atmos_filename': f"atm_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc", 'flux_filename': f"sfc_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc" } ) - self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict) + self.task_config = AttrDict(**self.task_config, **localdict) # Read the upp.yaml file for common configuration - logger.info(f"Read the UPP configuration yaml file {self.config.UPP_CONFIG}") - self.task_config.upp_yaml = parse_j2yaml(self.config.UPP_CONFIG, self.task_config) + logger.info(f"Read the UPP configuration yaml file {self.task_config.UPP_CONFIG}") + self.task_config.upp_yaml = parse_j2yaml(self.task_config.UPP_CONFIG, self.task_config) logger.debug(f"upp_yaml:\n{pformat(self.task_config.upp_yaml)}") @staticmethod diff --git a/ush/python/pygfs/utils/__init__.py b/ush/python/pygfs/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ush/python/pygfs/utils/marine_da_utils.py b/ush/python/pygfs/utils/marine_da_utils.py new file mode 100644 index 0000000000..016551878b --- /dev/null +++ b/ush/python/pygfs/utils/marine_da_utils.py @@ -0,0 +1,99 @@ +import f90nml +import os +from logging import getLogger +import xarray as xr + +from wxflow import (FileHandler, + logit, + WorkflowException, + AttrDict, + parse_j2yaml, + Executable, + jinja) + +logger = getLogger(__name__.split('.')[-1]) + + +@logit(logger) +def run(exec_cmd: Executable) -> None: + """Run the executable command + """ + logger.info(f"Executing {exec_cmd}") + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + +@logit(logger) +def link_executable(task_config: AttrDict, exe_name: str) -> None: + """Link the executable to the DATA directory + """ + logger.info(f"Link executable {exe_name}") + logger.warn("WARNING: Linking is not permitted per EE2.") + exe_src = os.path.join(task_config.EXECgfs, exe_name) + exe_dest = os.path.join(task_config.DATA, exe_name) + if os.path.exists(exe_dest): + os.remove(exe_dest) + os.symlink(exe_src, exe_dest) + + +@logit(logger) +def prep_input_nml(task_config: AttrDict) -> None: + """Prepare the input.nml file + TODO: Use jinja2 instead of f90nml + """ + # stage input.nml + mom_input_nml_tmpl_src = os.path.join(task_config.HOMEgdas, 'parm', 'soca', 'fms', 'input.nml') + mom_input_nml_tmpl = os.path.join(task_config.DATA, 'mom_input.nml.tmpl') + FileHandler({'copy': [[mom_input_nml_tmpl_src, mom_input_nml_tmpl]]}).sync() + + # swap date and stacksize + domain_stack_size = task_config.DOMAIN_STACK_SIZE + ymdhms = [int(s) for s in task_config.MARINE_WINDOW_END.strftime('%Y,%m,%d,%H,%M,%S').split(',')] + with open(mom_input_nml_tmpl, 'r') as nml_file: + nml = f90nml.read(nml_file) + nml['ocean_solo_nml']['date_init'] = ymdhms + nml['fms_nml']['domains_stack_size'] = int(domain_stack_size) + nml.write('mom_input.nml') + + +@logit(logger) +def cice_hist2fms(input_filename: str, output_filename: str) -> None: + """ Reformat the CICE history file so it 
can be read by SOCA/FMS + Simple reformatting utility to allow soca/fms to read the CICE history files + """ + + # open the CICE history file + ds = xr.open_dataset(input_filename) + + if 'aicen' in ds.variables and 'hicen' in ds.variables and 'hsnon' in ds.variables: + logger.info(f"*** Already reformatted, skipping.") + return + + # rename the dimensions to xaxis_1 and yaxis_1 + ds = ds.rename({'ni': 'xaxis_1', 'nj': 'yaxis_1'}) + + # rename the variables + ds = ds.rename({'aice_h': 'aicen', 'hi_h': 'hicen', 'hs_h': 'hsnon'}) + + # Save the new netCDF file + ds.to_netcdf(output_filename, mode='w') + + +@logit(logger) +def stage_ens_mem(task_config: AttrDict) -> None: + """ Copy the ensemble members to the DATA directory + Copy the ensemble members to the DATA directory and reformat the CICE history files + """ + # Copy the ensemble members to the DATA directory + logger.info("---------------- Stage ensemble members") + ensbkgconf = AttrDict(task_config) + ensbkgconf.RUN = task_config.GDUMP_ENS + logger.debug(f"{jinja.Jinja(task_config.MARINE_ENSDA_STAGE_BKG_YAML_TMPL, ensbkgconf).render}") + letkf_stage_list = parse_j2yaml(task_config.MARINE_ENSDA_STAGE_BKG_YAML_TMPL, ensbkgconf) + logger.info(f"{letkf_stage_list}") + FileHandler(letkf_stage_list).sync() diff --git a/ush/radmon_err_rpt.sh b/ush/radmon_err_rpt.sh index 6ae6505624..c3d251d5cd 100755 --- a/ush/radmon_err_rpt.sh +++ b/ush/radmon_err_rpt.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/preamble.sh" +source "${USHgfs}/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -55,9 +55,6 @@ cycle2=${5:-${cycle2:?}} diag_rpt=${6:-${diag_rpt:?}} outfile=${7:-${outfile:?}} -# Directories -HOMEradmon=${HOMEradmon:-$(pwd)} - # Other variables err=0 RADMON_SUFFIX=${RADMON_SUFFIX} diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh index f68d7c88cc..3dff2a6f98 100755 --- a/ush/radmon_verf_angle.sh +++ b/ush/radmon_verf_angle.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/preamble.sh" +source "${USHgfs}/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -29,8 +29,6 @@ source "${HOMEgfs}/ush/preamble.sh" # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr -# EXECgfs executable directory -# PARMmonitor parm directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository @@ -83,7 +81,6 @@ which prep_step which startmsg # File names -export pgmout=${pgmout:-${jlogfile}} touch "${pgmout}" # Other variables @@ -101,7 +98,7 @@ fi err=0 angle_exec=radmon_angle.x -shared_scaninfo="${shared_scaninfo:-${PARMmonitor}/gdas_radmon_scaninfo.txt}" +shared_scaninfo="${shared_scaninfo:-${PARMgfs}/monitor/gdas_radmon_scaninfo.txt}" scaninfo=scaninfo.txt #-------------------------------------------------------------------- diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh index ab1058711e..4274436154 100755 --- a/ush/radmon_verf_bcoef.sh +++ b/ush/radmon_verf_bcoef.sh @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/preamble.sh" +source "${USHgfs}/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -69,7 +69,6 @@ fi echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}" # File names -pgmout=${pgmout:-${jlogfile}} touch "${pgmout}" # Other variables diff --git a/ush/radmon_verf_bcor.sh b/ush/radmon_verf_bcor.sh index f1f97c247e..ea0a7842e6 100755 --- a/ush/radmon_verf_bcor.sh +++ b/ush/radmon_verf_bcor.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/preamble.sh" +source "${USHgfs}/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -65,7 +65,6 @@ source "${HOMEgfs}/ush/preamble.sh" #################################################################### # File names -pgmout=${pgmout:-${jlogfile}} touch "${pgmout}" # Other variables diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh index 7f98407ec5..0e935826dd 100755 --- a/ush/radmon_verf_time.sh +++ b/ush/radmon_verf_time.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/preamble.sh" +source "${USHgfs}/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -33,8 +33,6 @@ source "${HOMEgfs}/ush/preamble.sh" # defaults to 1 (on) # RADMON_SUFFIX data source suffix # defauls to opr -# EXECgfs executable directory -# PARMmonitor parm data directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository @@ -75,11 +73,9 @@ source "${HOMEgfs}/ush/preamble.sh" #################################################################### # File names -#pgmout=${pgmout:-${jlogfile}} -#touch $pgmout radmon_err_rpt=${radmon_err_rpt:-${USHgfs}/radmon_err_rpt.sh} -base_file=${base_file:-${PARMmonitor}/gdas_radmon_base.tar} +base_file=${base_file:-${PARMgfs}/monitor/gdas_radmon_base.tar} report=report.txt disclaimer=disclaimer.txt diff --git a/ush/rstprod.sh b/ush/rstprod.sh index acac0340bb..b48a6817e0 100755 --- a/ush/rstprod.sh +++ b/ush/rstprod.sh @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" #--------------------------------------------------------- # rstprod.sh diff --git a/ush/run_mpmd.sh b/ush/run_mpmd.sh index 24cb3f2656..e3fc2b7512 100755 --- a/ush/run_mpmd.sh +++ b/ush/run_mpmd.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -source "${HOMEgfs}/ush/preamble.sh" +source "${USHgfs}/preamble.sh" cmdfile=${1:?"run_mpmd requires an input file containing commands to execute in MPMD mode"} diff --git a/ush/syndat_getjtbul.sh b/ush/syndat_getjtbul.sh index c17067ff72..6596c6ef96 100755 --- a/ush/syndat_getjtbul.sh +++ b/ush/syndat_getjtbul.sh @@ -18,17 +18,10 @@ # Imported variables that must be passed in: # DATA - path to working directory # pgmout - string indicating path to for standard output file -# EXECSYND - path to syndat executable directory # TANK_TROPCY - path to home directory containing tropical cyclone record # data base -# Imported variables that can be passed in: -# jlogfile - path to job log file (skipped over by this script if not -# passed in) - -source "$HOMEgfs/ush/preamble.sh" - -EXECSYND=${EXECSYND:-${HOMESYND}/exec} +source "${USHgfs}/preamble.sh" cd $DATA @@ -52,8 +45,6 @@ hour=$(echo $CDATE10 | cut -c9-10) echo $PDYm1 pdym1=$PDYm1 -#pdym1=$(sh $utilscript/finddate.sh $pdy d-1) - echo " " >> $pgmout echo "Entering sub-shell syndat_getjtbul.sh to recover JTWC Bulletins" \ >> $pgmout @@ -123,7 +114,7 @@ fi [ -s jtwcbul ] && echo "Processing JTWC bulletin halfs into tcvitals records" >> $pgmout -pgm=$(basename $EXECSYND/syndat_getjtbul.x) +pgm=$(basename ${EXECgfs}/syndat_getjtbul.x) export pgm if [ -s prep_step ]; then set +u @@ -138,7 +129,7 @@ rm -f fnoc export FORT11=jtwcbul export FORT51=fnoc -time -p ${EXECSYND}/${pgm} >> $pgmout 2> errfile +time -p ${EXECgfs}/${pgm} >> $pgmout 2> errfile errget=$? 
###cat errfile cat errfile >> $pgmout diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh index cda9030577..8ec8f70b14 100755 --- a/ush/syndat_qctropcy.sh +++ b/ush/syndat_qctropcy.sh @@ -44,10 +44,6 @@ # COMSP - path to both output jtwc-fnoc file and output tcvitals file (this # tcvitals file is read by subsequent relocation processing and/or # subsequent program SYNDAT_SYNDATA) -# PARMSYND - path to syndat parm field directory -# EXECSYND - path to syndat executable directory -# FIXam - path to syndat fix field directory -# USHSYND - path to syndat ush directory # Imported variables that can be passed in: # ARCHSYND - path to syndat archive directory @@ -59,7 +55,7 @@ # data base # (Default: /dcom/us007003) # slmask - path to t126 32-bit gaussian land/sea mask file -# (Default: $FIXam/syndat_slmask.t126.gaussian) +# (Default: ${FIXgfs}/am/syndat_slmask.t126.gaussian) # copy_back - switch to copy updated files back to archive directory and # to tcvitals directory # (Default: YES) @@ -67,19 +63,13 @@ # (Default: not set) # TIMEIT - optional time and resource reporting (Default: not set) -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" ARCHSYND=${ARCHSYND:-$COMROOTp3/gfs/prod/syndat} -HOMENHCp1=${HOMENHCp1:-/gpfs/?p1/nhc/save/guidance/storm-data/ncep} HOMENHC=${HOMENHC:-/gpfs/dell2/nhc/save/guidance/storm-data/ncep} TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/us007003} -FIXam=${FIXam:-$HOMEgfs/fix/am} -USHSYND=${USHSYND:-$HOMEgfs/ush} -EXECSYND=${EXECSYND:-$HOMEgfs/exec} -PARMSYND=${PARMSYND:-$HOMEgfs/parm/relo} - -slmask=${slmask:-$FIXam/syndat_slmask.t126.gaussian} +slmask=${slmask:-${FIXgfs}/am/syndat_slmask.t126.gaussian} copy_back=${copy_back:-YES} files_override=${files_override:-""} @@ -188,12 +178,12 @@ if [ -n "$files_override" ]; then # for testing, typically want FILES=F fi echo " &INPUT RUNID = '${net}_${tmmark}_${cyc}', FILES = $files " > vitchk.inp -cat $PARMSYND/syndat_qctropcy.${RUN}.parm >> vitchk.inp +cat ${PARMgfs}/relo/syndat_qctropcy.${RUN}.parm >> vitchk.inp -# Copy the fixed fields from FIXam +# Copy the fixed fields -cp $FIXam/syndat_fildef.vit fildef.vit -cp $FIXam/syndat_stmnames stmnames +cp ${FIXgfs}/am/syndat_fildef.vit fildef.vit +cp ${FIXgfs}/am/syndat_stmnames stmnames rm -f nhc fnoc lthistry @@ -205,12 +195,9 @@ rm -f nhc fnoc lthistry # All are input to program syndat_qctropcy # ------------------------------------------------------------------ -if [ -s $HOMENHC/tcvitals ]; then - echo "tcvitals found" >> $pgmout - cp $HOMENHC/tcvitals nhc -elif [ -s $HOMENHCp1/tcvitals ]; then +if [ -s ${HOMENHC}/tcvitals ]; then echo "tcvitals found" >> $pgmout - cp $HOMENHCp1/tcvitals nhc + cp ${HOMENHC}/tcvitals nhc else echo "WARNING: tcvitals not found, create empty tcvitals" >> $pgmout > nhc @@ -221,17 +208,17 @@ touch nhc [ "$copy_back" = 'YES' ] && cat nhc >> $ARCHSYND/syndat_tcvitals.$year mv -f nhc nhc1 -$USHSYND/parse-storm-type.pl nhc1 > nhc +${USHgfs}/parse-storm-type.pl nhc1 > nhc cp -p nhc nhc.ORIG # JTWC/FNOC ... 
execute syndat_getjtbul script to write into working directory # as fnoc; copy to archive -$USHSYND/syndat_getjtbul.sh $CDATE10 +${USHgfs}/syndat_getjtbul.sh $CDATE10 touch fnoc [ "$copy_back" = 'YES' ] && cat fnoc >> $ARCHSYND/syndat_tcvitals.$year mv -f fnoc fnoc1 -$USHSYND/parse-storm-type.pl fnoc1 > fnoc +${USHgfs}/parse-storm-type.pl fnoc1 > fnoc if [ $SENDDBN = YES ]; then $DBNROOT/bin/dbn_alert MODEL SYNDAT_TCVITALS $job $ARCHSYND/syndat_tcvitals.$year @@ -245,7 +232,7 @@ cp $slmask slmask.126 # Execute program syndat_qctropcy -pgm=$(basename $EXECSYND/syndat_qctropcy.x) +pgm=$(basename ${EXECgfs}/syndat_qctropcy.x) export pgm if [ -s prep_step ]; then set +u @@ -259,7 +246,7 @@ fi echo "$CDATE10" > cdate10.dat export FORT11=slmask.126 export FORT12=cdate10.dat -${EXECSYND}/${pgm} >> $pgmout 2> errfile +${EXECgfs}/${pgm} >> $pgmout 2> errfile errqct=$? ###cat errfile cat errfile >> $pgmout @@ -323,28 +310,25 @@ diff nhc nhc.ORIG > /dev/null errdiff=$? ################################### -# Update NHC file in $HOMENHC +# Update NHC file in ${HOMENHC} ################################### if test "$errdiff" -ne '0' then if [ "$copy_back" = 'YES' -a ${envir} = 'prod' ]; then - if [ -s $HOMENHC/tcvitals ]; then - cp nhc $HOMENHC/tcvitals - fi - if [ -s $HOMENHCp1/tcvitals ]; then - cp nhc $HOMENHCp1/tcvitals + if [ -s ${HOMENHC}/tcvitals ]; then + cp nhc ${HOMENHC}/tcvitals fi err=$? if [ "$err" -ne '0' ]; then msg="###ERROR: Previous NHC Synthetic Data Record File \ -$HOMENHC/tcvitals not updated by syndat_qctropcy" +${HOMENHC}/tcvitals not updated by syndat_qctropcy" else msg="Previous NHC Synthetic Data Record File \ -$HOMENHC/tcvitals successfully updated by syndat_qctropcy" +${HOMENHC}/tcvitals successfully updated by syndat_qctropcy" fi set +x @@ -357,7 +341,7 @@ $HOMENHC/tcvitals successfully updated by syndat_qctropcy" else - msg="Previous NHC Synthetic Data Record File $HOMENHC/tcvitals \ + msg="Previous NHC Synthetic Data Record File ${HOMENHC}/tcvitals \ not changed by syndat_qctropcy" set +x echo diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh index 01a21bd12c..11c0afb990 100755 --- a/ush/tropcy_relocate.sh +++ b/ush/tropcy_relocate.sh @@ -84,20 +84,13 @@ # envir String indicating environment under which job runs ('prod' # or 'test') # Default is "prod" -# HOMEALL String indicating parent directory path for some or -# all files under which job runs. -# If the imported variable MACHINE!=sgi, then the default is -# "/nw${envir}"; otherwise the default is -# "/disk1/users/snake/prepobs" -# HOMERELO String indicating parent directory path for relocation -# specific files. 
(May be under HOMEALL) # envir_getges String indicating environment under which GETGES utility -# ush runs (see documentation in $USHGETGES/getges.sh for +# ush runs (see documentation in ${USHgfs}/getges.sh for # more information) # Default is "$envir" # network_getges # String indicating job network under which GETGES utility -# ush runs (see documentation in $USHGETGES/getges.sh for +# ush runs (see documentation in ${USHgfs}/getges.sh for # more information) # Default is "global" unless the center relocation processing # date/time is not a multiple of 3-hrs, then the default is @@ -122,34 +115,20 @@ # POE_OPTS String indicating options to use with poe command # Default is "-pgmmodel mpmd -ilevel 2 -labelio yes \ # -stdoutmode ordered" -# USHGETGES String indicating directory path for GETGES utility ush -# file -# USHRELO String indicating directory path for RELOCATE ush files -# Default is "${HOMERELO}/ush" -# EXECRELO String indicating directory path for RELOCATE executables -# Default is "${HOMERELO}/exec" -# FIXRELO String indicating directory path for RELOCATE data fix- -# field files -# Default is "${HOMERELO}/fix" -# EXECUTIL String indicating directory path for utility program -# executables -# If the imported variable MACHINE!=sgi, then the default is -# "/nwprod/util/exec"; otherwise the default is -# "${HOMEALL}/util/exec" # RELOX String indicating executable path for RELOCATE_MV_NVORTEX # program -# Default is "$EXECRELO/relocate_mv_nvortex" +# Default is "${EXECgfs}/relocate_mv_nvortex" # SUPVX String indicating executable path for SUPVIT utility # program -# Default is "$EXECUTIL/supvit.x" +# Default is "${EXECgfs}/supvit.x" # GETTX String indicating executable path for GETTRK utility # program -# Default is "$EXECUTIL/gettrk" +# Default is "${EXECgfs}/gettrk" # BKGFREQ Frequency of background files for relocation # Default is "3" # SENDDBN String when set to "YES" alerts output files to $COMSP # NDATE String indicating executable path for NDATE utility program -# Default is "$EXECUTIL/ndate" +# Default is "${EXECgfs}/ndate" # # These do not have to be exported to this script. If they are, they will # be used by the script. If they are not, they will be skipped @@ -166,18 +145,18 @@ # # Modules and files referenced: # Herefile: RELOCATE_GES -# $USHRELO/tropcy_relocate_extrkr.sh -# $USHGETGES/getges.sh +# ${USHgfs}/tropcy_relocate_extrkr.sh +# ${USHgfs}/getges.sh # $NDATE (here and in child script -# $USHRELO/tropcy_relocate_extrkr.sh) +# ${USHgfs}/tropcy_relocate_extrkr.sh) # /usr/bin/poe # postmsg # $DATA/prep_step (here and in child script -# $USHRELO/tropcy_relocate_extrkr.sh) +# ${USHgfs}/tropcy_relocate_extrkr.sh) # $DATA/err_exit (here and in child script -# $USHRELO/tropcy_relocate_extrkr.sh) +# ${USHgfs}/tropcy_relocate_extrkr.sh) # $DATA/err_chk (here and in child script -# $USHRELO/tropcy_relocate_extrkr.sh) +# ${USHgfs}/tropcy_relocate_extrkr.sh) # NOTE: The last three scripts above are NOT REQUIRED utilities. # If $DATA/prep_step not found, a scaled down version of it is # executed in-line. 
If $DATA/err_exit or $DATA/err_chk are not @@ -188,7 +167,7 @@ # programs : # RELOCATE_MV_NVORTEX - executable $RELOX # T126 GRIB global land/sea mask: -# $FIXRELO/global_slmask.t126.grb +# ${FIXgfs}/am/global_slmask.t126.grb # SUPVIT - executable $SUPVX # GETTRK - executable $GETTX # @@ -204,7 +183,7 @@ # #### -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" MACHINE=${MACHINE:-$(hostname -s | cut -c 1-3)} @@ -275,14 +254,6 @@ set_trace envir=${envir:-prod} -if [ $MACHINE != sgi ]; then - HOMEALL=${HOMEALL:-$OPSROOT} -else - HOMEALL=${HOMEALL:-/disk1/users/snake/prepobs} -fi - -HOMERELO=${HOMERELO:-${shared_global_home}} - envir_getges=${envir_getges:-$envir} if [ $modhr -eq 0 ]; then network_getges=${network_getges:-global} @@ -295,21 +266,12 @@ pgmout=${pgmout:-/dev/null} tstsp=${tstsp:-/tmp/null/} tmmark=${tmmark:-tm00} -USHRELO=${USHRELO:-${HOMERELO}/ush} -##USHGETGES=${USHGETGES:-/nwprod/util/ush} -##USHGETGES=${USHGETGES:-${HOMERELO}/ush} -USHGETGES=${USHGETGES:-${USHRELO}} - -EXECRELO=${EXECRELO:-${HOMERELO}/exec} - -FIXRELO=${FIXRELO:-${HOMERELO}/fix} - -RELOX=${RELOX:-$EXECRELO/relocate_mv_nvortex} +RELOX=${RELOX:-${EXECgfs}/relocate_mv_nvortex} export BKGFREQ=${BKGFREQ:-1} -SUPVX=${SUPVX:-$EXECRELO/supvit.x} -GETTX=${GETTX:-$EXECRELO/gettrk} +SUPVX=${SUPVX:-${EXECgfs}/supvit.x} +GETTX=${GETTX:-${EXECgfs}/gettrk} ################################################ # EXECUTE TROPICAL CYCLONE RELOCATION PROCESSING @@ -355,7 +317,7 @@ echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo set_trace - $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ + ${USHgfs}/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -f $fhr -t tcvges tcvitals.m${fhr} set +x echo @@ -405,7 +367,7 @@ echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo set_trace - $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ + ${USHgfs}/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -t $stype $sges errges=$? if test $errges -ne 0; then @@ -439,7 +401,7 @@ to center relocation date/time;" # ---------------------------------------------------------------------------- if [ $fhr = "0" ]; then - "${USHGETGES}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${CDATE10}" \ + "${USHgfs}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${CDATE10}" \ -t "${stype}" > "${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" cp "${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" \ "${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.${tmmark}" @@ -459,7 +421,7 @@ echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo set_trace - $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ + ${USHgfs}/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -t $ptype $pges errges=$? if test $errges -ne 0; then @@ -541,7 +503,7 @@ else # $DATA/$RUN.$cycle.relocate.model_track.tm00 # -------------------------------------------- - $USHRELO/tropcy_relocate_extrkr.sh + ${USHgfs}/tropcy_relocate_extrkr.sh err=$? if [ $err -ne 0 ]; then @@ -550,12 +512,12 @@ else set +x echo - echo "$USHRELO/tropcy_relocate_extrkr.sh failed" + echo "${USHgfs}/tropcy_relocate_extrkr.sh failed" echo "ABNORMAL EXIT!!!!!!!!!!!" 
echo set_trace if [ -s $DATA/err_exit ]; then - $DATA/err_exit "Script $USHRELO/tropcy_relocate_extrkr.sh failed" + $DATA/err_exit "Script ${USHgfs}/tropcy_relocate_extrkr.sh failed" else exit 555 fi @@ -569,10 +531,10 @@ else rm fort.* fi - ln -sf $DATA/tcvitals.now1 fort.11 - ln -sf $DATA/model_track.all fort.30 - ln -sf $DATA/rel_inform1 fort.62 - ln -sf $DATA/tcvitals.relocate0 fort.65 + ${NLN} $DATA/tcvitals.now1 fort.11 + ${NLN} $DATA/model_track.all fort.30 + ${NLN} $DATA/rel_inform1 fort.62 + ${NLN} $DATA/tcvitals.relocate0 fort.65 i1=20 i2=53 @@ -586,8 +548,8 @@ else tpref=p$fhr fi - ln -sf $DATA/sg${tpref}prep fort.$i1 - ln -sf $DATA/sg${tpref}prep.relocate fort.$i2 + ${NLN} $DATA/sg${tpref}prep fort.$i1 + ${NLN} $DATA/sg${tpref}prep.relocate fort.$i2 i1=$((i1+1)) i2=$((i2+BKGFREQ)) diff --git a/ush/tropcy_relocate_extrkr.sh b/ush/tropcy_relocate_extrkr.sh index ede2318c4a..18e0851368 100755 --- a/ush/tropcy_relocate_extrkr.sh +++ b/ush/tropcy_relocate_extrkr.sh @@ -3,7 +3,7 @@ # This script is executed by the script tropcy_relocate.sh # -------------------------------------------------------- -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" export machine=${machine:-ZEUS} export machine=$(echo $machine|tr '[a-z]' '[A-Z]') @@ -592,8 +592,8 @@ if [ -s fort.* ]; then rm fort.* fi -ln -s -f ${vdir}/vitals.${symd}${dishh} fort.31 -ln -s -f ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} fort.51 +${NLN} ${vdir}/vitals.${symd}${dishh} fort.31 +${NLN} ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} fort.51 ##$XLF_LINKSSH #if [ -z $XLF_LINKSSH ] ; then @@ -1528,19 +1528,19 @@ if [ -s fort.* ]; then rm fort.* fi -ln -s -f ${gribfile} fort.11 -ln -s -f ${vdir}/tmp.gfs.atcfunix.${symdh} fort.14 -ln -s -f ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} fort.12 -ln -s -f ${ixfile} fort.31 -ln -s -f ${vdir}/trak.${cmodel}.all.${symdh} fort.61 -ln -s -f ${vdir}/trak.${cmodel}.atcf.${symdh} fort.62 -ln -s -f ${vdir}/trak.${cmodel}.radii.${symdh} fort.63 -ln -s -f ${vdir}/trak.${cmodel}.atcfunix.${symdh} fort.64 +${NLN} ${gribfile} fort.11 +${NLN} ${vdir}/tmp.gfs.atcfunix.${symdh} fort.14 +${NLN} ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} fort.12 +${NLN} ${ixfile} fort.31 +${NLN} ${vdir}/trak.${cmodel}.all.${symdh} fort.61 +${NLN} ${vdir}/trak.${cmodel}.atcf.${symdh} fort.62 +${NLN} ${vdir}/trak.${cmodel}.radii.${symdh} fort.63 +${NLN} ${vdir}/trak.${cmodel}.atcfunix.${symdh} fort.64 if [ $BKGFREQ -eq 1 ]; then - ln -s -f ${FIXRELO}/${cmodel}.tracker_leadtimes_hrly fort.15 + ${NLN} ${FIXgfs}/am/${cmodel}.tracker_leadtimes_hrly fort.15 elif [ $BKGFREQ -eq 3 ]; then - ln -s -f ${FIXRELO}/${cmodel}.tracker_leadtimes fort.15 + ${NLN} ${FIXgfs}/am/${cmodel}.tracker_leadtimes fort.15 fi ##$XLF_LINKSSH diff --git a/ush/wafs_mkgbl.sh b/ush/wafs_mkgbl.sh new file mode 100755 index 0000000000..e6139bc9d3 --- /dev/null +++ b/ush/wafs_mkgbl.sh @@ -0,0 +1,152 @@ +# UTILITY SCRIPT NAME : wafs_mkgbl.sh +# AUTHOR : Mary Jacobs +# DATE WRITTEN : 11/06/96 +# +# Abstract: This utility script produces the GFS WAFS +# bulletins. +# +# Input: 2 arguments are passed to this script. +# 1st argument - Forecast Hour - format of 2I +# 2nd argument - In hours 12-30, the designator of +# a or b. +# +# Logic: If we are processing hours 12-30, we have the +# added variable of the a or b, and process +# accordingly. The other hours, the a or b is dropped. 
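The hour/designator rule described in the Logic block above reduces to a single range check; a minimal sketch (illustrative only, written in Python rather than the script's shell, and not part of wafs_mkgbl.sh):

    def wafs_sets(hour: int, sets_key: str) -> str:
        # hours 12-30 keep the 'a'/'b' set designator; all other hours drop it
        return sets_key if 12 <= hour <= 30 else ""

So, for example, hour 18 with "a" keeps the "a" designator, while hour 36 carries no designator at all.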
+# +echo "History: SEPT 1996 - First implementation of this utility script" +echo "History: AUG 1999 - Modified for implementation on IBM SP" +echo " - Allows users to run interactively" +# + +set -x +hour_list="$1" +sets_key=$2 +num=$# + +if test $num -ge 2 +then + echo " Appropriate number of arguments were passed" + set -x + if [ -z "$DATA" ] + then + export DATA=`pwd` + cd $DATA + setpdy.sh + . PDY + fi +else + echo "" + echo "Usage: wafs_mkgbl.sh \$hour [a|b]" + echo "" + exit 16 +fi + +echo " ------------------------------------------" +echo " BEGIN MAKING ${NET} WAFS PRODUCTS" +echo " ------------------------------------------" + +echo "Enter Make WAFS utility." + +for hour in $hour_list +do + ############################## + # Copy Input Field to $DATA + ############################## + + if test ! -f pgrbf${hour} + then +# cp $COMIN/${RUN}.${cycle}.pgrbf${hour} pgrbf${hour} + +# file name and forecast hour of GFS model data in Grib2 are 3 digits +# export fhr3=$hour +# if test $fhr3 -lt 100 +# then +# export fhr3="0$fhr3" +# fi + fhr3="$(printf "%03d" $(( 10#$hour )) )" + +# To solve Bugzilla #408: remove the dependency of grib1 files in gfs wafs job in next GFS upgrade +# Reason: It's not efficent if simply converting from grib2 to grib1 (costs 6 seconds with 415 records) +# Solution: Need to grep 'selected fields on selected levels' before CNVGRIB (costs 1 second with 92 records) + ${NLN} $COMIN/${RUN}.${cycle}.pgrb2.1p00.f$fhr3 pgrb2f${hour} + $WGRIB2 pgrb2f${hour} | grep -F -f $FIXgfs/grib_wafs.grb2to1.list | $WGRIB2 -i pgrb2f${hour} -grib pgrb2f${hour}.tmp +# on Cray, IOBUF_PARAMS has to used to speed up CNVGRIB +# export IOBUF_PARAMS='*:size=32M:count=4:verbose' + $CNVGRIB -g21 pgrb2f${hour}.tmp pgrbf${hour} +# unset IOBUF_PARAMS + fi + + # + # BAG - Put in fix on 20070925 to force the percision of U and V winds + # to default to 1 through the use of the grib_wafs.namelist file. + # + $COPYGB -g3 -i0 -N$FIXgfs/grib_wafs.namelist -x pgrbf${hour} tmp + mv tmp pgrbf${hour} + $GRBINDEX pgrbf${hour} pgrbif${hour} + + ############################## + # Process WAFS + ############################## + + if test $hour -ge '12' -a $hour -le '30' + then + sets=$sets_key + set +x + echo "We are processing the primary and secondary sets of hours." + echo "These sets are the a and b of hours 12-30." + set -x + else + # This is for hours 00/06 and 36-72. + unset sets + fi + + export pgm=wafs_makewafs + . 
prep_step + + export FORT11="pgrbf${hour}" + export FORT31="pgrbif${hour}" + export FORT51="xtrn.wfs${NET}${hour}${sets}" + export FORT53="com.wafs${hour}${sets}" + + startmsg + $EXECgfs/wafs_makewafs.x < $FIXgfs/grib_wfs${NET}${hour}${sets} >>$pgmout 2>errfile + export err=$?;err_chk + + + ############################## + # Post Files to PCOM + ############################## + + if test "$SENDCOM" = 'YES' + then + cp xtrn.wfs${NET}${hour}${sets} $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix +# cp com.wafs${hour}${sets} $PCOM/com.wafs${cyc}${hour}${sets}.$jobsuffix + +# if test "$SENDDBN_NTC" = 'YES' +# then +# if test "$NET" = 'gfs' +# then +# $DBNROOT/bin/dbn_alert MODEL GFS_WAFS $job \ +# $PCOM/com.wafs${cyc}${hour}${sets}.$jobsuffix +# $DBNROOT/bin/dbn_alert MODEL GFS_XWAFS $job \ +# $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix +# fi +# fi + fi + + ############################## + # Distribute Data + ############################## + + if [ "$SENDDBN_NTC" = 'YES' ] ; then + $DBNROOT/bin/dbn_alert GRIB_LOW $NET $job $PCOM/xtrn.wfs${NET}${cyc}${hour}${sets}.$jobsuffix + else + echo "xtrn.wfs${NET}${cyc}${hour}${sets}.$job file not posted to db_net." + fi + + echo "Wafs Processing $hour hour completed normally" + +done + +exit diff --git a/ush/wave_extractvars.sh b/ush/wave_extractvars.sh new file mode 100755 index 0000000000..32ee44986b --- /dev/null +++ b/ush/wave_extractvars.sh @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +################################################################################ +## UNIX Script Documentation Block +## Script name: wave_extractvars.sh +## Script description: Extracts variables from wave products +## and saves these variables in arcdir +####################### +# Main body starts here +####################### + +source "${USHgfs}/preamble.sh" + +subdata=${1} + +[[ -d "${subdata}" ]] || mkdir -p "${subdata}" + +for (( nh = FHOUT_WAV_EXTRACT; nh <= FHMAX_WAV; nh = nh + FHOUT_WAV_EXTRACT )); do + fnh=$(printf "%3.3d" "${nh}") + + infile=${COMIN_WAVE_GRID}/${RUN}wave.t${cyc}z.global.${wavres}.f${fnh}.grib2 + outfile=${subdata}/${RUN}wave.t${cyc}z.global.${wavres}.f${fnh}.grib2 + rm -f "${outfile}" # Remove outfile if it already exists before extraction + + if [[ -f "${infile}" ]]; then # Check if input file exists before extraction + # shellcheck disable=SC2312 + ${WGRIB2} "${infile}" | grep -F -f "${varlist_wav}" | ${WGRIB2} -i "${infile}" -append -grib "${outfile}" + else + echo "WARNING: ${infile} does not exist." + fi + copy_to_comout "${outfile}" "${ARC_RFCST_PROD_WAV}" +done # nh + +exit 0 diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh index af28760269..99f89f3f37 100755 --- a/ush/wave_grib2_sbs.sh +++ b/ush/wave_grib2_sbs.sh @@ -25,7 +25,7 @@ # --------------------------------------------------------------------------- # # 0. Preparations -source "${HOMEgfs}/ush/preamble.sh" +source "${USHgfs}/preamble.sh" # 0.a Basic modes of operation @@ -72,7 +72,7 @@ if [[ -n ${waveMEMB} ]]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi outfile="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${grdnam}.${grdres}.f${FH3}.grib2" # Only create file if not present in COM -if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then +if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then set +x echo ' ' @@ -82,8 +82,8 @@ if [[ ! 
-s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then echo " Model ID : $WAV_MOD_TAG" set_trace - if [[ -z "${PDY}" ]] || [[ -z ${cyc} ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \ - [[ -z "${COM_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${gribflags}" ]] || \ + if [[ -z "${PDY}" ]] || [[ -z ${cyc} ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECgfs}" ]] || \ + [[ -z "${COMOUT_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${gribflags}" ]] || \ [[ -z "${GRIDNR}" ]] || [[ -z "${MODNR}" ]] || \ [[ -z "${SENDDBN}" ]]; then set +x @@ -110,8 +110,8 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then # 0.e Links to working directory - ln -s "${DATA}/mod_def.${grdID}" "mod_def.ww3" - ln -s "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "out_grd.ww3" + ${NLN} "${DATA}/mod_def.${grdID}" "mod_def.ww3" + ${NLN} "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "out_grd.ww3" # --------------------------------------------------------------------------- # # 1. Generate GRIB file with all data @@ -138,11 +138,11 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then set +x echo " Run ww3_grib2" - echo " Executing ${EXECwave}/ww3_grib" + echo " Executing ${EXECgfs}/ww3_grib" set_trace export pgm=ww3_grib;. prep_step - "${EXECwave}/ww3_grib" > "grib2_${grdnam}_${FH3}.out" 2>&1 + "${EXECgfs}/ww3_grib" > "grib2_${grdnam}_${FH3}.out" 2>&1 export err=$?;err_chk if [ ! -s gribfile ]; then @@ -157,11 +157,11 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then fi if (( fhr > 0 )); then - ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COM_WAVE_GRID}/${outfile}" + ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COMOUT_WAVE_GRID}/${outfile}" err=$? else ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" \ - -set table_1.4 1 -set table_1.2 1 -grib "${COM_WAVE_GRID}/${outfile}" + -set table_1.4 1 -set table_1.2 1 -grib "${COMOUT_WAVE_GRID}/${outfile}" err=$? fi @@ -177,7 +177,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then fi # Create index - ${WGRIB2} -s "${COM_WAVE_GRID}/${outfile}" > "${COM_WAVE_GRID}/${outfile}.idx" + ${WGRIB2} -s "${COMOUT_WAVE_GRID}/${outfile}" > "${COMOUT_WAVE_GRID}/${outfile}.idx" # Create grib2 subgrid is this is the source grid if [[ "${grdID}" = "${WAV_SUBGRBSRC}" ]]; then @@ -186,14 +186,14 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then subgrbnam=$(echo ${!subgrb} | cut -d " " -f 21) subgrbres=$(echo ${!subgrb} | cut -d " " -f 22) subfnam="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${subgrbnam}.${subgrbres}.f${FH3}.grib2" - ${COPYGB2} -g "${subgrbref}" -i0 -x "${COM_WAVE_GRID}/${outfile}" "${COM_WAVE_GRID}/${subfnam}" - ${WGRIB2} -s "${COM_WAVE_GRID}/${subfnam}" > "${COM_WAVE_GRID}/${subfnam}.idx" + ${COPYGB2} -g "${subgrbref}" -i0 -x "${COMOUT_WAVE_GRID}/${outfile}" "${COMOUT_WAVE_GRID}/${subfnam}" + ${WGRIB2} -s "${COMOUT_WAVE_GRID}/${subfnam}" > "${COMOUT_WAVE_GRID}/${subfnam}.idx" done fi # 1.e Save in /com - if [[ ! -s "${COM_WAVE_GRID}/${outfile}" ]]; then + if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}" ]]; then set +x echo ' ' echo '********************************************* ' @@ -205,7 +205,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then set_trace exit 4 fi - if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then + if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then set +x echo ' ' echo '*************************************************** ' @@ -220,11 +220,11 @@ if [[ ! 
-s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then if [[ "${SENDDBN}" = 'YES' ]] && [[ ${outfile} != *global.0p50* ]]; then set +x - echo " Alerting GRIB file as ${COM_WAVE_GRID}/${outfile}" - echo " Alerting GRIB index file as ${COM_WAVE_GRID}/${outfile}.idx" + echo " Alerting GRIB file as ${COMOUT_WAVE_GRID}/${outfile}" + echo " Alerting GRIB index file as ${COMOUT_WAVE_GRID}/${outfile}.idx" set_trace - "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2" "${job}" "${COM_WAVE_GRID}/${outfile}" - "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2_WIDX" "${job}" "${COM_WAVE_GRID}/${outfile}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2" "${job}" "${COMOUT_WAVE_GRID}/${outfile}" + "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2_WIDX" "${job}" "${COMOUT_WAVE_GRID}/${outfile}.idx" else echo "${outfile} is global.0p50 or SENDDBN is NO, no alert sent" fi @@ -245,7 +245,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then else set +x echo ' ' - echo " File ${COM_WAVE_GRID}/${outfile} found, skipping generation process" + echo " File ${COMOUT_WAVE_GRID}/${outfile} found, skipping generation process" echo ' ' set_trace fi diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh index c11a75f89d..31b7808c16 100755 --- a/ush/wave_grid_interp_sbs.sh +++ b/ush/wave_grid_interp_sbs.sh @@ -25,7 +25,7 @@ # --------------------------------------------------------------------------- # # 0. Preparations -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" # 0.a Basic modes of operation @@ -65,8 +65,8 @@ source "$HOMEgfs/ush/preamble.sh" echo " Model ID : $WAV_MOD_TAG" set_trace - if [[ -z "${PDY}" ]] || [[ -z "${cyc}" ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \ - [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDDBN}" ]] || \ + if [[ -z "${PDY}" ]] || [[ -z "${cyc}" ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECgfs}" ]] || \ + [[ -z "${COMOUT_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDDBN}" ]] || \ [ -z "${waveGRD}" ] then set +x @@ -75,7 +75,7 @@ source "$HOMEgfs/ush/preamble.sh" echo '*** EXPORTED VARIABLES IN postprocessor NOT SET ***' echo '***************************************************' echo ' ' - echo "${PDY}${cyc} ${cycle} ${EXECwave} ${COM_WAVE_PREP} ${WAV_MOD_TAG} ${SENDDBN} ${waveGRD}" + echo "${PDY}${cyc} ${cycle} ${EXECgfs} ${COMOUT_WAVE_PREP} ${WAV_MOD_TAG} ${SENDDBN} ${waveGRD}" set_trace exit 1 fi @@ -85,18 +85,16 @@ source "$HOMEgfs/ush/preamble.sh" rm -f ${DATA}/output_${ymdh}0000/out_grd.$grdID if [ ! -f ${DATA}/${grdID}_interp.inp.tmpl ]; then - cp $PARMwave/${grdID}_interp.inp.tmpl ${DATA} + cp "${PARMgfs}/wave/${grdID}_interp.inp.tmpl" "${DATA}/${grdID}_interp.inp.tmpl" fi - ln -sf ${DATA}/${grdID}_interp.inp.tmpl . + ${NLN} "${DATA}/${grdID}_interp.inp.tmpl" "${grdID}_interp.inp.tmpl" - for ID in $waveGRD - do - ln -sf ${DATA}/output_${ymdh}0000/out_grd.$ID . + for ID in ${waveGRD}; do + ${NLN} "${DATA}/output_${ymdh}0000/out_grd.${ID}" "out_grd.${ID}" done - for ID in $waveGRD $grdID - do - ln -sf ${DATA}/mod_def.$ID . + for ID in ${waveGRD} ${grdID}; do + ${NLN} "${DATA}/mod_def.${ID}" "mod_def.${ID}" done # --------------------------------------------------------------------------- # @@ -113,42 +111,42 @@ source "$HOMEgfs/ush/preamble.sh" wht_OK='no' if [ ! 
-f ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ]; then - if [ -f $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} ] + if [ -f ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} ] then set +x echo ' ' - echo " Copying $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} " + echo " Copying ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} " set_trace - cp $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} ${DATA} + cp ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} ${DATA} wht_OK='yes' else set +x echo ' ' - echo " Not found: $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} " + echo " Not found: ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} " fi fi # Check and link weights file if [ -f ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ] then - ln -s ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ./WHTGRIDINT.bin + ${NLN} ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ./WHTGRIDINT.bin fi # 1.b Run interpolation code set +x echo " Run ww3_gint - echo " Executing $EXECwave/ww3_gint + echo " Executing ${EXECgfs}/ww3_gint set_trace export pgm=ww3_gint;. prep_step - $EXECwave/ww3_gint 1> gint.${grdID}.out 2>&1 + ${EXECgfs}/ww3_gint 1> gint.${grdID}.out 2>&1 export err=$?;err_chk # Write interpolation file to main TEMP dir area if not there yet if [ "wht_OK" = 'no' ] then cp -f ./WHTGRIDINT.bin ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} - cp -f ./WHTGRIDINT.bin ${FIXwave}/ww3_gint.WHTGRIDINT.bin.${grdID} + cp -f ./WHTGRIDINT.bin ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} fi @@ -173,9 +171,9 @@ source "$HOMEgfs/ush/preamble.sh" # 1.c Save in /com set +x - echo " Saving GRID file as ${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" + echo " Saving GRID file as ${COMOUT_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" set_trace - cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" + cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COMOUT_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" # if [ "$SENDDBN" = 'YES' ] # then diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh index 5b1b212a16..1e8c44054a 100755 --- a/ush/wave_grid_moddef.sh +++ b/ush/wave_grid_moddef.sh @@ -20,7 +20,7 @@ # --------------------------------------------------------------------------- # # 0. Preparations -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" # 0.a Basic modes of operation @@ -59,7 +59,7 @@ source "$HOMEgfs/ush/preamble.sh" # 0.c Define directories and the search path. # The tested variables should be exported by the postprocessor script. - if [ -z "$grdID" ] || [ -z "$EXECwave" ] || [ -z "$wave_sys_ver" ] + if [ -z "$grdID" ] || [ -z "${EXECgfs}" ] then set +x echo ' ' @@ -77,14 +77,22 @@ source "$HOMEgfs/ush/preamble.sh" set +x echo ' ' echo ' Creating mod_def file ...' - echo " Executing $EXECwave/ww3_grid" + echo " Executing ${EXECgfs}/ww3_grid" echo ' ' set_trace rm -f ww3_grid.inp - ln -sf ../ww3_grid.inp.$grdID ww3_grid.inp + ${NLN} ../ww3_grid.inp.$grdID ww3_grid.inp + + if [ -f ../${grdID}.msh ] + then + rm -f ${grdID}.msh + ${NLN} ../${grdID}.msh ${grdID}.msh + fi + + - $EXECwave/ww3_grid 1> grid_${grdID}.out 2>&1 + "${EXECgfs}/ww3_grid" 1> "grid_${grdID}.out" 2>&1 err=$? 
if [ "$err" != '0' ] @@ -99,10 +107,10 @@ source "$HOMEgfs/ush/preamble.sh" exit 3 fi - if [ -f mod_def.ww3 ] + if [[ -f mod_def.ww3 ]] then - cp mod_def.ww3 "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" - mv mod_def.ww3 ../mod_def.$grdID + cp mod_def.ww3 "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" + mv mod_def.ww3 "../mod_def.${grdID}" else set +x echo ' ' @@ -118,6 +126,6 @@ source "$HOMEgfs/ush/preamble.sh" # 3. Clean up cd .. -rm -rf moddef_$grdID +rm -rf "moddef_${grdID}" # End of ww3_mod_def.sh ------------------------------------------------- # diff --git a/ush/wave_outp_cat.sh b/ush/wave_outp_cat.sh index f4bf6b2294..6ce3ce06cf 100755 --- a/ush/wave_outp_cat.sh +++ b/ush/wave_outp_cat.sh @@ -21,7 +21,7 @@ # --------------------------------------------------------------------------- # # 0. Preparations -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" # 0.a Basic modes of operation bloc=$1 diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh index 5acc0f95ab..37accbae49 100755 --- a/ush/wave_outp_spec.sh +++ b/ush/wave_outp_spec.sh @@ -22,7 +22,7 @@ # --------------------------------------------------------------------------- # # 0. Preparations -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" # 0.a Basic modes of operation bloc=$1 @@ -31,6 +31,7 @@ source "$HOMEgfs/ush/preamble.sh" workdir=$4 YMDHE=$($NDATE $FHMAX_WAV_PNT $CDATE) + model_start_date=$(${NDATE} ${OFFSET_START_HOUR} "${PDY}${cyc}") cd $workdir @@ -73,21 +74,7 @@ source "$HOMEgfs/ush/preamble.sh" exit 1 else buoy=$bloc - grep $buoy ${DATA}/buoy_log.ww3 > tmp_list.loc - while read line - do - buoy_name=$(echo $line | awk '{print $2}') - if [ $buoy = $buoy_name ] - then - point=$(echo $line | awk '{ print $1 }') - set +x - echo " Location ID/# : $buoy (${point})" - echo " Spectral output start time : $ymdh " - echo ' ' - set_trace - break - fi - done < tmp_list.loc + point=$(awk "{if (\$2 == \"${buoy}\"){print \$1; exit} }" "${DATA}/buoy_log.ww3") if [ -z "$point" ] then set +x @@ -97,6 +84,11 @@ source "$HOMEgfs/ush/preamble.sh" echo ' ' set_trace exit 2 + else + set +x + echo " Location ID/# : $buoy (${point})" + echo " Spectral output start time : $ymdh " + echo ' ' fi fi @@ -104,7 +96,7 @@ source "$HOMEgfs/ush/preamble.sh" # 0.c Define directories and the search path. # The tested variables should be exported by the postprocessor script. - if [ -z "$CDATE" ] || [ -z "$dtspec" ] || [ -z "$EXECwave" ] || \ + if [ -z "$CDATE" ] || [ -z "$dtspec" ] || [ -z "${EXECgfs}" ] || \ [ -z "$WAV_MOD_TAG" ] || [ -z "${STA_DIR}" ] then set +x @@ -135,8 +127,8 @@ source "$HOMEgfs/ush/preamble.sh" # 0.f Links to mother directory - ln -s ${DATA}/output_${ymdh}0000/mod_def.${waveuoutpGRD} ./mod_def.ww3 - ln -s ${DATA}/output_${ymdh}0000/out_pnt.${waveuoutpGRD} ./out_pnt.ww3 + ${NLN} ${DATA}/output_${ymdh}0000/mod_def.${waveuoutpGRD} ./mod_def.ww3 + ${NLN} ${DATA}/output_${ymdh}0000/out_pnt.${waveuoutpGRD} ./out_pnt.ww3 # --------------------------------------------------------------------------- # # 2. Generate spectral data file @@ -170,11 +162,11 @@ source "$HOMEgfs/ush/preamble.sh" # 2.b Run the postprocessor set +x - echo " Executing $EXECwave/ww3_outp" + echo " Executing ${EXECgfs}/ww3_outp" set_trace export pgm=ww3_outp;. 
prep_step - $EXECwave/ww3_outp 1> outp_${specdir}_${buoy}.out 2>&1 + ${EXECgfs}/ww3_outp 1> outp_${specdir}_${buoy}.out 2>&1 export err=$?;err_chk @@ -196,31 +188,31 @@ source "$HOMEgfs/ush/preamble.sh" if [ -f $outfile ] then - if [ "${ymdh}" = "${CDATE}" ] + if [ "${ymdh}" = "${model_start_date}" ] then if [ "$specdir" = "bull" ] then - cat $outfile | sed -e '9,$d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull - cat $coutfile | sed -e '8,$d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull + sed '9,$d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull" + sed '8,$d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull" else - cat $outfile >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec + cat $outfile >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec" fi elif [ "${ymdh}" = "${YMDHE}" ] then if [ "$specdir" = "bull" ] then - cat $outfile | sed -e '1,7d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull - cat $coutfile | sed -e '1,6d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull + sed '1,7d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull" + sed '1,6d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull" else - cat $outfile | sed -n "/^${YMD} ${HMS}$/,\$p" >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec + sed -n "/^${YMD} ${HMS}$/,\$p" "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec" fi else if [ "$specdir" = "bull" ] then - cat $outfile | sed -e '1,7d' | sed -e '2,$d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull - cat $coutfile | sed -e '1,6d' | sed -e '2,$d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull + sed '8q;d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull" + sed '7q;d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull" else - cat $outfile | sed -n "/^${YMD} ${HMS}$/,\$p" >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec + sed -n "/^${YMD} ${HMS}$/,\$p" "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec" fi fi else @@ -237,6 +229,6 @@ source "$HOMEgfs/ush/preamble.sh" # 3.b Clean up the rest cd .. 
-rm -rf ${specdir}_${bloc} +rm -rf "${specdir}_${bloc}" # End of ww3_outp_spec.sh ---------------------------------------------------- # diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh index 6b1ab19db2..927710c581 100755 --- a/ush/wave_prnc_cur.sh +++ b/ush/wave_prnc_cur.sh @@ -22,7 +22,7 @@ ################################################################################ # -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" ymdh_rtofs=$1 curfile=$2 @@ -46,7 +46,7 @@ mv -f "cur_temp3.nc" "cur_uv_${PDY}_${fext}${fh3}_flat.nc" # Convert to regular lat lon file # If weights need to be regenerated due to CDO ver change, use: # $CDO genbil,r4320x2160 rtofs_glo_2ds_f000_3hrly_prog.nc weights.nc -cp ${FIXwave}/weights_rtofs_to_r4320x2160.nc ./weights.nc +cp ${FIXgfs}/wave/weights_rtofs_to_r4320x2160.nc ./weights.nc # Interpolate to regular 5 min grid ${CDO} remap,r4320x2160,weights.nc "cur_uv_${PDY}_${fext}${fh3}_flat.nc" "cur_5min_01.nc" @@ -65,17 +65,17 @@ rm -f cur_temp[123].nc cur_5min_??.nc "cur_glo_uv_${PDY}_${fext}${fh3}.nc weight if [ ${flagfirst} = "T" ] then - sed -e "s/HDRFL/T/g" ${PARMwave}/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp + sed -e "s/HDRFL/T/g" ${PARMgfs}/wave/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp else - sed -e "s/HDRFL/F/g" ${PARMwave}/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp + sed -e "s/HDRFL/F/g" ${PARMgfs}/wave/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp fi rm -f cur.nc -ln -s "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" "cur.nc" -ln -s "${DATA}/mod_def.${WAVECUR_FID}" ./mod_def.ww3 +${NLN} "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" "cur.nc" +${NLN} "${DATA}/mod_def.${WAVECUR_FID}" ./mod_def.ww3 export pgm=ww3_prnc;. prep_step -$EXECwave/ww3_prnc 1> prnc_${WAVECUR_FID}_${ymdh_rtofs}.out 2>&1 +${EXECgfs}/ww3_prnc 1> prnc_${WAVECUR_FID}_${ymdh_rtofs}.out 2>&1 export err=$?; err_chk diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh index 5ec1d7fc2e..be089c30bd 100755 --- a/ush/wave_prnc_ice.sh +++ b/ush/wave_prnc_ice.sh @@ -27,7 +27,7 @@ # --------------------------------------------------------------------------- # # 0. Preparations -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" # 0.a Basic modes of operation @@ -36,7 +36,7 @@ source "$HOMEgfs/ush/preamble.sh" rm -rf ice mkdir ice cd ice - ln -s ${DATA}/postmsg . + ${NLN} "${DATA}/postmsg" postmsg # 0.b Define directories and the search path. # The tested variables should be exported by the postprocessor script. @@ -55,8 +55,8 @@ source "$HOMEgfs/ush/preamble.sh" echo "Making ice fields." if [[ -z "${YMDH}" ]] || [[ -z "${cycle}" ]] || \ - [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${FIXwave}" ]] || [[ -z "${EXECwave}" ]] || \ - [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COM_OBS}" ]]; then + [[ -z "${COMOUT_WAVE_PREP}" ]] || [[ -z "${FIXgfs}" ]] || [[ -z "${EXECgfs}" ]] || \ + [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COMIN_OBS}" ]]; then set +x echo ' ' @@ -71,13 +71,13 @@ source "$HOMEgfs/ush/preamble.sh" # 0.c Links to working directory - ln -s ${DATA}/mod_def.$WAVEICE_FID mod_def.ww3 + ${NLN} ${DATA}/mod_def.$WAVEICE_FID mod_def.ww3 # --------------------------------------------------------------------------- # # 1. Get the necessary files # 1.a Copy the ice data file - file=${COM_OBS}/${WAVICEFILE} + file=${COMIN_OBS}/${WAVICEFILE} if [ -f $file ] then @@ -144,7 +144,7 @@ source "$HOMEgfs/ush/preamble.sh" export pgm=ww3_prnc;. 
prep_step - $EXECwave/ww3_prnc 1> prnc_${WAVEICE_FID}_${cycle}.out 2>&1 + ${EXECgfs}/ww3_prnc 1> prnc_${WAVEICE_FID}_${cycle}.out 2>&1 export err=$?; err_chk if [ "$err" != '0' ] @@ -178,9 +178,9 @@ source "$HOMEgfs/ush/preamble.sh" fi set +x - echo " Saving ice.ww3 as ${COM_WAVE_PREP}/${icefile}" + echo " Saving ice.ww3 as ${COMOUT_WAVE_PREP}/${icefile}" set_trace - cp ice.ww3 "${COM_WAVE_PREP}/${icefile}" + cp ice.ww3 "${COMOUT_WAVE_PREP}/${icefile}" rm -f ice.ww3 # --------------------------------------------------------------------------- # diff --git a/ush/wave_tar.sh b/ush/wave_tar.sh index 1a8d6d6cc5..f82849854f 100755 --- a/ush/wave_tar.sh +++ b/ush/wave_tar.sh @@ -25,11 +25,11 @@ # --------------------------------------------------------------------------- # # 0. Preparations -source "$HOMEgfs/ush/preamble.sh" +source "${USHgfs}/preamble.sh" # 0.a Basic modes of operation - cd $DATA + cd "${DATA}" echo "Making TAR FILE" alertName=$(echo $RUN|tr [a-z] [A-Z]) @@ -47,7 +47,7 @@ source "$HOMEgfs/ush/preamble.sh" # 0.b Check if type set - if [ "$#" -lt '3' ] + if [[ "$#" -lt '3' ]] then set +x echo ' ' @@ -64,9 +64,9 @@ source "$HOMEgfs/ush/preamble.sh" fi filext=$type - if [ "$type" = "ibp" ]; then filext='spec'; fi - if [ "$type" = "ibpbull" ]; then filext='bull'; fi - if [ "$type" = "ibpcbull" ]; then filext='cbull'; fi + if [[ "$type" = "ibp" ]]; then filext='spec'; fi + if [[ "$type" = "ibpbull" ]]; then filext='bull'; fi + if [[ "$type" = "ibpcbull" ]]; then filext='cbull'; fi rm -rf TAR_${filext}_$ID @@ -76,7 +76,7 @@ source "$HOMEgfs/ush/preamble.sh" # 0.c Define directories and the search path. # The tested variables should be exported by the postprocessor script. - if [[ -z "${cycle}" ]] || [[ -z "${COM_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || \ + if [[ -z "${cycle}" ]] || [[ -z "${COMOUT_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || \ [[ -z "${SENDDBN}" ]] || [[ -z "${STA_DIR}" ]]; then set +x echo ' ' @@ -88,7 +88,7 @@ source "$HOMEgfs/ush/preamble.sh" exit 2 fi - cd ${STA_DIR}/${filext} + cd "${STA_DIR}/${filext}" # --------------------------------------------------------------------------- # # 2. Generate tar file (spectral files are compressed) @@ -98,21 +98,27 @@ source "$HOMEgfs/ush/preamble.sh" echo ' Making tar file ...' set_trace - count=0 countMAX=5 tardone='no' - - while [ "$count" -lt "$countMAX" ] && [ "$tardone" = 'no' ] + sleep_interval=10 + + while [[ "${tardone}" = "no" ]] do nf=$(ls | awk '/'$ID.*.$filext'/ {a++} END {print a}') nbm2=$(( $nb - 2 )) - if [ $nf -ge $nbm2 ] - then - tar -cf $ID.$cycle.${type}_tar ./$ID.*.$filext + if [[ "${nf}" -ge "${nbm2}" ]] + then + + tar -cf "${ID}.${cycle}.${type}_tar" ./${ID}.*.${filext} exit=$? + filename="${ID}.${cycle}.${type}_tar" + if ! wait_for_file "${filename}" "${sleep_interval}" "${countMAX}" ; then + echo "FATAL ERROR: File ${filename} not found after waiting $(( sleep_interval * (countMAX + 1) )) secs" + exit 3 + fi - if [ "$exit" != '0' ] + if [[ "${exit}" != '0' ]] then set +x echo ' ' @@ -124,21 +130,15 @@ source "$HOMEgfs/ush/preamble.sh" exit 3 fi - if [ -f "$ID.$cycle.${type}_tar" ] + if [[ -f "${ID}.${cycle}.${type}_tar" ]] then tardone='yes' fi - else - set +x - echo ' All files not found for tar. Sleeping 10 seconds and trying again ..' 
- set_trace - sleep 10 - count=$(expr $count + 1) fi done - if [ "$tardone" = 'no' ] + if [[ "${tardone}" = 'no' ]] then set +x echo ' ' @@ -150,15 +150,15 @@ source "$HOMEgfs/ush/preamble.sh" exit 3 fi - if [ "$type" = 'spec' ] + if [[ "${type}" = 'spec' ]] then - if [ -s $ID.$cycle.${type}_tar ] + if [[ -s "${ID}.${cycle}.${type}_tar" ]] then - file_name=$ID.$cycle.${type}_tar.gz - /usr/bin/gzip -c $ID.$cycle.${type}_tar > ${file_name} + file_name="${ID}.${cycle}.${type}_tar.gz" + /usr/bin/gzip -c "${ID}.${cycle}.${type}_tar" > "${file_name}" exit=$? - if [ "$exit" != '0' ] + if [[ "${exit}" != '0' ]] then set +x echo ' ' @@ -171,7 +171,7 @@ source "$HOMEgfs/ush/preamble.sh" fi fi else - file_name=$ID.$cycle.${type}_tar + file_name="${ID}.${cycle}.${type}_tar" fi # --------------------------------------------------------------------------- # @@ -179,14 +179,14 @@ source "$HOMEgfs/ush/preamble.sh" set +x echo ' ' - echo " Moving tar file ${file_name} to ${COM_WAVE_STATION} ..." + echo " Moving tar file ${file_name} to ${COMOUT_WAVE_STATION} ..." set_trace - cp "${file_name}" "${COM_WAVE_STATION}/." + cp "${file_name}" "${COMOUT_WAVE_STATION}/." exit=$? - if [ "$exit" != '0' ] + if [[ "${exit}" != '0' ]] then set +x echo ' ' @@ -198,21 +198,21 @@ source "$HOMEgfs/ush/preamble.sh" exit 4 fi - if [ "$SENDDBN" = 'YES' ] + if [[ "${SENDDBN}" = 'YES' ]] then set +x echo ' ' - echo " Alerting TAR file as ${COM_WAVE_STATION}/${file_name}" + echo " Alerting TAR file as ${COMOUT_WAVE_STATION}/${file_name}" echo ' ' set_trace "${DBNROOT}/bin/dbn_alert MODEL" "${alertName}_WAVE_TAR" "${job}" \ - "${COM_WAVE_STATION}/${file_name}" + "${COMOUT_WAVE_STATION}/${file_name}" fi # --------------------------------------------------------------------------- # # 4. Final clean up -cd $DATA +cd "${DATA}" if [[ ${KEEPDATA:-NO} == "NO" ]]; then set -v diff --git a/versions/build.gaea.ver b/versions/build.gaea.ver new file mode 100644 index 0000000000..b92fe8c1db --- /dev/null +++ b/versions/build.gaea.ver @@ -0,0 +1,6 @@ +export stack_intel_ver=2023.1.0 +export stack_cray_mpich_ver=8.1.25 +export spack_env=gsi-addon-dev + +source "${HOMEgfs:-}/versions/run.spack.ver" +export spack_mod_path="/ncrc/proj/epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/build.hercules.ver b/versions/build.hercules.ver index 5513466631..cab0c92111 100644 --- a/versions/build.hercules.ver +++ b/versions/build.hercules.ver @@ -1,3 +1,6 @@ export stack_intel_ver=2021.9.0 export stack_impi_ver=2021.9.0 +export intel_mkl_ver=2023.1.0 +export spack_env=gsi-addon-env source "${HOMEgfs:-}/versions/build.spack.ver" +export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/build.jet.ver b/versions/build.jet.ver index ff85b1a801..55c0ea0bd1 100644 --- a/versions/build.jet.ver +++ b/versions/build.jet.ver @@ -1,3 +1,5 @@ export stack_intel_ver=2021.5.0 export stack_impi_ver=2021.5.1 +export spack_env=gsi-addon-dev source "${HOMEgfs:-}/versions/build.spack.ver" +export spack_mod_path="/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/build.orion.ver b/versions/build.orion.ver index ff85b1a801..834ecfc166 100644 --- a/versions/build.orion.ver +++ b/versions/build.orion.ver @@ -1,3 +1,5 @@ -export stack_intel_ver=2021.5.0 -export stack_impi_ver=2021.5.1 +export stack_intel_ver=2021.9.0 
+export stack_impi_ver=2021.9.0 +export spack_env=gsi-addon-env-rocky9 source "${HOMEgfs:-}/versions/build.spack.ver" +export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/build.s4.ver b/versions/build.s4.ver index a0aae51d87..e2731ccfb3 100644 --- a/versions/build.s4.ver +++ b/versions/build.s4.ver @@ -1,3 +1,5 @@ export stack_intel_ver=2021.5.0 export stack_impi_ver=2021.5.0 +export spack_env=gsi-addon-env source "${HOMEgfs:-}/versions/build.spack.ver" +export spack_mod_path="/data/prod/jedi/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/build.spack.ver b/versions/build.spack.ver index fb5b244bf5..808f85dd16 100644 --- a/versions/build.spack.ver +++ b/versions/build.spack.ver @@ -1,5 +1,4 @@ -export spack_stack_ver=1.5.1 -export spack_env=gsi-addon +export spack_stack_ver=1.6.0 export cmake_ver=3.23.1 @@ -11,7 +10,7 @@ export fms_ver=2023.02.01 export hdf5_ver=1.14.0 export netcdf_c_ver=4.9.2 -export netcdf_fortran_ver=4.6.0 +export netcdf_fortran_ver=4.6.1 export bacio_ver=2.4.1 export nemsio_ver=2.5.4 @@ -19,10 +18,10 @@ export sigio_ver=2.3.2 export w3emc_ver=2.10.0 export bufr_ver=11.7.0 export g2_ver=3.4.5 -export sp_ver=2.3.3 +export sp_ver=2.5.0 export ip_ver=4.3.0 export gsi_ncdiag_ver=1.1.2 export g2tmpl_ver=1.10.2 -export crtm_ver=2.4.0 +export crtm_ver=2.4.0.1 export wgrib2_ver=2.0.8 export grib_util_ver=1.3.0 diff --git a/versions/build.wcoss2.ver b/versions/build.wcoss2.ver index 046ff5c64e..3ae0b3a1cc 100644 --- a/versions/build.wcoss2.ver +++ b/versions/build.wcoss2.ver @@ -28,6 +28,6 @@ export wrf_io_ver=1.2.0 export ncio_ver=1.1.2 export ncdiag_ver=1.0.0 export g2tmpl_ver=1.10.2 -export crtm_ver=2.4.0 +export crtm_ver=2.4.0.1 export upp_ver=10.0.8 diff --git a/versions/fix.ver b/versions/fix.ver index 13d9b56dd2..5ca044ae3d 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -4,19 +4,23 @@ export aer_ver=20220805 export am_ver=20220805 export chem_ver=20220805 -export cice_ver=20231219 +export cice_ver=20240416 export cpl_ver=20230526 export datm_ver=20220805 export gdas_crtm_ver=20220805 export gdas_fv3jedi_ver=20220805 -export gdas_gsibec_ver=20221031 +export gdas_soca_ver=20240624 +export gdas_gsibec_ver=20240416 +export gdas_obs_ver=20240213 export glwu_ver=20220805 -export gsi_ver=20230911 +export gsi_ver=20240208 export lut_ver=20220805 -export mom6_ver=20231219 +export mom6_ver=20240416 export orog_ver=20231027 export reg2grb2_ver=20220805 export sfc_climo_ver=20220805 export ugwd_ver=20220805 export verif_ver=20220805 export wave_ver=20240105 +export orog_nest_ver=global-nest.20240419 +export ugwd_nest_ver=global-nest.20240419 diff --git a/versions/run.gaea.ver b/versions/run.gaea.ver new file mode 100644 index 0000000000..b92fe8c1db --- /dev/null +++ b/versions/run.gaea.ver @@ -0,0 +1,6 @@ +export stack_intel_ver=2023.1.0 +export stack_cray_mpich_ver=8.1.25 +export spack_env=gsi-addon-dev + +source "${HOMEgfs:-}/versions/run.spack.ver" +export spack_mod_path="/ncrc/proj/epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.hera.ver b/versions/run.hera.ver index b358f9d495..34f81bfe96 100644 --- a/versions/run.hera.ver +++ b/versions/run.hera.ver @@ -4,8 +4,10 @@ export spack_env=gsi-addon-dev-rocky8 export hpss_ver=hpss export ncl_ver=6.6.2 -export R_ver=3.5.0 -export gempak_ver=7.4.2 +export R_ver=3.6.1 + +export 
gempak_ver=7.17.0 +export perl_ver=5.38.0 source "${HOMEgfs:-}/versions/run.spack.ver" export spack_mod_path="/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.hercules.ver b/versions/run.hercules.ver index 43f1b2181d..ee8e4f8aea 100644 --- a/versions/run.hercules.ver +++ b/versions/run.hercules.ver @@ -1,12 +1,7 @@ export stack_intel_ver=2021.9.0 export stack_impi_ver=2021.9.0 export intel_mkl_ver=2023.1.0 - -export ncl_ver=6.6.2 -export perl_ver=5.36.0 +export spack_env=gsi-addon-env source "${HOMEgfs:-}/versions/run.spack.ver" - -# wgrib2 and cdo are different on Hercules from all the other systems -export wgrib2_ver=3.1.1 -export cdo_ver=2.2.0 +export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.jet.ver b/versions/run.jet.ver index 18a82cab4f..3aa586ee42 100644 --- a/versions/run.jet.ver +++ b/versions/run.jet.ver @@ -1,9 +1,14 @@ export stack_intel_ver=2021.5.0 export stack_impi_ver=2021.5.1 +export spack_env=gsi-addon-dev-rocky8 export hpss_ver= export ncl_ver=6.6.2 export R_ver=4.0.2 export gempak_ver=7.4.2 +# Adding perl as a module; With Rocky8, perl packages will not be from the OS +export perl_ver=5.38.0 + source "${HOMEgfs:-}/versions/run.spack.ver" +export spack_mod_path="/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.orion.ver b/versions/run.orion.ver index 7671bc028d..59adda6b50 100644 --- a/versions/run.orion.ver +++ b/versions/run.orion.ver @@ -1,11 +1,10 @@ -export stack_intel_ver=2022.0.2 -export stack_impi_ver=2021.5.1 - -export ncl_ver=6.6.2 -export gempak_ver=7.5.1 +export stack_intel_ver=2021.9.0 +export stack_impi_ver=2021.9.0 +export spack_env=gsi-addon-env-rocky9 #For metplus jobs, not currently working with spack-stack #export met_ver=9.1.3 #export metplus_ver=3.1.1 source "${HOMEgfs:-}/versions/run.spack.ver" +export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.s4.ver b/versions/run.s4.ver index 56817ef439..6d0f4cbaca 100644 --- a/versions/run.s4.ver +++ b/versions/run.s4.ver @@ -1,6 +1,8 @@ export stack_intel_ver=2021.5.0 export stack_impi_ver=2021.5.0 +export spack_env=gsi-addon-env export ncl_ver=6.4.0-precompiled source "${HOMEgfs:-}/versions/run.spack.ver" +export spack_mod_path="/data/prod/jedi/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.spack.ver b/versions/run.spack.ver index 80fa6acd1a..9aa5460c80 100644 --- a/versions/run.spack.ver +++ b/versions/run.spack.ver @@ -1,29 +1,35 @@ -export spack_stack_ver=1.5.1 -export spack_env=gsi-addon-dev-rocky8 -export python_ver=3.10.8 +export spack_stack_ver=1.6.0 +export python_ver=3.11.6 export jasper_ver=2.0.32 export libpng_ver=1.6.37 -export cdo_ver=2.0.5 +export cdo_ver=2.2.0 export nco_ver=5.0.6 export hdf5_ver=1.14.0 export netcdf_c_ver=4.9.2 -export netcdf_fortran_ver=4.6.0 +export netcdf_fortran_ver=4.6.1 export bufr_ver=11.7.0 export gsi_ncdiag_ver=1.1.2 export g2tmpl_ver=1.10.2 -export crtm_ver=2.4.0 +export crtm_ver=2.4.0.1 export wgrib2_ver=2.0.8 export grib_util_ver=1.3.0 -export prod_util_ver=1.2.2 +export prod_util_ver=2.1.1 export py_netcdf4_ver=1.5.8 -export py_pyyaml_ver=5.4.1 +export py_pyyaml_ver=6.0 export 
py_jinja2_ver=3.1.2 +export py_pandas_ver=1.5.3 +export py_python_dateutil_ver=2.8.2 +export py_f90nml_ver=1.4.3 + +export met_ver=9.1.3 +export metplus_ver=3.1.1 +export py_xarray_ver=2023.7.0 export obsproc_run_ver=1.1.2 -export prepobs_run_ver=1.0.1 +export prepobs_run_ver=1.0.2 export ens_tracker_ver=feature-GFSv17_com_reorg -export fit2obs_ver=1.0.0 +export fit2obs_ver=1.1.2 diff --git a/versions/run.wcoss2.ver b/versions/run.wcoss2.ver index a188cdea74..7f653dd50e 100644 --- a/versions/run.wcoss2.ver +++ b/versions/run.wcoss2.ver @@ -37,15 +37,17 @@ export bufr_dump_ver=1.0.0 export util_shared_ver=1.4.0 export g2tmpl_ver=1.10.2 export ncdiag_ver=1.0.0 -export crtm_ver=2.4.0 +export crtm_ver=2.4.0.1 export wgrib2_ver=2.0.8 +export met_ver=9.1.3 +export metplus_ver=3.1.1 # Development-only below export obsproc_run_ver=1.1.2 -export prepobs_run_ver=1.0.1 +export prepobs_run_ver=1.0.2 export ens_tracker_ver=feature-GFSv17_com_reorg -export fit2obs_ver=1.0.0 +export fit2obs_ver=1.1.2 export mos_ver=5.4.3 export mos_shared_ver=2.7.2 diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index d45b6a9abc..97a77c2c21 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -3,6 +3,7 @@ from typing import Dict, List, Any from datetime import timedelta from hosts import Host +from pathlib import Path from wxflow import Configuration, to_timedelta from abc import ABC, ABCMeta, abstractmethod @@ -31,7 +32,11 @@ def __init__(self, conf: Configuration) -> None: self.scheduler = Host().scheduler - _base = conf.parse_config('config.base') + # Save the configuration so we can source the config files when + # determining task resources + self.conf = conf + + _base = self.conf.parse_config('config.base') # Define here so the child __init__ functions can use it; will # be overwritten later during _init_finalize(). 
self._base = _base @@ -51,6 +56,7 @@ def __init__(self, conf: Configuration) -> None: self.do_ocean = _base.get('DO_OCN', False) self.do_ice = _base.get('DO_ICE', False) self.do_aero = _base.get('DO_AERO', False) + self.do_prep_obs_aero = _base.get('DO_PREP_OBS_AERO', False) self.do_bufrsnd = _base.get('DO_BUFRSND', False) self.do_gempak = _base.get('DO_GEMPAK', False) self.do_awips = _base.get('DO_AWIPS', False) @@ -64,30 +70,45 @@ def __init__(self, conf: Configuration) -> None: self.do_upp = not _base.get('WRITE_DOPOST', True) self.do_goes = _base.get('DO_GOES', False) self.do_mos = _base.get('DO_MOS', False) + self.do_extractvars = _base.get('DO_EXTRACTVARS', False) self.do_hpssarch = _base.get('HPSSARCH', False) self.nens = _base.get('NMEM_ENS', 0) - self.wave_cdumps = None + self.wave_runs = None if self.do_wave: - wave_cdump = _base.get('WAVE_CDUMP', 'BOTH').lower() - if wave_cdump in ['both']: - self.wave_cdumps = ['gfs', 'gdas'] - elif wave_cdump in ['gfs', 'gdas']: - self.wave_cdumps = [wave_cdump] - - def _init_finalize(self, conf: Configuration): + wave_run = _base.get('WAVE_RUN', 'BOTH').lower() + if wave_run in ['both']: + self.wave_runs = ['gfs', 'gdas'] + elif wave_run in ['gfs', 'gdas']: + self.wave_runs = [wave_run] + + self.aero_anl_runs = None + self.aero_fcst_runs = None + if self.do_aero: + aero_anl_run = _base.get('AERO_ANL_RUN', 'BOTH').lower() + if aero_anl_run in ['both']: + self.aero_anl_runs = ['gfs', 'gdas'] + elif aero_anl_run in ['gfs', 'gdas']: + self.aero_anl_runs = [aero_anl_run] + aero_fcst_run = _base.get('AERO_FCST_RUN', None).lower() + if aero_fcst_run in ['both']: + self.aero_fcst_runs = ['gfs', 'gdas'] + elif aero_fcst_run in ['gfs', 'gdas']: + self.aero_fcst_runs = [aero_fcst_run] + + def _init_finalize(self, *args): print("Finalizing initialize") # Get a list of all possible config_files that would be part of the application self.configs_names = self._get_app_configs() # Source the config_files for the jobs in the application - self.configs = self._source_configs(conf) + self.configs = self.source_configs() # Update the base config dictionary base on application - self.configs['base'] = self._update_base(self.configs['base']) + self.configs['base'] = self.update_base(self.configs['base']) # Save base in the internal state since it is often needed self._base = self.configs['base'] @@ -104,7 +125,7 @@ def _get_app_configs(self): @staticmethod @abstractmethod - def _update_base(base_in: Dict[str, Any]) -> Dict[str, Any]: + def update_base(base_in: Dict[str, Any]) -> Dict[str, Any]: ''' Make final updates to base and return an updated copy @@ -121,9 +142,9 @@ def _update_base(base_in: Dict[str, Any]) -> Dict[str, Any]: ''' pass - def _source_configs(self, conf: Configuration) -> Dict[str, Any]: + def source_configs(self, run: str = "gfs", log: bool = True) -> Dict[str, Any]: """ - Given the configuration object and jobs, + Given the configuration object used to initialize this application, source the configurations for each config and return a dictionary Every config depends on "config.base" """ @@ -131,7 +152,7 @@ def _source_configs(self, conf: Configuration) -> Dict[str, Any]: configs = dict() # Return config.base as well - configs['base'] = conf.parse_config('config.base') + configs['base'] = self.conf.parse_config('config.base') # Source the list of all config_files involved in the application for config in self.configs_names: @@ -145,20 +166,24 @@ def _source_configs(self, conf: Configuration) -> Dict[str, Any]: files += ['config.anal', 
'config.eupd'] elif config in ['efcs']: files += ['config.fcst', 'config.efcs'] + elif config in ['atmanlinit', 'atmanlvar', 'atmanlfv3inc']: + files += ['config.atmanl', f'config.{config}'] + elif config in ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc']: + files += ['config.atmensanl', f'config.{config}'] elif 'wave' in config: files += ['config.wave', f'config.{config}'] else: files += [f'config.{config}'] - print(f'sourcing config.{config}') - configs[config] = conf.parse_config(files) + print(f'sourcing config.{config}') if log else 0 + configs[config] = self.conf.parse_config(files, RUN=run) return configs @abstractmethod def get_task_names(self) -> Dict[str, List[str]]: ''' - Create a list of task names for each CDUMP valid for the configuation. + Create a list of task names for each RUN valid for the configuation. Parameters ---------- @@ -166,7 +191,7 @@ def get_task_names(self) -> Dict[str, List[str]]: Returns ------- - Dict[str, List[str]]: Lists of tasks for each CDUMP. + Dict[str, List[str]]: Lists of tasks for each RUN. ''' pass diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py index b2369e8dfc..364ee2c48b 100644 --- a/workflow/applications/gefs.py +++ b/workflow/applications/gefs.py @@ -14,22 +14,33 @@ def _get_app_configs(self): """ Returns the config_files that are involved in gefs """ - configs = ['stage_ic', 'fcst'] + configs = ['stage_ic', 'fcst', 'atmos_products'] if self.nens > 0: - configs += ['efcs'] + configs += ['efcs', 'atmos_ensstat'] if self.do_wave: - configs += ['waveinit'] + configs += ['waveinit', 'wavepostsbs', 'wavepostpnt'] + if self.do_wave_bnd: + configs += ['wavepostbndpnt', 'wavepostbndpntbll'] + + if self.do_ocean or self.do_ice: + configs += ['oceanice_products'] + + if self.do_aero: + configs += ['prep_emissions'] + + if self.do_extractvars: + configs += ['extractvars'] return configs @staticmethod - def _update_base(base_in): + def update_base(base_in): base_out = base_in.copy() base_out['INTERVAL_GFS'] = AppConfig.get_gfs_interval(base_in['gfs_cyc']) - base_out['CDUMP'] = 'gefs' + base_out['RUN'] = 'gefs' return base_out @@ -40,9 +51,32 @@ def get_task_names(self): if self.do_wave: tasks += ['waveinit'] + if self.do_aero: + tasks += ['prep_emissions'] + tasks += ['fcst'] if self.nens > 0: tasks += ['efcs'] - return {f"{self._base['CDUMP']}": tasks} + tasks += ['atmos_prod'] + + if self.nens > 0: + tasks += ['atmos_ensstat'] + + if self.do_ocean: + tasks += ['ocean_prod'] + + if self.do_ice: + tasks += ['ice_prod'] + + if self.do_wave: + tasks += ['wavepostsbs'] + if self.do_wave_bnd: + tasks += ['wavepostbndpnt', 'wavepostbndpntbll'] + tasks += ['wavepostpnt'] + + if self.do_extractvars: + tasks += ['extractvars'] + + return {f"{self._base['RUN']}": tasks} diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 1ff6cc3723..e049a7d422 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -16,18 +16,19 @@ def __init__(self, conf: Configuration): self.do_jediatmvar = self._base.get('DO_JEDIATMVAR', False) self.do_jediatmens = self._base.get('DO_JEDIATMENS', False) self.do_jediocnvar = self._base.get('DO_JEDIOCNVAR', False) - self.do_jedilandda = self._base.get('DO_JEDILANDDA', False) + self.do_jedisnowda = self._base.get('DO_JEDISNOWDA', False) self.do_mergensst = self._base.get('DO_MERGENSST', False) + self.do_vrfy_oceanda = self._base.get('DO_VRFY_OCEANDA', False) self.lobsdiag_forenkf = False - self.eupd_cdumps = None + self.eupd_runs = 
None if self.do_hybvar: self.lobsdiag_forenkf = self._base.get('lobsdiag_forenkf', False) - eupd_cdump = self._base.get('EUPD_CYC', 'gdas').lower() - if eupd_cdump in ['both']: - self.eupd_cdumps = ['gfs', 'gdas'] - elif eupd_cdump in ['gfs', 'gdas']: - self.eupd_cdumps = [eupd_cdump] + eupd_run = self._base.get('EUPD_CYC', 'gdas').lower() + if eupd_run in ['both']: + self.eupd_runs = ['gfs', 'gdas'] + elif eupd_run in ['gfs', 'gdas']: + self.eupd_runs = [eupd_run] def _get_app_configs(self): """ @@ -37,23 +38,26 @@ def _get_app_configs(self): configs = ['prep'] if self.do_jediatmvar: - configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal'] + configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal'] else: configs += ['anal', 'analdiag'] if self.do_jediocnvar: - configs += ['prepoceanobs', 'ocnanalprep', 'ocnanalbmat', - 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', - 'ocnanalvrfy'] + configs += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] + if self.do_hybvar: + configs += ['ocnanalecen'] + configs += ['ocnanalchkpt', 'ocnanalpost'] + if self.do_vrfy_oceanda: + configs += ['ocnanalvrfy'] - if self.do_ocean: - configs += ['ocnpost'] + if self.do_ocean or self.do_ice: + configs += ['oceanice_products'] configs += ['sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup'] if self.do_hybvar: if self.do_jediatmens: - configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal'] + configs += ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal'] else: configs += ['eobs', 'eomg', 'ediag', 'eupd'] configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] @@ -83,7 +87,9 @@ def _get_app_configs(self): configs += ['metp'] if self.do_gempak: - configs += ['gempak', 'npoess'] + configs += ['gempak'] + if self.do_goes: + configs += ['npoess'] if self.do_bufrsnd: configs += ['postsnd'] @@ -102,9 +108,11 @@ def _get_app_configs(self): if self.do_aero: configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + if self.do_prep_obs_aero: + configs += ['prepobsaero'] - if self.do_jedilandda: - configs += ['preplandobs', 'landanl'] + if self.do_jedisnowda: + configs += ['prepsnowobs', 'snowanl'] if self.do_mos: configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', @@ -115,7 +123,7 @@ def _get_app_configs(self): return configs @staticmethod - def _update_base(base_in): + def update_base(base_in): return GFSCycledAppConfig.get_gfs_cyc_dates(base_in) @@ -130,23 +138,22 @@ def get_task_names(self): gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup'] if self.do_jediatmvar: - gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal'] + gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal'] else: gdas_gfs_common_tasks_before_fcst += ['anal'] if self.do_jediocnvar: - gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep', - 'ocnanalbmat', 'ocnanalrun', - 'ocnanalchkpt', 'ocnanalpost', - 'ocnanalvrfy'] + gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] + if self.do_hybvar: + gdas_gfs_common_tasks_before_fcst += ['ocnanalecen'] + gdas_gfs_common_tasks_before_fcst += ['ocnanalchkpt', 'ocnanalpost'] + if self.do_vrfy_oceanda: + gdas_gfs_common_tasks_before_fcst += ['ocnanalvrfy'] gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] - if self.do_aero: - gdas_gfs_common_tasks_before_fcst += ['aeroanlinit', 'aeroanlrun', 
'aeroanlfinal'] - - if self.do_jedilandda: - gdas_gfs_common_tasks_before_fcst += ['preplandobs', 'landanl'] + if self.do_jedisnowda: + gdas_gfs_common_tasks_before_fcst += ['prepsnowobs', 'snowanl'] wave_prep_tasks = ['waveinit', 'waveprep'] wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] @@ -156,7 +163,7 @@ def get_task_names(self): hybrid_after_eupd_tasks = [] if self.do_hybvar: if self.do_jediatmens: - hybrid_tasks += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'echgres'] + hybrid_tasks += ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'echgres'] else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] @@ -168,16 +175,21 @@ def get_task_names(self): if not self.do_jediatmvar: gdas_tasks += ['analdiag'] - if self.do_wave and 'gdas' in self.wave_cdumps: + if self.do_wave and 'gdas' in self.wave_runs: gdas_tasks += wave_prep_tasks + if self.do_aero and 'gdas' in self.aero_anl_runs: + gdas_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + if self.do_prep_obs_aero: + gdas_tasks += ['prepobsaero'] + gdas_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] if self.do_upp: gdas_tasks += ['atmupp'] - gdas_tasks += ['atmprod'] + gdas_tasks += ['atmos_prod'] - if self.do_wave and 'gdas' in self.wave_cdumps: + if self.do_wave and 'gdas' in self.wave_runs: if self.do_wave_bnd: gdas_tasks += wave_bndpnt_tasks gdas_tasks += wave_post_tasks @@ -202,14 +214,25 @@ def get_task_names(self): # Collect "gfs" cycle tasks gfs_tasks = gdas_gfs_common_tasks_before_fcst.copy() - if self.do_wave and 'gfs' in self.wave_cdumps: + if self.do_wave and 'gfs' in self.wave_runs: gfs_tasks += wave_prep_tasks + if self.do_aero and 'gfs' in self.aero_anl_runs: + gfs_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + if self.do_prep_obs_aero: + gfs_tasks += ['prepobsaero'] + gfs_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] + if self.do_ocean: + gfs_tasks += ['ocean_prod'] + + if self.do_ice: + gfs_tasks += ['ice_prod'] + if self.do_upp: gfs_tasks += ['atmupp'] - gfs_tasks += ['atmprod'] + gfs_tasks += ['atmos_prod'] if self.do_goes: gfs_tasks += ['goesupp'] @@ -229,7 +252,7 @@ def get_task_names(self): if self.do_metp: gfs_tasks += ['metp'] - if self.do_wave and 'gfs' in self.wave_cdumps: + if self.do_wave and 'gfs' in self.wave_runs: if self.do_wave_bnd: gfs_tasks += wave_bndpnt_tasks gfs_tasks += wave_post_tasks @@ -245,11 +268,12 @@ def get_task_names(self): gfs_tasks += ['gempak'] gfs_tasks += ['gempakmeta'] gfs_tasks += ['gempakncdcupapgif'] - gfs_tasks += ['npoess_pgrb2_0p5deg'] - gfs_tasks += ['gempakpgrb2spec'] + if self.do_goes: + gfs_tasks += ['npoess_pgrb2_0p5deg'] + gfs_tasks += ['gempakpgrb2spec'] if self.do_awips: - gfs_tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind'] + gfs_tasks += ['awips_20km_1p0deg', 'fbwind'] if self.do_mos: gfs_tasks += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', @@ -262,15 +286,15 @@ def get_task_names(self): tasks = dict() tasks['gdas'] = gdas_tasks - if self.do_hybvar and 'gdas' in self.eupd_cdumps: + if self.do_hybvar and 'gdas' in self.eupd_runs: enkfgdas_tasks = hybrid_tasks + hybrid_after_eupd_tasks tasks['enkfgdas'] = enkfgdas_tasks - # Add CDUMP=gfs tasks if running early cycle + # Add RUN=gfs tasks if running early cycle if self.gfs_cyc > 0: tasks['gfs'] = gfs_tasks - if self.do_hybvar and 'gfs' in self.eupd_cdumps: + if self.do_hybvar and 'gfs' in self.eupd_runs: enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks 
enkfgfs_tasks.remove("echgres") tasks['enkfgfs'] = enkfgfs_tasks @@ -321,9 +345,4 @@ def get_gfs_cyc_dates(base: Dict[str, Any]) -> Dict[str, Any]: base_out['EDATE_GFS'] = edate_gfs base_out['INTERVAL_GFS'] = interval_gfs - fhmax_gfs = {} - for hh in ['00', '06', '12', '18']: - fhmax_gfs[hh] = base.get(f'FHMAX_GFS_{hh}', base.get('FHMAX_GFS_00', 120)) - base_out['FHMAX_GFS'] = fhmax_gfs - return base_out diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py index 1145863210..caa545d1e1 100644 --- a/workflow/applications/gfs_forecast_only.py +++ b/workflow/applications/gfs_forecast_only.py @@ -25,7 +25,8 @@ def _get_app_configs(self): configs += ['atmos_products'] if self.do_aero: - configs += ['aerosol_init'] + if not self._base['EXP_WARM_START']: + configs += ['aerosol_init'] if self.do_tracker: configs += ['tracker'] @@ -49,7 +50,7 @@ def _get_app_configs(self): configs += ['awips'] if self.do_ocean or self.do_ice: - configs += ['ocnpost'] + configs += ['oceanice_products'] if self.do_wave: configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt'] @@ -69,11 +70,11 @@ def _get_app_configs(self): return configs @staticmethod - def _update_base(base_in): + def update_base(base_in): base_out = base_in.copy() base_out['INTERVAL_GFS'] = AppConfig.get_gfs_interval(base_in['gfs_cyc']) - base_out['CDUMP'] = 'gfs' + base_out['RUN'] = 'gfs' return base_out @@ -87,7 +88,10 @@ def get_task_names(self): tasks = ['stage_ic'] if self.do_aero: - tasks += ['aerosol_init'] + aero_fcst_run = self._base.get('AERO_FCST_RUN', 'BOTH').lower() + if self._base['RUN'] in aero_fcst_run or aero_fcst_run == "both": + if not self._base['EXP_WARM_START']: + tasks += ['aerosol_init'] if self.do_wave: tasks += ['waveinit'] @@ -100,7 +104,10 @@ def get_task_names(self): if self.do_upp: tasks += ['atmupp'] - tasks += ['atmprod'] + tasks += ['atmos_prod'] + + if self.do_goes: + tasks += ['goesupp'] if self.do_goes: tasks += ['goesupp'] @@ -124,10 +131,13 @@ def get_task_names(self): tasks += ['gempak', 'gempakmeta', 'gempakncdcupapgif', 'gempakpgrb2spec'] if self.do_awips: - tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind'] + tasks += ['awips_20km_1p0deg', 'fbwind'] - if self.do_ocean or self.do_ice: - tasks += ['ocnpost'] + if self.do_ocean: + tasks += ['ocean_prod'] + + if self.do_ice: + tasks += ['ice_prod'] if self.do_wave: if self.do_wave_bnd: @@ -146,4 +156,4 @@ def get_task_names(self): tasks += ['arch', 'cleanup'] # arch and cleanup **must** be the last tasks - return {f"{self._base['CDUMP']}": tasks} + return {f"{self._base['RUN']}": tasks} diff --git a/workflow/create_experiment.py b/workflow/create_experiment.py index 7e0f350c0f..1317f7be28 100755 --- a/workflow/create_experiment.py +++ b/workflow/create_experiment.py @@ -11,6 +11,14 @@ The yaml file are simply the arguments for these two scripts. After this scripts runs the experiment is ready for launch. 
+Environmental variables +----------------------- + pslot + Name of the experiment + + RUNTESTS + Root directory where the test EXPDIR and COMROOT will be placed + Output ------ Functionally an experiment is setup as a result running the two scripts described above @@ -18,7 +26,6 @@ """ import os -import sys from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter from pathlib import Path @@ -28,8 +35,6 @@ import setup_expt import setup_xml -from hosts import Host - _here = os.path.dirname(__file__) _top = os.path.abspath(os.path.join(os.path.abspath(_here), '..')) @@ -63,7 +68,9 @@ def input_args(): formatter_class=ArgumentDefaultsHelpFormatter) parser.add_argument( - '--yaml', help='full path to yaml file describing the experiment configuration', type=Path, required=True) + '-y', '--yaml', help='full path to yaml file describing the experiment configuration', type=Path, required=True) + parser.add_argument( + '-o', '--overwrite', help='overwrite previously created experiment', action="store_true", required=False) return parser.parse_args() @@ -77,18 +84,15 @@ def input_args(): data.update(os.environ) testconf = parse_j2yaml(path=user_inputs.yaml, data=data) - if 'skip_ci_on_hosts' in testconf: - host = Host() - if host.machine.lower() in [machine.lower() for machine in testconf.skip_ci_on_hosts]: - logger.info(f'Skipping creation of case: {testconf.arguments.pslot} on {host.machine.capitalize()}') - sys.exit(0) - # Create a list of arguments to setup_expt.py setup_expt_args = [testconf.experiment.system, testconf.experiment.mode] for kk, vv in testconf.arguments.items(): setup_expt_args.append(f"--{kk}") setup_expt_args.append(str(vv)) + if user_inputs.overwrite: + setup_expt_args.append("--overwrite") + logger.info(f"Call: setup_expt.main()") logger.debug(f"setup_expt.py {' '.join(setup_expt_args)}") setup_expt.main(setup_expt_args) diff --git a/workflow/gsl_template_hera.xml b/workflow/gsl_template_hera.xml index 6205d45ed4..8c2257cee4 100644 --- a/workflow/gsl_template_hera.xml +++ b/workflow/gsl_template_hera.xml @@ -6,7 +6,7 @@ Main workflow manager for Global Forecast System NOTES: - This workflow was automatically generated at 2023-06-13 23:31:49.582810 + This workflow was automatically generated at 2024-09-05 15:37:41.961069 --> + @@ -25,7 +26,7 @@ &EXPDIR;/logs/@Y@m@d@H.log - 202401140000 202401140000 24:00:00 + 202409050000 202409050000 24:00:00 @@ -41,32 +42,29 @@ &ROTDIR;/logs/@Y@m@d@H/gfsinit.log - RUN_ENVIRemc - HOMEgfs&HOMEgfs; - EXPDIR&EXPDIR; - ROTDIR&ROTDIR; - ICSDIR&ICSDIR; - CASE&CASE; - COMPONENT&COMPONENT; - NETgfs - CDUMPgfs - RUNgfs - CDATE@Y@m@d@H - PDY@Y@m@d - cyc@H - COMROOT/scratch1/NCEPDEV/global/glopara/com - DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + FHR3#fhr# + COMPONENTatmos - - - &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input - - - &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc - &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc - - + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2f#fhr# + + @@ -79,7 +77,7 @@ gsd-fv3 batch hera - 05:00:00 + 06:00:00 56:ppn=40:tpp=1 &NATIVE_STR; @@ -112,15 +110,13 @@ - _f000-f012 _f018-f030 _f036-f048 _f054-f066 _f072-f084 _f090-f102 _f108-f120 - f012 f030 f048 f066 f084 f102 f120 - f000_f006_f012 f018_f024_f030 
f036_f042_f048 f054_f060_f066 f072_f078_f084 f090_f096_f102 f108_f114_f120 + 000 003 006 009 012 015 018 021 024 027 030 033 036 039 042 045 048 051 054 057 060 063 066 069 072 075 078 081 084 087 090 093 096 099 102 105 108 111 114 117 120 - + &JOBS_DIR;/atmos_products.sh - &PSLOT;_gfsatmprod#grp#_@H + &PSLOT;_gfsatmprod_f#fhr#_@H gsd-fv3 batch hera @@ -128,31 +124,29 @@ 1:ppn=24:tpp=1 &NATIVE_STR; - &ROTDIR;/logs/@Y@m@d@H/gfsatmprod#grp#.log + &ROTDIR;/logs/@Y@m@d@H/gfsatmprod_f#fhr#.log RUN_ENVIRemc HOMEgfs&HOMEgfs; EXPDIR&EXPDIR; - ROTDIR&ROTDIR; NETgfs - CDUMPgfs RUNgfs CDATE@Y@m@d@H PDY@Y@m@d cyc@H COMROOT/scratch1/NCEPDEV/global/glopara/com DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; - FHRLST#lst# + FHR3#fhr# + COMPONENTatmos - &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#dep# + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/master/gfs.t@Hz.master.grb2#fhr# -
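
Editor's illustration (not part of the change set above): the workflow/applications changes replace the WAVE_CDUMP/EUPD_CYC "CDUMP" settings with RUN-based settings (WAVE_RUN, AERO_ANL_RUN, AERO_FCST_RUN, EUPD_CYC) that each expand "BOTH" into both cycles. The sketch below shows that expansion pattern in isolation; the helper name normalize_run_list is hypothetical and the behavior is assumed to match the inlined parsing shown in the diffs to applications.py and gfs_cycled.py.

    # Minimal Python sketch, assuming the same semantics as the inlined parsing
    # in workflow/applications/applications.py and gfs_cycled.py shown above.
    from typing import List, Optional

    def normalize_run_list(value: Optional[str]) -> Optional[List[str]]:
        """Expand a WAVE_RUN / AERO_ANL_RUN / EUPD_CYC style setting into RUN names."""
        if value is None:
            return None
        value = value.lower()
        if value == 'both':            # "BOTH" -> generate tasks for both cycles
            return ['gfs', 'gdas']
        if value in ('gfs', 'gdas'):   # a single RUN name -> one-element list
            return [value]
        return None                    # anything else leaves the run list unset

    # Example: WAVE_RUN="BOTH" in config.base yields wave tasks for gfs and gdas.
    assert normalize_run_list('BOTH') == ['gfs', 'gdas']
    assert normalize_run_list('gdas') == ['gdas']
    assert normalize_run_list(None) is None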