From 233c18815d2e8e6b344f161fd0c102a4fbdfb66d Mon Sep 17 00:00:00 2001 From: Dan Holdaway <27729500+danholdaway@users.noreply.github.com> Date: Mon, 6 May 2024 18:11:35 -0400 Subject: [PATCH 01/10] Single Executable for main GDAS JEDI applications (#2565) Changes that accompany GDAS PR (NOAA-EMC/GDASApp/pull/1075) that allows building of a single gdas executable, which should be more compliant with NCO requirements. Addresses https://github.com/NOAA-EMC/GDASApp/issues/1085 --- parm/config/gfs/config.aeroanl | 2 +- parm/config/gfs/config.atmanl | 2 +- parm/config/gfs/config.atmensanl | 2 +- parm/config/gfs/config.snowanl | 2 +- sorc/gdas.cd | 2 +- sorc/link_workflow.sh | 25 +++------------ ush/python/pygfs/task/aero_analysis.py | 4 ++- ush/python/pygfs/task/analysis.py | 40 ------------------------ ush/python/pygfs/task/atm_analysis.py | 19 ++++++++--- ush/python/pygfs/task/atmens_analysis.py | 4 ++- ush/python/pygfs/task/snow_analysis.py | 18 ++++++++--- 11 files changed, 43 insertions(+), 77 deletions(-) diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index 972f393feb..24a5e92644 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -18,7 +18,7 @@ export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE="${EXECgfs}/fv3jedi_var.x" +export JEDIEXE="${EXECgfs}/gdas.x" if [[ "${DOIAU}" == "YES" ]]; then export aero_bkg_times="3,6,9" diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 7cfd0cb47f..5eb692b473 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -28,6 +28,6 @@ export layout_y_atmanl=@LAYOUT_Y_ATMANL@ export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${EXECgfs}/fv3jedi_var.x +export JEDIEXE=${EXECgfs}/gdas.x echo "END: config.atmanl" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 
8e824b22f6..23eab7f7b9 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -18,6 +18,6 @@ export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@ export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${EXECgfs}/fv3jedi_letkf.x +export JEDIEXE=${EXECgfs}/gdas.x echo "END: config.atmensanl" diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl index 7b3ffa47f3..a2984f190b 100644 --- a/parm/config/gfs/config.snowanl +++ b/parm/config/gfs/config.snowanl @@ -11,7 +11,7 @@ source "${EXPDIR}/config.resources" snowanl export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" # Name of the JEDI executable and its yaml template -export JEDIEXE="${EXECgfs}/fv3jedi_letkf.x" +export JEDIEXE="${EXECgfs}/gdas.x" export JEDIYAML="${PARMgfs}/gdas/snow/letkfoi/letkfoi.yaml.j2" # Ensemble member properties diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 1b07517a22..70f1319139 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 1b07517a22cd569d35ee24d341c15a97fc6ad932 +Subproject commit 70f13191391d0909e92da47dc7d17ddf1dc4c6c6 diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 2a9d9d04db..0041ce083b 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -333,33 +333,16 @@ fi # GDASApp if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then - declare -a JEDI_EXE=("fv3jedi_addincrement.x" \ - "fv3jedi_diffstates.x" \ - "fv3jedi_ensvariance.x" \ - "fv3jedi_hofx.x" \ - "fv3jedi_var.x" \ - "fv3jedi_convertincrement.x" \ - "fv3jedi_dirac.x" \ - "fv3jedi_error_covariance_training.x" \ - "fv3jedi_letkf.x" \ - "fv3jedi_convertstate.x" \ - "fv3jedi_eda.x" \ - "fv3jedi_forecast.x" \ + declare -a JEDI_EXE=("gdas.x" \ + "gdas_soca_gridgen.x" \ + "gdas_soca_error_covariance_toolbox.x" \ + "gdas_soca_setcorscales.x" \ "fv3jedi_plot_field.x" \ - "fv3jedi_data_checker.py" \ - "fv3jedi_enshofx.x" \ - "fv3jedi_hofx_nomodel.x" \ - "fv3jedi_testdata_downloader.py" \ 
"fv3jedi_fv3inc.x" \ "gdas_ens_handler.x" \ "gdas_incr_handler.x" \ "gdas_obsprovider2ioda.x" \ "gdas_socahybridweights.x" \ - "soca_convertincrement.x" \ - "soca_error_covariance_training.x" \ - "soca_setcorscales.x" \ - "soca_gridgen.x" \ - "soca_var.x" \ "bufr2ioda.x" \ "calcfIMS.exe" \ "apply_incr.exe" ) diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index a61b7c82f3..16d2735090 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -109,8 +109,10 @@ def execute(self: Analysis) -> None: chdir(self.task_config.DATA) exec_cmd = Executable(self.task_config.APRUN_AEROANL) - exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') + exec_name = os.path.join(self.task_config.DATA, 'gdas.x') exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('variational') exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 02011423b7..5a516a02c8 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -292,46 +292,6 @@ def get_fv3ens_dict(config: Dict[str, Any]) -> Dict[str, Any]: } return ens_dict - @staticmethod - @logit(logger) - def execute_jediexe(workdir: Union[str, os.PathLike], aprun_cmd: str, jedi_exec: str, jedi_yaml: str) -> None: - """ - Run a JEDI executable - - Parameters - ---------- - workdir : str | os.PathLike - Working directory where to run containing the necessary files and executable - aprun_cmd : str - Launcher command e.g. mpirun -np or srun, etc. - jedi_exec : str - Name of the JEDI executable e.g. fv3jedi_var.x - jedi_yaml : str | os.PathLike - Name of the yaml file to feed the JEDI executable e.g. 
fv3jedi_var.yaml - - Raises - ------ - OSError - Failure due to OS issues - WorkflowException - All other exceptions - """ - - os.chdir(workdir) - - exec_cmd = Executable(aprun_cmd) - exec_cmd.add_default_arg([os.path.join(workdir, jedi_exec), jedi_yaml]) - - logger.info(f"Executing {exec_cmd}") - try: - exec_cmd() - except OSError: - logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}") - raise OSError(f"{exec_cmd}") - except Exception: - logger.exception(f"FATAL ERROR: Error occured during execution of {exec_cmd}") - raise WorkflowException(f"{exec_cmd}") - @staticmethod @logit(logger) def tgz_diags(statfile: str, diagdir: str) -> None: diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index ebeb0c7ba6..47d291268e 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -118,8 +118,10 @@ def variational(self: Analysis) -> None: chdir(self.task_config.DATA) exec_cmd = Executable(self.task_config.APRUN_ATMANLVAR) - exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') + exec_name = os.path.join(self.task_config.DATA, 'gdas.x') exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('variational') exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: @@ -144,10 +146,17 @@ def init_fv3_increment(self: Analysis) -> None: @logit(logger) def fv3_increment(self: Analysis) -> None: # Run executable - self.execute_jediexe(self.runtime_config.DATA, - self.task_config.APRUN_ATMANLFV3INC, - self.task_config.jedi_exe, - self.task_config.jedi_yaml) + exec_cmd = Executable(self.task_config.APRUN_ATMANLFV3INC) + exec_cmd.add_default_arg(self.task_config.jedi_exe) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of 
{exec_cmd}") @logit(logger) def finalize(self: Analysis) -> None: diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 1037b557c2..a1aecfe07c 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -148,8 +148,10 @@ def execute(self: Analysis) -> None: chdir(self.task_config.DATA) exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) - exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') + exec_name = os.path.join(self.task_config.DATA, 'gdas.x') exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('localensembleda') exec_cmd.add_default_arg(self.task_config.jedi_yaml) try: diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index c149f140b6..fe21a67536 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -310,10 +310,20 @@ def execute(self) -> None: AttrDict({key: localconf[key] for key in ['DATA', 'ntiles', 'current_cycle']})) logger.info("Running JEDI LETKF") - self.execute_jediexe(localconf.DATA, - localconf.APRUN_SNOWANL, - os.path.basename(localconf.JEDIEXE), - localconf.jedi_yaml) + exec_cmd = Executable(localconf.APRUN_SNOWANL) + exec_name = os.path.join(localconf.DATA, 'gdas.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('fv3jedi') + exec_cmd.add_default_arg('localensembleda') + exec_cmd.add_default_arg(localconf.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") logger.info("Creating analysis from backgrounds and increments") self.add_increments(localconf) From 9b6f8404ac4507d14adc404b77cfdf002b55e832 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 7 May 2024 00:14:36 -0400 Subject: [PATCH 02/10] 
Add task to prepare emissions for GEFS (#2562) This PR: - introduces a task to prepare emissions for a forecast into the GEFS application. - adds configuration, j-job, rocoto job, ex-script and the python class for this job - updates GEFS workflow to be able to generate the XML to call this job. - updates the `fcst` and `efcs` job dependencies in the GEFS application to depend on `prep_emissions` if aerosols are turned ON. - provides a placeholder for @bbakernoaa to work on the details for preparing emissions. Co-authored-by: Walter Kolczynski - NOAA --- env/HERA.env | 4 ++ env/HERCULES.env | 4 ++ env/JET.env | 4 ++ env/ORION.env | 4 ++ env/S4.env | 4 ++ env/WCOSS2.env | 4 ++ jobs/JGLOBAL_PREP_EMISSIONS | 35 +++++++++++ jobs/rocoto/prep_emissions.sh | 23 +++++++ parm/config/gefs/config.prep_emissions | 11 ++++ parm/config/gefs/config.resources | 8 +++ scripts/exglobal_prep_emissions.py | 25 ++++++++ ush/python/pygfs/__init__.py | 16 +++++ ush/python/pygfs/task/aero_emissions.py | 82 +++++++++++++++++++++++++ workflow/applications/gefs.py | 6 ++ workflow/rocoto/gefs_tasks.py | 29 +++++++++ 15 files changed, 259 insertions(+) create mode 100755 jobs/JGLOBAL_PREP_EMISSIONS create mode 100755 jobs/rocoto/prep_emissions.sh create mode 100644 parm/config/gefs/config.prep_emissions create mode 100755 scripts/exglobal_prep_emissions.py create mode 100644 ush/python/pygfs/task/aero_emissions.py diff --git a/env/HERA.env b/env/HERA.env index 6ce99f8e90..68dbd4d396 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -49,6 +49,10 @@ elif [[ "${step}" = "prepsnowobs" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" +elif [[ "${step}" = "prep_emissions" ]]; then + + export APRUN="${launcher} -n 1" + elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then export CFP_MP="YES" diff --git a/env/HERCULES.env 
b/env/HERCULES.env index da5ad972f2..0b62120536 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -45,6 +45,10 @@ case ${step} in export APRUN_CALCFIMS="${launcher} -n 1" ;; + "prep_emissions") + + export APRUN="${launcher} -n 1" + ;; "waveinit" | "waveprep" | "wavepostsbs" | "wavepostbndpnt" | "wavepostpnt" | "wavepostbndpntbll") export CFP_MP="YES" diff --git a/env/JET.env b/env/JET.env index 3b4c2c2c53..976e42a025 100755 --- a/env/JET.env +++ b/env/JET.env @@ -37,6 +37,10 @@ elif [[ "${step}" = "prepsnowobs" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" +elif [[ "${step}" = "prep_emissions" ]]; then + + export APRUN="${launcher} -n 1" + elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then export CFP_MP="YES" diff --git a/env/ORION.env b/env/ORION.env index 6aac84a169..795346f0c6 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -44,6 +44,10 @@ elif [[ "${step}" = "prepsnowobs" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" +elif [[ "${step}" = "prep_emissions" ]]; then + + export APRUN="${launcher} -n 1" + elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || \ [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostpnt" ]] || [[ "${step}" == "wavepostbndpntbll" ]]; then diff --git a/env/S4.env b/env/S4.env index 9cbf8b7bdb..ce68fddb89 100755 --- a/env/S4.env +++ b/env/S4.env @@ -37,6 +37,10 @@ elif [[ "${step}" = "prepsnowobs" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" +elif [[ "${step}" = "prep_emissions" ]]; then + + export APRUN="${launcher} -n 1" + elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then export CFP_MP="YES" diff --git a/env/WCOSS2.env 
b/env/WCOSS2.env index ba55495655..ff0121e034 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -31,6 +31,10 @@ elif [[ "${step}" = "prepsnowobs" ]]; then export APRUN_CALCFIMS="${launcher} -n 1" +elif [[ "${step}" = "prep_emissions" ]]; then + + export APRUN="${launcher} -n 1" + elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then export USE_CFP="YES" diff --git a/jobs/JGLOBAL_PREP_EMISSIONS b/jobs/JGLOBAL_PREP_EMISSIONS new file mode 100755 index 0000000000..84edac8e50 --- /dev/null +++ b/jobs/JGLOBAL_PREP_EMISSIONS @@ -0,0 +1,35 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "prep_emissions" -c "base prep_emissions" + +############################################## +# Set variables used in the script +############################################## +# TODO: Set local variables used in this script e.g. GDATE may be needed for previous cycle + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +# TODO: Add necessary COMIN, COMOUT variables for this job + +############################################################### +# Run relevant script +EXSCRIPT=${PREP_EMISSIONS_PY:-${SCRgfs}/exglobal_prep_emissions.py} +${EXSCRIPT} +status=$? 
+(( status != 0 )) && ( echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}" ) + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/rocoto/prep_emissions.sh b/jobs/rocoto/prep_emissions.sh new file mode 100755 index 0000000000..0677073947 --- /dev/null +++ b/jobs/rocoto/prep_emissions.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="prep_emissions" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush/python" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_PREP_EMISSIONS" +status=$? +exit "${status}" diff --git a/parm/config/gefs/config.prep_emissions b/parm/config/gefs/config.prep_emissions new file mode 100644 index 0000000000..fa411c27ad --- /dev/null +++ b/parm/config/gefs/config.prep_emissions @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.prep_emissions ########## +# aerosol emissions preprocessing specific + +echo "BEGIN: config.prep_emissions" + +# Get task specific resources +source "${EXPDIR}/config.resources" prep_emissions + +echo "END: config.prep_emissions" diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 9bf62cf514..d98e437359 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -69,6 +69,14 @@ case ${step} in export memory_waveinit="2GB" ;; + "prep_emissions") + export wtime_prep_emissions="00:10:00" + export npe_prep_emissions=1 + export nth_prep_emissions=1 + export npe_node_prep_emissions=$(( npe_node_max / nth_prep_emissions )) + export memory_prep_emissions="1GB" + ;; + "fcst" | "efcs") export is_exclusive=True diff --git a/scripts/exglobal_prep_emissions.py b/scripts/exglobal_prep_emissions.py new file mode 100755 index 0000000000..ef0e709142 --- /dev/null +++ b/scripts/exglobal_prep_emissions.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_prep_emissions.py +# This script creates a emissions object +# which perform the pre-processing for aerosol emissions +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs import AerosolEmissions + + +# Initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the emissions pre-processing task + emissions = AerosolEmissions(config) + emissions.initialize() + emissions.configure() + emissions.execute(emissions.task_config.DATA, emissions.task_config.APRUN) + emissions.finalize() diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py index e69de29bb2..fa6b0b373e 100644 --- a/ush/python/pygfs/__init__.py +++ b/ush/python/pygfs/__init__.py @@ -0,0 +1,16 @@ + +import os 
+ +from .task.analysis import Analysis +from .task.aero_emissions import AerosolEmissions +from .task.aero_analysis import AerosolAnalysis +from .task.atm_analysis import AtmAnalysis +from .task.atmens_analysis import AtmEnsAnalysis +from .task.snow_analysis import SnowAnalysis +from .task.upp import UPP +from .task.oceanice_products import OceanIceProducts +from .task.gfs_forecast import GFSForecast + +__docformat__ = "restructuredtext" +__version__ = "0.1.0" +pygfs_directory = os.path.dirname(__file__) diff --git a/ush/python/pygfs/task/aero_emissions.py b/ush/python/pygfs/task/aero_emissions.py new file mode 100644 index 0000000000..17d2f528e4 --- /dev/null +++ b/ush/python/pygfs/task/aero_emissions.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import Dict, Any, Union +from pprint import pformat + +from wxflow import (AttrDict, + parse_j2yaml, + FileHandler, + Jinja, + logit, + Task, + add_to_datetime, to_timedelta, + WorkflowException, + Executable, which) + +logger = getLogger(__name__.split('.')[-1]) + + +class AerosolEmissions(Task): + """Aerosol Emissions pre-processing Task + """ + + @logit(logger, name="AerosolEmissions") + def __init__(self, config: Dict[str, Any]) -> None: + """Constructor for the Aerosol Emissions task + + Parameters + ---------- + config : Dict[str, Any] + Incoming configuration for the task from the environment + + Returns + ------- + None + """ + super().__init__(config) + + local_variable = "something" + + localdict = AttrDict( + {'variable_used_repeatedly': local_variable} + ) + self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict) + + @staticmethod + @logit(logger) + def initialize() -> None: + """Initialize the work directory + """ + + @staticmethod + @logit(logger) + def configure() -> None: + """Configure the artifacts in the work directory. 
+ Copy run specific data to run directory + """ + + @staticmethod + @logit(logger) + def execute(workdir: Union[str, os.PathLike], aprun_cmd: str) -> None: + """Run the executable (if any) + + Parameters + ---------- + workdir : str | os.PathLike + work directory with the staged data, parm files, namelists, etc. + aprun_cmd : str + launcher command for executable.x + + Returns + ------- + None + """ + + @staticmethod + @logit(logger) + def finalize() -> None: + """Perform closing actions of the task. + Copy data back from the DATA/ directory to COM/ + """ diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py index b14c1a9003..c165f9d1ca 100644 --- a/workflow/applications/gefs.py +++ b/workflow/applications/gefs.py @@ -27,6 +27,9 @@ def _get_app_configs(self): if self.do_ocean or self.do_ice: configs += ['oceanice_products'] + if self.do_aero: + configs += ['prep_emissions'] + return configs @staticmethod @@ -45,6 +48,9 @@ def get_task_names(self): if self.do_wave: tasks += ['waveinit'] + if self.do_aero: + tasks += ['prep_emissions'] + tasks += ['fcst'] if self.nens > 0: diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py index 6ee079fdfa..86be494549 100644 --- a/workflow/rocoto/gefs_tasks.py +++ b/workflow/rocoto/gefs_tasks.py @@ -89,6 +89,27 @@ def waveinit(self): return task + def prep_emissions(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'stage_ic'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('prep_emissions') + task_name = 'prep_emissions' + task_dict = {'task_name': task_name, + 'resources': resources, + 'envars': self.envars, + 'cycledef': 'gefs', + 'command': f'{self.HOMEgfs}/jobs/rocoto/prep_emissions.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + task = rocoto.create_task(task_dict) + + return task + def fcst(self): 
dependencies = [] dep_dict = {'type': 'task', 'name': f'stage_ic'} @@ -98,6 +119,10 @@ def fcst(self): dep_dict = {'type': 'task', 'name': f'wave_init'} dependencies.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_aero: + dep_dict = {'type': 'task', 'name': f'prep_emissions'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) resources = self.get_resource('fcst') @@ -125,6 +150,10 @@ def efcs(self): dep_dict = {'type': 'task', 'name': f'wave_init'} dependencies.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_aero: + dep_dict = {'type': 'task', 'name': f'prep_emissions'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) efcsenvars = self.envars.copy() From 0cf0349c1f88048806e68ab58e93a3261b7a0e95 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Wed, 8 May 2024 02:04:16 -0400 Subject: [PATCH 03/10] Add CI test for products (#2567) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds a new version of the atm3DVar test that runs the full forecast length and produces most of the secondary products. For now, this test will only run on WCOSS due to gempak failures on other machines as well as computational needs. On other machines, the original version will run (the original version will not run on WCOSS). AWIPS remains off for now in this extended test due to a bug involving tocgrib2 and the convective precip fields (see #2566). The new test runs for 4½ cycles and the full 384-hr forecast length to ensure all gempak scripts are exercised. Since the cycle throttle is 3 and the bulk of the time is in the free forecast, the cycles run mostly concurrently so it doesn't extend the total test time too much beyond that of a single 384-hr forecast. 
Fixes a bug in NPOESS that was introduced when the post filenames were reverted to the previous format for the GOES products until the final filenames are determined (#2499). Also removes the AWIPS g2 job from the rocoto mesh to complete the retirement of grib1 products. Resolves #2132 Resolves #2445 --- .gitignore | 1 + ci/cases/pr/C96_atm3DVar.yaml | 3 ++ ci/cases/pr/C96_atm3DVar_extended.yaml | 22 +++++++++++ ci/cases/yamls/gfs_extended_ci.yaml | 12 ++++++ gempak/ush/gfs_meta_opc_na_ver | 4 +- gempak/ush/gfs_meta_opc_np_ver | 4 +- gempak/ush/gfs_meta_ver.sh | 2 +- parm/config/gefs/config.base | 6 +-- parm/config/gefs/yaml/defaults.yaml | 3 ++ parm/config/gfs/config.base | 24 ++++++------ parm/config/gfs/yaml/defaults.yaml | 7 ++++ scripts/exgfs_atmos_grib2_special_npoess.sh | 3 +- versions/run.hera.ver | 2 +- workflow/applications/gfs_cycled.py | 2 +- workflow/applications/gfs_forecast_only.py | 2 +- workflow/prod.yml | 11 ------ workflow/rocoto/gfs_tasks.py | 41 --------------------- workflow/rocoto/tasks.py | 2 +- 18 files changed, 74 insertions(+), 77 deletions(-) create mode 100644 ci/cases/pr/C96_atm3DVar_extended.yaml create mode 100644 ci/cases/yamls/gfs_extended_ci.yaml diff --git a/.gitignore b/.gitignore index 39d140fd65..943ad64e1a 100644 --- a/.gitignore +++ b/.gitignore @@ -106,6 +106,7 @@ parm/post/ice.csv parm/post/ocnicepost.nml.jinja2 parm/ufs/noahmptable.tbl parm/ufs/model_configure.IN +parm/ufs/model_configure_nest.IN parm/ufs/MOM_input_*.IN parm/ufs/MOM6_data_table.IN parm/ufs/ice_in.IN diff --git a/ci/cases/pr/C96_atm3DVar.yaml b/ci/cases/pr/C96_atm3DVar.yaml index d992938f7f..8a89ff25ec 100644 --- a/ci/cases/pr/C96_atm3DVar.yaml +++ b/ci/cases/pr/C96_atm3DVar.yaml @@ -15,3 +15,6 @@ arguments: gfs_cyc: 1 start: cold yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml + +skip_ci_on_hosts: + - wcoss2 diff --git a/ci/cases/pr/C96_atm3DVar_extended.yaml b/ci/cases/pr/C96_atm3DVar_extended.yaml new file mode 100644 index 
0000000000..994d3ef3a0 --- /dev/null +++ b/ci/cases/pr/C96_atm3DVar_extended.yaml @@ -0,0 +1,22 @@ +experiment: + system: gfs + mode: cycled + +arguments: + pslot: {{ 'pslot' | getenv }} + app: ATM + resdetatmos: 96 + comroot: {{ 'RUNTESTS' | getenv }}/COMROOT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 + idate: 2021122018 + edate: 2021122118 + nens: 0 + gfs_cyc: 4 + start: cold + yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_extended_ci.yaml + +skip_ci_on_hosts: + - hera + - orion + - hercules diff --git a/ci/cases/yamls/gfs_extended_ci.yaml b/ci/cases/yamls/gfs_extended_ci.yaml new file mode 100644 index 0000000000..4d4f79e0e8 --- /dev/null +++ b/ci/cases/yamls/gfs_extended_ci.yaml @@ -0,0 +1,12 @@ +defaults: + !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml + +base: + ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }} + DO_GOES: "YES" + DO_BUFRSND: "YES" + DO_GEMPAK: "YES" + DO_AWIPS: "NO" + DO_NPOESS: "YES" + DO_GENESIS_FSU: "NO" + FHMAX_GFS: 384 diff --git a/gempak/ush/gfs_meta_opc_na_ver b/gempak/ush/gfs_meta_opc_na_ver index d38ddacee0..3aaf93db68 100755 --- a/gempak/ush/gfs_meta_opc_na_ver +++ b/gempak/ush/gfs_meta_opc_na_ver @@ -33,8 +33,8 @@ fcsthr="f00" # seq won't give us any splitting problems, ignore warnings # shellcheck disable=SC2207,SC2312 case ${cyc} in - 00 | 12) IFS=$'\n' lookbacks=($(seq 6 6 84) $(seq 96 12 120)) ;; - 06 | 18) IFS=$'\n' lookbacks=($(seq 6 6 84) $(seq 90 12 126)) ;; + 00 | 12) lookbacks=($(IFS=$'\n' seq 6 6 84) $(IFS=$'\n' seq 96 12 120)) ;; + 06 | 18) lookbacks=($(IFS=$'\n' seq 6 6 84) $(IFS=$'\n' seq 90 12 126)) ;; *) echo "FATAL ERROR: Invalid cycle ${cyc} passed to ${BASH_SOURCE[0]}" exit 100 diff --git a/gempak/ush/gfs_meta_opc_np_ver b/gempak/ush/gfs_meta_opc_np_ver index 9446417403..0968b55747 100755 --- a/gempak/ush/gfs_meta_opc_np_ver +++ b/gempak/ush/gfs_meta_opc_np_ver @@ -33,8 +33,8 @@ fcsthr="f00" # seq won't give us any splitting problems, ignore warnings # shellcheck 
disable=SC2207,SC2312 case ${cyc} in - 00 | 12) IFS=$'\n' lookbacks=($(seq 6 6 84) $(seq 96 12 120)) ;; - 06 | 18) IFS=$'\n' lookbacks=($(seq 6 6 84) $(seq 90 12 126)) ;; + 00 | 12) lookbacks=($(IFS=$'\n' seq 6 6 84) $(IFS=$'\n' seq 96 12 120)) ;; + 06 | 18) lookbacks=($(IFS=$'\n' seq 6 6 84) $(IFS=$'\n' seq 90 12 126)) ;; *) echo "FATAL ERROR: Invalid cycle ${cyc} passed to ${BASH_SOURCE[0]}" exit 100 diff --git a/gempak/ush/gfs_meta_ver.sh b/gempak/ush/gfs_meta_ver.sh index 1e00cd3094..eb8b5b15c6 100755 --- a/gempak/ush/gfs_meta_ver.sh +++ b/gempak/ush/gfs_meta_ver.sh @@ -32,7 +32,7 @@ MDL2="GFSHPC" #GENERATING THE METAFILES. # seq won't give us any splitting problems, ignore warnings # shellcheck disable=SC2207,SC2312 -IFS=$'\n' lookbacks=($(seq 6 6 180) $(seq 192 12 216)) +lookbacks=($(IFS=$'\n' seq 6 6 180) $(IFS=$'\n' seq 192 12 216)) for lookback in "${lookbacks[@]}"; do init_time="$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${lookback} hours")" init_PDY=${init_time:0:8} diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index bc37c5abbc..90a75e3639 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -47,9 +47,9 @@ export NOSCRUB="@NOSCRUB@" export BASE_GIT="@BASE_GIT@" # Toggle to turn on/off GFS downstream processing. -export DO_BUFRSND="NO" # BUFR sounding products -export DO_GEMPAK="NO" # GEMPAK products -export DO_AWIPS="NO" # AWIPS products +export DO_BUFRSND="@DO_BUFRSND@" # BUFR sounding products +export DO_GEMPAK="@DO_GEMPAK@" # GEMPAK products +export DO_AWIPS="@DO_AWIPS@" # AWIPS products # NO for retrospective parallel; YES for real-time parallel # arch.sh uses REALTIME for MOS. 
Need to set REALTIME=YES diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml index d252e0d1b2..5c763ad29e 100644 --- a/parm/config/gefs/yaml/defaults.yaml +++ b/parm/config/gefs/yaml/defaults.yaml @@ -4,6 +4,9 @@ base: DO_JEDIOCNVAR: "NO" DO_JEDISNOWDA: "NO" DO_MERGENSST: "NO" + DO_BUFRSND: "NO" + DO_GEMPAK: "NO" + DO_AWIPS: "NO" KEEPDATA: "NO" FHMAX_GFS: 120 USE_OCN_PERTURB_FILES: "false" diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index ab35f717cb..8ee1a2c17e 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -65,18 +65,18 @@ export NOSCRUB="@NOSCRUB@" export BASE_GIT="@BASE_GIT@" # Toggle to turn on/off GFS downstream processing. -export DO_GOES="@DO_GOES@" # GOES products -export DO_BUFRSND="NO" # BUFR sounding products -export DO_GEMPAK="NO" # GEMPAK products -export DO_AWIPS="NO" # AWIPS products -export DO_NPOESS="NO" # NPOESS products -export DO_TRACKER="YES" # Hurricane track verification -export DO_GENESIS="YES" # Cyclone genesis verification -export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) -export DO_VERFOZN="YES" # Ozone data assimilation monitoring -export DO_VERFRAD="YES" # Radiance data assimilation monitoring -export DO_VMINMON="YES" # GSI minimization monitoring -export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 +export DO_GOES="@DO_GOES@" # GOES products +export DO_BUFRSND="@DO_BUFRSND@" # BUFR sounding products +export DO_GEMPAK="@DO_GEMPAK@" # GEMPAK products +export DO_AWIPS="@DO_AWIPS@" # AWIPS products +export DO_NPOESS="@DO_NPOESS@" # NPOESS products +export DO_TRACKER="@DO_TRACKER@" # Hurricane track verification +export DO_GENESIS="@DO_GENESIS@" # Cyclone genesis verification +export DO_GENESIS_FSU="@DO_GENESIS_FSU@" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI 
minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 # NO for retrospective parallel; YES for real-time parallel # arch.sh uses REALTIME for MOS. Need to set REALTIME=YES diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index 445fee144e..bdb5f47f04 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -6,6 +6,13 @@ base: DO_JEDISNOWDA: "NO" DO_MERGENSST: "NO" DO_GOES: "NO" + DO_BUFRSND: "NO" + DO_GEMPAK: "NO" + DO_AWIPS: "NO" + DO_NPOESS: "NO" + DO_TRACKER: "YES" + DO_GENESIS: "YES" + DO_GENESIS_FSU: "NO" FHMAX_GFS: 120 DO_VRFY_OCEANDA: "NO" GSI_SOILANAL: "NO" diff --git a/scripts/exgfs_atmos_grib2_special_npoess.sh b/scripts/exgfs_atmos_grib2_special_npoess.sh index 3877b50b77..8d182469ed 100755 --- a/scripts/exgfs_atmos_grib2_special_npoess.sh +++ b/scripts/exgfs_atmos_grib2_special_npoess.sh @@ -153,7 +153,8 @@ for (( fhr=SHOUR; fhr <= FHOUR; fhr = fhr + FHINC )); do # existence of the restart files ############################### export pgm="postcheck" - grib_file="${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.goesmasterf${fhr3}.grb2" + # grib_file="${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.goesmasterf${fhr3}.grb2" + grib_file="${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.special.grb2f${fhr3}" if ! 
wait_for_file "${grib_file}" "${SLEEP_INT}" "${SLEEP_LOOP_MAX}"; then echo "FATAL ERROR: GOES master grib file ${grib_file} not available after max sleep time" export err=9 diff --git a/versions/run.hera.ver b/versions/run.hera.ver index d612619acc..6280e8e115 100644 --- a/versions/run.hera.ver +++ b/versions/run.hera.ver @@ -4,7 +4,7 @@ export spack_env=gsi-addon-dev-rocky8 export hpss_ver=hpss export ncl_ver=6.6.2 -export R_ver=3.5.0 +export R_ver=3.6.1 export gempak_ver=7.17.0 export perl_ver=5.38.0 diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index c2a6a32f02..4d785bc4da 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -264,7 +264,7 @@ def get_task_names(self): gfs_tasks += ['gempakpgrb2spec'] if self.do_awips: - gfs_tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind'] + gfs_tasks += ['awips_20km_1p0deg', 'fbwind'] if self.do_mos: gfs_tasks += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py index 0a9648ee65..89881af8c9 100644 --- a/workflow/applications/gfs_forecast_only.py +++ b/workflow/applications/gfs_forecast_only.py @@ -124,7 +124,7 @@ def get_task_names(self): tasks += ['gempak', 'gempakmeta', 'gempakncdcupapgif', 'gempakpgrb2spec'] if self.do_awips: - tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind'] + tasks += ['awips_20km_1p0deg', 'fbwind'] if self.do_ocean: tasks += ['ocean_prod'] diff --git a/workflow/prod.yml b/workflow/prod.yml index 64783dd611..55717772b5 100644 --- a/workflow/prod.yml +++ b/workflow/prod.yml @@ -113,17 +113,6 @@ suites: jgfs_atmos_awips_f( 3,27,6 ): edits: TRDRUN: 'NO' - awips_g2: - tasks: - jgfs_atmos_awips_g2_f( 0,64,6 ): - template: jgfs_atmos_awips_g2_master - triggers: - - task: jgfs_atmos_post_f( ) - edits: - FHRGRP: '( )' - FHRLST: 'f( )' - FCSTHR: '( )' - TRDRUN: 'YES' gempak: tasks: jgfs_atmos_gempak: diff 
--git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index bba7bac3dd..6125a33dec 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -1433,47 +1433,6 @@ def awips_20km_1p0deg(self): return task - def awips_g2(self): - - deps = [] - dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - - awipsenvars = self.envars.copy() - awipsenvar_dict = {'FHRGRP': '#grp#', - 'FHRLST': '#lst#', - 'ROTDIR': self.rotdir} - for key, value in awipsenvar_dict.items(): - awipsenvars.append(rocoto.create_envar(name=key, value=str(value))) - - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = self._get_awipsgroups(self.cdump, self._configs['awips']) - var_dict = {varname1: varval1, varname2: varval2, varname3: varval3} - - resources = self.get_resource('awips') - - task_name = f'{self.cdump}awips_g2#{varname1}#' - task_dict = {'task_name': task_name, - 'resources': resources, - 'dependency': dependencies, - 'envars': awipsenvars, - 'cycledef': self.cdump.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/awips_g2.sh', - 'job_name': f'{self.pslot}_{task_name}_@H', - 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', - 'maxtries': '&MAXTRIES;' - } - - metatask_dict = {'task_name': f'{self.cdump}awips_g2', - 'task_dict': task_dict, - 'var_dict': var_dict - } - - task = rocoto.create_task(metatask_dict) - - return task - def gempak(self): deps = [] diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 3abae9b5b7..a8b4eb9fac 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -27,7 +27,7 @@ class Tasks: 'verfozn', 'verfrad', 'vminmon', 'metp', 'tracker', 'genesis', 'genesis_fsu', - 'postsnd', 'awips_g2', 'awips_20km_1p0deg', 'fbwind', + 'postsnd', 'awips_20km_1p0deg', 'fbwind', 'gempak', 'gempakmeta', 'gempakmetancdc', 'gempakncdcupapgif', 
'gempakpgrb2spec', 'npoess_pgrb2_0p5deg' 'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep', From b405b7d3d11d384ce9fe3b9cd2180f315f7b38f2 Mon Sep 17 00:00:00 2001 From: Dan Holdaway <27729500+danholdaway@users.noreply.github.com> Date: Wed, 8 May 2024 20:52:48 -0400 Subject: [PATCH 04/10] Use JCB for assembling JEDI YAML files for atmospheric GDAS (#2477) Change the JEDI YAML assembly for the atmospheric GDAS to use the JEDI Configuration Builder (JCB) tool so that YAMLs can be made more portable and invoke the observation chronicle mechanism. Resolves #2476 Co-authored-by: danholdaway Co-authored-by: Walter Kolczynski - NOAA --- .gitignore | 7 +++++ .gitmodules | 4 +++ parm/config/gfs/config.atmanl | 10 ++++--- parm/config/gfs/config.atmanlfv3inc | 2 +- parm/config/gfs/config.atmensanl | 5 ++-- sorc/gdas.cd | 2 +- sorc/jcb | 1 + sorc/link_workflow.sh | 17 +++++++---- ush/python/pygfs/task/analysis.py | 38 ++++++++++++++++++++---- ush/python/pygfs/task/atm_analysis.py | 8 +++-- ush/python/pygfs/task/atmens_analysis.py | 3 ++ 11 files changed, 76 insertions(+), 21 deletions(-) create mode 160000 sorc/jcb diff --git a/.gitignore b/.gitignore index 943ad64e1a..04193fca0a 100644 --- a/.gitignore +++ b/.gitignore @@ -48,6 +48,8 @@ parm/gdas/io parm/gdas/ioda parm/gdas/snow parm/gdas/soca +parm/gdas/jcb-gdas +parm/gdas/jcb-algorithms parm/monitor parm/post/AEROSOL_LUTS.dat parm/post/nam_micro_lookup.dat @@ -195,3 +197,8 @@ versions/run.ver ush/python/wxflow workflow/wxflow ci/scripts/wxflow + +# jcb checkout and symlinks +ush/python/jcb +workflow/jcb +ci/scripts/jcb diff --git a/.gitmodules b/.gitmodules index 4851e232ee..ea1b5c06af 100644 --- a/.gitmodules +++ b/.gitmodules @@ -30,3 +30,7 @@ path = sorc/upp.fd url = https://github.com/NOAA-EMC/UPP.git ignore = dirty +[submodule "sorc/jcb"] + path = sorc/jcb + url = https://github.com/noaa-emc/jcb + fetchRecurseSubmodules = 
false diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 5eb692b473..dd8ca80b11 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -5,18 +5,20 @@ echo "BEGIN: config.atmanl" -export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/gdas_prototype_3d.yaml.j2" -export JEDIYAML="${PARMgfs}/gdas/atm/variational/3dvar_drpcg.yaml.j2" +export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2" + export STATICB_TYPE="gsibec" +export LOCALIZATION_TYPE="bump" export INTERP_METHOD='barycentric' if [[ ${DOHYBVAR} = "YES" ]]; then # shellcheck disable=SC2153 export CASE_ANL=${CASE_ENS} - export BERROR_YAML="${PARMgfs}/gdas/atm/berror/hybvar_${STATICB_TYPE}.yaml.j2" + export BERROR_YAML="background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}" else export CASE_ANL=${CASE} - export BERROR_YAML="${PARMgfs}/gdas/atm/berror/staticb_${STATICB_TYPE}.yaml.j2" + export BERROR_YAML="background_error_static_${STATICB_TYPE}" fi export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" diff --git a/parm/config/gfs/config.atmanlfv3inc b/parm/config/gfs/config.atmanlfv3inc index 14c11d3dd3..ab7efa3a60 100644 --- a/parm/config/gfs/config.atmanlfv3inc +++ b/parm/config/gfs/config.atmanlfv3inc @@ -8,7 +8,7 @@ echo "BEGIN: config.atmanlfv3inc" # Get task specific resources . 
"${EXPDIR}/config.resources" atmanlfv3inc -export JEDIYAML=${PARMgfs}/gdas/atm/utils/fv3jedi_fv3inc_variational.yaml.j2 +export JCB_ALGO=fv3jedi_fv3inc_variational export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x echo "END: config.atmanlfv3inc" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 23eab7f7b9..3484cb670d 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -5,8 +5,9 @@ echo "BEGIN: config.atmensanl" -export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/lgetkf_prototype.yaml.j2" -export JEDIYAML="${PARMgfs}/gdas/atm/lgetkf/lgetkf.yaml.j2" +export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/atm/jcb-prototype_lgetkf.yaml.j2" + export INTERP_METHOD='barycentric' export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 70f1319139..2b2d417a96 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 70f13191391d0909e92da47dc7d17ddf1dc4c6c6 +Subproject commit 2b2d417a96528527d7d3e7eedaccf150dc075d92 diff --git a/sorc/jcb b/sorc/jcb new file mode 160000 index 0000000000..de75655d81 --- /dev/null +++ b/sorc/jcb @@ -0,0 +1 @@ +Subproject commit de75655d81ec2ee668d8d47bf4a43625c81dde7c diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 0041ce083b..c5d7243e8f 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -88,11 +88,17 @@ if [[ "${LINK_NEST:-OFF}" == "ON" ]] ; then source "${HOMEgfs}/versions/fix.nest.ver" fi -# Link wxflow in ush/python, workflow and ci/scripts +# Link python pacakges in ush/python +# TODO: This will be unnecessary when these are part of the virtualenv +packages=("wxflow" "jcb") +for package in "${packages[@]}"; do + cd "${HOMEgfs}/ush/python" || exit 1 + [[ -s "${package}" ]] && rm -f "${package}" + ${LINK} "${HOMEgfs}/sorc/${package}/src/${package}" . 
+done + +# Link wxflow in workflow and ci/scripts # TODO: This will be unnecessary when wxflow is part of the virtualenv -cd "${HOMEgfs}/ush/python" || exit 1 -[[ -s "wxflow" ]] && rm -f wxflow -${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . cd "${HOMEgfs}/workflow" || exit 1 [[ -s "wxflow" ]] && rm -f wxflow ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . @@ -100,7 +106,6 @@ cd "${HOMEgfs}/ci/scripts" || exit 1 [[ -s "wxflow" ]] && rm -f wxflow ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . - # Link fix directories if [[ -n "${FIX_DIR}" ]]; then if [[ ! -d "${HOMEgfs}/fix" ]]; then mkdir "${HOMEgfs}/fix" || exit 1; fi @@ -228,7 +233,7 @@ fi #------------------------------ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then cd "${HOMEgfs}/parm/gdas" || exit 1 - declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "soca") + declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "soca" "jcb-gdas" "jcb-algorithms") for comp in "${gdasapp_comps[@]}"; do [[ -d "${comp}" ]] && rm -rf "${comp}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/parm/${comp}" . 
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 5a516a02c8..5464c25370 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -6,8 +6,9 @@ from logging import getLogger from pprint import pformat from netCDF4 import Dataset -from typing import List, Dict, Any, Union +from typing import List, Dict, Any, Union, Optional +from jcb import render from wxflow import (parse_j2yaml, FileHandler, rm_p, logit, Task, Executable, WorkflowException, to_fv3time, to_YMD, Template, TemplateConstants) @@ -46,11 +47,14 @@ def initialize(self) -> None: self.link_jediexe() @logit(logger) - def get_jedi_config(self) -> Dict[str, Any]: + def get_jedi_config(self, algorithm: Optional[str] = None) -> Dict[str, Any]: """Compile a dictionary of JEDI configuration from JEDIYAML template file Parameters ---------- + algorithm (optional) : str + Name of the algorithm to use in the JEDI configuration. Will override the algorithm + set in the self.config.JCB_<>_YAML file Returns ---------- @@ -60,7 +64,31 @@ def get_jedi_config(self) -> Dict[str, Any]: # generate JEDI YAML file logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}") - jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) + + if 'JCB_BASE_YAML' in self.task_config.keys(): + # Step 1: fill templates of the jcb base YAML file + jcb_config = parse_j2yaml(self.task_config.JCB_BASE_YAML, self.task_config) + + # Step 2: (optional) fill templates of algorithm override YAML and merge + if 'JCB_ALGO_YAML' in self.task_config.keys(): + jcb_algo_config = parse_j2yaml(self.task_config.JCB_ALGO_YAML, self.task_config) + jcb_config = {**jcb_config, **jcb_algo_config} + + # If algorithm is present override the algorithm in the JEDI config + if algorithm: + jcb_config['algorithm'] = algorithm + + # Step 3: generate the JEDI Yaml using JCB driving YAML + jedi_config = render(jcb_config) + elif 
'JEDIYAML' in self.task_config.keys(): + # Generate JEDI YAML file (without using JCB) + logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}") + jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, + searchpath=self.gdasapp_j2tmpl_dir) + logger.debug(f"JEDI config:\n{pformat(jedi_config)}") + else: + raise KeyError(f"Task config must contain JCB_BASE_YAML or JEDIYAML") + logger.debug(f"JEDI config:\n{pformat(jedi_config)}") return jedi_config @@ -82,7 +110,7 @@ def get_obs_dict(self) -> Dict[str, Any]: a dictionary containing the list of observation files to copy for FileHandler """ - logger.info(f"Extracting a list of observation files from {self.task_config.JEDIYAML}") + logger.info(f"Extracting a list of observation files from Jedi config file") observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') logger.debug(f"observations:\n{pformat(observations)}") @@ -116,7 +144,7 @@ def get_bias_dict(self) -> Dict[str, Any]: a dictionary containing the list of observation bias files to copy for FileHandler """ - logger.info(f"Extracting a list of bias correction files from {self.task_config.JEDIYAML}") + logger.info(f"Extracting a list of bias correction files from Jedi config file") observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') logger.debug(f"observations:\n{pformat(observations)}") diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 47d291268e..95545c57a4 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -49,6 +49,9 @@ def __init__(self, config): 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", 'jedi_yaml': _jedi_yaml, + 'atm_obsdatain_path': f"{self.runtime_config.DATA}/obs/", + 'atm_obsdataout_path': 
f"{self.runtime_config.DATA}/diags/", + 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) @@ -137,8 +140,9 @@ def variational(self: Analysis) -> None: @logit(logger) def init_fv3_increment(self: Analysis) -> None: # Setup JEDI YAML file - self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, os.path.basename(self.task_config.JEDIYAML)) - save_as_yaml(self.get_jedi_config(), self.task_config.jedi_yaml) + self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, + f"{self.task_config.JCB_ALGO}.yaml") + save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) # Link JEDI executable to run directory self.task_config.jedi_exe = self.link_jediexe() diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index a1aecfe07c..5aaacc42e8 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -46,6 +46,9 @@ def __init__(self, config): 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", 'jedi_yaml': _jedi_yaml, + 'atm_obsdatain_path': f"./obs/", + 'atm_obsdataout_path': f"./diags/", + 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) From c7b3973014480a20dd8e24edaeb83a9e9e68159f Mon Sep 17 00:00:00 2001 From: Jessica Meixner Date: Thu, 9 May 2024 11:36:58 -0400 Subject: [PATCH 05/10] Updates for cold start half cycle, then continuing with IAU for WCDA (#2560) This PR allows us to run C384 S2S with IAU, but starting with the first half-cycle as a cold-start. This will be necessary for cycled testing as we build towards the full system for GFSv17. This updates the copying of the restarts for RUN=gdas for both ocean and ice copying what the atm model is doing. It also reduced the amount of restart files from 4 to 3. 
Other updates: * Add DOJEDI ocean triggers for archiving certain files update from: @CatherineThomas-NOAA * Adds COPY_FINAL_RESTARTS option to turn on/off copying the last restart file to COM. Defaults to off... * Defines model_start_date_current_cycle & model_start_date_next_cycle to help with knowing which IC to grab. Refs #2546 Co-authored-by: Rahul Mahajan --- jobs/JGLOBAL_FORECAST | 2 +- parm/config/gefs/config.fcst | 1 + parm/config/gfs/config.fcst | 1 + scripts/exglobal_archive.sh | 5 +- ush/forecast_det.sh | 13 +--- ush/forecast_postdet.sh | 142 ++++++++++++++--------------------- ush/forecast_predet.sh | 9 ++- ush/hpssarch_gen.sh | 15 ++-- 8 files changed, 85 insertions(+), 103 deletions(-) diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST index 6c4200dd6e..8d91be8a57 100755 --- a/jobs/JGLOBAL_FORECAST +++ b/jobs/JGLOBAL_FORECAST @@ -99,6 +99,6 @@ fi # Remove the Temporary working directory ########################################## cd "${DATAROOT}" || true -[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA} ${DATArestart}" # do not remove DATAjob. It contains DATAoutput +[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA}" "${DATArestart}" # do not remove DATAjob. 
It contains DATAoutput exit 0 diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index 5c592556c8..9e5904d689 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -6,6 +6,7 @@ echo "BEGIN: config.fcst" export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM +export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled seperately) # Turn off waves if not used for this CDUMP case ${WAVE_CDUMP} in diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 63273e0fe4..81fda1942a 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -6,6 +6,7 @@ echo "BEGIN: config.fcst" export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM +export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled seperately) # Turn off waves if not used for this CDUMP case ${WAVE_CDUMP} in diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index 5842c76b57..acb926d0e6 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -237,7 +237,10 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then #gdasocean if [ "${DO_OCN}" = "YES" ]; then - targrp_list="${targrp_list} gdasocean gdasocean_analysis" + targrp_list="${targrp_list} gdasocean" + if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + targrp_list="${targrp_list} gdasocean_analysis" + fi fi #gdasice diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh index de2a47c921..e4b9ded3d3 100755 --- a/ush/forecast_det.sh +++ b/ush/forecast_det.sh @@ -6,15 +6,9 @@ UFS_det(){ echo "SUB ${FUNCNAME[0]}: Run type determination for UFS" # Determine if the current cycle is a warm start (based on the availability of restarts) - if [[ "${DOIAU:-}" == "YES" ]]; then - if [[ -f 
"${COM_ATMOS_RESTART_PREV}/${current_cycle_begin:0:8}.${current_cycle_begin:8:2}0000.coupler.res" ]]; then - warm_start=".true." - fi - else - if [[ -f "${COM_ATMOS_RESTART_PREV}/${current_cycle:0:8}.${current_cycle:8:2}0000.coupler.res" ]]; then - warm_start=".true." - fi - fi + if [[ -f "${COM_ATMOS_RESTART_PREV}/${model_start_date_current_cycle:0:8}.${model_start_date_current_cycle:8:2}0000.coupler.res" ]]; then + warm_start=".true." + fi # If restarts were not available, this is likely a cold start if [[ "${warm_start}" == ".false." ]]; then @@ -30,6 +24,7 @@ UFS_det(){ # Since warm start is false, we cannot do IAU DOIAU="NO" IAU_OFFSET=0 + model_start_date_current_cycle=${current_cycle} # It is still possible that a restart is available from a previous forecast attempt # So we have to continue checking for restarts diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 8ea556055d..9c8858ec3d 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -48,11 +48,7 @@ FV3_postdet() { restart_date="${RERUN_DATE}" restart_dir="${DATArestart}/FV3_RESTART" else # "${RERUN}" == "NO" - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" restart_dir="${COM_ATMOS_RESTART_PREV}" fi @@ -92,11 +88,10 @@ FV3_postdet() { # Need a coupler.res that is consistent with the model start time if [[ "${DOIAU}" == "YES" ]]; then local model_start_time="${previous_cycle}" - local model_current_time="${current_cycle_begin}" else local model_start_time="${current_cycle}" - local model_current_time="${current_cycle}" fi + local model_current_time="${model_start_date_current_cycle}" rm -f "${DATA}/INPUT/coupler.res" cat >> "${DATA}/INPUT/coupler.res" << EOF 3 (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4) @@ -258,13 +253,15 @@ FV3_out() { # Copy the final restart files at the end of the forecast segment # The final 
restart written at the end of the forecast does not include the valid date # TODO: verify the above statement since RM found that it did! - echo "Copying FV3 restarts for 'RUN=${RUN}' at the end of the forecast segment: ${forecast_end_cycle}" - for fv3_restart_file in "${fv3_restart_files[@]}"; do - restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${fv3_restart_file}" - ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \ - "${COM_ATMOS_RESTART}/${restart_file}" - done - + # TODO: For other components, this is only for gfs/gefs - check to see if this should also have this + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + echo "Copying FV3 restarts for 'RUN=${RUN}' at the end of the forecast segment: ${forecast_end_cycle}" + for fv3_restart_file in "${fv3_restart_files[@]}"; do + restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${fv3_restart_file}" + ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \ + "${COM_ATMOS_RESTART}/${restart_file}" + done + fi echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied" } @@ -281,11 +278,7 @@ WW3_postdet() { restart_date="${RERUN_DATE}" restart_dir="${DATArestart}/WW3_RESTART" else - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" restart_dir="${COM_WAVE_RESTART_PREV}" fi echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'" @@ -384,11 +377,7 @@ MOM6_postdet() { restart_date="${RERUN_DATE}" else # "${RERUN}" == "NO" restart_dir="${COM_OCEAN_RESTART_PREV}" - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" fi # Copy MOM6 ICs @@ -489,11 +478,11 @@ MOM6_out() { # Coarser than 1/2 degree has a single MOM restart local mom6_restart_files mom6_restart_file restart_file mom6_restart_files=(MOM.res.nc) - # 1/4 
degree resolution has 4 additional restarts + # 1/4 degree resolution has 3 additional restarts case "${OCNRES}" in "025") local nn - for (( nn = 1; nn <= 4; nn++ )); do + for (( nn = 1; nn <= 3; nn++ )); do mom6_restart_files+=("MOM.res_${nn}.nc") done ;; @@ -501,24 +490,22 @@ MOM6_out() { esac # Copy MOM6 restarts at the end of the forecast segment to COM for RUN=gfs|gefs - local restart_file - if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then - echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" - for mom6_restart_file in "${mom6_restart_files[@]}"; do - restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${mom6_restart_file}" - ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \ - "${COM_OCEAN_RESTART}/${restart_file}" - done + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + local restart_file + if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then + echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + for mom6_restart_file in "${mom6_restart_files[@]}"; do + restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${mom6_restart_file}" + ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \ + "${COM_OCEAN_RESTART}/${restart_file}" + done + fi fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for the next cycle for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${restart_date}" for mom6_restart_file in "${mom6_restart_files[@]}"; do 
restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${mom6_restart_file}" @@ -526,7 +513,6 @@ MOM6_out() { "${COM_OCEAN_RESTART}/${restart_file}" done fi - } CICE_postdet() { @@ -539,11 +525,7 @@ CICE_postdet() { seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds cice_restart_file="${DATArestart}/CICE_RESTART/cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" else # "${RERUN}" == "NO" - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" cice_restart_file="${COM_ICE_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc" fi @@ -554,8 +536,8 @@ CICE_postdet() { # Link iceh_ic file to COM. This is the initial condition file from CICE (f000) # TODO: Is this file needed in COM? Is this going to be used for generating any products? local vdate seconds vdatestr fhr fhr3 interval last_fhr - seconds=$(to_seconds "${current_cycle:8:2}0000") # convert HHMMSS to seconds - vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}" + seconds=$(to_seconds "${model_start_date_current_cycle:8:2}0000") # convert HHMMSS to seconds + vdatestr="${model_start_date_current_cycle:0:4}-${model_start_date_current_cycle:4:2}-${model_start_date_current_cycle:6:2}-${seconds}" ${NLN} "${COM_ICE_HISTORY}/${RUN}.ice.t${cyc}z.ic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc" # Link CICE forecast output files from DATA/CICE_OUTPUT to COM @@ -601,24 +583,22 @@ CICE_out() { ${NCP} "${DATA}/ice_in" "${COM_CONF}/ufs.ice_in" # Copy CICE restarts at the end of the forecast segment to COM for RUN=gfs|gefs - local seconds source_file target_file - if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then - echo "Copying CICE restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" - seconds=$(to_seconds "${forecast_end_cycle:8:2}0000") # convert HHMMSS to seconds - 
source_file="cice_model.res.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" - target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.cice_model.res.nc" - ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \ - "${COM_ICE_RESTART}/${target_file}" + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + local seconds source_file target_file + if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then + echo "Copying CICE restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + seconds=$(to_seconds "${forecast_end_cycle:8:2}0000") # convert HHMMSS to seconds + source_file="cice_model.res.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" + target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.cice_model.res.nc" + ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \ + "${COM_ICE_RESTART}/${target_file}" + fi fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for next cycle for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying CICE restarts for 'RUN=${RUN}' at ${restart_date}" seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds source_file="cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" @@ -706,11 +686,7 @@ CMEPS_postdet() { seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds cmeps_restart_file="${DATArestart}/CMEPS_RESTART/ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" else # "${RERUN}" == "NO" - 
if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" cmeps_restart_file="${COM_MED_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.ufs.cpld.cpl.r.nc" fi @@ -740,26 +716,24 @@ CMEPS_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for CMEPS mediator" # Copy mediator restarts at the end of the forecast segment to COM for RUN=gfs|gefs - echo "Copying mediator restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" - local seconds source_file target_file - seconds=$(to_seconds "${forecast_end_cycle:8:2}"0000) - source_file="ufs.cpld.cpl.r.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" - target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.ufs.cpld.cpl.r.nc" - if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then - ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \ - "${COM_MED_RESTART}/${target_file}" - else - echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found." - fi + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + echo "Copying mediator restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + local seconds source_file target_file + seconds=$(to_seconds "${forecast_end_cycle:8:2}"0000) + source_file="ufs.cpld.cpl.r.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" + target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.ufs.cpld.cpl.r.nc" + if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then + ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \ + "${COM_MED_RESTART}/${target_file}" + else + echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found." 
+ fi + fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for the next cycle to COM for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying mediator restarts for 'RUN=${RUN}' at ${restart_date}" seconds=$(to_seconds "${restart_date:8:2}"0000) source_file="ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index c300067ce9..de414437b1 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -54,7 +54,14 @@ common_predet(){ current_cycle_begin=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${half_window} hours" +%Y%m%d%H) current_cycle_end=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${half_window} hours" +%Y%m%d%H) next_cycle_begin=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} - ${half_window} hours" +%Y%m%d%H) - next_cycle_end=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} + ${half_window} hours" +%Y%m%d%H) + #Define model start date for current_cycle and next_cycle as the time the forecast will start + if [[ "${DOIAU:-}" == "YES" ]]; then + model_start_date_current_cycle="${current_cycle_begin}" + model_start_date_next_cycle="${next_cycle_begin}" + else + model_start_date_current_cycle=${current_cycle} + model_start_date_next_cycle=${next_cycle} + fi forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) FHMIN=${FHMIN:-0} diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 1b4329c58f..101745da8e 100755 --- 
a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -560,13 +560,14 @@ if [[ ${type} == "gdas" ]]; then echo "${COM_MED_RESTART/${ROTDIR}\//}/*" } >> "${DATA}/gdasocean_restart.txt" - { - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" - } >> "${DATA}/gdasocean_analysis.txt" - + if [[ ${DO_JEDIOCNVAR} = "YES" ]]; then + { + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" + } >> "${DATA}/gdasocean_analysis.txt" + fi fi if [[ ${DO_ICE} = "YES" ]]; then From 2346c6161f75ae02369cbf30f30c6150d3e12b66 Mon Sep 17 00:00:00 2001 From: Innocent Souopgui <162634017+InnocentSouopgui-NOAA@users.noreply.github.com> Date: Thu, 9 May 2024 21:17:06 -0500 Subject: [PATCH 06/10] Migration to Rocky8 spack-stack installations on Jet (#2458) # Description Migrates Global Workflow to Rocky8 spack-stack installations on Jet. Jet has moved from CentOS7 to Rocky8. 
Resolves #2377 Refs NOAA-EMC/UPP#919 Refs NOAA-EMC/gfs-utils#60 Refs NOAA-EMC/GSI#732 Refs NOAA-EMC/GSI-Monitor#130 Refs NOAA-EMC/GSI-utils#33 --- modulefiles/module_base.jet.lua | 3 +++ modulefiles/module_gwsetup.jet.lua | 2 +- parm/config/gfs/config.resources | 5 +++-- sorc/gsi_enkf.fd | 2 +- sorc/gsi_monitor.fd | 2 +- sorc/upp.fd | 2 +- versions/run.jet.ver | 5 ++++- versions/run.spack.ver | 2 +- 8 files changed, 15 insertions(+), 8 deletions(-) diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua index afd2701503..31f8aa676d 100644 --- a/modulefiles/module_base.jet.lua +++ b/modulefiles/module_base.jet.lua @@ -39,6 +39,9 @@ load(pathJoin("met", (os.getenv("met_ver") or "None"))) load(pathJoin("metplus", (os.getenv("metplus_ver") or "None"))) load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) +-- Adding perl as a module; With Rocky8, perl packages will not be from the OS +load(pathJoin("perl", (os.getenv("perl_ver") or "None"))) + setenv("WGRIB2","wgrib2") setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) diff --git a/modulefiles/module_gwsetup.jet.lua b/modulefiles/module_gwsetup.jet.lua index 72c40469e4..bc14b19a79 100644 --- a/modulefiles/module_gwsetup.jet.lua +++ b/modulefiles/module_gwsetup.jet.lua @@ -4,7 +4,7 @@ Load environment to run GFS workflow setup scripts on Jet load(pathJoin("rocoto")) -prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev/install/modulefiles/Core") +prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/gsi-addon-dev-rocky8/install/modulefiles/Core") local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" local python_ver=os.getenv("python_ver") or "3.11.6" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 89953c7b84..3c6ccfff6f 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -699,7 +699,7 @@ case 
${step} in case "${CASE}" in "C48" | "C96" | "C192") - declare -x "wtime_${step}"="00:15:00" + declare -x "wtime_${step}"="00:20:00" declare -x "wtime_${step}_gfs"="03:00:00" ;; "C384") @@ -747,6 +747,7 @@ case ${step} in exit 4 ;; esac + if [[ ${machine} == "JET" ]]; then unset memory_upp ; fi export npe_node_upp=${npe_upp} export nth_upp=1 @@ -1032,7 +1033,7 @@ case ${step} in export npe_node_esfc=$(( npe_node_max / nth_esfc )) export nth_cycle=${nth_esfc} export npe_node_cycle=$(( npe_node_max / nth_cycle )) - export memory_esfc="80GB" + if [[ ${machine} != "JET" ]]; then export memory_esfc="80G" ; fi ;; "epos") diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd index 457510c72e..8e279f9c73 160000 --- a/sorc/gsi_enkf.fd +++ b/sorc/gsi_enkf.fd @@ -1 +1 @@ -Subproject commit 457510c72e486b7b01db09e5b1a6f407778dc772 +Subproject commit 8e279f9c734097f673b07e80f385b2623d13ba4a diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd index 8efe38eade..f9d6f5f744 160000 --- a/sorc/gsi_monitor.fd +++ b/sorc/gsi_monitor.fd @@ -1 +1 @@ -Subproject commit 8efe38eadebbd5d50284aee44f6d8b6799a7f6e6 +Subproject commit f9d6f5f744462a449e70abed8c5860b1c4564ad8 diff --git a/sorc/upp.fd b/sorc/upp.fd index 4770a2f509..83e83a938b 160000 --- a/sorc/upp.fd +++ b/sorc/upp.fd @@ -1 +1 @@ -Subproject commit 4770a2f509b7122e76c4f004210031a58ae9502c +Subproject commit 83e83a938b5794a628d30e66a54902dabe58737d diff --git a/versions/run.jet.ver b/versions/run.jet.ver index d5b98bf514..3aa586ee42 100644 --- a/versions/run.jet.ver +++ b/versions/run.jet.ver @@ -1,11 +1,14 @@ export stack_intel_ver=2021.5.0 export stack_impi_ver=2021.5.1 -export spack_env=gsi-addon-dev +export spack_env=gsi-addon-dev-rocky8 export hpss_ver= export ncl_ver=6.6.2 export R_ver=4.0.2 export gempak_ver=7.4.2 +# Adding perl as a module; With Rocky8, perl packages will not be from the OS +export perl_ver=5.38.0 + source "${HOMEgfs:-}/versions/run.spack.ver" export 
spack_mod_path="/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.spack.ver b/versions/run.spack.ver index 6685d748cb..5640f7f5f8 100644 --- a/versions/run.spack.ver +++ b/versions/run.spack.ver @@ -32,4 +32,4 @@ export obsproc_run_ver=1.1.2 export prepobs_run_ver=1.0.1 export ens_tracker_ver=feature-GFSv17_com_reorg -export fit2obs_ver=1.0.0 +export fit2obs_ver=1.1.1 From 6a9c1372ecce9e50e4f6e10e56f6e504cde1afe6 Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Fri, 10 May 2024 14:17:13 -0400 Subject: [PATCH 09/10] Do not use BUILT_semaphore to force rebuilds when re-run (#2593) Remove the placement of the `BUILT_semaphore` file after the build in the Jenkins Pipeline and force it to rebuild any changes after a PR is re-run. --- ci/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 5f64ba4d8d..8c70e31cc4 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -149,7 +149,7 @@ pipeline { } } sh(script: './link_workflow.sh') - sh(script: "echo ${HOMEgfs} > BUILT_semaphor") + //sh(script: "echo ${HOMEgfs} > BUILT_semaphor") } } if (env.CHANGE_ID && system == 'gfs') { From 4fb7c12c325702a47f27c802a5067efd33d0327c Mon Sep 17 00:00:00 2001 From: Fanglin Yang Date: Mon, 13 May 2024 16:37:51 -0400 Subject: [PATCH 08/10] Update damping and time-step (#2575) Updates the model to use explicit Rayleigh damping for u/v and implicit damping to w. This improves model stability and allows for longer timesteps. Also unifies the GDAS and GFS to use the same damping.
Results from a test at the C1152 resolution (coupled model) can be found at https://www.emc.ncep.noaa.gov/gmb/wx24fy/C1152/newdamp/ Resolves #2574 Co-authored-by: Walter Kolczynski - NOAA Co-authored-by: Lisa Bengtsson Co-authored-by: Rahul Mahajan --- parm/config/gefs/config.fcst | 8 ------- parm/config/gefs/config.ufs | 45 ++++++++++++++++++++++++++++++++---- parm/config/gfs/config.fcst | 8 ------- parm/config/gfs/config.ufs | 45 ++++++++++++++++++++++++++++++++---- sorc/ufs_model.fd | 2 +- ush/parsing_namelists_FV3.sh | 1 + 6 files changed, 84 insertions(+), 25 deletions(-) diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index 9e5904d689..f91316c7d2 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -109,18 +109,10 @@ if (( gwd_opt == 2 )); then fi # Sponge layer settings -export tau=0. -export rf_cutoff=10. export d2_bg_k1=0.20 export d2_bg_k2=0.04 export dz_min=6 export n_sponge=42 -if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then - export tau=5.0 - export rf_cutoff=1.0e3 - export d2_bg_k1=0.20 - export d2_bg_k2=0.0 -fi # PBL/turbulance schemes export hybedmf=".false." diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs index b8695b6dbb..9b42e4aa82 100644 --- a/parm/config/gefs/config.ufs +++ b/parm/config/gefs/config.ufs @@ -80,8 +80,14 @@ case "${fv3_res}" in export nthreads_fv3_gfs=1 export nthreads_ufs=1 export nthreads_ufs_gfs=1 - export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export xr_cnvcld=.false. # Do not pass conv. 
clouds to Xu-Randall cloud fraction + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite + export k_split=1 + export n_split=4 + export tau=10.0 + export rf_cutoff=100.0 + export fv_sg_adj=3600 export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 @@ -98,16 +104,22 @@ case "${fv3_res}" in export nthreads_fv3_gfs=1 export nthreads_ufs=1 export nthreads_ufs_gfs=1 - export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export xr_cnvcld=".false." # Do not pass conv. clouds to Xu-Randall cloud fraction + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export k_split=1 + export n_split=4 + export tau=8.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 ;; "C192") - export DELTIM=450 + export DELTIM=600 export layout_x=4 export layout_y=6 export layout_x_gfs=4 @@ -119,6 +131,11 @@ case "${fv3_res}" in export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=6.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=2 @@ -137,6 +154,11 @@ case "${fv3_res}" in export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.8e-3 # 
setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=4.0 + export rf_cutoff=100.0 + export fv_sg_adj=900 export WRITE_GROUP=2 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 export WRITE_GROUP_GFS=2 @@ -155,13 +177,18 @@ case "${fv3_res}" in export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=3.0 + export rf_cutoff=100.0 + export fv_sg_adj=450 export WRITE_GROUP=2 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 ;; "C1152") - export DELTIM=120 + export DELTIM=150 export layout_x=8 export layout_y=16 export layout_x_gfs=8 @@ -173,6 +200,11 @@ case "${fv3_res}" in export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=6 + export tau=2.5 + export rf_cutoff=100.0 + export fv_sg_adj=450 export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 @@ -191,6 +223,11 @@ case "${fv3_res}" in export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD + export k_split=4 + export n_split=5 + export tau=0.5 + export rf_cutoff=100.0 + export fv_sg_adj=300 export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 diff --git a/parm/config/gfs/config.fcst 
b/parm/config/gfs/config.fcst index 81fda1942a..db7306d2e8 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -121,18 +121,10 @@ if (( gwd_opt == 2 )); then fi # Sponge layer settings -export tau=0. -export rf_cutoff=10. export d2_bg_k1=0.20 export d2_bg_k2=0.04 export dz_min=6 export n_sponge=42 -if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then - export tau=5.0 - export rf_cutoff=1.0e3 - export d2_bg_k1=0.20 - export d2_bg_k2=0.0 -fi # PBL/turbulance schemes export hybedmf=".false." diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index e37e46cf70..a7dabd3d0e 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -96,9 +96,15 @@ case "${fv3_res}" in export nthreads_fv3_gfs=1 export nthreads_ufs=1 export nthreads_ufs_gfs=1 - export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export xr_cnvcld=".false." # Do not pass conv. clouds to Xu-Randall cloud fraction + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD + export k_split=1 + export n_split=4 + export tau=10.0 + export rf_cutoff=100.0 + export fv_sg_adj=3600 export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 @@ -132,9 +138,15 @@ case "${fv3_res}" in export nthreads_fv3_gfs=1 export nthreads_ufs=1 export nthreads_ufs_gfs=1 - export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export xr_cnvcld=.false. # Do not pass conv. 
clouds to Xu-Randall cloud fraction + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export k_split=1 + export n_split=4 + export tau=8.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 @@ -160,7 +172,7 @@ case "${fv3_res}" in export WRITE_GROUP_GFS=2 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=15 else - export DELTIM=450 + export DELTIM=600 export layout_x=4 export layout_y=6 export layout_x_gfs=4 @@ -172,6 +184,11 @@ case "${fv3_res}" in export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=6.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=2 @@ -211,6 +228,11 @@ case "${fv3_res}" in export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=4.0 + export rf_cutoff=100.0 + export fv_sg_adj=900 export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=4 @@ -252,6 +274,11 @@ case "${fv3_res}" in export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=3.0 + export rf_cutoff=100.0 + export fv_sg_adj=450 export WRITE_GROUP=2 export 
WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=4 @@ -259,7 +286,7 @@ case "${fv3_res}" in fi ;; "C1152") - export DELTIM=120 + export DELTIM=150 export layout_x=8 export layout_y=16 export layout_x_gfs=8 @@ -271,6 +298,11 @@ case "${fv3_res}" in export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=6 + export tau=2.5 + export rf_cutoff=100.0 + export fv_sg_adj=450 export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 @@ -289,6 +321,11 @@ case "${fv3_res}" in export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD + export k_split=4 + export n_split=5 + export tau=0.5 + export rf_cutoff=100.0 + export fv_sg_adj=300 export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index 7fdb58cad0..c54e98637e 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit 7fdb58cad0dad2f62ce7813c6719554d1c5a17af +Subproject commit c54e98637ead81b1fc1e336bd0443c8bfb6faf01 diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 9e6e67f9cc..b88849d7e7 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -360,6 +360,7 @@ cat >> input.nml < Date: Mon, 13 May 2024 22:57:38 +0000 Subject: [PATCH 09/10] Limit gfswavepostpnt to 40 PEs/node (#2588) This fixes the slow runtime of the gfswavepostpnt job on Hercules. 
The job is very I/O intensive and does not scale well to large nodes, so limit the number of jobs/node to 40. Resolves #2587 --- parm/config/gefs/config.resources | 20 +++++++++++++++++--- parm/config/gfs/config.resources | 20 +++++++++++++++++--- 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index d98e437359..04d55ae082 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -252,13 +252,19 @@ case ${step} in export memory_wavepostsbs="10GB" ;; + # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes. + # Limit the number of tasks/node to 40. "wavepostbndpnt") export wtime_wavepostbndpnt="01:00:00" export npe_wavepostbndpnt=240 export nth_wavepostbndpnt=1 export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt )) - export NTASKS=${npe_wavepostbndpnt} export is_exclusive=True + if [[ ${npe_node_wavepostbndpnt} -gt 40 ]]; then + export npe_node_wavepostbndpnt=40 + export is_exclusive=False + fi + export NTASKS=${npe_wavepostbndpnt} ;; "wavepostbndpntbll") @@ -266,8 +272,12 @@ case ${step} in export npe_wavepostbndpntbll=448 export nth_wavepostbndpntbll=1 export npe_node_wavepostbndpntbll=$(( npe_node_max / nth_wavepostbndpntbll )) - export NTASKS=${npe_wavepostbndpntbll} export is_exclusive=True + if [[ ${npe_node_wavepostbndpntbll} -gt 40 ]]; then + export npe_node_wavepostbndpntbll=40 + export is_exclusive=False + fi + export NTASKS=${npe_wavepostbndpntbll} ;; "wavepostpnt") @@ -275,8 +285,12 @@ case ${step} in export npe_wavepostpnt=200 export nth_wavepostpnt=1 export npe_node_wavepostpnt=$(( npe_node_max / nth_wavepostpnt )) - export NTASKS=${npe_wavepostpnt} export is_exclusive=True + if [[ ${npe_node_wavepostpnt} -gt 40 ]]; then + export npe_node_wavepostpnt=40 + export is_exclusive=False + fi + export NTASKS=${npe_wavepostpnt} ;; *) diff --git a/parm/config/gfs/config.resources 
b/parm/config/gfs/config.resources index 3c6ccfff6f..e2893d6337 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -140,13 +140,19 @@ case ${step} in export memory_wavepostsbs_gfs="10GB" ;; + # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes. + # Limit the number of tasks/node to 40. "wavepostbndpnt") export wtime_wavepostbndpnt="01:00:00" export npe_wavepostbndpnt=240 export nth_wavepostbndpnt=1 export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt )) - export NTASKS=${npe_wavepostbndpnt} export is_exclusive=True + if [[ ${npe_node_wavepostbndpnt} -gt 40 ]]; then + export npe_node_wavepostbndpnt=40 + export is_exclusive=False + fi + export NTASKS=${npe_wavepostbndpnt} ;; "wavepostbndpntbll") @@ -154,8 +160,12 @@ case ${step} in export npe_wavepostbndpntbll=448 export nth_wavepostbndpntbll=1 export npe_node_wavepostbndpntbll=$(( npe_node_max / nth_wavepostbndpntbll )) - export NTASKS=${npe_wavepostbndpntbll} export is_exclusive=True + if [[ ${npe_node_wavepostbndpntbll} -gt 40 ]]; then + export npe_node_wavepostbndpntbll=40 + export is_exclusive=False + fi + export NTASKS=${npe_wavepostbndpntbll} ;; "wavepostpnt") @@ -163,8 +173,12 @@ case ${step} in export npe_wavepostpnt=200 export nth_wavepostpnt=1 export npe_node_wavepostpnt=$(( npe_node_max / nth_wavepostpnt )) - export NTASKS=${npe_wavepostpnt} export is_exclusive=True + if [[ ${npe_node_wavepostpnt} -gt 40 ]]; then + export npe_node_wavepostpnt=40 + export is_exclusive=False + fi + export NTASKS=${npe_wavepostpnt} ;; "wavegempak") From c3f87660d3f5ba4432eb9362785aef0a9a4f33b1 Mon Sep 17 00:00:00 2001 From: "Kate.Zhang" Date: Fri, 17 May 2024 21:07:27 +0000 Subject: [PATCH 10/10] Update ice namelist --- ush/parsing_namelists_CICE.sh | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh index dc67777af9..08e8fc70f5 100755 --- 
a/ush/parsing_namelists_CICE.sh +++ b/ush/parsing_namelists_CICE.sh @@ -57,7 +57,14 @@ local CICE_NPT=${npt} local CICE_RUNTYPE=${runtype} local CICE_RUNID="unknown" local CICE_USE_RESTART_TIME=${use_restart_time} -local CICE_RESTART_DIR="./CICE_RESTART/" +local CICE_RESTART_FORMAT="pnetcdf2" +local CICE_RESTART_IOTASKS=-99 +local CICE_RESTART_REARR="box" +local CICE_RESTART_ROOT=-99 +local CICE_RESTART_STRIDE=-99 +local CICE_RESTART_CHUNK=0,0 +local CICE_RESTART_DEFLATE=0 +local CICE_RESTART_DIR="./RESTART/" local CICE_RESTART_FILE="cice_model.res" local CICE_DUMPFREQ="y" # "h","d","m" or "y" for restarts at intervals of "hours", "days", "months" or "years" local CICE_DUMPFREQ_N=10000 # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by ufs.configure @@ -68,6 +75,14 @@ if [[ "${RUN}" =~ "gdas" ]]; then else local CICE_HIST_AVG=".true., .true., .true., .true., .true." # GFS long forecaset wants averaged over CICE_HISTFREQ_N fi +local CICE_HISTORY_FORMAT="pnetcdf2" +local CICE_HISTORY_IOTASKS=-99 +local CICE_HISTORY_REARR="box" +local CICE_HISTORY_ROOT=-99 +local CICE_HISTORY_STRIDE=-99 +local CICE_HISTORY_CHUNK=0,0 +local CICE_HISTORY_DEFLATE=0 +local CICE_HISTORY_PREC=4 local CICE_HISTORY_DIR="./CICE_OUTPUT/" local CICE_INCOND_DIR="./CICE_OUTPUT/" # grid_nml section