From 587e469a1be5e278326fc0cbceefedc90caf75bf Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Fri, 21 Apr 2023 13:28:11 -0400 Subject: [PATCH] Refactor UFS-DA ATM ens component to use python g-w (#1373) This PR contains UFS-DA ATM ens changes originally in PR #1354. Below is a list of changes in this PR - rename UFS-DA ATM ens jobs atmensanalprep, atmensanalrun, and atmensanalpost as atmensanlinit, atmensanlrun, and atmensanlfinal, respectively - replace UFS-DA ATM ens shell scripts with python scripts - rename UFS-DA ATM ens j-jobs consistent with initialize, run, and finalize functions. Update j-jobs to execute python scripts instead of shell scripts - rename UFS-DA ATM ens rocoto jobs to be consistent with initialize, run, and finalize functions. Update jobs to set python paths and execute renamed j-jobs - update rocoto workflow generation to new names for UFS-DA ATM ens jobs - update UFS-DA ATM ens job names in machine dependent env files to new job names - rename UFS-DA ATM ens configuration files consistent with change in job names - add python class for UFS-DA ATM ens analysis - unify JEDIEXE link for UFS-DA Aerosol, ATM, and ENS - properly set `cycledefs` for `gfsatmanlinit` - remove unused `FV3JEDI_FIX` from atmanl and atmensanl config The above changes are part of a larger g-w effort to transition from shell scripts to python. UFS-DA Aerosol was the first GDASApp system to be converted. PR #1372 converted UFS-DA atmospheric variational DA to the python based approach. This PR converts UFS-DA atmospheric local ensemble DA to the python based approach. 
Fixes #1313 Depends (in part) on #1370 and #1372 and NOAA-EMC/GDASApp#388 --- env/CONTAINER.env | 2 +- env/HERA.env | 14 +- env/JET.env | 16 +- env/ORION.env | 16 +- env/S4.env | 16 +- env/WCOSS2.env | 16 +- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST | 66 ---- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP | 66 ---- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN | 66 ---- jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE | 50 +++ jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE | 43 +++ jobs/JGLOBAL_ATMENS_ANALYSIS_RUN | 38 +++ jobs/rocoto/atmensanalpost.sh | 20 -- jobs/rocoto/atmensanalprep.sh | 20 -- jobs/rocoto/atmensanalrun.sh | 20 -- jobs/rocoto/atmensanlfinal.sh | 23 ++ jobs/rocoto/atmensanlinit.sh | 24 ++ jobs/rocoto/atmensanlrun.sh | 24 ++ parm/config/config.aeroanl | 2 +- parm/config/config.atmanl | 3 +- parm/config/config.atmensanal | 24 -- parm/config/config.atmensanalpost | 10 - parm/config/config.atmensanalprep | 10 - parm/config/config.atmensanalrun | 14 - parm/config/config.atmensanl | 22 ++ parm/config/config.atmensanlfinal | 10 + parm/config/config.atmensanlinit | 10 + parm/config/config.atmensanlrun | 11 + parm/config/config.resources | 54 +-- scripts/exgdas_global_atmos_ensanal_post.py | 44 --- scripts/exgdas_global_atmos_ensanal_run.sh | 167 ---------- scripts/exglobal_atmens_analysis_finalize.py | 25 ++ .../exglobal_atmens_analysis_initialize.py | 25 ++ scripts/exglobal_atmens_analysis_run.py | 23 ++ ush/python/pygfs/task/aero_analysis.py | 9 - ush/python/pygfs/task/analysis.py | 30 +- ush/python/pygfs/task/atm_analysis.py | 9 - ush/python/pygfs/task/atmens_analysis.py | 312 ++++++++++++++++++ workflow/applications.py | 10 +- workflow/rocoto/workflow_tasks.py | 60 ++-- 40 files changed, 761 insertions(+), 663 deletions(-) delete mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST delete mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP delete mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN create mode 100755 jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE create mode 100755 
jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE create mode 100755 jobs/JGLOBAL_ATMENS_ANALYSIS_RUN delete mode 100755 jobs/rocoto/atmensanalpost.sh delete mode 100755 jobs/rocoto/atmensanalprep.sh delete mode 100755 jobs/rocoto/atmensanalrun.sh create mode 100755 jobs/rocoto/atmensanlfinal.sh create mode 100755 jobs/rocoto/atmensanlinit.sh create mode 100755 jobs/rocoto/atmensanlrun.sh delete mode 100644 parm/config/config.atmensanal delete mode 100644 parm/config/config.atmensanalpost delete mode 100644 parm/config/config.atmensanalprep delete mode 100644 parm/config/config.atmensanalrun create mode 100755 parm/config/config.atmensanl create mode 100755 parm/config/config.atmensanlfinal create mode 100755 parm/config/config.atmensanlinit create mode 100755 parm/config/config.atmensanlrun delete mode 100755 scripts/exgdas_global_atmos_ensanal_post.py delete mode 100755 scripts/exgdas_global_atmos_ensanal_run.sh create mode 100755 scripts/exglobal_atmens_analysis_finalize.py create mode 100755 scripts/exglobal_atmens_analysis_initialize.py create mode 100755 scripts/exglobal_atmens_analysis_run.py create mode 100644 ush/python/pygfs/task/atmens_analysis.py diff --git a/env/CONTAINER.env b/env/CONTAINER.env index d06ca91a9b..378b046944 100755 --- a/env/CONTAINER.env +++ b/env/CONTAINER.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" diff --git a/env/HERA.env b/env/HERA.env index c59ba72298..f97af13d95 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -56,15 +56,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/JET.env b/env/JET.env index e5fd85aaa3..1632e1cc0e 100755 --- a/env/JET.env +++ b/env/JET.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -55,17 +55,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/ORION.env b/env/ORION.env index 8911b70d29..43450bffcf 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -56,17 +56,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/S4.env b/env/S4.env index 9a62d515a4..c69a845cdf 100755 --- a/env/S4.env +++ b/env/S4.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -54,17 +54,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 8110b94b5b..95c3f72fe4 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen esfc efcs epos" echo "postsnd awips gempak" @@ -43,17 +43,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST deleted file mode 100755 index e1d53b552e..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalpost" -c "base atmensanal atmensanalpost" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} 
${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASPOSTPY:-${HOMEgfs}/scripts/exgdas_global_atmos_ensanal_post.py} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP deleted file mode 100755 index 7b3ecee7ca..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalprep" -c "base atmensanal atmensanalprep" - - -############################################## -# Set variables used in the script 
-############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_prep.py} -${EXSCRIPT} -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN deleted file mode 100755 index 45368d51ff..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalrun" -c "base atmensanal atmensanalrun" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." 
- -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASRUNSH:-${HOMEgfs}/scripts/exgdas_global_atmos_ensanal_run.sh} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..d40d79cf78 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE @@ -0,0 +1,50 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfinal" -c "base atmensanl atmensanlfinal" + +############################################## +# Set variables used in the script +############################################## +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}} +mkdir -p "${COMOUT}" + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" +export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSFINALPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..dca7d0ffc6 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlinit" -c "base atmensanl atmensanlinit" + +############################################## +# Set variables used in the script +############################################## +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDUMP="gdas" + + +############################################## +# Begin JOB SPECIFIC work +############################################## +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}} +mkdir -p "${COMOUT}" + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" +export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSINITPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_initialize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN b/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN new file mode 100755 index 0000000000..5a267f197a --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN @@ -0,0 +1,38 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlrun" -c "base atmensanl atmensanlrun" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}} +mkdir -p "${COMOUT}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSRUNSH:-${HOMEgfs}/scripts/exglobal_atmens_analysis_run.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/rocoto/atmensanalpost.sh b/jobs/rocoto/atmensanalpost.sh deleted file mode 100755 index 91ac2d6212..0000000000 --- a/jobs/rocoto/atmensanalpost.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalpost" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST -status=$? 
-exit ${status} diff --git a/jobs/rocoto/atmensanalprep.sh b/jobs/rocoto/atmensanalprep.sh deleted file mode 100755 index b54a1b464e..0000000000 --- a/jobs/rocoto/atmensanalprep.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalprep" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP -status=$? -exit ${status} diff --git a/jobs/rocoto/atmensanalrun.sh b/jobs/rocoto/atmensanalrun.sh deleted file mode 100755 index a2509a310e..0000000000 --- a/jobs/rocoto/atmensanalrun.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalrun" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN -status=$? -exit ${status} diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh new file mode 100755 index 0000000000..838e9712f8 --- /dev/null +++ b/jobs/rocoto/atmensanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh new file mode 100755 index 0000000000..0ab78a1083 --- /dev/null +++ b/jobs/rocoto/atmensanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlrun.sh b/jobs/rocoto/atmensanlrun.sh new file mode 100755 index 0000000000..91efdb3768 --- /dev/null +++ b/jobs/rocoto/atmensanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN" +status=$? +exit "${status}" diff --git a/parm/config/config.aeroanl b/parm/config/config.aeroanl index 3b9a9971f4..41d63f8549 100644 --- a/parm/config/config.aeroanl +++ b/parm/config/config.aeroanl @@ -18,7 +18,7 @@ export BERROR_DATE="20160630.000000" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIVAREXE=${HOMEgfs}/exec/fv3jedi_var.x +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x export crtm_VERSION="2.3.0" echo "END: config.aeroanl" diff --git a/parm/config/config.atmanl b/parm/config/config.atmanl index 719018d1fd..c0cd9e6733 100644 --- a/parm/config/config.atmanl +++ b/parm/config/config.atmanl @@ -10,7 +10,6 @@ export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yam export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml export STATICB_TYPE="gsibec" export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml -export FV3JEDI_FIX=${HOMEgfs}/fix/gdas export INTERP_METHOD='barycentric' export layout_x=1 @@ -19,7 +18,7 @@ export layout_y=1 export io_layout_x=1 export io_layout_y=1 -export JEDIVAREXE=${HOMEgfs}/exec/fv3jedi_var.x +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x export crtm_VERSION="2.3.0" echo "END: config.atmanl" diff --git a/parm/config/config.atmensanal b/parm/config/config.atmensanal deleted file mode 100644 index 2c939f0d84..0000000000 --- a/parm/config/config.atmensanal +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanal 
########## -# configuration common to all atm atmensanal analysis tasks - -echo "BEGIN: config.atmensanal" - -export OBS_YAML_DIR=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/config/ -export OBS_LIST=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml -export BERROR_YAML=$HOMEgfs/sorc/gdas.cd/parm/atm/berror/hybvar_bump.yaml -export ATMENSYAML=$HOMEgfs/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml -export FV3JEDI_FIX=$HOMEgfs/fix/gdas -export R2D2_OBS_DB='ufsda_test' -export R2D2_OBS_DUMP='oper_gdas' -export R2D2_OBS_SRC='ncdiag' -export R2D2_BC_SRC='gsi' -export R2D2_BC_DUMP='oper_gdas' -export R2D2_ARCH_DB='local' -export INTERP_METHOD='barycentric' - -export io_layout_x=1 # hardwired to 1,1 in yamltools.py -export io_layout_y=1 - -echo "END: config.atmensanal" diff --git a/parm/config/config.atmensanalpost b/parm/config/config.atmensanalpost deleted file mode 100644 index f79ee5b507..0000000000 --- a/parm/config/config.atmensanalpost +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalpost ########## -# Post Atm Analysis specific - -echo "BEGIN: config.atmensanalpost" - -# Get task specific resources -. $EXPDIR/config.resources atmensanalpost -echo "END: config.atmensanalpost" diff --git a/parm/config/config.atmensanalprep b/parm/config/config.atmensanalprep deleted file mode 100644 index b719b9ac6c..0000000000 --- a/parm/config/config.atmensanalprep +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalprep ########## -# Pre Atm Analysis specific - -echo "BEGIN: config.atmensanalprep" - -# Get task specific resources -. 
$EXPDIR/config.resources atmensanalprep -echo "END: config.atmensanalprep" diff --git a/parm/config/config.atmensanalrun b/parm/config/config.atmensanalrun deleted file mode 100644 index aeb59d1805..0000000000 --- a/parm/config/config.atmensanalrun +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalrun ########## -# Atm LETKFs specific - -echo "BEGIN: config.atmensanalrun" - -# Get task specific resources -. $EXPDIR/config.resources atmensanalrun - -# Task specific variables -export JEDIENSEXE=$HOMEgfs/exec/fv3jedi_letkf.x - -echo "END: config.atmensanalrun" diff --git a/parm/config/config.atmensanl b/parm/config/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/parm/config/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/parm/config/config.atmensanlfinal b/parm/config/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/parm/config/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/parm/config/config.atmensanlinit b/parm/config/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/parm/config/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/parm/config/config.atmensanlrun b/parm/config/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/parm/config/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/parm/config/config.resources b/parm/config/config.resources index dbf9cc63e3..96666cc60d 100644 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -10,7 +10,7 @@ if [[ $# -ne 1 ]]; then echo "argument can be any one of the following:" echo "getic init coupled_ic aerosol_init" echo "atmanlinit atmanlrun atmanlfinal" - echo "atmensanalprep atmensanalrun atmensanalpost" + echo "atmensanlinit atmensanlrun atmensanlfinal" echo "aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" @@ -740,39 +740,39 @@ elif [[ ${step} = "coupled_ic" ]]; then export nth_coupled_ic=1 export is_exclusive=True -elif [[ ${step} = "atmensanalprep" ]]; then +elif [[ "${step}" = "atmensanlinit" ]]; then - export wtime_atmensanalprep="00:10:00" - export npe_atmensanalprep=1 - export nth_atmensanalprep=1 - npe_node_atmensanalprep=$(echo "${npe_node_max} / ${nth_atmensanalprep}" | bc) - 
export npe_node_atmensanalprep - export is_exclusive=True + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" -elif [[ ${step} = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then # make below case dependent later - export layout_x=2 - export layout_y=3 - - export wtime_atmensanalrun="00:30:00" - npe_atmensanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanalrun - npe_atmensanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanalrun_gfs - export nth_atmensanalrun=1 - export nth_atmensanalrun_gfs=${nth_atmensanalrun} + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun export is_exclusive=True - npe_node_atmensanalrun=$(echo "${npe_node_max} / ${nth_atmensanalrun}" | bc) - export npe_node_atmensanalrun -elif [[ ${step} = "atmensanalpost" ]]; then +elif [[ "${step}" = "atmensanlfinal" ]]; then - export wtime_atmensanalpost="00:30:00" - export npe_atmensanalpost=${npe_node_max} - export nth_atmensanalpost=1 - npe_node_atmensanalpost=$(echo "${npe_node_max} / ${nth_atmensanalpost}" | bc) - export npe_node_atmensanalpost + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal export is_exclusive=True elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; 
then diff --git a/scripts/exgdas_global_atmos_ensanal_post.py b/scripts/exgdas_global_atmos_ensanal_post.py deleted file mode 100755 index 6c5384953f..0000000000 --- a/scripts/exgdas_global_atmos_ensanal_post.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python3 -################################################################################ -# UNIX Script Documentation Block -# . . -# Script name: exgdas_global_atmos_analysis_post.py -# Script description: Post atmospheric analysis script. -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-29 -# -# Abstract: This script runs after the atmospheric analysis and -# archives each diagnostic file into the R2D2 local user database. -# -# $Id$ -# -# Attributes: -# Language: Python3 -# -################################################################################ - -# import os and sys to add ush to path -import logging -import os -import sys - -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - -# get absolute path of ush/ directory either from env or relative to this file -my_dir = os.path.dirname(__file__) -my_home = os.path.dirname(os.path.dirname(my_dir)) -gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') -sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) -logging.info(f"sys.path={sys.path}") - -# import UFSDA utilities -import ufsda - -# get configuration based on environment variables -config = ufsda.misc_utils.get_env_config(component='atm') -config['DIAG_DIR'] = os.path.join(os.environ['COMOUT_ENS'], 'diags') -config['provider'] = 'ncdiag_lgetkf' - -# use R2D2 to archive hofx files -ufsda.archive.atm_diags(config) diff --git a/scripts/exgdas_global_atmos_ensanal_run.sh b/scripts/exgdas_global_atmos_ensanal_run.sh deleted file mode 100755 index 2e87573eda..0000000000 --- a/scripts/exgdas_global_atmos_ensanal_run.sh +++ /dev/null @@ -1,167 +0,0 @@ -#!/bin/bash 
-################################################################################ -#### UNIX Script Documentation Block -# . . -# Script name: exgdas_global_atmos_analysis_run.sh -# Script description: Runs the global atmospheric analysis with FV3-JEDI -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-28 -# -# Abstract: This script makes a global model atmospheric analysis using FV3-JEDI -# and also (for now) updates increment files using a python ush utility -# -# $Id$ -# -# Attributes: -# Language: POSIX shell -# Machine: Orion -# -################################################################################ - -# Set environment. -source "$HOMEgfs/ush/preamble.sh" - -# Directories -pwd=$(pwd) - -# Utilities -export NLN=${NLN:-"/bin/ln -sf"} -export INCPY=${INCPY:-"$HOMEgfs/sorc/gdas.cd/ush/jediinc2fv3.py"} -export GENYAML=${GENYAML:-"$HOMEgfs/sorc/gdas.cd/ush/genYAML"} -export GETOBSYAML=${GETOBSYAML:-"$HOMEgfs/sorc/gdas.cd/ush/get_obs_list.py"} - -################################################################################ -# make subdirectories -mkdir -p $DATA/fv3jedi -mkdir -p $DATA/obs -mkdir -p $DATA/diags -mkdir -p $DATA/bc -mkdir -p $DATA/anl - -################################################################################ -# generate YAML file -cat > $DATA/temp.yaml << EOF -template: ${ATMENSYAML} -output: $DATA/fv3jedi_ens.yaml -config: - atm: true - BERROR_YAML: $BERROR_YAML - OBS_DIR: obs - DIAG_DIR: diags - CRTM_COEFF_DIR: crtm - BIAS_IN_DIR: obs - BIAS_OUT_DIR: bc - OBS_PREFIX: $OPREFIX - BIAS_PREFIX: $GPREFIX - OBS_LIST: $OBS_LIST - OBS_YAML_DIR: $OBS_YAML_DIR - BKG_DIR: bkg - fv3jedi_staticb_dir: berror - fv3jedi_fix_dir: fv3jedi - fv3jedi_fieldset_dir: fv3jedi - fv3jedi_fieldmetadata_dir: fv3jedi - OBS_DATE: '$CDATE' - BIAS_DATE: '$GDATE' - ANL_DIR: anl/ - NMEM_ENKF: '$NMEM_ENKF' - INTERP_METHOD: '$INTERP_METHOD' -EOF -$GENYAML --config $DATA/temp.yaml - -################################################################################ 
-# link observations to $DATA -$GETOBSYAML --config $DATA/fv3jedi_ens.yaml --output $DATA/${OPREFIX}obsspace_list -files=$(cat $DATA/${OPREFIX}obsspace_list) -for file in $files; do - basefile=$(basename $file) - $NLN $COMIN/$basefile $DATA/obs/$basefile -done - -# link backgrounds to $DATA -# linking FMS RESTART files for now -# change to (or make optional) for cube sphere history later -##$NLN ${COMIN_GES}/RESTART $DATA/bkg - - -# Link ensemble backgrounds to $DATA. Make directories -# for ensemble output -if [ $DOHYBVAR = "YES" -o $DO_JEDIENS = "YES" ]; then - mkdir -p $DATA/bkg - for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) - mkdir -p $DATA/bkg/$memchar - $NLN ${COMIN_GES_ENS}/$memchar/RESTART $DATA/bkg/$memchar - mkdir -p $DATA/anl/$memchar - done -fi - -################################################################################ -# link fix files to $DATA -# static B -##CASE_BERROR=${CASE_BERROR:-${CASE_ANL:-$CASE}} -##$NLN $FV3JEDI_FIX/bump/$CASE_BERROR/ $DATA/berror - -# vertical coordinate -LAYERS=$(expr $LEVS - 1) -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/akbk${LAYERS}.nc4 $DATA/fv3jedi/akbk.nc4 - -# other FV3-JEDI fix files -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/fmsmpp.nml $DATA/fv3jedi/fmsmpp.nml -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/field_table_gfdl $DATA/fv3jedi/field_table - -# fieldmetadata -$NLN $FV3JEDI_FIX/fv3jedi/fieldmetadata/gfs-restart.yaml $DATA/fv3jedi/gfs-restart.yaml - -# fieldsets -fieldsets="dynamics.yaml ufo.yaml" -for fieldset in $fieldsets; do - $NLN $FV3JEDI_FIX/fv3jedi/fieldsets/$fieldset $DATA/fv3jedi/$fieldset -done - -# CRTM coeffs -${NLN} "${FV3JEDI_FIX}/crtm/2.3.0" "${DATA}/crtm" - -# Link executable to $DATA -$NLN $JEDIENSEXE $DATA/fv3jedi_ens.x - -################################################################################ -# run executable -export OMP_NUM_THREADS=$NTHREADS_ATMENSANAL -export pgm=$JEDIENSEXE -. 
prep_step -$APRUN_ATMENSANAL $DATA/fv3jedi_ens.x $DATA/fv3jedi_ens.yaml 1>&1 2>&2 -export err=$?; err_chk - -################################################################################ -# translate FV3-JEDI increment to FV3 readable format -for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) - atmges_fv3=$COMIN_GES_ENS/$memchar/${GPREFIX}atmf006.nc - atminc_jedi=$DATA/anl/$memchar/atminc.${PDY}_${cyc}0000z.nc4 - atminc_fv3=$COMOUT_ENS/$memchar/${CDUMP}.${cycle}.atminc.nc - mkdir -p $COMOUT_ENS/$memchar - if [ -s $atminc_jedi ]; then - $INCPY $atmges_fv3 $atminc_jedi $atminc_fv3 - export err=$? - else - echo "***WARNING*** missing $atminc_jedi ABORT" - export err=99 - fi - err_chk -done - -################################################################################ -# Create log file noting creating of analysis increment file -echo "$CDUMP $CDATE atminc done at $(date)" > $COMOUT_ENS/${CDUMP}.${cycle}.loginc.txt - -################################################################################ -# Copy diags and YAML to $COMOUT -cp -r ${DATA}/fv3jedi_ens.yaml ${COMOUT_ENS}/${CDUMP}.${cycle}.fv3jedi_ens.yaml -cp -rf "${DATA}/diags" "${COMOUT_ENS}/" - - -################################################################################ - -exit ${err} - -################################################################################ diff --git a/scripts/exglobal_atmens_analysis_finalize.py b/scripts/exglobal_atmens_analysis_finalize.py new file mode 100755 index 0000000000..5271c5c486 --- /dev/null +++ b/scripts/exglobal_atmens_analysis_finalize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exgdas_global_atmens_analysis_finalize.py +# This script creates an AtmEnsAnalysis class +# and runs the finalize method +# which perform post-processing and clean up activities +# for a global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from 
pygfs.task.atmens_analysis import AtmEnsAnalysis + + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.finalize() diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py new file mode 100755 index 0000000000..97326ddf3d --- /dev/null +++ b/scripts/exglobal_atmens_analysis_initialize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exgdas_global_atmens_analysis_initialize.py +# This script creates an AtmEnsAnalysis class +# and runs the initialize method +# which create and stage the runtime directory +# and create the YAML configuration +# for a global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.initialize() diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_run.py new file mode 100755 index 0000000000..2de95e850d --- /dev/null +++ b/scripts/exglobal_atmens_analysis_run.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exgdas_global_atmens_analysis_run.py +# This script creates an AtmEnsAnalysis object +# and runs the execute method +# which executes the global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# 
Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.execute() diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index e21284dc91..8c692e22d6 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -68,7 +68,6 @@ def initialize(self: Analysis) -> None: - staging B error files - staging model backgrounds - generating a YAML file for the JEDI executable - - linking the JEDI executable (TODO make it copyable, requires JEDI fix) - creating output directories """ super().initialize() @@ -99,14 +98,6 @@ def initialize(self: Analysis) -> None: save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") - # link executable to DATA/ directory - exe_src = self.task_config['JEDIVAREXE'] - logger.debug(f"Link executable {exe_src} to DATA/") # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. 
- exe_dest = os.path.join(self.task_config['DATA'], os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") newdirs = [ diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 6eeeb34996..94a93b74f4 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -5,7 +5,7 @@ from netCDF4 import Dataset from typing import List, Dict, Any -from pygw.yaml_file import YAMLFile, parse_j2yaml +from pygw.yaml_file import YAMLFile, parse_j2yaml, parse_yamltmpl from pygw.file_utils import FileHandler from pygw.template import Template, TemplateConstants from pygw.logger import logit @@ -36,6 +36,9 @@ def initialize(self) -> None: bias_dict = self.get_bias_dict() FileHandler(bias_dict).sync() + # link jedi executable to run directory + self.link_jediexe() + @logit(logger) def get_obs_dict(self: Task) -> Dict[str, Any]: """Compile a dictionary of observation files to copy @@ -171,3 +174,28 @@ def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: """ berror_dict = {'foo': 'bar'} return berror_dict + + @logit(logger) + def link_jediexe(self: Task) -> None: + """Compile a dictionary of background error files to copy + + This method links a JEDI executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + exe_src = self.task_config.JEDIEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. 
+ logger.debug(f"Link executable {exe_src} to DATA/") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index a632e318d9..045839edfd 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -67,7 +67,6 @@ def initialize(self: Analysis) -> None: - staging B error files - staging model backgrounds - generating a YAML file for the JEDI executable - - linking the JEDI executable (TODO make it copyable, requires JEDI fix) - creating output directories """ super().initialize() @@ -98,14 +97,6 @@ def initialize(self: Analysis) -> None: save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") - # link executable to DATA/ directory - exe_src = self.task_config.JEDIVAREXE - logger.debug(f"Link executable {exe_src} to DATA/") # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. 
- exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") newdirs = [ diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py new file mode 100644 index 0000000000..636129d3ee --- /dev/null +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -0,0 +1,312 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +from logging import getLogger +from typing import Dict, List, Any + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH +from pygw.fsutils import rm_p, chdir +from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class AtmEnsAnalysis(Analysis): + """ + Class for global atmens analysis tasks + """ + @logit(logger, name="AtmEnsAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config.CASE_ENKF[1:]) + _res_anl = int(self.config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) + _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.config.LEVS - 1, + 'ATM_WINDOW_BEGIN': 
_window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", + 'comin_ges_atm': self.config.COMIN_GES, + 'comin_ges_atmens': self.config.COMIN_GES_ENS, + 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'fv3jedi_yaml': _fv3jedi_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self: Analysis) -> None: + """Initialize a global atmens analysis + + This method will initialize a global atmens analysis using JEDI. + This includes: + - staging CRTM fix files + - staging FV3-JEDI fix files + - staging model backgrounds + - generating a YAML file for the JEDI executable + - creating output directories + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + super().initialize() + + # stage CRTM fix files + crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") + crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + FileHandler(crtm_fix_list).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() + + # generate ensemble da YAML file + logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") + ensda_yaml = 
parse_j2yaml(self.task_config.ATMENSYAML, self.task_config) + save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml) + logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + + # Make directories for member analysis files + anldir = [] + for imem in range(1, self.task_config.NMEM_ENKF + 1): + memchar = f"mem{imem:03d}" + anldir.append(os.path.join(self.task_config.DATA, 'anl', f'mem{imem:03d}')) + FileHandler({'mkdir': anldir}).sync() + + @logit(logger) + def execute(self: Analysis) -> None: + """Execute a global atmens analysis + + This method will execute a global atmens analysis using JEDI. + This includes: + - changing to the run directory + - running the global atmens analysis executable + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) + exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def finalize(self: Analysis) -> None: + """Finalize a global atmens analysis + + This method will finalize a global atmens analysis using JEDI. 
+ This includes: + - tar output diag files and place in ROTDIR + - copy the generated YAML file from initialize to the ROTDIR + - write UFS model readable atm incrment file + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + # ---- tar up diags + # path of output tar statfile + atmensstat = os.path.join(self.task_config.COMOUT, f"{self.task_config.APREFIX}atmensstat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4')) + + logger.info(f"Compressing {len(diags)} diag files to {atmensstat}.gz") + + # gzip the files first + logger.debug(f"Gzipping {len(diags)} diag files") + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + logger.debug(f"Creating tar file {atmensstat} with {len(diags)} gzipped diag files") + with tarfile.open(atmensstat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + + # copy full YAML from executable to ROTDIR + logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COMOUT}") + src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") + dest = os.path.join(self.task_config.COMOUT, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'mkdir': [self.task_config.COMOUT], + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + # Create UFS model readable atm increment file from UFS-DA atm increment + logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") + self.jedi2fv3inc() + + def clean(self): + super().clean() + + @logit(logger) + def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of model 
background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, core, tracer) + that are needed for global atmens DA and returns said dictionary for use by the FileHandler class. + + Parameters + ---------- + task_config: Dict + a dictionary containing all of the configuration needed for the task + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 RESTART files and just assumed to be fh006 + # loop over ensemble members + dirlist = [] + bkglist = [] + for imem in range(1, task_config.NMEM_ENKF + 1): + memchar = f"mem{imem:03d}" + + # accumulate directory list for member restart files + dirlist.append(os.path.join(task_config.DATA, 'bkg', memchar)) + + # get FV3 RESTART files, this will be a lot simpler when using history files + rst_dir = os.path.join(task_config.comin_ges_atmens, memchar, 'atmos/RESTART') + run_dir = os.path.join(task_config.DATA, 'bkg', memchar) + + # atmens DA needs coupler + basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(task_config.DATA, 'bkg', memchar, basename)]) + + # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data + for ftype in ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']: + template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': dirlist, + 'copy': bkglist, + } + + return bkg_dict + + @logit(logger) + def jedi2fv3inc(self: Analysis) -> None: + """Generate UFS model readable analysis increment + + This method writes a UFS DA atm increment in UFS model readable format. 
+ This includes: + - write UFS-DA atm increments using variable names expected by UFS model + - compute and write delp increment + - compute and write hydrostatic delz increment + + Please note that some of these steps are temporary and will be modified + once the modle is able to directly read atm increments. + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + # Select the atm guess file based on the analysis and background resolutions + # Fields from the atm guess are used to compute the delp and delz increments + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + + # Reference the python script which does the actual work + incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') + + for imem in range(1, self.task_config.NMEM_ENKF + 1): + memchar = f"mem{imem:03d}" + + # make output directory for member increment + incdir = [ + os.path.join(self.task_config.COMOUT, memchar, 'atmos') + ] + FileHandler({'mkdir': incdir}).sync() + + # rewrite UFS-DA atmens increments + atmges_fv3 = os.path.join(self.task_config.COMIN_GES_ENS, memchar, 'atmos', + f"{self.task_config.CDUMP}.t{self.runtime_config.previous_cycle.hour:02d}z.atmf006.nc") + atminc_jedi = os.path.join(self.task_config.DATA, 'anl', memchar, f'atminc.{cdate_inc}z.nc4') + atminc_fv3 = os.path.join(self.task_config.COMOUT, memchar, 'atmos', + f"{self.task_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atminc.nc") + + # Execute incpy to create the UFS model atm increment file + # TODO: use MPMD or parallelize with mpi4py + # See https://github.com/NOAA-EMC/global-workflow/pull/1373#discussion_r1173060656 + cmd = Executable(incpy) + cmd.add_default_arg(atmges_fv3) + cmd.add_default_arg(atminc_jedi) + cmd.add_default_arg(atminc_fv3) + logger.debug(f"Executing {cmd}") + cmd(output='stdout', error='stderr') diff --git a/workflow/applications.py b/workflow/applications.py index fc6dbd6d05..ff4161bded 100644 --- 
a/workflow/applications.py +++ b/workflow/applications.py @@ -108,7 +108,7 @@ def __init__(self, conf: Configuration) -> None: self.do_fit2obs = _base.get('DO_FIT2OBS', True) self.do_metp = _base.get('DO_METP', False) self.do_jediatmvar = _base.get('DO_JEDIVAR', False) - self.do_jediens = _base.get('DO_JEDIENS', False) + self.do_jediatmens = _base.get('DO_JEDIENS', False) self.do_jediocnvar = _base.get('DO_JEDIOCNVAR', False) self.do_mergensst = _base.get('DO_MERGENSST', False) @@ -194,8 +194,8 @@ def _cycled_configs(self): configs += ['gldas'] if self.do_hybvar: - if self.do_jediens: - configs += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost'] + if self.do_jediatmens: + configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal'] else: configs += ['eobs', 'eomg', 'ediag', 'eupd'] configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] @@ -377,8 +377,8 @@ def _get_cycled_task_names(self): hybrid_tasks = [] hybrid_after_eupd_tasks = [] if self.do_hybvar: - if self.do_jediens: - hybrid_tasks += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost', 'echgres'] + if self.do_jediatmens: + hybrid_tasks += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'echgres'] else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 3142605363..ab915c1f2a 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -16,7 +16,7 @@ class Tasks: 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', - 'atmensanalprep', 'atmensanalrun', 'atmensanalpost', + 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', 'fcst', 'post', 'ocnpost', 'vrfy', 'metp', 'postsnd', 'awips', 'gempak', @@ -397,6 +397,7 @@ def analdiag(self): return task def atmanlinit(self): + 
deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} deps.append(rocoto.add_dependency(dep_dict)) @@ -407,7 +408,13 @@ def atmanlinit(self): else: dependencies = rocoto.create_dependency(dep=deps) - cycledef = "gdas" + gfs_cyc = self._base["gfs_cyc"] + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + cycledef = self.cdump + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + resources = self.get_resource('atmanlinit') task = create_wf_task('atmanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) @@ -1129,57 +1136,44 @@ def eupd(self): return task - def atmensanalprep(self): - - dump_suffix = self._base["DUMP_SUFFIX"] - gfs_cyc = self._base["gfs_cyc"] - dmpdir = self._base["DMPDIR"] - do_gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False - + def atmensanlinit(self): deps = [] - dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc' - dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'} deps.append(rocoto.add_dependency(dep_dict)) - data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' - dep_dict = {'type': 'data', 'data': data} + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - cycledef = self.cdump - if self.cdump in ['gfs'] and do_gfs_enkf and gfs_cyc != 4: - cycledef = 'gdas' - - resources = self.get_resource('atmensanalprep') - task = create_wf_task('atmensanalprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef = "gdas" + 
resources = self.get_resource('atmensanlinit') + task = create_wf_task('atmensanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) return task - def atmensanalrun(self): + def atmensanlrun(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalprep'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlinit'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('atmensanalrun') - task = create_wf_task('atmensanalrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmensanlrun') + task = create_wf_task('atmensanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task - def atmensanalpost(self): + def atmensanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlrun'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) - resources = self.get_resource('atmensanalpost') - task = create_wf_task('atmensanalpost', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmensanlfinal') + task = create_wf_task('atmensanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task @@ -1211,8 +1205,8 @@ def _get_ecengroups(): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} deps.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_jediens: - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'} + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} else: 
dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} deps.append(rocoto.add_dependency(dep_dict)) @@ -1240,8 +1234,8 @@ def esfc(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} deps.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_jediens: - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'} + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} else: dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} deps.append(rocoto.add_dependency(dep_dict))