diff --git a/.gitignore b/.gitignore index 79607e39fd..e73b9f2e05 100644 --- a/.gitignore +++ b/.gitignore @@ -129,6 +129,7 @@ ush/global_chgres.sh ush/global_chgres_driver.sh ush/global_cycle.sh ush/global_cycle_driver.sh +ush/jediinc2fv3.py ush/mkwfsgbl.sh ush/ufsda ush/wafs_blending.sh diff --git a/env/CONTAINER.env b/env/CONTAINER.env index 48014ab313..d06ca91a9b 100755 --- a/env/CONTAINER.env +++ b/env/CONTAINER.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanalrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" diff --git a/env/HERA.env b/env/HERA.env index 3fa6288200..7960f604ba 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanalrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -49,22 +48,16 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + nth_max=$((npe_node_max / npe_node_atmanlrun)) - nth_max=$((npe_node_max / npe_node_atmanalrun)) - - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" elif [[ "${step}" = "atmensanalrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" nth_max=$((npe_node_max / npe_node_atmensanalrun)) diff --git a/env/JET.env b/env/JET.env index 73c5031a68..e5fd85aaa3 100755 --- a/env/JET.env +++ b/env/JET.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanalrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -47,17 +47,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmanalrun)) + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" elif [[ "${step}" = "atmensanalrun" ]]; then diff --git a/env/ORION.env b/env/ORION.env index 43aa24689d..1ef58c82bc 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanalrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -49,17 +48,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmanalrun)) + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" elif [[ "${step}" = "atmensanalrun" ]]; then diff --git a/env/S4.env b/env/S4.env index 4933fb989e..9a62d515a4 100755 --- a/env/S4.env +++ b/env/S4.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanalrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -47,17 +46,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmanalrun)) + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" elif [[ "${step}" = "atmensanalrun" ]]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 44f8dbc994..8110b94b5b 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -4,8 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanalrun atmensanalrun" - echo "aeroanlrun" + echo "atmanlrun atmensanalrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen esfc efcs epos" echo "postsnd awips gempak" @@ -36,17 +35,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec="${launcher} -np" export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmanalrun)) + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} - [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} - export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun}" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" elif [[ "${step}" = "atmensanalrun" ]]; then diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST deleted file mode 100755 index e44e607581..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanalpost" -c "base atmanal atmanalpost" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export 
gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASPOSTPY:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_post.py} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP deleted file mode 100755 index 093fccdd5a..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanalprep" -c "base atmanal atmanalprep" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - 
-export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_prep.py} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN deleted file mode 100755 index 876598ff7e..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanalrun" -c "base atmanal atmanalrun" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - 
-############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASRUNSH:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_run.sh} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..1719b18580 --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE @@ -0,0 +1,51 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfinal" -c "base atmanl atmanlfinal" + +############################################## +# Set variables used in the script +############################################## +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDUMP="gdas" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} +mkdir -p "${COMOUT}" + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" +export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMFINALPY:-${HOMEgfs}/scripts/exglobal_atm_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..8fc7c249b3 --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE @@ -0,0 +1,44 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlinit" -c "base atmanl atmanlinit" + +############################################## +# Set variables used in the script +############################################## +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDUMP="gdas" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} +mkdir -p "${COMOUT}" + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" +export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMINITPY:-${HOMEgfs}/scripts/exglobal_atm_analysis_initialize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_RUN b/jobs/JGLOBAL_ATM_ANALYSIS_RUN new file mode 100755 index 0000000000..b04c70e209 --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_RUN @@ -0,0 +1,44 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlrun" -c "base atmanl atmanlrun" + +############################################## +# Set variables used in the script +############################################## +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} +mkdir -p "${COMOUT}" + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" +export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMRUNSH:-${HOMEgfs}/scripts/exglobal_atm_analysis_run.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/rocoto/atmanalpost.sh b/jobs/rocoto/atmanalpost.sh deleted file mode 100755 index 71ace70c8b..0000000000 --- a/jobs/rocoto/atmanalpost.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmanalpost" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_POST -status=$? -exit ${status} diff --git a/jobs/rocoto/atmanalprep.sh b/jobs/rocoto/atmanalprep.sh deleted file mode 100755 index d5b729194a..0000000000 --- a/jobs/rocoto/atmanalprep.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmanalprep" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_PREP -status=$? -exit ${status} diff --git a/jobs/rocoto/atmanalrun.sh b/jobs/rocoto/atmanalrun.sh deleted file mode 100755 index 63aa08c184..0000000000 --- a/jobs/rocoto/atmanalrun.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmanalrun" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ANALYSIS_RUN -status=$? -exit ${status} diff --git a/jobs/rocoto/atmanlfinal.sh b/jobs/rocoto/atmanlfinal.sh new file mode 100755 index 0000000000..3c75c52cb0 --- /dev/null +++ b/jobs/rocoto/atmanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. 
"${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmanlinit.sh b/jobs/rocoto/atmanlinit.sh new file mode 100755 index 0000000000..7bb2587f0b --- /dev/null +++ b/jobs/rocoto/atmanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmanlrun.sh b/jobs/rocoto/atmanlrun.sh new file mode 100755 index 0000000000..aad80e0b06 --- /dev/null +++ b/jobs/rocoto/atmanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN" +status=$? +exit "${status}" diff --git a/parm/config/config.atmanalpost b/parm/config/config.atmanalpost deleted file mode 100644 index fd5f3bbbcc..0000000000 --- a/parm/config/config.atmanalpost +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmanalpost ########## -# Post Atm Analysis specific - -echo "BEGIN: config.atmanalpost" - -# Get task specific resources -. $EXPDIR/config.resources atmanalpost -echo "END: config.atmanalpost" diff --git a/parm/config/config.atmanalprep b/parm/config/config.atmanalprep deleted file mode 100644 index 0014520f5f..0000000000 --- a/parm/config/config.atmanalprep +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmanalprep ########## -# Pre Atm Analysis specific - -echo "BEGIN: config.atmanalprep" - -# Get task specific resources -. $EXPDIR/config.resources atmanalprep -echo "END: config.atmanalprep" diff --git a/parm/config/config.atmanalrun b/parm/config/config.atmanalrun deleted file mode 100644 index 5aaac6a01d..0000000000 --- a/parm/config/config.atmanalrun +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -x - -########## config.atmanalrun ########## -# Atm Analysis specific - -echo "BEGIN: config.atmanalrun" - -# Get task specific resources -. 
$EXPDIR/config.resources atmanalrun - -# Task specific variables -export JEDIVAREXE=$HOMEgfs/exec/fv3jedi_var.x - -echo "END: config.atmanalrun" diff --git a/parm/config/config.atmanal b/parm/config/config.atmanl similarity index 52% rename from parm/config/config.atmanal rename to parm/config/config.atmanl index 2e36ffa603..719018d1fd 100644 --- a/parm/config/config.atmanal +++ b/parm/config/config.atmanl @@ -1,25 +1,25 @@ -#!/bin/bash -x +#! /usr/bin/env bash -########## config.atmanal ########## -# configuration common to all atm analysis tasks +########## config.atmanl ########## +# configuration common to all atm var analysis tasks -echo "BEGIN: config.atmanal" +echo "BEGIN: config.atmanl" export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ -export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype.yaml +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml -export STATICB_TYPE="identity" +export STATICB_TYPE="gsibec" export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml export FV3JEDI_FIX=${HOMEgfs}/fix/gdas -export R2D2_OBS_DB='ufsda_test' -export R2D2_OBS_DUMP='oper_gdas' -export R2D2_OBS_SRC='ncdiag' -export R2D2_BC_SRC='gsi' -export R2D2_BC_DUMP='oper_gdas' -export R2D2_ARCH_DB='local' export INTERP_METHOD='barycentric' +export layout_x=1 +export layout_y=1 + export io_layout_x=1 export io_layout_y=1 -echo "END: config.atmanal" +export JEDIVAREXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/parm/config/config.atmanlfinal b/parm/config/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/parm/config/config.atmanlfinal @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/parm/config/config.atmanlinit b/parm/config/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/parm/config/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/parm/config/config.atmanlrun b/parm/config/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/parm/config/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/parm/config/config.resources b/parm/config/config.resources index 31b871da61..b1ca57e5cb 100644 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -9,7 +9,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input task argument to set resource variables!" 
echo "argument can be any one of the following:" echo "getic init coupled_ic aerosol_init" - echo "atmanalprep atmanalrun atmanalpost" + echo "atmanlinit atmanlrun atmanlfinal" echo "atmensanalprep atmensanalrun atmensanalpost" echo "aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" @@ -167,39 +167,39 @@ elif [[ ${step} = "waveawipsgridded" ]]; then export NTASKS=${npe_waveawipsgridded} export memory_waveawipsgridded_gfs="1GB" -elif [[ "${step}" = "atmanalprep" ]]; then +elif [[ "${step}" = "atmanlinit" ]]; then - export wtime_atmanalprep="00:10:00" - export npe_atmanalprep=1 - export nth_atmanalprep=1 - npe_node_atmanalprep=$(echo "${npe_node_max} / ${nth_atmanalprep}" | bc) - export npe_node_atmanalprep - export memory_atmanalprep="3072M" + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" -elif [[ "${step}" = "atmanalrun" ]]; then +elif [[ "${step}" = "atmanlrun" ]]; then # make below case dependent later export layout_x=1 export layout_y=1 - export wtime_atmanalrun="00:30:00" - npe_atmanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanalrun - npe_atmanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanalrun_gfs - export nth_atmanalrun=1 - export nth_atmanalrun_gfs=${nth_atmanalrun} - npe_node_atmanalrun=$(echo "${npe_node_max} / ${nth_atmanalrun}" | bc) - export npe_node_atmanalrun + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun export is_exclusive=True 
-elif [[ "${step}" = "atmanalpost" ]]; then +elif [[ "${step}" = "atmanlfinal" ]]; then - export wtime_atmanalpost="00:30:00" - export npe_atmanalpost=${npe_node_max} - export nth_atmanalpost=1 - npe_node_atmanalpost=$(echo "${npe_node_max} / ${nth_atmanalpost}" | bc) - export npe_node_atmanalpost + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal export is_exclusive=True elif [[ "${step}" = "aeroanlinit" ]]; then diff --git a/parm/parm_gdas/atm_crtm_coeff.yaml b/parm/parm_gdas/atm_crtm_coeff.yaml new file mode 100644 index 0000000000..8e8d433b06 --- /dev/null +++ b/parm/parm_gdas/atm_crtm_coeff.yaml @@ -0,0 +1,178 @@ +mkdir: +- $(DATA)/crtm +copy: +# Emissivity files +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISice.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISland.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISsnow.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISwater.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRice.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRland.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRsnow.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/Nalli.IRwater.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/FASTEM6.MWwater.EmisCoeff.bin, $(DATA)/crtm] +# Aerosol and Cloud files +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/AerosolCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/CloudCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/CloudCoeff.GFDLFV3.-109z-1.bin, $(DATA)/crtm] +# Satellite_Sensor specific Tau and Spc coefficient files +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g16.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g16.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g17.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g18.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari8.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari8.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari9.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari9.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/airs_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/airs_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsr2_gcom-w1.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsr2_gcom-w1.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsre_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsre_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsub_n17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsub_n17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n20.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n21.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n21.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_npp.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n18.TauCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n20.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n21.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n21.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_npp.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/gmi_gpm.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/gmi_gpm.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs3_n17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs3_n17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/saphir_meghat.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/saphir_meghat.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m08.SpcCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m08.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m09.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m09.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m10.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m10.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g14.SpcCoeff.bin, 
$(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g15.TauCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmi_f15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmi_f15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f16.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f16.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f20.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j1.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j1.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j2.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j2.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_npp.TauCoeff.bin, $(DATA)/crtm] +# Special Spc files +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a_v2.SpcCoeff.bin, $(DATA)/crtm] diff --git a/parm/parm_gdas/atm_jedi_fix.yaml b/parm/parm_gdas/atm_jedi_fix.yaml new file mode 100644 index 0000000000..07b0fe49f1 --- /dev/null +++ b/parm/parm_gdas/atm_jedi_fix.yaml @@ -0,0 +1,7 @@ +mkdir: +- $(DATA)/fv3jedi +copy: +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/akbk$(npz).nc4, $(DATA)/fv3jedi/akbk.nc4] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/fmsmpp.nml, 
$(DATA)/fv3jedi/fmsmpp.nml] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/field_table_gfdl, $(DATA)/fv3jedi/field_table] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fieldmetadata/gfs-restart.yaml, $(DATA)/fv3jedi/gfs-restart.yaml] diff --git a/parm/parm_gdas/atmanl_inc_vars.yaml b/parm/parm_gdas/atmanl_inc_vars.yaml new file mode 100644 index 0000000000..cb6718ce9f --- /dev/null +++ b/parm/parm_gdas/atmanl_inc_vars.yaml @@ -0,0 +1 @@ +incvars: ['ua', 'va', 't', 'sphum', 'liq_wat', 'ice_wat', 'o3mr'] diff --git a/scripts/exgdas_global_atmos_analysis_post.py b/scripts/exgdas_global_atmos_analysis_post.py deleted file mode 100755 index 2f17ee4aea..0000000000 --- a/scripts/exgdas_global_atmos_analysis_post.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 -################################################################################ -# UNIX Script Documentation Block -# . . -# Script name: exgdas_global_atmos_analysis_post.py -# Script description: Post atmospheric analysis script. -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-29 -# -# Abstract: This script runs after the atmospheric analysis and -# archives each diagnostic file into the R2D2 local user database. 
-# -# $Id$ -# -# Attributes: -# Language: Python3 -# -################################################################################ - -# import os and sys to add ush to path -import logging -import os -import sys - -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - -# get absolute path of ush/ directory either from env or relative to this file -my_dir = os.path.dirname(__file__) -my_home = os.path.dirname(os.path.dirname(my_dir)) -gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') -sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) -logging.info(f"sys.path={sys.path}") - -# import UFSDA utilities -import ufsda - -# get configuration based on environment variables -config = ufsda.misc_utils.get_env_config(component='atm') -config['DIAG_DIR'] = os.path.join(os.environ['COMOUT'], 'diags') -config['BIAS_OUT_DIR'] = os.path.join(os.environ['COMOUT'], 'bc') -config['provider'] = 'ncdiag' - -# use R2D2 to archive diags and bias correction coefficient files -ufsda.archive.atm_diags(config) diff --git a/scripts/exgdas_global_atmos_analysis_prep.py b/scripts/exgdas_global_atmos_analysis_prep.py deleted file mode 100755 index 65c77cd903..0000000000 --- a/scripts/exgdas_global_atmos_analysis_prep.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 -################################################################################ -# UNIX Script Documentation Block -# . . -# Script name: exgdas_global_atmos_analysis_prep.py -# Script description: Stages files and generates YAML for Global Atmosphere Analysis -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-21 -# -# Abstract: This script stages necessary input files and produces YAML -# configuration input file for FV3-JEDI executable(s) needed -# to produce a UFS Global Atmospheric Analysis. 
-# -# $Id$ -# -# Attributes: -# Language: Python3 -# -################################################################################ - -# import os and sys to add ush to path -import logging -import os -import sys - -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - -# get absolute path of ush/ directory either from env or relative to this file -my_dir = os.path.dirname(__file__) -my_home = os.path.dirname(os.path.dirname(my_dir)) -gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') -sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) -logging.info(f"sys.path={sys.path}") - -# import UFSDA utilities -import ufsda -from ufsda.stage import atm_obs, bias_obs - -# get configuration based on environment variables -config = ufsda.misc_utils.get_env_config(component='atm') - -# use R2D2 to stage obs and bias correction coefficient files -ufsda.stage.atm_obs(config) -ufsda.stage.bias_obs(config) diff --git a/scripts/exgdas_global_atmos_analysis_run.sh b/scripts/exgdas_global_atmos_analysis_run.sh deleted file mode 100755 index 45ccdf525b..0000000000 --- a/scripts/exgdas_global_atmos_analysis_run.sh +++ /dev/null @@ -1,182 +0,0 @@ -#!/bin/bash -################################################################################ -#### UNIX Script Documentation Block -# . . -# Script name: exgdas_global_atmos_analysis_run.sh -# Script description: Runs the global atmospheric analysis with FV3-JEDI -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-28 -# -# Abstract: This script makes a global model atmospheric analysis using FV3-JEDI -# and also (for now) updates increment files using a python ush utility -# -# $Id$ -# -# Attributes: -# Language: POSIX shell -# Machine: Orion -# -################################################################################ - -# Set environment. 
-source "$HOMEgfs/ush/preamble.sh" - -# Directories -pwd=$(pwd) - -# Utilities -export NLN=${NLN:-"/bin/ln -sf"} -export INCPY=${INCPY:-"$HOMEgfs/sorc/gdas.cd/ush/jediinc2fv3.py"} -export GENYAML=${GENYAML:-"$HOMEgfs/sorc/gdas.cd/ush/genYAML"} -export GETOBSYAML=${GETOBSYAML:-"$HOMEgfs/sorc/gdas.cd/ush/get_obs_list.py"} - -################################################################################ -# make subdirectories -mkdir -p $DATA/fv3jedi -mkdir -p $DATA/obs -mkdir -p $DATA/diags -mkdir -p $DATA/bc -mkdir -p $DATA/anl - -################################################################################ -# generate YAML file -cat > $DATA/temp.yaml << EOF -template: ${ATMVARYAML} -output: $DATA/fv3jedi_var.yaml -config: - atm: true - BERROR_YAML: $BERROR_YAML - OBS_DIR: obs - DIAG_DIR: diags - CRTM_COEFF_DIR: crtm - BIAS_IN_DIR: obs - BIAS_OUT_DIR: bc - OBS_PREFIX: $OPREFIX - BIAS_PREFIX: $GPREFIX - OBS_LIST: $OBS_LIST - OBS_YAML_DIR: $OBS_YAML_DIR - BKG_DIR: bkg - fv3jedi_staticb_dir: berror - fv3jedi_fix_dir: fv3jedi - fv3jedi_fieldset_dir: fv3jedi - fv3jedi_fieldmetadata_dir: fv3jedi - OBS_DATE: '$CDATE' - BIAS_DATE: '$GDATE' - ANL_DIR: anl/ - NMEM_ENKF: '$NMEM_ENKF' - INTERP_METHOD: '$INTERP_METHOD' -EOF -$GENYAML --config $DATA/temp.yaml - -################################################################################ -# link observations to $DATA -$GETOBSYAML --config $DATA/fv3jedi_var.yaml --output $DATA/${OPREFIX}obsspace_list -files=$(cat $DATA/${OPREFIX}obsspace_list) -for file in $files; do - basefile=$(basename $file) - $NLN $COMOUT/$basefile $DATA/obs/$basefile -done - -# link backgrounds to $DATA -# linking FMS RESTART files for now -# change to (or make optional) for cube sphere history later -$NLN ${COMIN_GES}/RESTART $DATA/bkg - - -# optionally link ensemble backgrounds to $DATA -if [ $DOHYBVAR = "YES" ]; then - mkdir -p $DATA/ens - fhrs="06" - if [ $l4densvar = ".true." 
]; then - fhrs="03 04 05 06 07 08 09" - fi - - for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) - for fhr in $fhrs; do - $NLN ${COMIN_GES_ENS}/$memchar/atmos/RESTART $DATA/ens/$memchar - done - done - -fi - -################################################################################ -# link fix files to $DATA -# static B -CASE_BERROR=${CASE_BERROR:-${CASE_ANL:-${CASE}}} -if [[ (${STATICB_TYPE} = "bump") || (${STATICB_TYPE} = "gsibec") ]] ; then - ${NLN} "${FV3JEDI_FIX}/${STATICB_TYPE}/${CASE_BERROR}/" "${DATA}/berror" -fi - -# vertical coordinate -LAYERS=$(expr $LEVS - 1) -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/akbk${LAYERS}.nc4 $DATA/fv3jedi/akbk.nc4 - -# other FV3-JEDI fix files -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/fmsmpp.nml $DATA/fv3jedi/fmsmpp.nml -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/field_table_gfdl $DATA/fv3jedi/field_table - -# fieldmetadata -$NLN $FV3JEDI_FIX/fv3jedi/fieldmetadata/gfs-restart.yaml $DATA/fv3jedi/gfs-restart.yaml - -# fieldsets -fieldsets="dynamics.yaml ufo.yaml" -for fieldset in $fieldsets; do - $NLN $FV3JEDI_FIX/fv3jedi/fieldsets/$fieldset $DATA/fv3jedi/$fieldset -done - -# CRTM coeffs -${NLN} "${FV3JEDI_FIX}/crtm/2.3.0" "${DATA}/crtm" - -# Link executable to $DATA -$NLN $JEDIVAREXE $DATA/fv3jedi_var.x - -################################################################################ -# run executable -export OMP_NUM_THREADS=$NTHREADS_ATMANAL -export pgm=$JEDIVAREXE -. 
prep_step -$APRUN_ATMANAL $DATA/fv3jedi_var.x $DATA/fv3jedi_var.yaml 1>&1 2>&2 -export err=$?; err_chk - -################################################################################ -# translate FV3-JEDI increment to FV3 readable format -if [[ "${CASE_BERROR}" = "${CASE}" ]]; then - atmges_fv3=${COMIN_GES}/${GPREFIX}atmf006.nc -else - atmges_fv3=${COMIN_GES}/${GPREFIX}atmf006.ensres.nc -fi -atminc_jedi=${DATA}/anl/atminc.${PDY}_${cyc}0000z.nc4 -atminc_fv3=${COMOUT}/${CDUMP}.${cycle}.atminc.nc -if [ -s $atminc_jedi ]; then - $INCPY $atmges_fv3 $atminc_jedi $atminc_fv3 - export err=$? -else - echo "***WARNING*** missing $atminc_jedi ABORT" - export err=99 -fi -err_chk - -################################################################################ -# Create log file noting creating of analysis increment file -echo "$CDUMP $CDATE atminc and tiled sfcanl done at $(date)" > $COMOUT/${CDUMP}.${cycle}.loginc.txt - -################################################################################ -# Copy diags and YAML to $COMOUT -cp -r $DATA/fv3jedi_var.yaml $COMOUT/${CDUMP}.${cycle}.fv3jedi_var.yaml -cp -rf $DATA/diags $COMOUT/ -cp -rf $DATA/bc $COMOUT/ - -# ***WARNING*** PATCH -# Copy abias, abias_pc, and abias_air from previous cycle to current cycle -# Deterministic abias used in enkf cycle -alist="abias abias_air abias_int abias_pc" -for abias in $alist; do - cp "${COMIN_GES}/${GPREFIX}${abias}" "${COMOUT}/${APREFIX}${abias}" -done - -################################################################################ - -exit ${err} - -################################################################################ diff --git a/scripts/exglobal_atm_analysis_finalize.py b/scripts/exglobal_atm_analysis_finalize.py new file mode 100755 index 0000000000..cd6938e210 --- /dev/null +++ b/scripts/exglobal_atm_analysis_finalize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exgdas_global_atm_analysis_finalize.py +# This script creates an AtmAnalysis class +# and runs the 
finalize method +# which perform post-processing and clean up activities +# for a global atm variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.finalize() diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py new file mode 100755 index 0000000000..b003d98c00 --- /dev/null +++ b/scripts/exglobal_atm_analysis_initialize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exgdas_global_atm_analysis_initialize.py +# This script creates an AtmAnalysis class +# and runs the initialize method +# which create and stage the runtime directory +# and create the YAML configuration +# for a global atm variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.initialize() diff --git a/scripts/exglobal_atm_analysis_run.py b/scripts/exglobal_atm_analysis_run.py new file mode 100755 index 0000000000..e1f44208c9 --- /dev/null +++ b/scripts/exglobal_atm_analysis_run.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exgdas_global_atm_analysis_run.py +# This script creates an AtmAnalysis object +# and runs the execute method +# which executes the global atm variational analysis +import os + 
+from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.execute() diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 379d19387b..2872fdab42 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -187,6 +187,7 @@ fi if [[ -d "${script_dir}/gdas.cd" ]]; then cd "${top_dir}/ush" || exit 1 ${LINK} "${script_dir}/gdas.cd/ush/ufsda" . + ${LINK} "${script_dir}/gdas.cd/ush/jediinc2fv3.py" . fi diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py new file mode 100644 index 0000000000..a632e318d9 --- /dev/null +++ b/ush/python/pygfs/task/atm_analysis.py @@ -0,0 +1,445 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +from logging import getLogger +from typing import Dict, List, Any + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH +from pygw.fsutils import rm_p, chdir +from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class AtmAnalysis(Analysis): + """ + Class for global atm analysis tasks + """ + @logit(logger, name="AtmAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config.CASE[1:]) + _res_anl = int(self.config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, 
-to_timedelta(f"{self.config.assim_freq}H") / 2) + _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.config.LEVS - 1, + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", + 'comin_ges_atm': self.config.COMIN_GES, + 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'fv3jedi_yaml': _fv3jedi_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self: Analysis) -> None: + """Initialize a global atm analysis + + This method will initialize a global atm analysis using JEDI. 
+ This includes: + - staging CRTM fix files + - staging FV3-JEDI fix files + - staging B error files + - staging model backgrounds + - generating a YAML file for the JEDI executable + - linking the JEDI executable (TODO make it copyable, requires JEDI fix) + - creating output directories + """ + super().initialize() + + # stage CRTM fix files + crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") + crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + FileHandler(crtm_fix_list).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage berror files + # copy static background error files, otherwise it will assume ID matrix + logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") + FileHandler(self.get_berror_dict(self.task_config)).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() + + # generate variational YAML file + logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") + varda_yaml = parse_j2yaml(self.task_config.ATMVARYAML, self.task_config) + save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + + # link executable to DATA/ directory + exe_src = self.task_config.JEDIVAREXE + logger.debug(f"Link executable {exe_src} to DATA/") # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. 
+ exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + + @logit(logger) + def execute(self: Analysis) -> None: + + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_ATMANL) + exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exec_cmd}") + + pass + + @logit(logger) + def finalize(self: Analysis) -> None: + """Finalize a global atm analysis + + This method will finalize a global atm analysis using JEDI. 
+ This includes: + - tar output diag files and place in ROTDIR + - copy the generated YAML file from initialize to the ROTDIR + - copy the updated bias correction files to ROTDIR + - write UFS model readable atm increment file + + """ + # ---- tar up diags + # path of output tar statfile + atmstat = os.path.join(self.task_config.COMOUTatmos, f"{self.task_config.APREFIX}atmstat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4')) + + logger.info(f"Compressing {len(diags)} diag files to {atmstat}.gz") + + # gzip the files first + logger.debug(f"Gzipping {len(diags)} diag files") + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + logger.debug(f"Creating tar file {atmstat} with {len(diags)} gzipped diag files") + with tarfile.open(atmstat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + + # copy full YAML from executable to ROTDIR + logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COMOUTatmos}") + src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") + dest = os.path.join(self.task_config.COMOUTatmos, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'mkdir': [self.task_config.COMOUTatmos], + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + # copy bias correction files to ROTDIR + logger.info("Copy bias correction files from DATA/ to COM/") + biasdir = os.path.join(self.task_config.DATA, 'bc') + biasls = os.listdir(biasdir) + biaslist = [] + for bfile in biasls: + src = os.path.join(biasdir, bfile) + dest = os.path.join(self.task_config.COMOUTatmos, bfile) + biaslist.append([src, dest]) + + gprefix = 
f"{self.task_config.GPREFIX}" + gsuffix = f"{to_YMDH(self.task_config.previous_cycle)}" + ".txt" + aprefix = f"{self.task_config.APREFIX}" + asuffix = f"{to_YMDH(self.task_config.current_cycle)}" + ".txt" + + logger.info(f"Copying {gprefix}*{gsuffix} from DATA/ to COM/ as {aprefix}*{asuffix}") + obsdir = os.path.join(self.task_config.DATA, 'obs') + obsls = os.listdir(obsdir) + for ofile in obsls: + if ofile.endswith(".txt"): + src = os.path.join(obsdir, ofile) + tfile = ofile.replace(gprefix, aprefix) + tfile = tfile.replace(gsuffix, asuffix) + dest = os.path.join(self.task_config.COMOUTatmos, tfile) + biaslist.append([src, dest]) + + bias_copy = { + 'mkdir': [self.task_config.COMOUTatmos], + 'copy': biaslist, + } + FileHandler(bias_copy).sync() + + # Create UFS model readable atm increment file from UFS-DA atm increment + logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") + self.jedi2fv3inc() + + def clean(self): + super().clean() + + @logit(logger) + def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, core, tracer) + that are needed for global atm DA and returns said dictionary for use by the FileHandler class. + + Parameters + ---------- + task_config: Dict + a dictionary containing all of the configuration needed for the task + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 RESTART files and just assumed to be fh006 + + # get FV3 RESTART files, this will be a lot simpler when using history files + rst_dir = os.path.join(task_config.comin_ges_atm, 'RESTART') # for now, option later? 
+ run_dir = os.path.join(task_config.DATA, 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # atm DA needs coupler + basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # atm DA needs core, srf_wnd, tracer, phy_data, sfc_data + for ftype in ['core', 'srf_wnd', 'tracer']: + template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + for ftype in ['phy_data', 'sfc_data']: + template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist, + } + return bkg_dict + + @logit(logger) + def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of background error files to copy + + This method will construct a dictionary of either bump or gsibec background + error files for global atm DA and return said dictionary for use by the + FileHandler class.
+ + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary containing the list of atm background error files to copy for FileHandler + """ + SUPPORTED_BERROR_STATIC_MAP = {'identity': self._get_berror_dict_identity, + 'bump': self._get_berror_dict_bump, + 'gsibec': self._get_berror_dict_gsibec} + + try: + berror_dict = SUPPORTED_BERROR_STATIC_MAP[config.STATICB_TYPE](config) + except KeyError: + raise KeyError(f"{config.STATICB_TYPE} is not a supported background error type.\n" + + f"Currently supported background error types are:\n" + + f'{" | ".join(SUPPORTED_BERROR_STATIC_MAP.keys())}') + + return berror_dict + + @staticmethod + @logit(logger) + def _get_berror_dict_identity(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Identity BE does not need any files for staging. + + This is a private method and should not be accessed directly. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + Returns + ---------- + berror_dict: Dict + Empty dictionary [identity BE needs no files to stage] + """ + logger.info(f"Identity background error does not use staged files. Return empty dictionary") + return {} + + @staticmethod + @logit(logger) + def _get_berror_dict_bump(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of atm bump background error files to copy + + This method will construct a dictionary of atm bump background error + files for global atm DA and return said dictionary to the parent + + This is a private method and should not be accessed directly. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary of atm bump background error files to copy for FileHandler + """ + # BUMP atm static-B needs nicas, cor_rh, cor_rv and stddev files.
+ b_dir = config.BERROR_DATA_DIR + b_datestr = to_fv3time(config.BERROR_DATE) + berror_list = [] + for ftype in ['cor_rh', 'cor_rv', 'stddev']: + coupler = f'{b_datestr}.{ftype}.coupler.res' + berror_list.append([ + os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) + ]) + + template = f'{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + tracer = template.format(tilenum=itile) + berror_list.append([ + os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) + ]) + + nproc = config.ntiles * config.layout_x * config.layout_y + for nn in range(1, nproc + 1): + berror_list.append([ + os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), + os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') + ]) + + # create dictionary of background error files to stage + berror_dict = { + 'mkdir': [os.path.join(config.DATA, 'berror')], + 'copy': berror_list, + } + return berror_dict + + @staticmethod + @logit(logger) + def _get_berror_dict_gsibec(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of atm gsibec background error files to copy + + This method will construct a dictionary of atm gsibec background error + files for global atm DA and return said dictionary to the parent + + This is a private method and should not be accessed directly. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary of atm gsibec background error files to copy for FileHandler + """ + # GSI atm static-B needs namelist and coefficient files.
+ b_dir = os.path.join(config.HOMEgfs, 'fix', 'gdas', 'gsibec', config.CASE_ANL) + berror_list = [] + for ftype in ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4']: + berror_list.append([ + os.path.join(b_dir, ftype), + os.path.join(config.DATA, 'berror', ftype) + ]) + + # create dictionary of background error files to stage + berror_dict = { + 'mkdir': [os.path.join(config.DATA, 'berror')], + 'copy': berror_list, + } + return berror_dict + + @logit(logger) + def jedi2fv3inc(self: Analysis) -> None: + """Generate UFS model readable analysis increment + + This method writes a UFS DA atm increment in UFS model readable format. + This includes: + - write UFS-DA atm increments using variable names expected by UFS model + - compute and write delp increment + - compute and write hydrostatic delz increment + + Please note that some of these steps are temporary and will be modified + once the model is able to directly read atm increments. + + """ + # Select the atm guess file based on the analysis and background resolutions + # Fields from the atm guess are used to compute the delp and delz increments + case_anl = int(self.task_config.CASE_ANL[1:]) + case = int(self.task_config.CASE[1:]) + + file = f"{self.task_config.GPREFIX}" + "atmf006" + f"{'' if case_anl == case else '.ensres'}" + ".nc" + atmges_fv3 = os.path.join(self.task_config.comin_ges_atm, file) + + # Set the path/name to the input UFS-DA atm increment file (atminc_jedi) + # and the output UFS model atm increment file (atminc_fv3) + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + atminc_jedi = os.path.join(self.task_config.DATA, 'anl', f'atminc.{cdate_inc}z.nc4') + atminc_fv3 = os.path.join(self.task_config.COMOUTatmos, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") + + # Reference the python script which does the actual work + incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') + + # Execute incpy to create the UFS model atm 
increment file + cmd = Executable(incpy) + cmd.add_default_arg(atmges_fv3) + cmd.add_default_arg(atminc_jedi) + cmd.add_default_arg(atminc_fv3) + logger.debug(f"Executing {cmd}") + cmd(output='stdout', error='stderr') diff --git a/workflow/applications.py b/workflow/applications.py index 34746e613e..82ef576ed9 100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -107,7 +107,7 @@ def __init__(self, conf: Configuration) -> None: self.do_vrfy = _base.get('DO_VRFY', True) self.do_fit2obs = _base.get('DO_FIT2OBS', True) self.do_metp = _base.get('DO_METP', False) - self.do_jedivar = _base.get('DO_JEDIVAR', False) + self.do_jediatmvar = _base.get('DO_JEDIVAR', False) self.do_jediens = _base.get('DO_JEDIENS', False) self.do_jediocnvar = _base.get('DO_JEDIOCNVAR', False) @@ -177,8 +177,8 @@ def _cycled_configs(self): configs = ['prep'] - if self.do_jedivar: - configs += ['atmanalprep', 'atmanalrun', 'atmanalpost'] + if self.do_jediatmvar: + configs += ['atmanlinit', 'atmanlrun', 'atmanlfinal'] else: configs += ['anal', 'analdiag'] @@ -354,8 +354,8 @@ def _get_cycled_task_names(self): gdas_gfs_common_cleanup_tasks = ['arch'] - if self.do_jedivar: - gdas_gfs_common_tasks_before_fcst += ['atmanalprep', 'atmanalrun', 'atmanalpost'] + if self.do_jediatmvar: + gdas_gfs_common_tasks_before_fcst += ['atmanlinit', 'atmanlrun', 'atmanlfinal'] else: gdas_gfs_common_tasks_before_fcst += ['anal'] @@ -384,7 +384,7 @@ def _get_cycled_task_names(self): # Collect all "gdas" cycle tasks gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() - if not self.do_jedivar: + if not self.do_jediatmvar: gdas_tasks += ['analdiag'] if self.do_gldas: diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 370f27f771..2cc50cde86 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -12,7 +12,7 @@ class Tasks: SERVICE_TASKS = ['arch', 'earc', 'getic'] VALID_TASKS = ['aerosol_init', 'coupled_ic', 'getic', 'init', 'prep', 
'anal', 'sfcanl', 'analcalc', 'analdiag', 'gldas', 'arch', - 'atmanalprep', 'atmanalrun', 'atmanalpost', + 'atmanlinit', 'atmanlrun', 'atmanlfinal', 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalpost', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', @@ -352,8 +352,8 @@ def anal(self): def sfcanl(self): deps = [] - if self.app_config.do_jedivar: - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} else: dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} deps.append(rocoto.add_dependency(dep_dict)) @@ -367,8 +367,8 @@ def sfcanl(self): def analcalc(self): deps = [] - if self.app_config.do_jedivar: - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} else: dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} deps.append(rocoto.add_dependency(dep_dict)) @@ -396,59 +396,45 @@ def analdiag(self): return task - def atmanalprep(self): - - dump_suffix = self._base["DUMP_SUFFIX"] - gfs_cyc = self._base["gfs_cyc"] - dmpdir = self._base["DMPDIR"] - do_gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False - + def atmanlinit(self): deps = [] - dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc' - dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' - dep_dict = {'type': 'data', 'data': data} + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - 
- cycledef = self.cdump - if self.cdump in ['gfs'] and do_gfs_enkf and gfs_cyc != 4: - cycledef = 'gdas' + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) - resources = self.get_resource('atmanalprep') - task = create_wf_task('atmanalprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef = "gdas" + resources = self.get_resource('atmanlinit') + task = create_wf_task('atmanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) + return task - def atmanalrun(self): + def atmanlrun(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalprep'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlinit'} deps.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_hybvar: - dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - else: - dependencies = rocoto.create_dependency(dep=deps) + dependencies = rocoto.create_dependency(dep=deps) - resources = self.get_resource('atmanalrun') - task = create_wf_task('atmanalrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmanlrun') + task = create_wf_task('atmanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task - def atmanalpost(self): + def atmanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanalrun'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlrun'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = 
self.get_resource('atmanalpost') - task = create_wf_task('atmanalpost', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmanlfinal') + task = create_wf_task('atmanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task