From bc7b0edee2e5463af0bd2a2afb33517aec209db1 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 26 Aug 2019 12:05:55 -0600 Subject: [PATCH 1/7] Add error checking in J-jobs following calls to ex-scripts. --- jobs/JREGIONAL_GET_EXTRN_FILES | 4 +++- jobs/JREGIONAL_MAKE_IC_LOWBC | 4 +++- jobs/JREGIONAL_MAKE_LBC1_TO_LBCN | 4 +++- jobs/JREGIONAL_MAKE_SFC_CLIMO | 4 +++- jobs/JREGIONAL_RUN_FV3 | 4 +++- 5 files changed, 15 insertions(+), 5 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 2cc6f0a58b..6dbc51cb0d 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -240,7 +240,9 @@ $SCRIPTSDIR/exregional_get_extrn_files.sh \ EXTRN_MDL_ARCV_FNS="${EXTRN_MDL_ARCV_FNS_str}" \ EXTRN_MDL_ARCV_FPS="${EXTRN_MDL_ARCV_FPS_str}" \ EXTRN_MDL_ARCV_FMT="${EXTRN_MDL_ARCV_FMT}" \ - EXTRN_MDL_ARCVREL_DIR="${EXTRN_MDL_ARCVREL_DIR}" + EXTRN_MDL_ARCVREL_DIR="${EXTRN_MDL_ARCVREL_DIR}" \ + || print_err_msg_exit "${script_name}" "\ +Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_IC_LOWBC b/jobs/JREGIONAL_MAKE_IC_LOWBC index 6111e97ec3..376914592f 100755 --- a/jobs/JREGIONAL_MAKE_IC_LOWBC +++ b/jobs/JREGIONAL_MAKE_IC_LOWBC @@ -214,7 +214,9 @@ $SCRIPTSDIR/exregional_make_ic_lowbc.sh \ EXTRN_MDL_CDATE="${EXTRN_MDL_CDATE}" \ WGRIB2_DIR="${WGRIB2_DIR}" \ APRUN="${APRUN}" \ - WORKDIR_ICSLBCS_CDATE="${WORKDIR_ICSLBCS_CDATE}" + WORKDIR_ICSLBCS_CDATE="${WORKDIR_ICSLBCS_CDATE}" \ + || print_err_msg_exit "${script_name}" "\ +Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN index c5d4dfe742..e477442b73 100755 --- a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN +++ b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN @@ -207,7 +207,9 @@ $SCRIPTSDIR/exregional_make_lbc1_to_lbcn.sh \ WGRIB2_DIR="${WGRIB2_DIR}" \ APRUN="${APRUN}" \ WORKDIR_ICSLBCS_CDATE="${WORKDIR_ICSLBCS_CDATE}" \ - EXTRN_MDL_LBC_UPDATE_FHRS="${EXTRN_MDL_LBC_UPDATE_FHRS_str}" + EXTRN_MDL_LBC_UPDATE_FHRS="${EXTRN_MDL_LBC_UPDATE_FHRS_str}" \ + || print_err_msg_exit "${script_name}" "\ +Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index 4040a93e03..c0b1e1e2bb 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -64,7 +64,9 @@ mkdir_vrfy ${WORKDIR_LOCAL} #----------------------------------------------------------------------- # ${SCRIPTSDIR}/exregional_make_sfc_climo.sh \ - WORKDIR_LOCAL="${WORKDIR_LOCAL}" + WORKDIR_LOCAL="${WORKDIR_LOCAL}" \ + || print_err_msg_exit "${script_name}" "\ +Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FV3 index e1912ab07a..515b00031e 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FV3 @@ -290,7 +290,9 @@ fi #----------------------------------------------------------------------- # $SCRIPTSDIR/exregional_run_fv3.sh \ - RUNDIR="${RUNDIR}" + RUNDIR="${RUNDIR}" \ + || print_err_msg_exit "${script_name}" "\ +Call to ex-script corresponding to J-job \"${script_name}\" failed." 
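#
# A generic sketch of the error-trapping pattern these hunks add to each
# J-job (the ex-script name and argument below are hypothetical, shown only
# to illustrate the shape of the guard): run the ex-script and, if it exits
# with nonzero status, call print_err_msg_exit so the J-job stops with a
# clear message instead of silently continuing.
#
#   script_name=$( basename "$0" )
#   $SCRIPTSDIR/exregional_some_task.sh \
#     SOME_ARG="${SOME_ARG}" \
#     || print_err_msg_exit "${script_name}" "\
#   Call to ex-script corresponding to J-job \"${script_name}\" failed."
#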
# #----------------------------------------------------------------------- # From 18cc1175499acb076c976066fb49841ca0ac7a36 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 27 Aug 2019 13:44:53 -0600 Subject: [PATCH 2/7] Add code that defines new EMC variables when RUN_ENVIR is set to "nco". This is to enable the workflow to recreate the EMC directory structure. Not yet complete. --- jobs/JREGIONAL_GET_EXTRN_FILES | 20 ++- jobs/JREGIONAL_RUN_FV3 | 11 +- ush/set_extrn_mdl_params.sh | 315 +++++++++++++++++---------------- ush/setup.sh | 105 +++++++++-- ush/stage_static.sh | 4 +- 5 files changed, 283 insertions(+), 172 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 6dbc51cb0d..94e22b947d 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -179,12 +179,22 @@ fi # #----------------------------------------------------------------------- # -if [ "$ANL_OR_FCST" = "ANL" ]; then - mkdir_vrfy -p "$EXTRN_MDL_FILES_BASEDIR_ICSSURF" +if [ "${RUN_ENVIR}" = "nco" ]; then + + COMINGFS=COMINgfs=/scratch3/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS + COMINgfs=/scratch3/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS EXTRN_MDL_FILES_DIR="$EXTRN_MDL_FILES_BASEDIR_ICSSURF/$CDATE" -elif [ "$ANL_OR_FCST" = "FCST" ]; then - mkdir_vrfy -p "$EXTRN_MDL_FILES_BASEDIR_LBCS" - EXTRN_MDL_FILES_DIR="$EXTRN_MDL_FILES_BASEDIR_LBCS/$CDATE" + +else + + if [ "$ANL_OR_FCST" = "ANL" ]; then + mkdir_vrfy -p "$EXTRN_MDL_FILES_BASEDIR_ICSSURF" + EXTRN_MDL_FILES_DIR="$EXTRN_MDL_FILES_BASEDIR_ICSSURF/$CDATE" + elif [ "$ANL_OR_FCST" = "FCST" ]; then + mkdir_vrfy -p "$EXTRN_MDL_FILES_BASEDIR_LBCS" + EXTRN_MDL_FILES_DIR="$EXTRN_MDL_FILES_BASEDIR_LBCS/$CDATE" + fi + fi # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FV3 index 515b00031e..2fe87b993e 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FV3 @@ -52,10 +52,17 @@ the specified cycle. # #----------------------------------------------------------------------- # -RUNDIR="$EXPTDIR/$CDATE" +if [ "${RUN_ENVIR}" = "nco" ]; then + DOMAIN=${predef_domain} + YYMMDDHH=... 
+ DATA="$STMP/tmpnwprd/regional_forecast_tm00_${DOMAIN}_${YYMMDDHH}" + RUNDIR=$DATA +else + RUNDIR="$EXPTDIR/$CDATE" +fi + check_for_preexist_dir $RUNDIR ${preexisting_dir_method} mkdir_vrfy $RUNDIR - mkdir_vrfy $RUNDIR/INPUT mkdir_vrfy $RUNDIR/RESTART # diff --git a/ush/set_extrn_mdl_params.sh b/ush/set_extrn_mdl_params.sh index 5edb9cf541..cb327aeffc 100644 --- a/ush/set_extrn_mdl_params.sh +++ b/ush/set_extrn_mdl_params.sh @@ -59,104 +59,108 @@ esac # #----------------------------------------------------------------------- # -case $EXTRN_MDL_NAME_ICSSURF in +if [ "${RUN_ENVIR}" = "nco" ]; then + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="$COMINgfs" -"GSMGFS") +else - case $MACHINE in - "WCOSS_C") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="" - ;; - "THEIA") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="" - ;; - "JET") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="" - ;; - "ODIN") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="" - ;; - *) - print_err_msg_exit "\ + case $EXTRN_MDL_NAME_ICSSURF in + + + "GSMGFS") + case $MACHINE in + "WCOSS_C") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="" + ;; + "THEIA") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="" + ;; + "JET") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="" + ;; + "ODIN") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="" + ;; + *) + print_err_msg_exit "\ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICSSURF has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" EXTRN_MDL_NAME_ICSSURF = \"$EXTRN_MDL_NAME_ICSSURF\" " + ;; + esac ;; - esac - ;; - - -"FV3GFS") - - case $MACHINE in - "WCOSS_C") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/gpfs/hps/nco/ops/com/gfs/prod" - ;; - "THEIA") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/scratch4/NCEPDEV/rstprod/com/gfs/prod" - ;; - "JET") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/lfs3/projects/hpc-wof1/ywang/regional_fv3/gfs" - ;; - "ODIN") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/scratch/ywang/test_runs/FV3_regional/gfs" - ;; - *) - print_err_msg_exit "\ + + + "FV3GFS") + case $MACHINE in + "WCOSS_C") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/gpfs/hps/nco/ops/com/gfs/prod" + ;; + "THEIA") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/scratch4/NCEPDEV/rstprod/com/gfs/prod" + ;; + "JET") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/lfs3/projects/hpc-wof1/ywang/regional_fv3/gfs" + ;; + "ODIN") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/scratch/ywang/test_runs/FV3_regional/gfs" + ;; + *) + print_err_msg_exit "\ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICSSURF has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" EXTRN_MDL_NAME_ICSSURF = \"$EXTRN_MDL_NAME_ICSSURF\" " + ;; + esac ;; - esac - ;; - - -"RAPX") -# - case $MACHINE in - "THEIA") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/scratch4/BMC/public/data/gsd/rr/full/wrfnat" - ;; - *) - print_err_msg_exit "\ + + + "RAPX") + case $MACHINE in + "THEIA") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/scratch4/BMC/public/data/gsd/rr/full/wrfnat" + ;; + *) + print_err_msg_exit "\ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICSSURF has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" EXTRN_MDL_NAME_ICSSURF = \"$EXTRN_MDL_NAME_ICSSURF\" " + ;; + esac ;; - esac - ;; - -"HRRRX") -# - case $MACHINE in - "THEIA") - EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/scratch4/BMC/public/data/gsd/hrrr/conus/wrfnat" - ;; - *) - print_err_msg_exit "\ + "HRRRX") + case 
$MACHINE in + "THEIA") + EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="/scratch4/BMC/public/data/gsd/hrrr/conus/wrfnat" + ;; + *) + print_err_msg_exit "\ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICSSURF has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" EXTRN_MDL_NAME_ICSSURF = \"$EXTRN_MDL_NAME_ICSSURF\" " + ;; + esac ;; + + esac - ;; -# -esac + +fi # #----------------------------------------------------------------------- # @@ -208,105 +212,110 @@ esac # #----------------------------------------------------------------------- # -case $EXTRN_MDL_NAME_LBCS in - - -"GSMGFS") - - case $MACHINE in - "WCOSS_C") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="" - ;; - "THEIA") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="" - ;; - "JET") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="" - ;; - "ODIN") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="" - ;; - *) - print_err_msg_exit "\ -The system directory in which to look for the files generated by the ex- -ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for -this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" -" - ;; - esac - ;; +if [ "${RUN_ENVIR}" = "nco" ]; then + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="$COMINgfs" +else -"FV3GFS") - - case $MACHINE in - "WCOSS_C") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/gpfs/hps/nco/ops/com/gfs/prod" - ;; - "THEIA") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch4/NCEPDEV/rstprod/com/gfs/prod" - ;; - "JET") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/lfs3/projects/hpc-wof1/ywang/regional_fv3/gfs" - ;; - "ODIN") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch/ywang/test_runs/FV3_regional/gfs" + case $EXTRN_MDL_NAME_LBCS in + + + "GSMGFS") + case $MACHINE in + "WCOSS_C") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="" + ;; + "THEIA") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="" + ;; + "JET") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="" + ;; + "ODIN") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="" + ;; + *) + print_err_msg_exit "\ + The system directory in which to look for the files generated by the ex- + ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for + this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" + " + ;; + esac ;; - *) - print_err_msg_exit "\ -The system directory in which to look for the files generated by the ex- -ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for -this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" -" - ;; - esac - ;; - -"RAPX") - - case $MACHINE in - "THEIA") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch4/BMC/public/data/gsd/rr/full/wrfnat" - ;; - *) - print_err_msg_exit "\ -The system directory in which to look for the files generated by the ex- -ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for -this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" -" + "FV3GFS") + + case $MACHINE in + "WCOSS_C") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/gpfs/hps/nco/ops/com/gfs/prod" + ;; + "THEIA") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch4/NCEPDEV/rstprod/com/gfs/prod" + ;; + "JET") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/lfs3/projects/hpc-wof1/ywang/regional_fv3/gfs" + ;; + "ODIN") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch/ywang/test_runs/FV3_regional/gfs" + ;; + *) + print_err_msg_exit "\ + The system directory in which to look for the files generated 
by the ex- + ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for + this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" + " + ;; + esac ;; - esac - ;; - -"HRRRX") - - case $MACHINE in - "THEIA") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch4/BMC/public/data/gsd/hrrr/conus/wrfnat" + "RAPX") + + case $MACHINE in + "THEIA") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch4/BMC/public/data/gsd/rr/full/wrfnat" + ;; + *) + print_err_msg_exit "\ + The system directory in which to look for the files generated by the ex- + ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for + this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" + " + ;; + esac ;; - *) - print_err_msg_exit "\ -The system directory in which to look for the files generated by the ex- -ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for -this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" -" + + + + "HRRRX") + + case $MACHINE in + "THEIA") + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch4/BMC/public/data/gsd/hrrr/conus/wrfnat" + ;; + *) + print_err_msg_exit "\ + The system directory in which to look for the files generated by the ex- + ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for + this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" + " + ;; + esac ;; + + + esac - ;; - - -esac +fi diff --git a/ush/setup.sh b/ush/setup.sh index 9b017364cf..717644b60a 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -645,6 +645,20 @@ esac # #----------------------------------------------------------------------- # +# +# +#----------------------------------------------------------------------- +# +if [ "${RUN_ENVIR}" = "nco" ]; then + HOMEfv3=${FV3SAR_DIR} + FIXfv3=${HOMEfv3}/fix + FIXsar=${FIXfv3}/fix_sar + FIXam=${FIXfv3}/fix_am + COMINgfs=/scratch3/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS +fi +# +#----------------------------------------------------------------------- +# # The forecast length (in integer hours) cannot contain more than 3 cha- # racters. Thus, its maximum value is 999. Check whether the specified # forecast length exceeds this maximum value. If so, print out a warn- @@ -844,23 +858,94 @@ check_for_preexist_dir $WORKDIR $preexisting_dir_method # Work directory for the preprocessing step that "shaves" the grid and # filtered orography files. # +# WORKDIR_SFC_CLIMO: +# Work directory for the preprocessing step that generates surface files +# from climatology. +# # WORKDIR_ICSLBCS: # Work directory for the preprocessing steps that generate the files # containing the surface fields as well as the initial and lateral # boundary conditions. # -# WORKDIR_SFC_CLIMO: -# Work directory for the preprocessing step that generates surface files -# from climatology. 
-# #---------------------------------------------------------------------- # -WORKDIR_GRID=$WORKDIR/grid -WORKDIR_OROG=$WORKDIR/orog -WORKDIR_FLTR=$WORKDIR/filtered_topo -WORKDIR_SHVE=$WORKDIR/shave -WORKDIR_ICSLBCS=$WORKDIR/ICs_BCs -WORKDIR_SFC_CLIMO=$WORKDIR/sfc_climo +if [ "${RUN_ENVIR}" = "nco" ]; then + + WORKDIR_GRID="" + WORKDIR_OROG="" + WORKDIR_FLTR="" + WORKDIR_SHVE="$FV3sar" + WORKDIR_SFC_CLIMO="" + WORKDIR_ICSLBCS="" + + if [ "${RUN_TASK_MAKE_GRID_OROG}" = "TRUE" ] || \ + [ "${RUN_TASK_MAKE_GRID_OROG}" = "FALSE" -a \ + "${PREGEN_GRID_OROG_DIR}" != "$FIXsar" ]; then + + msg="\ +When RUN_ENVIR is set to \"nco\", it is assumed that grid and orography +files already exist in the directory specified by FIXsar. Thus, the +grid and orography generation task must not be run (i.e. RUN_TASK_MAKE_- +GRID_OROG must be set to FALSE), and the directory in which to look for +the grid and orography files (i.e. PREGEN_GRID_OROG_DIR) must be set to +FIXsar. Current values for these quantities are: + RUN_TASK_MAKE_GRID_OROG = \"${RUN_TASK_MAKE_GRID_OROG}\" + PREGEN_GRID_OROG_DIR = \"${PREGEN_GRID_OROG_DIR}\" +Resetting RUN_TASK_MAKE_GRID_OROG to \"FALSE\" and PREGEN_GRID_OROG_DIR to +the contents of FIXsar. Reset values are: +" + + RUN_TASK_MAKE_GRID_OROG="FALSE" + PREGEN_GRID_OROG_DIR="$FIXsar" + + msg="$msg"" + RUN_TASK_MAKE_GRID_OROG = \"${RUN_TASK_MAKE_GRID_OROG}\" + PREGEN_GRID_OROG_DIR = \"${PREGEN_GRID_OROG_DIR}\" +" + + print_info_msg "$msg" + + fi + + if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "TRUE" ] || \ + [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" -a \ + "${PREGEN_SFC_CLIMO_DIR}" != "$FIXsar" ]; then + + msg="\ +When RUN_ENVIR is set to \"nco\", it is assumed that grid and orography +files already exist in the directory specified by FIXsar. Thus, the +grid and orography generation task must not be run (i.e. RUN_TASK_MAKE_- +SFC_CLIMO must be set to FALSE), and the directory in which to look for +the grid and orography files (i.e. PREGEN_SFC_CLIMO_DIR) must be set to +FIXsar. Current values for these quantities are: + RUN_TASK_MAKE_SFC_CLIMO = \"${RUN_TASK_MAKE_SFC_CLIMO}\" + PREGEN_SFC_CLIMO_DIR = \"${PREGEN_SFC_CLIMO_DIR}\" +Resetting RUN_TASK_MAKE_SFC_CLIMO to \"FALSE\" and PREGEN_SFC_CLIMO_DIR to +the contents of FIXsar. Reset values are: +" + + RUN_TASK_MAKE_SFC_CLIMO="FALSE" + PREGEN_SFC_CLIMO_DIR="$FIXsar" + + msg="$msg"" + RUN_TASK_MAKE_SFC_CLIMO = \"${RUN_TASK_MAKE_SFC_CLIMO}\" + PREGEN_SFC_CLIMO_DIR = \"${PREGEN_SFC_CLIMO_DIR}\" +" + + print_info_msg "$msg" + + fi + +else + + WORKDIR_GRID=$WORKDIR/grid + WORKDIR_OROG=$WORKDIR/orog + WORKDIR_FLTR=$WORKDIR/filtered_topo + WORKDIR_SHVE=$WORKDIR/shave + WORKDIR_SFC_CLIMO=$WORKDIR/sfc_climo + WORKDIR_ICSLBCS=$WORKDIR/ICs_BCs + +fi # #----------------------------------------------------------------------- # diff --git a/ush/stage_static.sh b/ush/stage_static.sh index 913b09fa37..53fe9dfeb7 100755 --- a/ush/stage_static.sh +++ b/ush/stage_static.sh @@ -408,7 +408,7 @@ for fn in "${file_list[@]}"; do if [ -f "${target_dir}/$fn" ]; then # Should links be made relative or absolute? Maybe relative in community # mode and absolute in nco mode? - if [ "$RUN_ENVIR" = "nco" ]; then + if [ "${RUN_ENVIR}" = "nco" ]; then ln_vrfy -sf ${target_dir}/$fn . else ln_vrfy -sf --relative ${target_dir}/$fn . @@ -465,7 +465,7 @@ for fn in "${file_list[@]}"; do if [ -f "${target_dir}/$fn" ]; then # Should links be made relative or absolute? Maybe relative in community # mode and absolute in nco mode? 
- if [ "$RUN_ENVIR" = "nco" ]; then + if [ "${RUN_ENVIR}" = "nco" ]; then ln_vrfy -sf ${target_dir}/$fn . else ln_vrfy -sf --relative ${target_dir}/$fn . From 9ed36374a89ab68496e20af3bd4b198d93ee04dd Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 27 Aug 2019 13:51:04 -0600 Subject: [PATCH 3/7] Point to the chgres_cube.exe executable obtained from building the code in the sorc subdirectory (which is obtained from the develop branch of UFS_UTILS via manage_externals). --- scripts/exregional_make_ic_lowbc.sh | 5 +++-- scripts/exregional_make_lbc1_to_lbcn.sh | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/exregional_make_ic_lowbc.sh b/scripts/exregional_make_ic_lowbc.sh index a4e65a9c49..185688e856 100755 --- a/scripts/exregional_make_ic_lowbc.sh +++ b/scripts/exregional_make_ic_lowbc.sh @@ -412,8 +412,9 @@ status." # #----------------------------------------------------------------------- # -#${APRUN} ${EXECDIR}/chgres_cube.exe || print_err_msg_exit "${script_name}" "\ -${APRUN} ${BASEDIR}/UFS_UTILS_chgres_grib2/exec/chgres_cube.exe || print_err_msg_exit "${script_name}" "\ +#${APRUN} ${BASEDIR}/UFS_UTILS_chgres_grib2/exec/chgres_cube.exe || print_err_msg_exit "${script_name}" "\ +${APRUN} ${EXECDIR}/chgres_cube.exe || \ +print_err_msg_exit "${script_name}" "\ Call to executable to generate surface and initial conditions files for the FV3SAR failed: EXTRN_MDL_NAME_ICSSURF = \"${EXTRN_MDL_NAME_ICSSURF}\" diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index 021adb4ea6..b48ed1762c 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -382,7 +382,8 @@ with nonzero status." # # Run chgres_cube. # - ${APRUN} ${BASEDIR}/UFS_UTILS_chgres_grib2/exec/chgres_cube.exe || \ +# ${APRUN} ${BASEDIR}/UFS_UTILS_chgres_grib2/exec/chgres_cube.exe || \ + ${APRUN} ${EXECDIR}/chgres_cube.exe || \ print_err_msg_exit "${script_name}" "\ Call to executable to generate lateral boundary conditions file for the the FV3SAR failed: From 8f2863ef959c392f07b848303c77cc992911bf5b Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 27 Aug 2019 13:53:45 -0600 Subject: [PATCH 4/7] Split the ush/run_post.sh script to a J-job and an ex-script and call the J-job instead of the original script in the XML template. --- jobs/JREGIONAL_RUN_POST | 115 ++++++++++++++ .../exregional_run_post.sh | 141 +++++++++--------- ush/templates/FV3SAR_wflow.xml | 2 +- 3 files changed, 186 insertions(+), 72 deletions(-) create mode 100755 jobs/JREGIONAL_RUN_POST rename ush/run_post.sh => scripts/exregional_run_post.sh (79%) diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST new file mode 100755 index 0000000000..2a3ebad06f --- /dev/null +++ b/jobs/JREGIONAL_RUN_POST @@ -0,0 +1,115 @@ +#!/bin/bash + +# +#----------------------------------------------------------------------- +# +# This script runs the post-processor (UPP) on the NetCDF output files +# of the write component of the FV3SAR model. +# +#----------------------------------------------------------------------- +# + +# +#----------------------------------------------------------------------- +# +# Source the variable definitions script and the function definitions +# file. +# +#----------------------------------------------------------------------- +# +. $SCRIPT_VAR_DEFNS_FP +. $USHDIR/source_funcs.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). 
Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Set the script name and print out an informational message informing +# the user that we've entered this script. +# +#----------------------------------------------------------------------- +# +script_name=$( basename "$0" ) +print_info_msg "\n\ +======================================================================== +Entering script: \"${script_name}\" +This is the J-job script for the task that runs the post-processor (UPP) +on the output files corresponding to a specified forecast hour. +========================================================================" +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +RUNDIR="$EXPTDIR/$CDATE" +# +#----------------------------------------------------------------------- +# +# If it doesn't already exist, create the directory (POSTPRD_DIR) in +# which to store post-processing output. (Note that POSTPRD_DIR may al- +# ready have been created by this post-processing script run for a dif- +# ferent forecast hour.) Also, create a temporary work directory (FHR_- +# DIR) for the current forecast hour being processed. FHR_DIR will be +# deleted later after the processing for the current forecast hour is +# complete. Then change location to FHR_DIR. +# +# Note that there may be a preexisting version of FHR_DIR from previous +# runs of this script for the current forecast hour (e.g. from the work- +# flow task that runs this script failing and then being called again). +# Thus, we first make sure preexisting versions are deleted. +# +#----------------------------------------------------------------------- +# +POSTPRD_DIR="$RUNDIR/postprd" +mkdir_vrfy -p "${POSTPRD_DIR}" + +FHR_DIR="${POSTPRD_DIR}/$fhr" +check_for_preexist_dir $FHR_DIR "delete" +mkdir_vrfy -p "${FHR_DIR}" + +cd_vrfy ${FHR_DIR} +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job and pass to it the necessary varia- +# bles. +# +#----------------------------------------------------------------------- +# +$SCRIPTSDIR/exregional_run_post.sh \ + RUNDIR="${RUNDIR}" \ + POSTPRD_DIR="${POSTPRD_DIR}" \ + FHR_DIR="${FHR_DIR}" \ + fhr="${fhr}" \ + || print_err_msg_exit "${script_name}" "\ +Call to ex-script corresponding to J-job \"${script_name}\" failed." +# +#----------------------------------------------------------------------- +# +# Print exit message. +# +#----------------------------------------------------------------------- +# +print_info_msg "\n\ +======================================================================== +Exiting script: \"${script_name}\" +========================================================================" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. 
+# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + diff --git a/ush/run_post.sh b/scripts/exregional_run_post.sh similarity index 79% rename from ush/run_post.sh rename to scripts/exregional_run_post.sh index fcbe7ae5bc..d215010e14 100755 --- a/ush/run_post.sh +++ b/scripts/exregional_run_post.sh @@ -1,41 +1,65 @@ -#!/bin/sh -l +#!/bin/bash # #----------------------------------------------------------------------- # -# This script runs the post-processor (UPP) on the NetCDF output files -# of the write component of the FV3SAR model. +# Source the variable definitions script and the function definitions +# file. # #----------------------------------------------------------------------- # - +. $SCRIPT_VAR_DEFNS_FP +. $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- # -# Source the variable definitions script. +# Save current shell options (in a global array). Then set new options +# for this script/function. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Source function definition files. +# Set the script name and print out an informational message informing +# the user that we've entered this script. # #----------------------------------------------------------------------- # -. $USHDIR/source_funcs.sh +script_name=$( basename "$0" ) +print_info_msg "\n\ +======================================================================== +Entering script: \"${script_name}\" +This is the ex-script for the task that runs the post-processor (UPP) on +the output files corresponding to a specified forecast hour. +========================================================================" # #----------------------------------------------------------------------- # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# Specify the set of valid argument names for this script/function. +# Then process the arguments provided to this script/function (which +# should consist of a set of name-value pairs of the form arg1="value1", +# etc). # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 - -RUNDIR="$EXPTDIR/$CDATE" +valid_args=( "RUNDIR" "POSTPRD_DIR" "FHR_DIR" "fhr" ) +process_args valid_args "$@" + +# If VERBOSE is set to TRUE, print out what each valid argument has been +# set to. +if [ "$VERBOSE" = "TRUE" ]; then + num_valid_args="${#valid_args[@]}" + print_info_msg "\n\ +The arguments to script/function \"${script_name}\" have been set as +follows: +" + for (( i=0; i<$num_valid_args; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + printf " $line\n" + done +fi # #----------------------------------------------------------------------- # @@ -46,9 +70,9 @@ RUNDIR="$EXPTDIR/$CDATE" print_info_msg_verbose "Starting post-processing for fhr = $fhr hr..." case $MACHINE in -# + + "WCOSS_C" | "WCOSS" ) -# # { save_shell_opts; set +x; } > /dev/null 2>&1 module purge . 
$MODULESHOME/init/ksh @@ -73,9 +97,9 @@ case $MACHINE in APRUN="aprun -j 1 -n${ntasks} -N${ptile} -d${threads} -cc depth" ;; -# + + "THEIA") -# { save_shell_opts; set +x; } > /dev/null 2>&1 module purge module load intel @@ -83,13 +107,12 @@ case $MACHINE in module load netcdf module load contrib wrap-mpi { restore_shell_opts; } > /dev/null 2>&1 - np=${SLURM_NTASKS} APRUN="mpirun -np ${np}" ;; -# + + "JET") -# { save_shell_opts; set +x; } > /dev/null 2>&1 module purge . /apps/lmod/lmod/init/sh @@ -126,58 +149,45 @@ case $MACHINE in np=${SLURM_NTASKS} APRUN="mpirun -np ${np}" ;; -# + + "ODIN") -# APRUN="srun -n 1" ;; -# -esac -#----------------------------------------------------------------------- + +esac # -# If it doesn't already exist, create the directory (POSTPRD_DIR) in -# which to store post-processing output. (Note that POSTPRD_DIR may al- -# ready have been created by this post-processing script run for a dif- -# ferent forecast hour.) Also, create a temporary work directory (FHR_- -# DIR) for the current forecast hour being processed. FHR_DIR will be -# deleted later after the processing for the current forecast hour is -# complete. Then change location to FHR_DIR. +#----------------------------------------------------------------------- # -# Note that there may be a preexisting version of FHR_DIR from previous -# runs of this script for the current forecast hour (e.g. from the work- -# flow task that runs this script failing and then being called again). -# Thus, we first make sure preexisting versions are deleted. +# Remove any files from previous runs and stage necessary files in FHR_DIR. # #----------------------------------------------------------------------- - -POSTPRD_DIR="$RUNDIR/postprd" -mkdir_vrfy -p "${POSTPRD_DIR}" - -FHR_DIR="${POSTPRD_DIR}/$fhr" -check_for_preexist_dir $FHR_DIR "delete" -mkdir_vrfy -p "${FHR_DIR}" - -cd_vrfy ${FHR_DIR} - +# +rm_vrfy -f fort.* +cp_vrfy $UPPFIX/nam_micro_lookup.dat ./eta_micro_lookup.dat +cp_vrfy $UPPFIX/postxconfig-NT-fv3sar.txt ./postxconfig-NT.txt +cp_vrfy $UPPFIX/params_grib2_tbl_new ./params_grib2_tbl_new +cp_vrfy ${UPPDIR}/ncep_post . +# #----------------------------------------------------------------------- # # Get the cycle hour. This is just the variable HH set in the setup.sh # script. # #----------------------------------------------------------------------- - +# HH=${CDATE:8:2} cyc=$HH tmmark="tm$HH" - +# #----------------------------------------------------------------------- # # Create a text file (itag) containing arguments to pass to the post- # processing executable. # #----------------------------------------------------------------------- - +# dyn_file=${RUNDIR}/dynf0${fhr}.nc phy_file=${RUNDIR}/phyf0${fhr}.nc @@ -199,37 +209,24 @@ ${phy_file} KPO=47,PO=1000.,975.,950.,925.,900.,875.,850.,825.,800.,775.,750.,725.,700.,675.,650.,625.,600.,575.,550.,525.,500.,475.,450.,425.,400.,375.,350.,325.,300.,275.,250.,225.,200.,175.,150.,125.,100.,70.,50.,30.,20.,10.,7.,5.,3.,2.,1., / EOF - -#----------------------------------------------------------------------- # -# Stage files in FHR_DIR. -# -#----------------------------------------------------------------------- - -rm_vrfy -f fort.* - -cp_vrfy $UPPFIX/nam_micro_lookup.dat ./eta_micro_lookup.dat -cp_vrfy $UPPFIX/postxconfig-NT-fv3sar.txt ./postxconfig-NT.txt -cp_vrfy $UPPFIX/params_grib2_tbl_new ./params_grib2_tbl_new - #----------------------------------------------------------------------- # # Copy the UPP executable to FHR_DIR and run the post-processor. 
# #----------------------------------------------------------------------- - -cp_vrfy ${UPPDIR}/ncep_post . +# ${APRUN} ./ncep_post < itag || print_err_msg_exit "\ Call to executable to run post for forecast hour $fhr returned with non- zero exit code." - +# #----------------------------------------------------------------------- # # Move (and rename) the output files from the work directory to their # final location (POSTPRD_DIR). Then delete the work directory. # #----------------------------------------------------------------------- - +# # If expt_title is set to an empty string in config.sh, I think TITLE # will also be empty. Must try out that case... if [ -n ${predef_domain} ]; then @@ -245,8 +242,10 @@ mv_vrfy BGRD3D.GrbF${fhr} ${POSTPRD_DIR}/${TITLE}.t${cyc}z.bgrd3d${fhr}.${tmmark START_DATE=`echo "${CDATE}" | sed 's/\([[:digit:]]\{2\}\)$/ \1/'` basetime=`date +%y%j%H%M -d "${START_DATE}"` -ln -s ${POSTPRD_DIR}/${TITLE}.t${cyc}z.bgdawp${fhr}.${tmmark} ${POSTPRD_DIR}/BGDAWP_${basetime}${fhr}00 -ln -s ${POSTPRD_DIR}/${TITLE}.t${cyc}z.bgrd3d${fhr}.${tmmark} ${POSTPRD_DIR}/BGRD3D_${basetime}${fhr}00 +ln_vrfy -fs ${POSTPRD_DIR}/${TITLE}.t${cyc}z.bgdawp${fhr}.${tmmark} \ + ${POSTPRD_DIR}/BGDAWP_${basetime}${fhr}00 +ln_vrfy -fs ${POSTPRD_DIR}/${TITLE}.t${cyc}z.bgrd3d${fhr}.${tmmark} \ + ${POSTPRD_DIR}/BGRD3D_${basetime}${fhr}00 rm_vrfy -rf ${FHR_DIR} # @@ -256,18 +255,18 @@ rm_vrfy -rf ${FHR_DIR} # #----------------------------------------------------------------------- # -print_info_msg "\ - +print_info_msg "\n\ ======================================================================== Post-processing for forecast hour $fhr completed successfully. +Exiting script: \"${script_name}\" ========================================================================" - +# #----------------------------------------------------------------------- # # Restore the shell options saved at the beginning of this script/func- # tion. # #----------------------------------------------------------------------- - +# { restore_shell_opts; } > /dev/null 2>&1 diff --git a/ush/templates/FV3SAR_wflow.xml b/ush/templates/FV3SAR_wflow.xml index 674b68f5ae..548fdba1c5 100644 --- a/ush/templates/FV3SAR_wflow.xml +++ b/ush/templates/FV3SAR_wflow.xml @@ -313,7 +313,7 @@ cle. &RSRC_POST; &RSRV_DEFAULT; - &USHDIR;/run_post.sh + &JOBSDIR;/JREGIONAL_RUN_POST &PROC_POST; run_post_#fhr# &LOG_DIR;/run_post_#fhr#_@Y@m@d@H.log From 60be3e2339f5dfe93e9fb607a1193612ac8b18ea Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 27 Aug 2019 14:04:17 -0600 Subject: [PATCH 5/7] Add code to setup.sh so that if the make_grid_orog task is going to run, the variable containing the path to pre-generated grid and orography files gets set to an empty string. Same for the make_sfc_climo task. 
--- ush/config_defaults.sh | 1 - ush/setup.sh | 28 +++++++++++++++++++++++++--- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 17d935adf7..e069368e1a 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -729,4 +729,3 @@ PREGEN_GRID_OROG_DIR="/path/to/pregenerated/grid/and/filtered/orog/files" RUN_TASK_MAKE_SFC_CLIMO="TRUE" PREGEN_SFC_CLIMO_DIR="/path/to/pregenerated/surface/climo/files" - diff --git a/ush/setup.sh b/ush/setup.sh index 717644b60a..1e96a77fe8 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -179,6 +179,16 @@ ted grid and orography files does not exist: " fi # +# If RUN_TASK_MAKE_GRID_OROG is set to "TRUE" and the variable specify- +# ing the directory in which to look for pregenerated grid and orography +# files (i.e. PREGEN_GRID_OROG_DIR) is not empty, then for clarity reset +# the latter to an empty string (because it will not be used). +# +if [ "$RUN_TASK_MAKE_GRID_OROG" = "TRUE" ] && \ + [ -n "${PREGEN_GRID_OROG_DIR}" ]; then + PREGEN_GRID_OROG_DIR="" +fi +# #----------------------------------------------------------------------- # # Make sure that RUN_TASK_MAKE_SFC_CLIMO is set to a valid value. @@ -222,6 +232,16 @@ ted surface climatology files does not exist: " fi # +# If RUN_TASK_MAKE_SFC_CLIMO is set to "TRUE" and the variable specify- +# ing the directory in which to look for pregenerated grid and orography +# files (i.e. PREGEN_SFC_CLIMO_DIR) is not empty, then for clarity reset +# the latter to an empty string (because it will not be used). +# +if [ "$RUN_TASK_MAKE_SFC_CLIMO" = "TRUE" ] && \ + [ -n "${PREGEN_SFC_CLIMO_DIR}" ]; then + PREGEN_SFC_CLIMO_DIR="" +fi +# #----------------------------------------------------------------------- # # Convert machine name to upper case if necessary. Then make sure that @@ -1685,7 +1705,7 @@ fi # #----------------------------------------------------------------------- # -EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="$EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF" +EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="${EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF}" # #----------------------------------------------------------------------- # @@ -1696,7 +1716,7 @@ EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF="$EXTRN_MDL_FILES_SYSBASEDIR_ICSSURF" # #----------------------------------------------------------------------- # -EXTRN_MDL_FILES_SYSBASEDIR_LBCS="$EXTRN_MDL_FILES_SYSBASEDIR_LBCS" +EXTRN_MDL_FILES_SYSBASEDIR_LBCS="${EXTRN_MDL_FILES_SYSBASEDIR_LBCS}" # #----------------------------------------------------------------------- # @@ -1705,7 +1725,7 @@ EXTRN_MDL_FILES_SYSBASEDIR_LBCS="$EXTRN_MDL_FILES_SYSBASEDIR_LBCS" # #----------------------------------------------------------------------- # -EXTRN_MDL_LBCS_OFFSET_HRS="$EXTRN_MDL_LBCS_OFFSET_HRS" +EXTRN_MDL_LBCS_OFFSET_HRS="${EXTRN_MDL_LBCS_OFFSET_HRS}" # #----------------------------------------------------------------------- # @@ -1749,3 +1769,5 @@ Setup script completed successfully!!! { restore_shell_opts; } > /dev/null 2>&1 + + From 1323755cae4a4fbdf4338b9159a56483c8c14f02 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 27 Aug 2019 14:07:58 -0600 Subject: [PATCH 6/7] Add bash utility function that counts the number of files in the current directory whose names end with the specified extension. 
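A minimal usage sketch (illustrative only; it assumes the file is sourced
from the ush directory, where source_funcs.sh also lives, and that
print_info_msg writes the count to stdout so it can be captured):

    . ./count_files.sh
    num_nc_files=$( count_files "nc" )
    echo "Number of .nc files in the current directory: ${num_nc_files}"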
--- ush/count_files.sh | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100755 ush/count_files.sh diff --git a/ush/count_files.sh b/ush/count_files.sh new file mode 100755 index 0000000000..55d269082f --- /dev/null +++ b/ush/count_files.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +# +#----------------------------------------------------------------------- +# +# This function returns the number of files in the current directory +# that end with the specified extension (file_extension). +# +#----------------------------------------------------------------------- +# +. ./source_funcs.sh + +function count_files() { + + if [ "$#" -ne 1 ]; then + print_err_msg_exit "\ +Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. +Usage: + + ${FUNCNAME[0]} file_extension + +where file_extension is the file extension to use for counting files. +The file count returned will be equal to the number of files in the cur- +rent directory that end with \".${file_extension}\"." + fi + + local file_extension="$1" + local glob_pattern="*.${file_extension}" + local num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) + print_info_msg "${num_files}" + +} + + From 7e769d519fb404c4e8ce6b3fc983de0eabecd850 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 27 Aug 2019 14:09:43 -0600 Subject: [PATCH 7/7] Add script that tests files having specified extensions between two FV3SAR run directories. This is useful in performing regression tests. --- ush/cmp_expt_to_baseline.sh | 369 ++++++++++++++++++++++++++++++++++++ 1 file changed, 369 insertions(+) create mode 100755 ush/cmp_expt_to_baseline.sh diff --git a/ush/cmp_expt_to_baseline.sh b/ush/cmp_expt_to_baseline.sh new file mode 100755 index 0000000000..88e5f74e00 --- /dev/null +++ b/ush/cmp_expt_to_baseline.sh @@ -0,0 +1,369 @@ +#!/bin/sh -l +#----------------------------------------------------------------------- +# Description: Compare experiment to a baseline. Can be run with one +# or two command line arguments. With one argument, it +# assumes this is your experiment directory and creates a +# directory for the baseline based on your experiment's +# setup (by reading in the var_defns.sh file in your ex- +# periment directory). With two arguments, it takes the +# first one to be your experiment directory and the second +# the baseline directory. +# +# Usage: ./cmp_expt_to_baseline.sh ${expt_dir} [${baseline_dir}] +# +# Assumptions: RUNDIR1 and RUNDIR2 have the same subdirectory structure. +# nccmp is available as module load +# Script has only been tested on theia +#----------------------------------------------------------------------- + +module load nccmp +# +#----------------------------------------------------------------------- +# +# Process arguments. +# +#----------------------------------------------------------------------- +# +script_name=$( basename "$0" ) +if [ $# -ne 1 ] && [ $# -ne 2 ]; then + + printf " +ERROR from script ${script_name}: +Only 1 or 2 arguments may be specified. Usage: + + > ${script_name} expt_dir [baseline_dir] + +where expt_dir is the experiment directory and baseline_dir is an op- +tional baseline directory. +Exiting with nonzero exit code. +" + exit 1 + +fi +# +#----------------------------------------------------------------------- +# +# Set the experiment directory and make sure that it exists. +# +#----------------------------------------------------------------------- +# +expt_dir="$1" +if [ ! 
-d "${expt_dir}" ]; then + printf "\n +The specified experiment directory (expt_dir) does not exist: + expt_dir = \"$expt_dir\" +Exiting script with nonzero return code. +" + exit 1 +fi +# +#----------------------------------------------------------------------- +# +# Read the variable definitions file in the experiment directory. +# +#----------------------------------------------------------------------- +# +. ${expt_dir}/var_defns.sh +CDATE="${DATE_FIRST_CYCL[0]}${CYCL_HRS[0]}" +# +#----------------------------------------------------------------------- +# +# If two arguments are specified, then take the second one to be the di- +# rectory for the baseline. If only one argument is specified, form a +# baseline directory name from the parameters used in the experiment di- +# rectory. If any other number of arguments is specified, print out an +# error message and exit. +# +#----------------------------------------------------------------------- +# +if [ $# -eq 2 ]; then + + baseline_dir="$2" + +else + + baseline_dir="/scratch3/BMC/det/regional_FV3/regr_baselines" + if [ -n ${predef_domain} ]; then + baseline_dir="${baseline_dir}/${predef_domain}" + else + printf "\ +The experiment must be run on one of the predefined domains. Thus, +predef_domain cannot be empty: + predef_domain = \"${predef_domain}\" +Exiting script with nonzero return code. +" + exit 1 + fi + baseline_dir="${baseline_dir}/${CCPP_phys_suite}phys" + baseline_dir="${baseline_dir}/ICs-${EXTRN_MDL_NAME_ICSSURF}_LBCs-${EXTRN_MDL_NAME_LBCS}" + baseline_dir="${baseline_dir}/$CDATE" + +fi +# +# Make sure that the baseline directory exists. +# +if [ ! -d "${baseline_dir}" ]; then + printf "\n +A baseline directory corresponding to the configuration used in the ex- +periment directory (expt_dir) does not exist: + expt_dir = \"$expt_dir\" + baseline_dir (missing) = \"$baseline_dir\" +Exiting script with nonzero return code." + exit 1 +fi +# +#----------------------------------------------------------------------- +# +# Print out the experiment and baseline directories. +# +#----------------------------------------------------------------------- +# +printf " +The experiment and baseline directories are: + expt_dir = \"$expt_dir\" + baseline_dir = \"$baseline_dir\" +" +# +#----------------------------------------------------------------------- +# +# Set the array containing the names of the subdirectories that will be +# compared. +# +#----------------------------------------------------------------------- +# +subdirs=( "grid" \ + "orog" \ + "filtered_topo" \ + "shave" \ + "sfc_climo" \ + "${EXTRN_MDL_NAME_ICSSURF}/ICSSURF/$CDATE" \ + "${EXTRN_MDL_NAME_LBCS}/LBCS/$CDATE" \ + "ICs_BCs/$CDATE" \ + "INPUT" \ + "." ) +# +#----------------------------------------------------------------------- +# +# Set the array that defines the file extensions to compare in each sub- +# directory. +# +#----------------------------------------------------------------------- +# +declare -a file_extensions=( "nc" "nemsio" "grb" ) +#declare -a file_extensions=( "nc" "grb" ) +#declare -a file_extensions=( "nc" ) +# +#----------------------------------------------------------------------- +# +# Initialize file counts to 0. These are defined as follows: +# +# nfiles_total: +# The number of files in the experiment directory that we attempted to +# compare to the corresponding file in the baseline directory. +# +# nfiles_missing: +# The number of files (out of nfiles_total) that are missing from the +# baseline directory. 
+# +# nfiles_different: +# The number of files that exist in both the experiment and baseline di- +# rectories and are different. +# +#----------------------------------------------------------------------- +# +nfiles_total=0 +nfiles_missing=0 +nfiles_different=0 +# +#----------------------------------------------------------------------- +# +# Loop over the specified subdirectories. For each subdirectory, com- +# pare files having the specified extensions for the experiment and the +# baseline. +# +#----------------------------------------------------------------------- +# +for subdir in "${subdirs[@]}"; do + + msg="Comparing files in subdirectory \"$subdir\" ..." + msglen=${#msg} + printf "\n%s\n" "$msg" + printf "%0.s=" $(seq 1 $msglen) + printf "\n" + + for file_ext in "${file_extensions[@]}"; do + + msg="Comparing files with extension \"${file_ext}\" ..." + msglen=${#msg} + printf "\n%s\n" " $msg" + printf " " + printf "%0.s~" $(seq 1 $msglen) + printf "\n" + +# cmp_files_btwn_dirs "$expt_dir/$subdir" "${baseline_dir}/$subdir" "${ext}" || { \ +# printf " +#Call to file comparison function failed. Exiting with nonzero exit code. +#"; +# exit 1; } +# +#----------------------------------------------------------------------- +# +# +#----------------------------------------------------------------------- +# + if [ "$file_ext" = "nemsio" ] || [ "$file_ext" = "grb" ]; then + compare_tool="cmp" + elif [ "$file_ext" = "nc" ]; then + compare_tool="nccmp -d" + else + printf "\ +The file comparison tool to use for this file extension has not been +specified: + file_ext = \"${file_ext}\" +Please specify the compare tool and rerun. +Exiting script with nonzero exit code. +" + fi +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + cd ${expt_dir}/$subdir + num_files=$( ls -1 *.${file_ext} 2>/dev/null | wc -l ) + printf " + Number of files with extension \"${file_ext}\" in subdirectory \"$subdir\" + of the experiment directory is: ${num_files} +" + + if [ "${num_files}" -eq "0" ]; then + printf "\ + Skipping comparison of files with extension \"${file_ext}\" in this subdirectory. +" + else + + fn_len_max=0 + for fn in *.${file_ext}; do + fn_len=${#fn} + if [ ${fn_len} -gt ${fn_len_max} ]; then + fn_len_max=${fn_len} + fi + done + compare_msg_pre=" Comparing file " + msg_len_max=$(( fn_len_max + ${#compare_msg_pre} )) + + for fn in *.${file_ext}; do + + nfiles_total=$(( $nfiles_total + 1 )) + + fn1="$fn" + fn2="${baseline_dir}/$subdir/$fn" + if [ ! -e "$fn2" ]; then # Check if file exists in baseline directory. + + printf " + File specified by fn exists in subdirectory \"$subdir\" of the + experiment directory but not in that of the the baseline directory: + fn = \"$fn\" + subdir = \"$subdir\" + Incrementing missing file count and moving to next file or sub- + directory.\n" + nfiles_missing=$(( nfiles_missing + 1 )) + + else + + msg="${compare_msg_pre}\"$fn\"" + msg_len="${#msg}" + num_dots=$(( msg_len_max - msg_len + 7 )) + dots_str=$( printf "%0.s." $(seq 1 ${num_dots} ) ) + msg="${msg} ${dots_str}" + + printf "$msg" + eval_output=$( eval ${compare_tool} $fn1 $fn2 2>&1 ) + + if [ $? -eq 0 ]; then + printf " Files are identical.\n" + else + printf " FILES ARE DIFFERENT!!!\n" + printf "\ + Error message from \"${compare_tool}\" command is: +${eval_output} +" + nfiles_different=$(( $nfiles_different + 1 )) + fi + + fi + + done # Loop over files of the same extension. 
+
+    fi # Number of files > 0
+
+  done # Loop over file extensions.
+
+done # Loop over subdirectories.
+#
+#-----------------------------------------------------------------------
+#
+# Determine the overall result of the regression test.  This must be
+# done before printing out the summary below since the summary heading
+# includes the result string.
+#
+#-----------------------------------------------------------------------
+#
+if [ ${nfiles_missing} -eq 0 ] && [ ${nfiles_different} -eq 0 ]; then
+  result_str="PASS :)"
+  exit_code=0
+else
+
+  exit_code=1
+  if [ ${nfiles_missing} -ne 0 ] && [ ${nfiles_different} -eq 0 ]; then
+    result_str="FAIL (due to missing files)"
+  elif [ ${nfiles_missing} -eq 0 ] && [ ${nfiles_different} -ne 0 ]; then
+    result_str="FAIL (due to differing files)"
+  elif [ ${nfiles_missing} -ne 0 ] && [ ${nfiles_different} -ne 0 ]; then
+    result_str="FAIL (due to missing and differing files)"
+  fi
+
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Print out final results.
+#
+#-----------------------------------------------------------------------
+#
+msg="Summary of regression test: ${result_str}"
+msglen=${#msg}
+printf "\n%s\n" "$msg"
+printf "%0.s=" $(seq 1 $msglen)
+printf "\n"
+
+file_extensions_str=$(printf "\"%s\" " "${file_extensions[@]}");
+file_extensions_str="( ${file_extensions_str})"
+
+printf "
+  expt_dir = \"$expt_dir\"
+  baseline_dir = \"$baseline_dir\"
+
+  file_extensions = ${file_extensions_str}
+  nfiles_total = ${nfiles_total}
+  nfiles_missing = ${nfiles_missing}
+  nfiles_different = ${nfiles_different}
+
+where
+
+  file_extensions:
+  Array containing the file extensions considered when comparing files.
+  Only files ending with one of these extensions are compared.
+
+  nfiles_total:
+  The number of files in the experiment directory that we attempted to
+  compare to the corresponding file in the baseline directory.
+
+  nfiles_missing:
+  The number of files (out of nfiles_total) that are missing from the
+  baseline directory.
+
+  nfiles_different:
+  The number of files that exist in both the experiment and baseline di-
+  rectories and are different.
+
+"
+
+printf "Final result of regression test: ${result_str}\n"
+exit ${exit_code}
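Typical invocations of the comparison script above, following its own
usage notes (paths are illustrative):

  # Baseline directory derived from the experiment's var_defns.sh file.
  ./cmp_expt_to_baseline.sh /path/to/expt_dir

  # Baseline directory specified explicitly as the second argument.
  ./cmp_expt_to_baseline.sh /path/to/expt_dir /path/to/baseline_dir

The script exits with status 0 only if no files are missing from the
baseline and no compared files differ, so it can serve directly as the
pass/fail check of an automated regression test.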