diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 64a45b1808..6e53aa0c72 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -13,7 +13,9 @@ pipeline { // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use') // Use the line below to enable the PW AWS cluster // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use') - choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion'], description: 'Specify the platform(s) to use') + // Use the line below to enable hera + // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion'], description: 'Specify the platform(s) to use') + choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'jet', 'orion'], description: 'Specify the platform(s) to use') // Allow job runner to filter based on compiler choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build') // Uncomment the following line to re-enable comprehensive tests @@ -76,7 +78,9 @@ pipeline { axes { axis { name 'SRW_PLATFORM' - values 'cheyenne', 'gaea', 'hera', 'jet', 'orion' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1' + // Uncomment line below to re-add use of Hera + // values 'cheyenne', 'gaea', 'hera', 'jet', 'orion' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1' + values 'cheyenne', 'gaea', 'jet', 'orion' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1' } axis { @@ -132,7 +136,7 @@ pipeline { post { success { sh 'cd "${WORKSPACE}/${INSTALL_NAME}" && tar --create --gzip --verbose --file "${WORKSPACE}/${BUILD_NAME}.tgz" *' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.BUILD_NAME}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "build_${env.SRW_COMPILER}/srw_build-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.log", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.BUILD_NAME}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 
'us-east-1', showDirectlyInBrowser: false, sourceFile: "build_${env.SRW_COMPILER}/srw_build-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } @@ -173,10 +177,10 @@ pipeline { post { always { // Archive the test log files - sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --dereference --file "${WORKSPACE}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log.launch_FV3LAM_wflow */log/*' + sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --dereference --file "${WORKSPACE}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/tests/WE2E/WE2E_tests_*yaml ${WORKSPACE}/tests/WE2E/WE2E_summary*txt ${WORKSPACE}/tests/WE2E/log.*' // Remove the data sets from the experiments directory to conserve disk space sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_results-*-*.txt', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'woc-epic-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_logs-*-*.tgz', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_results-*-*.txt', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 'we2e_test_logs-*-*.tgz', storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } diff --git a/.cicd/scripts/srw_build.sh b/.cicd/scripts/srw_build.sh index 10327366f0..ca81576963 100755 --- a/.cicd/scripts/srw_build.sh +++ b/.cicd/scripts/srw_build.sh @@ -35,6 +35,6 @@ cd - # Create combined log file for upload to s3 build_dir="${workspace}/build_${SRW_COMPILER}" cat ${build_dir}/log.cmake ${build_dir}/log.make \ - >${build_dir}/srw_build-${platform}-${SRW_COMPILER}.log + >${build_dir}/srw_build-${platform}-${SRW_COMPILER}.txt exit $build_exit diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh index 4c4cbc95af..8c6ef42528 100755 --- a/.cicd/scripts/srw_test.sh +++ 
b/.cicd/scripts/srw_test.sh @@ -38,46 +38,17 @@ else fi cd ${we2e_test_dir} -./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ - expt_basedir=${we2e_experiment_base_dir} \ - opsroot=${nco_dir} - -# Allow the tests to start before checking for status. -# TODO: Create a parameter that sets the initial start delay. -sleep 300 - # Progress file progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt" - -# Wait for all tests to complete. -while true; do - - # Check status of all experiments - ./get_expts_status.sh expts_basedir="${we2e_experiment_base_dir}" \ - verbose="FALSE" | tee ${progress_file} - - # Exit loop only if there are not tests in progress - set +e - grep -q "Workflow status: IN PROGRESS" ${progress_file} - exit_code=$? - set -e - - if [[ $exit_code -ne 0 ]]; then - break - fi - - # TODO: Create a paremeter that sets the poll frequency. - sleep 60 -done - -# Allow we2e cron jobs time to complete and clean up themselves -# TODO: Create parameter that sets the interval for the we2e cron jobs; this -# value should be some factor of that interval to ensure the cron jobs execute -# before the workspace is cleaned up. -sleep 600 +./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ + --expt_basedir=${we2e_experiment_base_dir} \ + --opsroot=${nco_dir} | tee ${progress_file} # Set exit code to number of failures set +e -failures=$(grep "Workflow status: FAILURE" ${progress_file} | wc -l) +failures=$(grep " DEAD " ${progress_file} | wc -l) +if [[ $failures -ne 0 ]]; then + failures=1 +fi set -e exit ${failures} diff --git a/.gitignore b/.gitignore index dafefc0695..bc3eee8545 100644 --- a/.gitignore +++ b/.gitignore @@ -5,13 +5,16 @@ fix/ include/ lib/ share/ +modulefiles/extrn_comp_build/ sorc/*/ -tests/WE2E/WE2E_test_info.csv +tests/WE2E/WE2E_tests_*.yaml tests/WE2E/*.txt tests/WE2E/*.log +tests/WE2E/log.* ush/__pycache__/ ush/config.yaml ush/python_utils/__pycache__/ ush/*.swp *.swp +__pycache__ diff --git a/Externals.cfg b/Externals.cfg index 926839020b..541bff335e 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. #branch = develop -hash = 52072c5 +hash = 36d6e16 local_path = sorc/ufs-weather-model required = True @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/UPP # Specify either a branch name or a hash but not both. #branch = develop -hash = a644aaa +hash = 22cfb88 local_path = sorc/UPP required = True @@ -44,5 +44,23 @@ hash = 8d21f25 local_path = sorc/rrfs_utl required = True +[arl_nexus] +protocol = git +repo_url = https://github.com/noaa-oar-arl/NEXUS +# Specify either a branch name or a hash but not both. +#branch = develop +hash = 3842818 +local_path = sorc/arl_nexus +required = True + +[aqm-utils] +protocol = git +repo_url = https://github.com/NOAA-EMC/AQM-utils +# Specify either a branch name or a hash but not both. +#branch = develop +hash = 0a86f73 +local_path = sorc/AQM-utils +required = True + [externals_description] schema_version = 1.0.0 diff --git a/README.md b/README.md index 6956aa7a40..d4268e5e80 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,18 @@ # UFS Short-Range Weather Application -The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. 
It is designed to be the source system for NOAA’s operational numerical weather prediction applications while enabling research, development, and contribution opportunities for the broader weather enterprise. For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/. +The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. It is designed to be the source system for NOAA’s operational numerical weather prediction applications while enabling research, development, and contribution opportunities for the broader Weather Enterprise. For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/. -The UFS includes multiple applications (see a complete list at https://ufscommunity.org/science/aboutapps/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.0.0) represents a snapshot of this continuously evolving system. +The UFS includes multiple applications (see a complete list at https://ufscommunity.org/science/aboutapps/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.1.0) represents a snapshot of this continuously evolving system. -The UFS SRW App User's Guide associated with the development branch is at: https://ufs-srweather-app.readthedocs.io/en/develop/, while the guide specific to the SRW App v2.0.0 release can be found at: https://ufs-srweather-app.readthedocs.io/en/release-public-v2/. The repository is at: https://github.com/ufs-community/ufs-srweather-app. +The UFS SRW App User's Guide associated with the development branch is at: https://ufs-srweather-app.readthedocs.io/en/develop/, while the guide specific to the SRW App v2.1.0 release can be found at: https://ufs-srweather-app.readthedocs.io/en/release-public-v2.1.0/. The repository is at: https://github.com/ufs-community/ufs-srweather-app. For instructions on how to clone the repository, build the code, and run the workflow, see: https://github.com/ufs-community/ufs-srweather-app/wiki/Getting-Started -UFS Development Team. (2022, June 23). Unified Forecast System (UFS) Short-Range Weather (SRW) Application (Version v2.0.0). Zenodo. https://doi.org/10.5281/zenodo.6505854 +For a debugging guide for users and developers in the field of Earth System Modeling, please see: +https://epic.noaa.gov/wp-content/uploads/2022/12/Debugging-Guide.pdf + +UFS Development Team. (2022, Nov. 17). Unified Forecast System (UFS) Short-Range Weather (SRW) Application (Version v2.1.0). Zenodo. 
https://doi.org/10.5281/zenodo.7277602 [![Python unittests](https://github.com/ufs-community/ufs-srweather-app/actions/workflows/python_unittests.yaml/badge.svg)](https://github.com/ufs-community/ufs-srweather-app/actions/workflows/python_unittests.yaml) [![Python functional tests](https://github.com/ufs-community/ufs-srweather-app/actions/workflows/python_func_tests.yaml/badge.svg)](https://github.com/ufs-community/ufs-srweather-app/actions/workflows/python_func_tests.yaml) diff --git a/devbuild.sh b/devbuild.sh index d31ed0eca7..526b1b080a 100755 --- a/devbuild.sh +++ b/devbuild.sh @@ -15,8 +15,8 @@ OPTIONS compiler to use; default depends on platform (e.g. intel | gnu | cray | gccgfortran) -a, --app=APPLICATION - weather model application to build - (e.g. ATM | ATMW | S2S | S2SW) + weather model application to build; for example, ATMAQ for Online-CMAQ + (e.g. ATM | ATMAQ | ATMW | S2S | S2SW) --ccpp="CCPP_SUITE1,CCPP_SUITE2..." CCPP suites (CCPP_SUITES) to include in build; delimited with ',' --enable-options="OPTION1,OPTION2,..." @@ -56,8 +56,7 @@ TARGETS all = builds all apps Or any combinations of (ufs, ufs_utils, upp, gsi, rrfs_utils) -NOTE: This script is for internal developer use only; -See User's Guide for detailed build instructions +NOTE: See User's Guide for detailed build instructions EOF_USAGE } @@ -87,6 +86,8 @@ Settings: BUILD_UPP=${BUILD_UPP} BUILD_GSI=${BUILD_GSI} BUILD_RRFS_UTILS=${BUILD_RRFS_UTILS} + BUILD_NEXUS=${BUILD_NEXUS} + BUILD_AQM_UTILS=${BUILD_AQM_UTILS} EOF_SETTINGS } @@ -123,6 +124,8 @@ BUILD_UFS_UTILS="off" BUILD_UPP="off" BUILD_GSI="off" BUILD_RRFS_UTILS="off" +BUILD_NEXUS="off" +BUILD_AQM_UTILS="off" # Make options CLEAN=false @@ -182,6 +185,8 @@ while :; do upp) DEFAULT_BUILD=false; BUILD_UPP="on" ;; gsi) DEFAULT_BUILD=false; BUILD_GSI="on" ;; rrfs_utils) DEFAULT_BUILD=false; BUILD_RRFS_UTILS="on" ;; + nexus) DEFAULT_BUILD=false; BUILD_NEXUS="on" ;; + aqm_utils) DEFAULT_BUILD=false; BUILD_AQM_UTILS="on" ;; # unknown -?*|?*) usage_error "Unknown option $1" ;; *) break @@ -189,17 +194,11 @@ while :; do shift done -# choose default apps to build -if [ "${DEFAULT_BUILD}" = true ]; then - BUILD_UFS="on" - BUILD_UFS_UTILS="on" - BUILD_UPP="on" -fi - # Ensure uppercase / lowercase ============================================ APPLICATION="${APPLICATION^^}" PLATFORM="${PLATFORM,,}" COMPILER="${COMPILER,,}" +EXTERNALS="${EXTERNALS^^}" # check if PLATFORM is set if [ -z $PLATFORM ] ; then @@ -207,11 +206,30 @@ if [ -z $PLATFORM ] ; then usage exit 0 fi - # set PLATFORM (MACHINE) MACHINE="${PLATFORM}" printf "PLATFORM(MACHINE)=${PLATFORM}\n" >&2 +# choose default apps to build +if [ "${DEFAULT_BUILD}" = true ]; then + BUILD_UFS="on" + BUILD_UFS_UTILS="on" + BUILD_UPP="on" +fi + +# Choose components to build for air quality modeling (Online-CMAQ) +if [ "${APPLICATION}" = "ATMAQ" ]; then + if [ "${DEFAULT_BUILD}" = true ]; then + BUILD_NEXUS="on" + BUILD_AQM_UTILS="on" + fi + if [ "${PLATFORM}" = "wcoss2" ]; then + BUILD_POST_STAT="on" + else + BUILD_POST_STAT="off" + fi +fi + set -eu # automatically determine compiler @@ -221,8 +239,8 @@ if [ -z "${COMPILER}" ] ; then orion) COMPILER=intel ;; wcoss2) COMPILER=intel ;; cheyenne) COMPILER=intel ;; - macos,singularity) COMPILER=gnu ;; - odin,noaacloud) COMPILER=intel ;; + macos|singularity) COMPILER=gnu ;; + odin|noaacloud) COMPILER=intel ;; *) COMPILER=intel printf "WARNING: Setting default COMPILER=intel for new platform ${PLATFORM}\n" >&2; @@ -304,7 +322,9 @@ CMAKE_SETTINGS="\ -DBUILD_UFS_UTILS=${BUILD_UFS_UTILS}\ 
-DBUILD_UPP=${BUILD_UPP}\ -DBUILD_GSI=${BUILD_GSI}\ - -DBUILD_RRFS_UTILS=${BUILD_RRFS_UTILS}" + -DBUILD_RRFS_UTILS=${BUILD_RRFS_UTILS}\ + -DBUILD_NEXUS=${BUILD_NEXUS}\ + -DBUILD_AQM_UTILS=${BUILD_AQM_UTILS}" if [ ! -z "${APPLICATION}" ]; then CMAKE_SETTINGS="${CMAKE_SETTINGS} -DAPP=${APPLICATION}" @@ -318,6 +338,29 @@ fi if [ ! -z "${DISABLE_OPTIONS}" ]; then CMAKE_SETTINGS="${CMAKE_SETTINGS} -DDISABLE_OPTIONS=${DISABLE_OPTIONS}" fi +if [ "${APPLICATION}" = "ATMAQ" ]; then + CMAKE_SETTINGS="${CMAKE_SETTINGS} -DCPL_AQM=ON -DBUILD_POST_STAT=${BUILD_POST_STAT}" + + # Copy module files to designated directory + EXTRN_BUILD_MOD_DIR="${SRW_DIR}/modulefiles/extrn_comp_build" + mkdir -p ${EXTRN_BUILD_MOD_DIR} + if [ "${BUILD_UFS}" = "on" ]; then + cp "${SRW_DIR}/sorc/ufs-weather-model/modulefiles/ufs_${PLATFORM}.${COMPILER}.lua" "${EXTRN_BUILD_MOD_DIR}/mod_ufs-weather-model.lua" + cp "${SRW_DIR}/sorc/ufs-weather-model/modulefiles/ufs_common.lua" ${EXTRN_BUILD_MOD_DIR} + fi + if [ "${BUILD_UFS_UTILS}" = "on" ]; then + cp "${SRW_DIR}/sorc/UFS_UTILS/modulefiles/build.${PLATFORM}.${COMPILER}.lua" "${EXTRN_BUILD_MOD_DIR}/mod_ufs-utils.lua" + fi + if [ "${BUILD_UPP}" = "on" ]; then + cp "${SRW_DIR}/sorc/UPP/modulefiles/${PLATFORM}.lua" "${EXTRN_BUILD_MOD_DIR}/mod_upp.lua" + fi + if [ "${BUILD_NEXUS}" = "on" ]; then + cp "${SRW_DIR}/sorc/AQM-utils/parm/nexus_modulefiles/${PLATFORM}.${COMPILER}.lua" "${EXTRN_BUILD_MOD_DIR}/mod_nexus.lua" + fi + if [ "${BUILD_AQM_UTILS}" = "on" ]; then + cp "${SRW_DIR}/sorc/AQM-utils/modulefiles/build_${PLATFORM}.${COMPILER}.lua" "${EXTRN_BUILD_MOD_DIR}/mod_aqm-utils.lua" + fi +fi # make settings MAKE_SETTINGS="-j ${BUILD_JOBS}" @@ -393,6 +436,16 @@ if [ $USE_SUB_MODULES = true ]; then printf "... Loading RRFS_UTILS modules ...\n" load_module "" fi + if [ $BUILD_NEXUS = "on" ]; then + printf "... Loading NEXUS modules ...\n" + module use ${SRW_DIR}/sorc/arl_nexus/modulefiles + load_module "" + fi + if [ $BUILD_AQM_UTILS = "on" ]; then + printf "... Loading AQM-utils modules ...\n" + module use ${SRW_DIR}/sorc/AQM-utils/modulefiles + load_module "" + fi else module use ${SRW_DIR}/modulefiles module load ${MODULE_FILE} diff --git a/docs/UsersGuide/requirements.txt b/docs/UsersGuide/requirements.txt index 9c7258463b..7be32f526d 100644 --- a/docs/UsersGuide/requirements.txt +++ b/docs/UsersGuide/requirements.txt @@ -1,2 +1,3 @@ sphinxcontrib-bibtex sphinx_rtd_theme +docutils==0.16 \ No newline at end of file diff --git a/docs/UsersGuide/source/AQM.rst b/docs/UsersGuide/source/AQM.rst new file mode 100644 index 0000000000..a0b52ca3fe --- /dev/null +++ b/docs/UsersGuide/source/AQM.rst @@ -0,0 +1,172 @@ +.. _AQM: + +===================================== +Air Quality Modeling (Online-CMAQ) +===================================== + +The standard SRW App distribution uses the uncoupled version of the UFS Weather Model (atmosphere-only). However, users have the option to use a coupled version of the SRW App that includes the standard distribution (atmospheric model) plus the Air Quality Model (AQM). + +The AQM is a UFS Application that dynamically couples the Community Multiscale Air Quality (:term:`CMAQ`) model with the UFS Weather Model (WM) through the :term:`NUOPC` Layer to simulate temporal and spatial variations of atmospheric compositions (e.g., ozone and aerosol compositions). The CMAQ model, treated as a column chemistry model, updates concentrations of chemical species (e.g., ozone and aerosol compositions) at each integration time step. 
The transport terms (e.g., :term:`advection` and diffusion) of all chemical species are handled by the UFS WM as tracers. + +.. note:: + + Although this chapter is the primary documentation resource for running the AQM configuration, users may need to refer to :numref:`Chapter %s ` and :numref:`Chapter %s ` for additional information on building and running the SRW App, respectively. + +.. attention:: + + These instructions should work smoothly on Hera and WCOSS2, but users on other systems may need to make additional adjustments. + +Quick Start Guide (AQM/Online-CMAQ) +===================================== + +Download the Code +------------------- + +Clone the ``develop`` branch of the authoritative SRW App repository: + +.. code-block:: console + + git clone -b develop https://github.com/ufs-community/ufs-srweather-app + cd ufs-srweather-app + +Note that the latest hash of the ``develop`` branch might not be tested with the sample scripts for AQM. To check out the stable (verified) version for AQM/online-CMAQ, users can check out hash ``#ff6f103``: + +.. code-block:: console + + git checkout ff6f103 + +This hash will then check out the following hashes (as of 03/08/2023) of the external components, which are specified in ``ufs-srweather-app/Externals.cfg``: + +.. _ExternalsAQM: + +.. table:: Externals for Online-CMAQ + + +--------------------+--------------+ + | Component | Hash | + +====================+==============+ + | UFS_UTILS | ca9bed8 | + +--------------------+--------------+ + | ufs-weather-model | e051e0e | + +--------------------+--------------+ + | UPP | 2b2c84a | + +--------------------+--------------+ + | NEXUS | 3842818 | + +--------------------+--------------+ + | AQM-utils | e078c70 | + +--------------------+--------------+ + +Users may replace the hashes above with different ones if they prefer. For example, users can comment out the hash line and uncomment the branch line with a new ``repo_url`` address to use a different branch for development. In the example below, the ``ufs-weather-model`` repository URL has been changed to check out code from a user's personal fork rather than from the authoritative UFS repository. + +.. code-block:: console + + repo_url = https://github.com/chan-hoo/ufs-weather-model + branch = feature/for_example + #hash = ff6f103 + +Checkout Externals +--------------------- + +Along with the components detailed in :numref:`Chapter %s `, the AQM version of the SRW App pulls in the externals listed in :numref:`Table %s `. Users must run the ``checkout_externals`` script to collect (or "check out") the individual components of the SRW App (AQM version) from their respective GitHub repositories. + +.. code-block:: console + + ./manage_externals/checkout_externals + +Build the SRW App with AQM +----------------------------- + +On Hera and WCOSS2, users can build the SRW App AQM binaries with the following command: + +.. code-block:: console + + ./devbuild.sh -p= -a=ATMAQ + +where ```` is ``hera`` or ``wcoss2``. The ``-a`` argument indicates the configuration/version of the application to build. + +Building the SRW App with AQM on other machines, including other `Level 1 `__ platforms, is not currently guaranteed to work, and users may have to make adjustments to the modulefiles for their system. + +Load the ``regional_workflow`` Environment +-------------------------------------------- + +Load the python environment for the workflow: + +..
code-block:: console + + # On WCOSS2 (do not run on other systems): + source ../versions/run.ver.wcoss2 + # On all systems (including WCOSS2): + module use /path/to/ufs-srweather-app/modulefiles + module load wflow_ + +where ```` is ``hera`` or ``wcoss2``. The workflow should load on other platforms listed under the ``MACHINE`` variable in :numref:`Section %s `, but users may need to adjust other elements of the process when running on those platforms. + +If the console outputs a message, the user should run the commands specified in the message. For example, if the output says: + +.. code-block:: console + + Please do the following to activate conda: + > conda activate regional_workflow + +then the user should run ``conda activate regional_workflow``. Otherwise, the user can continue with configuring the workflow. + +.. _AQMConfig: + +Configure an Experiment +--------------------------- + +Users will need to configure their experiment by setting parameters in the ``config.yaml`` file. To start, users can copy a default experiment setting into ``config.yaml``: + +.. code-block:: console + + cd ush + cp config.aqm.community.yaml config.yaml + +Users may prefer to copy the ``config.aqm.nco.realtime.yaml`` file for a default "nco" mode experiment instead. + +.. note:: + + Additional sample configuration files can be found in the ``online-cmaq`` branch of Chan-Hoo Jeon's (NOAA/NCEP/EMC) ``ufs-srweather-app`` repository fork on `GitHub `__. + +Users will need to change the ``MACHINE`` and ``ACCOUNT`` variables in ``config.yaml`` to match their system. They may also wish to adjust other experiment settings. For more information on each task and variable, see :numref:`Chapter %s `. + +Users may also wish to change :term:`cron`-related parameters in ``config.yaml``. In the ``config.aqm.community.yaml`` file, which was copied into ``config.yaml``, cron is used for automatic submission and resubmission of the workflow: + +.. code-block:: console + + workflow: + USE_CRON_TO_RELAUNCH: true + CRON_RELAUNCH_INTVL_MNTS: 3 + +This means that cron will submit the launch script every 3 minutes. Users may choose not to submit using cron or to submit at a different frequency. Note that users should create a crontab by running ``crontab -e`` the first time they use cron. + +Generate the Workflow +------------------------ + +Generate the workflow: + +.. code-block:: console + + ./generate_FV3LAM_wflow.py + +Run the Workflow +------------------ + +If ``USE_CRON_TO_RELAUNCH`` is set to true in ``config.yaml`` (see :numref:`Section %s `), the workflow will run automatically. If it was set to false, users must submit the workflow manually from the experiment directory: + +.. code-block:: console + + cd / + ./launch_FV3LAM_wflow.sh + +Repeat the launch command regularly until a SUCCESS or FAILURE message appears on the terminal window. See :numref:`Section %s ` for more on the ```` and ```` variables. + +Users may check experiment status from the experiment directory with either of the following commands: + +..
code-block:: console + + # Check the experiment status (best for cron jobs) + rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + + # Check the experiment status and relaunch the workflow (for manual jobs) + ./launch_FV3LAM_wflow.sh; tail -n 40 log.launch_FV3LAM_wflow + diff --git a/docs/UsersGuide/source/BuildSRW.rst b/docs/UsersGuide/source/BuildSRW.rst index 85790b482c..e768372233 100644 --- a/docs/UsersGuide/source/BuildSRW.rst +++ b/docs/UsersGuide/source/BuildSRW.rst @@ -30,18 +30,18 @@ To build the SRW App, users will complete the following steps: .. _HPCstackInfo: -Install the HPC-Stack -======================== +Install the Prerequisite Software Stack +========================================== -.. Attention:: - Skip the HPC-Stack installation if working on a `Level 1 system `_ (e.g., Cheyenne, Hera, Orion, NOAA Cloud). +Currently, installation of the prerequisite software stack is supported via HPC-Stack. :term:`HPC-Stack` is a repository that provides a unified, shell script-based system to build the software stack required for `UFS `__ applications such as the SRW App. -**Definition:** :term:`HPC-Stack` is a repository that provides a unified, shell script-based build system to build the software stack required for `UFS `_ applications such as the SRW App. +.. Attention:: + Skip the HPC-Stack installation if working on a `Level 1 system `__ (e.g., Cheyenne, Hera, Orion, NOAA Cloud), and :ref:`continue to the next section `. Background ---------------- -The UFS Weather Model draws on over 50 code libraries to run its applications. These libraries range from libraries developed in-house at NOAA (e.g., NCEPLIBS, FMS) to libraries developed by NOAA's partners (e.g., PIO, ESMF) to truly third party libraries (e.g., NETCDF). Individual installation of these libraries is not practical, so the `HPC-Stack `__ was developed as a central installation system to ensure that the infrastructure environment across multiple platforms is as similar as possible. Installation of the HPC-Stack is required to run the SRW App. +The UFS Weather Model draws on over 50 code libraries to run its applications. These libraries range from libraries developed in-house at NOAA (e.g., NCEPLIBS, FMS) to libraries developed by NOAA's partners (e.g., PIO, ESMF) to truly third party libraries (e.g., netCDF). Individual installation of these libraries is not practical, so the `HPC-Stack `__ was developed as a central installation system to ensure that the infrastructure environment across multiple platforms is as similar as possible. Installation of the HPC-Stack is required to run the SRW App. Instructions ------------------------- @@ -57,6 +57,9 @@ Users working on systems that fall under `Support Levels 2-4 `. +.. attention:: + Although HPC-Stack is the fully-supported option as of the v2.1.0 release, UFS applications are gradually shifting to :term:`spack-stack`, which is a :term:`Spack`-based method for installing UFS prerequisite software libraries. The spack-stack is currently used on NOAA Cloud platforms and in containers, while HPC-Stack is still used on other Level 1 systems and is the software stack validated by the UFS Weather Model as of the v2.1.0 release. Users are encouraged to check out `spack-stack `__ to prepare for the upcoming shift in support from HPC-Stack to spack-stack. + After completing installation, continue to the next section (:numref:`Section %s: Download the UFS SRW Application Code `). .. 
_DownloadSRWApp: @@ -114,10 +117,10 @@ The cloned repository contains the configuration files and sub-directories shown | | configuration files such as ``model_configure``, | | | ``diag_table``, and ``field_table``. | +--------------------------------+-----------------------------------------------------------+ - | README.md | Getting Started Guide | + | README.md | Contains SRW App introductory information | +--------------------------------+-----------------------------------------------------------+ - | rename_model.sh | Used to rename the model before it becomes NCO | - | | operational. The SRW App is a generic app that is the base| + | rename_model.sh | Used to rename the model before it is transitioned into | + | | operations. The SRW App is a generic app that is the base | | | for models such as :term:`AQM` and :term:`RRFS`. When | | | these models become operational, variables like | | | ``HOMEdir`` and ``PARMdir`` will be renamed to | @@ -160,7 +163,7 @@ Run the executable that pulls in SRW App components from external repositories: .. code-block:: console - cd $SRW + cd ./manage_externals/checkout_externals The script should output dialogue indicating that it is retrieving different code repositories. It may take several minutes to download these repositories. @@ -169,7 +172,7 @@ To see more options for the ``checkout_externals`` script, users can run ``./man * ``-S``: Outputs the status of the repositories managed by ``checkout_externals``. By default only summary information is provided. Use with the ``-v`` (verbose) option to see details. * ``-x [EXCLUDE [EXCLUDE ...]]``: allows users to exclude components when checking out externals. - * ``-o``: By default only the required externals are checked out. This flag will also checkout the optional externals. + * ``-o``: By default only the required externals are checked out. This flag will also check out the optional externals. Generally, users will not need to use the options and can simply run the script, but the options are available for those who are curious. @@ -189,10 +192,10 @@ On Level 1 systems for which a modulefile is provided under the ``modulefiles`` ./devbuild.sh --platform= -where ```` is replaced with the name of the platform the user is working on. Valid values are: ``cheyenne`` | ``gaea`` | ``hera`` | ``jet`` | ``linux`` | ``macos`` | ``noaacloud`` | ``odin`` | ``orion`` | ``singularity`` | ``wcoss2`` +where ```` is replaced with the name of the platform the user is working on. Valid values include: ``cheyenne`` | ``gaea`` | ``hera`` | ``jet`` | ``linux`` | ``macos`` | ``noaacloud`` | ``orion`` .. note:: - Although build modulefiles exist for generic Linux and MacOS machines, users will need to alter these according to the instructions in Sections :numref:`%s ` & :numref:`%s `. Users on these systems may have more success building the SRW App with the :ref:`CMake Approach ` instead. + Although build modulefiles exist for generic Linux and MacOS machines, users will need to alter these according to the instructions in Sections :numref:`%s ` & :numref:`%s `. Users on these systems may have more success building the SRW App with the :ref:`CMake Approach ` instead. If compiler auto-detection fails for some reason, specify it using the ``--compiler`` argument. For example: @@ -375,7 +378,7 @@ Set Up the Build Environment .. attention:: * If users successfully built the executables in :numref:`Table %s `, they should skip to step :numref:`Chapter %s `. 
- * Users who want to build the SRW App on a generic MacOS should skip to :numref:`Section %s ` and follow the approach there. + * Users who want to build the SRW App on MacOS or generic Linux systems should skip to :numref:`Section %s ` and follow the approach there. If the ``devbuild.sh`` approach failed, users need to set up their environment to run a workflow on their specific platform. First, users should make sure ``Lmod`` is the app used for loading modulefiles. This is the case on most Level 1 systems; however, on systems such as Gaea/Odin, the default modulefile loader is from Cray and must be switched to Lmod. For example, on Gaea, users can run one of the following two commands depending on whether they have a bash or csh shell, respectively: @@ -384,9 +387,11 @@ If the ``devbuild.sh`` approach failed, users need to set up their environment t source etc/lmod-setup.sh gaea source etc/lmod-setup.csh gaea -If users execute one of the above commands on systems that don't need it, it will not cause any problems (it will simply do a ``module purge``). +.. note:: -From here on, ``Lmod`` is ready to load the modulefiles needed by the SRW App. These modulefiles are located in the ``modulefiles`` directory. To load the necessary modulefile for a specific ```` using a given ````, run: + If users execute one of the above commands on systems that don't need it, it will not cause any problems (it will simply do a ``module purge``). + +From here, ``Lmod`` is ready to load the modulefiles needed by the SRW App. These modulefiles are located in the ``modulefiles`` directory. To load the necessary modulefile for a specific ```` using a given ````, run: .. code-block:: console @@ -423,7 +428,7 @@ From the build directory, run the following commands to build the pre-processing cmake .. -DCMAKE_INSTALL_PREFIX=.. -DCMAKE_INSTALL_BINDIR=exec .. make -j 4 >& build.out & -``-DCMAKE_INSTALL_PREFIX`` specifies the location in which the ``exec``, ``include``, ``lib``, and ``share`` directories will be created. These directories will contain various components of the SRW App. Its recommended value ``..`` denotes one directory up from the build directory. In the next line, the ``make`` argument ``-j 4`` indicates that the build will run in parallel with 4 threads. Although users can specify a larger or smaller number of threads (e.g., ``-j 8``, ``-j 2``), it is highly recommended to use at least 4 parallel threads to prevent overly long installation times. +``-DCMAKE_INSTALL_PREFIX`` specifies the location where the ``exec``, ``include``, ``lib``, and ``share`` directories will be created. These directories will contain various components of the SRW App. Its recommended value ``..`` denotes one directory up from the build directory. In the next line, the ``make`` argument ``-j 4`` indicates that the build will run in parallel with 4 threads. Although users can specify a larger or smaller number of threads (e.g., ``-j 8``, ``-j 2``), it is highly recommended to use at least 4 parallel threads to prevent overly long installation times. The build will take a few minutes to complete. When it starts, a random number is printed to the console, and when it is done, a ``[1]+ Done`` message is printed to the console. ``[1]+ Exit`` indicates an error. Output from the build will be in the ``ufs-srweather-app/build/build.out`` file. When the build completes, users should see the forecast model executable ``ufs_model`` and several pre- and post-processing executables in the ``ufs-srweather-app/exec`` directory. 
These executables are described in :numref:`Table %s `. @@ -431,66 +436,45 @@ The build will take a few minutes to complete. When it starts, a random number i If you see the ``build.out`` file, but there is no ``ufs-srweather-app/exec`` directory, wait a few more minutes for the build to complete. -.. _MacDetails: +.. _MacLinuxDetails: -Additional Details for Building on MacOS ------------------------------------------- +Additional Details for Building on MacOS or Generic Linux +------------------------------------------------------------ .. note:: - Users who are **not** building the SRW App on a MacOS machine may skip to the :numref:`Section %s ` to finish building the SRW App or continue to :numref:`Chapter %s ` to configure and run an experiment. + Users who are **not** building the SRW App on MacOS or generic Linux platforms may skip to :numref:`Section %s ` to finish building the SRW App or continue to :numref:`Chapter %s ` to configure and run an experiment. -The SRW App can be built on MacOS machines, presuming HPC-Stack has already been installed successfully. The following two options have been tested: +The SRW App can be built on MacOS and generic Linux machines after the prerequisite software has been installed on these systems (via :term:`HPC-Stack` or :term:`spack-stack`). The installation for MacOS is architecture-independent and has been tested using both x86_64 and M1 chips (running natively). The following configurations for MacOS have been tested: -* **Option 1:** MacBookAir 2020, M1 chip (arm64, running natively), 4+4 cores, Big Sur 11.6.4, GNU compiler suite v.11.2.0_3 (gcc, gfortran, g++); no MPI pre-installed + #. MacBookPro 2019, 2.4 GHz 8-core Intel Core i9 (x86_64), Monterey Sur 12.1, GNU compiler suite v.11.3.0 (gcc, gfortran, g++); mpich 3.3.2 or openmpi/4.1.2 + #. MacBookAir 2020, M1 chip (arm64, running natively), 4+4 cores, Big Sur 11.6.4, GNU compiler suite v.11.3.0 (gcc, gfortran, g++); mpich 3.3.2 or openmpi/4.1.2 + #. MacBook Pro 2015, 2.8 GHz Quad-Core Intel Core i7 (x86_64), Catalina OS X 10.15.7, GNU compiler suite v.11.2.0_3 (gcc, gfortran, g++); mpich 3.3.2 or openmpi/4.1.2 -* **Option 2:** MacBook Pro 2015, 2.8 GHz Quad-Core Intel Core i7 (x86_64), Catalina OS X 10.15.7, GNU compiler suite v.11.2.0_3 (gcc, gfortran, g++); no MPI pre-installed +Several Linux builds have been tested on systems with x86_64 architectures. -The ``build_macos_gnu`` modulefile initializes the module environment, lists the location of HPC-Stack modules, loads the meta-modules and modules, and sets compilers, additional flags, and environment variables needed for building the SRW App. The modulefile must be modified to include the absolute path to the user's HPC-Stack installation and ``ufs-srweather-app`` directories. In particular, the following section must be modified: +The ``./modulefiles/build__gnu.lua`` modulefile (where ```` is ``macos`` or ``linux``) is written as a Lmod module in the Lua language, and it can be loaded once the Lmod module environment has been initialized (which should have happened even prior to :ref:`installing HPC-Stack `). This module lists the location of the HPC-Stack modules, loads the meta-modules and modules, sets serial and parallel compilers, additional flags, and any environment variables needed for building the SRW App. The modulefile must be modified to include the absolute path to the user's HPC-Stack installation: .. 
code-block:: console - # This path should point to your HPCstack installation directory - setenv HPCstack "/Users/username/hpc-stack/install" - - # This path should point to your SRW Application directory - setenv SRW "/Users/username/ufs-srweather-app" + - This path should point to your HPCstack installation directory + local HPCstack="/Users/username/hpc-stack/install" -An excerpt of the ``build_macos_gnu`` contents appears below for Option 1. To use Option 2, the user will need to comment out the lines specific to Option 1 (using a double hyphen) and uncomment the lines specific to Option 2 in the ``build_macos_gnu`` modulefile. Additionally, users need to verify that all file paths reflect their system's configuration and that the correct version numbers for software libraries appear in the modulefile. - -.. code-block:: console +Linux users need to configure the ``ufs-srweather-app/etc/lmod-setup.sh`` file for the ``linux`` case and set the ``BASH_ENV`` variable to point to the Lmod initialization script. There is no need to modify this script for the ``macos`` case presuming that Lmod followed a standard installation procedure using the Homebrew package manager for MacOS. - -- Option 1 compiler paths: - setenv("CC", "/opt/homebrew/bin/gcc") - setenv("FC", "/opt/homebrew/bin/gfortran") - setenv("CXX", "/opt/homebrew/bin/g++") - - -- Option 2 compiler paths: - --[[ - setenv("CC", "/usr/local/bin/gcc") - setenv("FC", "/usr/local/bin/gfortran") - setenv("CXX", "/usr/local/bin/g++") - --]] - -Then, users must source the Lmod setup file, just as they would on other systems, and load the modulefiles needed for building and running the SRW App: +Next, users must source the Lmod setup file, just as they would on other systems, and load the modulefiles needed for building and running the SRW App: .. code-block:: console - - source etc/lmod-setup.sh macos + + source etc/lmod-setup.sh module use - module load build_macos_gnu - export LDFLAGS="-L${MPI_ROOT}/lib" - -In a csh/tcsh shell, users would run ``source etc/lmod-setup.csh macos`` in place of the first line in the code block above. - -Additionally, for Option 1 systems, set the variable ``ENABLE_QUAD_PRECISION`` to ``OFF`` in the ``$SRW/src/ufs-weather-model/FV3/atmos_cubed_sphere/CMakeLists.txt`` file. This change is optional if using Option 2 to build the SRW App. To make this change using a streamline editor (`sed`), run: - -.. code-block:: console + module load build__gnu + export LDFLAGS+=" -L${MPI_ROOT}/lib " - sed -i .bak 's/QUAD_PRECISION\" ON)/QUAD_PRECISION\" OFF)/' $SRW/src/ufs-weather-model/FV3/atmos_cubed_sphere/CMakeLists.txt +In a csh/tcsh shell, users would run ``source etc/lmod-setup.csh `` in place of the first line in the code block above. The last line is primarily needed for the MacOS platforms. Proceed to building the executables using the process outlined in :numref:`Step %s `. Run an Experiment ===================== -To configure and run an experiment, users should proceed to :numref:`Chapter %s `. \ No newline at end of file +To configure and run an experiment, users should proceed to :numref:`Chapter %s `. 
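As a consolidated reference for the MacOS/generic Linux build flow revised above, the steps can be combined into a single console session. This is a minimal sketch, not part of the patch: it assumes HPC-Stack is already installed, the repository path and the ``linux``/GNU platform-compiler pair are illustrative, and the ``build`` directory follows the CMake approach described earlier in this file.

.. code-block:: console

   cd /path/to/ufs-srweather-app                # illustrative clone location
   source etc/lmod-setup.sh linux               # use etc/lmod-setup.csh in csh/tcsh shells
   module use /path/to/ufs-srweather-app/modulefiles
   module load build_linux_gnu                  # edit this modulefile to point to the HPC-Stack install
   export LDFLAGS+=" -L${MPI_ROOT}/lib "        # primarily needed on MacOS
   mkdir -p build && cd build
   cmake .. -DCMAKE_INSTALL_PREFIX=.. -DCMAKE_INSTALL_BINDIR=exec
   make -j 4 >& build.out &

As with the platform-specific instructions above, the executables should appear in ``ufs-srweather-app/exec`` once ``build.out`` reports completion.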
diff --git a/docs/UsersGuide/source/Components.rst b/docs/UsersGuide/source/Components.rst index b187db6680..85b72a5289 100644 --- a/docs/UsersGuide/source/Components.rst +++ b/docs/UsersGuide/source/Components.rst @@ -8,11 +8,11 @@ The SRW Application assembles a variety of components, including: * Pre-processor Utilities & Initial Conditions * UFS Weather Forecast Model -* Unified Post-Processor +* Unified Post Processor * Visualization Examples * Build System and Workflow -These components are documented within this User's Guide and supported through a `community forum `_. +These components are documented within this User's Guide and supported through the `GitHub Discussions `__ forum. .. _Utils: @@ -30,25 +30,23 @@ The SRW Application can be initialized from a range of operational initial condi Forecast Model ============== -The prognostic atmospheric model in the UFS SRW Application is the Finite-Volume Cubed-Sphere -(:term:`FV3`) dynamical core configured with a Limited Area Model (:term:`LAM`) capability :cite:`BlackEtAl2021`. The :term:`dynamical core` is the computational part of a model that solves the equations of fluid motion. A User’s Guide for the UFS :term:`Weather Model` is `here `__. +The prognostic atmospheric model in the UFS SRW Application is the Finite-Volume Cubed-Sphere (:term:`FV3`) dynamical core configured with a Limited Area Model (:term:`LAM`) capability :cite:`BlackEtAl2021`. The :term:`dynamical core` is the computational part of a model that solves the equations of fluid motion. A User's Guide for the UFS Weather Model can be accessed `here `__. Supported model resolutions in this release include 3-, 13-, and 25-km predefined contiguous U.S. (:term:`CONUS`) domains, each with 127 vertical levels. Preliminary tools for users to define their own domain are also available in the release with full, formal support of these tools to be provided in future releases. The Extended Schmidt Gnomonic (ESG) grid is used with the FV3-LAM, which features relatively uniform grid cells across the entirety of the domain. Additional information about the FV3 dynamical core can be found in the `scientific documentation `__, the `technical documentation `__, and on the `NOAA Geophysical Fluid Dynamics Laboratory website `__. -Interoperable atmospheric physics, along with various land surface model options, are supported through the Common Community Physics Package (CCPP), described `here `__. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. There will be four physics suites supported for the SRW App v2.0.0 release. The first is the FV3_RRFS_v1beta physics suite, which is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (RRFS) planned for 2023-2024, and the second is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. Additionally, FV3_WoFS_v0 and FV3_HRRR will be supported. A scientific description of the CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `__, and CCPP technical aspects are described in the `CCPP Technical Documentation `__. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. Additional information on Stochastic Physics options is available `here `__. 
+Interoperable atmospheric physics, along with various land surface model options, are supported through the Common Community Physics Package (CCPP), described `here `__. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. There are four physics suites supported as of the SRW App v2.1.0 release. The first is the FV3_RRFS_v1beta physics suite, which is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (:term:`RRFS`) planned for 2023-2024, and the second is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. Additionally, FV3_WoFS_v0 and FV3_HRRR are supported. A detailed list of CCPP updates since the SRW App v2.0.0 release is available :ref:`here `. A full scientific description of CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `__, and CCPP technical aspects are described in the `CCPP Technical Documentation `__. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. Additional information on Stochastic Physics options is available `here `__. .. note:: SPP is currently only available for specific physics schemes used in the RAP/HRRR physics suite. Users need to be aware of which physics suite definition file (:term:`SDF`) is chosen when turning this option on. Among the supported physics suites, the full set of parameterizations can only be used with the ``FV3_HRRR`` option for ``CCPP_PHYS_SUITE``. The SRW App supports the use of both :term:`GRIB2` and :term:`NEMSIO` input data. The UFS Weather Model ingests initial and lateral boundary condition files produced by :term:`chgres_cube` and outputs files in netCDF format on a specific projection (e.g., Lambert Conformal) in the horizontal direction and model levels in the vertical direction. -Post-processor +Post Processor ============== The SRW Application is distributed with the Unified Post Processor (:term:`UPP`) included in the workflow as a way to convert the netCDF output on the native model grid to :term:`GRIB2` format on standard isobaric vertical coordinates. The UPP can also be used to compute a variety of useful diagnostic fields, as described in the `UPP User's Guide `__. -Output from UPP can be used with visualization, plotting, and verification packages or in -further downstream post-processing (e.g., statistical post-processing techniques). +Output from UPP can be used with visualization, plotting, and verification packages or in further downstream post-processing (e.g., statistical post-processing techniques). .. _MetplusComponent: @@ -57,39 +55,32 @@ METplus Verification Suite The enhanced Model Evaluation Tools (`METplus `__) verification system has been integrated into the SRW App to facilitate forecast evaluation. METplus is a verification framework that spans a wide range of temporal scales (warn-on-forecast to climate) and spatial scales (storm to global). It is supported by the `Developmental Testbed Center (DTC) `__. -METplus *installation* is not included as part of the build process for the most recent release of the SRW App. However, METplus is preinstalled on many `Level 1 & 2 `__ systems; existing builds can be viewed `here `__. METplus can be installed on other systems individually or as part of :term:`HPC-Stack` installation. 
Users on systems without a previous installation of METplus can follow the `MET Installation Guide `__ and `METplus Installation Guide `__ for individual installation. Currently, METplus *installation* is not a supported feature for this release of the SRW App. However, METplus *use* is supported on systems with a functioning METplus installation. +METplus *installation* is not included as part of the build process for the most recent release of the SRW App. However, METplus is preinstalled on many `Level 1 & 2 `__ systems; existing builds can be viewed `here `__. -The core components of the METplus framework include the statistical driver, MET, the associated database and display systems known as METviewer and METexpress, and a suite of Python wrappers to provide low-level automation and examples, also called use-cases. MET is a set of verification tools developed for use by the :term:`NWP` community. It matches up grids with either gridded analyses or point observations and applies configurable methods to compute statistics and diagnostics. Extensive documentation is available in the `METplus User’s Guide `__ and `MET User’s Guide `__. Documentation for all other components of the framework can be found at the Documentation link for each component on the METplus `downloads `__ page. +METplus can be installed on other systems individually or as part of :term:`HPC-Stack` installation. Users on systems without a previous installation of METplus can follow the `MET Installation Guide `__ and `METplus Installation Guide `__ for individual installation. Currently, METplus *installation* is not a supported feature for this release of the SRW App. However, METplus *use* is supported on systems with a functioning METplus installation. -Among other techniques, MET provides the capability to compute standard verification scores for comparing deterministic gridded model data to point-based and gridded observations. It also provides ensemble and probabilistic verification methods for comparing gridded model data to point-based or gridded observations. Verification tasks to accomplish these comparisons are defined in the SRW App in :numref:`Table %s `. Currently, the SRW App supports the use of :term:`NDAS` observation files in `prepBUFR format `__ (which include conventional point-based surface and upper-air data) for point-based verification. It also supports gridded Climatology-Calibrated Precipitation Analysis (:term:`CCPA`) data for accumulated precipitation evaluation and Multi-Radar/Multi-Sensor (:term:`MRMS`) gridded analysis data for composite reflectivity and :term:`echo top` verification. +The core components of the METplus framework include the statistical driver, MET, the associated database and display systems known as METviewer and METexpress, and a suite of Python wrappers to provide low-level automation and examples, also called use cases. MET is a set of verification tools developed for use by the :term:`NWP` community. It matches up grids with either gridded analyses or point observations and applies configurable methods to compute statistics and diagnostics. Extensive documentation is available in the `METplus User's Guide `__ and `MET User's Guide `__. Documentation for all other components of the framework can be found at the Documentation link for each component on the METplus `downloads `__ page. 
-METplus is being actively developed by :term:`NCAR`/Research Applications Laboratory (RAL), NOAA/Earth Systems Research Laboratories (ESRL), and NOAA/Environmental Modeling Center (:term:`EMC`), and it is open to community contributions. - - -Visualization Example -===================== -A Python script is provided to create basic visualizations of the model output. The script -is designed to output graphics in PNG format for 14 standard meteorological variables -when using the pre-defined :term:`CONUS` domain. A difference plotting script is also included to visually compare two runs for the same domain and resolution. These scripts are provided only as an example for users familiar with Python. They may be used to perform a visual check to verify that the application is producing reasonable results. +Among other techniques, MET provides the capability to compute standard verification scores for comparing deterministic gridded model data to point-based and gridded observations. It also provides ensemble and probabilistic verification methods for comparing gridded model data to point-based or gridded observations. Verification tasks to accomplish these comparisons are defined in the SRW App in :numref:`Table %s `. Currently, the SRW App supports the use of :term:`NDAS` observation files (which include conventional point-based surface and upper-air data) in `prepBUFR format `__ for point-based verification. It also supports gridded Climatology-Calibrated Precipitation Analysis (:term:`CCPA`) data for accumulated precipitation evaluation and Multi-Radar/Multi-Sensor (:term:`MRMS`) gridded analysis data for composite reflectivity and :term:`echo top` verification. -After running ``manage_externals/checkout_externals``, the visualization scripts will be available in the ``ufs-srweather-app/ush/Python`` directory. Usage information and instructions are described in :numref:`Chapter %s ` and are also included at the top of the script. +METplus is being actively developed by :term:`NCAR`/Research Applications Laboratory (RAL), NOAA/Earth Systems Research Laboratories (ESRL), and NOAA/Environmental Modeling Center (:term:`EMC`), and it is open to community contributions. Build System and Workflow ========================= The SRW Application has a portable build system and a user-friendly, modular, and expandable workflow framework. -An umbrella CMake-based build system is used for building the components necessary for running the end-to-end SRW Application, including the UFS Weather Model and the pre- and post-processing software. Additional libraries necessary for the application (e.g., :term:`NCEPLIBS-external` and :term:`NCEPLIBS`) are not included in the SRW Application build system but are available pre-built on pre-configured platforms. On other systems, they can be installed via the HPC-Stack (see :doc:`HPC-Stack Documentation `). There is a small set of system libraries and utilities that are assumed to be present on the target computer: the CMake build software, a Fortran, C, and C++ compiler, and an :term:`MPI` library. +An umbrella CMake-based build system is used for building the components necessary for running the end-to-end SRW Application, including the UFS Weather Model and the pre- and post-processing software. Additional libraries necessary for the application (e.g., :term:`NCEPLIBS-external` and :term:`NCEPLIBS`) are not included in the SRW Application build system but are available pre-built on pre-configured platforms. 
On other systems, they can be installed via the HPC-Stack (see :doc:`HPC-Stack Documentation `). There is a small set of system libraries and utilities that are assumed to be present on the target computer: the CMake build software; a Fortran, C, and C++ compiler; and an :term:`MPI` library. Once built, the provided experiment generator script can be used to create a Rocoto-based -workflow file that will run each task in the system in the proper sequence (see :numref:`Chapter %s ` or the `Rocoto documentation `_ for more information on Rocoto). If Rocoto and/or a batch system is not present on the available platform, the individual components can be run in a stand-alone, command line fashion with provided run scripts. The generated namelist for the atmospheric model can be modified in order to vary settings such as forecast starting and ending dates, forecast length hours, the :term:`CCPP` physics suite, integration time step, history file output frequency, and more. It also allows for configuration of other elements of the workflow; for example, users can choose whether to run some or all of the pre-processing, forecast model, and post-processing steps. +workflow file that will run each task in the system in the proper sequence (see :numref:`Chapter %s ` or the `Rocoto documentation `__ for more information on Rocoto). If Rocoto and/or a batch system is not present on the available platform, the individual components can be run in a stand-alone, command line fashion with provided run scripts. The generated namelist for the atmospheric model can be modified in order to vary settings such as forecast starting and ending dates, forecast length hours, the :term:`CCPP` physics suite, integration time step, history file output frequency, and more. It also allows for configuration of other elements of the workflow; for example, users can choose whether to run some or all of the pre-processing, forecast model, and post-processing steps. + +An optional Python plotting task is also included to create basic visualizations of the model output. The task outputs graphics in PNG format for several standard meteorological variables on the pre-defined :term:`CONUS` domain. A difference plotting option is also included to visually compare two runs for the same domain and resolution. These plots may be used to perform a visual check to verify that the application is producing reasonable results. Configuration instructions are provided in :numref:`Section %s `. -The latest SRW Application release has been tested on a variety of platforms widely used by -researchers, such as the NOAA Research and Development High-Performance Computing Systems -(RDHPCS), including Hera, Orion, and Jet; the National Center for Atmospheric Research (:term:`NCAR`) Cheyenne system; the National Severe Storms Laboratory (NSSL) HPC machine, Odin; the National Science Foundation Stampede2 system; and generic Linux and MacOS systems using Intel and GNU compilers. Four `levels of support `_ have been defined for the SRW Application, including pre-configured (Level 1), configurable (Level 2), limited test platforms (Level 3), and build only platforms (Level 4). Each level is further described below. 
+The latest SRW Application release has been tested on a variety of platforms widely used by researchers, such as the NOAA Research and Development High-Performance Computing Systems (RDHPCS), including Hera, Orion, and Jet; the National Center for Atmospheric Research (:term:`NCAR`) Cheyenne system; and generic Linux and MacOS systems using Intel and GNU compilers. Four `levels of support `__ have been defined for the SRW Application, including pre-configured (Level 1), configurable (Level 2), limited-test (Level 3), and build-only (Level 4) platforms. Each level is further described below. On pre-configured (Level 1) computational platforms, all the required libraries for building the SRW Application are available in a central place. That means bundled libraries (NCEPLIBS) and third-party libraries (NCEPLIBS-external) have both been built. The SRW Application is expected to build and run out-of-the-box on these pre-configured platforms. A few additional computational platforms are considered configurable for the SRW Application release. Configurable platforms (Level 2) are platforms where all of the required libraries for building the SRW Application are expected to install successfully but are not available in a central location. Applications and models are expected to build and run once the required bundled libraries (e.g., NCEPLIBS) and third-party libraries (e.g., NCEPLIBS-external) are built. -Limited-Test (Level 3) and Build-Only (Level 4) computational platforms are those in which the developers have built the code but little or no pre-release testing has been conducted, respectively. A complete description of the levels of support, along with a list of preconfigured and configurable platforms can be found in the `SRW Application Wiki `_. +Limited-Test (Level 3) and Build-Only (Level 4) computational platforms are those in which the developers have built the code but little or no pre-release testing has been conducted, respectively. A complete description of the levels of support, along with a list of preconfigured and configurable platforms can be found in the `SRW Application Wiki `__. diff --git a/docs/UsersGuide/source/ConfigWorkflow.rst b/docs/UsersGuide/source/ConfigWorkflow.rst index eb0407e954..2f71065b92 100644 --- a/docs/UsersGuide/source/ConfigWorkflow.rst +++ b/docs/UsersGuide/source/ConfigWorkflow.rst @@ -151,7 +151,7 @@ METplus Parameters * ``SS`` refers to the two-digit valid seconds of the hour ``CCPA_OBS_DIR``: (Default: "") - User-specified location of top-level directory where CCPA hourly precipitation files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_ccpa_tn`` task. (This task is activated in the workflow by setting ``RUN_TASK_GET_OBS_CCPA: true``). + User-specified location of top-level directory where CCPA hourly precipitation files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``TN_GET_OBS_CCPA`` task. (This task is activated in the workflow by setting ``RUN_TASK_GET_OBS_CCPA: true``). METplus configuration files require the use of a predetermined directory structure and file names. 
If the CCPA files are user-provided, they need to follow the anticipated naming structure: ``{YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2``, where YYYYMMDD and HH are as described in the note :ref:`above `. When pulling observations from NOAA HPSS, the data retrieved will be placed in the ``CCPA_OBS_DIR`` directory. This path must be defined as ``//ccpa/proc``. METplus is configured to verify 01-, 03-, 06-, and 24-h accumulated precipitation using hourly CCPA files. @@ -159,7 +159,7 @@ METplus Parameters There is a problem with the valid time in the metadata for files valid from 19 - 00 UTC (i.e., files under the "00" directory). The script to pull the CCPA data from the NOAA HPSS (``scripts/exregional_get_obs_ccpa.sh``) has an example of how to account for this and organize the data into a more intuitive format. When a fix is provided, it will be accounted for in the ``exregional_get_obs_ccpa.sh`` script. ``MRMS_OBS_DIR``: (Default: "") - User-specified location of top-level directory where MRMS composite reflectivity files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_mrms_tn`` task (activated in the workflow by setting ``RUN_TASK_GET_OBS_MRMS: true``). When pulling observations directly from NOAA HPSS, the data retrieved will be placed in this directory. Please note, this path must be defined as ``//mrms/proc``. + User-specified location of top-level directory where MRMS composite reflectivity files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``TN_GET_OBS_MRMS`` task (activated in the workflow by setting ``RUN_TASK_GET_OBS_MRMS: true``). When pulling observations directly from NOAA HPSS, the data retrieved will be placed in this directory. Please note, this path must be defined as ``//mrms/proc``. METplus configuration files require the use of a predetermined directory structure and file names. Therefore, if the MRMS files are user-provided, they need to follow the anticipated naming structure: ``{YYYYMMDD}/MergedReflectivityQCComposite_00.50_{YYYYMMDD}-{HH}{mm}{SS}.grib2``, where YYYYMMDD and {HH}{mm}{SS} are as described in the note :ref:`above `. @@ -167,14 +167,14 @@ METplus Parameters METplus is configured to look for a MRMS composite reflectivity file for the valid time of the forecast being verified; since MRMS composite reflectivity files do not always exactly match the valid time, a script (within the main script that retrieves MRMS data from the NOAA HPSS) is used to identify and rename the MRMS composite reflectivity file to match the valid time of the forecast. The script to pull the MRMS data from the NOAA HPSS has an example of the expected file-naming structure: ``scripts/exregional_get_obs_mrms.sh``. This script calls the script used to identify the MRMS file closest to the valid time: ``ush/mrms_pull_topofhour.py``. ``NDAS_OBS_DIR``: (Default: "") - User-specified location of the top-level directory where NDAS prepbufr files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``get_obs_ndas_tn`` task (activated in the workflow by setting ``RUN_TASK_GET_OBS_NDAS: true``).
When pulling observations directly from NOAA HPSS, the data retrieved will be placed in this directory. Please note, this path must be defined as ``//ndas/proc``. METplus is configured to verify near-surface variables hourly and upper-air variables at 00 and 12 UTC with NDAS prepbufr files. + User-specified location of the top-level directory where NDAS prepbufr files used by METplus are located. This parameter needs to be set for both user-provided observations and for observations that are retrieved from the NOAA :term:`HPSS` (if the user has access) via the ``TN_GET_OBS_NDAS`` task (activated in the workflow by setting ``RUN_TASK_GET_OBS_NDAS: true``). When pulling observations directly from NOAA HPSS, the data retrieved will be placed in this directory. Please note, this path must be defined as ``//ndas/proc``. METplus is configured to verify near-surface variables hourly and upper-air variables at 00 and 12 UTC with NDAS prepbufr files. METplus configuration files require the use of predetermined file names. Therefore, if the NDAS files are user-provided, they need to follow the anticipated naming structure: ``prepbufr.ndas.{YYYYMMDDHH}``, where YYYYMMDDHH is as described in the note :ref:`above `. The script to pull the NDAS data from the NOAA HPSS (``scripts/exregional_get_obs_ndas.sh``) has an example of how to rename the NDAS data into a more intuitive format with the valid time listed in the file name. Test Directories ---------------------- -These directories are used only by the ``run_WE2E_tests.sh`` script, so they are not used unless the user runs a Workflow End-to-End (WE2E) test. Their function corresponds to the same variables without the ``TEST_`` prefix. Users typically should not modify these variables. For any alterations, the logic in the ``run_WE2E_tests.sh`` script would need to be adjusted accordingly. +These directories are used only by the ``run_WE2E_tests.py`` script, so they are not used unless the user runs a Workflow End-to-End (WE2E) test (see :numref:`Chapter %s `). Their function corresponds to the same variables without the ``TEST_`` prefix. Users typically should not modify these variables. For any alterations, the logic in the ``run_WE2E_tests.py`` script would need to be adjusted accordingly. ``TEST_EXTRN_MDL_SOURCE_BASEDIR``: (Default: "") This parameter allows testing of user-staged files in a known location on a given platform. This path contains a limited dataset and likely will not be useful for most user experiments. @@ -215,7 +215,7 @@ Directory Parameters ----------------------- ``EXPT_BASEDIR``: (Default: "") - The full path to the base directory in which the experiment directory (``EXPT_SUBDIR``) will be created. If this is not specified or if it is set to an empty string, it will default to ``${HOMEdir}/../expt_dirs``, where ``${HOMEdir}`` contains the full path to the ``ufs-srweather-app`` directory. + The full path to the base directory in which the experiment directory (``EXPT_SUBDIR``) will be created. If this is not specified or if it is set to an empty string, it will default to ``${HOMEdir}/../expt_dirs``, where ``${HOMEdir}`` contains the full path to the ``ufs-srweather-app`` directory. If set to a relative path, the provided path will be appended to the default value ``${HOMEdir}/../expt_dirs``. For example, if ``EXPT_BASEDIR=some/relative/path`` (i.e. a path that does not begin with ``/``), the value of ``EXPT_BASEDIR`` used by the workflow will be ``EXPT_BASEDIR=${HOMEdir}/../expt_dirs/some/relative/path``. 
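To illustrate the relative-path behavior described above, a minimal sketch of the relevant ``config.yaml`` lines could look as follows; the ``workflow:`` section name and the example paths are assumptions for illustration and are not taken from this patch::

    workflow:
      # A relative path is appended to the default base, so this expands to
      # ${HOMEdir}/../expt_dirs/staging/expt_dirs_test (hypothetical path)
      EXPT_BASEDIR: "staging/expt_dirs_test"
      # Name of the experiment directory (not its full path); hypothetical value
      EXPT_SUBDIR: "test_conus_25km"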
``EXPT_SUBDIR``: (Default: "") The user-designated name of the experiment directory (*not* its full path). The full path to the experiment directory, which will be contained in the variable ``EXPTDIR``, will be: @@ -285,7 +285,7 @@ Set File Name Parameters Name of the file (a shell script) containing definitions of the primary and secondary experiment variables (parameters). This file is sourced by many scripts (e.g., the J-job scripts corresponding to each workflow task) in order to make all the experiment variables available in those scripts. The primary variables are defined in the default configuration script (``config_defaults.yaml``) and in ``config.yaml``. The secondary experiment variables are generated by the experiment generation script. ``EXTRN_MDL_VAR_DEFNS_FN``: (Default: "extrn_mdl_var_defns") - Name of the file (a shell script) containing the definitions of variables associated with the external model from which :term:`ICs` or :term:`LBCs` are generated. This file is created by the ``GET_EXTRN_*_TN`` task because the values of the variables it contains are not known before this task runs. The file is then sourced by the ``MAKE_ICS_TN`` and ``MAKE_LBCS_TN`` tasks. + Name of the file (a shell script) containing the definitions of variables associated with the external model from which :term:`ICs` or :term:`LBCs` are generated. This file is created by the ``TN_GET_EXTRN_*`` task because the values of the variables it contains are not known before this task runs. The file is then sourced by the ``TN_MAKE_ICS`` and ``TN_MAKE_LBCS`` tasks. ``WFLOW_LAUNCH_SCRIPT_FN``: (Default: "launch_FV3LAM_wflow.sh") Name of the script that can be used to (re)launch the experiment's Rocoto workflow. @@ -386,17 +386,17 @@ Verification Parameters ``GET_OBS``: (Default: "get_obs") Set the name of the Rocoto workflow task used to load proper module files for ``GET_OBS_*`` tasks. Users typically do not need to change this value. -``VX_TN``: (Default: "run_vx") +``TN_VX``: (Default: "run_vx") Set the name of the Rocoto workflow task used to load proper module files for ``VX_*`` tasks. Users typically do not need to change this value. -``VX_ENSGRID_TN``: (Default: "run_ensgridvx") +``TN_VX_ENSGRID``: (Default: "run_ensgridvx") Set the name of the Rocoto workflow task that runs METplus grid-to-grid ensemble verification for 1-h accumulated precipitation. Users typically do not need to change this value. -``VX_ENSGRID_PROB_REFC_TN``: (Default: "run_ensgridvx_prob_refc") +``TN_VX_ENSGRID_PROB_REFC``: (Default: "run_ensgridvx_prob_refc") Set the name of the Rocoto workflow task that runs METplus grid-to-grid verification for ensemble probabilities for composite reflectivity. Users typically do not need to change this value. ``MAXTRIES_VX_ENSGRID_PROB_REFC``: (Default: 1) - Maximum number of times to attempt ``VX_ENSGRID_PROB_REFC_TN``. + Maximum number of times to attempt ``TN_VX_ENSGRID_PROB_REFC``. .. _NCOModeParms: @@ -410,7 +410,7 @@ A standard set of environment variables has been established for *nco* mode to s Only *community* mode is fully supported for this release. *nco* mode is used by those at the Environmental Modeling Center (EMC) and Global Systems Laboratory (GSL) who are working on pre-implementation operational testing. Other users should run the SRW App in *community* mode. ``envir, NET, model_ver, RUN``: - Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. 
These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the `WCOSS Implementation Standards `__ document (pp. 4-5) as follows: + Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the `WCOSS Implementation Standards `__ document (pp. 4-5) as follows: ``envir``: (Default: "para") Set to "test" during the initial testing phase, "para" when running in parallel (on a schedule), and "prod" in production. @@ -427,40 +427,45 @@ A standard set of environment variables has been established for *nco* mode to s ``OPSROOT``: (Default: "") The operations root directory in *nco* mode. +.. _workflow-switches: + WORKFLOW SWITCHES Configuration Parameters ============================================= -These parameters set flags that determine whether various workflow tasks should be run. When non-default parameters are selected for the variables in this section, they should be added to the ``workflow_switches:`` section of the ``config.yaml`` file. Note that the ``MAKE_GRID_TN``, ``MAKE_OROG_TN``, and ``MAKE_SFC_CLIMO_TN`` are all :term:`cycle-independent` tasks, i.e., if they are run, they only run once at the beginning of the workflow before any cycles are run. +These parameters set flags that determine whether various workflow tasks should be run. When non-default parameters are selected for the variables in this section, they should be added to the ``workflow_switches:`` section of the ``config.yaml`` file. Note that the ``TN_MAKE_GRID``, ``TN_MAKE_OROG``, and ``TN_MAKE_SFC_CLIMO`` are all :term:`cycle-independent` tasks, i.e., if they are run, they only run once at the beginning of the workflow before any cycles are run. Baseline Workflow Tasks -------------------------- ``RUN_TASK_MAKE_GRID``: (Default: true) - Flag that determines whether to run the grid file generation task (``MAKE_GRID_TN``). If this is set to true, the grid generation task is run and new grid files are generated. If it is set to false, then the scripts look for pre-generated grid files in the directory specified by ``GRID_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` + Flag that determines whether to run the grid file generation task (``TN_MAKE_GRID``). If this is set to true, the grid generation task is run and new grid files are generated. If it is set to false, then the scripts look for pre-generated grid files in the directory specified by ``GRID_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` ``RUN_TASK_MAKE_OROG``: (Default: true) - Same as ``RUN_TASK_MAKE_GRID`` but for the orography generation task (``MAKE_OROG_TN``). Flag that determines whether to run the orography file generation task (``MAKE_OROG_TN``). If this is set to true, the orography generation task is run and new orography files are generated. If it is set to false, then the scripts look for pre-generated orography files in the directory specified by ``OROG_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` + Same as ``RUN_TASK_MAKE_GRID`` but for the orography generation task (``TN_MAKE_OROG``). Flag that determines whether to run the orography file generation task (``TN_MAKE_OROG``). If this is set to true, the orography generation task is run and new orography files are generated. 
If it is set to false, then the scripts look for pre-generated orography files in the directory specified by ``OROG_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` ``RUN_TASK_MAKE_SFC_CLIMO``: (Default: true) - Same as ``RUN_TASK_MAKE_GRID`` but for the surface climatology generation task (``MAKE_SFC_CLIMO_TN``). Flag that determines whether to run the surface climatology file generation task (``MAKE_SFC_CLIMO_TN``). If this is set to true, the surface climatology generation task is run and new surface climatology files are generated. If it is set to false, then the scripts look for pre-generated surface climatology files in the directory specified by ``SFC_CLIMO_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` + Same as ``RUN_TASK_MAKE_GRID`` but for the surface climatology generation task (``TN_MAKE_SFC_CLIMO``). Flag that determines whether to run the surface climatology file generation task (``TN_MAKE_SFC_CLIMO``). If this is set to true, the surface climatology generation task is run and new surface climatology files are generated. If it is set to false, then the scripts look for pre-generated surface climatology files in the directory specified by ``SFC_CLIMO_DIR`` (see :numref:`Section %s ` below). Valid values: ``True`` | ``False`` ``RUN_TASK_GET_EXTRN_ICS``: (Default: true) - Flag that determines whether to run the ``GET_EXTRN_ICS_TN`` task. + Flag that determines whether to run the ``TN_GET_EXTRN_ICS`` task. ``RUN_TASK_GET_EXTRN_LBCS``: (Default: true) - Flag that determines whether to run the ``GET_EXTRN_LBCS_TN`` task. + Flag that determines whether to run the ``TN_GET_EXTRN_LBCS`` task. ``RUN_TASK_MAKE_ICS``: (Default: true) - Flag that determines whether to run the ``MAKE_ICS_TN`` task. + Flag that determines whether to run the ``TN_MAKE_ICS`` task. ``RUN_TASK_MAKE_LBCS``: (Default: true) - Flag that determines whether to run the ``MAKE_LBCS_TN`` task. + Flag that determines whether to run the ``TN_MAKE_LBCS`` task. ``RUN_TASK_RUN_FCST``: (Default: true) - Flag that determines whether to run the ``RUN_FCST_TN`` task. + Flag that determines whether to run the ``TN_RUN_FCST`` task. ``RUN_TASK_RUN_POST``: (Default: true) - Flag that determines whether to run the ``RUN_POST_TN`` task. Valid values: ``True`` | ``False`` + Flag that determines whether to run the ``TN_RUN_POST`` task. Valid values: ``True`` | ``False`` + +``RUN_TASK_RUN_PRDGEN``: (Default: false) + Flag that determines whether to run the ``TN_RUN_PRDGEN`` task. Valid values: ``True`` | ``False`` .. _VXTasks: @@ -468,13 +473,13 @@ Verification Tasks -------------------- ``RUN_TASK_GET_OBS_CCPA``: (Default: false) - Flag that determines whether to run the ``GET_OBS_CCPA_TN`` task, which retrieves the :term:`CCPA` hourly precipitation files used by METplus from NOAA :term:`HPSS`. See :numref:`Section %s ` for additional parameters related to this task. + Flag that determines whether to run the ``TN_GET_OBS_CCPA`` task, which retrieves the :term:`CCPA` hourly precipitation files used by METplus from NOAA :term:`HPSS`. See :numref:`Section %s ` for additional parameters related to this task. ``RUN_TASK_GET_OBS_MRMS``: (Default: false) - Flag that determines whether to run the ``GET_OBS_MRMS_TN`` task, which retrieves the :term:`MRMS` composite reflectivity files used by METplus from NOAA HPSS. See :numref:`Section %s ` for additional parameters related to this task. 
+ Flag that determines whether to run the ``TN_GET_OBS_MRMS`` task, which retrieves the :term:`MRMS` composite reflectivity files used by METplus from NOAA HPSS. See :numref:`Section %s ` for additional parameters related to this task. ``RUN_TASK_GET_OBS_NDAS``: (Default: false) - Flag that determines whether to run the ``GET_OBS_NDAS_TN`` task, which retrieves the :term:`NDAS` PrepBufr files used by METplus from NOAA HPSS. See :numref:`Section %s ` for additional parameters related to this task. + Flag that determines whether to run the ``TN_GET_OBS_NDAS`` task, which retrieves the :term:`NDAS` PrepBufr files used by METplus from NOAA HPSS. See :numref:`Section %s ` for additional parameters related to this task. ``RUN_TASK_VX_GRIDSTAT``: (Default: false) Flag that determines whether to run the grid-stat verification task. The :ref:`MET Grid-Stat tool ` provides verification statistics for a matched forecast and observation grid. See :numref:`Section %s ` for additional parameters related to this task. Valid values: ``True`` | ``False`` @@ -488,8 +493,13 @@ Verification Tasks ``RUN_TASK_VX_ENSPOINT``: (Default: false) Flag that determines whether to run the ensemble point verification task. If this flag is set, both ensemble-stat point verification and point verification of ensemble-stat output are computed. The :ref:`MET Ensemble-Stat tool ` provides verification statistics for ensemble forecasts and can be used in conjunction with the :ref:`MET Point-Stat tool `. See :numref:`Section %s ` for additional parameters related to this task. Valid values: ``True`` | ``False`` -.. - COMMENT: Might be worth defining "ensemble-stat verification for gridded data," "ensemble point verification," "ensemble-stat point verification," and "point verification of ensemble-stat output" +.. COMMENT: Define "ensemble-stat verification for gridded data," "ensemble point verification," "ensemble-stat point verification," and "point verification of ensemble-stat output"? + +Plotting Task +---------------- + +``RUN_TASK_PLOT_ALLVARS``: (Default: false) + Flag that determines whether to run the Python plotting scripts. .. _make-grid: @@ -503,7 +513,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. Typically, users do not need to adjust the default values. - ``MAKE_GRID_TN``: (Default: "make_grid") + ``TN_MAKE_GRID``: (Default: "make_grid") Set the name of this :term:`cycle-independent` Rocoto workflow task. Users typically do not need to change this value. ``NNODES_MAKE_GRID``: (Default: 1) @@ -623,7 +633,7 @@ MAKE_OROG Configuration Parameters Non-default parameters for the ``make_orog`` task are set in the ``task_make_orog:`` section of the ``config.yaml`` file. -``MAKE_OROG_TN``: (Default: "make_orog") +``TN_MAKE_OROG``: (Default: "make_orog") Set the name of this :term:`cycle-independent` Rocoto workflow task. Users typically do not need to change this value. ``NNODES_MAKE_OROG``: (Default: 1) @@ -648,7 +658,7 @@ Non-default parameters for the ``make_orog`` task are set in the ``task_make_oro Controls the size of the stack for threads created by the OpenMP implementation. ``OROG_DIR``: (Default: "") - The directory containing pre-generated orography files to use when ``MAKE_OROG_TN`` is set to false. + The directory containing pre-generated orography files to use when ``TN_MAKE_OROG`` is set to false. ..
_make-sfc-climo: @@ -657,7 +667,7 @@ MAKE_SFC_CLIMO Configuration Parameters Non-default parameters for the ``make_sfc_climo`` task are set in the ``task_make_sfc_climo:`` section of the ``config.yaml`` file. -``MAKE_SFC_CLIMO_TN``: "make_sfc_climo" +``TN_MAKE_SFC_CLIMO``: "make_sfc_climo" Set the name of this :term:`cycle-independent` Rocoto workflow task. Users typically do not need to change this value. ``NNODES_MAKE_SFC_CLIMO``: (Default: 2) @@ -682,7 +692,9 @@ Non-default parameters for the ``make_sfc_climo`` task are set in the ``task_mak Controls the size of the stack for threads created by the OpenMP implementation. ``SFC_CLIMO_DIR``: (Default: "") - The directory containing pre-generated surface climatology files to use when ``MAKE_SFC_CLIMO_TN`` is set to false. + The directory containing pre-generated surface climatology files to use when ``TN_MAKE_SFC_CLIMO`` is set to false. + +.. _task_get_extrn_ics: GET_EXTRN_ICS Configuration Parameters ========================================= @@ -696,7 +708,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. -``GET_EXTRN_ICS_TN``: (Default: "get_extrn_ics") +``TN_GET_EXTRN_ICS``: (Default: "get_extrn_ics") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_GET_EXTRN_ICS``: (Default: 1) @@ -712,7 +724,7 @@ For each workflow task, certain parameter values must be passed to the job sched Maximum number of times to attempt the task. ``EXTRN_MDL_NAME_ICS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` + The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` ``EXTRN_MDL_ICS_OFFSET_HRS``: (Default: 0) Users may wish to start a forecast using forecast data from a previous cycle of an external model. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. For example, if the forecast should start from a 6-hour forecast of the GFS, then ``EXTRN_MDL_ICS_OFFSET_HRS: "6"``. @@ -765,6 +777,7 @@ Set parameters associated with NOMADS online data. ``NOMADS_file_type``: (Default: "nemsio") Flag controlling the format of the data. Valid values: ``"GRIB2"`` | ``"grib2"`` | ``"NEMSIO"`` | ``"nemsio"`` +.. _task_get_extrn_lbcs: GET_EXTRN_LBCS Configuration Parameters ========================================== @@ -778,7 +791,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. -``GET_EXTRN_LBCS_TN``: (Default: "get_extrn_lbcs") +``TN_GET_EXTRN_LBCS``: (Default: "get_extrn_lbcs") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_GET_EXTRN_LBCS``: (Default: 1) @@ -794,7 +807,7 @@ For each workflow task, certain parameter values must be passed to the job sched Maximum number of times to attempt the task. 
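As an illustrative sketch only (the values shown are hypothetical and not part of this patch), the external-model source settings for initial and lateral boundary conditions, including the ``EXTRN_MDL_NAME_LBCS`` and ``LBC_SPEC_INTVL_HRS`` parameters described just below, might be set in ``config.yaml`` along these lines::

    task_get_extrn_ics:
      EXTRN_MDL_NAME_ICS: "FV3GFS"      # external model supplying ICs
      EXTRN_MDL_ICS_OFFSET_HRS: "6"     # start from a 6-hour-old cycle of that model
    task_get_extrn_lbcs:
      EXTRN_MDL_NAME_LBCS: "FV3GFS"     # external model supplying LBCs
      LBC_SPEC_INTVL_HRS: "6"           # boundary update interval (hours)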
``EXTRN_MDL_NAME_LBCS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` + The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` ``LBC_SPEC_INTVL_HRS``: (Default: "6") The interval (in integer hours) at which LBC files will be generated. This is also referred to as the *boundary update interval*. Note that the model selected in ``EXTRN_MDL_NAME_LBCS`` must have data available at a frequency greater than or equal to that implied by ``LBC_SPEC_INTVL_HRS``. For example, if ``LBC_SPEC_INTVL_HRS`` is set to "6", then the model must have data available at least every 6 hours. It is up to the user to ensure that this is the case. @@ -849,7 +862,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. -``MAKE_ICS_TN``: (Default: "make_ics") +``TN_MAKE_ICS``: (Default: "make_ics") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_MAKE_ICS``: (Default: 4) @@ -876,7 +889,7 @@ For each workflow task, certain parameter values must be passed to the job sched FVCOM Parameter ------------------- ``USE_FVCOM``: (Default: false) - Flag that specifies whether to update surface conditions in FV3-:term:`LAM` with fields generated from the Finite Volume Community Ocean Model (:term:`FVCOM`). If set to true, lake/sea surface temperatures, ice surface temperatures, and ice placement will be overwritten using data provided by FVCOM. Setting ``USE_FVCOM`` to true causes the executable ``process_FVCOM.exe`` in the ``MAKE_ICS_TN`` task to run. This, in turn, modifies the file ``sfc_data.nc`` generated by ``chgres_cube`` during the ``make_ics`` task. Note that the FVCOM data must already be interpolated to the desired FV3-LAM grid. Valid values: ``True`` | ``False`` + Flag that specifies whether to update surface conditions in FV3-:term:`LAM` with fields generated from the Finite Volume Community Ocean Model (:term:`FVCOM`). If set to true, lake/sea surface temperatures, ice surface temperatures, and ice placement will be overwritten using data provided by FVCOM. Setting ``USE_FVCOM`` to true causes the executable ``process_FVCOM.exe`` in the ``TN_MAKE_ICS`` task to run. This, in turn, modifies the file ``sfc_data.nc`` generated by ``chgres_cube`` during the ``make_ics`` task. Note that the FVCOM data must already be interpolated to the desired FV3-LAM grid. Valid values: ``True`` | ``False`` ``FVCOM_WCSTART``: (Default: "cold") Define if this is a "warm" start or a "cold" start. Setting this to "warm" will read in ``sfc_data.nc`` generated in a RESTART directory. Setting this to "cold" will read in the ``sfc_data.nc`` generated from ``chgres_cube`` in the ``make_ics`` portion of the workflow. Valid values: ``"cold"`` | ``"COLD"`` | ``"warm"`` | ``"WARM"`` @@ -893,7 +906,7 @@ MAKE_LBCS Configuration Parameters Non-default parameters for the ``make_lbcs`` task are set in the ``task_make_lbcs:`` section of the ``config.yaml`` file. 
-``MAKE_LBCS_TN``: (Default: "make_lbcs") +``TN_MAKE_LBCS``: (Default: "make_lbcs") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_MAKE_LBCS``: (Default: 4) @@ -917,17 +930,19 @@ Non-default parameters for the ``make_lbcs`` task are set in the ``task_make_lbc ``OMP_STACKSIZE_MAKE_LBCS``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. +.. _FcstConfigParams: + FORECAST Configuration Parameters ===================================== -Non-default parameters for the ``run_fcst`` task are set in the ``task_run_fcst`` section of the ``config.yaml`` file. +Non-default parameters for the ``run_fcst`` task are set in the ``task_run_fcst:`` section of the ``config.yaml`` file. Basic Task Parameters --------------------------------- For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. -``RUN_FCST_TN``: (Default: "run_fcst") +``TN_RUN_FCST``: (Default: "run_fcst") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_RUN_FCST``: (Default: "") @@ -961,13 +976,15 @@ For each workflow task, certain parameter values must be passed to the job sched ``OMP_STACKSIZE_RUN_FCST``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. +.. _ModelConfigParams: + Model Configuration Parameters ---------------------------------- These parameters set values in the Weather Model's ``model_configure`` file. ``DT_ATMOS``: (Default: "") - Time step for the outermost atmospheric model loop in seconds. This corresponds to the frequency at which the physics routines and the top level dynamics routine are called. (Note that one call to the top-level dynamics routine results in multiple calls to the horizontal dynamics, :term:`tracer` transport, and vertical dynamics routines; see the `FV3 dycore scientific documentation `__ for details.) Must be set. Takes an integer value. In the SRW App, a default value for ``DT_ATMOS`` appears in the ``set_predef_grid_params.yaml`` script, but a different value can be set in ``config.yaml``. + Time step for the outermost atmospheric model loop in seconds. This corresponds to the frequency at which the physics routines and the top level dynamics routine are called. (Note that one call to the top-level dynamics routine results in multiple calls to the horizontal dynamics, :term:`tracer` transport, and vertical dynamics routines; see the `FV3 dycore scientific documentation `__ for details.) Must be set. Takes an integer value. In the SRW App, a default value for ``DT_ATMOS`` appears in the ``set_predef_grid_params.yaml`` script, but a different value can be set in ``config.yaml``. In general, the smaller the grid cell size is, the smaller this value needs to be in order to avoid numerical instabilities during the forecast. ``RESTART_INTERVAL``: (Default: 0) Frequency of the output restart files in hours. Using the default interval (0), restart files are produced at the end of a forecast run. When ``RESTART_INTERVAL: 1``, restart files are produced every hour with the prefix "YYYYMMDD.HHmmSS." in the ``RESTART`` directory. @@ -999,20 +1016,20 @@ Write-Component (Quilting) Parameters ----------------------------------------- .. 
note:: - The :term:`UPP` (called by the ``RUN_POST_TN`` task) cannot process output on the native grid types ("GFDLgrid" and "ESGgrid"), so output fields are interpolated to a **write component grid** before writing them to an output file. The output files written by the UFS Weather Model use an Earth System Modeling Framework (:term:`ESMF`) component, referred to as the **write component**. This model component is configured with settings in the ``model_configure`` file, as described in `Section 4.2.3 `__ of the UFS Weather Model documentation. + The :term:`UPP` (called by the ``TN_RUN_POST`` task) cannot process output on the native grid types ("GFDLgrid" and "ESGgrid"), so output fields are interpolated to a **write component grid** before writing them to an output file. The output files written by the UFS Weather Model use an Earth System Modeling Framework (:term:`ESMF`) component, referred to as the **write component**. This model component is configured with settings in the ``model_configure`` file, as described in `Section 4.2.3 `__ of the UFS Weather Model documentation. ``QUILTING``: (Default: true) .. attention:: The regional grid requires the use of the write component, so users generally should not need to change the default value for ``QUILTING``. - Flag that determines whether to use the write component for writing forecast output files to disk. If set to true, the forecast model will output files named ``dynf$HHH.nc`` and ``phyf$HHH.nc`` (where ``HHH`` is the 3-digit forecast hour) containing dynamics and physics fields, respectively, on the write-component grid. For example, the output files for the 3rd hour of the forecast would be ``dynf$003.nc`` and ``phyf$003.nc``. (The regridding from the native FV3-LAM grid to the write-component grid is done by the forecast model.) If ``QUILTING`` is set to false, then the output file names are ``fv3_history.nc`` and ``fv3_history2d.nc``, and they contain fields on the native grid. Although the UFS Weather Model can run without quilting, the regional grid requires the use of the write component. Therefore, QUILTING should be set to true when running the SRW App. If ``QUILTING`` is set to false, the ``RUN_POST_TN`` (meta)task cannot run because the :term:`UPP` code called by this task cannot process fields on the native grid. In that case, the ``RUN_POST_TN`` (meta)task will be automatically removed from the Rocoto workflow XML. The :ref:`INLINE POST ` option also requires ``QUILTING`` to be set to true in the SRW App. Valid values: ``True`` | ``False`` + Flag that determines whether to use the write component for writing forecast output files to disk. If set to true, the forecast model will output files named ``dynf$HHH.nc`` and ``phyf$HHH.nc`` (where ``HHH`` is the 3-digit forecast hour) containing dynamics and physics fields, respectively, on the write-component grid. For example, the output files for the 3rd hour of the forecast would be ``dynf$003.nc`` and ``phyf$003.nc``. (The regridding from the native FV3-LAM grid to the write-component grid is done by the forecast model.) If ``QUILTING`` is set to false, then the output file names are ``fv3_history.nc`` and ``fv3_history2d.nc``, and they contain fields on the native grid. Although the UFS Weather Model can run without quilting, the regional grid requires the use of the write component. Therefore, QUILTING should be set to true when running the SRW App. 
If ``QUILTING`` is set to false, the ``TN_RUN_POST`` (meta)task cannot run because the :term:`UPP` code called by this task cannot process fields on the native grid. In that case, the ``TN_RUN_POST`` (meta)task will be automatically removed from the Rocoto workflow XML. The :ref:`INLINE POST ` option also requires ``QUILTING`` to be set to true in the SRW App. Valid values: ``True`` | ``False`` ``PRINT_ESMF``: (Default: false) Flag that determines whether to output extra (debugging) information from :term:`ESMF` routines. Note that the write component uses ESMF library routines to interpolate from the native forecast model grid to the user-specified output grid (which is defined in the model configuration file ``model_configure`` in the forecast run directory). Valid values: ``True`` | ``False`` ``WRTCMP_write_groups``: (Default: 1) - The number of write groups (i.e., groups of :term:`MPI` tasks) to use in the write component. + The number of write groups (i.e., groups of :term:`MPI` tasks) to use in the write component. Each write group will write to one set of output files (a ``dynf${fhr}.nc`` and a ``phyf${fhr}.nc`` file, where ``${fhr}`` is the forecast hour). Each write group contains ``WRTCMP_write_tasks_per_group`` tasks. Usually, one write group is sufficient. This may need to be increased if the forecast is proceeding so quickly that a single write group cannot complete writing to its set of files before there is a need/request to start writing the next set of files at the next output time. ``WRTCMP_write_tasks_per_group``: (Default: 20) The number of MPI tasks to allocate for each write group. @@ -1131,6 +1148,9 @@ These parameters are associated with the fixed (i.e., static) files. On `Level 1 ``FIXlut``: (Default: "") System directory where the lookup tables for optics properties are located. +``FIXshp``: (Default: "") + System directory where the graphics shapefiles are located. On Level 1 systems, these are set within the machine files. Users on other systems will need to provide the path to the directory that contains the *Natural Earth* shapefiles. + ``TOPO_DIR``: (Default: "") The location on disk of the static input files used by the ``make_orog`` task (i.e., ``orog.x`` and ``shave.x``). Can be the same as ``FIXgsm``. @@ -1150,7 +1170,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. -``RUN_POST_TN``: (Default: "run_post") +``TN_RUN_POST``: (Default: "run_post") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_RUN_POST``: (Default: 2) @@ -1205,6 +1225,49 @@ Set parameters for customizing the :term:`UPP`. Note that this variable is first changed to lower case before being used to construct the file names. +RUN_PRDGEN Configuration Parameters +===================================== + +Non-default parameters for the ``run_prdgen`` task are set in the ``task_run_prdgen:`` section of the ``config.yaml`` file. + +Basic Task Parameters +--------------------------------- +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. + +``TN_RUN_PRDGEN``: (Default: "run_prdgen") + Set the name of this Rocoto workflow task. Users typically do not need to change this value. + +``NNODES_RUN_PRDGEN``: (Default: 1) + Number of nodes to use for the job. + +``PPN_RUN_PRDGEN``: (Default: 22) + Number of :term:`MPI` processes per node. 
+ +``WTIME_RUN_PRDGEN``: (Default: 00:30:00) + Maximum time for the task to complete. + +``MAXTRIES_RUN_PRDGEN``: (Default: 2) + Maximum number of times to attempt the task. + +``KMP_AFFINITY_RUN_PRDGEN``: (Default: "scatter") + Intel Thread Affinity Interface for the ``run_prdgen`` task. See :ref:`this note ` for more information on thread affinity. + +``OMP_NUM_THREADS_RUN_PRDGEN``: (Default: 1) + The number of OpenMP threads to use for parallel regions. + +``OMP_STACKSIZE_RUN_PRDGEN``: (Default: "1024m") + Controls the size of the stack for threads created by the OpenMP implementation. + +``DO_PARALLEL_PRDGEN``: (Default: false) + Flag that determines whether to use CFP to run the product generation job in parallel. CFP is a utility that allows the user to launch a number of small jobs across nodes/cpus in one batch command. This option should be used with the ``RRFS_NA_3km`` grid and ``PPN_RUN_PRDGEN`` should be set to 22. + +``ADDNL_OUTPUT_GRIDS``: (Default: []) + Set additional output grids for wgrib2 remapping, if any. Space-separated list of strings, e.g., ( "130" "242" "clue"). Default is no additional grids. + +``TESTBED_FIELDS_FN``: (Default: "") + The file which lists grib2 fields to be extracted for testbed files. Empty string means no need to generate testbed files. + + .. _get-obs-ccpa: GET_OBS_CCPA Configuration Parameters @@ -1212,7 +1275,7 @@ GET_OBS_CCPA Configuration Parameters Non-default parameters for the ``get_obs_ccpa`` task are set in the ``task_get_obs_ccpa:`` section of the ``config.yaml`` file. -``GET_OBS_CCPA_TN``: (Default: "get_obs_ccpa") +``TN_GET_OBS_CCPA``: (Default: "get_obs_ccpa") Set the name of this Rocoto workflow task. Users typically do not need to change this value. See :numref:`Section %s ` for more information about the verification tasks. ``NNODES_GET_OBS_CCPA``: (Default: 1) @@ -1234,7 +1297,7 @@ GET_OBS_MRMS Configuration Parameters Non-default parameters for the ``get_obs_mrms`` task are set in the ``task_get_obs_mrms:`` section of the ``config.yaml`` file. See :numref:`Section %s ` for more information about the verification tasks. -``GET_OBS_MRMS_TN``: (Default: "get_obs_mrms") +``TN_GET_OBS_MRMS``: (Default: "get_obs_mrms") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_GET_OBS_MRMS``: (Default: 1) @@ -1256,7 +1319,7 @@ GET_OBS_NDAS Configuration Parameters Non-default parameters for the ``get_obs_ndas`` task are set in the ``task_get_obs_ndas:`` section of the ``config.yaml`` file. See :numref:`Section %s ` for more information about the verification tasks. -``GET_OBS_NDAS_TN``: (Default: "get_obs_ndas") +``TN_GET_OBS_NDAS``: (Default: "get_obs_ndas") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_GET_OBS_NDAS``: (Default: 1) @@ -1279,7 +1342,7 @@ VX_GRIDSTAT Configuration Parameters Non-default parameters for the ``run_gridstatvx`` task are set in the ``task_run_vx_gridstat:`` section of the ``config.yaml`` file. -``VX_GRIDSTAT_TN``: (Default: "run_gridstatvx") +``TN_VX_GRIDSTAT``: (Default: "run_gridstatvx") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_GRIDSTAT``: (Default: 1) @@ -1300,7 +1363,7 @@ VX_GRIDSTAT_REFC Configuration Parameters Non-default parameters for the ``run_gridstatvx_refc`` task are set in the ``task_run_vx_gridstat_refc:`` section of the ``config.yaml`` file. 
-``VX_GRIDSTAT_REFC_TN``: (Default: "run_gridstatvx_refc") +``TN_VX_GRIDSTAT_REFC``: (Default: "run_gridstatvx_refc") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_GRIDSTAT``: (Default: 1) @@ -1321,7 +1384,7 @@ VX_GRIDSTAT_RETOP Configuration Parameters Non-default parameters for the ``run_gridstatvx_retop`` task are set in the ``task_run_vx_gridstat_retop:`` section of the ``config.yaml`` file. -``VX_GRIDSTAT_RETOP_TN``: (Default: "run_gridstatvx_retop") +``TN_VX_GRIDSTAT_RETOP``: (Default: "run_gridstatvx_retop") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_GRIDSTAT``: (Default: 1) @@ -1342,7 +1405,7 @@ VX_GRIDSTAT_03h Configuration Parameters Non-default parameters for the ``run_gridstatvx_03h`` task are set in the ``task_run_vx_gridstat_03h:`` section of the ``config.yaml`` file. -``VX_GRIDSTAT_03h_TN``: (Default: "run_gridstatvx_03h") +``TN_VX_GRIDSTAT_03h``: (Default: "run_gridstatvx_03h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_GRIDSTAT``: (Default: 1) @@ -1363,7 +1426,7 @@ VX_GRIDSTAT_06h Configuration Parameters Non-default parameters for the ``run_gridstatvx_06h`` task are set in the ``task_run_vx_gridstat_06h:`` section of the ``config.yaml`` file. -``VX_GRIDSTAT_06h_TN``: (Default: "run_gridstatvx_06h") +``TN_VX_GRIDSTAT_06h``: (Default: "run_gridstatvx_06h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_GRIDSTAT``: (Default: 1) @@ -1384,7 +1447,7 @@ VX_GRIDSTAT_24h Configuration Parameters Non-default parameters for the ``run_gridstatvx_24h`` task are set in the ``task_run_vx_gridstat_24h:`` section of the ``config.yaml`` file. -``VX_GRIDSTAT_24h_TN``: (Default: "run_gridstatvx_24h") +``TN_VX_GRIDSTAT_24h``: (Default: "run_gridstatvx_24h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_GRIDSTAT``: (Default: 1) @@ -1406,7 +1469,7 @@ VX_POINTSTAT Configuration Parameters Non-default parameters for the ``run_pointstatvx`` task are set in the ``task_run_vx_pointstat:`` section of the ``config.yaml`` file. -``VX_POINTSTAT_TN``: (Default: "run_pointstatvx") +``TN_VX_POINTSTAT``: (Default: "run_pointstatvx") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_POINTSTAT``: (Default: 1) @@ -1428,31 +1491,31 @@ VX_ENSGRID Configuration Parameters Non-default parameters for the ``run_ensgridvx_*`` tasks are set in the ``task_run_vx_ensgrid:`` section of the ``config.yaml`` file. -``VX_ENSGRID_03h_TN``: (Default: "run_ensgridvx_03h") +``TN_VX_ENSGRID_03h``: (Default: "run_ensgridvx_03h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``MAXTRIES_VX_ENSGRID_03h``: (Default: 1) Maximum number of times to attempt the task. -``VX_ENSGRID_06h_TN``: (Default: "run_ensgridvx_06h") +``TN_VX_ENSGRID_06h``: (Default: "run_ensgridvx_06h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``MAXTRIES_VX_ENSGRID_06h``: (Default: 1) Maximum number of times to attempt the task. -``VX_ENSGRID_24h_TN``: (Default: "run_ensgridvx_24h") +``TN_VX_ENSGRID_24h``: (Default: "run_ensgridvx_24h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``MAXTRIES_VX_ENSGRID_24h``: (Default: 1) Maximum number of times to attempt the task. 
-``VX_ENSGRID_RETOP_TN``: (Default: "run_ensgridvx_retop") +``TN_VX_ENSGRID_RETOP``: (Default: "run_ensgridvx_retop") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``MAXTRIES_VX_ENSGRID_RETOP``: (Default: 1) Maximum number of times to attempt the task. -``VX_ENSGRID_PROB_RETOP_TN``: (Default: "run_ensgridvx_prob_retop") +``TN_VX_ENSGRID_PROB_RETOP``: (Default: "run_ensgridvx_prob_retop") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``MAXTRIES_VX_ENSGRID_PROB_RETOP``: (Default: 1) @@ -1476,7 +1539,7 @@ VX_ENSGRID_REFC Configuration Parameters Non-default parameters for the ``run_ensgridvx_refc`` task are set in the ``task_run_vx_ensgrid_refc:`` section of the ``config.yaml`` file. -``VX_ENSGRID_REFC_TN``: (Default: "run_ensgridvx_refc") +``TN_VX_ENSGRID_REFC``: (Default: "run_ensgridvx_refc") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSGRID``: (Default: 1) @@ -1497,7 +1560,7 @@ VX_ENSGRID_MEAN Configuration Parameters Non-default parameters for the ``run_ensgridvx_mean`` task are set in the ``task_run_vx_ensgrid_mean:`` section of the ``config.yaml`` file. -``VX_ENSGRID_MEAN_TN``: (Default: "run_ensgridvx_mean") +``TN_VX_ENSGRID_MEAN``: (Default: "run_ensgridvx_mean") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSGRID_MEAN``: (Default: 1) @@ -1518,7 +1581,7 @@ VX_ENSGRID_MEAN_03h Configuration Parameters Non-default parameters for the ``run_ensgridvx_mean_03h`` task are set in the ``task_run_vx_ensgrid_mean_03h:`` section of the ``config.yaml`` file. -``VX_ENSGRID_MEAN_03h_TN``: (Default: "run_ensgridvx_mean_03h") +``TN_VX_ENSGRID_MEAN_03h``: (Default: "run_ensgridvx_mean_03h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSGRID_MEAN``: (Default: 1) @@ -1539,7 +1602,7 @@ VX_ENSGRID_MEAN_06h Configuration Parameters Non-default parameters for the ``run_ensgridvx_mean_06h`` task are set in the ``task_run_vx_ensgrid_mean_06h:`` section of the ``config.yaml`` file. -``VX_ENSGRID_MEAN_06h_TN``: (Default: "run_ensgridvx_mean_06h") +``TN_VX_ENSGRID_MEAN_06h``: (Default: "run_ensgridvx_mean_06h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSGRID_MEAN``: (Default: 1) @@ -1560,7 +1623,7 @@ VX_ENSGRID_MEAN_24h Configuration Parameters Non-default parameters for the ``run_ensgridvx_mean_24h`` task are set in the ``task_run_vx_ensgrid_mean_24h:`` section of the ``config.yaml`` file. -``VX_ENSGRID_MEAN_24h_TN``: (Default: "run_ensgridvx_mean_24h") +``TN_VX_ENSGRID_MEAN_24h``: (Default: "run_ensgridvx_mean_24h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSGRID_MEAN``: (Default: 1) @@ -1581,7 +1644,7 @@ VX_ENSGRID_PROB Configuration Parameters Non-default parameters for the ``run_ensgridvx_prob`` task are set in the ``task_run_vx_ensgrid_prob:`` section of the ``config.yaml`` file. -``VX_ENSGRID_PROB_TN``: (Default: "run_ensgridvx_prob") +``TN_VX_ENSGRID_PROB``: (Default: "run_ensgridvx_prob") Set the name of this Rocoto workflow task. Users typically do not need to change this value. 
``NNODES_VX_ENSGRID_PROB``: (Default: 1) @@ -1602,7 +1665,7 @@ VX_ENSGRID_PROB_03h Configuration Parameters Non-default parameters for the ``run_ensgridvx_prob_03h`` task are set in the ``task_run_vx_ensgrid_prob_03h:`` section of the ``config.yaml`` file. -``VX_ENSGRID_PROB_03h_TN``: (Default: "run_ensgridvx_prob_03h") +``TN_VX_ENSGRID_PROB_03h``: (Default: "run_ensgridvx_prob_03h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSGRID_PROB``: (Default: 1) @@ -1623,7 +1686,7 @@ VX_ENSGRID_PROB_06h Configuration Parameters Non-default parameters for the ``run_ensgridvx_prob_06h`` task are set in the ``task_run_vx_ensgrid_prob_06h:`` section of the ``config.yaml`` file. -``VX_ENSGRID_PROB_06h_TN``: (Default: "run_ensgridvx_prob_06h") +``TN_VX_ENSGRID_PROB_06h``: (Default: "run_ensgridvx_prob_06h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSGRID_PROB``: (Default: 1) @@ -1644,7 +1707,7 @@ VX_ENSGRID_PROB_24h Configuration Parameters Non-default parameters for the ``run_ensgridvx_prob_24h`` task are set in the ``task_run_vx_ensgrid_prob_24h:`` section of the ``config.yaml`` file. -``VX_ENSGRID_PROB_24h_TN``: (Default: "run_ensgridvx_prob_24h") +``TN_VX_ENSGRID_PROB_24h``: (Default: "run_ensgridvx_prob_24h") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSGRID_PROB``: (Default: 1) @@ -1666,7 +1729,7 @@ VX_ENSPOINT Configuration Parameters Non-default parameters for the ``run_enspointvx`` task are set in the ``task_run_vx_enspoint:`` section of the ``config.yaml`` file. -``VX_ENSPOINT_TN``: (Default: "run_enspointvx") +``TN_VX_ENSPOINT``: (Default: "run_enspointvx") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSPOINT``: (Default: 1) @@ -1687,7 +1750,7 @@ VX_ENSPOINT_MEAN Configuration Parameters Non-default parameters for the ``run_enspointvx_mean`` task are set in the ``task_run_vx_enspoint_mean:`` section of the ``config.yaml`` file. -``VX_ENSPOINT_MEAN_TN``: (Default: "run_enspointvx_mean") +``TN_VX_ENSPOINT_MEAN``: (Default: "run_enspointvx_mean") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSPOINT_MEAN``: (Default: 1) @@ -1708,7 +1771,7 @@ VX_ENSPOINT_PROB Configuration Parameters Non-default parameters for the ``run_enspointvx_prob`` task are set in the ``task_run_vx_enspoint_prob:`` section of the ``config.yaml`` file. -``VX_ENSPOINT_PROB_TN``: (Default: "run_enspointvx_prob") +``TN_VX_ENSPOINT_PROB``: (Default: "run_enspointvx_prob") Set the name of this Rocoto workflow task. Users typically do not need to change this value. ``NNODES_VX_ENSPOINT_PROB``: (Default: 1) @@ -1723,6 +1786,52 @@ Non-default parameters for the ``run_enspointvx_prob`` task are set in the ``tas ``MAXTRIES_VX_ENSPOINT_PROB``: (Default: 1) Maximum number of times to attempt the task. +.. _PlotVars: + +PLOT_ALLVARS Configuration Parameters +======================================== + +Non-default parameters for the ``plot_allvars`` task are set in the ``task_plot_allvars:`` section of the ``config.yaml`` file. + +Basic Task Parameters +-------------------------- + +For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. Typically, users do not need to adjust the default values. 
+
+``TN_PLOT_ALLVARS``: (Default: "plot_allvars")
+   Set the name of this Rocoto workflow task. Users typically do not need to change this value.
+
+``NNODES_PLOT_ALLVARS``: (Default: 1)
+   Number of nodes to use for the job.
+
+``PPN_PLOT_ALLVARS``: (Default: 24)
+   Number of :term:`MPI` processes per node.
+
+``WTIME_PLOT_ALLVARS``: (Default: 01:00:00)
+   Maximum time for the task to complete.
+
+``MAXTRIES_PLOT_ALLVARS``: (Default: 1)
+   Maximum number of times to attempt the task.
+
+Additional Parameters
+------------------------
+
+Typically, the following parameters must be set explicitly by the user in the configuration file (``config.yaml``) when executing the plotting tasks.
+
+``COMOUT_REF``: (Default: "")
+   The directory where the GRIB2 files from post-processing are located. In *community* mode (i.e., when ``RUN_ENVIR: "community"``), this directory will correspond to the location in the experiment directory where the post-processed output can be found (e.g., ``$EXPTDIR/$DATE_FIRST_CYCL/postprd``). In *nco* mode, this directory should be set to the location of the ``COMOUT`` directory and end with ``$PDY/$cyc``. For more detail on *nco* standards and directory naming conventions, see `WCOSS Implementation Standards `__ (particularly pp. 4-5).
+
+``PLOT_FCST_START``: (Default: 0)
+   The starting forecast hour for the plotting task. For example, if a forecast starts at 18h/18z, this is considered the 0th forecast hour, so "starting forecast hour" should be 0, not 18. If a forecast starts at 18h/18z, but the user only wants plots from the 6th forecast hour on, "starting forecast hour" should be 6.
+
+``PLOT_FCST_INC``: (Default: 3)
+   Forecast hour increment for the plotting task. For example, if the user wants plots for each forecast hour, they should set ``PLOT_FCST_INC: 1``. If the user only wants plots for some of the output (e.g., every 6 hours), they should set ``PLOT_FCST_INC: 6``.
+
+``PLOT_FCST_END``: (Default: "")
+   The last forecast hour for the plotting task. For example, if a forecast runs for 24 hours and the user wants plots for each available hour of forecast output, they should set ``PLOT_FCST_END: 24``. If the user only wants plots from the first 12 hours of the forecast, the "last forecast hour" should be 12.
+
+``PLOT_DOMAINS``: (Default: ["conus"])
+   Domains to plot. Currently supported options are ["conus"], ["regional"], or both (i.e., ["conus", "regional"]).

Global Configuration Parameters
===================================
@@ -1912,11 +2021,6 @@ The parameters below turn on SPP in Noah or RUC LSM (support for Noah MP is in p
``LSM_SPP_MAG_LIST``: (Default: [ 0.017, 0.001, 0.001, 0.001, 0.001, 0.001, 0.2 ] )
   Sets the maximum random pattern amplitude for each of the LSM perturbations.

-.. COMMENT: This variable no longer appears and was going to be removed. See if anything has replaced it.
-   ``LSM_SPP_EACH_STEP``: (Default: "true")
-   When set to "TRUE", it sets ``lndp_each_step=.true.`` and perturbs each time step.
-
-
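As an editorial aside (not part of the changeset), the plotting parameters documented above might be overridden in ``config.yaml`` along the following lines. The values are illustrative only, and the renamed ``TN_*`` variables earlier in this file follow the same per-task override pattern:

.. code-block:: console

   # Illustrative overrides for the plotting task in config.yaml (example values only)
   task_plot_allvars:
     PLOT_FCST_START: 0         # begin with the first forecast hour of output
     PLOT_FCST_INC: 6           # plot every 6th forecast hour
     PLOT_FCST_END: 24          # last forecast hour to plot
     PLOT_DOMAINS: ["conus"]    # or ["regional"], or ["conus", "regional"]
     # COMOUT_REF points to the post-processed GRIB2 output, e.g.,
     # $EXPTDIR/$DATE_FIRST_CYCL/postprd in community mode (see above).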
.. _HaloBlend:

Halo Blend Parameter
diff --git a/docs/UsersGuide/source/ContainerQuickstart.rst b/docs/UsersGuide/source/ContainerQuickstart.rst
index 8b72c079e5..8515e097a3 100644
--- a/docs/UsersGuide/source/ContainerQuickstart.rst
+++ b/docs/UsersGuide/source/ContainerQuickstart.rst
@@ -4,7 +4,7 @@
Container-Based Quick Start Guide
====================================
-This Container-Based Quick Start Guide will help users build and run the "out-of-the-box" case for the Unified Forecast System (:term:`UFS`) Short-Range Weather (SRW) Application using a `Singularity `__ container. The :term:`container` approach provides a uniform environment in which to build and run the SRW App. Normally, the details of building and running the SRW App vary from system to system due to the many possible combinations of operating systems, compilers, :term:`MPI`'s, and package versions available. Installation via Singularity container reduces this variability and allows for a smoother SRW App build experience. Normally, containers can only run on a single compute node and are not compatible with the `Rocoto workflow manager `__, so users must run each task in the workflow manually. However, the Singularity container described in this chapter has been adapted such that it is able to run across multiple nodes using Rocoto. This makes it an excellent starting point for beginners. The :ref:`non-container build approach ` may still be more appropriate for users who desire additional customizability, particularly if they already have experience running the SRW App.
+This Container-Based Quick Start Guide will help users build and run the "out-of-the-box" case for the Unified Forecast System (:term:`UFS`) Short-Range Weather (SRW) Application using a `Singularity `__ container. The :term:`container` approach provides a uniform environment in which to build and run the SRW App. Normally, the details of building and running the SRW App vary from system to system due to the many possible combinations of operating systems, compilers, :term:`MPIs `, and package versions available. Installation via Singularity container reduces this variability and allows for a smoother SRW App build experience. Normally, containers can only run on a single compute node and are not compatible with the `Rocoto workflow manager `__, so users must run each task in the workflow manually. However, the Singularity container described in this chapter has been adapted such that it is able to run across multiple nodes using Rocoto. This makes it an excellent starting point for beginners. The :ref:`non-container build approach ` may still be more appropriate for users who desire additional customizability, particularly if they already have experience running the SRW App.

The "out-of-the-box" SRW App case described in this User's Guide builds a weather forecast for June 15-16, 2019. Multiple convective weather events during these two days produced over 200 filtered storm reports. Severe weather was clustered in two areas: the Upper Midwest through the Ohio Valley and the Southern Great Plains. This forecast uses a predefined 25-km Continental United States (:term:`CONUS`) grid (RRFS_CONUS_25km), the Global Forecast System (:term:`GFS`) version 16 physics suite (FV3_GFS_v16 :term:`CCPP`), and :term:`FV3`-based GFS raw external model data for initialization.
@@ -28,7 +28,7 @@ Users must have an **Intel** compiler and :term:`MPI` (available for free `here Install Singularity ^^^^^^^^^^^^^^^^^^^^^^^ -To build and run the SRW App using a Singularity container, first install the Singularity package according to the `Singularity Installation Guide `__. This will include the installation of dependencies and the installation of the Go programming language. SingularityCE Version 3.7 or above is recommended. +To build and run the SRW App using a Singularity container, first install the Singularity package according to the `Singularity Installation Guide `__. This will include the installation of dependencies and the installation of the Go programming language. SingularityCE Version 3.7 or above is recommended. .. warning:: Docker containers can only be run with root privileges, and users cannot have root privileges on :term:`HPCs `. Therefore, it is not possible to build the SRW App, which uses the HPC-Stack, inside a Docker container on an HPC system. However, a Singularity image may be built directly from a Docker image for use on the system. @@ -36,12 +36,12 @@ To build and run the SRW App using a Singularity container, first install the Si Working in the Cloud or on HPC Systems ----------------------------------------- -For users working on systems with limited disk space in their ``/home`` directory, it is recommended to set the ``SINGULARITY_CACHEDIR`` and ``SINGULARITY_TEMPDIR`` environment variables to point to a location with adequate disk space. For example: +For users working on systems with limited disk space in their ``/home`` directory, it is recommended to set the ``SINGULARITY_CACHEDIR`` and ``SINGULARITY_TMPDIR`` environment variables to point to a location with adequate disk space. For example: .. code-block:: export SINGULARITY_CACHEDIR= - export SINGULARITY_TEMPDIR= + export SINGULARITY_TMPDIR= where ``/absolute/path/to/writable/directory/`` refers to a writable directory (usually a project or user directory within ``/lustre``, ``/work``, ``/scratch``, or ``/glade`` on NOAA Level 1 systems). If the ``cache`` and ``tmp`` directories do not exist already, they must be created with a ``mkdir`` command. @@ -53,7 +53,7 @@ On NOAA Cloud systems, the ``sudo su`` command may also be required: mkdir /lustre/tmp sudo su export SINGULARITY_CACHEDIR=/lustre/cache - export SINGULARITY_TEMPDIR=/lustre/tmp + export SINGULARITY_TMPDIR=/lustre/tmp exit .. note:: @@ -124,11 +124,11 @@ On non-Level 1 systems, users should build the container in a writable sandbox: Some users may prefer to issue the command without the ``sudo`` prefix. Whether ``sudo`` is required is system-dependent. .. note:: - Users can choose to build a release version of the container (SRW App v2.0.0) using a similar command: + Users can choose to build a release version of the container (SRW App v2.1.0) using a similar command: .. code-block:: console - sudo singularity build --sandbox ubuntu20.04-intel-srwapp docker://noaaepic/ubuntu20.04-intel22-ufs-srwapp:release-public-v2 + sudo singularity build --sandbox ubuntu20.04-intel-srwapp docker://noaaepic/ubuntu20.04-intel-srwapp:release-public-v2.1.0 .. _WorkOnHPC: @@ -210,7 +210,7 @@ Users can run ``exit`` to exit the shell. Download and Stage the Data ============================ -The SRW App requires input files to run. These include static datasets, initial and boundary condition files, and model configuration files. 
On Level 1 systems, the data required to run SRW App tests are already available as long as the bind argument (starting with ``-B``) in :numref:`Step %s ` included the directory with the input model data. For Level 2-4 systems, the data must be added manually by the user. Detailed instructions on how to add the data can be found in :numref:`Section %s `. Sections :numref:`%s ` and :numref:`%s ` contain useful background information on the input and output files used in the SRW App. +The SRW App requires input files to run. These include static datasets, initial and boundary condition files, and model configuration files. On Level 1 systems, the data required to run SRW App tests are already available as long as the bind argument (starting with ``-B``) in :numref:`Step %s ` included the directory with the input model data. See :numref:`Table %s ` for Level 1 data locations. For Level 2-4 systems, the data must be added manually by the user. Detailed instructions on how to add the data can be found in :numref:`Section %s `. Sections :numref:`%s ` and :numref:`%s ` contain useful background information on the input and output files used in the SRW App. .. _GenerateForecastC: @@ -246,7 +246,7 @@ To activate the regional workflow, run the following commands: where: - * ```` is replaced with the actual path to the modulefiles on the user's system (often ``$PWD/modulefiles``), and + * ```` is replaced with the actual path to the modulefiles on the user's local system (often ``$PWD/modulefiles``), and * ```` is a valid, lowercased machine/platform name (see the ``MACHINE`` variable in :numref:`Section %s `). The ``wflow_`` modulefile will then output instructions to activate the regional workflow. The user should run the commands specified in the modulefile output. For example, if the output says: @@ -310,7 +310,7 @@ From here, users can follow the steps below to configure the out-of-the-box SRW .. code-block:: console USE_CRON_TO_RELAUNCH: TRUE - CRON_RELAUNCH_INTVL_MNTS: 02 + CRON_RELAUNCH_INTVL_MNTS: 3 There are instructions for running the experiment via additional methods in :numref:`Section %s `. However, this technique (automation via :term:`crontab`) is the simplest option. @@ -322,7 +322,7 @@ From here, users can follow the steps below to configure the out-of-the-box SRW .. code-block:: console USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_SOURCE_BASEDIR_ICS: /scratch2/BMC/det/UFS_SRW_App/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh} + EXTRN_MDL_SOURCE_BASEDIR_ICS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh} EXTRN_MDL_FILES_ICS: [] EXTRN_MDL_DATA_STORES: disk @@ -333,7 +333,7 @@ From here, users can follow the steps below to configure the out-of-the-box SRW .. code-block:: console USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_SOURCE_BASEDIR_LBCS: /scratch2/BMC/det/UFS_SRW_App/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh} + EXTRN_MDL_SOURCE_BASEDIR_LBCS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh} EXTRN_MDL_FILES_LBCS: [] EXTRN_MDL_DATA_STORES: disk @@ -351,7 +351,7 @@ Run the following command to generate the workflow: .. code-block:: console - python generate_FV3LAM_wflow.py + ./generate_FV3LAM_wflow.py This workflow generation script creates an experiment directory and populates it with all the data needed to run through the workflow. The last line of output from this script should start with ``*/3 * * * *`` (or similar). 
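As an editorial aside (not part of the changeset), the container workflow steps touched by the hunks above might be run in a sequence such as the following. All paths are placeholders, and the exact activation commands are printed by the ``wflow_`` modulefile for the user's platform:

.. code-block:: console

   # Illustrative sequence only; substitute paths appropriate for your system.
   export SINGULARITY_CACHEDIR=/lustre/cache     # writable cache location
   export SINGULARITY_TMPDIR=/lustre/tmp         # writable temporary-file location

   # Build the release container as a writable sandbox (sudo may or may not be required):
   singularity build --sandbox ubuntu20.04-intel-srwapp \
       docker://noaaepic/ubuntu20.04-intel-srwapp:release-public-v2.1.0

   # After binding the input model data directory, activating the regional workflow,
   # and editing config.yaml as shown above:
   ./generate_FV3LAM_wflow.py    # creates and populates the experiment directory
   crontab -l                    # the relaunch entry should start with "*/3 * * * *"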
@@ -387,8 +387,4 @@ where: New Experiment =============== -To run a new experiment in the container at a later time, users will need to rerun the commands in :numref:`Section %s ` to reactivate the regional workflow. Then, users can configure a new experiment by updating the environment variables in ``config.yaml`` to reflect the desired experiment configuration. Basic instructions appear in :numref:`Section %s ` above, and detailed instructions can be viewed in :numref:`Section %s `. After adjusting the configuration file, regenerate the experiment by running ``python generate_FV3LAM_wflow.py``. - -Plot the Output -=============== -Two python scripts are provided to generate plots from the FV3-LAM post-processed GRIB2 output. Information on how to generate the graphics can be found in :numref:`Chapter %s `. +To run a new experiment in the container at a later time, users will need to rerun the commands in :numref:`Section %s ` to reactivate the regional workflow. Then, users can configure a new experiment by updating the environment variables in ``config.yaml`` to reflect the desired experiment configuration. Basic instructions appear in :numref:`Section %s ` above, and detailed instructions can be viewed in :numref:`Section %s `. After adjusting the configuration file, regenerate the experiment by running ``./generate_FV3LAM_wflow.py``. diff --git a/docs/UsersGuide/source/ContributorsGuide.rst b/docs/UsersGuide/source/ContributorsGuide.rst deleted file mode 100644 index efb90ca4cc..0000000000 --- a/docs/UsersGuide/source/ContributorsGuide.rst +++ /dev/null @@ -1,463 +0,0 @@ - -.. _ContributorsGuide: - -============================== -SRW App Contributor's Guide -============================== - -.. _Background: - -Background -=========== - -Authoritative branch ------------------------ - -The ``ufs-srweather-app`` repository maintains a main branch for development called ``develop``. The HEAD of ``develop`` reflects the latest development changes. It points to regularly updated hashes for individual sub-components. Pull requests (PRs) will be merged to ``develop``. - -The ``develop`` branch is protected by the code management team: - #. Pull requests for this branch require approval by at least two code reviewers. - #. A code manager should perform at least one of the reviews and the merge, but other contributors are welcome to provide comments/suggestions. - - -Code Management Team --------------------------- - -Scientists from across multiple labs and organizations have volunteered to review pull requests for the ``develop`` branch: - -.. 
table:: - - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | **Organization** | **Reviewers** | **Areas of Expertise** | - +==================+================================================+=============================================================================================+ - | EMC | Chan-Hoo Jeon (@chan-hoo) | Workflow, Operational platform testing (WCOSS/NCO), and Air quality modeling (Online-CMAQ) | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Ben Blake (@BenjaminBlake-NOAA) | Output visualization, Rocoto | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Ratko Vasic (@RatkoVasic-NOAA) | Workflow, NCO requirements, and operational platform testing | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | EPIC | Mark Potts (@mark-a-potts) | HPC systems | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Jong Kim (@jkbk2004) | UFS Weather Model configuration, forecast sensitivity analysis, data assimilation | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Michael Lueken (@MichaelLueken) | SRW App code management | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Natalie Perlin (@natalie-perlin) | Generic Linux/Mac installations, hpc-stack/spack-stack | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Gillian Petro (@gspetro-NOAA) | Documentation | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Edward Snyder (@EdwardSnyder-NOAA) | WE2E testing, input data | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | GLERL | David Wright (@dmwright526) | FVCOM integration, output visualization, preprocessing tasks | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | GSL | Jeff Beck (@JeffBeck-NOAA) | SRW App configuration/workflow, code management, meteorological evaluation | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Gerard Ketefian (@gsketefian) | regional workflow scripts, jinja templates, and verification tasks | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Linlin Pan (@panll) | Workflow, CCPP/physics, verification | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | 
Christina Holt (@christinaholtNOAA) | Workflow, conda environment support, testing, and code management | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Christopher Harrop (@christopherwharrop-noaa) | Rocoto, code management, and testing | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Daniel Abdi (@danielabdi-noaa) | Workflow generation, testing RRFS on the cloud, environment modules | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | NCAR | Mike Kavulich (@mkavulich) | CCPP/physics | - | +------------------------------------------------+---------------------------------------------------------------------------------------------+ - | | Will Mayfield (@willmayfield) | Verification/METplus tasks, regional workflow (esp. on Cheyenne) | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - | NSSL | Yunheng Wang (@ywangwof) | HPC systems, code management and regional workflow especially on Stampede, Jet | - | | | and NSSL computers | - +------------------+------------------------------------------------+---------------------------------------------------------------------------------------------+ - -.. _ContribProcess: - -Contribution Process -======================== - -The steps below should be followed in order to make changes to the ``develop`` branch of the ``ufs-srweather-app`` repository. Communication with code managers and the code management team throughout the process is encouraged. - - #. **Issue** - Open an issue to document changes. Click `here `__ to open a new ``ufs-srweather-app`` issue or see :numref:`Step %s ` for detailed instructions. - #. **GitFlow** - Follow `GitFlow `__ procedures for development. - #. **Fork the repository** - Read more `here `__ about forking in GitHub. - #. **Create a branch** - Create a branch in your fork of the authoritative repository. Follow `GitFlow `__ conventions when creating the branch. All development should take place on a branch, *not* on ``develop``. Branches should be named as follows, where [name] is a one-word description of the branch: - - * **bugfix/[name]:** Fixes a demonstrably incorrect portion of code - * **feature/[name]:** Adds a new feature to the code or improves an existing portion of the code - * **text/[name]:** Changes elements of the repository that do not impact program output or log files (e.g., changes to README, documentation, comments, changing quoted Registry elements, white space alignment). Any change that does not impact the compiled code in any way should fall under this category. - - #. **Development** - Perform and test changes in the branch (not on ``develop``!). Document work in the issue and mention the issue number in commit messages to link your work to the issue (e.g., ``commit -m "Issue #23 - "``). Test code modifications on as many platforms as possible, and request help with further testing from the code management team when unable to test on all Level 1 platforms. Document changes to the workflow and capabilities in the ``.rst`` files so that the SRW App documentation stays up-to-date. - #. 
**Pull request** - When ready to merge changes back to the ``develop`` branch, the code developer should initiate a pull request (PR) of the feature branch into the ``develop`` branch. Read `here `__ about pull requests in GitHub. When a PR is initiated, the :ref:`PR Template