diff --git a/.github/actions/testing-setup/action.yml b/.github/actions/testing-setup/action.yml new file mode 100644 index 0000000000..c47270af0d --- /dev/null +++ b/.github/actions/testing-setup/action.yml @@ -0,0 +1,77 @@ +name: 'Build-.testing-prerequisites' +description: 'Build pre-requisites for .testing including FMS and a symmetric MOM6 executable' +inputs: + build_symmetric: + description: 'If true, will build the symmetric MOM6 executable' + required: false + default: 'true' + install_python: + description: 'If true, will install the local python env needed for .testing' + required: false + default: 'true' +runs: + using: 'composite' + steps: + - name: Git info + shell: bash + run: | + echo "::group::Git commit info" + echo "git log:" + git log | head -60 + echo "::endgroup::" + + - name: Env + shell: bash + run: | + echo "::group::Environment" + env + echo "::endgroup::" + + - name: Install needed packages for compiling + shell: bash + run: | + echo "::group::Install linux packages" + sudo apt-get update + sudo apt-get install netcdf-bin libnetcdf-dev libnetcdff-dev mpich libmpich-dev + echo "::endgroup::" + + - name: Compile FMS library + shell: bash + run: | + echo "::group::Compile FMS library" + cd .testing + make deps/lib/libFMS.a -s -j + echo "::endgroup::" + + - name: Store compiler flags used in Makefile + shell: bash + run: | + echo "::group::config.mk" + cd .testing + echo "FCFLAGS_DEBUG=-g -O0 -Wextra -Wno-compare-reals -fbacktrace -ffpe-trap=invalid,zero,overflow -fcheck=bounds" >> config.mk + echo "FCFLAGS_REPRO=-g -O2 -fbacktrace" >> config.mk + echo "FCFLAGS_INIT=-finit-real=snan -finit-integer=2147483647 -finit-derived" >> config.mk + echo "FCFLAGS_COVERAGE=--coverage" >> config.mk + cat config.mk + echo "::endgroup::" + + - name: Compile MOM6 in symmetric memory mode + shell: bash + run: | + echo "::group::Compile MOM6 in symmetric memory mode" + cd .testing + test ${{ inputs.build_symmetric }} == true && make build/symmetric/MOM6 -j + echo "::endgroup::" + + - name: Install local python venv for generating input data + shell: bash + run: | + echo "::group::Create local python env for input data generation" + cd .testing + test ${{ inputs.install_python }} == true && make work/local-env + echo "::endgroup::" + + - name: Set flags + shell: bash + run: | + echo "TIMEFORMAT=... 
completed in %lR (user: %lU, sys: %lS)" >> $GITHUB_ENV diff --git a/.github/workflows/coupled-api.yml b/.github/workflows/coupled-api.yml new file mode 100644 index 0000000000..86d7262548 --- /dev/null +++ b/.github/workflows/coupled-api.yml @@ -0,0 +1,33 @@ +name: API for coupled drivers + +on: [push, pull_request] + +jobs: + test-top-api: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + with: + build_symmetric: 'false' + install_python: 'false' + + - name: Compile MOM6 for the GFDL coupled driver + shell: bash + run: make check_mom6_api_coupled -j + + - name: Compile MOM6 for the NUOPC driver + shell: bash + run: make check_mom6_api_nuopc -j + + - name: Compile MOM6 for the MCT driver + shell: bash + run: make check_mom6_api_mct -j diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml new file mode 100644 index 0000000000..60b85e412b --- /dev/null +++ b/.github/workflows/coverage.yml @@ -0,0 +1,24 @@ +name: Code coverage + +on: [push, pull_request] + +jobs: + build-test-nans: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + env: + REPORT_COVERAGE: true + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Run and post coverage + run: make run.symmetric -k -s diff --git a/.github/workflows/documentation-and-style.yml b/.github/workflows/documentation-and-style.yml new file mode 100644 index 0000000000..c171c538d5 --- /dev/null +++ b/.github/workflows/documentation-and-style.yml @@ -0,0 +1,39 @@ +name: Doxygen and style + +on: [push, pull_request] + +jobs: + doxygen: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - name: Check white space (non-blocking) + run: | + ./.testing/trailer.py -e TEOS10 -l 120 src config_src 2>&1 | tee style_errors + continue-on-error: true + + - name: Install packages used when generating documentation + run: | + sudo apt-get update + sudo apt-get install python3-sphinx python3-lxml perl + sudo apt-get install texlive-binaries texlive-base bibtool tex-common texlive-bibtex-extra + sudo apt-get install graphviz + + - name: Build doxygen HTML + run: | + cd docs + perl -e 'print "perl version $^V" . "\n"' + mkdir _build && make nortd DOXYGEN_RELEASE=Release_1_8_13 UPDATEHTMLEQS=Y + cat _build/doxygen_warn_nortd_log.txt + + - name: Report doxygen or style errors + run: | + grep "warning:" docs/_build/doxygen_warn_nortd_log.txt | grep -v "as part of a" | tee doxy_errors + cat style_errors doxy_errors > all_errors + cat all_errors + test ! 
-s all_errors diff --git a/.github/workflows/expression.yml b/.github/workflows/expression.yml new file mode 100644 index 0000000000..c504e6c15a --- /dev/null +++ b/.github/workflows/expression.yml @@ -0,0 +1,27 @@ +name: Expression verification + +on: [push, pull_request] + +jobs: + test-repro-and-dims: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Compile MOM6 using repro optimization + run: make build/repro/MOM6 -j + + - name: Create validation data + run: make run.symmetric -k -s + + - name: Run tests + run: make test.repros test.dims -k -s diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml new file mode 100644 index 0000000000..3406fa9bc8 --- /dev/null +++ b/.github/workflows/other.yml @@ -0,0 +1,27 @@ +name: OpenMP and Restart verification + +on: [push, pull_request] + +jobs: + test-openmp-nan-restarts: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Compile with openMP + run: make build/openmp/MOM6 -j + + - name: Create validation data + run: make run.symmetric -k -s + + - name: Run tests + run: make test.openmps test.nans test.restarts -k -s diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml new file mode 100644 index 0000000000..7dd1f3c703 --- /dev/null +++ b/.github/workflows/regression.yml @@ -0,0 +1,27 @@ +name: Regression + +on: [pull_request] + +jobs: + build-test-regression: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Compile reference model + run: make build.regressions MOM_TARGET_SLUG=$GITHUB_REPOSITORY MOM_TARGET_LOCAL_BRANCH=$GITHUB_BASE_REF DO_REGRESSION_TESTS=true -j + + - name: Create validation data + run: make run.symmetric -k -s + + - name: Regression test + run: make test.regressions DO_REGRESSION_TESTS=true -k -s diff --git a/.github/workflows/stencil.yml b/.github/workflows/stencil.yml new file mode 100644 index 0000000000..20081747cc --- /dev/null +++ b/.github/workflows/stencil.yml @@ -0,0 +1,27 @@ +name: Stencil related verification + +on: [push, pull_request] + +jobs: + test-symmetric-layout-rotation: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Compile MOM6 in asymmetric memory mode + run: make build/asymmetric/MOM6 -j + + - name: Create validation data + run: make run.symmetric -k -s + + - name: Run tests + run: make test.grids test.layouts test.rotations -k -s diff --git a/.gitignore b/.gitignore index ccaecbbead..25f7524d1c 100644 --- a/.gitignore +++ b/.gitignore @@ -4,13 +4,7 @@ html -# Build output -*.o -*.mod -MOM6 - - -# Autoconf +# Autoconf output aclocal.m4 autom4te.cache/ config.log diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1622ae9886..1f9768a6a8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -117,11 +117,36 @@ run: - time tar zxf $CACHE_DIR/build-pgi-repro-$CI_PIPELINE_ID.tgz # time tar zxf $CACHE_DIR/build-gnu-debug-$CI_PIPELINE_ID.tgz - (echo '#!/bin/tcsh';echo 'make -f MRS/Makefile.tests all') > job.sh - - sbatch --clusters=c3,c4 --nodes=29 --time=0:34:00 
--account=gfdl_o --qos=debug --job-name=mom6_regressions --output=log.$CI_PIPELINE_ID --wait job.sh + - sbatch --clusters=c3,c4 --nodes=29 --time=0:34:00 --account=gfdl_o --qos=debug --job-name=mom6_regressions --output=log.$CI_PIPELINE_ID --wait job.sh || MJOB_RETURN_STATE=Fail - cat log.$CI_PIPELINE_ID + - test -z "$MJOB_RETURN_STATE" - test -f restart_results_gnu.tar.gz - time tar zvcf $CACHE_DIR/results-$CI_PIPELINE_ID.tgz *.tar.gz +gnu.testing: + stage: run + tags: + - ncrc4 + script: + - cd .testing + - module unload PrgEnv-pgi PrgEnv-intel PrgEnv-gnu darshan ; module load PrgEnv-gnu ; module unload netcdf gcc ; module load gcc/7.3.0 cray-hdf5 cray-netcdf + - make work/local-env + - make -s -j + - (echo '#!/bin/bash';echo '. ./work/local-env/bin/activate';echo 'make MPIRUN="srun -mblock --exclusive" test -s -j') > job.sh + - sbatch --clusters=c3,c4 --nodes=5 --time=0:05:00 --account=gfdl_o --qos=debug --job-name=MOM6.gnu.testing --output=log.$CI_PIPELINE_ID --wait job.sh || cat log.$CI_PIPELINE_ID && make test + +intel.testing: + stage: run + tags: + - ncrc4 + script: + - cd .testing + - module unload PrgEnv-pgi PrgEnv-intel PrgEnv-gnu darshan; module load PrgEnv-intel; module unload netcdf intel; module load intel/18.0.6.288 cray-hdf5 cray-netcdf + - make work/local-env + - make -s -j + - (echo '#!/bin/bash';echo '. ./work/local-env/bin/activate';echo 'make MPIRUN="srun -mblock --exclusive" test -s -j') > job.sh + - sbatch --clusters=c3,c4 --nodes=5 --time=0:05:00 --account=gfdl_o --qos=debug --job-name=MOM6.gnu.testing --output=log.$CI_PIPELINE_ID --wait job.sh || cat log.$CI_PIPELINE_ID && make test + # Tests gnu:non-symmetric: stage: tests diff --git a/.readthedocs.yml b/.readthedocs.yml index b95a9b901f..f7ad4421b4 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,11 +1,16 @@ -# don't build extra formats (like HTML zip) -formats: - - none +version: 2 -# path to pip requirements file to bring in -# doxygen extensions -requirements_file: docs/requirements.txt +# Extra formats +# PDF generation is failing for now; disabled on 2020-12-02 +#formats: +# - pdf + +# Build documentation +sphinx: + configuration: docs/conf.py python: # make sure we're using Python 3 - version: 3.5 + version: 3 + install: + - requirements: docs/requirements.txt diff --git a/.testing/Makefile b/.testing/Makefile index 4b3dfdefb8..2806d54130 100644 --- a/.testing/Makefile +++ b/.testing/Makefile @@ -1,13 +1,75 @@ +# MOM6 Test suite Makefile +# +# Usage: +# make -j +# Build the FMS library and test executables +# +# make -j test +# Run the test suite, defined in the `tc` directores. +# +# make clean +# Wipe the MOM6 test executables +# (NOTE: This does not delete FMS in the `deps`) +# +# +# Configuration: +# These settings can be provided as either command-line flags, or saved in a +# `config.mk` file. +# +# Test suite configuration: +# +# MPIRUN MPI job launcher (mpirun, srun, etc) +# DO_REPRO_TESTS Enable production ("repro") testing equivalence +# DO_REGRESSION_TESTS: Enable regression tests (usually dev/gfdl) +# REPORT_COVERAGE Enable code coverage and report to codecov +# +# Compiler configuration: +# (NOTE: These are environment variables and may be inherited from a shell.) 
+# +# CC C compiler +# MPICC MPI C compiler +# FC Fortran compiler +# MPIFC MPI Fortran compiler +# +# Build configuration: +# +# FCFLAGS_DEBUG Testing ("debug") compiler flags +# FCFLAGS_REPRO Production ("repro") compiler flags +# FCFLAGS_INIT Variable initialization flags +# FCFLAGS_COVERAGE Code coverage flags +# +# Regression repository ("target") configuration: +# (NOTE: These would typically be configured by a CI such as Travis.) +# +# MOM_TARGET_SLUG URL slug (minus domain) of the target repo +# MOM_TARGET_URL Full URL of the target repo +# MOM_TARGET_LOCAL_BRANCH Target branch name +# +#---- + +# TODO: Bourne shell compatibility SHELL = bash + +# No implicit rules .SUFFIXES: +# No implicit variables +MAKEFLAGS += -R + # User-defined configuration -include config.mk # Set the MPI launcher here +# TODO: This needs more automated configuration MPIRUN ?= mpirun -# Default target compiler flags +# Generic compiler variables are pass through to the builds +export CC +export MPICC +export FC +export MPIFC + +# Builds are distinguished by FCFLAGS # NOTE: FMS will be built using FCFLAGS_DEBUG FCFLAGS_DEBUG ?= -g -O0 FCFLAGS_REPRO ?= -g -O2 @@ -27,6 +89,8 @@ FCFLAGS_COVERAGE ?= # Set to `true` to require identical results from DEBUG and REPRO builds DO_REPRO_TESTS ?= +TIME ?= time + # Many compilers (Intel, GCC on ARM64) do not yet produce identical results # across DEBUG and REPRO builds (as defined below), so we disable on default. @@ -60,7 +124,6 @@ endif # DO_REGRESSION_TESTS: true if $(TRAVIS_PULL_REQUEST) is a PR number # MOM_TARGET_SLUG: TRAVIS_REPO_SLUG # MOM_TARGET_LOCAL_BRANCH: TRAVIS_BRANCH - # These are set to true by our Travis configuration if testing a pull request DO_REGRESSION_TESTS ?= REPORT_COVERAGE ?= @@ -163,7 +226,9 @@ build/asymmetric/Makefile: MOM_ENV=$(PATH_FMS) $(ASYMMETRIC_FCFLAGS) $(MOM_LDFLA build/repro/Makefile: MOM_ENV=$(PATH_FMS) $(REPRO_FCFLAGS) $(MOM_LDFLAGS) build/openmp/Makefile: MOM_ENV=$(PATH_FMS) $(OPENMP_FCFLAGS) $(MOM_LDFLAGS) build/target/Makefile: MOM_ENV=$(PATH_FMS) $(TARGET_FCFLAGS) $(MOM_LDFLAGS) - +build/coupled/Makefile: MOM_ENV=$(PATH_FMS) $(SYMMETRIC_FCFLAGS) $(SYMMETRIC_LDFLAGS) +build/nuopc/Makefile: MOM_ENV=$(PATH_FMS) $(SYMMETRIC_FCFLAGS) $(SYMMETRIC_LDFLAGS) +build/mct/Makefile: MOM_ENV=$(PATH_FMS) $(SYMMETRIC_FCFLAGS) $(SYMMETRIC_LDFLAGS) # Configure script flags build/symmetric/Makefile: MOM_ACFLAGS= @@ -171,7 +236,9 @@ build/asymmetric/Makefile: MOM_ACFLAGS=--enable-asymmetric build/repro/Makefile: MOM_ACFLAGS= build/openmp/Makefile: MOM_ACFLAGS=--enable-openmp build/target/Makefile: MOM_ACFLAGS= - +build/coupled/Makefile: MOM_ACFLAGS=--with-driver=coupled_driver +build/nuopc/Makefile: MOM_ACFLAGS=--with-driver=nuopc_driver +build/mct/Makefile: MOM_ACFLAGS=--with-driver=mct_driver # Fetch regression target source code build/target/Makefile: | $(TARGET_CODEBASE) @@ -188,7 +255,7 @@ build/target/configure: $(TARGET_SOURCE) # Build MOM6 .PRECIOUS: $(foreach b,$(BUILDS),build/$(b)/MOM6) build/%/MOM6: build/%/Makefile - cd $(@D) && time $(MAKE) -j + cd $(@D) && $(TIME) $(MAKE) -j # Use autoconf to construct the Makefile for each target @@ -227,6 +294,9 @@ $(TARGET_CODEBASE): #--- # FMS +# Set up the FMS build environment variables +FMS_ENV = PATH="${PATH}:../../bin" FCFLAGS="$(FCFLAGS_DEBUG)" + # TODO: *.mod dependencies? 
$(DEPS)/lib/libFMS.a: $(DEPS)/fms/build/libFMS.a $(MAKE) -C $(DEPS) lib/libFMS.a @@ -235,7 +305,8 @@ $(DEPS)/fms/build/libFMS.a: $(DEPS)/fms/build/Makefile $(MAKE) -C $(DEPS) fms/build/libFMS.a $(DEPS)/fms/build/Makefile: $(DEPS)/fms/src/configure $(DEPS)/Makefile.fms.in $(MKMF) $(LIST_PATHS) - PATH_ENV="${PATH}:../../bin" FCFLAGS_ENV="$(FCFLAGS_DEBUG)" $(MAKE) -C $(DEPS) fms/build/Makefile + $(FMS_ENV) $(MAKE) -C $(DEPS) fms/build/Makefile + $(MAKE) -C $(DEPS) fms/build/Makefile $(DEPS)/Makefile.fms.in: ../ac/deps/Makefile.fms.in $(DEPS)/Makefile cp $< $(DEPS) @@ -261,8 +332,29 @@ $(DEPS)/Makefile: ../ac/deps/Makefile mkdir -p $(@D) cp $< $@ +#--- +# The following block does a non-library build of a coupled driver interface to MOM, along with everything below it. +# This simply checks that we have not broken the ability to compile. This is not a means to build a complete coupled executable. +# Todo: +# - avoid re-building FMS and MOM6 src by re-using existing object/mod files +# - use autoconf rather than mkmf templates +MK_TEMPLATE ?= ../../$(DEPS)/mkmf/templates/ncrc-gnu.mk +# NUOPC driver +build/nuopc/mom_ocean_model_nuopc.o: build/nuopc/Makefile + cd $(@D) && make $(@F) +check_mom6_api_nuopc: build/nuopc/mom_ocean_model_nuopc.o +# GFDL coupled driver +build/coupled/ocean_model_MOM.o: build/coupled/Makefile + cd $(@D) && make $(@F) +check_mom6_api_coupled: build/coupled/ocean_model_MOM.o +# MCT driver +build/mct/mom_ocean_model_mct.o: build/mct/Makefile + cd $(@D) && make $(@F) +check_mom6_api_mct: build/mct/mom_ocean_model_mct.o + #--- # Python preprocessing + # NOTE: Some less mature environments (e.g. Arm64 Ubuntu) require explicit # installation of numpy before netCDF4, as well as wheel and cython support. work/local-env: @@ -273,6 +365,7 @@ work/local-env: && pip3 install numpy \ && pip3 install netCDF4 + #--- # Testing @@ -304,16 +397,16 @@ run.nans: $(foreach c,$(CONFIGS),work/$(c)/nan/ocean.stats) run.openmp: $(foreach c,$(CONFIGS),work/$(c)/openmp/ocean.stats) # Color highlights for test results -RED=\033[0;31m -YELLOW=\033[0;33m -GREEN=\033[0;32m -MAGENTA=\033[0;35m -RESET=\033[0m +RED = \033[0;31m +YELLOW = \033[0;33m +GREEN = \033[0;32m +MAGENTA = \033[0;35m +RESET = \033[0m -DONE=${GREEN}DONE${RESET} -PASS=${GREEN}PASS${RESET} -WARN=${YELLOW}WARN${RESET} -FAIL=${RED}FAIL${RESET} +DONE = ${GREEN}DONE${RESET} +PASS = ${GREEN}PASS${RESET} +WARN = ${YELLOW}WARN${RESET} +FAIL = ${RED}FAIL${RESET} # Comparison rules # $(1): Test type (grid, layout, &c.) @@ -321,9 +414,10 @@ FAIL=${RED}FAIL${RESET} define CMP_RULE .PRECIOUS: $(foreach b,$(2),work/%/$(b)/ocean.stats) %.$(1): $(foreach b,$(2),work/%/$(b)/ocean.stats) + @test "$$(shell ls -A results/$$* 2>/dev/null)" || rm -rf results/$$* @cmp $$^ || !( \ mkdir -p results/$$*; \ - (diff $$^ | tee results/$$*/ocean.stats.$(1).diff | head) ; \ + (diff $$^ | tee results/$$*/ocean.stats.$(1).diff | head -n 20) ; \ echo -e "$(FAIL): Solutions $$*.$(1) have changed." \ ) @echo -e "$(PASS): Solutions $$*.$(1) agree." @@ -332,7 +426,7 @@ define CMP_RULE %.$(1).diag: $(foreach b,$(2),work/%/$(b)/chksum_diag) @cmp $$^ || !( \ mkdir -p results/$$*; \ - (diff $$^ | tee results/$$*/chksum_diag.$(1).diff | head) ; \ + (diff $$^ | tee results/$$*/chksum_diag.$(1).diff | head -n 20) ; \ echo -e "$(FAIL): Diagnostics $$*.$(1).diag have changed." \ ) @echo -e "$(PASS): Diagnostics $$*.$(1).diag agree." 
@@ -351,10 +445,11 @@ $(foreach d,$(DIMS),$(eval $(call CMP_RULE,dim.$(d),symmetric dim.$(d)))) # Restart tests only compare the final stat record .PRECIOUS: $(foreach b,symmetric restart target,work/%/$(b)/ocean.stats) %.restart: $(foreach b,symmetric restart,work/%/$(b)/ocean.stats) + @test "$(shell ls -A results/$* 2>/dev/null)" || rm -rf results/$* @cmp $(foreach f,$^,<(tr -s ' ' < $(f) | cut -d ' ' -f3- | tail -n 1)) \ || !( \ mkdir -p results/$*; \ - (diff $^ | tee results/$*/chksum_diag.restart.diff | head) ; \ + (diff $^ | tee results/$*/chksum_diag.restart.diff | head -n 20) ; \ echo -e "$(FAIL): Solutions $*.restart have changed." \ ) @echo -e "$(PASS): Solutions $*.restart agree." @@ -363,22 +458,26 @@ $(foreach d,$(DIMS),$(eval $(call CMP_RULE,dim.$(d),symmetric dim.$(d)))) # stats rule is unchanged, but we cannot use CMP_RULE to generate it. %.regression: $(foreach b,symmetric target,work/%/$(b)/ocean.stats) + @test "$(shell ls -A results/$* 2>/dev/null)" || rm -rf results/$* @cmp $^ || !( \ mkdir -p results/$*; \ - (diff $^ | tee results/$*/ocean.stats.regression.diff | head) ; \ + (diff $^ | tee results/$*/ocean.stats.regression.diff | head -n 20) ; \ echo -e "$(FAIL): Solutions $*.regression have changed." \ ) @echo -e "$(PASS): Solutions $*.regression agree." # Regression testing only checks for changes in existing diagnostics %.regression.diag: $(foreach b,symmetric target,work/%/$(b)/chksum_diag) - @! diff $^ | grep "^[<>]" | grep "^>" \ + @! diff $^ | grep "^[<>]" | grep "^>" > /dev/null \ || ! (\ mkdir -p results/$*; \ - (diff $^ | tee results/$*/chksum_diag.regression.diff | head) ; \ + (diff $^ | tee results/$*/chksum_diag.regression.diff | head -n 20) ; \ echo -e "$(FAIL): Diagnostics $*.regression.diag have changed." \ ) - @diff $^ || echo -e "$(WARN): New diagnostics in $<" + @cmp $^ || ( \ + diff $^ | head -n 20; \ + echo -e "$(WARN): New diagnostics in $<" \ + ) @echo -e "$(PASS): Diagnostics $*.regression.diag agree." @@ -407,12 +506,13 @@ work/%/$(1)/ocean.stats work/%/$(1)/chksum_diag: build/$(2)/MOM6 $(VENV_PATH) fi mkdir -p $$(@D)/RESTART echo -e "$(4)" > $$(@D)/MOM_override + rm -f results/$$*/std.$(1).{out,err} cd $$(@D) \ - && time $(5) $(MPIRUN) -n $(6) ../../../$$< 2> std.err > std.out \ + && $(TIME) $(5) $(MPIRUN) -n $(6) ../../../$$< 2> std.err > std.out \ || !( \ mkdir -p ../../../results/$$*/ ; \ - cat std.out | tee ../../../results/$$*/std.$(1).out | tail -20 ; \ - cat std.err | tee ../../../results/$$*/std.$(1).err | tail -20 ; \ + cat std.out | tee ../../../results/$$*/std.$(1).out | tail -n 20 ; \ + cat std.err | tee ../../../results/$$*/std.$(1).err | tail -n 20 ; \ rm ocean.stats chksum_diag ; \ echo -e "$(FAIL): $$*.$(1) failed at runtime." 
\ ) @@ -468,11 +568,13 @@ work/%/restart/ocean.stats: build/symmetric/MOM6 $(VENV_PATH) && printf -v timeunit_int "%.f" "$${timeunit}" \ && halfperiod=$$(printf "%.f" $$(bc <<< "scale=10; 0.5 * $${daymax} * $${timeunit_int}")) \ && printf "\n&ocean_solo_nml\n seconds = $${halfperiod}\n/\n" >> input.nml + # Remove any previous archived output + rm -f results/$*/std.restart{1,2}.{out,err} # Run the first half-period - cd $(@D) && time $(MPIRUN) -n 1 ../../../$< 2> std1.err > std1.out \ + cd $(@D) && $(TIME) $(MPIRUN) -n 1 ../../../$< 2> std1.err > std1.out \ || !( \ - cat std1.out | tee ../../../results/$*/std.restart1.out | tail ; \ - cat std1.err | tee ../../../results/$*/std.restart1.err | tail ; \ + cat std1.out | tee ../../../results/$*/std.restart1.out | tail -n 20 ; \ + cat std1.err | tee ../../../results/$*/std.restart1.err | tail -n 20 ; \ echo -e "$(FAIL): $*.restart failed at runtime." \ ) # Setup the next inputs @@ -480,10 +582,10 @@ work/%/restart/ocean.stats: build/symmetric/MOM6 $(VENV_PATH) mkdir $(@D)/RESTART cd $(@D) && sed -i -e "s/input_filename *= *'n'/input_filename = 'r'/g" input.nml # Run the second half-period - cd $(@D) && time $(MPIRUN) -n 1 ../../../$< 2> std2.err > std2.out \ + cd $(@D) && $(TIME) $(MPIRUN) -n 1 ../../../$< 2> std2.err > std2.out \ || !( \ - cat std2.out | tee ../../../results/$*/std.restart2.out | tail ; \ - cat std2.err | tee ../../../results/$*/std.restart2.err | tail ; \ + cat std2.out | tee ../../../results/$*/std.restart2.out | tail -n 20 ; \ + cat std2.err | tee ../../../results/$*/std.restart2.err | tail -n 20 ; \ echo -e "$(FAIL): $*.restart failed at runtime." \ ) diff --git a/.testing/README.md b/.testing/README.md index adc56e56cd..ef02bcfa09 100644 --- a/.testing/README.md +++ b/.testing/README.md @@ -229,6 +229,7 @@ configurations in the MOM6-examples repository. - `tc2`: An ALE configuration based on tc1 with tides - `tc2.a`: Use sigma, PPM_H4 and no tides - `tc3`: An open-boundary condition (OBC) test based on `circle_obcs` +- `tc4`: Sponges and initialization using I/O ## Code coverage diff --git a/.testing/trailer.py b/.testing/trailer.py index a483bf9995..64f016275f 100755 --- a/.testing/trailer.py +++ b/.testing/trailer.py @@ -1,100 +1,137 @@ #!/usr/bin/env python +"""Subroutines for Validating the whitespace of the source code.""" import argparse import os import re import sys + def parseCommandLine(): - """ - Parse the command line positional and optional arguments. - This is the highest level procedure invoked from the very end of the script. - """ + """Parse the command line positional and optional arguments. + + This is the highest level procedure invoked from the very end of the + script. + """ + # Arguments + parser = argparse.ArgumentParser( + description='trailer.py checks Fortran files for trailing white ' + 'space.', + epilog='Written by A.Adcroft, 2017.' 
+ ) + parser.add_argument( + 'files_or_dirs', type=str, nargs='+', + metavar='FILE|DIR', + help='Fortran files or director in which to search for Fortran files ' + '(with .f, .f90, .F90 suffixes).''' + ) + parser.add_argument( + '-e', '--exclude_dir', type=str, action='append', + metavar='DIR', + help='''Exclude directories from search that end in DIR.''' + ) + parser.add_argument( + '-l', '--line_length', type=int, default=512, + help='''Maximum allowed length of a line.''' + ) + parser.add_argument( + '-s', '--source_line_length', type=int, default=132, + help='''Maximum allowed length of a source line excluding comments.''' + ) + parser.add_argument( + '-d', '--debug', action='store_true', + help='turn on debugging information.' + ) + args = parser.parse_args() - # Arguments - parser = argparse.ArgumentParser(description='''trailer.py checks Fortran files for trailing white space.''', - epilog='Written by A.Adcroft, 2017.') - parser.add_argument('files_or_dirs', type=str, nargs='+', - metavar='FILE|DIR', - help='''Fortran files or director in which to search for Fortran files (with .f, .f90, .F90 suffixes).''') - parser.add_argument('-e','--exclude_dir', type=str, action='append', - metavar='DIR', - help='''Exclude directories from search that end in DIR.''') - parser.add_argument('-l','--line_length', type=int, default=512, - help='''Maximum allowed length of a line.''') - parser.add_argument('-s','--source_line_length', type=int, default=132, - help='''Maximum allowed length of a source line excluding comments.''') - parser.add_argument('-d','--debug', action='store_true', - help='turn on debugging information.') - args = parser.parse_args() + global debug + debug = args.debug - global debug - debug = args.debug + main(args) - main(args) def main(args): - ''' - Does the actual work - ''' - if (debug): print(args) + """Do the actual work.""" + if (debug): + print(args) - # Process files_or_dirs argument into list of files - all_files = [] - for a in args.files_or_dirs: - if os.path.isfile(a): all_files.append(a) - elif os.path.isdir(a): - for d,s,files in os.walk(a): - ignore = False - if args.exclude_dir is not None: - for e in args.exclude_dir: - if e+'/' in d+'/': ignore = True - if not ignore: - for f in files: - _,ext = os.path.splitext(f) - if ext in ('.f','.F','.f90','.F90'): all_files.append( os.path.join(d,f) ) - else: raise Exception('Argument '+a+' is not a file or directory! Stopping.') - if (debug): print('Found: ',all_files) + # Process files_or_dirs argument into list of files + all_files = [] + for a in args.files_or_dirs: + if os.path.isfile(a): + all_files.append(a) + elif os.path.isdir(a): + for d, s, files in os.walk(a): + ignore = False + if args.exclude_dir is not None: + for e in args.exclude_dir: + if e+'/' in d+'/': + ignore = True + if not ignore: + for f in files: + _, ext = os.path.splitext(f) + if ext in ('.f', '.F', '.f90', '.F90'): + all_files.append(os.path.join(d, f)) + else: + raise Exception('Argument '+a+' is not a file or directory! 
' + 'Stopping.') + if (debug): + print('Found: ', all_files) + + # For each file, check for trailing white space + fail = False + for filename in all_files: + this = scan_file(filename, line_length=args.line_length, + source_line_length=args.source_line_length) + fail = fail or this + if fail: + sys.exit(1) - # For each file, check for trailing white space - fail = False - for filename in all_files: - this = scan_file(filename, line_length=args.line_length, source_line_length=args.source_line_length) - fail = fail or this - if fail: sys.exit(1) def scan_file(filename, line_length=512, source_line_length=132): - '''Scans file for trailing white space''' - def msg(filename,lineno,mesg,line=None): - if line is None: print('%s, line %i: %s'%(filename,lineno,mesg)) - else: print('%s, line %i: %s "%s"'%(filename,lineno,mesg,line)) - white_space_detected = False - tabs_space_detected = False - long_line_detected = False - with open(filename) as file: - trailing_space = re.compile(r'.* +$') - tabs = re.compile(r'.*\t.*') - lineno = 0 - for line in file.readlines(): - lineno += 1 - line = line.replace('\n','') - srcline = line.split('!', 1)[0] # Discard comments - if trailing_space.match(line) is not None: - if debug: print(filename,lineno,line,trailing_space.match(line)) - if len(line.strip())>0: msg(filename,lineno,'Trailing space detected',line) - else: msg(filename,lineno,'Blank line contains spaces') - white_space_detected = True - if tabs.match(line) is not None: - if len(line.strip())>0: msg(filename,lineno,'Tab detected',line) - else: msg(filename,lineno,'Blank line contains tabs') - tabs_space_detected = True - if len(line)>line_length: - if len(line.strip())>0: msg(filename,lineno,'Line length exceeded',line) - else: msg(filename,lineno,'Blank line exceeds line length limit') - long_line_detected = True - if len(srcline)>source_line_length: - msg(filename,lineno,'Non-comment line length exceeded',line) - return white_space_detected or tabs_space_detected or long_line_detected + """Scan file for trailing white space.""" + def msg(filename, lineno, mesg, line=None): + if line is None: + print('%s, line %i: %s' % (filename, lineno, mesg)) + else: + print('%s, line %i: %s "%s"' % (filename, lineno, mesg, line)) + white_space_detected = False + tabs_space_detected = False + long_line_detected = False + with open(filename) as file: + trailing_space = re.compile(r'.* +$') + tabs = re.compile(r'.*\t.*') + lineno = 0 + for line in file.readlines(): + lineno += 1 + line = line.replace('\n', '') + srcline = line.split('!', 1)[0] # Discard comments + if trailing_space.match(line) is not None: + if debug: + print(filename, lineno, line, trailing_space.match(line)) + if len(line.strip()) > 0: + msg(filename, lineno, 'Trailing space detected', line) + else: + msg(filename, lineno, 'Blank line contains spaces') + white_space_detected = True + if tabs.match(line) is not None: + if len(line.strip()) > 0: + msg(filename, lineno, 'Tab detected', line) + else: + msg(filename, lineno, 'Blank line contains tabs') + tabs_space_detected = True + if len(line) > line_length: + if len(line.strip()) > 0: + msg(filename, lineno, 'Line length exceeded', line) + else: + msg(filename, lineno, + 'Blank line exceeds line length limit') + long_line_detected = True + if len(srcline) > source_line_length: + msg(filename, lineno, 'Non-comment line length exceeded', line) + return white_space_detected or tabs_space_detected or long_line_detected + # Invoke parseCommandLine(), the top-level procedure -if __name__ == '__main__': 
parseCommandLine() +if __name__ == '__main__': + parseCommandLine() diff --git a/.travis.yml b/.travis.yml index 22c497f916..c34089ddf6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,10 +12,12 @@ addons: packages: - tcsh pkg-config netcdf-bin libnetcdf-dev libnetcdff-dev gfortran - mpich libmpich-dev - - doxygen graphviz flex bison cmake + - graphviz flex bison cmake - python-numpy python-netcdf4 - - python3 python3-dev python3-venv python3-pip + - python3 python3-dev python3-venv python3-pip python3-sphinx python3-lxml - bc + - perl + - texlive-binaries texlive-base bibtool tex-common texlive-bibtex-extra # Environment variables env: @@ -34,8 +36,12 @@ jobs: # Whitespace - ./.testing/trailer.py -e TEOS10 -l 120 src config_src # API Documentation - - cd docs && doxygen Doxyfile_nortd - - grep -v "config_src/solo_driver/coupler_types.F90" doxygen.log | tee doxy_errors + - perl -e 'print "perl version $^V" . "\n"' + - cd docs && mkdir _build && make nortd DOXYGEN_RELEASE=Release_1_8_13 UPDATEHTMLEQS=Y + # We can tighten up the warnings here. Math im image captions should only generate + # \f warnings. All other latex math should be double escaped (\\) like (\\Phi) for + # html image captions. + - grep "warning:" _build/doxygen_warn_nortd_log.txt | grep -v 'Illegal command f as part of a \\image' | tee doxy_errors - test ! -s doxy_errors - env: @@ -56,7 +62,6 @@ jobs: env: - JOB="x86 Regression testing" - DO_REGRESSION_TESTS=true - - REPORT_COVERAGE=true - MOM_TARGET_SLUG=${TRAVIS_REPO_SLUG} - MOM_TARGET_LOCAL_BRANCH=${TRAVIS_BRANCH} script: @@ -67,18 +72,6 @@ jobs: - time make -k -s test.regressions - make test.summary - - if: NOT type = pull_request - env: - - JOB="Coverage upload" - - REPORT_COVERAGE=true - - DO_REGRESSION_TESTS=false - script: - - cd .testing - - echo 'Build executables...' && echo -en 'travis_fold:start:script.1\\r' - - make build/symmetric/MOM6 - - echo -en 'travis_fold:end:script.1\\r' - - make -k -s run.symmetric - - arch: arm64 env: - JOB="ARM64 verification testing" diff --git a/ac/Makefile.in b/ac/Makefile.in index ce8173e6f1..7be6c5bf2b 100644 --- a/ac/Makefile.in +++ b/ac/Makefile.in @@ -10,19 +10,28 @@ # # The following variables are used by Makefiles generated by mkmf. # -# CC: C compiler -# CXX: C++ compiler -# FC: Fortran compiler (f77 and f90) -# LD: Linker +# CC C compiler +# CXX C++ compiler +# FC Fortran compiler (f77 and f90) +# LD Linker +# AR Archiver # -# CPPDEFS: Preprocessor macros -# CPPFLAGS: C preprocessing flags -# CXXFLAGS: C++ preprocessing flags -# FPPFLAGS: Fortran preprocessing flags +# CPPDEFS Preprocessor macros +# CPPFLAGS C preprocessing flags +# CXXFLAGS C++ preprocessing flags +# FPPFLAGS Fortran preprocessing flags +# +# CFLAGS C compiler flags +# FFLAGS Fortran compiler flags +# LDFLAGS Linker flags + libraries +# ARFLAGS Archiver flags +# +# OTHERFLAGS Additional flags for all languages (C, C++, Fortran) +# OTHER_CFLAGS Optional C flags +# OTHER_CXXFLAGS Optional C++ flags +# OTHER_FFLAGS Optional Fortran flags +# TMPFILES Placeholder for `make clean` deletion (as `make neat`). # -# CFLAGS: C compiler flags -# FFLAGS: Fortran compiler flags -# LDFLAGS: Linker flags + libraries # # NOTES: # - FPPFLAGS and FFLAGS always appear as a pair, and autoconf does not use @@ -33,13 +42,6 @@ # # - LDFLAGS does not distinguish between autoconf's LDFLAGS and LIBS. # It also places both after the executable rather than just LIBS. 
-# -# OTHERFLAGS: Additional flags for all languages (C, C++, Fortran) -# OTHER_CFLAGS: Optional C flags -# OTHER_CXXFLAGS: Optional C++ flags -# OTHER_FFLAGS: Optional Fortran flags -# -# TMPFILES: Placeholder for `make clean` deletion (as `make neat`). FC = @FC@ LD = @FC@ diff --git a/ac/README.md b/ac/README.md index d5a5310ab8..f50275c3a0 100644 --- a/ac/README.md +++ b/ac/README.md @@ -28,25 +28,31 @@ support. The following instructions will allow a new user to quickly create a MOM6 executable for ocean-only simulations. -Each set of instructions is meant to be run from the root directory of the -repository. +Before starting, ensure that all submodules have been updated. +``` +$ git submodule update --init --recursive +``` -A separate Makefile in `ac/deps/` is provided to gather and build any GFDL -dependencies. +Next, fetch the GFDL `mkmf` build tool and build the FMS framework library. + +For new users, a separate Makefile in `./ac/deps/` is provided for this step. ``` $ cd ac/deps $ make -j ``` -This will fetch the `mkmf` tool and build the FMS library. -To build MOM6, first generate the Autoconf `configure` script. +To build MOM6, first generate the Autoconf `configure` script in `./ac`. ``` +$ cd ../.. # Return to the root directory $ cd ac $ autoreconf ``` -Then select your build directory, e.g. `./build`, run the configure script, and -build the model. +Then select your build directory, run the configure script, and build the +model. + +The instructions below build the model in the `./build` directory. ``` +$ cd .. # Return to the root directory $ mkdir -p build $ cd build $ ../ac/configure @@ -54,31 +60,32 @@ $ make -j ``` This will create the MOM6 executable in the build directory. -This executable is only useable for ocean-only simulations, and cannot be used -for coupled modeling. It also requires the necessary experiment configuration -files, such as `input.nml` and `MOM_input`. For more information, consult the -[MOM6-examples wiki](https://github.com/NOAA-GFDL/MOM6-examples/wiki). +The steps above will produce an executable for ocean-only simulations, and +cannot be used for coupled modeling. It also requires the necessary experiment +configuration files, such as `input.nml` and `MOM_input`. For more +information, consult the [MOM6-examples +wiki](https://github.com/NOAA-GFDL/MOM6-examples/wiki). # Build rules The Makefile produced by Autoconf provides the following rules. -``make`` +`make` Build the MOM6 executable. -``make clean`` +`make clean` Delete the executable and any object and module files, but preserve the Autoconf output. -``make distclean`` +`make distclean` Delete all of the files above, as well as any files generated by `./configure`. Note that this will delete the Makefile containing this rule. -``make ac-clean`` +`make ac-clean` Delete all of the files above, including `./configure` and any other files created by `autoreconf`. As with `make distclean`, this will also delete the @@ -124,31 +131,24 @@ For the complete list of settings, run `./configure --help`. # GFDL Dependencies -This section briefly describes the management of GFDL dependencies `mkmf` and -FMS. - -The `configure` script will first check if the compiler and its configured -flags (`FCFLAGS`, `LDFLAGS`, etc.) can find `mkmf` and the FMS library. If -unavailable, then it will search in the local `ac/deps` library. If still -unavailable, then the build will abort. - -Running `make -C ac/deps` will ensure that the libraries are available. 
But if -the user wishes to target an external FMS library, then they should add the -appropriate `FCFLAGS` and `LDFLAGS` to find the library. - -Similar options are provided for `mkmf` with respect to `PATH`, although it -is usually not necessary to use an external `mkmf` script. - -Some configuration options are provided by the `ac/deps` Makefile: +This section briefly describes the management of the `mkmf` and FMS +dependencies. -`PATH_ENV` +When building MOM6, the `configure` script will first check if the compiler and +its configured flags (`FCFLAGS`, `LDFLAGS`, etc.) can locate `mkmf` and the FMS +library. If unavailable, then it will search in the local `ac/deps` library. +If still unavailable, then the build will abort. - This variable will override the value of `PATH` when building the dependencies. +The dependencies are not automatically provided in `ac/deps`. However, running +`make -C ac/deps` will fetch and build them. If the user wishes to target an +external FMS library or `mkmf` tools, then they should set `PATH`, `FCFLAGS` +and `LDFLAGS` so that `configure` can locate them. -`FCFLAGS_ENV` +Exported environment variables such as `FC` or `FCFLAGS` will be passed to the +corresponding `configure` scripts. - Used to override the default Autoconf flags, `-g -O2`. This is useful if, - for example, one wants to build with `-O0` to speed up the build time. +The following configuration options are also provided, which can be used to +specify the git URL and commit of the dependencies. `MKMF_URL` (*default:* https://github.com/NOAA-GFDL/mkmf.git) @@ -157,12 +157,6 @@ Some configuration options are provided by the `ac/deps` Makefile: `FMS_URL` (*default:* https://github.com/NOAA-GFDL/FMS.git) `FMS_COMMIT` (*default:* `2019.01.03`) - - These are used to specify where to check out the source code for each - respective project. - -Additional hooks for FMS builds do not yet exist, but can be added if -necessary. # Known issues / Future development diff --git a/ac/configure.ac b/ac/configure.ac index ee6b76dacb..487230beb8 100644 --- a/ac/configure.ac +++ b/ac/configure.ac @@ -8,6 +8,8 @@ # - We would probably run this inside of a script to avoid the explicit # dependency on git. +AC_PREREQ([2.63]) + AC_INIT( [MOM6], [ ], @@ -46,6 +48,12 @@ AC_ARG_ENABLE([asymmetric], AS_IF([test "$enable_asymmetric" = yes], [MEM_LAYOUT=${srcdir}/config_src/dynamic]) +# Default to solo_driver +DRIVER_DIR=${srcdir}/config_src/solo_driver +AC_ARG_WITH([driver], + AS_HELP_STRING([--with-driver=coupled_driver|solo_driver], [Select directory for driver source code])) +AS_IF([test "x$with_driver" != "x"], + [DRIVER_DIR=${srcdir}/config_src/${with_driver}]) # TODO: Rather than point to a pre-configured header file, autoconf could be # used to configure a header based on a template. @@ -80,18 +88,57 @@ AX_FC_CHECK_MODULE([mpi], # netCDF configuration -AC_PATH_PROG([NC_CONFIG], [nc-config]) -AS_IF([test -n "$NC_CONFIG"], - [CPPFLAGS="$CPPFLAGS -I$($NC_CONFIG --includedir)" - FCFLAGS="$FCFLAGS -I$($NC_CONFIG --includedir)" - LDFLAGS="$LDFLAGS -L$($NC_CONFIG --libdir)"], - [AC_MSG_ERROR([Could not find nc-config.])]) - -AX_FC_CHECK_MODULE([netcdf], - [], [AC_MSG_ERROR([Could not find FMS library.])]) -AX_FC_CHECK_LIB([netcdff], [nf_create], [netcdf], - [], [AC_MSG_ERROR([Could not link netcdff library.])] -) + +# Search for the Fortran netCDF module, fallback to nf-config. 
+AX_FC_CHECK_MODULE([netcdf], [], [ + AS_UNSET([ax_fc_cv_mod_netcdf]) + AC_PATH_PROG([NF_CONFIG], [nf-config]) + AS_IF([test -n "$NF_CONFIG"], [ + AC_SUBST([FCFLAGS], ["$FCFLAGS -I$($NF_CONFIG --includedir)"]) + ], [AC_MSG_ERROR([Could not find nf-config.])] + ) + AX_FC_CHECK_MODULE([netcdf], [], [ + AC_MSG_ERROR([Could not find netcdf module.]) + ]) +]) + +# FMS may invoke netCDF C calls, so we link to libnetcdf. +AC_LANG_PUSH([C]) +AC_CHECK_LIB([netcdf], [nc_create], [], [ + AS_UNSET([ac_cv_lib_netcdf_nc_create]) + AC_PATH_PROG([NC_CONFIG], [nc-config]) + AS_IF([test -n "$NC_CONFIG"], [ + AC_SUBST([LDFLAGS], + ["$LDFLAGS -L$($NC_CONFIG --libdir)"] + ) + ], [AC_MSG_ERROR([Could not find nc-config.])] + ) + AC_CHECK_LIB([netcdf], [nc_create], [], [ + AC_MSG_ERROR([Could not find libnetcdf.]) + ]) +]) +AC_LANG_POP([C]) + +# NOTE: We test for nf_create, rather than nf90_create, because AX_FC_CHECK_LIB +# is currently not yet able to properly probe inside modules. +# Testing of the nf90_* functions will require a macro update. +# NOTE: nf-config does not have a --libdir flag, so we use --prefix and assume +# that libraries are in the $prefix/lib directory. + +# Link to Fortran netCDF library, netcdff +AX_FC_CHECK_LIB([netcdff], [nf_create], [], [], [ + AS_UNSET([ax_fc_cv_lib_netcdff_nf_create]) + AC_PATH_PROG([NF_CONFIG], [nf-config]) + AS_IF([test -n "$NF_CONFIG"], [ + AC_SUBST([LDFLAGS], + ["$LDFLAGS -L$($NF_CONFIG --prefix)/lib"] + ) + ], [AC_MSG_ERROR([Could not find nf-config.])] + ) + AX_FC_CHECK_LIB([netcdff], [nf_create], [], [], [ + AC_MSG_ERROR([Could not find libnetcdff.]) + ]) +]) # Force 8-byte reals @@ -102,11 +149,22 @@ AS_IF( # OpenMP configuration -AC_OPENMP + +# NOTE: AC_OPENMP fails on `Fortran` for Autoconf <2.69 due to a m4 bug. +# For older versions, we test against CC and use the result for FC. +m4_version_prereq([2.69], [AC_OPENMP], [ + AC_LANG_PUSH([C]) + AC_OPENMP + AC_LANG_POP([C]) + OPENMP_FCFLAGS="$OPENMP_CFLAGS" +]) + +# NOTE: Only apply OpenMP flags if explicitly enabled. AS_IF( - [test "$enable_openmp" = yes], - [FCFLAGS="$FCFLAGS $OPENMP_FCFLAGS" - LDFLAGS="$LDFLAGS $OPENMP_FCFLAGS"]) + [test "$enable_openmp" = yes], [ + FCFLAGS="$FCFLAGS $OPENMP_FCFLAGS" + LDFLAGS="$LDFLAGS $OPENMP_FCFLAGS" +]) # FMS support @@ -158,10 +216,10 @@ AS_IF([test -z "$MKMF"], [ AC_CONFIG_COMMANDS([path_names], [list_paths -l \ ${srcdir}/src \ - ${srcdir}/config_src/solo_driver \ ${srcdir}/config_src/ext* \ + ${DRIVER_DIR} \ ${MEM_LAYOUT} -], [MEM_LAYOUT=$MEM_LAYOUT]) +], [MEM_LAYOUT=$MEM_LAYOUT DRIVER_DIR=$DRIVER_DIR]) AC_CONFIG_COMMANDS([Makefile.mkmf], diff --git a/ac/deps/Makefile b/ac/deps/Makefile index 91fe343047..0ed4fd19a7 100644 --- a/ac/deps/Makefile +++ b/ac/deps/Makefile @@ -1,22 +1,10 @@ SHELL = bash -.SUFFIXES: - -# FMS build configuration -PATH_ENV ?= -FCFLAGS_ENV ?= - -# Only set FCFLAGS if an argument is provided. -FMS_FCFLAGS = -ifneq ($(FCFLAGS_ENV),) - FMS_FCFLAGS := FCFLAGS="$(FCFLAGS_ENV)" -endif +# Disable implicit rules +.SUFFIXES: -# Ditto for path -FMS_PATH = -ifneq ($(PATH_ENV),) - FMS_PATH := PATH="$(PATH_ENV)" -endif +# Disable implicit variables +MAKEFLAGS += -R # mkmf, list_paths (GFDL build toolchain) @@ -44,7 +32,6 @@ FMS_SOURCE = $(call SOURCE,fms/src) .PHONY: all all: bin/mkmf bin/list_paths lib/libFMS.a - #--- # mkmf checkout @@ -65,6 +52,7 @@ mkmf: # This is a flawed approach, since module files are untracked and could be # handled more safely, but this is adequate for now. + # TODO: track *.mod copy? 
lib/libFMS.a: fms/build/libFMS.a fms/build/Makefile mkdir -p {lib,include} @@ -76,13 +64,10 @@ fms/build/libFMS.a: fms/build/Makefile make -C fms/build libFMS.a -# TODO: Include FC, CC, CFLAGS? -fms/build/Makefile: FMS_ENV=$(FMS_PATH) $(FMS_FCFLAGS) - fms/build/Makefile: Makefile.fms.in fms/src/configure bin/mkmf bin/list_paths mkdir -p fms/build cp Makefile.fms.in fms/src/Makefile.in - cd $(@D) && $(FMS_ENV) ../src/configure --srcdir=../src + cd $(@D) && ../src/configure --srcdir=../src # TODO: Track m4 macros? @@ -100,7 +85,6 @@ fms/src: clean: rm -rf fms/build lib include bin - .PHONY: distclean distclean: clean rm -rf fms mkmf diff --git a/ac/deps/Makefile.fms.in b/ac/deps/Makefile.fms.in index 694ad8e0b0..0286d94b58 100644 --- a/ac/deps/Makefile.fms.in +++ b/ac/deps/Makefile.fms.in @@ -1,46 +1,58 @@ -# Makefile template for MOM6 +# Makefile template for autoconf builds using mkmf # -# Previously this would have been generated by mkmf using a template file. +# Compiler flags are configured by autoconf's configure script. # -# The proposed autoconf build inverts this approach by constructing the -# information previously stored in the mkmf template, such as compiler names -# and flags, and importing the un-templated mkmf output for its rules and -# dependencies. +# Source code dependencies are configured by mkmf and list_paths, specified in +# the `Makefile.mkmf` file. # -# While this approach does not eliminate our dependency on mkmf, it does -# promises to eliminate our reliance on platform-specific templates, and -# instead allows us to provide a configure script for determining our compilers -# and flags. As a last resort, we provide hooks to override such settings. - -# NOTE: mkmf conventions are close, but not identical, to autoconf. -# -# CC: C compiler -# CXX: C++ compiler -# FC: Fortran compiler (f77 and f90) -# LD: Linker -# -# CPPDEFS: Preprocessor macros -# CPPFLAGS: C preprocessing flags -# CXXFLAGS: C++ preprocessing flags -# FPPFLAGS: Fortran preprocessing flags -# -# CFLAGS: C compiler flags -# FFLAGS: Fortran compiler flags (f77 and f90) -# LDFLAGS: Linker flags -# -# OTHERFLAGS: Additional flags for all languages (C, C++, Fortran) -# OTHER_CFLAGS: Optional C flags -# OTHER_CXXFLAGS: Optional C++ flags -# OTHER_FFLAGS: Optional Fortran flags +# mkmf conventions are close, but not identical, to autoconf. We attempt to +# map the autoconf variables to the mkmf variables. +# +# The following variables are used by Makefiles generated by mkmf. +# +# CC C compiler +# CXX C++ compiler +# FC Fortran compiler (f77 and f90) +# LD Linker +# AR Archiver +# +# CPPDEFS Preprocessor macros +# CPPFLAGS C preprocessing flags +# CXXFLAGS C++ preprocessing flags +# FPPFLAGS Fortran preprocessing flags +# +# CFLAGS C compiler flags +# FFLAGS Fortran compiler flags +# LDFLAGS Linker flags + libraries +# ARFLAGS Archiver flags +# +# OTHERFLAGS Additional flags for all languages (C, C++, Fortran) +# OTHER_CFLAGS Optional C flags +# OTHER_CXXFLAGS Optional C++ flags +# OTHER_FFLAGS Optional Fortran flags +# TMPFILES Placeholder for `make clean` deletion (as `make neat`). +# +# +# NOTES: +# - FPPFLAGS and FFLAGS always appear as a pair, and autoconf does not use +# FPPFLAGS, so FPPFLAGS does not serve much purpose. +# +# - mkmf's FFLAGS does not distinguish between autoconf's fixed-format +# FFLAGS and free-format FCFLAGS. +# +# - LDFLAGS does not distinguish between autoconf's LDFLAGS and LIBS. +# It also places both after the executable rather than just LIBS. 
CC = @CC@ FC = @FC@ LD = @FC@ +AR = @AR@ CPPDEFS = @DEFS@ CPPFLAGS = @CPPFLAGS@ FFLAGS = @FCFLAGS@ LDFLAGS = @LDFLAGS@ +ARFLAGS = @ARFLAGS@ # Gather modulefiles TMPFILES = $(wildcard *.mod) diff --git a/ac/deps/configure.fms.ac b/ac/deps/configure.fms.ac index 1d66194c81..bf899126cc 100644 --- a/ac/deps/configure.fms.ac +++ b/ac/deps/configure.fms.ac @@ -1,4 +1,6 @@ # Autoconf configuration +AC_PREREQ([2.63]) + AC_INIT( [FMS], [ ], @@ -13,13 +15,14 @@ AC_PROG_CC AX_MPI CC=$MPICC + # FMS configuration -# Linux and OSX have a gettid system call, but it is not implemented in older +# Linux and macOS have a gettid system call, but it is not implemented in older # glibc implementations. When unavailable, a native syscall is used. # # On Linux, this is defined in unistd.h as __NR_gettid, and FMS is hard-coded -# to use this value. In OS X, this is defined in sys/syscall.h as SYS_gettid, +# to use this value. In macOS, this is defined in sys/syscall.h as SYS_gettid, # so we override this macro if __NR_gettid is unavailable. AC_CHECK_FUNCS([gettid], [], [ AC_MSG_CHECKING([if __NR_gettid must be redefined]) @@ -71,18 +74,39 @@ AC_DEFINE([use_libMPI]) # netCDF configuration -AC_PATH_PROG([NC_CONFIG], [nc-config]) -AS_IF([test -n "$NC_CONFIG"], - [CPPFLAGS="$CPPFLAGS -I$($NC_CONFIG --includedir)" - FCFLAGS="$FCFLAGS -I$($NC_CONFIG --includedir)" - LDFLAGS="$LDFLAGS -L$($NC_CONFIG --libdir)"], - [AC_MSG_ERROR([Could not find nc-config.])]) - -AX_FC_CHECK_MODULE([netcdf], - [], [AC_MSG_ERROR([Could not find FMS library.])]) -AX_FC_CHECK_LIB([netcdff], [nf_create], [netcdf], - [], [AC_MSG_ERROR([Could not link netcdff library.])] -) + +# Check for netcdf.h header function declarations. +# If unavailable, then try to invoke nc-create. +AC_LANG_PUSH([C]) +AC_CHECK_HEADERS([netcdf.h], [], [ + AS_UNSET([ac_cv_header_netcdf_h]) + AC_PATH_PROG([NC_CONFIG], [nc-config]) + AS_IF([test -n "$NC_CONFIG"], [ + AC_SUBST([CPPFLAGS], ["$CPPFLAGS -I$($NC_CONFIG --includedir)"]) + ], + [AC_MSG_ERROR([Could not find nc-config.])] + ) + AC_CHECK_HEADERS([netcdf.h], [], [ + AC_MSG_ERROR([Could not find netcdf.h]) + ]) +]) +AC_LANG_POP([C]) + +# Search for the Fortran netCDF module, fallback to nf-config. +AX_FC_CHECK_MODULE([netcdf], [], [ + AS_UNSET([ax_fc_cv_mod_netcdf]) + AC_PATH_PROG([NF_CONFIG], [nf-config]) + AS_IF([test -n "$NF_CONFIG"], [ + AC_SUBST([FCFLAGS], ["$FCFLAGS -I$($NF_CONFIG --includedir)"]) + ], + [AC_MSG_ERROR([Could not find nf-config.])] + ) + AX_FC_CHECK_MODULE([netcdf], [], [ + AC_MSG_ERROR([Could not find netcdf module.]) + ]) +]) + +# FMS requires this macro to signal netCDF support. AC_DEFINE([use_netCDF]) @@ -98,15 +122,31 @@ AS_IF( # OpenMP configuration -AC_OPENMP + +# NOTE: AC_OPENMP fails in Autoconf <2.69 when LANG is Fortran or Fortran 77. +# For older versions, we test against CC and use the result for FC. +m4_version_prereq([2.69], [AC_OPENMP], [ + AC_LANG_PUSH([C]) + AC_OPENMP + AC_LANG_POP([C]) + OPENMP_FCFLAGS="$OPENMP_CFLAGS" +]) + +# NOTE: Only apply OpenMP flags if explicitly enabled. AS_IF( - [test "$enable_openmp" = yes], - [FCFLAGS="$FCFLAGS $OPENMP_FCFLAGS" - LDFLAGS="$LDFLAGS $OPENMP_FCFLAGS"]) + [test "$enable_openmp" = yes], [ + FCFLAGS="$FCFLAGS $OPENMP_FCFLAGS" + LDFLAGS="$LDFLAGS $OPENMP_FCFLAGS" +]) + +# Unlimited line length (2.67) +# AC_FC_LINE_LENGTH was added in 2.67. 
+m4_version_prereq([2.67], + [AC_FC_LINE_LENGTH([unlimited])], + [AX_FC_LINE_LENGTH([unlimited])] +) -# Unlimited line length -AC_FC_LINE_LENGTH([unlimited]) # Allow invaliz BOZ assignment AX_FC_ALLOW_INVALID_BOZ @@ -141,12 +181,22 @@ AS_IF([test -z "$MKMF"], [ # MKMF commands AC_CONFIG_COMMANDS([path_names], [${LIST_PATHS} -l ${srcdir}], - [LIST_PATHS=${LIST_PATHS}]) + [LIST_PATHS=${LIST_PATHS}] +) AC_CONFIG_COMMANDS([mkmf], [${MKMF} -p libFMS.a -m Makefile.mkmf path_names], - [MKMF=${MKMF}]) + [MKMF=${MKMF}] +) + + +# Autoconf does not configure the archiver (ar), as it is handled by Automake. +# TODO: Properly configure this tool. For now, we hard-set this to `ar`. +AR=ar +ARFLAGS=rv +AC_SUBST(AR) +AC_SUBST(ARFLAGS) # Prepare output diff --git a/ac/deps/m4/ax_fc_cray_pointer.m4 b/ac/deps/m4/ax_fc_cray_pointer.m4 index a9f5d9bbe3..57ed186afa 100644 --- a/ac/deps/m4/ax_fc_cray_pointer.m4 +++ b/ac/deps/m4/ax_fc_cray_pointer.m4 @@ -19,8 +19,8 @@ dnl AC_DEFUN([AX_FC_CRAY_POINTER], [ AC_LANG_ASSERT([Fortran]) AC_MSG_CHECKING([for $FC option to support Cray pointers]) - AC_CACHE_VAL([ac_cv_prog_fc_cray_ptr], [ - ac_cv_prog_fc_cray_ptr='unknown' + AC_CACHE_VAL([ac_cv_fc_cray_ptr], [ + ac_cv_fc_cray_ptr='unknown' ac_save_FCFLAGS=$FCFLAGS for ac_option in none -fcray-pointer -Mcray=pointer; do test "$ac_option" != none && FCFLAGS="$ac_save_FCFLAGS $ac_option" @@ -29,21 +29,21 @@ AC_DEFUN([AX_FC_CRAY_POINTER], [ integer aptr(2) pointer (iptr, aptr) ])], - [ac_cv_prog_fc_cray_ptr=$ac_option], + [ac_cv_fc_cray_ptr=$ac_option], ) FCFLAGS=$ac_save_FCFLAGS - AS_IF([test "$ac_cv_prog_fc_cray_ptr" != unknown], [break]) + AS_IF([test "$ac_cv_fc_cray_ptr" != unknown], [break]) done ]) - AS_CASE([ac_cv_prog_fc_cray_ptr], + AS_CASE([ac_cv_fc_cray_ptr], [none], [AC_MSG_RESULT([none_needed])], [unknown], [AC_MSG_RESULT([unsupported])], - [AC_MSG_RESULT([$ac_cv_prog_fc_cray_ptr])] + [AC_MSG_RESULT([$ac_cv_fc_cray_ptr])] ) - AS_IF([test "$ac_cv_prog_fc_cray_ptr" != unknown], [ + AS_IF([test "$ac_cv_fc_cray_ptr" != unknown], [ m4_default([$1], [ - AS_IF([test "$ac_cv_prog_fc_cray_ptr" != none], - [FCFLAGS="$FCFLAGS $ac_cv_prog_fc_cray_ptr"] + AS_IF([test "$ac_cv_fc_cray_ptr" != none], + [FCFLAGS="$FCFLAGS $ac_cv_fc_cray_ptr"] ) ])], [m4_default([$2], [AC_MSG_ERROR(["$FC does not support Cray pointers"])])] diff --git a/ac/deps/m4/ax_fc_line_length.m4 b/ac/deps/m4/ax_fc_line_length.m4 new file mode 100644 index 0000000000..97271da1f6 --- /dev/null +++ b/ac/deps/m4/ax_fc_line_length.m4 @@ -0,0 +1,101 @@ +# AX_FC_LINE_LENGTH([LENGTH], [ACTION-IF-SUCCESS], +# [ACTION-IF-FAILURE = FAILURE]) +# ------------------------------------------------ +# This is a backport of the AC_FC_LINE_LENGTH macro in Autoconf 2.67 and newer. +# Comments below are from the Autoconf 2.69 implementation. +# +# Look for a compiler flag to make the Fortran (FC) compiler accept long lines +# in the current (free- or fixed-format) source code, and adds it to FCFLAGS. +# The optional LENGTH may be 80, 132 (default), or `unlimited' for longer +# lines. Note that line lengths above 250 columns are not portable, and some +# compilers (hello ifort) do not accept more than 132 columns at least for +# fixed format. Call ACTION-IF-SUCCESS (defaults to nothing) if successful +# (i.e. can compile code using new extension) and ACTION-IF-FAILURE (defaults +# to failing with an error message) if not. (Defined via DEFUN_ONCE to +# prevent flag from being added to FCFLAGS multiple times.) 
+# You should call AC_FC_FREEFORM or AC_FC_FIXEDFORM to set the desired format +# prior to using this macro. +# +# The known flags are: +# -f{free,fixed}-line-length-N with N 72, 80, 132, or 0 or none for none. +# -ffree-line-length-none: GNU gfortran +# -ffree-line-length-huge: g95 (also -ffixed-line-length-N as above) +# -qfixed=132 80 72: IBM compiler (xlf) +# -Mextend: Cray +# -132 -80 -72: Intel compiler (ifort) +# Needs to come before -extend_source because ifort +# accepts that as well with an optional parameter and +# doesn't fail but only warns about unknown arguments. +# -extend_source: SGI compiler +# -W, -WNN (132, 80, 72): Absoft Fortran +# +es, +extend_source: HP Fortran (254 in either form, default is 72 fixed, +# 132 free) +# -w, (-)-wide: Lahey/Fujitsu Fortran (255 cols in fixed form) +# -e: Sun Fortran compiler (132 characters) +# -132: NAGWare +# -72, -f, -Wf,-f: f2c (a weak form of "free-form" and long lines). +# /XLine: Open Watcom + +AC_DEFUN_ONCE([AX_FC_LINE_LENGTH], [ + AC_LANG_ASSERT([Fortran]) + m4_case(m4_default([$1], [132]), + [unlimited], [ + ac_fc_line_len_string=unlimited + ac_fc_line_len=0 + ac_fc_line_length_test=' + subroutine longer_than_132(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,'\ +'arg9,arg10,arg11,arg12,arg13,arg14,arg15,arg16,arg17,arg18,arg19)' + ], + [132], [ + ac_fc_line_len=132 + ac_fc_line_length_test=' + subroutine longer_than_80(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,arg9,'\ +'arg10)' + ], + [80], [ + ac_fc_line_len=80 + ac_fc_line_length_test=' + subroutine longer_than_72(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,arg9)' + ], + [m4_warning([Invalid length argument `$1'])] + ) + : ${ac_fc_line_len_string=$ac_fc_line_len} + AC_MSG_CHECKING([for Fortran flag needed to accept $ac_fc_line_len_string column source lines]) + AC_CACHE_VAL([ac_cv_fc_line_length], [ + ac_cv_fc_line_length=unknown + ac_save_FCFLAGS=$FCFLAGS + for ac_flag in none \ + -ffree-line-length-none \ + -ffixed-line-length-none \ + -ffree-line-length-huge \ + -ffree-line-length-$ac_fc_line_len \ + -ffixed-line-length-$ac_fc_line_len \ + -qfixed=$ac_fc_line_len \ + -Mextend \ + -$ac_fc_line_len \ + -extend_source \ + -W$ac_fc_line_len \ + -W +extend_source +es -wide --wide -w -e -f -Wf,-f -xline + do + test "$ac_flag" != none && FCFLAGS="$ac_save_FCFLAGS $ac_flag" + AC_COMPILE_IFELSE([$ac_fc_line_length_test + end subroutine + ], [ac_cv_fc_line_length=$ac_flag] + ) + FCFLAGS=$ac_save_FCFLAGS + dnl TODO: Remove conftest.{err,$ac_objext,$ac_ext} ?? 
+ AS_IF([test "$ac_cv_fc_line_length" != unknown], [break]) + done + ]) + AC_MSG_RESULT([$ac_cv_fc_line_length]) + AS_IF([test "$ac_cv_fc_line_length" != unknown], [ + m4_default([$2], [ + AS_IF([test "$ac_cv_fc_line_length" != none], [ + FCFLAGS="$FCFLAGS $ac_cv_fc_line_length" + ]) + ])], [ + m4_default([$3], [ + AC_MSG_ERROR([Fortran does not accept long source lines], 77) + ]) + ]) +]) diff --git a/ac/deps/m4/ax_mpi.m4 b/ac/deps/m4/ax_mpi.m4 index ecce2e141a..3d9966a19d 100644 --- a/ac/deps/m4/ax_mpi.m4 +++ b/ac/deps/m4/ax_mpi.m4 @@ -67,7 +67,7 @@ AU_ALIAS([ACX_MPI], [AX_MPI]) AC_DEFUN([AX_MPI], [ -AC_PREREQ(2.50) dnl for AC_LANG_CASE +AC_PREREQ([2.50]) dnl for AC_LANG_CASE AC_LANG_CASE([C], [ AC_REQUIRE([AC_PROG_CC]) @@ -135,16 +135,16 @@ if test x = x"$MPILIBS"; then AC_CHECK_LIB(mpich, MPI_Init, [MPILIBS="-lmpich"]) fi -dnl We have to use AC_TRY_COMPILE and not AC_CHECK_HEADER because the +dnl We have to use AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])],[],[]) and not AC_CHECK_HEADER because the dnl latter uses $CPP, not $CC (which may be mpicc). AC_LANG_CASE([C], [if test x != x"$MPILIBS"; then AC_MSG_CHECKING([for mpi.h]) - AC_TRY_COMPILE([#include ],[],[AC_MSG_RESULT(yes)], [MPILIBS="" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include ]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS="" AC_MSG_RESULT(no)]) fi], [C++], [if test x != x"$MPILIBS"; then AC_MSG_CHECKING([for mpi.h]) - AC_TRY_COMPILE([#include ],[],[AC_MSG_RESULT(yes)], [MPILIBS="" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include ]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS="" AC_MSG_RESULT(no)]) fi], [Fortran 77], [if test x != x"$MPILIBS"; then diff --git a/ac/m4/ax_fc_check_lib.m4 b/ac/m4/ax_fc_check_lib.m4 index c0accab6cd..a7f848cd60 100644 --- a/ac/m4/ax_fc_check_lib.m4 +++ b/ac/m4/ax_fc_check_lib.m4 @@ -18,7 +18,7 @@ dnl library with different -L flags, or perhaps other ld configurations. dnl dnl Results are cached in the ax_fc_cv_lib_LIBRARY_FUNCTION variable. dnl -AC_DEFUN([AX_FC_CHECK_LIB],[dnl +AC_DEFUN([AX_FC_CHECK_LIB],[ AS_VAR_PUSHDEF([ax_fc_Lib], [ax_fc_cv_lib_$1_$2]) m4_ifval([$6], [ax_fc_lib_msg_LDFLAGS=" with $6"], @@ -29,14 +29,15 @@ AC_DEFUN([AX_FC_CHECK_LIB],[dnl LDFLAGS="$6 $LDFLAGS" ax_fc_check_lib_save_LIBS=$LIBS LIBS="-l$1 $7 $LIBS" - AS_IF([test -n $3], + AS_IF([test -n "$3"], [ax_fc_use_mod="use $3"], [ax_fc_use_mod=""]) - AC_LINK_IFELSE([ - AC_LANG_PROGRAM([], [dnl + AC_LINK_IFELSE([dnl +dnl Begin 7-column code block +AC_LANG_PROGRAM([], [dnl $ax_fc_use_mod - call $2]dnl - ) + call $2])dnl +dnl End code block ], [AS_VAR_SET([ax_fc_Lib], [yes])], [AS_VAR_SET([ax_fc_Lib], [no])] diff --git a/ac/m4/ax_mpi.m4 b/ac/m4/ax_mpi.m4 index ecce2e141a..3d9966a19d 100644 --- a/ac/m4/ax_mpi.m4 +++ b/ac/m4/ax_mpi.m4 @@ -67,7 +67,7 @@ AU_ALIAS([ACX_MPI], [AX_MPI]) AC_DEFUN([AX_MPI], [ -AC_PREREQ(2.50) dnl for AC_LANG_CASE +AC_PREREQ([2.50]) dnl for AC_LANG_CASE AC_LANG_CASE([C], [ AC_REQUIRE([AC_PROG_CC]) @@ -135,16 +135,16 @@ if test x = x"$MPILIBS"; then AC_CHECK_LIB(mpich, MPI_Init, [MPILIBS="-lmpich"]) fi -dnl We have to use AC_TRY_COMPILE and not AC_CHECK_HEADER because the +dnl We have to use AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])],[],[]) and not AC_CHECK_HEADER because the dnl latter uses $CPP, not $CC (which may be mpicc). 
AC_LANG_CASE([C], [if test x != x"$MPILIBS"; then AC_MSG_CHECKING([for mpi.h]) - AC_TRY_COMPILE([#include ],[],[AC_MSG_RESULT(yes)], [MPILIBS="" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include ]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS="" AC_MSG_RESULT(no)]) fi], [C++], [if test x != x"$MPILIBS"; then AC_MSG_CHECKING([for mpi.h]) - AC_TRY_COMPILE([#include ],[],[AC_MSG_RESULT(yes)], [MPILIBS="" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include ]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS="" AC_MSG_RESULT(no)]) fi], [Fortran 77], [if test x != x"$MPILIBS"; then diff --git a/config_src/coupled_driver/MOM_surface_forcing_gfdl.F90 b/config_src/coupled_driver/MOM_surface_forcing_gfdl.F90 index 7075fb7c10..bb89c4e85e 100644 --- a/config_src/coupled_driver/MOM_surface_forcing_gfdl.F90 +++ b/config_src/coupled_driver/MOM_surface_forcing_gfdl.F90 @@ -5,10 +5,14 @@ module MOM_surface_forcing_gfdl !#CTRL# use MOM_controlled_forcing, only : apply_ctrl_forcing, register_ctrl_forcing_restarts !#CTRL# use MOM_controlled_forcing, only : controlled_forcing_init, controlled_forcing_end !#CTRL# use MOM_controlled_forcing, only : ctrl_forcing_CS -use MOM_coms, only : reproducing_sum +use MOM_coms, only : reproducing_sum, field_chksum use MOM_constants, only : hlv, hlf +use MOM_coupler_types, only : coupler_2d_bc_type, coupler_type_write_chksums +use MOM_coupler_types, only : coupler_type_initialized, coupler_type_spawn +use MOM_coupler_types, only : coupler_type_copy_data use MOM_cpu_clock, only : cpu_clock_id, cpu_clock_begin, cpu_clock_end use MOM_cpu_clock, only : CLOCK_SUBCOMPONENT +use MOM_data_override, only : data_override_init, data_override use MOM_diag_mediator, only : diag_ctrl, safe_alloc_ptr, time_type use MOM_domains, only : pass_vector, pass_var, fill_symmetric_edges use MOM_domains, only : AGRID, BGRID_NE, CGRID_NE, To_All @@ -21,7 +25,9 @@ module MOM_surface_forcing_gfdl use MOM_forcing_type, only : allocate_mech_forcing, deallocate_mech_forcing use MOM_get_input, only : Get_MOM_Input, directories use MOM_grid, only : ocean_grid_type -use MOM_io, only : slasher, write_version_number, MOM_read_data +use MOM_interpolate, only : init_external_field, time_interp_external +use MOM_interpolate, only : time_interp_external_init +use MOM_io, only : slasher, write_version_number, MOM_read_data, stdout use MOM_restart, only : register_restart_field, restart_init, MOM_restart_CS use MOM_restart, only : restart_init_end, save_restart, restore_state use MOM_string_functions, only : uppercase @@ -31,15 +37,6 @@ module MOM_surface_forcing_gfdl use user_revise_forcing, only : user_alter_forcing, user_revise_forcing_init use user_revise_forcing, only : user_revise_forcing_CS -use coupler_types_mod, only : coupler_2d_bc_type, coupler_type_write_chksums -use coupler_types_mod, only : coupler_type_initialized, coupler_type_spawn -use coupler_types_mod, only : coupler_type_copy_data -use data_override_mod, only : data_override_init, data_override -use fms_mod, only : stdout -use mpp_mod, only : mpp_chksum -use time_interp_external_mod, only : init_external_field, time_interp_external -use time_interp_external_mod, only : time_interp_external_init - implicit none ; private #include @@ -319,8 +316,7 @@ subroutine convert_IOB_to_fluxes(IOB, fluxes, index_bounds, Time, valid_time, G, if ((.not.coupler_type_initialized(fluxes%tr_fluxes)) .and. 
& coupler_type_initialized(IOB%fluxes)) & - call coupler_type_spawn(IOB%fluxes, fluxes%tr_fluxes, & - (/is,is,ie,ie/), (/js,js,je,je/)) + call coupler_type_spawn(IOB%fluxes, fluxes%tr_fluxes, (/is,is,ie,ie/), (/js,js,je,je/)) ! It might prove valuable to use the same array extents as the rest of the ! ocean model, rather than using haloless arrays, in which case the last line ! would be: ( (/isd,is,ie,ied/), (/jsd,js,je,jed/)) @@ -350,7 +346,7 @@ subroutine convert_IOB_to_fluxes(IOB, fluxes, index_bounds, Time, valid_time, G, ! Salinity restoring logic if (CS%restore_salt) then - call time_interp_external(CS%id_srestore,Time,data_restore) + call time_interp_external(CS%id_srestore, Time, data_restore) ! open_ocn_mask indicates where to restore salinity (1 means restore, 0 does not) open_ocn_mask(:,:) = 1.0 if (CS%mask_srestore_under_ice) then ! Do not restore under sea-ice @@ -407,7 +403,7 @@ subroutine convert_IOB_to_fluxes(IOB, fluxes, index_bounds, Time, valid_time, G, ! SST restoring logic if (CS%restore_temp) then - call time_interp_external(CS%id_trestore,Time,data_restore) + call time_interp_external(CS%id_trestore, Time, data_restore) do j=js,je ; do i=is,ie delta_sst = data_restore(i,j)- sfc_state%SST(i,j) delta_sst = sign(1.0,delta_sst)*min(abs(delta_sst),CS%max_delta_trestore) @@ -1121,28 +1117,27 @@ subroutine apply_flux_adjustments(G, US, CS, Time, fluxes) isc = G%isc; iec = G%iec ; jsc = G%jsc; jec = G%jec - overrode_h = .false. - call data_override('OCN', 'hflx_adj', temp_at_h(isc:iec,jsc:jec), Time, override=overrode_h) + call data_override(G%Domain, 'hflx_adj', temp_at_h, Time, override=overrode_h, & + scale=US%W_m2_to_QRZ_T) if (overrode_h) then ; do j=jsc,jec ; do i=isc,iec - fluxes%heat_added(i,j) = fluxes%heat_added(i,j) + US%W_m2_to_QRZ_T*temp_at_h(i,j)* G%mask2dT(i,j) + fluxes%heat_added(i,j) = fluxes%heat_added(i,j) + temp_at_h(i,j)* G%mask2dT(i,j) enddo ; enddo ; endif ! Not needed? ! if (overrode_h) call pass_var(fluxes%heat_added, G%Domain) - overrode_h = .false. - call data_override('OCN', 'sflx_adj', temp_at_h(isc:iec,jsc:jec), Time, override=overrode_h) + call data_override(G%Domain, 'sflx_adj', temp_at_h, Time, override=overrode_h, & + scale=US%kg_m2s_to_RZ_T) if (overrode_h) then ; do j=jsc,jec ; do i=isc,iec - fluxes%salt_flux_added(i,j) = fluxes%salt_flux_added(i,j) + & - US%kg_m2s_to_RZ_T * temp_at_h(i,j)* G%mask2dT(i,j) + fluxes%salt_flux_added(i,j) = fluxes%salt_flux_added(i,j) + temp_at_h(i,j) * G%mask2dT(i,j) enddo ; enddo ; endif ! Not needed? ! if (overrode_h) call pass_var(fluxes%salt_flux_added, G%Domain) - overrode_h = .false. - call data_override('OCN', 'prcme_adj', temp_at_h(isc:iec,jsc:jec), Time, override=overrode_h) + call data_override(G%Domain, 'prcme_adj', temp_at_h, Time, override=overrode_h, & + scale=US%kg_m2s_to_RZ_T) if (overrode_h) then ; do j=jsc,jec ; do i=isc,iec - fluxes%vprec(i,j) = fluxes%vprec(i,j) + US%kg_m2s_to_RZ_T * temp_at_h(i,j)* G%mask2dT(i,j) + fluxes%vprec(i,j) = fluxes%vprec(i,j) + temp_at_h(i,j)* G%mask2dT(i,j) enddo ; enddo ; endif ! Not needed? ! if (overrode_h) call pass_var(fluxes%vprec, G%Domain) end subroutine apply_flux_adjustments @@ -1174,8 +1169,8 @@ subroutine apply_force_adjustments(G, US, CS, Time, forces) tempx_at_h(:,:) = 0.0 ; tempy_at_h(:,:) = 0.0 ! Either reads data or leaves contents unchanged overrode_x = .false. ; overrode_y = .false. 
- call data_override('OCN', 'taux_adj', tempx_at_h(isc:iec,jsc:jec), Time, override=overrode_x) - call data_override('OCN', 'tauy_adj', tempy_at_h(isc:iec,jsc:jec), Time, override=overrode_y) + call data_override(G%Domain, 'taux_adj', tempx_at_h, Time, override=overrode_x, scale=Pa_conversion) + call data_override(G%Domain, 'tauy_adj', tempy_at_h, Time, override=overrode_y, scale=Pa_conversion) if (overrode_x .or. overrode_y) then if (.not. (overrode_x .and. overrode_y)) call MOM_error(FATAL,"apply_flux_adjustments: "//& @@ -1190,8 +1185,8 @@ subroutine apply_force_adjustments(G, US, CS, Time, forces) if (rDlon > 0.) rDlon = 1. / rDlon cosA = dLonDx * rDlon sinA = dLonDy * rDlon - zonal_tau = Pa_conversion * tempx_at_h(i,j) - merid_tau = Pa_conversion * tempy_at_h(i,j) + zonal_tau = tempx_at_h(i,j) + merid_tau = tempy_at_h(i,j) tempx_at_h(i,j) = cosA * zonal_tau - sinA * merid_tau tempy_at_h(i,j) = sinA * zonal_tau + cosA * merid_tau enddo ; enddo @@ -1486,7 +1481,7 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, wind_stagger) enddo ; enddo endif - call time_interp_external_init + call time_interp_external_init() ! Optionally read a x-y gustiness field in place of a global constant. call get_param(param_file, mdl, "READ_GUST_2D", CS%read_gust_2d, & @@ -1554,11 +1549,11 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, wind_stagger) "above land points (i.e. G%mask2dT = 0).", default=.false., & debuggingParam=.true.) - call data_override_init(Ocean_domain_in=G%Domain%mpp_domain) + call data_override_init(G%Domain) if (CS%restore_salt) then salt_file = trim(CS%inputdir) // trim(CS%salt_restore_file) - CS%id_srestore = init_external_field(salt_file, CS%salt_restore_var_name, domain=G%Domain%mpp_domain) + CS%id_srestore = init_external_field(salt_file, CS%salt_restore_var_name, MOM_domain=G%Domain) call safe_alloc_ptr(CS%srestore_mask,isd,ied,jsd,jed); CS%srestore_mask(:,:) = 1.0 if (CS%mask_srestore) then ! read a 2-d file containing a mask for restoring fluxes flnam = trim(CS%inputdir) // 'salt_restore_mask.nc' @@ -1568,7 +1563,7 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, wind_stagger) if (CS%restore_temp) then temp_file = trim(CS%inputdir) // trim(CS%temp_restore_file) - CS%id_trestore = init_external_field(temp_file, CS%temp_restore_var_name, domain=G%Domain%mpp_domain) + CS%id_trestore = init_external_field(temp_file, CS%temp_restore_var_name, MOM_domain=G%Domain) call safe_alloc_ptr(CS%trestore_mask,isd,ied,jsd,jed); CS%trestore_mask(:,:) = 1.0 if (CS%mask_trestore) then ! read a 2-d file containing a mask for restoring fluxes flnam = trim(CS%inputdir) // 'temp_restore_mask.nc' @@ -1629,30 +1624,30 @@ subroutine ice_ocn_bnd_type_chksum(id, timestep, iobt) !! 
ocean in a coupled model whose checksums are reported integer :: n,m, outunit - outunit = stdout() + outunit = stdout write(outunit,*) "BEGIN CHECKSUM(ice_ocean_boundary_type):: ", id, timestep - write(outunit,100) 'iobt%u_flux ', mpp_chksum( iobt%u_flux ) - write(outunit,100) 'iobt%v_flux ', mpp_chksum( iobt%v_flux ) - write(outunit,100) 'iobt%t_flux ', mpp_chksum( iobt%t_flux ) - write(outunit,100) 'iobt%q_flux ', mpp_chksum( iobt%q_flux ) - write(outunit,100) 'iobt%salt_flux ', mpp_chksum( iobt%salt_flux ) - write(outunit,100) 'iobt%lw_flux ', mpp_chksum( iobt%lw_flux ) - write(outunit,100) 'iobt%sw_flux_vis_dir', mpp_chksum( iobt%sw_flux_vis_dir) - write(outunit,100) 'iobt%sw_flux_vis_dif', mpp_chksum( iobt%sw_flux_vis_dif) - write(outunit,100) 'iobt%sw_flux_nir_dir', mpp_chksum( iobt%sw_flux_nir_dir) - write(outunit,100) 'iobt%sw_flux_nir_dif', mpp_chksum( iobt%sw_flux_nir_dif) - write(outunit,100) 'iobt%lprec ', mpp_chksum( iobt%lprec ) - write(outunit,100) 'iobt%fprec ', mpp_chksum( iobt%fprec ) - write(outunit,100) 'iobt%runoff ', mpp_chksum( iobt%runoff ) - write(outunit,100) 'iobt%calving ', mpp_chksum( iobt%calving ) - write(outunit,100) 'iobt%p ', mpp_chksum( iobt%p ) + write(outunit,100) 'iobt%u_flux ', field_chksum( iobt%u_flux ) + write(outunit,100) 'iobt%v_flux ', field_chksum( iobt%v_flux ) + write(outunit,100) 'iobt%t_flux ', field_chksum( iobt%t_flux ) + write(outunit,100) 'iobt%q_flux ', field_chksum( iobt%q_flux ) + write(outunit,100) 'iobt%salt_flux ', field_chksum( iobt%salt_flux ) + write(outunit,100) 'iobt%lw_flux ', field_chksum( iobt%lw_flux ) + write(outunit,100) 'iobt%sw_flux_vis_dir', field_chksum( iobt%sw_flux_vis_dir) + write(outunit,100) 'iobt%sw_flux_vis_dif', field_chksum( iobt%sw_flux_vis_dif) + write(outunit,100) 'iobt%sw_flux_nir_dir', field_chksum( iobt%sw_flux_nir_dir) + write(outunit,100) 'iobt%sw_flux_nir_dif', field_chksum( iobt%sw_flux_nir_dif) + write(outunit,100) 'iobt%lprec ', field_chksum( iobt%lprec ) + write(outunit,100) 'iobt%fprec ', field_chksum( iobt%fprec ) + write(outunit,100) 'iobt%runoff ', field_chksum( iobt%runoff ) + write(outunit,100) 'iobt%calving ', field_chksum( iobt%calving ) + write(outunit,100) 'iobt%p ', field_chksum( iobt%p ) if (associated(iobt%ustar_berg)) & - write(outunit,100) 'iobt%ustar_berg ', mpp_chksum( iobt%ustar_berg ) + write(outunit,100) 'iobt%ustar_berg ', field_chksum( iobt%ustar_berg ) if (associated(iobt%area_berg)) & - write(outunit,100) 'iobt%area_berg ', mpp_chksum( iobt%area_berg ) + write(outunit,100) 'iobt%area_berg ', field_chksum( iobt%area_berg ) if (associated(iobt%mass_berg)) & - write(outunit,100) 'iobt%mass_berg ', mpp_chksum( iobt%mass_berg ) + write(outunit,100) 'iobt%mass_berg ', field_chksum( iobt%mass_berg ) 100 FORMAT(" CHECKSUM::",A20," = ",Z20) call coupler_type_write_chksums(iobt%fluxes, outunit, 'iobt%') @@ -1664,7 +1659,8 @@ subroutine check_mask_val_consistency(val, mask, i, j, varname, G) real, intent(in) :: val !< value of flux/variable passed by IOB real, intent(in) :: mask !< value of ocean mask - integer, intent(in) :: i, j !< model grid cell indices + integer, intent(in) :: i !< model grid cell indices + integer, intent(in) :: j !< model grid cell indices character(len=*), intent(in) :: varname !< variable name type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure ! 
Local variables diff --git a/config_src/coupled_driver/ocean_model_MOM.F90 b/config_src/coupled_driver/ocean_model_MOM.F90 index 082099158c..f635e886a5 100644 --- a/config_src/coupled_driver/ocean_model_MOM.F90 +++ b/config_src/coupled_driver/ocean_model_MOM.F90 @@ -15,13 +15,19 @@ module ocean_model_mod use MOM, only : extract_surface_state, allocate_surface_state, finish_MOM_initialization use MOM, only : get_MOM_state_elements, MOM_state_is_synchronized use MOM, only : get_ocean_stocks, step_offline +use MOM_coms, only : field_chksum use MOM_constants, only : CELSIUS_KELVIN_OFFSET, hlf +use MOM_coupler_types, only : coupler_1d_bc_type, coupler_2d_bc_type +use MOM_coupler_types, only : coupler_type_spawn, coupler_type_write_chksums +use MOM_coupler_types, only : coupler_type_initialized, coupler_type_copy_data +use MOM_coupler_types, only : coupler_type_set_diags, coupler_type_send_data use MOM_diag_mediator, only : diag_ctrl, enable_averaging, disable_averaging use MOM_diag_mediator, only : diag_mediator_close_registration, diag_mediator_end -use MOM_domains, only : pass_var, pass_vector, AGRID, BGRID_NE, CGRID_NE -use MOM_domains, only : TO_ALL, Omit_Corners +use MOM_domains, only : MOM_domain_type, domain2d, clone_MOM_domain, get_domain_extent +use MOM_domains, only : pass_var, pass_vector, AGRID, BGRID_NE, CGRID_NE, TO_ALL, Omit_Corners use MOM_error_handler, only : MOM_error, MOM_mesg, FATAL, WARNING, is_root_pe use MOM_error_handler, only : callTree_enter, callTree_leave +use MOM_EOS, only : gsw_sp_from_sr, gsw_pt_from_ct use MOM_file_parser, only : get_param, log_version, close_param_file, param_file_type use MOM_forcing_type, only : forcing, mech_forcing, allocate_forcing_type use MOM_forcing_type, only : fluxes_accumulate, get_net_mass_forcing @@ -29,7 +35,7 @@ module ocean_model_mod use MOM_forcing_type, only : forcing_diagnostics, mech_forcing_diags use MOM_get_input, only : Get_MOM_Input, directories use MOM_grid, only : ocean_grid_type -use MOM_io, only : close_file, file_exists, read_data, write_version_number +use MOM_io, only : write_version_number, stdout use MOM_marine_ice, only : iceberg_forces, iceberg_fluxes, marine_ice_init, marine_ice_CS use MOM_restart, only : MOM_restart_CS, save_restart use MOM_string_functions, only : uppercase @@ -48,16 +54,6 @@ module ocean_model_mod use MOM_verticalGrid, only : verticalGrid_type use MOM_ice_shelf, only : initialize_ice_shelf, shelf_calc_flux, ice_shelf_CS use MOM_ice_shelf, only : add_shelf_forces, ice_shelf_end, ice_shelf_save_restart -use coupler_types_mod, only : coupler_1d_bc_type, coupler_2d_bc_type -use coupler_types_mod, only : coupler_type_spawn, coupler_type_write_chksums -use coupler_types_mod, only : coupler_type_initialized, coupler_type_copy_data -use coupler_types_mod, only : coupler_type_set_diags, coupler_type_send_data -use mpp_domains_mod, only : domain2d, mpp_get_layout, mpp_get_global_domain -use mpp_domains_mod, only : mpp_define_domains, mpp_get_compute_domain, mpp_get_data_domain -use atmos_ocean_fluxes_mod, only : aof_set_coupler_flux -use fms_mod, only : stdout -use mpp_mod, only : mpp_chksum -use MOM_EOS, only : gsw_sp_from_sr, gsw_pt_from_ct use MOM_wave_interface, only: wave_parameters_CS, MOM_wave_interface_init use MOM_wave_interface, only: MOM_wave_interface_init_lite, Update_Surface_Waves @@ -107,7 +103,7 @@ module ocean_model_mod !! points of the two velocity components. Valid entries !! include AGRID, BGRID_NE, CGRID_NE, BGRID_SW, and CGRID_SW, !! 
corresponding to the community-standard Arakawa notation. - !! (These are named integers taken from mpp_parameter_mod.) + !! (These are named integers taken from the MOM_domains module.) !! Following MOM5, stagger is BGRID_NE by default when the !! ocean is initialized, but here it is set to -999 so that !! a global max across ocean and non-ocean processors can be @@ -121,6 +117,8 @@ module ocean_model_mod !! i.e. dzt(1) + eta_t + patm/rho0/grav [m] frazil =>NULL(), & !< Accumulated heating [J m-2] from frazil !! formation in the ocean. + melt_potential => NULL(), & !< Instantaneous heat used to melt sea ice [J m-2]. + OBLD => NULL(), & !< Ocean boundary layer depth [m]. area => NULL() !< cell area of the ocean surface [m2]. type(coupler_2d_bc_type) :: fields !< A structure that may contain named !! arrays of tracer-related surface fields. @@ -179,13 +177,13 @@ module ocean_model_mod !! processes before time stepping the dynamics. type(directories) :: dirs !< A structure containing several relevant directory paths. - type(mech_forcing) :: forces !< A structure with the driving mechanical surface forces - type(forcing) :: fluxes !< A structure containing pointers to - !! the thermodynamic ocean forcing fields. - type(forcing) :: flux_tmp !< A secondary structure containing pointers to the + type(mech_forcing) :: forces !< A structure with the driving mechanical surface forces + type(forcing) :: fluxes !< A structure containing pointers to + !! the thermodynamic ocean forcing fields. + type(forcing) :: flux_tmp !< A secondary structure containing pointers to the !! ocean forcing fields for when multiple coupled !! timesteps are taken per thermodynamic step. - type(surface) :: sfc_state !< A structure containing pointers to + type(surface) :: sfc_state !< A structure containing pointers to !! the ocean surface state fields. type(ocean_grid_type), pointer :: & grid => NULL() !< A pointer to a grid structure containing metrics @@ -267,6 +265,10 @@ subroutine ocean_model_init(Ocean_sfc, OS, Time_init, Time_in, wind_stagger, gas endif allocate(OS) +! allocate(OS%fluxes) +! allocate(OS%forces) +! allocate(OS%flux_tmp) + OS%is_ocean_pe = Ocean_sfc%is_ocean_pe if (.not.OS%is_ocean_pe) return @@ -355,6 +357,7 @@ subroutine ocean_model_init(Ocean_sfc, OS, Time_init, Time_in, wind_stagger, gas use_melt_pot=.false. endif + !allocate(OS%sfc_state) call allocate_surface_state(OS%sfc_state, OS%grid, use_temperature, do_integrals=.true., & gas_fields_ocn=gas_fields_ocn, use_meltpot=use_melt_pot) @@ -384,14 +387,8 @@ subroutine ocean_model_init(Ocean_sfc, OS, Time_init, Time_in, wind_stagger, gas call MOM_wave_interface_init_lite(param_file) endif - if (associated(OS%grid%Domain%maskmap)) then - call initialize_ocean_public_type(OS%grid%Domain%mpp_domain, Ocean_sfc, & - OS%diag, maskmap=OS%grid%Domain%maskmap, & - gas_fields_ocn=gas_fields_ocn) - else - call initialize_ocean_public_type(OS%grid%Domain%mpp_domain, Ocean_sfc, & - OS%diag, gas_fields_ocn=gas_fields_ocn) - endif + call initialize_ocean_public_type(OS%grid%Domain, Ocean_sfc, OS%diag, & + gas_fields_ocn=gas_fields_ocn) ! This call can only occur here if the coupler_bc_type variables have been ! initialized already using the information from gas_fields_ocn. @@ -506,8 +503,7 @@ subroutine update_ocean_model(Ice_ocean_boundary, OS, Ocean_sfc, time_start_upda (/is,is,ie,ie/), (/js,js,je,je/), as_needed=.true.) ! Translate Ice_ocean_boundary into fluxes and forces. 
- call mpp_get_compute_domain(Ocean_sfc%Domain, index_bnds(1), index_bnds(2), & - index_bnds(3), index_bnds(4)) + call get_domain_extent(Ocean_sfc%Domain, index_bnds(1), index_bnds(2), index_bnds(3), index_bnds(4)) if (do_dyn) then call convert_IOB_to_forces(Ice_ocean_boundary, OS%forces, index_bnds, OS%Time_dyn, OS%grid, OS%US, & @@ -726,7 +722,7 @@ end subroutine ocean_model_end subroutine ocean_model_save_restart(OS, Time, directory, filename_suffix) type(ocean_state_type), pointer :: OS !< A pointer to the structure containing the !! internal ocean state (in). - type(time_type), intent(in) :: Time !< The model time at this call, needed for mpp_write calls. + type(time_type), intent(in) :: Time !< The model time at this call, needed for writing files. character(len=*), optional, intent(in) :: directory !< An optional directory into which to !! write these restart files. character(len=*), optional, intent(in) :: filename_suffix !< An optional suffix (e.g., a time-stamp) @@ -758,16 +754,12 @@ subroutine ocean_model_save_restart(OS, Time, directory, filename_suffix) end subroutine ocean_model_save_restart !> Initialize the public ocean type -subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, maskmap, & - gas_fields_ocn) - type(domain2D), intent(in) :: input_domain !< The ocean model domain description +subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, gas_fields_ocn) + type(MOM_domain_type), intent(in) :: input_domain !< The ocean model domain description type(ocean_public_type), intent(inout) :: Ocean_sfc !< A structure containing various publicly - !! visible ocean surface properties after initialization, whose - !! elements are allocated here. - type(diag_ctrl), intent(in) :: diag !< A structure that regulates diagnsotic output - logical, dimension(:,:), & - optional, intent(in) :: maskmap !< A mask indicating which virtual processors - !! are actually in use. If missing, all are used. + !! visible ocean surface properties after + !! initialization, whose elements are allocated here. + type(diag_ctrl), intent(in) :: diag !< A structure that regulates diagnostic output type(coupler_1d_bc_type), & optional, intent(in) :: gas_fields_ocn !< If present, this type describes the !! ocean and surface-ice fields that will participate @@ -779,14 +771,9 @@ subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, maskmap, ! and have no halos. integer :: isc, iec, jsc, jec - call mpp_get_layout(input_domain,layout) - call mpp_get_global_domain(input_domain, xsize=xsz, ysize=ysz) - if (PRESENT(maskmap)) then - call mpp_define_domains((/1,xsz,1,ysz/),layout,Ocean_sfc%Domain, maskmap=maskmap) - else - call mpp_define_domains((/1,xsz,1,ysz/),layout,Ocean_sfc%Domain) - endif - call mpp_get_compute_domain(Ocean_sfc%Domain, isc, iec, jsc, jec) + call clone_MOM_domain(input_domain, Ocean_sfc%Domain, halo_size=0, symmetric=.false.) + + call get_domain_extent(Ocean_sfc%Domain, isc, iec, jsc, jec) allocate ( Ocean_sfc%t_surf (isc:iec,jsc:jec), & Ocean_sfc%s_surf (isc:iec,jsc:jec), & @@ -794,6 +781,8 @@ subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, maskmap, Ocean_sfc%v_surf (isc:iec,jsc:jec), & Ocean_sfc%sea_lev(isc:iec,jsc:jec), & Ocean_sfc%area (isc:iec,jsc:jec), & + Ocean_sfc%melt_potential(isc:iec,jsc:jec), & + Ocean_sfc%OBLD (isc:iec,jsc:jec), & Ocean_sfc%frazil (isc:iec,jsc:jec)) Ocean_sfc%t_surf(:,:) = 0.0 ! 
time averaged sst (Kelvin) passed to atmosphere/ice model @@ -802,6 +791,8 @@ subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, maskmap, Ocean_sfc%v_surf(:,:) = 0.0 ! time averaged v-current (m/sec) passed to atmosphere/ice models Ocean_sfc%sea_lev(:,:) = 0.0 ! time averaged thickness of top model grid cell (m) plus patm/rho0/grav Ocean_sfc%frazil(:,:) = 0.0 ! time accumulated frazil (J/m^2) passed to ice model + Ocean_sfc%melt_potential(:,:) = 0.0 ! time accumulated melt potential (J/m^2) passed to ice model + Ocean_sfc%OBLD(:,:) = 0.0 ! ocean boundary layer depth (m) Ocean_sfc%area(:,:) = 0.0 Ocean_sfc%axes = diag%axesT1%handles !diag axes to be used by coupler tracer flux diagnostics @@ -838,8 +829,7 @@ subroutine convert_state_to_ocean_type(sfc_state, Ocean_sfc, G, US, patm, press_ is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec call pass_vector(sfc_state%u, sfc_state%v, G%Domain) - call mpp_get_compute_domain(Ocean_sfc%Domain, isc_bnd, iec_bnd, & - jsc_bnd, jec_bnd) + call get_domain_extent(Ocean_sfc%Domain, isc_bnd, iec_bnd, jsc_bnd, jec_bnd) if (present(patm)) then ! Check that the inidicies in patm are (isc_bnd:iec_bnd,jsc_bnd:jec_bnd). if (.not.present(press_to_z)) call MOM_error(FATAL, & @@ -887,6 +877,18 @@ subroutine convert_state_to_ocean_type(sfc_state, Ocean_sfc, G, US, patm, press_ enddo ; enddo endif + if (allocated(sfc_state%melt_potential)) then + do j=jsc_bnd,jec_bnd ; do i=isc_bnd,iec_bnd + Ocean_sfc%melt_potential(i,j) = US%Q_to_J_kg*US%RZ_to_kg_m2 * sfc_state%melt_potential(i+i0,j+j0) + enddo ; enddo + endif + + if (allocated(sfc_state%Hml)) then + do j=jsc_bnd,jec_bnd ; do i=isc_bnd,iec_bnd + Ocean_sfc%OBLD(i,j) = US%Z_to_m * sfc_state%Hml(i+i0,j+j0) + enddo ; enddo + endif + if (Ocean_sfc%stagger == AGRID) then do j=jsc_bnd,jec_bnd ; do i=isc_bnd,iec_bnd Ocean_sfc%u_surf(i,j) = G%mask2dT(i+i0,j+j0) * US%L_T_to_m_s * & @@ -1021,51 +1023,52 @@ subroutine ocean_model_data2D_get(OS, Ocean, name, array2D, isc, jsc) integer , intent(in) :: isc !< The starting i-index of array2D integer , intent(in) :: jsc !< The starting j-index of array2D - integer :: g_isc, g_iec, g_jsc, g_jec,g_isd, g_ied, g_jsd, g_jed, i, j + integer :: g_isc, g_iec, g_jsc, g_jec, g_isd, g_ied, g_jsd, g_jed, i, j if (.not.associated(OS)) return if (.not.OS%is_ocean_pe) return -! The problem is %areaT is on MOM domain but Ice_Ocean_Boundary%... is on mpp domain. -! We want to return the MOM data on the mpp (compute) domain -! Get MOM domain extents - call mpp_get_compute_domain(OS%grid%Domain%mpp_domain, g_isc, g_iec, g_jsc, g_jec) - call mpp_get_data_domain (OS%grid%Domain%mpp_domain, g_isd, g_ied, g_jsd, g_jed) + ! The problem is that %areaT is on MOM domain but Ice_Ocean_Boundary%... is on a haloless domain. + ! We want to return the MOM data on the haloless (compute) domain + call get_domain_extent(OS%grid%Domain, g_isc, g_iec, g_jsc, g_jec, g_isd, g_ied, g_jsd, g_jed) g_isc = g_isc-g_isd+1 ; g_iec = g_iec-g_isd+1 ; g_jsc = g_jsc-g_jsd+1 ; g_jec = g_jec-g_jsd+1 - select case(name) - case('area') - array2D(isc:,jsc:) = OS%US%L_to_m**2*OS%grid%areaT(g_isc:g_iec,g_jsc:g_jec) - case('mask') - array2D(isc:,jsc:) = OS%grid%mask2dT(g_isc:g_iec,g_jsc:g_jec) + case('area') + array2D(isc:,jsc:) = OS%US%L_to_m**2*OS%grid%areaT(g_isc:g_iec,g_jsc:g_jec) + case('mask') + array2D(isc:,jsc:) = OS%grid%mask2dT(g_isc:g_iec,g_jsc:g_jec) !OR same result ! do j=g_jsc,g_jec ; do i=g_isc,g_iec ! array2D(isc+i-g_isc,jsc+j-g_jsc) = OS%grid%mask2dT(i,j) ! 
enddo ; enddo - case('t_surf') - array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET - case('t_pme') - array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET - case('t_runoff') - array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET - case('t_calving') - array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET - case('btfHeat') - array2D(isc:,jsc:) = 0 - case('cos_rot') - array2D(isc:,jsc:) = OS%grid%cos_rot(g_isc:g_iec,g_jsc:g_jec) ! =1 - case('sin_rot') - array2D(isc:,jsc:) = OS%grid%sin_rot(g_isc:g_iec,g_jsc:g_jec) ! =0 - case('s_surf') - array2D(isc:,jsc:) = Ocean%s_surf(isc:,jsc:) - case('sea_lev') - array2D(isc:,jsc:) = Ocean%sea_lev(isc:,jsc:) - case('frazil') - array2D(isc:,jsc:) = Ocean%frazil(isc:,jsc:) - case default - call MOM_error(FATAL,'get_ocean_grid_data2D: unknown argument name='//name) + case('t_surf') + array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET + case('t_pme') + array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET + case('t_runoff') + array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET + case('t_calving') + array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET + case('btfHeat') + array2D(isc:,jsc:) = 0 + case('cos_rot') + array2D(isc:,jsc:) = OS%grid%cos_rot(g_isc:g_iec,g_jsc:g_jec) ! =1 + case('sin_rot') + array2D(isc:,jsc:) = OS%grid%sin_rot(g_isc:g_iec,g_jsc:g_jec) ! =0 + case('s_surf') + array2D(isc:,jsc:) = Ocean%s_surf(isc:,jsc:) + case('sea_lev') + array2D(isc:,jsc:) = Ocean%sea_lev(isc:,jsc:) + case('frazil') + array2D(isc:,jsc:) = Ocean%frazil(isc:,jsc:) + case('melt_pot') + array2D(isc:,jsc:) = Ocean%melt_potential(isc:,jsc:) + case('obld') + array2D(isc:,jsc:) = Ocean%OBLD(isc:,jsc:) + case default + call MOM_error(FATAL,'get_ocean_grid_data2D: unknown argument name='//name) end select end subroutine ocean_model_data2D_get @@ -1100,16 +1103,16 @@ subroutine ocean_public_type_chksum(id, timestep, ocn) !! visible ocean surface fields. 
integer :: n, m, outunit - outunit = stdout() + outunit = stdout write(outunit,*) "BEGIN CHECKSUM(ocean_type):: ", id, timestep - write(outunit,100) 'ocean%t_surf ',mpp_chksum(ocn%t_surf ) - write(outunit,100) 'ocean%s_surf ',mpp_chksum(ocn%s_surf ) - write(outunit,100) 'ocean%u_surf ',mpp_chksum(ocn%u_surf ) - write(outunit,100) 'ocean%v_surf ',mpp_chksum(ocn%v_surf ) - write(outunit,100) 'ocean%sea_lev ',mpp_chksum(ocn%sea_lev) - write(outunit,100) 'ocean%frazil ',mpp_chksum(ocn%frazil ) - + write(outunit,100) 'ocean%t_surf ', field_chksum(ocn%t_surf ) + write(outunit,100) 'ocean%s_surf ', field_chksum(ocn%s_surf ) + write(outunit,100) 'ocean%u_surf ', field_chksum(ocn%u_surf ) + write(outunit,100) 'ocean%v_surf ', field_chksum(ocn%v_surf ) + write(outunit,100) 'ocean%sea_lev ', field_chksum(ocn%sea_lev) + write(outunit,100) 'ocean%frazil ', field_chksum(ocn%frazil ) + write(outunit,100) 'ocean%melt_potential ', field_chksum(ocn%melt_potential) call coupler_type_write_chksums(ocn%fields, outunit, 'ocean%') 100 FORMAT(" CHECKSUM::",A20," = ",Z20) @@ -1153,8 +1156,7 @@ subroutine ocean_model_get_UV_surf(OS, Ocean, name, array2D, isc, jsc) G => OS%grid is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec - call mpp_get_compute_domain(Ocean%Domain, isc_bnd, iec_bnd, & - jsc_bnd, jec_bnd) + call get_domain_extent(Ocean%Domain, isc_bnd, iec_bnd, jsc_bnd, jec_bnd) i0 = is - isc_bnd ; j0 = js - jsc_bnd @@ -1182,7 +1184,7 @@ subroutine ocean_model_get_UV_surf(OS, Ocean, name, array2D, isc, jsc) 0.5*(sfc_state%v(i+i0,J+j0)+sfc_state%v(i+i0+1,J+j0)) enddo ; enddo case default - call MOM_error(FATAL,'ocean_model_get_UV_surf: unknown argument name='//name) + call MOM_error(FATAL,'ocean_model_get_UV_surf: unknown argument name='//name) end select end subroutine ocean_model_get_UV_surf diff --git a/config_src/external/GFDL_ocean_BGC/FMS_coupler_util.F90 b/config_src/external/GFDL_ocean_BGC/FMS_coupler_util.F90 index f3d63dd061..e50f2ccf0b 100644 --- a/config_src/external/GFDL_ocean_BGC/FMS_coupler_util.F90 +++ b/config_src/external/GFDL_ocean_BGC/FMS_coupler_util.F90 @@ -12,11 +12,15 @@ module FMS_coupler_util subroutine extract_coupler_values(BC_struc, BC_index, BC_element, array_out, ilb, jlb, & is, ie, js, je, conversion) real, dimension(ilb:,jlb:),intent(out) :: array_out !< The array being filled with the input values - integer, intent(in) :: ilb, jlb !< Lower bounds + integer, intent(in) :: ilb !< Lower bounds + integer, intent(in) :: jlb !< Lower bounds type(coupler_2d_bc_type), intent(in) :: BC_struc !< The type from which the data is being extracted integer, intent(in) :: BC_index !< The boundary condition number being extracted integer, intent(in) :: BC_element !< The element of the boundary condition being extracted - integer, optional, intent(in) :: is, ie, js, je !< The i- and j- limits of array_out to be filled + integer, optional, intent(in) :: is !< The i- limits of array_out to be filled + integer, optional, intent(in) :: ie !< The i- limits of array_out to be filled + integer, optional, intent(in) :: js !< The j- limits of array_out to be filled + integer, optional, intent(in) :: je !< The j- limits of array_out to be filled real, optional, intent(in) :: conversion !< A number that every element is multiplied by end subroutine extract_coupler_values @@ -24,11 +28,15 @@ end subroutine extract_coupler_values subroutine set_coupler_values(array_in, BC_struc, BC_index, BC_element, ilb, jlb,& is, ie, js, je, conversion) real, dimension(ilb:,jlb:), intent(in) :: array_in !< The array 
containing the values to load into the BC - integer, intent(in) :: ilb, jlb !< Lower bounds + integer, intent(in) :: ilb !< Lower bounds + integer, intent(in) :: jlb !< Lower bounds type(coupler_2d_bc_type), intent(inout) :: BC_struc !< The type into which the data is being loaded integer, intent(in) :: BC_index !< The boundary condition number being set integer, intent(in) :: BC_element !< The element of the boundary condition being set - integer, optional, intent(in) :: is, ie, js, je !< The i- and j- limits of array_out to be filled + integer, optional, intent(in) :: is !< The i- limits of array_out to be filled + integer, optional, intent(in) :: ie !< The i- limits of array_out to be filled + integer, optional, intent(in) :: js !< The j- limits of array_out to be filled + integer, optional, intent(in) :: je !< The j- limits of array_out to be filled real, optional, intent(in) :: conversion !< A number that every element is multiplied by end subroutine set_coupler_values diff --git a/config_src/external/GFDL_ocean_BGC/generic_tracer.F90 b/config_src/external/GFDL_ocean_BGC/generic_tracer.F90 index bfbc846af9..4d2e4183f7 100644 --- a/config_src/external/GFDL_ocean_BGC/generic_tracer.F90 +++ b/config_src/external/GFDL_ocean_BGC/generic_tracer.F90 @@ -33,7 +33,17 @@ end subroutine generic_tracer_register !> Initialize generic tracers subroutine generic_tracer_init(isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,axes,grid_tmask,grid_kmt,init_time) - integer, intent(in) :: isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,axes(3) !< Domain boundaries and axes + integer, intent(in) :: isc !< Computation start index in i direction + integer, intent(in) :: iec !< Computation end index in i direction + integer, intent(in) :: jsc !< Computation start index in j direction + integer, intent(in) :: jec !< Computation end index in j direction + integer, intent(in) :: isd !< Data start index in i direction + integer, intent(in) :: ied !< Data end index in i direction + integer, intent(in) :: jsd !< Data start index in j direction + integer, intent(in) :: jed !< Data end index in j direction + integer, intent(in) :: nk !< Number of levels in k direction + integer, intent(in) :: ntau !< Unknown + integer, intent(in) :: axes(3) !< Domain axes? 
type(time_type), intent(in) :: init_time !< Time real, dimension(:,:,:),target, intent(in) :: grid_tmask !< Mask integer, dimension(:,:) , intent(in) :: grid_kmt !< Number of wet cells in column @@ -61,7 +71,7 @@ subroutine generic_tracer_source(Temp,Salt,rho_dzt,dzt,hblt_depth,ilb,jlb,tau,dt frunoff,grid_ht, current_wave_stress, sosga) real, dimension(ilb:,jlb:,:), intent(in) :: Temp !< Potential temperature [deg C] real, dimension(ilb:,jlb:,:), intent(in) :: Salt !< Salinity [psu] - real, dimension(ilb:,jlb:,:), intent(in) :: rho_dzt + real, dimension(ilb:,jlb:,:), intent(in) :: rho_dzt !< Unknown real, dimension(ilb:,jlb:,:), intent(in) :: dzt !< Ocean layer thickness [m] real, dimension(ilb:,jlb:), intent(in) :: hblt_depth !< Boundary layer depth integer, intent(in) :: ilb !< Lower bounds of x extent of input arrays on data domain @@ -71,14 +81,14 @@ subroutine generic_tracer_source(Temp,Salt,rho_dzt,dzt,hblt_depth,ilb,jlb,tau,dt real, dimension(ilb:,jlb:), intent(in) :: grid_dat !< Unknown type(time_type), intent(in) :: model_time !< Time integer, intent(in) :: nbands !< Unknown - real, dimension(:), intent(in) :: max_wavelength_band + real, dimension(:), intent(in) :: max_wavelength_band !< Unknown real, dimension(:,ilb:,jlb:), intent(in) :: sw_pen_band !< Shortwave penetration real, dimension(:,ilb:,jlb:,:), intent(in) :: opacity_band !< Unknown real, dimension(ilb:,jlb:),optional, intent(in) :: internal_heat !< Unknown real, dimension(ilb:,jlb:),optional, intent(in) :: frunoff !< Unknown real, dimension(ilb:,jlb:),optional, intent(in) :: grid_ht !< Unknown real, dimension(ilb:,jlb:),optional , intent(in) :: current_wave_stress !< Unknown - real, optional , intent(in) :: sosga ! global avg. sea surface salinity + real, optional , intent(in) :: sosga !< Global average sea surface salinity end subroutine generic_tracer_source !> Update the tracers from bottom fluxes diff --git a/config_src/external/GFDL_ocean_BGC/generic_tracer_utils.F90 b/config_src/external/GFDL_ocean_BGC/generic_tracer_utils.F90 index 6937ef4710..de513a7f11 100644 --- a/config_src/external/GFDL_ocean_BGC/generic_tracer_utils.F90 +++ b/config_src/external/GFDL_ocean_BGC/generic_tracer_utils.F90 @@ -21,8 +21,10 @@ module g_tracer_utils !> Tracer concentration in river runoff real, allocatable, dimension(:,:) :: trunoff logical :: requires_restart = .true. !< Unknown - !> Tracer source: filename, type, var name, units, record, gridfile - character(len=fm_string_len) :: src_file, src_var_name, src_var_unit, src_var_gridspec + character(len=fm_string_len) :: src_file !< Tracer source filename + character(len=fm_string_len) :: src_var_name !< Tracer source variable name + character(len=fm_string_len) :: src_var_unit !< Tracer source variable units + character(len=fm_string_len) :: src_var_gridspec !< Tracer source grid file name integer :: src_var_record !< Unknown logical :: requires_src_info = .false. !< Unknown real :: src_var_unit_conversion = 1.0 !< This factor depends on the tracer. Ask Jasmin @@ -38,7 +40,8 @@ module g_tracer_utils type g_tracer_common ! type(g_diag_ctrl) :: diag_CS !< Unknown !> Domain extents - integer :: isd,jsd + integer :: isd !< Start index of the data domain in the i-direction + integer :: jsd !< Start index of the data domain in the j-direction end type g_tracer_common !> Unknown dangerous module data! 
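(Reviewer note, not part of the patch: the interface edits in this file and in generic_tracer.F90 all apply one pattern: grouped dummy-argument declarations that shared a single comment are split so that every argument gets its own declaration and its own !< doxygen description. A minimal, hypothetical Fortran sketch of the target style, using made-up argument names rather than anything from this patch:

subroutine example_set_extents(isd, jsd)
  integer, intent(in) :: isd  !< Data start index in the i-direction
  integer, intent(in) :: jsd  !< Data start index in the j-direction
end subroutine example_set_extents

Splitting the declarations is what lets Doxygen attach a distinct description to each argument instead of one shared "Unknown" note.)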
@@ -102,7 +105,17 @@ subroutine g_tracer_set_csdiag(diag_CS) end subroutine g_tracer_set_csdiag subroutine g_tracer_set_common(isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,axes,grid_tmask,grid_kmt,init_time) - integer, intent(in) :: isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,axes(3) !< Unknown + integer, intent(in) :: isc !< Computation start index in i direction + integer, intent(in) :: iec !< Computation end index in i direction + integer, intent(in) :: jsc !< Computation start index in j direction + integer, intent(in) :: jec !< Computation end index in j direction + integer, intent(in) :: isd !< Data start index in i direction + integer, intent(in) :: ied !< Data end index in i direction + integer, intent(in) :: jsd !< Data start index in j direction + integer, intent(in) :: jed !< Data end index in j direction + integer, intent(in) :: nk !< Number of levels in k direction + integer, intent(in) :: ntau !< Unknown + integer, intent(in) :: axes(3) !< Domain axes? real, dimension(isd:,jsd:,:),intent(in) :: grid_tmask !< Unknown integer,dimension(isd:,jsd:),intent(in) :: grid_kmt !< Unknown type(time_type), intent(in) :: init_time !< Unknown @@ -110,10 +123,19 @@ end subroutine g_tracer_set_common subroutine g_tracer_get_common(isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,& axes,grid_tmask,grid_mask_coast,grid_kmt,init_time,diag_CS) - integer, intent(out) :: isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau !< Unknown - integer,optional, intent(out) :: axes(3) !< Unknown + integer, intent(out) :: isc !< Computation start index in i direction + integer, intent(out) :: iec !< Computation end index in i direction + integer, intent(out) :: jsc !< Computation start index in j direction + integer, intent(out) :: jec !< Computation end index in j direction + integer, intent(out) :: isd !< Data start index in i direction + integer, intent(out) :: ied !< Data end index in i direction + integer, intent(out) :: jsd !< Data start index in j direction + integer, intent(out) :: jed !< Data end index in j direction + integer, intent(out) :: nk !< Number of levels in k direction + integer, intent(out) :: ntau !< Unknown + integer, optional, intent(out) :: axes(3) !< Unknown type(time_type), optional, intent(out) :: init_time !< Unknown - real, optional, dimension(:,:,:),pointer :: grid_tmask !< Unknown + real, optional, dimension(:,:,:), pointer :: grid_tmask !< Unknown integer, optional, dimension(:,:), pointer :: grid_mask_coast !< Unknown integer, optional, dimension(:,:), pointer :: grid_kmt !< Unknown type(g_diag_ctrl), optional, pointer :: diag_CS !< Unknown @@ -123,32 +145,33 @@ end subroutine g_tracer_get_common subroutine g_tracer_get_4D(g_tracer_list,name,member,array_ptr) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - real, dimension(:,:,:,:), pointer :: array_ptr + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + real, dimension(:,:,:,:), pointer :: array_ptr !< Unknown end subroutine g_tracer_get_4D !> Unknown subroutine g_tracer_get_3D(g_tracer_list,name,member,array_ptr) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - real, dimension(:,:,:), pointer :: array_ptr !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + real, dimension(:,:,:), pointer :: array_ptr !< Unknown end subroutine g_tracer_get_3D !> Unknown subroutine 
g_tracer_get_2D(g_tracer_list,name,member,array_ptr) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - real, dimension(:,:), pointer :: array_ptr !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + real, dimension(:,:), pointer :: array_ptr !< Unknown end subroutine g_tracer_get_2D !> Unknown subroutine g_tracer_get_4D_val(g_tracer_list,name,member,array,isd,jsd) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown real, dimension(isd:,jsd:,:,:), intent(out):: array !< Unknown end subroutine g_tracer_get_4D_val @@ -156,8 +179,9 @@ end subroutine g_tracer_get_4D_val subroutine g_tracer_get_3D_val(g_tracer_list,name,member,array,isd,jsd,ntau,positive) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown integer, optional, intent(in) :: ntau !< Unknown logical, optional, intent(in) :: positive !< Unknown real, dimension(isd:,jsd:,:), intent(out):: array !< Unknown @@ -169,8 +193,9 @@ end subroutine g_tracer_get_3D_val subroutine g_tracer_get_2D_val(g_tracer_list,name,member,array,isd,jsd) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown real, dimension(isd:,jsd:), intent(out):: array !< Unknown end subroutine g_tracer_get_2D_val @@ -178,15 +203,15 @@ end subroutine g_tracer_get_2D_val subroutine g_tracer_get_real(g_tracer_list,name,member,value) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - real, intent(out):: value + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + real, intent(out):: value !< Unknown end subroutine g_tracer_get_real !> Unknown subroutine g_tracer_get_string(g_tracer_list,name,member,string) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown character(len=fm_string_len), intent(out) :: string !< Unknown end subroutine g_tracer_get_string @@ -194,8 +219,9 @@ end subroutine g_tracer_get_string subroutine g_tracer_set_2D(g_tracer_list,name,member,array,isd,jsd,weight) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown real, dimension(isd:,jsd:),intent(in) :: array !< 
Unknown real, optional ,intent(in) :: weight !< Unknown end subroutine g_tracer_set_2D @@ -204,8 +230,9 @@ end subroutine g_tracer_set_2D subroutine g_tracer_set_3D(g_tracer_list,name,member,array,isd,jsd,ntau) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown integer, optional, intent(in) :: ntau !< Unknown real, dimension(isd:,jsd:,:), intent(in) :: array !< Unknown end subroutine g_tracer_set_3D @@ -214,16 +241,17 @@ end subroutine g_tracer_set_3D subroutine g_tracer_set_4D(g_tracer_list,name,member,array,isd,jsd) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown - real, dimension(isd:,jsd:,:,:), intent(in) :: array !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown + real, dimension(isd:,jsd:,:,:), intent(in) :: array !< Unknown end subroutine g_tracer_set_4D !> Unknown subroutine g_tracer_set_real(g_tracer_list,name,member,value) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown real, intent(in) :: value !< Unknown end subroutine g_tracer_set_real @@ -265,7 +293,7 @@ end subroutine g_tracer_get_next !! Since the surface flux from the atmosphere (%stf) has the units of mol/m^2/sec the resulting !! tracer concentration has units of mol/Kg subroutine g_tracer_vertdiff_G(g_tracer, h_old, ea, eb, dt, kg_m2_to_H, m_to_H, tau, mom) - type(g_tracer_type), pointer :: g_tracer + type(g_tracer_type), pointer :: g_tracer !< Unknown !> Layer thickness before entrainment, in m or kg m-2. real, dimension(g_tracer_com%isd:,g_tracer_com%jsd:,:), intent(in) :: h_old !> The amount of fluid entrained from the layer above, in H. @@ -278,7 +306,7 @@ subroutine g_tracer_vertdiff_G(g_tracer, h_old, ea, eb, dt, kg_m2_to_H, m_to_H, real, intent(in) :: m_to_H !< A conversion factor that translates m into the units !! of h_old (H). integer, intent(in) :: tau !< Unknown - logical, intent(in), optional :: mom + logical, intent(in), optional :: mom !< Unknown end subroutine g_tracer_vertdiff_G end module g_tracer_utils diff --git a/config_src/external/ODA_hooks/ocean_da_types.F90 b/config_src/external/ODA_hooks/ocean_da_types.F90 index bc5af1d782..e71c76a048 100644 --- a/config_src/external/ODA_hooks/ocean_da_types.F90 +++ b/config_src/external/ODA_hooks/ocean_da_types.F90 @@ -7,10 +7,9 @@ module ocean_da_types_mod private - !> Example type for ocean ensemble DA state type, public :: OCEAN_CONTROL_STRUCT - integer :: ensemble_size + integer :: ensemble_size !< ensemble size real, pointer, dimension(:,:,:) :: SSH=>NULL() !NULL() !NULL() !NULL() !< all profiles are stored as linked list. - type(ocean_profile_type), pointer :: prev=>NULL() - type(ocean_profile_type), pointer :: cnext=>NULL() ! current profiles are stored as linked list. - type(ocean_profile_type), pointer :: cprev=>NULL() - integer :: nbr_xi, nbr_yi ! nearest neighbor model gridpoint for the profile - real :: nbr_dist ! 
distance to nearest neighbor model gridpoint + type(ocean_profile_type), pointer :: prev=>NULL() !< previous + type(ocean_profile_type), pointer :: cnext=>NULL() !< current profiles are stored as linked list. + type(ocean_profile_type), pointer :: cprev=>NULL() !< previous + integer :: nbr_xi !< x nearest neighbor model gridpoint for the profile + integer :: nbr_yi !< y nearest neighbor model gridpoint for the profile + real :: nbr_dist !< distance to nearest neighbor model gridpoint logical :: compute !< profile is within current compute domain real, dimension(:,:), pointer :: depth => NULL() !< depth of measurement [m] real, dimension(:,:), pointer :: data => NULL() !< data by variable type @@ -54,32 +55,36 @@ module ocean_da_types_mod real, dimension(:,:,:), pointer :: analysis => NULL() !< ensemble member analysis type(forward_operator_type), pointer :: obs_def => NULL() !< observation forward operator type(time_type) :: time !< profile time type - real :: i_index, j_index !< model longitude and latitude indices respectively + real :: i_index !< model longitude indices respectively + real :: j_index !< model latitude indices respectively real, dimension(:,:), pointer :: k_index !< model depth indices type(time_type) :: tdiff !< difference between model time and observation time - character(len=128) :: filename + character(len=128) :: filename !< a filename end type ocean_profile_type !> Example forward operator type. type, public :: forward_operator_type - integer :: num + integer :: num !< how many? integer, dimension(2) :: state_size !< for integer, dimension(:), pointer :: state_var_index !< for flattened data integer, dimension(:), pointer :: i_index !< i-dimension index integer, dimension(:), pointer :: j_index !< j-dimension index - real, dimension(:), pointer :: coef + real, dimension(:), pointer :: coef !< coefficient end type forward_operator_type !> Grid type for DA type, public :: grid_type - real, pointer, dimension(:,:) :: x=>NULL(), y=>NULL() - real, pointer, dimension(:,:,:) :: z=>NULL() - real, pointer, dimension(:,:,:) :: h=>NULL() - real, pointer, dimension(:,:) :: basin_mask => NULL() - real, pointer, dimension(:,:,:) :: mask => NULL() - real, pointer, dimension(:,:) :: bathyT => NULL() - logical :: tripolar_N - integer :: ni, nj, nk + real, pointer, dimension(:,:) :: x=>NULL() !< x + real, pointer, dimension(:,:) :: y=>NULL() !< y + real, pointer, dimension(:,:,:) :: z=>NULL() !< z + real, pointer, dimension(:,:,:) :: h=>NULL() !< h + real, pointer, dimension(:,:) :: basin_mask => NULL() !< basin mask + real, pointer, dimension(:,:,:) :: mask => NULL() !< land mask? 
+ real, pointer, dimension(:,:) :: bathyT => NULL() !< bathymetry at T points + logical :: tripolar_N !< True for tripolar grids + integer :: ni !< ni + integer :: nj !< nj + integer :: nk !< nk end type grid_type end module ocean_da_types_mod diff --git a/config_src/external/ODA_hooks/write_ocean_obs.F90 b/config_src/external/ODA_hooks/write_ocean_obs.F90 index a2c41b58d6..da4a404d3d 100644 --- a/config_src/external/ODA_hooks/write_ocean_obs.F90 +++ b/config_src/external/ODA_hooks/write_ocean_obs.F90 @@ -15,13 +15,13 @@ module write_ocean_obs_mod contains !> Open a profile file -integer function open_profile_file(name, nvar, grid_lon, grid_lat,thread,fset) +integer function open_profile_file(name, nvar, grid_lon, grid_lat, thread, fset) character(len=*), intent(in) :: name !< File name integer, intent(in), optional :: nvar !< Number of variables real, dimension(:), optional, intent(in) :: grid_lon !< Longitude [degreeE] real, dimension(:), optional, intent(in) :: grid_lat !< Latitude [degreeN] - integer, intent(in), optional :: thread !< Thread - integer, intent(in), optional :: fset !< File set + integer, optional, intent(in) :: thread !< Thread number + integer, optional, intent(in) :: fset !< File set open_profile_file=-1 end function open_profile_file @@ -29,7 +29,7 @@ end function open_profile_file !> Write a profile subroutine write_profile(unit,profile) integer, intent(in) :: unit !< File unit - type(ocean_profile_type), intent(in) :: profile !< Profile + type(ocean_profile_type), intent(in) :: profile !< Profile to write return end subroutine write_profile diff --git a/config_src/ice_solo_driver/ice_shelf_driver.F90 b/config_src/ice_solo_driver/ice_shelf_driver.F90 index b1323a5485..bd64050a6f 100644 --- a/config_src/ice_solo_driver/ice_shelf_driver.F90 +++ b/config_src/ice_solo_driver/ice_shelf_driver.F90 @@ -38,9 +38,9 @@ program Shelf_main use MOM_get_input, only : Get_MOM_Input, directories use MOM_grid, only : ocean_grid_type, MOM_grid_init, MOM_grid_end use MOM_hor_index, only : hor_index_type, hor_index_init - use MOM_io, only : MOM_io_init, file_exists, open_file, close_file + use MOM_io, only : MOM_io_init, file_exists, open_ASCII_file, close_file use MOM_io, only : check_nml_error, io_infra_init, io_infra_end - use MOM_io, only : APPEND_FILE, ASCII_FILE, READONLY_FILE, SINGLE_FILE + use MOM_io, only : APPEND_FILE, READONLY_FILE, SINGLE_FILE use MOM_open_boundary, only : ocean_OBC_type use MOM_restart, only : save_restart use MOM_string_functions,only : uppercase @@ -176,7 +176,7 @@ program Shelf_main if (file_exists('input.nml')) then ! Provide for namelist specification of the run length and calendar data. - call open_file(unit, 'input.nml', form=ASCII_FILE, action=READONLY_FILE) + call open_ASCII_file(unit, 'input.nml', action=READONLY_FILE) read(unit, ice_solo_nml, iostat=io_status) call close_file(unit) ierr = check_nml_error(io_status,'ice_solo_nml') @@ -187,15 +187,14 @@ program Shelf_main ! Read ocean_solo restart, which can override settings from the namelist. 
if (file_exists(trim(dirs%restart_input_dir)//'ice_solo.res')) then - call open_file(unit,trim(dirs%restart_input_dir)//'ice_solo.res', & - form=ASCII_FILE,action=READONLY_FILE) + call open_ASCII_file(unit, trim(dirs%restart_input_dir)//'ice_solo.res', action=READONLY_FILE) read(unit,*) calendar_type read(unit,*) date_init read(unit,*) date call close_file(unit) else calendar = uppercase(calendar) - if (calendar(1:6) == 'JULIAN') then ; calendar_type = JULIAN + if (calendar(1:6) == 'JULIAN') then ; calendar_type = JULIAN elseif (calendar(1:9) == 'GREGORIAN') then ; calendar_type = GREGORIAN elseif (calendar(1:6) == 'NOLEAP') then ; calendar_type = NOLEAP elseif (calendar(1:10)=='THIRTY_DAY') then ; calendar_type = THIRTY_DAY_MONTHS @@ -341,15 +340,14 @@ program Shelf_main call diag_mediator_close_registration(diag) ! Write out a time stamp file. - if (calendar_type /= NO_CALENDAR) then - call open_file(unit, 'time_stamp.out', form=ASCII_FILE, action=APPEND_FILE, & - threading=SINGLE_FILE) + if (is_root_pe() .and. (calendar_type /= NO_CALENDAR)) then + call open_ASCII_file(unit, 'time_stamp.out', action=APPEND_FILE) call get_date(Time, date(1), date(2), date(3), date(4), date(5), date(6)) month = month_name(date(2)) - if (is_root_pe()) write(unit,'(6i4,2x,a3)') date, month(1:3) + write(unit,'(6i4,2x,a3)') date, month(1:3) call get_date(Time_end, date(1), date(2), date(3), date(4), date(5), date(6)) month = month_name(date(2)) - if (is_root_pe()) write(unit,'(6i4,2x,a3)') date, month(1:3) + write(unit,'(6i4,2x,a3)') date, month(1:3) call close_file(unit) endif @@ -428,19 +426,19 @@ program Shelf_main dirs%restart_output_dir) ! Write ice shelf solo restart file. - call open_file(unit, trim(dirs%restart_output_dir)//'shelf.res', nohdrs=.true.) if (is_root_pe())then - write(unit, '(i6,8x,a)') calendar_type, & - '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)' - - call get_date(Start_time, yr, mon, day, hr, mins, sec) - write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & - 'Model start time: year, month, day, hour, minute, second' - call get_date(Time, yr, mon, day, hr, mins, sec) - write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & - 'Current model time: year, month, day, hour, minute, second' + call open_ASCII_file(unit, trim(dirs%restart_output_dir)//'shelf.res') + write(unit, '(i6,8x,a)') calendar_type, & + '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)' + + call get_date(Start_time, yr, mon, day, hr, mins, sec) + write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & + 'Model start time: year, month, day, hour, minute, second' + call get_date(Time, yr, mon, day, hr, mins, sec) + write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & + 'Current model time: year, month, day, hour, minute, second' + call close_file(unit) endif - call close_file(unit) endif if (is_root_pe()) then diff --git a/config_src/solo_driver/MESO_surface_forcing.F90 b/config_src/solo_driver/MESO_surface_forcing.F90 index e2f0694b6c..679f147797 100644 --- a/config_src/solo_driver/MESO_surface_forcing.F90 +++ b/config_src/solo_driver/MESO_surface_forcing.F90 @@ -274,7 +274,7 @@ subroutine MESO_surface_forcing_init(Time, G, US, param_file, diag, CS) call get_param(param_file, mdl, "INPUTDIR", CS%inputdir, default=".") CS%inputdir = slasher(CS%inputdir) - endif + endif end subroutine MESO_surface_forcing_init diff --git a/config_src/solo_driver/MOM_driver.F90 b/config_src/solo_driver/MOM_driver.F90 index ba52d9c02a..9c222bb0bb 100644 --- 
a/config_src/solo_driver/MOM_driver.F90 +++ b/config_src/solo_driver/MOM_driver.F90 @@ -32,49 +32,43 @@ program MOM_main use MOM, only : extract_surface_state, finish_MOM_initialization use MOM, only : get_MOM_state_elements, MOM_state_is_synchronized use MOM, only : step_offline - use MOM_domains, only : MOM_infra_init, MOM_infra_end + use MOM_coms, only : Set_PElist + use MOM_domains, only : MOM_infra_init, MOM_infra_end, set_MOM_thread_affinity + use MOM_ensemble_manager, only : ensemble_manager_init, get_ensemble_size + use MOM_ensemble_manager, only : ensemble_pelist_setup use MOM_error_handler, only : MOM_error, MOM_mesg, WARNING, FATAL, is_root_pe use MOM_error_handler, only : callTree_enter, callTree_leave, callTree_waypoint use MOM_file_parser, only : read_param, get_param, log_param, log_version, param_file_type use MOM_file_parser, only : close_param_file use MOM_forcing_type, only : forcing, mech_forcing, forcing_diagnostics use MOM_forcing_type, only : mech_forcing_diags, MOM_forcing_chksum, MOM_mech_forcing_chksum - use MOM_get_input, only : directories + use MOM_get_input, only : get_MOM_input, directories use MOM_grid, only : ocean_grid_type - use MOM_io, only : file_exists, open_file, close_file + use MOM_ice_shelf, only : initialize_ice_shelf, ice_shelf_end, ice_shelf_CS + use MOM_ice_shelf, only : shelf_calc_flux, add_shelf_forces, ice_shelf_save_restart + use MOM_ice_shelf, only : initialize_ice_shelf_fluxes, initialize_ice_shelf_forces + use MOM_interpolate, only : time_interp_external_init + use MOM_io, only : file_exists, open_ASCII_file, close_file use MOM_io, only : check_nml_error, io_infra_init, io_infra_end - use MOM_io, only : APPEND_FILE, ASCII_FILE, READONLY_FILE, SINGLE_FILE + use MOM_io, only : APPEND_FILE, READONLY_FILE use MOM_restart, only : MOM_restart_CS, save_restart use MOM_string_functions,only : uppercase use MOM_surface_forcing, only : set_forcing, forcing_save_restart use MOM_surface_forcing, only : surface_forcing_init, surface_forcing_CS - use MOM_time_manager, only : time_type, set_date, get_date - use MOM_time_manager, only : real_to_time, time_type_to_real + use MOM_time_manager, only : time_type, set_date, get_date, real_to_time, time_type_to_real use MOM_time_manager, only : operator(+), operator(-), operator(*), operator(/) use MOM_time_manager, only : operator(>), operator(<), operator(>=) use MOM_time_manager, only : increment_date, set_calendar_type, month_name - use MOM_time_manager, only : JULIAN, GREGORIAN, NOLEAP, THIRTY_DAY_MONTHS - use MOM_time_manager, only : NO_CALENDAR + use MOM_time_manager, only : JULIAN, GREGORIAN, NOLEAP, THIRTY_DAY_MONTHS, NO_CALENDAR use MOM_tracer_flow_control, only : tracer_flow_control_CS use MOM_unit_scaling, only : unit_scale_type use MOM_variables, only : surface use MOM_verticalGrid, only : verticalGrid_type + use MOM_wave_interface, only : wave_parameters_CS, MOM_wave_interface_init + use MOM_wave_interface, only : MOM_wave_interface_init_lite, Update_Surface_Waves use MOM_write_cputime, only : write_cputime, MOM_write_cputime_init use MOM_write_cputime, only : write_cputime_start_clock, write_cputime_CS - use ensemble_manager_mod, only : ensemble_manager_init, get_ensemble_size - use ensemble_manager_mod, only : ensemble_pelist_setup - use mpp_mod, only : set_current_pelist => mpp_set_current_pelist - use time_interp_external_mod, only : time_interp_external_init - use fms_affinity_mod, only : fms_affinity_init, fms_affinity_set,fms_affinity_get - - use MOM_ice_shelf, only : 
initialize_ice_shelf, ice_shelf_end, ice_shelf_CS - use MOM_ice_shelf, only : shelf_calc_flux, add_shelf_forces, ice_shelf_save_restart -! , add_shelf_flux_forcing, add_shelf_flux_IOB - - use MOM_wave_interface, only: wave_parameters_CS, MOM_wave_interface_init - use MOM_wave_interface, only: MOM_wave_interface_init_lite, Update_Surface_Waves - implicit none #include @@ -84,18 +78,17 @@ program MOM_main ! A structure containing pointers to the thermodynamic forcing fields ! at the ocean surface. type(forcing) :: fluxes - ! A structure containing pointers to the ocean surface state fields. type(surface) :: sfc_state ! A pointer to a structure containing metrics and related information. - type(ocean_grid_type), pointer :: grid - type(verticalGrid_type), pointer :: GV + type(ocean_grid_type), pointer :: grid => NULL() + type(verticalGrid_type), pointer :: GV => NULL() ! A pointer to a structure containing dimensional unit scaling factors. - type(unit_scale_type), pointer :: US + type(unit_scale_type), pointer :: US => NULL() ! If .true., use the ice shelf model for part of the domain. - logical :: use_ice_shelf + logical :: use_ice_shelf = .false. ! If .true., use surface wave coupling logical :: use_waves = .false. @@ -198,8 +191,8 @@ program MOM_main type(MOM_restart_CS), pointer :: & restart_CSp => NULL() !< A pointer to the restart control structure !! that will be used for MOM restart files. - type(diag_ctrl), pointer :: & - diag => NULL() !< A pointer to the diagnostic regulatory structure + type(diag_ctrl), pointer :: & + diag => NULL() !< A pointer to the diagnostic regulatory structure !----------------------------------------------------------------------- character(len=4), parameter :: vers_num = 'v2.0' @@ -219,6 +212,8 @@ program MOM_main call MOM_infra_init() ; call io_infra_init() + !allocate(forces,fluxes,sfc_state) + ! Initialize the ensemble manager. If there are no settings for ensemble_size ! in input.nml(ensemble.nml), these should not do anything. In coupled ! configurations, this all occurs in the external driver. @@ -228,7 +223,7 @@ program MOM_main allocate(ocean_pelist(nPEs_per)) call ensemble_pelist_setup(.true., 0, nPEs_per, 0, 0, atm_pelist, ocean_pelist, & land_pelist, ice_pelist) - call set_current_pelist(ocean_pelist) + call Set_PElist(ocean_pelist) deallocate(ocean_pelist) endif @@ -243,7 +238,7 @@ program MOM_main if (file_exists('input.nml')) then ! Provide for namelist specification of the run length and calendar data. - call open_file(unit, 'input.nml', form=ASCII_FILE, action=READONLY_FILE) + call open_ASCII_file(unit, 'input.nml', action=READONLY_FILE) read(unit, ocean_solo_nml, iostat=io_status) call close_file(unit) ierr = check_nml_error(io_status,'ocean_solo_nml') @@ -252,25 +247,19 @@ program MOM_main endif endif -!$ call fms_affinity_init -!$ call fms_affinity_set('OCEAN', use_hyper_thread, ocean_nthreads) -!$ call omp_set_num_threads(ocean_nthreads) -!$OMP PARALLEL -!$ write(6,*) "ocean_solo OMPthreading ", fms_affinity_get(), omp_get_thread_num(), omp_get_num_threads() -!$ call flush(6) -!$OMP END PARALLEL + ! This call sets the number and affinity of threads with openMP. + !$ call set_MOM_thread_affinity(ocean_nthreads, use_hyper_thread) ! Read ocean_solo restart, which can override settings from the namelist. 
if (file_exists(trim(dirs%restart_input_dir)//'ocean_solo.res')) then - call open_file(unit,trim(dirs%restart_input_dir)//'ocean_solo.res', & - form=ASCII_FILE,action=READONLY_FILE) + call open_ASCII_file(unit, trim(dirs%restart_input_dir)//'ocean_solo.res', action=READONLY_FILE) read(unit,*) calendar_type read(unit,*) date_init read(unit,*) date call close_file(unit) else calendar = uppercase(calendar) - if (calendar(1:6) == 'JULIAN') then ; calendar_type = JULIAN + if (calendar(1:6) == 'JULIAN') then ; calendar_type = JULIAN elseif (calendar(1:9) == 'GREGORIAN') then ; calendar_type = GREGORIAN elseif (calendar(1:6) == 'NOLEAP') then ; calendar_type = NOLEAP elseif (calendar(1:10)=='THIRTY_DAY') then ; calendar_type = THIRTY_DAY_MONTHS @@ -285,32 +274,47 @@ program MOM_main if (sum(date_init) > 0) then - Start_time = set_date(date_init(1),date_init(2), date_init(3), & - date_init(4),date_init(5),date_init(6)) + Start_time = set_date(date_init(1), date_init(2), date_init(3), & + date_init(4), date_init(5), date_init(6)) else Start_time = real_to_time(0.0) endif - call time_interp_external_init + call time_interp_external_init() if (sum(date) >= 0) then ! In this case, the segment starts at a time fixed by ocean_solo.res - segment_start_time = set_date(date(1),date(2),date(3),date(4),date(5),date(6)) + segment_start_time = set_date(date(1), date(2), date(3), date(4), date(5), date(6)) Time = segment_start_time - call initialize_MOM(Time, Start_time, param_file, dirs, MOM_CSp, restart_CSp, & - segment_start_time, offline_tracer_mode=offline_tracer_mode, & - diag_ptr=diag, tracer_flow_CSp=tracer_flow_CSp) else ! In this case, the segment starts at a time read from the MOM restart file ! or left as Start_time by MOM_initialize. Time = Start_time + endif + + ! Call initialize MOM with an optional Ice Shelf CS which, if present triggers + ! initialization of ice shelf parameters and arrays. + if (sum(date) >= 0) then + call initialize_MOM(Time, Start_time, param_file, dirs, MOM_CSp, restart_CSp, & + segment_start_time, offline_tracer_mode=offline_tracer_mode, & + diag_ptr=diag, tracer_flow_CSp=tracer_flow_CSp, ice_shelf_CSp=ice_shelf_CSp) + else call initialize_MOM(Time, Start_time, param_file, dirs, MOM_CSp, restart_CSp, & offline_tracer_mode=offline_tracer_mode, diag_ptr=diag, & - tracer_flow_CSp=tracer_flow_CSp) + tracer_flow_CSp=tracer_flow_CSp, ice_shelf_CSp=ice_shelf_CSp) endif call get_MOM_state_elements(MOM_CSp, G=grid, GV=GV, US=US, C_p_scaled=fluxes%C_p) Master_Time = Time + use_ice_shelf = associated(ice_shelf_CSp) + + if (use_ice_shelf) then + ! These arrays are not initialized in most solo cases, but are needed + ! when using an ice shelf + call initialize_ice_shelf_fluxes(ice_shelf_CSp, grid, US, fluxes) + call initialize_ice_shelf_forces(ice_shelf_CSp, grid, US, forces) + endif + call callTree_waypoint("done initialize_MOM") @@ -320,16 +324,8 @@ program MOM_main surface_forcing_CSp, tracer_flow_CSp) call callTree_waypoint("done surface_forcing_init") - call get_param(param_file, mod_name, "ICE_SHELF", use_ice_shelf, & - "If true, enables the ice shelf model.", default=.false.) - if (use_ice_shelf) then - ! These arrays are not initialized in most solo cases, but are needed - ! when using an ice shelf - call initialize_ice_shelf(param_file, grid, Time, ice_shelf_CSp, & - diag, forces, fluxes) - endif - call get_param(param_file,mod_name,"USE_WAVES",Use_Waves,& + call get_param(param_file,mod_name, "USE_WAVES", Use_Waves, & "If true, enables surface wave modules.",default=.false.) 
if (use_waves) then call MOM_wave_interface_init(Time, grid, GV, US, param_file, Waves_CSp, diag) @@ -435,15 +431,14 @@ program MOM_main call diag_mediator_close_registration(diag) ! Write out a time stamp file. - if (calendar_type /= NO_CALENDAR) then - call open_file(unit, 'time_stamp.out', form=ASCII_FILE, action=APPEND_FILE, & - threading=SINGLE_FILE) + if (is_root_pe() .and. (calendar_type /= NO_CALENDAR)) then + call open_ASCII_file(unit, 'time_stamp.out', action=APPEND_FILE) call get_date(Time, date(1), date(2), date(3), date(4), date(5), date(6)) month = month_name(date(2)) - if (is_root_pe()) write(unit,'(6i4,2x,a3)') date, month(1:3) + write(unit,'(6i4,2x,a3)') date, month(1:3) call get_date(Time_end, date(1), date(2), date(3), date(4), date(5), date(6)) month = month_name(date(2)) - if (is_root_pe()) write(unit,'(6i4,2x,a3)') date, month(1:3) + write(unit,'(6i4,2x,a3)') date, month(1:3) call close_file(unit) endif @@ -482,7 +477,7 @@ program MOM_main if (use_ice_shelf) then call shelf_calc_flux(sfc_state, fluxes, Time, dt_forcing, ice_shelf_CSp) - call add_shelf_forces(grid, US, Ice_shelf_CSp, forces) + call add_shelf_forces(grid, US, Ice_shelf_CSp, forces, external_call=.true.) endif fluxes%fluxes_used = .false. fluxes%dt_buoy_accum = US%s_to_T*dt_forcing @@ -621,19 +616,19 @@ program MOM_main if (use_ice_shelf) call ice_shelf_save_restart(ice_shelf_CSp, Time, & dirs%restart_output_dir) ! Write ocean solo restart file. - call open_file(unit, trim(dirs%restart_output_dir)//'ocean_solo.res', nohdrs=.true.) - if (is_root_pe())then - write(unit, '(i6,8x,a)') calendar_type, & - '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)' - - call get_date(Start_time, yr, mon, day, hr, mins, sec) - write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & - 'Model start time: year, month, day, hour, minute, second' - call get_date(Time, yr, mon, day, hr, mins, sec) - write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & - 'Current model time: year, month, day, hour, minute, second' + if (is_root_pe()) then + call open_ASCII_file(unit, trim(dirs%restart_output_dir)//'ocean_solo.res') + write(unit, '(i6,8x,a)') calendar_type, & + '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)' + + call get_date(Start_time, yr, mon, day, hr, mins, sec) + write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & + 'Model start time: year, month, day, hour, minute, second' + call get_date(Time, yr, mon, day, hr, mins, sec) + write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & + 'Current model time: year, month, day, hour, minute, second' + call close_file(unit) endif - call close_file(unit) endif if (is_root_pe()) then @@ -651,6 +646,7 @@ program MOM_main endif call callTree_waypoint("End MOM_main") + if (use_ice_shelf) call ice_shelf_end(ice_shelf_CSp) call diag_mediator_end(Time, diag, end_diag_manager=.true.) if (cpu_steps > 0) call write_cputime(Time, ns-1, write_CPU_CSp, call_end=.true.) call cpu_clock_end(termClock) @@ -658,6 +654,5 @@ program MOM_main call io_infra_end ; call MOM_infra_end call MOM_end(MOM_CSp) - if (use_ice_shelf) call ice_shelf_end(ice_shelf_CSp) end program MOM_main diff --git a/config_src/solo_driver/MOM_surface_forcing.F90 b/config_src/solo_driver/MOM_surface_forcing.F90 index 3d8b398516..85c363b897 100644 --- a/config_src/solo_driver/MOM_surface_forcing.F90 +++ b/config_src/solo_driver/MOM_surface_forcing.F90 @@ -1,5 +1,5 @@ !> Functions that calculate the surface wind stresses and fluxes of buoyancy -!! 
or temperature/salinity andfresh water, in ocean-only (solo) mode. +!! or temperature/salinity and fresh water, in ocean-only (solo) mode. !! !! These functions are called every time step, even if the wind stresses !! or buoyancy fluxes are constant in time - in that case these routines @@ -12,6 +12,7 @@ module MOM_surface_forcing use MOM_constants, only : hlv, hlf use MOM_cpu_clock, only : cpu_clock_id, cpu_clock_begin, cpu_clock_end use MOM_cpu_clock, only : CLOCK_MODULE +use MOM_data_override, only : data_override_init, data_override use MOM_diag_mediator, only : post_data, query_averaging_enabled use MOM_diag_mediator, only : diag_ctrl, safe_alloc_ptr use MOM_domains, only : pass_var, pass_vector, AGRID, To_South, To_West, To_All @@ -54,7 +55,6 @@ module MOM_surface_forcing use BFB_surface_forcing, only : BFB_surface_forcing_init, BFB_surface_forcing_CS use dumbbell_surface_forcing, only : dumbbell_surface_forcing_init, dumbbell_surface_forcing_CS use dumbbell_surface_forcing, only : dumbbell_buoyancy_forcing -use data_override_mod, only : data_override_init, data_override implicit none ; private @@ -151,7 +151,7 @@ module MOM_surface_forcing character(len=200) :: runoff_file = '' !< The file from which the runoff is read character(len=200) :: longwaveup_file = '' !< The file from which the upward longwave heat flux is read - character(len=200) :: shortwaveup_file = '' !< The file from which the upward shorwave heat flux is read + character(len=200) :: shortwaveup_file = '' !< The file from which the upward shortwave heat flux is read character(len=200) :: SSTrestore_file = '' !< The file from which to read the sea surface !! temperature to restore toward @@ -161,7 +161,7 @@ module MOM_surface_forcing character(len=80) :: stress_x_var = '' !< X-windstress variable name in the input file character(len=80) :: stress_y_var = '' !< Y-windstress variable name in the input file character(len=80) :: ustar_var = '' !< ustar variable name in the input file - character(len=80) :: LW_var = '' !< lonngwave heat flux variable name in the input file + character(len=80) :: LW_var = '' !< longwave heat flux variable name in the input file character(len=80) :: SW_var = '' !< shortwave heat flux variable name in the input file character(len=80) :: latent_var = '' !< latent heat flux variable name in the input file character(len=80) :: sens_var = '' !< sensible heat flux variable name in the input file @@ -170,7 +170,7 @@ module MOM_surface_forcing character(len=80) :: snow_var = '' !< snowfall variable name in the input file character(len=80) :: lrunoff_var = '' !< liquid runoff variable name in the input file character(len=80) :: frunoff_var = '' !< frozen runoff variable name in the input file - character(len=80) :: SST_restore_var = '' !< target sea surface temeperature variable name in the input file + character(len=80) :: SST_restore_var = '' !< target sea surface temperature variable name in the input file character(len=80) :: SSS_restore_var = '' !< target sea surface salinity variable name in the input file ! These variables give the number of time levels in the various forcing files. 
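The hunks that follow rework wind_forcing_by_data_override and buoyancy_forcing_from_data_override to use the MOM_data_override wrapper rather than FMS data_override_mod: the wrapper is passed the MOM domain directly and an optional scale factor, so the compute-domain index arguments and the hand-written unit-conversion loops are no longer needed. Below is a minimal before/after sketch using names taken from this diff; it is illustrative only, not part of the patch, and it assumes the declarations already present in those subroutines (fluxes, G, US, day, loop bounds).

    ! Old FMS-style pattern: pass compute-domain indices, then rescale by hand.
    is_in = G%isc - G%isd + 1 ; ie_in = G%iec - G%isd + 1
    js_in = G%jsc - G%jsd + 1 ; je_in = G%jec - G%jsd + 1
    call data_override('OCN', 'lw', fluxes%lw(:,:), day, &
                       is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in)
    do j=js,je ; do i=is,ie
      fluxes%lw(i,j) = fluxes%lw(i,j) * US%W_m2_to_QRZ_T
    enddo ; enddo

    ! New pattern: pass the MOM domain and fold the unit conversion (and any sign
    ! convention, e.g. evaporation positive into the ocean) into the scale argument.
    call data_override(G%Domain, 'lw', fluxes%lw, day, scale=US%W_m2_to_QRZ_T)
    call data_override(G%Domain, 'evap', fluxes%evap, day, scale=-US%kg_m2s_to_RZ_T)
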
@@ -228,7 +228,7 @@ subroutine set_forcing(sfc_state, forces, fluxes, day_start, day_interval, G, US type(time_type), intent(in) :: day_interval !< Length of time over which these fluxes applied type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: dt ! length of time over which fluxes applied [s] @@ -243,7 +243,7 @@ subroutine set_forcing(sfc_state, forces, fluxes, day_start, day_interval, G, US dt = time_type_to_real(day_interval) if (CS%first_call_set_forcing) then - ! Allocate memory for the mechanical and thermodyanmic forcing fields. + ! Allocate memory for the mechanical and thermodynamic forcing fields. call allocate_mech_forcing(G, forces, stress=.true., ustar=.true., press=.true.) call allocate_forcing_type(G, fluxes, ustar=.true., fix_accum_bug=CS%fix_ustar_gustless_bug) @@ -376,7 +376,7 @@ subroutine wind_forcing_const(sfc_state, forces, tau_x0, tau_y0, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: Pa_conversion ! A unit conversion factor from Pa to the internal units [R Z L T-2 Pa-1 ~> 1] @@ -421,7 +421,7 @@ subroutine wind_forcing_2gyre(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: PI @@ -455,7 +455,7 @@ subroutine wind_forcing_1gyre(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: PI @@ -488,7 +488,7 @@ subroutine wind_forcing_gyres(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: PI, y, I_rho @@ -541,7 +541,7 @@ subroutine Neverworld_wind_forcing(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< Time used for determining the fluxes. 
type(ocean_grid_type), intent(inout) :: G !< Grid structure. type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables integer :: i, j, is, ie, js, je, Isq, Ieq, Jsq, Jeq @@ -606,7 +606,7 @@ subroutine scurve_wind_forcing(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< Time used for determining the fluxes. type(ocean_grid_type), intent(inout) :: G !< Grid structure. type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables integer :: i, j, kseg @@ -671,16 +671,16 @@ subroutine wind_forcing_from_file(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables character(len=200) :: filename ! The name of the input file. - real :: temp_x(SZI_(G),SZJ_(G)) ! Pseudo-zonal and psuedo-meridional + real :: temp_x(SZI_(G),SZJ_(G)) ! Pseudo-zonal and pseudo-meridional real :: temp_y(SZI_(G),SZJ_(G)) ! wind stresses at h-points [R L Z T-1 ~> Pa]. real :: Pa_conversion ! A unit conversion factor from Pa to the internal wind stress ! units [R Z L T-2 Pa-1 ~> 1] integer :: time_lev_daily ! The time levels to read for fields with - integer :: time_lev_monthly ! daily and montly cycles. + integer :: time_lev_monthly ! daily and monthly cycles. integer :: time_lev ! The time level that is used for a field. integer :: days, seconds integer :: i, j, is, ie, js, je, Isq, Ieq, Jsq, Jeq @@ -787,13 +787,13 @@ subroutine wind_forcing_from_file(sfc_state, forces, day, G, US, CS) call pass_vector(forces%taux, forces%tauy, G%Domain, To_All) if (.not.read_Ustar) then if (CS%read_gust_2d) then - do j=js, je ; do i=is, ie + do j=js,je ; do i=is,ie forces%ustar(i,j) = sqrt((CS%gust(i,j) + & sqrt(0.5*((forces%tauy(i,j-1)**2 + forces%tauy(i,j)**2) + & (forces%taux(i-1,j)**2 + forces%taux(i,j)**2))) ) * US%L_to_Z / CS%Rho0 ) enddo ; enddo else - do j=js, je ; do i=is, ie + do j=js,je ; do i=is,ie forces%ustar(i,j) = sqrt(US%L_to_Z * ( (CS%gust_const/CS%Rho0) + & sqrt(0.5*((forces%tauy(i,j-1)**2 + forces%tauy(i,j)**2) + & (forces%taux(i-1,j)**2 + forces%taux(i,j)**2)))/CS%Rho0)) @@ -826,68 +826,58 @@ subroutine wind_forcing_by_data_override(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables - real :: temp_x(SZI_(G),SZJ_(G)) ! Pseudo-zonal and psuedo-meridional - real :: temp_y(SZI_(G),SZJ_(G)) ! 
wind stresses at h-points [Pa]. - real :: temp_ustar(SZI_(G),SZJ_(G)) ! ustar [m s-1] (not rescaled). + real :: temp_x(SZI_(G),SZJ_(G)) ! Pseudo-zonal wind stresses at h-points [R Z L T-2 ~> Pa]. + real :: temp_y(SZI_(G),SZJ_(G)) ! Psuedo-meridional wind stresses at h-points [R Z L T-2 ~> Pa]. real :: Pa_conversion ! A unit conversion factor from Pa to the internal units [R Z L T-2 Pa-1 ~> 1] - integer :: i, j, is_in, ie_in, js_in, je_in - logical :: read_uStar + integer :: i, j call callTree_enter("wind_forcing_by_data_override, MOM_surface_forcing.F90") if (.not.CS%dataOverrideIsInitialized) then call allocate_mech_forcing(G, forces, stress=.true., ustar=.true., press=.true.) - call data_override_init(Ocean_domain_in=G%Domain%mpp_domain) + call data_override_init(G%Domain) CS%dataOverrideIsInitialized = .True. endif - is_in = G%isc - G%isd + 1 ; ie_in = G%iec - G%isd + 1 - js_in = G%jsc - G%jsd + 1 ; je_in = G%jec - G%jsd + 1 Pa_conversion = US%kg_m3_to_R*US%m_s_to_L_T**2*US%L_to_Z temp_x(:,:) = 0.0 ; temp_y(:,:) = 0.0 - call data_override('OCN', 'taux', temp_x, day, is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - call data_override('OCN', 'tauy', temp_y, day, is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) + ! CS%wind_scale is ignored here because it is not set in this mode. + call data_override(G%Domain, 'taux', temp_x, day, scale=Pa_conversion) + call data_override(G%Domain, 'tauy', temp_y, day, scale=Pa_conversion) call pass_vector(temp_x, temp_y, G%Domain, To_All, AGRID) - ! Ignore CS%wind_scale when using data_override ????? do j=G%jsc,G%jec ; do I=G%isc-1,G%IecB - forces%taux(I,j) = Pa_conversion * 0.5 * (temp_x(i,j) + temp_x(i+1,j)) + forces%taux(I,j) = 0.5 * (temp_x(i,j) + temp_x(i+1,j)) enddo ; enddo do J=G%jsc-1,G%JecB ; do i=G%isc,G%iec - forces%tauy(i,J) = Pa_conversion * 0.5 * (temp_y(i,j) + temp_y(i,j+1)) + forces%tauy(i,J) = 0.5 * (temp_y(i,j) + temp_y(i,j+1)) enddo ; enddo - read_Ustar = (len_trim(CS%ustar_var) > 0) ! Need better control higher up ???? - if (read_Ustar) then - do j=G%jsc,G%jec ; do i=G%isc,G%iec ; temp_ustar(i,j) = US%Z_to_m*US%s_to_T*forces%ustar(i,j) ; enddo ; enddo - call data_override('OCN', 'ustar', temp_ustar, day, is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - do j=G%jsc,G%jec ; do i=G%isc,G%iec ; forces%ustar(i,j) = US%m_to_Z*US%T_to_s*temp_ustar(i,j) ; enddo ; enddo + if (CS%read_gust_2d) then + call data_override(G%Domain, 'gust', CS%gust, day, scale=Pa_conversion) + do j=G%jsc,G%jec ; do i=G%isc,G%iec + forces%ustar(i,j) = sqrt((sqrt(temp_x(i,j)**2 + temp_y(i,j)**2) + & + CS%gust(i,j)) * US%L_to_Z / CS%Rho0) + enddo ; enddo else - if (CS%read_gust_2d) then - call data_override('OCN', 'gust', CS%gust, day, is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - do j=G%jsc,G%jec ; do i=G%isc,G%iec - forces%ustar(i,j) = sqrt((Pa_conversion * sqrt(temp_x(i,j)*temp_x(i,j) + & - temp_y(i,j)*temp_y(i,j)) + CS%gust(i,j)) * US%L_to_Z / CS%Rho0) - enddo ; enddo - else - do j=G%jsc,G%jec ; do i=G%isc,G%iec - forces%ustar(i,j) = sqrt(US%L_to_Z * (Pa_conversion*sqrt(temp_x(i,j)*temp_x(i,j) + & - temp_y(i,j)*temp_y(i,j))/CS%Rho0 + CS%gust_const/CS%Rho0 )) - enddo ; enddo - endif + do j=G%jsc,G%jec ; do i=G%isc,G%iec + forces%ustar(i,j) = sqrt(US%L_to_Z * (sqrt(temp_x(i,j)**2 + temp_y(i,j)**2)/CS%Rho0 + & + CS%gust_const/CS%Rho0)) + enddo ; enddo endif + ! Give the data override the option to modify the newly calculated forces%ustar. 
+ call data_override(G%Domain, 'ustar', forces%ustar, day, scale=US%m_to_Z*US%T_to_s) call pass_vector(forces%taux, forces%tauy, G%Domain, To_All) -! call pass_var(forces%ustar, G%Domain, To_All) Not needed ????? call callTree_leave("wind_forcing_by_data_override") end subroutine wind_forcing_by_data_override -!> Specifies zero surface bouyancy fluxes from input files. +!> Specifies zero surface buoyancy fluxes from input files. subroutine buoyancy_forcing_from_files(sfc_state, fluxes, day, dt, G, US, CS) type(surface), intent(inout) :: sfc_state !< A structure containing fields that !! describe the surface state of the ocean. @@ -897,7 +887,7 @@ subroutine buoyancy_forcing_from_files(sfc_state, fluxes, day, dt, G, US, CS) !! the fluxes apply [s] type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real, dimension(SZI_(G),SZJ_(G)) :: & @@ -1165,7 +1155,7 @@ subroutine buoyancy_forcing_from_files(sfc_state, fluxes, day, dt, G, US, CS) call callTree_leave("buoyancy_forcing_from_files") end subroutine buoyancy_forcing_from_files -!> Specifies zero surface bouyancy fluxes from data over-ride. +!> Specifies zero surface buoyancy fluxes from data over-ride. subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US, CS) type(surface), intent(inout) :: sfc_state !< A structure containing fields that !! describe the surface state of the ocean. @@ -1175,7 +1165,7 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US !! the fluxes apply [s] type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real, dimension(SZI_(G),SZJ_(G)) :: & @@ -1190,14 +1180,7 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US real :: kg_m2_s_conversion ! A combination of unit conversion factors for rescaling ! mass fluxes [R Z s m2 kg-1 T-1 ~> 1]. real :: rhoXcp ! The mean density times the heat capacity [Q R degC-1 ~> J m-3 degC-1]. - - integer :: time_lev_daily ! The time levels to read for fields with - integer :: time_lev_monthly ! daily and montly cycles. - integer :: itime_lev ! The time level that is used for a field. - - integer :: days, seconds integer :: i, j, is, ie, js, je, isd, ied, jsd, jed - integer :: is_in, ie_in, js_in, je_in call callTree_enter("buoyancy_forcing_from_data_override, MOM_surface_forcing.F90") @@ -1208,75 +1191,32 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US if (CS%use_temperature) rhoXcp = CS%Rho0 * fluxes%C_p if (.not.CS%dataOverrideIsInitialized) then - call data_override_init(Ocean_domain_in=G%Domain%mpp_domain) + call data_override_init(G%Domain) CS%dataOverrideIsInitialized = .True. 
endif - is_in = G%isc - G%isd + 1 - ie_in = G%iec - G%isd + 1 - js_in = G%jsc - G%jsd + 1 - je_in = G%jec - G%jsd + 1 + call data_override(G%Domain, 'lw', fluxes%lw, day, scale=US%W_m2_to_QRZ_T) + call data_override(G%Domain, 'sw', fluxes%sw, day, scale=US%W_m2_to_QRZ_T) - call data_override('OCN', 'lw', fluxes%lw(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=US%W_m2_to_QRZ_T - if (US%QRZ_T_to_W_m2 /= 1.0) then ; do j=js,je ; do i=is,ie - fluxes%lw(i,j) = fluxes%lw(i,j) * US%W_m2_to_QRZ_T - enddo ; enddo ; endif - call data_override('OCN', 'evap', fluxes%evap(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) + ! The normal MOM6 sign conventions are that fluxes%evap and fluxes%sens are positive into the + ! ocean but evap and sens are normally positive quantities in the files. + call data_override(G%Domain, 'evap', fluxes%evap, day, scale=-US%kg_m2s_to_RZ_T) + call data_override(G%Domain, 'sens', fluxes%sens, day, scale=-US%W_m2_to_QRZ_T) - ! note the sign convention do j=js,je ; do i=is,ie - ! The normal convention is that fluxes%evap positive into the ocean - ! but evap is normally a positive quantity in the files - ! This conversion is dangerous because it is not clear whether the data files have been read! - fluxes%evap(i,j) = -kg_m2_s_conversion*fluxes%evap(i,j) fluxes%latent(i,j) = CS%latent_heat_vapor*fluxes%evap(i,j) fluxes%latent_evap_diag(i,j) = fluxes%latent(i,j) enddo ; enddo - call data_override('OCN', 'sens', fluxes%sens(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - - ! note the sign convention - do j=js,je ; do i=is,ie - fluxes%sens(i,j) = -US%W_m2_to_QRZ_T * fluxes%sens(i,j) ! Normal convention is positive into the ocean - ! but sensible is normally a positive quantity in the files - enddo ; enddo - - call data_override('OCN', 'sw', fluxes%sw(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=US%W_m2_to_QRZ_T - if (US%QRZ_T_to_W_m2 /= 1.0) then ; do j=js,je ; do i=is,ie - fluxes%sw(i,j) = fluxes%sw(i,j) * US%W_m2_to_QRZ_T - enddo ; enddo ; endif - - call data_override('OCN', 'snow', fluxes%fprec(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=kg_m2_s_conversion - - call data_override('OCN', 'rain', fluxes%lprec(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=kg_m2_s_conversion - - call data_override('OCN', 'runoff', fluxes%lrunoff(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=kg_m2_s_conversion - - call data_override('OCN', 'calving', fluxes%frunoff(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=kg_m2_s_conversion - - if (kg_m2_s_conversion /= 1.0) then ; do j=js,je ; do i=is,ie - fluxes%lprec(i,j) = fluxes%lprec(i,j) * kg_m2_s_conversion - fluxes%fprec(i,j) = fluxes%fprec(i,j) * kg_m2_s_conversion - fluxes%lrunoff(i,j) = fluxes%lrunoff(i,j) * kg_m2_s_conversion - fluxes%frunoff(i,j) = fluxes%frunoff(i,j) * kg_m2_s_conversion - enddo ; enddo ; endif + call data_override(G%Domain, 'snow', fluxes%fprec, day, scale=kg_m2_s_conversion) + call data_override(G%Domain, 'rain', fluxes%lprec, day, scale=kg_m2_s_conversion) + call data_override(G%Domain, 'runoff', fluxes%lrunoff, day, scale=kg_m2_s_conversion) + call data_override(G%Domain, 'calving', fluxes%frunoff, day, scale=kg_m2_s_conversion) ! Read the SST and SSS fields for damping. if (CS%restorebuoy) then !#CTRL# .or. 
associated(CS%ctrl_forcing_CSp)) then - call data_override('OCN', 'SST_restore', CS%T_restore(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - - call data_override('OCN', 'SSS_restore', CS%S_restore(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - + call data_override(G%Domain, 'SST_restore', CS%T_restore, day) + call data_override(G%Domain, 'SSS_restore', CS%S_restore, day) endif ! restoring boundary fluxes @@ -1334,7 +1274,6 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US fluxes%latent_frunoff_diag(i,j) = -fluxes%frunoff(i,j)*CS%latent_heat_fusion enddo ; enddo - !#CTRL# if (associated(CS%ctrl_forcing_CSp)) then !#CTRL# do j=js,je ; do i=is,ie !#CTRL# SST_anom(i,j) = sfc_state%SST(i,j) - CS%T_Restore(i,j) @@ -1348,7 +1287,7 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US call callTree_leave("buoyancy_forcing_from_data_override") end subroutine buoyancy_forcing_from_data_override -!> This subroutine specifies zero surface bouyancy fluxes +!> This subroutine specifies zero surface buoyancy fluxes subroutine buoyancy_forcing_zero(sfc_state, fluxes, day, dt, G, CS) type(surface), intent(inout) :: sfc_state !< A structure containing fields that !! describe the surface state of the ocean. @@ -1357,7 +1296,7 @@ subroutine buoyancy_forcing_zero(sfc_state, fluxes, day, dt, G, CS) real, intent(in) :: dt !< The amount of time over which !! the fluxes apply [s] type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables integer :: i, j, is, ie, js, je @@ -1401,7 +1340,7 @@ subroutine buoyancy_forcing_const(sfc_state, fluxes, day, dt, G, US, CS) !! the fluxes apply [s] type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables integer :: i, j, is, ie, js, je @@ -1444,7 +1383,7 @@ subroutine buoyancy_forcing_linear(sfc_state, fluxes, day, dt, G, US, CS) !! the fluxes apply [s] type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: y, T_restore, S_restore @@ -1518,7 +1457,7 @@ end subroutine buoyancy_forcing_linear !> Save a restart file for the forcing fields subroutine forcing_save_restart(CS, G, Time, directory, time_stamped, & filename_suffix) - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! 
a previous surface_forcing_init call type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(time_type), intent(in) :: Time !< model time at this call; needed for mpp_write calls @@ -1526,7 +1465,7 @@ subroutine forcing_save_restart(CS, G, Time, directory, time_stamped, & logical, optional, intent(in) :: time_stamped !< If true, the restart file names !! include a unique time stamp; the default is false. character(len=*), optional, intent(in) :: filename_suffix !< optional suffix (e.g., a time-stamp) - !! to append to the restart fname + !! to append to the restart file name if (.not.associated(CS)) return if (.not.associated(CS%restart_CSp)) return @@ -1542,7 +1481,7 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, tracer_flow_C type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type type(param_file_type), intent(in) :: param_file !< A structure to parse for run-time parameters type(diag_ctrl), target, intent(inout) :: diag !< structure used to regulate diagnostic output - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call type(tracer_flow_control_CS), pointer :: tracer_flow_CSp !< Forcing for tracers? @@ -1593,9 +1532,9 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, tracer_flow_C "initialization of the model.", default=.true.) call get_param(param_file, mdl, "BUOY_CONFIG", CS%buoy_config, & - "The character string that indicates how buoyancy forcing "//& - "is specified. Valid options include (file), (zero), "//& - "(linear), (USER), (BFB) and (NONE).", default="zero") + "The character string that indicates how buoyancy forcing is specified. Valid "//& + "options include (file), (data_override), (zero), (const), (linear), (MESO), "//& + "(SCM_CVmix_tests), (BFB), (dumbbell), (USER) and (NONE).", default="zero") if (trim(CS%buoy_config) == "file") then call get_param(param_file, mdl, "ARCHAIC_OMIP_FORCING_FILE", CS%archaic_OMIP_file, & "If true, use the forcing variable decomposition from "//& @@ -1735,9 +1674,10 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, tracer_flow_C units='W/m2', scale=US%W_m2_to_QRZ_T, fail_if_missing=.true.) endif call get_param(param_file, mdl, "WIND_CONFIG", CS%wind_config, & - "The character string that indicates how wind forcing "//& - "is specified. Valid options include (file), (2gyre), "//& - "(1gyre), (gyres), (zero), and (USER).", default="zero") + "The character string that indicates how wind forcing is specified. Valid "//& + "options include (file), (data_override), (2gyre), (1gyre), (gyres), (zero), "//& + "(const), (Neverworld), (scurves), (ideal_hurr), (SCM_ideal_hurr), "//& + "(SCM_CVmix_tests) and (USER).", default="zero") if (trim(CS%wind_config) == "file") then call get_param(param_file, mdl, "WIND_FILE", CS%wind_file, & "The file in which the wind stresses are found in "//& @@ -1964,7 +1904,7 @@ end subroutine surface_forcing_init !> Deallocate memory associated with the surface forcing module subroutine surface_forcing_end(CS, fluxes) - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call type(forcing), optional, intent(inout) :: fluxes !< A structure containing thermodynamic forcing fields ! 
Arguments: CS - A pointer to the control structure returned by a previous diff --git a/config_src/solo_driver/user_surface_forcing.F90 b/config_src/solo_driver/user_surface_forcing.F90 index f5372e07d2..940bcd04b4 100644 --- a/config_src/solo_driver/user_surface_forcing.F90 +++ b/config_src/solo_driver/user_surface_forcing.F90 @@ -11,7 +11,6 @@ module user_surface_forcing use MOM_forcing_type, only : forcing, mech_forcing use MOM_forcing_type, only : allocate_forcing_type, allocate_mech_forcing use MOM_grid, only : ocean_grid_type -use MOM_io, only : file_exists, read_data use MOM_time_manager, only : time_type, operator(+), operator(/) use MOM_tracer_flow_control, only : call_tracer_set_forcing use MOM_tracer_flow_control, only : tracer_flow_control_CS @@ -89,7 +88,7 @@ subroutine USER_wind_forcing(sfc_state, forces, day, G, US, CS) ! is always positive. if (associated(forces%ustar)) then ; do j=js,je ; do i=is,ie ! This expression can be changed if desired, but need not be. - forces%ustar(i,j) = G%mask2dT(i,j) * sqrt((CS%gust_const + & + forces%ustar(i,j) = G%mask2dT(i,j) * sqrt((CS%gust_const + & sqrt(0.5*(forces%taux(I-1,j)**2 + forces%taux(I,j)**2) + & 0.5*(forces%tauy(i,J-1)**2 + forces%tauy(i,J)**2))) * (US%L_to_Z/CS%Rho0)) enddo ; enddo ; endif diff --git a/config_src/unit_drivers/MOM_sum_driver.F90 b/config_src/unit_drivers/MOM_sum_driver.F90 index 5673b201ee..7e3c6d45b4 100644 --- a/config_src/unit_drivers/MOM_sum_driver.F90 +++ b/config_src/unit_drivers/MOM_sum_driver.F90 @@ -18,8 +18,6 @@ program MOM_main use MOM_coms, only : EFP_type, operator(+), operator(-), assignment(=), EFP_to_real, real_to_EFP use MOM_cpu_clock, only : cpu_clock_id, cpu_clock_begin, cpu_clock_end use MOM_cpu_clock, only : CLOCK_COMPONENT -! use MOM_diag_mediator, only : diag_mediator_end, diag_mediator_init -! use MOM_diag_mediator, only : diag_mediator_close_registration use MOM_domains, only : MOM_domains_init, MOM_infra_init, MOM_infra_end use MOM_error_handler, only : MOM_error, MOM_mesg, WARNING, FATAL, is_root_pe use MOM_error_handler, only : MOM_set_verbosity @@ -39,11 +37,10 @@ program MOM_main type(param_file_type) :: param_file ! The structure indicating the file(s) ! containing all run-time parameters. - real :: max_depth + real :: max_depth ! The maximum ocean depth [m] integer :: verbosity integer :: num_sums - integer :: n, i, j, is, ie, js, je, nz - integer :: isd, ied, jsd, jed, IsdB, IedB, JsdB, JedB + integer :: n, i, j, is, ie, js, je, isd, ied, jsd, jed integer :: unit, io_status, ierr logical :: unit_in_use @@ -55,8 +52,8 @@ program MOM_main !----------------------------------------------------------------------- character(len=4), parameter :: vers_num = 'v2.0' -! This include declares and sets the variable "version". -#include "version_variable.h" + ! This include declares and sets the variable "version". +# include "version_variable.h" character(len=40) :: mdl = "MOM_main (MOM_sum_driver)" ! This module's name. character(len=200) :: mesg @@ -85,9 +82,8 @@ program MOM_main ! call diag_mediator_init(param_file) call MOM_grid_init(grid, param_file) - is = grid%isc ; ie = grid%iec ; js = grid%jsc ; je = grid%jec ; nz = grid%ke + is = grid%isc ; ie = grid%iec ; js = grid%jsc ; je = grid%jec isd = grid%isd ; ied = grid%ied ; jsd = grid%jsd ; jed = grid%jed - IsdB = grid%IsdB ; IedB = grid%IedB ; JsdB = grid%JsdB ; JedB = grid%JedB ! Read all relevant parameters and write them to the model log. 
call log_version(param_file, "MOM", version, "") @@ -165,27 +161,25 @@ program MOM_main contains +!> This subroutine sets up the benchmark test case topography for debugging subroutine benchmark_init_topog_local(D, G, param_file, max_depth) type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure real, dimension(SZI_(G),SZJ_(G)), intent(out) :: D !< The ocean bottom depth in m type(param_file_type), intent(in) :: param_file !< A structure to parse for run-time parameters - real, intent(in) :: max_depth !< The maximum ocean depth in m + real, intent(in) :: max_depth !< The maximum ocean depth [m] -! This subroutine sets up the benchmark test case topography real :: min_depth ! The minimum ocean depth in m. real :: PI ! 3.1415926... calculated as 4*atan(1) real :: D0 ! A constant to make the maximum ! ! basin depth MAXIMUM_DEPTH. ! real :: x, y -! This include declares and sets the variable "version". -#include "version_variable.h" - character(len=40) :: mdl = "benchmark_initialize_topography" ! This subroutine's name. + ! This include declares and sets the variable "version". +# include "version_variable.h" + character(len=40) :: mdl = "benchmark_init_topog_local" ! This subroutine's name. integer :: i, j, is, ie, js, je, isd, ied, jsd, jed is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec isd = G%isd ; ied = G%ied ; jsd = G%jsd ; jed = G%jed - call MOM_mesg(" benchmark_initialization.F90, benchmark_initialize_topography: setting topography", 5) - call log_version(param_file, mdl, version) call get_param(param_file, mdl, "MINIMUM_DEPTH", min_depth, & "The minimum depth of the ocean.", units="m", default=0.0) diff --git a/docs/.gitignore b/docs/.gitignore index de2f06d096..e8b6a0513b 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -2,8 +2,17 @@ doxygen doxygen.log APIs +MOM6.tags +details/tutorial + + # Ignore sphinx-build output _build api src xml + + +# Citation output +bib*.aux +citelist.doc* diff --git a/docs/Doxyfile_nortd b/docs/Doxyfile_nortd index 76b66b9dd3..ca18bf49ee 100644 --- a/docs/Doxyfile_nortd +++ b/docs/Doxyfile_nortd @@ -1,4 +1,4 @@ -# Doxyfile 1.8.15 +# Doxyfile 1.8.19 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. @@ -58,7 +58,7 @@ PROJECT_LOGO = # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. -OUTPUT_DIRECTORY = +OUTPUT_DIRECTORY = . # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and @@ -187,6 +187,16 @@ SHORT_NAMES = NO JAVADOC_AUTOBRIEF = NO +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus @@ -236,15 +246,45 @@ TAB_SIZE = 2 # "Side Effects:". You can put \n's in the value part of an alias to insert # newlines (in the resulting output). You can put ^^ in the value part of an # alias to insert a newline as if a physical newline was in the original file. 
- -ALIASES = +# When you need a literal { or } or , in the value part of an alias you have to +# escape them by means of a backslash (\), this can lead to conflicts with the +# commands \{ and \} for these it is advised to use the version @{ and @} or use +# a double escape (\\{ and \\}) + +# Reference: https://git.ligo.org/lscsoft/lalsuite-archive/commit/e6e2dae8a73a73979a64854bbf697b077803697a +#ALIASES = "eqref{1}= Eq. \\eqref{\1}" \ +# "figref{1}= Fig. [\ref \1]" \ +# "tableref{1}= Table [\ref \1]" \ +# "figure{4}= \anchor \1 \image html \1.png \"Fig. [\1]: \4\"" + +# This allows doxygen passthrough of \eqref to html for mathjax +# Single reference within a math block +ALIASES += eqref{1}="\latexonly\ref{\1}\endlatexonly\htmlonly \eqref{\1}\endhtmlonly\xmlonly \\eqref{\1}\endxmlonly" + +# Large math block with multiple references +# TODO: We should be able to overload functions but recursion is happening? For now, the +# second command creates a \eqref2 that is passed to sphinx for processing. This breaks +# the html generation for doxygen +# Doxygen 1.8.13 requires extra help via xmlonly +# See python sphinxcontrib-autodoc_doxygen module autodoc_doxygen/xmlutils.py +# \eqref{eq:ale-thickness-equation,ale-equations,thickness} +ALIASES += eqref{3}="\latexonly\ref{\1}\endlatexonly\htmlonly \eqref2{\2,\3}\endhtmlonly\xmlonly \\eqref4{\1}\\eqref2{\2,\3}\endxmlonly" + +# Reference: https://stackoverflow.com/questions/25290453/how-do-i-add-a-footnote-in-doxygen +# TODO: Use this simple js library to create actual footnotes in html +# Reference: https://github.com/jheftmann/footnoted +ALIASES += footnote{1}="\latexonly\footnote\{\1\}\endlatexonly\htmlonly[*]\endhtmlonly\xmlonly[*]\endxmlonly" + +# \image latex does the wrong things to support equations in captions, this recreates +# what it does and just passes through the string uninterpreted. +# The image also needs to be added to LATEX_EXTRA_FILES. +# Default 3rd argument: \includegraphics[width=\textwidth\,height=\textheight/2\,keepaspectratio=true] +ALIASES += imagelatex{3}="\latexonly\begin{DoxyImage}\3{\1}\doxyfigcaption{\2}\end{DoxyImage}\endlatexonly\xmlonly\2\endxmlonly" # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding "class=itcl::class" # will allow you to use the command class in the itcl::class meaning. -TCL_SUBST = - # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all @@ -273,17 +313,26 @@ OPTIMIZE_FOR_FORTRAN = YES OPTIMIZE_OUTPUT_VHDL = NO +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. 
The format is ext=language, where ext is a file extension, and -# language is one of the parsers supported by doxygen: IDL, Java, Javascript, -# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: -# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: -# Fortran. In the later case the parser tries to guess whether the code is fixed -# or free formatted code, this is the default for Fortran type files), VHDL. For -# instance to make doxygen treat .inc files as Fortran files (default is PHP), -# and .f files as C (default is Fortran), use: inc=Fortran f=C. +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL, +# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files). For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # @@ -294,7 +343,7 @@ EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable -# documentation. See http://daringfireball.net/projects/markdown/ for details. +# documentation. See https://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. @@ -306,7 +355,7 @@ MARKDOWN_SUPPORT = YES # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. -# Minimum value: 0, maximum value: 99, default value: 0. +# Minimum value: 0, maximum value: 99, default value: 5. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. TOC_INCLUDE_HEADINGS = 0 @@ -422,6 +471,19 @@ TYPEDEF_HIDES_STRUCT = NO LOOKUP_CACHE_SIZE = 0 +# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use +# during processing. When set to 0 doxygen will based this on the number of +# cores available in the system. You can set it explicitly to a value larger +# than 0 to get more control over the balance between CPU load and processing +# speed. At this moment only the input processing can be done using multiple +# threads. Since this is still an experimental feature the default is set to 1, +# which efficively disables parallel processing. Please report any issues you +# encounter. Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 1 + #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- @@ -442,6 +504,12 @@ EXTRACT_ALL = NO EXTRACT_PRIVATE = YES +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. 
+ +EXTRACT_PRIV_VIRTUAL = NO + # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. @@ -496,8 +564,8 @@ HIDE_UNDOC_MEMBERS = NO HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend -# (class|struct|union) declarations. If set to NO, these declarations will be -# included in the documentation. +# declarations. If set to NO, these declarations will be included in the +# documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO @@ -520,7 +588,7 @@ INTERNAL_DOCS = YES # names in lower-case letters. If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. +# (including Cygwin) and Mac users are advised to set this option to NO. # The default value is: system dependent. CASE_SENSE_NAMES = YES @@ -712,7 +780,7 @@ LAYOUT_FILE = layout.xml # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. -CITE_BIB_FILES = +CITE_BIB_FILES = ocean.bib references.bib zotero.bib #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages @@ -778,7 +846,7 @@ WARN_FORMAT = "$file:$line: $text" # messages should be written. If left blank the output is written to standard # error (stderr). -WARN_LOGFILE = doxygen.log +WARN_LOGFILE = _build/doxygen_warn_nortd_log.txt #--------------------------------------------------------------------------- # Configuration options related to the input files @@ -793,11 +861,10 @@ WARN_LOGFILE = doxygen.log INPUT = ../src \ front_page.md \ ../config_src/solo_driver \ - ../config_src/dynamic_symmetric - ../config_src/external + ../config_src/dynamic_symmetric \ + ../config_src/external \ ../config_src/coupled_driver - # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv @@ -818,28 +885,58 @@ INPUT_ENCODING = UTF-8 # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, -# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, -# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf. +# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment), +# *.doc (to be provided as doxygen C comment), *.txt (to be provided as doxygen +# C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, +# *.vhdl, *.ucf, *.qsf and *.ice. 
FILE_PATTERNS = *.c \ *.cc \ *.cxx \ *.cpp \ *.c++ \ + *.java \ + *.ii \ + *.ixx \ + *.ipp \ + *.i++ \ + *.inl \ + *.idl \ + *.ddl \ + *.odl \ *.h \ *.hh \ *.hxx \ *.hpp \ *.h++ \ + *.cs \ + *.d \ + *.php \ + *.php4 \ + *.php5 \ + *.phtml \ *.inc \ *.m \ *.markdown \ *.md \ *.mm \ *.dox \ + *.doc \ + *.txt \ + *.py \ + *.pyw \ *.f90 \ + *.f95 \ + *.f03 \ + *.f08 \ + *.f18 \ *.f \ *.for \ + *.vhd \ + *.vhdl \ + *.ucf \ + *.qsf \ + *.ice \ *.F90 # The RECURSIVE tag can be used to specify whether or not subdirectories should @@ -1096,7 +1193,7 @@ GENERATE_HTML = YES # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_OUTPUT = APIs +HTML_OUTPUT = _build/APIs # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). @@ -1211,9 +1308,9 @@ HTML_TIMESTAMP = NO # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML # documentation will contain a main index with vertical navigation menus that -# are dynamically created via Javascript. If disabled, the navigation index will +# are dynamically created via JavaScript. If disabled, the navigation index will # consists of multiple levels of tabs that are statically embedded in every HTML -# page. Disable this option to support browsers that do not have Javascript, +# page. Disable this option to support browsers that do not have JavaScript, # like the Qt help browser. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. @@ -1243,13 +1340,13 @@ HTML_INDEX_NUM_ENTRIES = 900 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development -# environment (see: https://developer.apple.com/tools/xcode/), introduced with -# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# environment (see: https://developer.apple.com/xcode/), introduced with OSX +# 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at -# startup. See https://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. @@ -1288,7 +1385,7 @@ DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop -# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output @@ -1319,7 +1416,7 @@ CHM_FILE = HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated -# (YES) or that it should be included in the master .chm file (NO). +# (YES) or that it should be included in the main .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. 
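# As an illustrative sketch only: the \eqref and \footnote aliases defined earlier in
# this Doxyfile are meant to be invoked from MOM6 Fortran doc comments, roughly as
# below. The equation and footnote text are made up for illustration; the reference
# label, page, and section names follow the example quoted in the alias comment above.
#   !> \f{align}{ \frac{\partial h}{\partial t} + \nabla \cdot ( \vec{u}\, h ) = 0
#   !! \label{eq:ale-thickness-equation} \f}
#   !! The layer thickness evolves according to
#   !! \eqref{eq:ale-thickness-equation,ale-equations,thickness}, with caveats noted
#   !! in passing\footnote{A hypothetical footnote, rendered as [*] in the HTML output.}.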
@@ -1364,7 +1461,7 @@ QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace -# (see: http://doc.qt.io/qt-4.8/qthelpproject.html#namespace). +# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1372,7 +1469,8 @@ QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual -# Folders (see: http://doc.qt.io/qt-4.8/qthelpproject.html#virtual-folders). +# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- +# folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1380,21 +1478,23 @@ QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://doc.qt.io/qt-4.8/qthelpproject.html#custom-filters). +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://doc.qt.io/qt-4.8/qthelpproject.html#custom-filters). +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: -# http://doc.qt.io/qt-4.8/qthelpproject.html#filter-attributes). +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = @@ -1478,6 +1578,17 @@ TREEVIEW_WIDTH = 250 EXT_LINKS_IN_WINDOW = NO +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = svg + # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML @@ -1498,8 +1609,14 @@ FORMULA_FONTSIZE = 10 FORMULA_TRANSPARENT = YES +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. 
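# As a hypothetical illustration of this tag: a macro file supplied here would be a
# plain text file of LaTeX building-block definitions, for example
#   \newcommand{\vect}[1]{\boldsymbol{#1}}
#   \newcommand{\pd}[2]{\frac{\partial #1}{\partial #2}}
# No such file is used in this configuration; the tag is left blank below.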
+ +FORMULA_MACROFILE = + # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see -# https://www.mathjax.org) which uses client side Javascript for the rendering +# https://www.mathjax.org) which uses client side JavaScript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want to formulas look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path @@ -1527,10 +1644,10 @@ MATHJAX_FORMAT = HTML-CSS # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from https://www.mathjax.org before deployment. -# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/. +# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2. # This tag requires that the tag USE_MATHJAX is set to YES. -MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest +MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2 # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example @@ -1569,7 +1686,7 @@ MATHJAX_CODEFILE = SEARCHENGINE = YES # When the SERVER_BASED_SEARCH tag is enabled the search engine will be -# implemented using a web server instead of a web client using Javascript. There +# implemented using a web server instead of a web client using JavaScript. There # are two flavors of web server based searching depending on the EXTERNAL_SEARCH # setting. When disabled, doxygen will generate a PHP script for searching and # an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing @@ -1664,11 +1781,24 @@ LATEX_CMD_NAME = latex # The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate # index for LaTeX. +# Note: This tag is used in the Makefile / make.bat. +# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file +# (.tex). # The default file is: makeindex. # This tag requires that the tag GENERATE_LATEX is set to YES. MAKEINDEX_CMD_NAME = makeindex +# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to +# generate index for LaTeX. In case there is no backslash (\) as first character +# it will be automatically added in the LaTeX code. +# Note: This tag is used in the generated output file (.tex). +# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat. +# The default value is: makeindex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_MAKEINDEX_CMD = makeindex + # If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX # documents. This may be useful for small projects and may help to save some # trees in general. @@ -1696,7 +1826,7 @@ PAPER_TYPE = a4 # If left blank no extra packages will be included. # This tag requires that the tag GENERATE_LATEX is set to YES. -EXTRA_PACKAGES = +EXTRA_PACKAGES =amsmath amstext # The LATEX_HEADER tag can be used to specify a personal LaTeX header for the # generated LaTeX document. The header should contain everything until the first @@ -1742,7 +1872,15 @@ LATEX_EXTRA_STYLESHEET = # markers available. # This tag requires that the tag GENERATE_LATEX is set to YES. -LATEX_EXTRA_FILES = +# Graphics that have math or equations in the caption need to be listed below. 
+LATEX_EXTRA_FILES = \ + images/cell_3d.png \ + images/Grid_metrics.png \ + images/h_PPM.png \ + images/Newton_PPM.png \ + images/PG_loop.png \ + images/shao3.png \ + images/shao4.png # If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is # prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will @@ -1753,9 +1891,11 @@ LATEX_EXTRA_FILES = PDF_HYPERLINKS = YES -# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate -# the PDF file directly from the LaTeX files. Set this option to YES, to get a -# higher quality PDF documentation. +# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as +# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX +# files. Set this option to YES, to get a higher quality PDF documentation. +# +# See also section LATEX_CMD_NAME for selecting the engine. # The default value is: YES. # This tag requires that the tag GENERATE_LATEX is set to YES. @@ -1803,6 +1943,14 @@ LATEX_BIB_STYLE = plain LATEX_TIMESTAMP = NO +# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute) +# path from which the emoji images will be read. If a relative path is entered, +# it will be relative to the LATEX_OUTPUT directory. If left blank the +# LATEX_OUTPUT directory will be used. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_EMOJI_DIRECTORY = + #--------------------------------------------------------------------------- # Configuration options related to the RTF output #--------------------------------------------------------------------------- @@ -1940,6 +2088,13 @@ XML_OUTPUT = xml XML_PROGRAMLISTING = YES +# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include +# namespace members in file scope as well, matching the HTML output. +# The default value is: NO. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_NS_MEMB_FILE_SCOPE = NO + #--------------------------------------------------------------------------- # Configuration options related to the DOCBOOK output #--------------------------------------------------------------------------- @@ -2119,7 +2274,7 @@ TAGFILES = # tag file that is based on the input files it reads. See section "Linking to # external documentation" for more information about the usage of tag files. -GENERATE_TAGFILE = +GENERATE_TAGFILE = MOM6.tags # If the ALLEXTERNALS tag is set to YES, all external class will be listed in # the class index. If set to NO, only the inherited external classes will be @@ -2142,12 +2297,6 @@ EXTERNAL_GROUPS = YES EXTERNAL_PAGES = YES -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of 'which perl'). -# The default file (with absolute path) is: /usr/bin/perl. - -PERL_PATH = /usr/bin/perl - #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- @@ -2161,15 +2310,6 @@ PERL_PATH = /usr/bin/perl CLASS_DIAGRAMS = YES -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see: -# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. 
- -MSCGEN_PATH = - # You can include diagrams made with dia in doxygen documentation. Doxygen will # then run dia to produce the diagram and insert it in the documentation. The # DIA_PATH tag allows you to specify the directory where the dia binary resides. diff --git a/docs/Doxyfile_nortd_latex b/docs/Doxyfile_nortd_latex new file mode 100644 index 0000000000..f779d0215d --- /dev/null +++ b/docs/Doxyfile_nortd_latex @@ -0,0 +1,2608 @@ +# Doxyfile 1.8.19 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "MOM6" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = . + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. 
+ +CREATE_SUBDIRS = NO + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all generated output in the proper direction. +# Possible values are: None, LTR, RTL and Context. +# The default value is: None. + +OUTPUT_TEXT_DIRECTION = None + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. 
If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = NO + +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. 
+ +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 2 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines (in the resulting output). You can put ^^ in the value part of an +# alias to insert a newline as if a physical newline was in the original file. +# When you need a literal { or } or , in the value part of an alias you have to +# escape them by means of a backslash (\), this can lead to conflicts with the +# commands \{ and \} for these it is advised to use the version @{ and @} or use +# a double escape (\\{ and \\}) + +# Reference: https://git.ligo.org/lscsoft/lalsuite-archive/commit/e6e2dae8a73a73979a64854bbf697b077803697a +#ALIASES = "eqref{1}= Eq. \\eqref{\1}" \ +# "figref{1}= Fig. [\ref \1]" \ +# "tableref{1}= Table [\ref \1]" \ +# "figure{4}= \anchor \1 \image html \1.png \"Fig. [\1]: \4\"" + +# This allows doxygen passthrough of \eqref to html for mathjax +# Single reference within a math block +ALIASES += eqref{1}="\latexonly\ref{\1}\endlatexonly\htmlonly \eqref{\1}\endhtmlonly\xmlonly \\eqref{\1}\endxmlonly" + +# Large math block with multiple references +# TODO: We should be able to overload functions but recursion is happening? For now, the +# second command creates a \eqref2 that is passed to sphinx for processing. This breaks +# the html generation for doxygen +# Doxygen 1.8.13 requires extra help via xmlonly +# See python sphinxcontrib-autodoc_doxygen module autodoc_doxygen/xmlutils.py +# \eqref{eq:ale-thickness-equation,ale-equations,thickness} +ALIASES += eqref{3}="\latexonly\ref{\1}\endlatexonly\htmlonly \eqref2{\2,\3}\endhtmlonly\xmlonly \\eqref4{\1}\\eqref2{\2,\3}\endxmlonly" + +# Reference: https://stackoverflow.com/questions/25290453/how-do-i-add-a-footnote-in-doxygen +# TODO: Use this simple js library to create actual footnotes in html +# Reference: https://github.com/jheftmann/footnoted +ALIASES += footnote{1}="\latexonly\footnote\{\1\}\endlatexonly\htmlonly[*]\endhtmlonly\xmlonly[*]\endxmlonly" + +# \image latex does the wrong things to support equations in captions, this recreates +# what it does and just passes through the string uninterpreted. +# The image also needs to be added to LATEX_EXTRA_FILES. +# Default 3rd argument: \includegraphics[width=\textwidth\,height=\textheight/2\,keepaspectratio=true] +ALIASES += imagelatex{3}="\latexonly\begin{DoxyImage}\3{\1}\doxyfigcaption{\2}\end{DoxyImage}\endlatexonly\xmlonly\2\endxmlonly" + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. 
+ +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = YES + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL, +# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files). For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See https://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 5. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +TOC_INCLUDE_HEADINGS = 0 + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. 
std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = YES + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. 
+ +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use +# during processing. When set to 0 doxygen will based this on the number of +# cores available in the system. You can set it explicitly to a value larger +# than 0 to get more control over the balance between CPU load and processing +# speed. At this moment only the input processing can be done using multiple +# threads. Since this is still an experimental feature the default is set to 1, +# which efficively disables parallel processing. Please report any issues you +# encounter. Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 1 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = NO + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = YES + +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIV_VIRTUAL = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = YES + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. 
+ +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. If set to YES, local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO, only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = YES + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = YES + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# declarations. If set to NO, these declarations will be included in the +# documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES, upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# (including Cygwin) and Mac users are advised to set this option to NO. +# The default value is: system dependent. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES, the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = NO + +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. 
+ +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. 
+# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. 
+ +LAYOUT_FILE = layout.xml + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. + +CITE_BIB_FILES = ocean.bib references.bib zotero.bib + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some parameters +# in a documented function, or documenting parameters that don't exist or using +# markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong or incomplete +# parameter documentation, but not about the absence of documentation. If +# EXTRACT_ALL is set to YES then this flag will automatically be disabled. +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). 
+ +WARN_LOGFILE = _build/doxygen_warn_nortd_latex_log.txt + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. + +INPUT = ../src \ + front_page.md \ + ../config_src/solo_driver \ + ../config_src/dynamic_symmetric \ + ../config_src/external \ + ../config_src/coupled_driver + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: https://www.gnu.org/software/libiconv/) for the list of +# possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, +# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment), +# *.doc (to be provided as doxygen C comment), *.txt (to be provided as doxygen +# C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, +# *.vhdl, *.ucf, *.qsf and *.ice. + +FILE_PATTERNS = *.c \ + *.cc \ + *.cxx \ + *.cpp \ + *.c++ \ + *.java \ + *.ii \ + *.ixx \ + *.ipp \ + *.i++ \ + *.inl \ + *.idl \ + *.ddl \ + *.odl \ + *.h \ + *.hh \ + *.hxx \ + *.hpp \ + *.h++ \ + *.cs \ + *.d \ + *.php \ + *.php4 \ + *.php5 \ + *.phtml \ + *.inc \ + *.m \ + *.markdown \ + *.md \ + *.mm \ + *.dox \ + *.doc \ + *.txt \ + *.py \ + *.pyw \ + *.f90 \ + *.f95 \ + *.f03 \ + *.f08 \ + *.f18 \ + *.f \ + *.for \ + *.vhd \ + *.vhdl \ + *.ucf \ + *.qsf \ + *.ice \ + *.F90 + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = ../src/equation_of_state/TEOS10 + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. 
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS = makedep.py \
+ Makefile \
+ INSTALL
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH = ../src
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH = images \
+ ../src
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+ +FILTER_SOURCE_FILES = NO + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. + +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = front_page.md + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# classes and enums directly into the documentation. +# The default value is: NO. + +INLINE_SOURCES = YES + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = NO + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# entity all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see https://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. 
+# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in +# which the alphabetical index list will be split. +# Minimum value: 1, maximum value: 20, default value: 5. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +COLS_IN_ALPHA_INDEX = 1 + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = _build/APIs + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. 
See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# https://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. 
For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = NO + +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via JavaScript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. Disable this option to support browsers that do not have JavaScript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_MENUS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 900 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: https://developer.apple.com/xcode/), introduced with OSX +# 10.5 (Leopard). To create a documentation set, doxygen will generate a +# Makefile in the HTML output directory. Running make will produce the docset in +# that directory and running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler (hhc.exe). If non-empty, +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the main .chm file (NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. 
Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- +# folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see Qt Help Project / Custom +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location of Qt's +# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the +# generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. 
The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine-tune the look of the index. As an example, the default style +# sheet generated by doxygen has an example that shows how to put an image at +# the root of the tree instead of the PROJECT_NAME. Since the tree basically has +# the same information as the tab index, you could consider setting +# DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = YES + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. 
These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = svg + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANSPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. +# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. + +FORMULA_MACROFILE = + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# https://www.mathjax.org) which uses client side JavaScript for the rendering +# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX +# installed or if you want to formulas look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = YES + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. See the MathJax site (see: +# http://docs.mathjax.org/en/latest/output.html) for more details. +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility), NativeMML (i.e. MathML) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from https://www.mathjax.org before deployment. +# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2 + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# This tag requires that the tag USE_MATHJAX is set to YES. 
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow, then
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/