From 3e0ec924fe5bd7f3663cd9492fe4cecb11bdc239 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Wed, 28 Aug 2019 11:13:50 -0700 Subject: [PATCH 01/14] Build system improvements (#354) * Add cice.make script, list makdep as prerequisite * cice.make can be used to easily call make with the different targets present in the Makefile (db_files, db_flags, clean), or to pass different flags to make * makdep is listed as a prerequisite of the *.d files, such that we don't need to call make makdep separately in cice.build * Update db_files and db_flags rules in Makefile * Remove $(INCS) variable from build rules The -I flag is used by Fortran compilers to locate files included via the "#include" preprocessor directive or the "include" statement, or modules referenced via the "use" statement. In our codebase we only use modules, and all modules files (*.mod) are put in the ${ICE_OBJDIR} directory, where the compilation takes place. So there is no need for all source directories (contained in the variable $(INCS), prefixed with -I) to be added to the build rules. This shortens the length of the displayed compilation lines. 
* Update documentation * add cice.build argument capabilities * update documentation * update documentation * update documentation * cleanup of documentation and implementation * update implementation based on reviews * update documentation * update documentation * update documentation * update documentation * update Makefile and documentation --- cice.setup | 2 +- configuration/scripts/Makefile | 79 ++++++++----- configuration/scripts/cice.build | 128 +++++++++++++++++----- doc/source/developer_guide/dg_scripts.rst | 7 ++ doc/source/user_guide/ug_running.rst | 82 +++++++++++++- 5 files changed, 237 insertions(+), 61 deletions(-) diff --git a/cice.setup b/cice.setup index 7380def71..ad535aba7 100755 --- a/cice.setup +++ b/cice.setup @@ -872,7 +872,7 @@ echo "${testname_base}" cd ${testname_base} source ./cice.settings set ciceexe = "../ciceexe.\${ICE_COMPILER}.\${ICE_COMMDIR}.\${ICE_BLDDEBUG}.\${ICE_THREADED}.\${ICE_IOTYPE}" -./cice.build \${ciceexe} +./cice.build --exe \${ciceexe} if !(-e \${ciceexe}) cp -p \${ICE_RUNDIR}/cice \${ciceexe} EOF end diff --git a/configuration/scripts/Makefile b/configuration/scripts/Makefile index 362397b1d..3c7857c21 100644 --- a/configuration/scripts/Makefile +++ b/configuration/scripts/Makefile @@ -8,7 +8,6 @@ # SRCS= ~ list of src files, default is all .c .F .F90 files in VPATH # VPFILE= ~ file with list of dirs, used to create VPATH # SRCFILE= ~ file with list of src files, used to create SRCS -# DEPGEN= ~ dependency generator utility, default is makdep # # ~ any macro definitions found in this file or the included # MACFILE will be over-riden by cmd-line macro definitions @@ -31,7 +30,9 @@ VPFILE := NONE VPATH := . SRCFILE := NONE SRCS := NONE -DEPGEN := ./makdep # an externally provided dependency generator + +# dependency generator +DEPGEN := ./makdep ifneq ($(VPATH),.) 
# this variable was specified on cmd line or in an env var @@ -58,26 +59,33 @@ endif OBJS := $(addsuffix .o, $(sort $(basename $(notdir $(SRCS))))) DEPS := $(addsuffix .d, $(sort $(basename $(notdir $(SRCS))))) INCS := $(patsubst %,-I%, $(VPATH) ) +OBJS_DEPGEN := $(addprefix $(ICE_CASEDIR)/,$(addsuffix .c, $(notdir $(DEPGEN)))) +MODDIR:= -I. RM := rm .SUFFIXES: .SUFFIXES: .F90 .F .c .o +.PHONY: all cice targets target db_files db_flags clean realclean all: $(EXEC) +cice: $(EXEC) + #------------------------------------------------------------------------------- # include the file that provides macro definitions required by build rules -# note: the MACFILE may not be needed for certain goals #------------------------------------------------------------------------------- -ifneq ($(MAKECMDGOALS), db_files) - -include $(MACFILE) -endif +-include $(MACFILE) #------------------------------------------------------------------------------- -# echo file names, paths, compile flags, etc. used during build +# echo supported targets, file names, paths, compile flags, etc. 
used during build #------------------------------------------------------------------------------- +targets: + @echo " " + @echo "Supported Makefile Targets are: cice, makdep, depends, clean, realclean, targets, db_files, db_flags" +target: targets + db_files: @echo " " @echo "* EXEC := $(EXEC)" @@ -89,12 +97,22 @@ db_files: @echo "* SRCS := $(SRCS)" @echo "* OBJS := $(OBJS)" @echo "* DEPS := $(DEPS)" + @echo "* ULIBS := $(ULIBS)" + @echo "* SLIBS := $(SLIBS)" + @echo "* INCLDIR := $(INCLDIR)" + @echo "* OBJS_DEPGEN := $(OBJS_DEPGEN)" db_flags: @echo " " - @echo "* cpp := $(CPP) $(CPPFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR)" - @echo "* cc := $(CC) -c $(CFLAGS) $(INCS) $(INCLDIR)" - @echo "* .F.o := $(FC) -c $(FFLAGS) $(FIXEDFLAGS) $(INCS) $(INCLDIR)" - @echo "* .F90.o := $(FC) -c $(FFLAGS) $(FREEFLAGS) $(INCS) $(INCLDIR)" + @echo "* $(DEPGEN) := $(SCC) $(CFLAGS_HOST)" + @echo "* %.d : %.c := $(DEPGEN) $(INCS)" + @echo "* %.d : %.F := $(DEPGEN) $(INCS)" + @echo "* %.d : %.F90 := $(DEPGEN) $(INCS)" + @echo "* %.d : %.H := $(DEPGEN) $(INCS)" + @echo "* cpp := $(CPP) $(CPPFLAGS) $(CPPDEFS) $(INCLDIR)" + @echo "* .c.o := $(CC) $(CFLAGS) $(CPPDEFS) $(INCLDIR)" + @echo "* .F.o := $(FC) -c $(FFLAGS) $(FIXEDFLAGS) $(CPPDEFS) $(INCLDIR)" + @echo "* .F90.o := $(FC) -c $(FFLAGS) $(FREEFLAGS) $(CPPDEFS) $(MODDIR) $(INCLDIR)" + @echo "* $(notdir $(EXEC)) := $(LD) $(LDFLAGS) $(ULIBS) $(SLIBS)" #------------------------------------------------------------------------------- # build rule for makdep: MACFILE, cmd-line, or env vars must provide @@ -105,7 +123,8 @@ ifndef $(CFLAGS_HOST) CFLAGS_HOST := endif -$(DEPGEN): $(ICE_CASEDIR)/makdep.c +$(DEPGEN): $(OBJS_DEPGEN) + @ echo "Building makdep" $(SCC) -o $@ $(CFLAGS_HOST) $< #------------------------------------------------------------------------------- @@ -116,29 +135,25 @@ $(EXEC): $(OBJS) $(LD) -o $(EXEC) $(LDFLAGS) $(OBJS) $(ULIBS) $(SLIBS) .c.o: - $(CC) $(CFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< + $(CC) $(CFLAGS) $(CPPDEFS) 
$(INCLDIR) $< .F.o: - $(FC) -c $(FFLAGS) $(FIXEDFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< + $(FC) -c $(FFLAGS) $(FIXEDFLAGS) $(CPPDEFS) $(INCLDIR) $< .F90.o: - $(FC) -c $(FFLAGS) $(FREEFLAGS) $(CPPDEFS) $(INCS) $(INCLDIR) $< - -mostlyclean: - $(RM) -f *.f *.f90 + $(FC) -c $(FFLAGS) $(FREEFLAGS) $(CPPDEFS) $(MODDIR) $(INCLDIR) $< clean: - $(RM) -f *.f *.f90 *.d *.mod *.o $(EXEC) -# $(RM) -f *.f *.f90 *.d *.$(MOD_SUFFIX) $(OBJS) + $(RM) -f *.o *.d *.mod $(EXEC) -realclean: - $(RM) -f *.f *.f90 *.d *.$(MOD_SUFFIX) $(OBJS) $(EXEC) +realclean: clean + $(RM) -f $(DEPGEN) #------------------------------------------------------------------------------- # Build & include dependency files #------------------------------------------------------------------------------- # ASSUMPTIONS: -# o an externally provided dependency generator, $(DEPGEN), is available, +# o the dependency generator, $(DEPGEN), can be built, # its cmd line syntax is compatible with the build rules below. Eg, for # each .o file, there is a corresponding .d (dependency) file, and both # will be dependent on the same src file, eg. foo.o foo.d : foo.F90 @@ -146,28 +161,36 @@ realclean: # are understood & accepted. 
#------------------------------------------------------------------------------- -%.d : %.c +depends: $(DEPS) + +%.d : %.c $(DEPGEN) @ echo "Building dependency for $@" @ $(DEPGEN) -f $(INCS) $< | head -3 > $@ -%.d : %.F +%.d : %.F $(DEPGEN) @ echo "Building dependency for $@" @ $(DEPGEN) -f $(INCS) $< > $@ -%.d : %.F90 +%.d : %.F90 $(DEPGEN) @ echo "Building dependency for $@" @ $(DEPGEN) -f $(INCS) $< > $@ -%.d : %.H +%.d : %.H $(DEPGEN) @ echo "Building dependency for $@" @ $(DEPGEN) -f $(INCS) $< > $@ # the if-tests prevent DEPS files from being created when they're not needed ifneq ($(MAKECMDGOALS), db_files) ifneq ($(MAKECMDGOALS), db_flags) -ifneq ($(MAKECMDGOALS), mostlyclean) ifneq ($(MAKECMDGOALS), clean) ifneq ($(MAKECMDGOALS), realclean) +ifneq ($(MAKECMDGOALS), targets) +ifneq ($(MAKECMDGOALS), target) +ifneq ($(MAKECMDGOALS), makdep) +ifneq ($(MAKECMDGOALS), depends) -include $(DEPS) endif endif endif endif endif +endif +endif +endif diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build index 62ea6c447..61d639e09 100755 --- a/configuration/scripts/cice.build +++ b/configuration/scripts/cice.build @@ -4,29 +4,93 @@ # If the cice binary is passed as an argument and the file exists, # copy it into the run directory and don't build the model. +set dohelp = 0 +set directmake = 0 +set target = "UnDEFineD" set ciceexe = "UnDEFineD" -if ($#argv == 1) then - set ciceexe = $1 - echo "${0}: ciceexe = ${ciceexe}" - if (-e ${ciceexe}) then - - source ./cice.settings - source ${ICE_CASEDIR}/env.${ICE_MACHCOMP} -nomodules || exit 2 - if !(-d ${ICE_RUNDIR}) mkdir -p ${ICE_RUNDIR} - cp -p ${ciceexe} ${ICE_RUNDIR}/cice - - echo "`date` ${0}:${ICE_CASENAME} build copied ${ciceexe}" >> ${ICE_CASEDIR}/README.case - if ( ${ICE_TEST} != ${ICE_SPVAL} ) then - echo "#---" >! 
${ICE_CASEDIR}/test_output - echo "COPY ${ICE_TESTNAME} build" >> ${ICE_CASEDIR}/test_output - echo "PEND ${ICE_TESTNAME} run" >> ${ICE_CASEDIR}/test_output +if ($#argv == 0) then + # continue, standard way to build +else + # -h, --help + if ("$1" == "-h" || "$1" == "--help") then + set dohelp = 1 + + # --exe + else if ("$1" == "--exe") then + if ($#argv > 2) then + echo "${0}: ERROR: --exe ciceexe without other arguments is required" + exit -99 + endif + set ciceexe = $2 + echo "${0}: ciceexe = ${ciceexe}" + if (-e ${ciceexe}) then + + source ./cice.settings + source ${ICE_CASEDIR}/env.${ICE_MACHCOMP} -nomodules || exit 2 + if !(-d ${ICE_RUNDIR}) mkdir -p ${ICE_RUNDIR} + cp -p ${ciceexe} ${ICE_RUNDIR}/cice + + echo "`date` ${0}:${ICE_CASENAME} build copied ${ciceexe}" >> ${ICE_CASEDIR}/README.case + if ( ${ICE_TEST} != ${ICE_SPVAL} ) then + echo "#---" >! ${ICE_CASEDIR}/test_output + echo "COPY ${ICE_TESTNAME} build" >> ${ICE_CASEDIR}/test_output + echo "PEND ${ICE_TESTNAME} run" >> ${ICE_CASEDIR}/test_output + endif + + exit 0 endif - exit 0 - + # direct make with target + else + set directmake = 1 + set target = "$*" endif endif +if (${dohelp} == 1) then + cat << EOF1 + +NAME + cice.build [-h,--help] [make arguments] [target] + +SYNOPSIS + -h || --help + [make arguments] [target] + +DESCRIPTION + --help, -h : help + [make arguments] [target] : specify make arguments and target + +EXAMPLES + cice.build + will build the model using the standard approach with all the bells and whistles. + This is recommended. 
+ cice.build --help + will show cice.build help + cice.build [target] + will call gmake directly and make the target + cice.build clean + is an example of a targeted build + cice.build targets + is an example of a targeted build that shows the valid Makefile targets + cice.build --version + will call make directly and pass --version as an argument to make + cice.build [make arguments] [target] + will call make directly and pass arguments and/or a target to make + +ADDITIONAL INFORMATION + The argument implementation supports -h or --help as a first argument. Otherwise, + it passes all other arguments directly to make and calls it directly. In this + mode, most of the cice.build script features are by-passed. The recommended + way to run the script is without arguments. + +SEE ALSO + User Documentation at https://github.com/cice-consortium/cice/ + +EOF1 +exit -99 +endif + #==================================== source ./cice.settings @@ -47,10 +111,7 @@ set stamp = `date '+%y%m%d-%H%M%S'` set ICE_BLDLOG_FILE = "cice.bldlog.${stamp}" set quiet = ${ICE_QUIETMODE} -if (${ICE_CLEANBUILD} == 'true') then - echo "cleaning objdir" - rm -r -f ${ICE_OBJDIR} -endif +if !(-d ${ICE_RUNDIR}) mkdir -p ${ICE_RUNDIR} if !(-d ${ICE_OBJDIR}) mkdir -p ${ICE_OBJDIR} cd ${ICE_OBJDIR} @@ -79,6 +140,22 @@ ${ICE_SANDBOX}/cicecore/shared ${ICE_SANDBOX}/icepack/columnphysics EOF +if !($?ICE_MACHINE_BLDTHRDS) then + set ICE_MACHINE_BLDTHRDS = 1 +endif + +if (${directmake} == 1) then + echo "make ${target}" + ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ + -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} ${target} + set bldstat = ${status} + if (${bldstat} != 0) then + echo "${0}: targeted make FAILED" + exit -99 + endif + exit 0 +endif + echo " " echo ICE_GRID = ${ICE_GRID} echo ICE_NTASK = ${ICE_NTASKS} @@ -88,16 +165,7 @@ echo "Filepath = " cat ${ICE_OBJDIR}/Filepath echo " " -echo "building makdep" -${ICE_MACHINE_MAKE} \ - -f 
${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} makdep || exit 2 - echo "building cice > ${ICE_OBJDIR}/${ICE_BLDLOG_FILE}" - -if !(-d ${ICE_RUNDIR}) mkdir -p ${ICE_RUNDIR} -if !($?ICE_MACHINE_BLDTHRDS) then - set ICE_MACHINE_BLDTHRDS = 1 -endif if (-e ${ICE_BLDLOG_FILE}) rm ${ICE_BLDLOG_FILE} if (${ICE_CLEANBUILD} == 'true') then diff --git a/doc/source/developer_guide/dg_scripts.rst b/doc/source/developer_guide/dg_scripts.rst index e84859c4e..cdb066e6a 100755 --- a/doc/source/developer_guide/dg_scripts.rst +++ b/doc/source/developer_guide/dg_scripts.rst @@ -92,6 +92,13 @@ The machine is built by the cice.build script which invokes Make. There is a special trap for circular dependencies in the cice.build script to highlight this error when it occurs. +The **cice.build** script has some additional features including the ability to +pass a Makefile target. This is documented in :ref:`cicebuild`. In addition, there +is a hidden feature in the **cice.build** script that allows for reuse of +executables. This is used by the test suites to significantly reduce cost of +building the model. It is invoked with the ``--exe`` argument to **cice.build** +and should not be invoked by users interactively. + .. 
_dev_machines: Machines diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index 5be0a8683..33aab2004 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -121,7 +121,7 @@ Once a case/test is created, several files are placed in the case directory - **makdep.c** is a tool that will automatically generate the make dependencies - **Macros.[machine]** defines the Makefile macros - **Makefile** is the makefile used to build the model -- **cice.build** is a script that builds and compiles the model +- **cice.build** is a script that calls the Makefile and compiles the model - **ice\_in** is the namelist input file - **setup\_run\_dirs.csh** is a script that will create the run directories. This will be called automatically from the **cice.run** script if the user does not invoke it. - **cice.run** is a batch run script @@ -161,7 +161,7 @@ case directory, NOT the run directory. .. _case_options: -Command Line Options +**cice.setup** Command Line Options ~~~~~~~~~~~~~~~~~~~~ ``cice.setup -h`` provides a summary of the command line options. There are three different modes, ``--case``, ``--test``, and ``--suite``. This section provides details about the relevant options for setting up cases with examples. @@ -267,6 +267,84 @@ To add some optional settings, one might do:: Once the cases are created, users are free to modify the cice.settings and ice_in namelist to further modify their setup. +.. _cicebuild: + +More about **cice.build** +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +**cice.build** is copied into the case directory and should be run interactively from the +case directory to build the model. CICE is built with make and there is a generic +Makefile and a machine specific Macros file in the case directory. **cice.build** +is a wrapper for a call to make that includes several other features. + +CICE is built as follows. First, the makdep binary is created by compiling a small +C program. 
The makdep binary is then run and dependency files are created. The dependency +files are included into the Makefile automatically. As a result, make dependencies do not +need to be explicitly defined by the user. In the next step, make compiles the CICE +code and generates the cice binary. + +The standard and recommended way to run is with +no arguments +:: + + cice.build + +However, **cice.build** does support a couple other use modes. +:: + + cice.build [-h|--help] + +provides a summary of the usage. +:: + + cice.build [make arguments] [target] + +turns off most of the features of the cice.build script and turns it into a wrapper +for the make call. The arguments and/or target are passed to make and invoked more +or less like make [make arguments] [target]. This will be the case if either or +both the arguments or target are passed to cice.build. Some examples of that are +:: + + cice.build --version + +which will pass --version to make. +:: + + cice.build targets + +is a valid target of the CICE Makefile and simply echos all the valid +targets of the Makefile. +:: + + cice.build cice + +or :: + + cice.build all + +are largely equivalent to running **cice.build** without an argument, +although as noted earlier, many of the extra features of the cice.build script +are turned off when calling cice.build with a target or an argument. Any of the +full builds will compile makdep, generate the source code dependencies, and +compile the source code. +:: + + cice.build [clean|realclean] + cice.build [db_files|db_flags] + cice.build [makdep|depends] + +are other valid options for cleaning the build, writing out information about +the Makefile setup, and building just the makdep tool or the dependency file. +It is also possible to target a particular CICE object file. + +Finally, there is one important parameter in **cice.settings**. The ``ICE_CLEANBUILD`` +variable defines whether the model is cleaned before a build is carried out. 
By +default, this variable is true which means each invokation of **cice.build** will +automatically clean the prior build. If incremental builds are desired to save +time during development, the ``ICE_CLEANBUILD`` setting in **cice.settings** should +be modified. + + .. _porting: Porting From 8420dfd87eae5417fd844ed3dad44e7975127c46 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Fri, 30 Aug 2019 09:34:23 -0700 Subject: [PATCH 02/14] Port to Izumi (#356) * izumi initial port * update izumi port and add nothread_suite * update kind conversions in ice_dyn_evp_1d.F90 --- cice.setup | 88 ++++++++++++++----- cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 | 12 +-- configuration/scripts/cice.batch.csh | 12 +++ configuration/scripts/cice.launch.csh | 2 +- .../scripts/machines/Macros.cheyenne_intel | 0 .../scripts/machines/Macros.fram_intel | 0 .../scripts/machines/Macros.hobart_intel | 0 .../scripts/machines/Macros.hobart_nag | 0 .../scripts/machines/Macros.izumi_gnu | 52 +++++++++++ .../scripts/machines/Macros.izumi_intel | 52 +++++++++++ .../scripts/machines/Macros.izumi_nag | 53 +++++++++++ .../scripts/machines/Macros.izumi_pgi | 52 +++++++++++ .../scripts/machines/Macros.phase2_intel | 0 .../scripts/machines/Macros.phase3_intel | 0 .../scripts/machines/Macros.testmachine_intel | 0 .../scripts/machines/env.brooks_intel | 0 .../scripts/machines/env.cesium_intel | 0 .../scripts/machines/env.hobart_intel | 2 +- configuration/scripts/machines/env.izumi_gnu | 32 +++++++ .../scripts/machines/env.izumi_intel | 32 +++++++ configuration/scripts/machines/env.izumi_nag | 32 +++++++ configuration/scripts/machines/env.izumi_pgi | 32 +++++++ .../scripts/machines/env.millikan_intel | 0 .../scripts/options/set_nml.boxrestore | 2 - configuration/scripts/tests/nothread_suite.ts | 61 +++++++++++++ doc/source/user_guide/ug_running.rst | 1 + 26 files changed, 486 insertions(+), 31 deletions(-) mode change 100755 => 100644 configuration/scripts/machines/Macros.cheyenne_intel mode change 
100755 => 100644 configuration/scripts/machines/Macros.fram_intel mode change 100755 => 100644 configuration/scripts/machines/Macros.hobart_intel mode change 100755 => 100644 configuration/scripts/machines/Macros.hobart_nag create mode 100644 configuration/scripts/machines/Macros.izumi_gnu create mode 100644 configuration/scripts/machines/Macros.izumi_intel create mode 100644 configuration/scripts/machines/Macros.izumi_nag create mode 100644 configuration/scripts/machines/Macros.izumi_pgi mode change 100755 => 100644 configuration/scripts/machines/Macros.phase2_intel mode change 100755 => 100644 configuration/scripts/machines/Macros.phase3_intel mode change 100755 => 100644 configuration/scripts/machines/Macros.testmachine_intel mode change 100644 => 100755 configuration/scripts/machines/env.brooks_intel mode change 100644 => 100755 configuration/scripts/machines/env.cesium_intel create mode 100755 configuration/scripts/machines/env.izumi_gnu create mode 100755 configuration/scripts/machines/env.izumi_intel create mode 100755 configuration/scripts/machines/env.izumi_nag create mode 100755 configuration/scripts/machines/env.izumi_pgi mode change 100644 => 100755 configuration/scripts/machines/env.millikan_intel create mode 100644 configuration/scripts/tests/nothread_suite.ts diff --git a/cice.setup b/cice.setup index ad535aba7..13ba00f85 100755 --- a/cice.setup +++ b/cice.setup @@ -493,14 +493,10 @@ EOF set blckx = `echo ${pesx} | cut -d x -f 3` set blcky = `echo ${pesx} | cut -d x -f 4` set mblck = `echo ${pesx} | cut -d x -f 5` - if ($?ICE_MACHINE_MAXPES) then - @ pesreq = ${task} * ${thrd} - if (${pesreq} > ${ICE_MACHINE_MAXPES}) then - @ task = ${ICE_MACHINE_MAXPES} / ${thrd} - @ mblck = ${mblck} * ((${pesreq} / ${ICE_MACHINE_MAXPES}) + 1) - endif + if (${task} == 0 || ${thrd} == 0 || ${blckx} == 0 || ${blcky} == 0 || ${mblck} == 0) then + echo "${0}: ERROR in -p argument, cannot have zeros" + exit -1 endif - set pesx = ${task}x${thrd}x${blckx}x${blcky}x${mblck} 
else set chck = `echo ${pesx} | sed 's/^[0-9][0-9]*x[0-9][0-9]*x[0-9][0-9]*x[0-9][0-9]*$/OK/'` if (${chck} == OK) then @@ -509,13 +505,10 @@ EOF set blckx = `echo ${pesx} | cut -d x -f 3` set blcky = `echo ${pesx} | cut -d x -f 4` set mblck = 0 - if ($?ICE_MACHINE_MAXPES) then - @ pesreq = ${task} * ${thrd} - if (${pesreq} > ${ICE_MACHINE_MAXPES}) then - @ task = ${ICE_MACHINE_MAXPES} / ${thrd} - endif + if (${task} == 0 || ${thrd} == 0 || ${blckx} == 0 || ${blcky} == 0) then + echo "${0}: ERROR in -p argument, cannot have zeros" + exit -1 endif - set pesx = ${task}x${thrd}x${blckx}x${blcky} else set chck = `echo ${pesx} | sed 's/^[0-9][0-9]*x[0-9][0-9]*$/OK/'` if (${chck} == OK) then @@ -524,20 +517,73 @@ EOF set blckx = 0 set blcky = 0 set mblck = 0 - if ($?ICE_MACHINE_MAXPES) then - @ pesreq = ${task} * ${thrd} - if (${pesreq} > ${ICE_MACHINE_MAXPES}) then - @ task = ${ICE_MACHINE_MAXPES} / ${thrd} - endif + if (${task} == 0 || ${thrd} == 0) then + echo "${0}: ERROR in -p argument, cannot have zeros" + exit -1 endif - set pesx = ${task}x${thrd} else - echo "${0}: ERROR in -p argument, ${pesx}, must be [m]x[n], [m]x[n]x[bx]x[by], or [m]x[n]x[bx]x[by]x[mb] " - exit -1 + set chck = `echo ${pesx} | sed 's/^[0-9][0-9]*$/OK/'` + if (${chck} == OK) then + set task = `echo ${pesx} | cut -d x -f 1` + set thrd = 1 + set blckx = 0 + set blcky = 0 + set mblck = 0 + if (${task} == 0) then + echo "${0}: ERROR in -p argument, cannot have zeros" + exit -1 + endif + else + echo "${0}: ERROR in -p argument, ${pesx}, must be [m], [m]x[n], [m]x[n]x[bx]x[by], or [m]x[n]x[bx]x[by]x[mb] " + exit -1 + endif + endif + endif + endif + + @ blkreq = ${task} * ${mblck} + # check max threads, reduce threads, increase tasks + if ($?ICE_MACHINE_MAXTHREADS) then + if (${thrd} > ${ICE_MACHINE_MAXTHREADS}) then + @ pesreq = ${task} * ${thrd} + @ task = ${pesreq} / ${ICE_MACHINE_MAXTHREADS} + @ thrd = ${ICE_MACHINE_MAXTHREADS} + @ peschk = ${task} * ${thrd} + if (${pesreq} > ${peschk}) then + @ 
task = ${task} + 1 endif +# echo "tcx1 reset to $task $thrd $mblck" + endif + endif + + # check max pes, reduce tasks + if ($?ICE_MACHINE_MAXPES) then + @ pesreq = ${task} * ${thrd} + if (${pesreq} > ${ICE_MACHINE_MAXPES}) then + @ task = ${ICE_MACHINE_MAXPES} / ${thrd} +# echo "tcx2 reset to $task $thrd $mblck" endif endif + # check max blocks and adjust as needed + if ($mblck > 0) then + @ mblck = ${blkreq} / ${task} + @ blkchk = ${task} * ${mblck} + if (${blkreq} > ${blkchk}) then + @ mblck = $mblck + 1 + endif +# echo "tcx3 reset to $task $thrd $mblck" + endif + + # update pesx based on use defined settings and machine limits to reflect actual value + set pesx = ${task}x${thrd}x${blckx}x${blcky}x${mblck} + if (${mblck} == 0) then + set pesx = ${task}x${thrd}x${blckx}x${blcky} + endif + if (${blckx} == 0 || ${blcky} == 0) then + set pesx = ${task}x${thrd} + endif + set testname_noid = ${spval} # create case for test cases if (${docase} == 0) then diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 index b1f162967..b7092fc95 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 @@ -47,7 +47,7 @@ module dmi_omp #if defined (_OPENMP) ! Please note, this constant will create a compiler info for a constant ! 
expression in IF statements: - real(kind=dbl_kind), private :: rdomp_iam, rdomp_nt + real(kind=dbl_kind) :: rdomp_iam, rdomp_nt !$OMP THREADPRIVATE(domp_iam,domp_nt,rdomp_iam,rdomp_nt) #endif @@ -70,9 +70,9 @@ subroutine domp_init(nt_out) !$OMP PARALLEL DEFAULT(none) #if defined (_OPENMP) domp_iam = omp_get_thread_num() - rdomp_iam = real(domp_iam,8) + rdomp_iam = real(domp_iam,dbl_kind) domp_nt = omp_get_num_threads() - rdomp_nt = real(domp_nt,8) + rdomp_nt = real(domp_nt,dbl_kind) #else domp_iam = 0 domp_nt = 1 @@ -139,9 +139,9 @@ subroutine domp_get_domain_rlu(lower,upper,d_lower,d_upper) #if defined (_OPENMP) if (omp_in_parallel()) then - dlen = real(upper-lower+1, 8) - d_lower = lower + floor((rdomp_iam*dlen+p5)/rdomp_nt, 4) - d_upper = lower -1 + floor((rdomp_iam*dlen+dlen+p5)/rdomp_nt, 4) + dlen = real(upper-lower+1, dbl_kind) + d_lower = lower + floor((rdomp_iam*dlen+p5)/rdomp_nt, JPIM) + d_upper = lower -1 + floor((rdomp_iam*dlen+dlen+p5)/rdomp_nt, JPIM) endif #endif diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 0647c5813..7345a3658 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -79,6 +79,18 @@ cat >> ${jobfile} << EOFB #PBS -l nodes=1:ppn=24 EOFB +else if (${ICE_MACHINE} =~ izumi*) then +if (${runlength} > 2) set queue = "medium" +cat >> ${jobfile} << EOFB +#PBS -j oe +###PBS -m ae +#PBS -V +#PBS -q ${queue} +#PBS -N ${ICE_CASENAME} +#PBS -l nodes=${nnodes}:ppn=${taskpernode} +#PBS -l walltime=${batchtime} +EOFB + else if (${ICE_MACHINE} =~ thunder* || ${ICE_MACHINE} =~ gordon* || ${ICE_MACHINE} =~ conrad* || ${ICE_MACHINE} =~ gaffney* || ${ICE_MACHINE} =~ koehr*) then cat >> ${jobfile} << EOFB #PBS -N ${shortcase} diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index 77e237683..b9ea5c806 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -34,7 +34,7 @@ EOFR endif 
#======= -else if (${ICE_MACHINE} =~ hobart*) then +else if (${ICE_MACHINE} =~ hobart* || ${ICE_MACHINE} =~ izumi*) then if (${ICE_COMMDIR} =~ serial*) then cat >> ${jobfile} << EOFR ./cice >&! \$ICE_RUNLOG_FILE diff --git a/configuration/scripts/machines/Macros.cheyenne_intel b/configuration/scripts/machines/Macros.cheyenne_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/Macros.fram_intel b/configuration/scripts/machines/Macros.fram_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/Macros.hobart_intel b/configuration/scripts/machines/Macros.hobart_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/Macros.hobart_nag b/configuration/scripts/machines/Macros.hobart_nag old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/Macros.izumi_gnu b/configuration/scripts/machines/Macros.izumi_gnu new file mode 100644 index 000000000..0d48f1013 --- /dev/null +++ b/configuration/scripts/machines/Macros.izumi_gnu @@ -0,0 +1,52 @@ +#============================================================================== +# Makefile macros for NCAR izumi, gnu compiler +#============================================================================== + +CPP := /usr/bin/cpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 + +FIXEDFLAGS := -ffixed-line-length-132 +FREEFLAGS := -ffree-form +FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow +else + FFLAGS += -O2 +endif + +SCC := gcc +SFC := gfortran +MPICC := mpicc +MPIFC := mpif90 + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +INCLDIR := -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib + +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + 
LDFLAGS += -fopenmp + CFLAGS += -fopenmp + FFLAGS += -fopenmp +endif + +## if using parallel I/O, load all 3 libraries. PIO must be first! +ifeq ($(IO_TYPE), pio) + PIO_PATH:= + INCLDIR += -I + SLIBS := $(SLIB) -L$(PIO_PATH) -lpiofS +endif + diff --git a/configuration/scripts/machines/Macros.izumi_intel b/configuration/scripts/machines/Macros.izumi_intel new file mode 100644 index 000000000..502f7b218 --- /dev/null +++ b/configuration/scripts/machines/Macros.izumi_intel @@ -0,0 +1,52 @@ +#============================================================================== +# Makefile macros for NCAR izumi, intel compiler +#============================================================================== + +CPP := /usr/bin/cpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -qno-opt-dynamic-align -fp-model precise + +FIXEDFLAGS := -fixed -132 +FREEFLAGS := -free +FFLAGS := -qno-opt-dynamic-align -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model source +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +else + FFLAGS += -O2 +endif + +SCC := icc +SFC := ifort +MPICC := mpicc +MPIFC := mpif90 + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +INCLDIR := -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib + +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -mkl + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + +## if using parallel I/O, load all 3 libraries. PIO must be first! 
+ifeq ($(IO_TYPE), pio) + PIO_PATH:= + INCLDIR += -I + SLIBS := $(SLIB) -L$(PIO_PATH) -lpiofS +endif + diff --git a/configuration/scripts/machines/Macros.izumi_nag b/configuration/scripts/machines/Macros.izumi_nag new file mode 100644 index 000000000..8e42df4aa --- /dev/null +++ b/configuration/scripts/machines/Macros.izumi_nag @@ -0,0 +1,53 @@ +#============================================================================== +# Makefile macros for NCAR izumi, NAG compiler +#============================================================================== + +CPP := /usr/bin/cpp +CPPDEFS := -DFORTRANUNDERSCORE -DNO_R16 $(ICE_CPPDEFS) +CFLAGS := -c + +FIXEDFLAGS := -fixed +FREEFLAGS := -free +FFLAGS := -Wp,-macro=no_com -convert=BIG_ENDIAN -wmismatch=mpi_bcast,mpi_isend,mpi_irecv,mpi_send,mpi_recv,mpi_allreduce +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -C=all -g -time -f2003 -ieee=stop +else + FFLAGS += -O2 -ieee=full +endif + +SCC := cc +SFC := nagfor +MPICC := mpicc +MPIFC := mpif90 + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +INCLDIR := -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib + +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -openmp + CFLAGS += -fopenmp + FFLAGS += -openmp +else + FFLAGS += -gline +endif + +## if using parallel I/O, load all 3 libraries. PIO must be first! 
+ifeq ($(IO_TYPE), pio) + PIO_PATH:= + INCLDIR += -I + SLIBS := $(SLIBS) -L$(PIO_PATH) -lpiof +endif diff --git a/configuration/scripts/machines/Macros.izumi_pgi b/configuration/scripts/machines/Macros.izumi_pgi new file mode 100644 index 000000000..8a8ca4b97 --- /dev/null +++ b/configuration/scripts/machines/Macros.izumi_pgi @@ -0,0 +1,52 @@ +#============================================================================== +# Makefile macros for NCAR izumi, pgi compiler +#============================================================================== + +CPP := pgcc -E +CPPDEFS := -DFORTRANUNDERSCORE -DNO_R16 ${ICE_CPPDEFS} +CFLAGS := -c -O2 -Kieee + +FIXEDFLAGS := -Mextend +FREEFLAGS := -Mfree +FFLAGS := -Kieee -Mbyteswapio -traceback +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -Mbounds -Mchkptr +else + FFLAGS += -O2 +endif + +SCC := pgcc +SFC := pgf90 +MPICC := mpicc +MPIFC := mpif90 + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +INCLDIR := -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib + +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -mp + CFLAGS += -mp + FFLAGS += -mp +endif + +## if using parallel I/O, load all 3 libraries. PIO must be first! 
+ifeq ($(IO_TYPE), pio) + PIO_PATH:= + INCLDIR += -I + SLIBS := $(SLIBS) -L$(PIO_PATH) -lpiof +endif + diff --git a/configuration/scripts/machines/Macros.phase2_intel b/configuration/scripts/machines/Macros.phase2_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/Macros.phase3_intel b/configuration/scripts/machines/Macros.phase3_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/Macros.testmachine_intel b/configuration/scripts/machines/Macros.testmachine_intel old mode 100755 new mode 100644 diff --git a/configuration/scripts/machines/env.brooks_intel b/configuration/scripts/machines/env.brooks_intel old mode 100644 new mode 100755 diff --git a/configuration/scripts/machines/env.cesium_intel b/configuration/scripts/machines/env.cesium_intel old mode 100644 new mode 100755 diff --git a/configuration/scripts/machines/env.hobart_intel b/configuration/scripts/machines/env.hobart_intel index 76f4e5e55..80092297a 100755 --- a/configuration/scripts/machines/env.hobart_intel +++ b/configuration/scripts/machines/env.hobart_intel @@ -15,7 +15,7 @@ module load compiler/intel/18.0.3 endif setenv ICE_MACHINE_ENVNAME hobart -setenv ICE_MACHINE_COMPILER ifort +setenv ICE_MACHINE_COMPILER intel setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR /scratch/cluster/$user/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /fs/cgd/csm/inputdata diff --git a/configuration/scripts/machines/env.izumi_gnu b/configuration/scripts/machines/env.izumi_gnu new file mode 100755 index 000000000..9f9938d68 --- /dev/null +++ b/configuration/scripts/machines/env.izumi_gnu @@ -0,0 +1,32 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source /usr/share/Modules/init/csh + +module purge +module load compiler/gnu/8.2.0 + +setenv OMP_STACKSIZE 64M + +endif + +setenv ICE_MACHINE_ENVNAME izumi +setenv ICE_MACHINE_COMPILER gnu +setenv ICE_MACHINE_MAKE gmake +setenv 
ICE_MACHINE_WKDIR /scratch/cluster/$user/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /fs/cgd/csm/inputdata +setenv ICE_MACHINE_BASELINE /scratch/cluster/$user/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub" +setenv ICE_MACHINE_QSTAT "qstat " +setenv ICE_MACHINE_QUEUE "short" +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_TPNODE 48 +setenv ICE_MACHINE_MAXPES 384 +setenv ICE_MACHINE_MAXTHREADS 1 +setenv ICE_MACHINE_BLDTHRDS 1 diff --git a/configuration/scripts/machines/env.izumi_intel b/configuration/scripts/machines/env.izumi_intel new file mode 100755 index 000000000..63d175990 --- /dev/null +++ b/configuration/scripts/machines/env.izumi_intel @@ -0,0 +1,32 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source /usr/share/Modules/init/csh + +module purge +module load compiler/intel/19.0.1 + +setenv OMP_STACKSIZE 64M + +endif + +setenv ICE_MACHINE_ENVNAME izumi +setenv ICE_MACHINE_COMPILER intel +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR /scratch/cluster/$user/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /fs/cgd/csm/inputdata +setenv ICE_MACHINE_BASELINE /scratch/cluster/$user/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub" +setenv ICE_MACHINE_QSTAT "qstat " +setenv ICE_MACHINE_QUEUE "short" +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_TPNODE 48 +setenv ICE_MACHINE_MAXPES 384 +setenv ICE_MACHINE_MAXTHREADS 1 +setenv ICE_MACHINE_BLDTHRDS 1 diff --git a/configuration/scripts/machines/env.izumi_nag b/configuration/scripts/machines/env.izumi_nag new file mode 100755 index 000000000..785cc410a --- /dev/null +++ b/configuration/scripts/machines/env.izumi_nag @@ -0,0 +1,32 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source /usr/share/Modules/init/csh + +module purge +module load compiler/nag/6.2 + +setenv OMP_STACKSIZE 64M + +endif + +setenv ICE_MACHINE_ENVNAME izumi +setenv 
ICE_MACHINE_COMPILER nag +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR /scratch/cluster/$user/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /fs/cgd/csm/inputdata +setenv ICE_MACHINE_BASELINE /scratch/cluster/$user/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub" +setenv ICE_MACHINE_QSTAT "qstat " +setenv ICE_MACHINE_QUEUE "short" +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_TPNODE 48 +setenv ICE_MACHINE_MAXPES 384 +setenv ICE_MACHINE_MAXTHREADS 1 +setenv ICE_MACHINE_BLDTHRDS 1 diff --git a/configuration/scripts/machines/env.izumi_pgi b/configuration/scripts/machines/env.izumi_pgi new file mode 100755 index 000000000..b89eafeb8 --- /dev/null +++ b/configuration/scripts/machines/env.izumi_pgi @@ -0,0 +1,32 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source /usr/share/Modules/init/csh + +module purge +module load compiler/pgi/18.10 + +setenv OMP_STACKSIZE 64M + +endif + +setenv ICE_MACHINE_ENVNAME izumi +setenv ICE_MACHINE_COMPILER pgi +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR /scratch/cluster/$user/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /fs/cgd/csm/inputdata +setenv ICE_MACHINE_BASELINE /scratch/cluster/$user/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub" +setenv ICE_MACHINE_QSTAT "qstat " +setenv ICE_MACHINE_QUEUE "short" +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_TPNODE 48 +setenv ICE_MACHINE_MAXPES 384 +setenv ICE_MACHINE_MAXTHREADS 1 +setenv ICE_MACHINE_BLDTHRDS 1 diff --git a/configuration/scripts/machines/env.millikan_intel b/configuration/scripts/machines/env.millikan_intel old mode 100644 new mode 100755 diff --git a/configuration/scripts/options/set_nml.boxrestore b/configuration/scripts/options/set_nml.boxrestore index 294d507aa..bc913a3dc 100644 --- a/configuration/scripts/options/set_nml.boxrestore +++ b/configuration/scripts/options/set_nml.boxrestore @@ -5,8 +5,6 @@ restart_ext = .true. use_leap_years = .true. 
ndtd = 2 kcatbound = 1 -distribution_type = 'cartesian' -processor_shape = 'slenderX1' ew_boundary_type = 'cyclic' ns_boundary_type = 'open' histfreq = 'd','x','x','x','x' diff --git a/configuration/scripts/tests/nothread_suite.ts b/configuration/scripts/tests/nothread_suite.ts new file mode 100644 index 000000000..d9ab722c0 --- /dev/null +++ b/configuration/scripts/tests/nothread_suite.ts @@ -0,0 +1,61 @@ +# Test Grid PEs Sets BFB-compare + +restart gx3 8x1x25x29x2 dslenderX2 +logbfb gx3 8x1x25x29x2 dslenderX2,diag1,reprosum + +smoke gx3 16x1 diag1,run5day +smoke gx3 1x1 debug,diag1,run2day +smoke gx3 4x1 debug,diag1,run5day,thread +restart gx3 16x1 thread +smoke gx3 16x1 diag24,run1year,medium + +restart gx1 160x1 droundrobin,medium +restart tx1 160x1 dsectrobin,medium +restart gx3 16x1 none +restart gx3 16x1 iobinary + +restart gx3 12x1 alt01 +restart gx3 16x1 alt02 +restart gx3 8x1 alt03 +restart gx3 16x1 alt04 +restart gx3 16x1 alt05 +restart gx3 18x1 alt01,debug,short +restart gx3 20x1 alt02,debug,short +restart gx3 24x1 alt03,debug,short +smoke gx3 24x1 alt04,debug,short +smoke gx3 32x1 alt05,debug,short + +restart gbox128 8x1 short +restart gbox128 16x1 boxdyn,short +restart gbox128 24x1 boxdyn,short,debug +restart gbox128 12x1 boxadv,short +smoke gbox128 20x1 boxadv,short,debug +restart gbox128 32x1 boxrestore,short +smoke gbox128 24x1 boxrestore,short,debug +restart gbox80 1x1 box2001 +smoke gbox80 1x1 boxslotcyl + +smoke gx3 16x1 bgcz +smoke gx3 16x1 bgcz,debug +smoke gx3 24x1 bgcskl,debug +restart gx1 128x1 bgcsklclim,medium +restart gx1 256x1 bgczclim,medium + +decomp gx3 8x1x5x29x20 +restart gx3 1x1x50x58x4 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 4x1x25x116x1 dslenderX1 restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 12x1x4x29x9 dspacecurve restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 16x1x8x10x10 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 6x1x50x58x1 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 
8x1x19x19x5 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 20x1x5x29x20 dsectrobin,short restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 32x1x5x10x10 drakeX2 restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 16x1x8x10x10 droundrobin,maskhalo restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 4x1x25x29x4 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 + +logbfb gx3 1x1x50x58x4 droundrobin,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 4x1x25x116x1 dslenderX1,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 20x1x5x29x20 dsectrobin,diag1,short,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 16x1x8x10x10 droundrobin,diag1,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 6x1x50x58x1 droundrobin,diag1,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 12x1x4x29x9 dspacecurve,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index 33aab2004..69b82d93e 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -400,6 +400,7 @@ system. Some variables are optional. "ICE_MACHINE_SUBMIT", "string", "batch job submission command" "ICE_MACHINE_TPNODE", "integer", "machine maximum MPI tasks per node" "ICE_MACHINE_MAXPES", "integer", "machine maximum total processors per job (optional)" + "ICE_MACHINE_MAXTHREADS", "integer", "machine maximum threads per mpi task (optional)" "ICE_MACHINE_MAXRUNLENGTH", "integer", "batch wall time limit in hours (optional)" "ICE_MACHINE_ACCT", "string", "batch default account" "ICE_MACHINE_QUEUE", "string", "batch default queue" From 804b7039b68e7d737d81393308d93e9a7ae002f2 Mon Sep 17 00:00:00 2001 From: "David A. 
Bailey" Date: Fri, 30 Aug 2019 10:57:20 -0600 Subject: [PATCH 03/14] Local Solar Time Computation (#323) * Fix local solar time * Add line for going past 24 * New LST calculation Addresses #3. --- cicecore/cicedynB/general/ice_forcing.F90 | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index 4baa88fff..2a529c2fe 100644 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -30,8 +30,8 @@ module ice_forcing use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite, & timer_bound use ice_arrays_column, only: oceanmixed_ice, restore_bgc - use ice_constants, only: c0, c1, c2, c3, c4, c5, c10, c12, c20, & - c180, c365, c1000, c3600 + use ice_constants, only: c0, c1, c2, c3, c4, c5, c10, c12, c15, c20, & + c180, c360, c365, c1000, c3600 use ice_constants, only: p001, p01, p1, p25, p5, p6 use ice_constants, only: cm_to_m use ice_constants, only: field_loc_center, field_type_scalar, & @@ -2260,6 +2260,7 @@ subroutine compute_shortwave(nx_block, ny_block, & sw0 , & secday , & pi , & + lontmp , & deg2rad integer (kind=int_kind) :: & @@ -2275,8 +2276,18 @@ subroutine compute_shortwave(nx_block, ny_block, & do j=jlo,jhi do i=ilo,ihi deg2rad = pi/c180 +! solar_time = mod(real(sec,kind=dbl_kind),secday)/c3600 & +! + c12*sin(p5*TLON(i,j)) + +! Convert longitude to range of -180 to 180 for LST calculation + + lontmp = mod(TLON(i,j)/deg2rad,c360) + if (lontmp .gt. c180) lontmp = lontmp - c360 + if (lontmp .lt. -c180) lontmp = lontmp + c360 + solar_time = mod(real(sec,kind=dbl_kind),secday)/c3600 & - + c12*sin(p5*TLON(i,j)) + + lontmp/c15 + if (solar_time .ge. 24._dbl_kind) solar_time = solar_time - 24._dbl_kind hour_angle = (c12 - solar_time)*pi/c12 declin = 23.44_dbl_kind*cos((172._dbl_kind-yday) & * c2*pi/c365)*deg2rad ! use dayyr instead of c365??? 
From 1a38c6da5fc317708bbd1ad8ba529b3fa98de79a Mon Sep 17 00:00:00 2001 From: Philippe Blain <44212482+phil-blain@users.noreply.github.com> Date: Fri, 30 Aug 2019 13:01:29 -0400 Subject: [PATCH 04/14] Refactor boxslotcyl advection test to use icepack parameters (#358) * Refactor boxslotcyl initialization to use icepack parameters * Fix documentation typo * Correct use statement rename syntax * Change days_to_s to secday --- cicecore/cicedynB/general/ice_init.F90 | 9 +++------ doc/source/user_guide/ug_testing.rst | 2 +- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 9b48fed51..bce9d7352 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -1927,9 +1927,10 @@ subroutine boxslotcyl_data_vel(i, j, & iglob, jglob, & uvel, vvel) - use ice_constants, only: c1, c4, c2, c12, p5, cm_to_m + use ice_constants, only: c2, c12, p5, cm_to_m use ice_domain_size, only: nx_global, ny_global use ice_grid, only: dxrect + use icepack_parameters, only: secday, pi integer (kind=int_kind), intent(in) :: & i, j, & ! local indices @@ -1946,15 +1947,11 @@ subroutine boxslotcyl_data_vel(i, j, & max_vel , & ! max velocity domain_length , & ! physical domain length period ! rotational period - - real (kind=dbl_kind), parameter :: & - pi = c4*atan(c1), & ! pi - days_to_s = 86400._dbl_kind character(len=*), parameter :: subname = '(boxslotcyl_data_vel)' domain_length = dxrect*cm_to_m*nx_global - period = c12*days_to_s ! 12 days rotational period + period = c12*secday ! 
12 days rotational period max_vel = pi*domain_length/period uvel(i,j) = c2*max_vel*(real(jglob(j), kind=dbl_kind) - p5) & diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index c5f5a4e29..24c5b729a 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -895,7 +895,7 @@ Below is an example of a step-by-step procedure for testing a code change that m # Create a baseline dataset (only necessary if no baseline exists on the system) # git clone the baseline code - ./cice.setup -m onyx -e intel --suite base_suite --testid base0 -bgen cice.my.baseline + ./cice.setup -m onyx -e intel --suite base_suite --testid base0 --bgen cice.my.baseline # Run the test suite with the new code # git clone the new code From c4c82941bbbad873a34ce0db3a4c89af7a7e7b94 Mon Sep 17 00:00:00 2001 From: Alice DuVivier Date: Fri, 30 Aug 2019 16:55:59 -0600 Subject: [PATCH 05/14] fixing rst errors (#359) * fixing rst errors * removing implementation notes line --- doc/source/developer_guide/dg_scripts.rst | 2 +- doc/source/user_guide/ug_implementation.rst | 134 ++++++++++---------- doc/source/user_guide/ug_running.rst | 4 +- doc/source/user_guide/ug_testing.rst | 8 +- 4 files changed, 74 insertions(+), 74 deletions(-) diff --git a/doc/source/developer_guide/dg_scripts.rst b/doc/source/developer_guide/dg_scripts.rst index cdb066e6a..da5ef7d24 100755 --- a/doc/source/developer_guide/dg_scripts.rst +++ b/doc/source/developer_guide/dg_scripts.rst @@ -83,7 +83,7 @@ examples of the syntax. .. _build: Build Scripts ------------ +-------------- CICE uses GNU Make to build the model. There is a common **Makefile** for all machines. 
Each machine provides a Macros file to define some Makefile variables diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index e2a144ab3..eea7d8310 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -106,15 +106,15 @@ Big Endian files. In CESM, the sea ice model may exchange coupling fluxes using a different grid than the computational grid. This functionality is -activated using the namelist variable ``gridcpl\_file``. +activated using the namelist variable ``gridcpl_file``. *********************** Grid domains and blocks *********************** In general, the global gridded domain is -``nx\_global`` :math:`\times`\ ``ny\_global``, while the subdomains used in the -block distribution are ``nx\_block`` :math:`\times`\ ``ny\_block``. The +``nx_global`` :math:`\times`\ ``ny_global``, while the subdomains used in the +block distribution are ``nx_block`` :math:`\times`\ ``ny_block``. The physical portion of a subdomain is indexed as [``ilo:ihi``, ``jlo:jhi``], with nghost “ghost” or “halo" cells outside the domain used for boundary conditions. These parameters are illustrated in :ref:`fig-grid` in one @@ -141,14 +141,14 @@ Figure :ref:`fig-grid` shows the grid parameters for a sample one-dimensional, 2 global domain decomposed into four local subdomains. Each local domain has one ghost (halo) cell on each side, and the physical portion of the local domains are labeled ``ilo:ihi``. The parameter -``nx\_block`` is the total number of cells in the local domain, including +``nx_block`` is the total number of cells in the local domain, including ghost cells, and the same numbering system is applied to each of the four subdomains. 
The user sets the ``NTASKS`` and ``NTHRDS`` settings in **cice.settings** -and chooses a block size ``block\_size\_x`` :math:`\times`\ ``block\_size\_y``, -``max\_blocks``, and decomposition information ``distribution\_type``, ``processor\_shape``, -and ``distribution\_type`` in **ice\_in**. That information is used to +and chooses a block size ``block_size_x`` :math:`\times`\ ``block_size_y``, +``max_blocks``, and decomposition information ``distribution_type``, ``processor_shape``, +and ``distribution_type`` in **ice\_in**. That information is used to determine how the blocks are distributed across the processors, and how the processors are distributed across the grid domain. Recommended combinations of these @@ -159,8 +159,8 @@ but the user can overwrite the defaults by manually changing the values in information to the log file, and if the block size or max blocks is inconsistent with the task and thread size, the model will abort. The code will also print a warning if the maximum number of blocks is too large. -Although this is not fatal, it does use extra memory. If ``max\_blocks`` is -set to -1, the code will compute a ``max\_blocks`` on the fly. +Although this is not fatal, it does use extra memory. If ``max_blocks`` is +set to -1, the code will compute a ``max_blocks`` on the fly. A loop at the end of routine *create\_blocks* in module **ice\_blocks.F90** will print the locations for all of the blocks on @@ -173,10 +173,10 @@ manually set in the code in each case (independently of the dbug flag in **ice\_in**), as there may be hundreds or thousands of blocks to print and this information should be needed only rarely. This information is much easier to look at using a debugger such as Totalview. 
There is also -an output field that can be activated in `icefields\_nml`, ``f\_blkmask``, +an output field that can be activated in `icefields\_nml`, ``f_blkmask``, that prints out the variable ``blkmask`` to the history file and which labels the blocks in the grid decomposition according to ``blkmask = -my\_task + iblk/100``. +my_task + iblk/100``. ************* Tripole grids @@ -198,11 +198,11 @@ poles and the cells between them can be grid T cells, making a “T-fold.” Both of these options are also supported by the OPA/NEMO ocean model, which calls the U-fold an “f-fold” (because it uses the Arakawa C-grid in which U cells are on T-rows). The choice of tripole grid is given by -the namelist variable ``ns\_boundary\_type``, ‘tripole’ for the U-fold and +the namelist variable ``ns_boundary_type``, ‘tripole’ for the U-fold and ‘tripoleT’ for the T-fold grid. In the U-fold tripole grid, the poles have U-index -:math:`{\tt nx\_global}/2` and ``nx\_global`` on the top U-row of the +:math:`{\tt nx\_global}/2` and ``nx_global`` on the top U-row of the physical grid, and points with U-index i and :math:`{\tt nx\_global-i}` are coincident. Let the fold have U-row index :math:`n` on the global grid; this will also be the T-row index of the T-row to the south of the @@ -267,7 +267,7 @@ masked by land, periodic conditions wrap the domain around the globe. CICE can be run on regional grids with open boundary conditions; except for variables describing grid lengths, non-land halo cells along the grid edge must be filled by restoring them to specified values. The -namelist variable ``restore\_ice`` turns this functionality on and off; the +namelist variable ``restore_ice`` turns this functionality on and off; the restoring timescale ``trestore`` may be used (it is also used for restoring ocean sea surface temperature in stand-alone ice runs). 
This implementation is only intended to provide the “hooks" for a more @@ -279,7 +279,7 @@ allow Neumann boundary conditions, which must be set explicitly. This has been done in an unreleased branch of the code; contact Elizabeth for more information. -For exact restarts using restoring, set ``restart\_ext`` = true in namelist +For exact restarts using restoring, set ``restart_ext`` = true in namelist to use the extended-grid subroutines. On tripole grids, the order of operations used for calculating elements @@ -308,27 +308,27 @@ The logical masks ``tmask`` and ``umask`` (which correspond to the real masks In addition to the land masks, two other masks are implemented in *dyn\_prep* in order to reduce the dynamics component’s work on a global -grid. At each time step the logical masks ``ice\_tmask`` and ``ice\_umask`` are +grid. At each time step the logical masks ``ice_tmask`` and ``ice_umask`` are determined from the current ice extent, such that they have the value “true” wherever ice exists. They also include a border of cells around the ice pack for numerical purposes. These masks are used in the dynamics component to prevent unnecessary calculations on grid points where there is no ice. They are not used in the thermodynamics component, so that ice may form in previously ice-free cells. Like the -land masks ``hm`` and ``uvm``, the ice extent masks ``ice\_tmask`` and ``ice\_umask`` +land masks ``hm`` and ``uvm``, the ice extent masks ``ice_tmask`` and ``ice_umask`` are for T cells and U cells, respectively. Improved parallel performance may result from utilizing halo masks for boundary updates of the full ice state, incremental remapping transport, or for EVP or EAP dynamics. These options are accessed through the -logical namelist flags ``maskhalo\_bound``, ``maskhalo\_remap``, and -``maskhalo\_dyn``, respectively. Only the halo cells containing needed +logical namelist flags ``maskhalo_bound``, ``maskhalo_remap``, and +``maskhalo_dyn``, respectively. 
Only the halo cells containing needed information are communicated. -Two additional masks are created for the user’s convenience: ``lmask\_n`` -and ``lmask\_s`` can be used to compute or write data only for the northern +Two additional masks are created for the user’s convenience: ``lmask_n`` +and ``lmask_s`` can be used to compute or write data only for the northern or southern hemispheres, respectively. Special constants (``spval`` and -``spval\_dbl``, each equal to :math:`10^{30}`) are used to indicate land +``spval_dbl``, each equal to :math:`10^{30}`) are used to indicate land points in the history files and diagnostics. @@ -338,13 +338,13 @@ points in the history files and diagnostics. Performance *************** -Namelist options (*domain\_nml*) provide considerable flexibility for +Namelist options (*domain_nml*) provide considerable flexibility for finding efficient processor and block configuration. Some of these choices are illustrated in :ref:`fig-distrb`. Users have control -of many aspects of the decomposition such as the block size (``block\_size\_x``, -``block\_size\_y``), the ``distribution\_type``, the ``distribution\_wght``, -the ``distribution\_wght\_file`` (when ``distribution\_type`` = ``wghtfile``), -and the ``processor\_shape`` (when ``distribution\_type`` = ``cartesian``). +of many aspects of the decomposition such as the block size (``block_size_x``, +``block_size_y``), the ``distribution_type``, the ``distribution_wght``, +the ``distribution_wght_file`` (when ``distribution_type`` = ``wghtfile``), +and the ``processor_shape`` (when ``distribution_type`` = ``cartesian``). The user specifies the total number of tasks and threads in **cice.settings** and the block size and decompostion in the namelist file. The main trades @@ -361,7 +361,7 @@ volume-to-surface ratio important for communication cost. Often 3 to 8 blocks per processor provide the decompositions flexiblity to create reasonable load balance configurations. 
-The ``distribution\_type`` options allow standard cartesian distributions +The ``distribution_type`` options allow standard cartesian distributions of blocks, redistribution via a ‘rake’ algorithm for improved load balancing across processors, and redistribution based on space-filling curves. There are also additional distribution types @@ -395,7 +395,7 @@ Figure :ref:`fig-distrbB` shows sample decompositions for (a) spiral center and (b) wghtfile for an Arctic polar grid. (c) is the weight field in the input file use to drive the decompostion in (b). -``processor\_shape`` is used with the ``distribution\_type`` cartesian option, +``processor_shape`` is used with the ``distribution_type`` cartesian option, and it allocates blocks to processors in various groupings such as tall, thin processor domains (``slenderX1`` or ``slenderX2``, often better for sea ice simulations on global grids where nearly all of @@ -405,14 +405,14 @@ which maximize the volume to surface ratio (and therefore on-processor computations to message passing, if there were ice in every grid cell). In cases where the number of processors is not a perfect square (4, 9, 16...), the -``processor\_shape`` namelist variable allows the user to choose how the +``processor_shape`` namelist variable allows the user to choose how the processors are arranged. Here again, it is better in the sea ice model to have more processors in x than in y, for example, 8 processors arranged 4x2 (``square-ice``) rather than 2x4 (``square-pop``). The latter option is offered for direct-communication compatibility with POP, in which this is the default. -``distribution\_wght`` chooses how the work-per-block estimates are +``distribution_wght`` chooses how the work-per-block estimates are weighted. The ‘block’ option is the default in POP and it weights each block equally. This is useful in POP which always has work in each block and is written with a lot of @@ -422,7 +422,7 @@ direct-communication compatibility with POP. 
The ‘latitude’ option weights the blocks based on latitude and the number of ocean grid cells they contain. Many of the non-cartesian decompositions support automatic land block elimination and provide alternative ways to -decompose blocks without needing the ``distribution\_wght``. +decompose blocks without needing the ``distribution_wght``. The rake distribution type is initialized as a standard, Cartesian distribution. Using the work-per-block estimates, blocks are “raked" @@ -549,7 +549,7 @@ layers and the ice thickness distribution defined by ``kcatbound`` = 0. Restart information for some tracers is also included in the netCDF restart files. -Three namelist variables control model initialization, ``ice\_ic``, ``runtype``, +Three namelist variables control model initialization, ``ice_ic``, ``runtype``, and ``restart``, as described in :ref:`tab-ic`. It is possible to do an initial run from a file **filename** in two ways: (1) set runtype = ‘initial’, restart = true and ice\_ic = **filename**, or (2) runtype = @@ -562,7 +562,7 @@ true or false, depending on whether the tracer restart data exist. With the second option, tracer restart flags are set to ‘continue’ for all active tracers. -An additional namelist option, ``restart\_ext`` specifies whether halo cells +An additional namelist option, ``restart_ext`` specifies whether halo cells are included in the restart files. This option is useful for tripole and regional grids, but can not be used with PIO. @@ -577,8 +577,8 @@ her own routines. Whether the code is to be run in stand-alone or coupled mode is determined at compile time, as described below. Table :ref:`tab-ic` shows ice initial state resulting from combinations of -``ice\_ic``, ``runtype`` and ``restart``. :math:`^a`\ If false, restart is reset to -true. :math:`^b`\ restart is reset to false. :math:`^c`\ ice\_ic is +``ice_ic``, ``runtype`` and ``restart``. :math:`^a`\ If false, restart is reset to +true. :math:`^b`\ restart is reset to false. 
:math:`^c`\ ice_ic is reset to ‘none.’ .. _tab-ic: @@ -607,9 +607,9 @@ The time step is chosen based on stability of the transport component (both horizontal and in thickness space) and on resolution of the physical forcing. CICE allows the dynamics, advection and ridging portion of the code to be run with a shorter timestep, -:math:`\Delta t_{dyn}` (``dt\_dyn``), than the thermodynamics timestep +:math:`\Delta t_{dyn}` (``dt_dyn``), than the thermodynamics timestep :math:`\Delta t` (``dt``). In this case, ``dt`` and the integer ndtd are -specified, and ``dt\_dyn`` = ``dt/ndtd``. +specified, and ``dt_dyn`` = ``dt/ndtd``. A conservative estimate of the horizontal transport time step bound, or CFL condition, under remapping yields @@ -632,8 +632,8 @@ As discussed in :cite:`Lipscomb07`, the maximum time step in practice is usually determined by the time scale for large changes in the ice strength (which depends in part on wind strength). Using the strength parameterization of :cite:`Rothrock75`, limits the time step to :math:`\sim`\ 30 -minutes for the old ridging scheme (``krdg\_partic`` = 0), and to -:math:`\sim`\ 2 hours for the new scheme (``krdg\_partic`` = 1), assuming +minutes for the old ridging scheme (``krdg_partic`` = 0), and to +:math:`\sim`\ 2 hours for the new scheme (``krdg_partic`` = 1), assuming :math:`\Delta x` = 10 km. Practical limits may be somewhat less, depending on the strength of the atmospheric winds. @@ -646,7 +646,7 @@ growth rate. For the 5-category ice thickness distribution used as the default in this distribution, this is not a stringent limitation: :math:`\Delta t < 19.4` hr, assuming :math:`\max(f) = 40` cm/day. -In the classic EVP or EAP approach (``kdyn`` = 1 or 2, ``revised\_evp`` = false), +In the classic EVP or EAP approach (``kdyn`` = 1 or 2, ``revised_evp`` = false), the dynamics component is subcycled ndte (:math:`N`) times per dynamics time step so that the elastic waves essentially disappear before the next time step. 
The subcycling time step (:math:`\Delta @@ -657,7 +657,7 @@ t_e`) is thus A second parameter, :math:`E_\circ` (``eyc``), defines the elastic wave damping timescale :math:`T`, described in Section :ref:`dynam`, as -``eyc\ * dt\_dyn``. The forcing terms are not updated during the subcycling. +``eyc * dt_dyn``. The forcing terms are not updated during the subcycling. Given the small step (``dte``) at which the EVP dynamics model is subcycled, the elastic parameter :math:`E` is also limited by stability constraints, as discussed in :cite:`Hunke97`. Linear stability @@ -683,7 +683,7 @@ temperature :math:`T_{sfc}` is computed internally. The numerical constraint on the thermodynamics time step is associated with the transport scheme rather than the thermodynamic solver. -For the revised EVP approach (``kdyn`` = 1, ``revised\_evp`` = true), the +For the revised EVP approach (``kdyn`` = 1, ``revised_evp`` = true), the relaxation parameter ``arlx1i`` effectively sets the damping timescale in the problem, and ``brlx`` represents the effective subcycling :cite:`Bouillon13` (see Section :ref:`revp`). @@ -699,16 +699,16 @@ History files ************* Model output data is averaged over the period(s) given by ``histfreq`` and -``histfreq\_n``, and written to binary or netCDF files prepended by ``history\_file`` -in **ice\_in**. These settings for history files are set in the -**setup\_nml** section of **ice\_in** (see :ref:`tabnamelist`). -If ``history\_file`` = ‘iceh’ then the +``histfreq_n``, and written to binary or netCDF files prepended by ``history_file`` +in **ice_in**. These settings for history files are set in the +**setup_nml** section of **ice_in** (see :ref:`tabnamelist`). +If ``history_file`` = ‘iceh’ then the filenames will have the form **iceh.[timeID].nc** or **iceh.[timeID].da**, depending on the output file format chosen in **cice.settings** (set -``ICE\_IOTYPE``). The netCDF history files are CF-compliant; header information for +``ICE_IOTYPE``). 
The netCDF history files are CF-compliant; header information for data contained in the netCDF files is displayed with the command ``ncdump -h filename.nc``. Parallel netCDF output is available using the PIO library; the -attribute ``io\_flavor`` distinguishes output files written with PIO from +attribute ``io_flavor`` distinguishes output files written with PIO from those written with standard netCDF. With binary files, a separate header file is written with equivalent information. Standard fields are output according to settings in the **icefields\_nml** section of **ice\_in** @@ -735,7 +735,7 @@ monthly) via its namelist flag, `f\_` :math:`\left<{var}\right>`, which is now a character string corresponding to ``histfreq`` or ‘x’ for none. (Grid variable flags are still logicals, since they are written to all files, no matter what the frequency is.) If there are no namelist flags -with a given ``histfreq`` value, or if an element of ``histfreq\_n`` is 0, then +with a given ``histfreq`` value, or if an element of ``histfreq_n`` is 0, then no file will be written at that frequency. The output period can be discerned from the filenames. @@ -766,7 +766,7 @@ The history variable names must be unique for netCDF, so in cases where a variable is written at more than one frequency, the variable name is appended with the frequency in files after the first one. In the example above, ``meltb`` is called ``meltb`` in the monthly file (for backward -compatibility with the default configuration) and ``meltb\_h`` in the +compatibility with the default configuration) and ``meltb_h`` in the 6-hourly file. Using the same frequency twice in ``histfreq`` will have unexpected @@ -774,7 +774,7 @@ consequences and currently will cause the code to abort. It is not possible at the moment to output averages once a month and also once every 3 months, for example. 
-If ``write\_ic`` is set to true in **ice\_in**, a snapshot of the same set +If ``write_ic`` is set to true in **ice\_in**, a snapshot of the same set of history fields at the start of the run will be written to the history directory in **iceh\_ic.[timeID].nc(da)**. Several history variables are hard-coded for instantaneous output regardless of the averaging flag, at @@ -811,16 +811,16 @@ Diagnostic files Like ``histfreq``, the parameter ``diagfreq`` can be used to regulate how often output is written to a log file. The log file unit to which diagnostic -output is written is set in **ice\_fileunits.F90**. If ``diag\_type`` = +output is written is set in **ice\_fileunits.F90**. If ``diag_type`` = ‘stdout’, then it is written to standard out (or to **ice.log.[ID]** if you redirect standard out as in **cice.run**); otherwise it is written -to the file given by ``diag\_file``. In addition to the standard diagnostic +to the file given by ``diag_file``. In addition to the standard diagnostic output (maximum area-averaged thickness, velocity, average albedo, total ice area, and total ice and snow volumes), the namelist options -``print\_points`` and ``print\_global`` cause additional diagnostic information -to be computed and written. ``print\_global`` outputs global sums that are +``print_points`` and ``print_global`` cause additional diagnostic information +to be computed and written. ``print_global`` outputs global sums that are useful for checking global conservation of mass and energy. -``print\_points`` writes data for two specific grid points. Currently, one +``print_points`` writes data for two specific grid points. Currently, one point is near the North Pole and the other is in the Weddell Sea; these may be changed in **ice\_in**. @@ -888,27 +888,27 @@ Restart files ************* CICE provides restart data in binary unformatted or netCDF formats, via -the ``ICE\_IOTYPE`` flag in **cice.settings** and namelist variable -``restart\_format``. 
Restart and history files must use the same format. As +the ``ICE_IOTYPE`` flag in **cice.settings** and namelist variable +``restart_format``. Restart and history files must use the same format. As with the history output, there is also an option for writing parallel netCDF restart files using PIO. The restart files created by CICE contain all of the variables needed for a full, exact restart. The filename begins with the character string ‘iced.’, and the restart dump frequency is given by the namelist -variables ``dumpfreq`` and ``dumpfreq\_n``. The pointer to the filename from +variables ``dumpfreq`` and ``dumpfreq_n``. The pointer to the filename from which the restart data is to be read for a continuation run is set in -``pointer\_file``. The code assumes that auxiliary binary tracer restart +``pointer_file``. The code assumes that auxiliary binary tracer restart files will be identified using the same pointer and file name prefix, but with an additional character string in the file name that is associated with each tracer set. All variables are included in netCDF restart files. Additional namelist flags provide further control of restart behavior. -``dump\_last`` = true causes a set of restart files to be written at the end +``dump_last`` = true causes a set of restart files to be written at the end of a run when it is otherwise not scheduled to occur. The flag -``use\_restart\_time`` enables the user to choose to use the model date -provided in the restart files. If ``use\_restart\_time`` = false then the +``use_restart_time`` enables the user to choose to use the model date +provided in the restart files. If ``use_restart_time`` = false then the initial model date stamp is determined from the namelist parameters. lcdf64 = true sets 64-bit netCDF output, allowing larger file sizes. 
@@ -917,7 +917,7 @@ of the “extended" global grid, including the physical domain and ghost (halo) cells around the outer edges, allow exact restarts on regional grids with open boundary conditions, and they will also simplify restarts on the various tripole grids. They are accessed by setting -``restart\_ext`` = true in namelist. Extended grid restarts are not +``restart_ext`` = true in namelist. Extended grid restarts are not available when using PIO; in this case extra halo update calls fill ghost cells for tripole grids (do not use PIO for regional grids). @@ -929,5 +929,5 @@ initialized with no ice. The gx3 case was run for 1 year using the 1997 forcing data provided with the code. The gx1 case was run for 20 years, so that the date of restart in the file is 1978-01-01. Note that the restart dates provided in the restart files can be overridden using the -namelist variables ``use\_restart\_time``, ``year\_init`` and ``istep0``. The -forcing time can also be overridden using ``fyear\_init``. +namelist variables ``use_restart_time``, ``year_init`` and ``istep0``. The +forcing time can also be overridden using ``fyear_init``. diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index 69b82d93e..4abd52d0a 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -10,7 +10,7 @@ Quick-start instructions are provided in the :ref:`quickstart` section. .. _software: Software Requirements -------- +---------------------- To run stand-alone, CICE requires @@ -162,7 +162,7 @@ case directory, NOT the run directory. .. _case_options: **cice.setup** Command Line Options -~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``cice.setup -h`` provides a summary of the command line options. There are three different modes, ``--case``, ``--test``, and ``--suite``. This section provides details about the relevant options for setting up cases with examples. 
Testing will be described in greater detail in the :ref:`testing` section. diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index 24c5b729a..7a17e67ea 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -867,10 +867,6 @@ The script will produce output similar to: Additionally, the exit code from the test (``echo $?``) will be 0 if the test passed, and 1 if the test failed. -Implementation notes: 1) Provide a pass/fail on each of the confidence -intervals, 2) Facilitate output of a bitmap for each test so that -locations of failures can be identified. - The ``cice.t-test.py`` requires memory to store multiple two-dimensional fields spanning 1825 unique timesteps, a total of several GB. An appropriate resource is needed to run the script. If the script runs out of memory on an interactive resource, try @@ -907,6 +903,8 @@ Below is an example of a step-by-step procedure for testing a code change that m cd testsuite.test0 ./results.csh + # Note which tests failed and determine which namelist options are responsible for the failures + .. 
If the regression comparisons fail, then you may want to run the QC test, @@ -920,6 +918,7 @@ If the regression comparisons fail, then you may want to run the QC test, ./cice.setup -m onyx -e intel --test smoke -g gx1 -p 44x1 --testid qc_base -s qc,medium cd onyx_intel_smoke_gx1_44x1_medium_qc.qc_base + # modify ice_in to activate the namelist options that were determined above ./cice.build ./cice.submit @@ -928,6 +927,7 @@ If the regression comparisons fail, then you may want to run the QC test, ./cice.setup -m onyx -e intel --test smoke -g gx1 -p 44x1 -testid qc_test -s qc,medium cd onyx_intel_smoke_gx1_44x1_medium_qc.qc_test + # modify ice_in to activate the namelist options that were determined above ./cice.build ./cice.submit From d709a167e2a879a6446a8f8986a7da1ba274de22 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Wed, 11 Sep 2019 08:23:08 -0700 Subject: [PATCH 06/14] update icepack, nag cleanup, remove tabs (#361) * update icepack, nag cleanup, remove tabs * update dyrect in boxslotcyl --- .../cicedynB/analysis/ice_history_bgc.F90 | 12 +-- .../cicedynB/analysis/ice_history_shared.F90 | 16 ++-- cicecore/cicedynB/dynamics/ice_dyn_eap.F90 | 8 +- cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 6 +- cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 | 14 +-- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 18 +--- .../cicedynB/dynamics/ice_transport_remap.F90 | 2 +- cicecore/cicedynB/general/ice_forcing.F90 | 86 ++++++++++--------- cicecore/cicedynB/general/ice_forcing_bgc.F90 | 60 ++++++------- cicecore/cicedynB/general/ice_init.F90 | 16 ++-- cicecore/cicedynB/general/ice_step_mod.F90 | 6 -- .../comm/mpi/ice_gather_scatter.F90 | 1 - .../comm/serial/ice_boundary.F90 | 3 +- .../cicedynB/infrastructure/ice_domain.F90 | 2 + .../infrastructure/ice_read_write.F90 | 30 ++++--- .../cicedynB/infrastructure/ice_restoring.F90 | 2 +- .../io/io_binary/ice_history_write.F90 | 2 +- .../io/io_binary/ice_restart.F90 | 15 ++-- .../io/io_netcdf/ice_restart.F90 | 9 +- 
.../io/io_pio/ice_history_write.F90 | 8 +- cicecore/drivers/cesm/ice_comp_esmf.F90 | 12 +-- cicecore/drivers/cesm/ice_comp_mct.F90 | 14 +-- cicecore/drivers/cesm/ice_prescribed_mod.F90 | 2 +- cicecore/drivers/cesm/ice_scam.F90 | 1 - cicecore/drivers/cice/CICE_InitMod.F90 | 4 +- cicecore/drivers/cice/CICE_RunMod.F90 | 2 +- cicecore/shared/ice_arrays_column.F90 | 3 +- cicecore/shared/ice_calendar.F90 | 8 +- cicecore/shared/ice_init_column.F90 | 17 ++-- cicecore/shared/ice_restart_column.F90 | 3 - icepack | 2 +- 31 files changed, 173 insertions(+), 211 deletions(-) diff --git a/cicecore/cicedynB/analysis/ice_history_bgc.F90 b/cicecore/cicedynB/analysis/ice_history_bgc.F90 index ae5bccb2f..c27683423 100644 --- a/cicecore/cicedynB/analysis/ice_history_bgc.F90 +++ b/cicecore/cicedynB/analysis/ice_history_bgc.F90 @@ -112,10 +112,10 @@ module ice_history_bgc f_peakval = 'x', f_bgc_Fed_ml = 'x', & f_bgc_Fep_ml = 'x', f_bgc_hum_ml = 'x', & f_bgc_N_cat1 = 'x', f_bgc_DOC_cat1 = 'x', & - f_bgc_DIC_cat1 = 'x', f_bgc_Nit_cat1 = 'x', & + f_bgc_DIC_cat1 = 'x', f_bgc_Nit_cat1 = 'x', & f_bgc_Am_cat1 = 'x', f_bgc_Sil_cat1 = 'x', & f_bgc_DMSPd_cat1= 'x', f_bgc_DMS_cat1 = 'x', & - f_bgc_DON_cat1 = 'x', f_bgc_Fed_cat1 = 'x', & + f_bgc_DON_cat1 = 'x', f_bgc_Fed_cat1 = 'x', & f_bgc_hum_cat1 = 'x', f_bgc_Fep_cat1 = 'x', & f_bgc_PON_cat1 = 'x' @@ -226,11 +226,11 @@ module ice_history_bgc n_upNO , n_upNH , & n_bTin , n_bphi , & n_iDi , n_iki , & - n_bgc_PON , n_bgc_PON_ml , & + n_bgc_PON , & n_fbri , n_hbri , & n_zfswin , n_Nitnet , & n_Amnet , n_Silnet , & - n_humnet , & + n_humnet , & n_DMSPpnet , n_DMSPdnet , & n_DMSnet , n_PONnet , & n_Nitsnow , n_Amsnow , & @@ -238,8 +238,8 @@ module ice_history_bgc n_DMSPpsnow , n_DMSPdsnow , & n_DMSsnow , n_PONsnow , & n_Nitfrac , n_Amfrac , & - n_Silfrac , n_zbgc_frac , & - n_humfrac , & + n_Silfrac , & + n_humfrac , & n_DMSPpfrac , n_DMSPdfrac , & n_DMSfrac , n_PONfrac , & n_grownet , n_PPnet , & diff --git 
a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index d8335e1f7..64a82e9ae 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -452,7 +452,7 @@ module ice_history_shared f_e22, & f_s11, f_s12, & f_s22, & - f_yieldstress11, & + f_yieldstress11, & f_yieldstress12, & f_yieldstress22 @@ -608,13 +608,13 @@ module ice_history_shared n_keffn_top , & n_Tinz , n_Sinz , & n_Tsnz , & - n_a11 , n_a12 , & - n_e11 , n_e12 , & - n_e22 , & - n_s11 , n_s12 , & - n_s22 , & - n_yieldstress11, n_yieldstress12, & - n_yieldstress22 + n_a11 , n_a12 , & + n_e11 , n_e12 , & + n_e22 , & + n_s11 , n_s12 , & + n_s22 , & + n_yieldstress11, n_yieldstress12, & + n_yieldstress22 interface accum_hist_field ! generic interface module procedure accum_hist_field_2D, & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 index 944d80607..bc42e4855 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 @@ -119,8 +119,8 @@ subroutine eap (dt) use ice_constants, only: field_loc_center, field_loc_NEcorner, & field_type_scalar, field_type_vector use ice_domain, only: nblocks, blocks_ice, halo_info, maskhalo_dyn - use ice_dyn_shared, only: fcor_blk, ndte, dtei, a_min, m_min, & - cosw, sinw, denom1, uvel_init, vvel_init, arlx1i, & + use ice_dyn_shared, only: fcor_blk, ndte, dtei, & + denom1, uvel_init, vvel_init, arlx1i, & dyn_prep1, dyn_prep2, stepu, dyn_finish, & basal_stress_coeff, basalstress use ice_flux, only: rdg_conv, strairxT, strairyT, & @@ -1615,9 +1615,9 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & invstressconviso, & Angle_denom_gamma, Angle_denom_alpha, & Tany_1, Tany_2, & - gamma, alpha, x, y, dx, dy, da, & + x, y, dx, dy, da, & invdx, invdy, invda, invsin, & - invleng, dtemp1, dtemp2, atempprime, & + dtemp1, dtemp2, atempprime, & kxw, kyw, kaw, & puny, pi, 
pi2, piq, pih diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index c1371db7a..34ba5b002 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -37,8 +37,8 @@ module ice_dyn_evp use ice_communicate, only: my_task use ice_constants, only: field_loc_center, field_loc_NEcorner, & field_type_scalar, field_type_vector - use ice_constants, only: c0, c4, p027, p055, p111, p166, & - p2, p222, p25, p333, p5, c1 + use ice_constants, only: c0, p027, p055, p111, p166, & + p222, p25, p333, p5, c1 use ice_dyn_shared, only: stepu, dyn_prep1, dyn_prep2, dyn_finish, & ndte, yield_curve, ecci, denom1, arlx1i, fcor_blk, uvel_init, & vvel_init, basal_stress_coeff, basalstress, Ktens, revp @@ -641,7 +641,7 @@ subroutine stress (nx_block, ny_block, & tensionne, tensionnw, tensionse, tensionsw, & ! tension shearne, shearnw, shearse, shearsw , & ! shearing Deltane, Deltanw, Deltase, Deltasw , & ! Delt - puny , & ! puny +! puny , & ! puny c0ne, c0nw, c0se, c0sw , & ! 
useful combinations c1ne, c1nw, c1se, c1sw , & ssigpn, ssigps, ssigpe, ssigpw , & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 index b7092fc95..e82b12fd1 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 @@ -111,8 +111,8 @@ subroutine domp_get_domain_rlu(lower,upper,d_lower,d_upper) #if defined (_OPENMP) use omp_lib, only : omp_in_parallel -#endif use ice_constants, only: p5 +#endif integer(KIND=JPIM), intent(in) :: lower,upper integer(KIND=JPIM), intent(out) :: d_lower,d_upper @@ -120,7 +120,6 @@ subroutine domp_get_domain_rlu(lower,upper,d_lower,d_upper) #if defined (_OPENMP) !-- local variables real(kind=dbl_kind) :: dlen - integer(int_kind) :: lr, ur #endif character(len=*), parameter :: subname = '(domp_get_domain_rlu)' @@ -969,9 +968,6 @@ module ice_dyn_evp_1d use ice_kinds_mod use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice - !-- BEGIN: specific for the KERNEL - use ice_dyn_shared, only: revp, ecci, denom1, arlx1i, brlx - !-- END: specific for the KERNEL implicit none private @@ -1150,7 +1146,6 @@ subroutine evp_copyin_v2(nx,ny,nblk,nx_glob,ny_glob, use ice_gather_scatter, only: gather_global_ext use ice_domain, only: distrb_info use ice_communicate, only: my_task, master_task - use ice_constants, only: c0,c1,p5 implicit none @@ -1286,9 +1281,8 @@ subroutine evp_copyout(nx,ny,nblk,nx_glob,ny_glob, & I_stress12_1,I_stress12_2,I_stress12_3,I_stress12_4, & I_divu,I_rdg_conv,I_rdg_shear,I_shear,I_taubx,I_tauby ) - use ice_constants, only : c0, field_loc_center, field_loc_NEcorner, & - field_type_scalar, field_type_vector - use ice_gather_scatter, only: scatter_global_ext, scatter_global + use ice_constants, only : c0 + use ice_gather_scatter, only: scatter_global_ext use ice_domain, only: distrb_info use ice_communicate, only: my_task, master_task @@ -1420,7 +1414,7 @@ subroutine evp_kernel_v2 implicit none real(kind=dbl_kind) 
:: rhow - integer (kind=int_kind) :: ierr, lun, i, nthreads + integer (kind=int_kind) :: i, nthreads integer (kind=int_kind) :: na,nb,navel character(len=*), parameter :: subname = '(evp_kernel_v2)' diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index c26e36d96..fec4c6823 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -11,7 +11,8 @@ module ice_dyn_shared use ice_kinds_mod use ice_communicate, only: my_task, master_task - use ice_constants, only: c0, c1, p01, p001 + use ice_constants, only: c0, c1, c2, p01, p001 + use ice_constants, only: omega, spval_dbl, p5, c4 use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: max_blocks use ice_fileunits, only: nu_diag @@ -115,7 +116,6 @@ end subroutine alloc_dyn_shared subroutine init_evp (dt) use ice_blocks, only: nx_block, ny_block - use ice_constants, only: c0, c2, omega use ice_domain, only: nblocks use ice_domain_size, only: max_blocks use ice_flux, only: rdg_conv, rdg_shear, iceumask, & @@ -205,12 +205,6 @@ end subroutine init_evp subroutine set_evp_parameters (dt) - use ice_communicate, only: my_task, master_task - use ice_constants, only: p25, c1, c2, p5 - use ice_domain, only: distrb_info - use ice_global_reductions, only: global_minval - use ice_grid, only: dxt, dyt, tmask - real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -275,8 +269,6 @@ subroutine dyn_prep1 (nx_block, ny_block, & strairx, strairy, & tmass, icetmask) - use ice_constants, only: c0 - integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions ilo,ihi,jlo,jhi ! beginning and end of physical domain @@ -409,8 +401,6 @@ subroutine dyn_prep2 (nx_block, ny_block, & uvel, vvel, & Tbu) - use ice_constants, only: c0, c1 - integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions ilo,ihi,jlo,jhi ! 
beginning and end of physical domain @@ -877,8 +867,6 @@ subroutine basal_stress_coeff (nx_block, ny_block, & vice, aice, & hwater, Tbu) - use ice_constants, only: c0, c1 - integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions icellu ! no. of cells where icetmask = 1 @@ -948,8 +936,6 @@ subroutine principal_stress(nx_block, ny_block, & sig1, sig2, & sigP) - use ice_constants, only: spval_dbl, p5, c4 - integer (kind=int_kind), intent(in) :: & nx_block, ny_block ! block dimensions diff --git a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 b/cicecore/cicedynB/dynamics/ice_transport_remap.F90 index bf9d0f373..b641104ed 100644 --- a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 +++ b/cicecore/cicedynB/dynamics/ice_transport_remap.F90 @@ -260,7 +260,7 @@ subroutine init_remap ! xxxav, xxyav, xyyav, yyyav integer (kind=int_kind) :: & - i, j, iblk ! standard indices + i, j, iblk ! standard indices character(len=*), parameter :: subname = '(init_remap)' diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index 2a529c2fe..f1a307ae0 100644 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -32,7 +32,7 @@ module ice_forcing use ice_arrays_column, only: oceanmixed_ice, restore_bgc use ice_constants, only: c0, c1, c2, c3, c4, c5, c10, c12, c15, c20, & c180, c360, c365, c1000, c3600 - use ice_constants, only: p001, p01, p1, p25, p5, p6 + use ice_constants, only: p001, p01, p1, p2, p25, p5, p6 use ice_constants, only: cm_to_m use ice_constants, only: field_loc_center, field_type_scalar, & field_type_vector, field_loc_NEcorner @@ -414,11 +414,11 @@ subroutine init_forcing_ocn(dt) if (trim(ocn_data_type) == 'hadgem_sst' .or. & trim(ocn_data_type) == 'hadgem_sst_uvocn') then - diag = .true. ! write diagnostic information + diag = .true. ! 
write diagnostic information sst_file = trim (ocn_data_dir)//'/MONTHLY/sst.1997.nc' - if (my_task == master_task) then + if (my_task == master_task) then write (nu_diag,*) ' ' write (nu_diag,*) 'Initial SST file:', trim(sst_file) @@ -3336,6 +3336,7 @@ subroutine ocn_data_ncar_init 'T', 'S', 'hblt', 'U', 'V', & 'dhdx', 'dhdy', 'qdp' / +#ifdef ncdf integer (kind=int_kind) :: & fid , & ! file id dimid ! dimension id @@ -3344,6 +3345,7 @@ subroutine ocn_data_ncar_init status , & ! status flag nlat , & ! number of longitudes of data nlon ! number of latitudes of data +#endif real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & work1 @@ -3477,6 +3479,7 @@ subroutine ocn_data_ncar_init_3D use netcdf #endif +#ifdef ncdf integer (kind=int_kind) :: & n , & ! field index m , & ! month index @@ -3499,6 +3502,7 @@ subroutine ocn_data_ncar_init_3D real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & work1, work2 +#endif character(len=*), parameter :: subname = '(ocn_data_ncar_init_3D)' @@ -3815,8 +3819,8 @@ subroutine ocn_data_hadgem(dt) ! Reads in HadGEM ocean forcing data as required from netCDF files ! Current options (selected by ocn_data_type) -! hadgem_sst: read in sst only -! hadgem_sst_uvocn: read in sst plus uocn and vocn +! hadgem_sst: read in sst only +! hadgem_sst_uvocn: read in sst plus uocn and vocn ! authors: Ann Keen, Met Office @@ -3842,10 +3846,10 @@ subroutine ocn_data_hadgem(dt) logical (kind=log_kind) :: readm character (char_len) :: & - fieldname ! field name in netcdf file + fieldname ! field name in netcdf file character (char_len_long) :: & - filename ! name of netCDF file + filename ! name of netCDF file character(len=*), parameter :: subname = '(ocn_data_hadgem)' @@ -3898,7 +3902,7 @@ subroutine ocn_data_hadgem(dt) ! ----------------------------------------------------------- ! SST ! 
----------------------------------------------------------- - sst_file = trim(ocn_data_dir)//'/MONTHLY/sst.1997.nc' + sst_file = trim(ocn_data_dir)//'/MONTHLY/sst.1997.nc' fieldname='sst' call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, sst_file, fieldname, sst_data, & @@ -3932,23 +3936,23 @@ subroutine ocn_data_hadgem(dt) if (trim(ocn_data_type)=='hadgem_sst_uvocn') then - filename = trim(ocn_data_dir)//'/MONTHLY/uocn.1997.nc' - fieldname='uocn' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + filename = trim(ocn_data_dir)//'/MONTHLY/uocn.1997.nc' + fieldname='uocn' + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, filename, fieldname, uocn_data, & field_loc_center, field_type_vector) - ! Interpolate to current time step - call interpolate_data (uocn_data, uocn) + ! Interpolate to current time step + call interpolate_data (uocn_data, uocn) - filename = trim(ocn_data_dir)//'/MONTHLY/vocn.1997.nc' - fieldname='vocn' - call read_data_nc (readm, 0, fyear, ixm, month, ixp, & + filename = trim(ocn_data_dir)//'/MONTHLY/vocn.1997.nc' + fieldname='vocn' + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, filename, fieldname, vocn_data, & field_loc_center, field_type_vector) - ! Interpolate to current time step - call interpolate_data (vocn_data, vocn) + ! Interpolate to current time step + call interpolate_data (vocn_data, vocn) !----------------------------------------------------------------- ! Rotate zonal/meridional vectors to local coordinates, @@ -3970,9 +3974,9 @@ subroutine ocn_data_hadgem(dt) uocn(i,j,iblk) = uocn(i,j,iblk) * cm_to_m vocn(i,j,iblk) = vocn(i,j,iblk) * cm_to_m - enddo ! i - enddo ! j - enddo ! nblocks + enddo ! i + enddo ! j + enddo ! nblocks !$OMP END PARALLEL DO !----------------------------------------------------------------- @@ -3994,7 +3998,6 @@ subroutine ocn_data_hycom_init ! 
+ rename/link file use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks - use ice_domain_size, only: max_blocks use ice_flux, only: sss, sst, Tf #ifdef ncdf use netcdf @@ -4453,13 +4456,11 @@ subroutine ISPOL_data !local parameters +#ifdef ncdf character (char_len_long) :: & met_file, & ! netcdf filename fieldname ! field name in netcdf file - integer (kind=int_kind) :: & - status ! status flag - real (kind=dbl_kind), dimension(2), save :: & Tair_data_p , & ! air temperature (K) for interpolation Qa_data_p, fsnow_data_p, & @@ -4479,30 +4480,33 @@ subroutine ISPOL_data ! for interpolation of hourly data integer (kind=int_kind) :: & - ixm,ixx,ixp , & ! record numbers for neighboring months - recnum , & ! record number - recnum4X , & ! record number - maxrec , & ! maximum record number - recslot , & ! spline slot for current record - dataloc ! = 1 for data located in middle of time interval + ixm,ixx,ixp , & ! record numbers for neighboring months + maxrec , & ! maximum record number + recslot , & ! spline slot for current record + dataloc ! = 1 for data located in middle of time interval ! = 2 for date located at end of time interval - real (kind=dbl_kind) :: & + real (kind=dbl_kind) :: & secday , & Qa_pnt real (kind=dbl_kind) :: & - sec1hr ! number of seconds in 1 hour + sec1hr ! number of seconds in 1 hour logical (kind=log_kind) :: read1 +#endif + + integer (kind=int_kind) :: & + recnum , & ! record number + recnum4X ! record number character(len=*), parameter :: subname = '(ISPOL_data)' +#ifdef ncdf call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) -#ifdef ncdf if (trim(atm_data_format) == 'nc') then ! read nc file !------------------------------------------------------------------- @@ -4647,9 +4651,9 @@ subroutine ISPOL_data ! fixed data ! 
May not be needed !----------------------------- - rhoa (:,:,:) = 1.3_dbl_kind ! air density (kg/m^3) - cldf(:,:,:) = c1 !0.25_dbl_kind ! cloud fraction - frain(:,:,:) = c0 ! this is available in hourlymet_rh file + rhoa (:,:,:) = 1.3_dbl_kind ! air density (kg/m^3) + cldf(:,:,:) = c1 !0.25_dbl_kind ! cloud fraction + frain(:,:,:) = c0 ! this is available in hourlymet_rh file ! Save record number for next time step oldrecnum = recnum @@ -4686,6 +4690,7 @@ subroutine ocn_data_ispol_init use netcdf #endif +#ifdef ncdf integer (kind=int_kind) :: & n , & ! field index m ! month index @@ -4697,13 +4702,14 @@ subroutine ocn_data_ispol_init 'dhdx', 'dhdy', 'qdp' / real (kind=dbl_kind) :: & - work + work integer (kind=int_kind) :: & fid ! file id integer (kind=int_kind) :: & status ! status flag +#endif character(len=*), parameter :: subname = '(ocn_data_ispol_init)' @@ -4766,10 +4772,8 @@ subroutine box2001_data ! authors: Elizabeth Hunke, LANL use ice_domain, only: nblocks - use ice_constants, only: c0, c1, c2, c3, c4, c5, p2 use ice_blocks, only: nx_block, ny_block, nghost use ice_flux, only: uocn, vocn, uatm, vatm, wind, rhoa, strax, stray - use ice_fileunits, only: nu_diag, nu_forcing use ice_grid, only: uvm ! local parameters @@ -4778,7 +4782,7 @@ subroutine box2001_data iblk, i,j ! loop indices real (kind=dbl_kind) :: & - secday, pi , c10, c12, c20, puny, period, pi2, tau + secday, pi , puny, period, pi2, tau call icepack_query_parameters(pi_out=pi, pi2_out=pi2, puny_out=puny) call icepack_query_parameters(secday_out=secday) diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedynB/general/ice_forcing_bgc.F90 index 0b34accb1..6e543a056 100644 --- a/cicecore/cicedynB/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedynB/general/ice_forcing_bgc.F90 @@ -77,7 +77,7 @@ subroutine get_forcing_bgc use ice_domain, only: nblocks, blocks_ice use ice_arrays_column, only: ocean_bio_all use ice_calendar, only: yday - use ice_flux, only: sss +! 
use ice_flux, only: sss use ice_flux_bgc, only: sil, nit use ice_forcing, only: trestore, trest, fyear, & read_clim_data_nc, interpolate_data, & @@ -202,7 +202,7 @@ subroutine get_forcing_bgc if (istep == 1 .or. .NOT. restore_bgc) then !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks + do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -210,8 +210,8 @@ subroutine get_forcing_bgc jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi + do j = jlo, jhi + do i = ilo, ihi sil(i,j,iblk) = sildat(i,j,iblk) ks = 2*icepack_max_algae + icepack_max_doc + 3 + icepack_max_dic @@ -224,7 +224,7 @@ subroutine get_forcing_bgc elseif (restore_bgc) then !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks + do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -232,8 +232,8 @@ subroutine get_forcing_bgc jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi + do j = jlo, jhi + do i = ilo, ihi sil(i,j,iblk) = sil(i,j,iblk) & + (sildat(i,j,iblk)-sil(i,j,iblk))*dt/trest @@ -246,7 +246,7 @@ subroutine get_forcing_bgc endif !restore elseif (tr_bgc_Sil) then ! bgc_data_type /= 'clim' !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks + do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -254,10 +254,10 @@ subroutine get_forcing_bgc jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi + do j = jlo, jhi + do i = ilo, ihi - sil(i,j,iblk) = 25.0_dbl_kind + sil(i,j,iblk) = 25.0_dbl_kind ks = 2*icepack_max_algae + icepack_max_doc + 3 + icepack_max_dic ocean_bio_all(i,j,ks,iblk) = sil(i,j,iblk) !Sil enddo @@ -283,7 +283,7 @@ subroutine get_forcing_bgc if (istep == 1 .or. .NOT. 
restore_bgc) then !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks + do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -291,10 +291,10 @@ subroutine get_forcing_bgc jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi + do j = jlo, jhi + do i = ilo, ihi - nit(i,j,iblk) = nitdat(i,j,iblk) + nit(i,j,iblk) = nitdat(i,j,iblk) ks = icepack_max_algae + 1 ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic @@ -305,7 +305,7 @@ subroutine get_forcing_bgc !$OMP END PARALLEL DO elseif (restore_bgc ) then !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks + do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -313,8 +313,8 @@ subroutine get_forcing_bgc jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi + do j = jlo, jhi + do i = ilo, ihi nit(i,j,iblk) = nit(i,j,iblk) & + (nitdat(i,j,iblk)-nit(i,j,iblk))*dt/trest @@ -330,7 +330,7 @@ subroutine get_forcing_bgc ! elseif (trim(nit_data_type) == 'sss' .AND. tr_bgc_Nit) then ! !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) -! do iblk = 1, nblocks +! do iblk = 1, nblocks ! this_block = get_block(blocks_ice(iblk),iblk) ! ilo = this_block%ilo @@ -353,7 +353,7 @@ subroutine get_forcing_bgc elseif (tr_bgc_Nit) then ! 
bgc_data_type /= 'clim' !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks + do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -361,8 +361,8 @@ subroutine get_forcing_bgc jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi + do j = jlo, jhi + do i = ilo, ihi nit(i,j,iblk) = 12.0_dbl_kind ks = icepack_max_algae + 1 @@ -444,7 +444,7 @@ subroutine get_forcing_bgc endif !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks + do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -452,8 +452,8 @@ subroutine get_forcing_bgc jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi + do j = jlo, jhi + do i = ilo, ihi ks = 2*icepack_max_algae + icepack_max_doc + 3 + icepack_max_dic ocean_bio_all(i,j,ks,iblk) = sil(i,j,iblk) !Sil @@ -791,7 +791,7 @@ subroutine init_bgc_data (fed1,fep1) !------------------------------------------------------------------- if (trim(fe_data_type) == 'clim') then - diag = .true. ! write diagnostic information + diag = .true. ! write diagnostic information iron_file = trim(bgc_data_dir)//'dFe_50m_annual_Tagliabue_gx1.nc' if (my_task == master_task) then @@ -808,7 +808,7 @@ subroutine init_bgc_data (fed1,fep1) if (my_task == master_task) call ice_close_nc(fid) - diag = .true. ! write diagnostic information + diag = .true. ! write diagnostic information iron_file = trim(bgc_data_dir)//'pFe_bathy_gx1.nc' if (my_task == master_task) then @@ -858,6 +858,9 @@ subroutine faero_optics ! local parameters + logical (kind=log_kind) :: modal_aero + +#ifdef ncdf integer (kind=int_kind) :: & varid , & ! variable id status , & ! status output from netcdf routines @@ -869,11 +872,10 @@ subroutine faero_optics integer (kind=int_kind) :: & fid ! file id for netCDF file - logical (kind=log_kind) :: modal_aero - character (char_len_long) :: & optics_file, & ! netcdf filename fieldname ! 
field name in netcdf file +#endif character(len=*), parameter :: subname = '(faero_optics)' diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index bce9d7352..1d3aedf6e 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -19,14 +19,14 @@ module ice_init use ice_fileunits, only: nu_nml, nu_diag, nml_filename, diag_type, & ice_stdout, get_fileunit, release_fileunit, bfbflag, flush_fileunit, & ice_IOUnitsMinUnit, ice_IOUnitsMaxUnit +#ifdef CESMCOUPLED use ice_fileunits, only: inst_suffix +#endif use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_trcr use icepack_intfc, only: icepack_init_parameters use icepack_intfc, only: icepack_init_tracer_flags - use icepack_intfc, only: icepack_init_tracer_numbers - use icepack_intfc, only: icepack_init_tracer_indices use icepack_intfc, only: icepack_query_tracer_flags use icepack_intfc, only: icepack_query_tracer_numbers use icepack_intfc, only: icepack_query_tracer_indices @@ -107,8 +107,6 @@ subroutine input_data nml_error, & ! namelist i/o error flag n ! loop index - character (len=6) :: chartmp - logical :: exists real (kind=dbl_kind) :: ustar_min, albicev, albicei, albsnowv, albsnowi, & @@ -127,8 +125,6 @@ subroutine input_data logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo - integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY - integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero integer (kind=int_kind) :: numin, numax ! unit number limits integer (kind=int_kind) :: rpcesm, rplvl, rptopo @@ -1886,7 +1882,7 @@ subroutine boxslotcyl_data_aice(aicen, i, j, & ! 
Geometric configuration of the slotted cylinder diam = p3 *dxrect*(nx_global-1) center_x = p5 *dxrect*(nx_global-1) - center_y = p75*dxrect*(ny_global-1) + center_y = p75*dyrect*(ny_global-1) radius = p5*diam width = p166*diam length = c5*p166*diam @@ -1899,11 +1895,11 @@ subroutine boxslotcyl_data_aice(aicen, i, j, & ! check if grid point is inside slotted cylinder in_slot = (dxrect*real(iglob(i)-1, kind=dbl_kind) >= slot_x(1)) .and. & (dxrect*real(iglob(i)-1, kind=dbl_kind) <= slot_x(2)) .and. & - (dxrect*real(jglob(j)-1, kind=dbl_kind) >= slot_y(1)) .and. & - (dxrect*real(jglob(j)-1, kind=dbl_kind) <= slot_y(2)) + (dyrect*real(jglob(j)-1, kind=dbl_kind) >= slot_y(1)) .and. & + (dyrect*real(jglob(j)-1, kind=dbl_kind) <= slot_y(2)) in_cyl = sqrt((dxrect*real(iglob(i)-1, kind=dbl_kind) - center_x)**c2 + & - (dxrect*real(jglob(j)-1, kind=dbl_kind) - center_y)**c2) <= radius + (dyrect*real(jglob(j)-1, kind=dbl_kind) - center_y)**c2) <= radius in_slotted_cyl = in_cyl .and. .not. in_slot diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index fb759cb27..4bbf0aa3b 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -947,8 +947,6 @@ subroutine ocean_mixed_layer (dt, iblk) use ice_arrays_column, only: Cdn_atm, Cdn_atm_ratio use ice_blocks, only: nx_block, ny_block - use ice_blocks, only: block, get_block - use ice_domain, only: blocks_ice use ice_flux, only: sst, Tf, Qa, uatm, vatm, wind, potT, rhoa, zlvl, & frzmlt, fhocn, fswthru, flw, flwout_ocn, fsens_ocn, flat_ocn, evap_ocn, & alvdr_ocn, alidr_ocn, alvdf_ocn, alidf_ocn, swidf, swvdf, swidr, swvdr, & @@ -970,7 +968,6 @@ subroutine ocean_mixed_layer (dt, iblk) frzmlt_max = c1000 ! max magnitude of frzmlt (W/m^2) integer (kind=int_kind) :: & - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain i, j , & ! horizontal indices ij ! 
combined ij index @@ -986,9 +983,6 @@ subroutine ocean_mixed_layer (dt, iblk) integer (kind=int_kind), dimension(nx_block*ny_block) :: & indxi, indxj ! compressed indices for ocean cells - type (block) :: & - this_block ! block information for current block - character(len=*), parameter :: subname = '(ocn_mixed_layer)' !----------------------------------------------------------------- diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 index afdfa2dfd..e9eb49db0 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 @@ -2820,7 +2820,6 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) iblk, jblk, &! block indices iglb, jglb, &! global indices nrecvs, &! actual number of messages received - isrc, jsrc, &! source addresses dst_block, &! location of block in dst array ierr ! MPI error flag diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 index 9775a9dad..9c2cfd9fc 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 @@ -3434,8 +3434,7 @@ subroutine ice_HaloUpdate_stress(array1, array2, halo, & ! local variables integer (int_kind) :: & - i,j,n,nmsg, &! dummy loop indices - ierr, &! error or status flag for MPI,alloc + nmsg, &! dummy loop indices nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message iDst,jDst, &! 
dest addresses for message diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90 index b4b4c4ab2..1776ff906 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedynB/infrastructure/ice_domain.F90 @@ -300,9 +300,11 @@ subroutine init_domain_distribution(KMTG,ULATG) i,j,n ,&! dummy loop indices ig,jg ,&! global indices work_unit ,&! size of quantized work unit +#ifdef ncdf fid ,&! file id varid ,&! var id status ,&! netcdf return code +#endif tblocks_tmp ,&! total number of blocks nblocks_tmp ,&! temporary value of nblocks nblocks_max ! max blocks on proc diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90 index ff05f4fe3..22d3f8260 100644 --- a/cicecore/cicedynB/infrastructure/ice_read_write.F90 +++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90 @@ -1516,12 +1516,10 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & ! local variables +#ifdef ncdf real (kind=dbl_kind), dimension(:), allocatable :: & - work_z - - character(len=*), parameter :: subname = '(ice_read_nc_z)' + work_z -#ifdef ncdf ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! netcdf id for field @@ -1532,6 +1530,11 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & character (char_len) :: & dimname ! dimension name +#endif + + character(len=*), parameter :: subname = '(ice_read_nc_z)' + +#ifdef ncdf allocate(work_z(nilyr)) @@ -1625,8 +1628,8 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & amin, amax, asum ! min, max values and sum of input array character (char_len) :: & - lvarname, & ! variable name - dimname ! dimension name + lvarname ! variable name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 @@ -1745,8 +1748,8 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & amin, amax, asum ! 
min, max values and sum of input array character (char_len) :: & - lvarname, & ! variable name - dimname ! dimension name + lvarname ! variable name +! dimname ! dimension name real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g1 @@ -2136,15 +2139,11 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! netcdf id for field - status, & ! status output from netcdf routines - nvar ! sizes of netcdf vector + status ! status output from netcdf routines real (kind=dbl_kind) :: & amin, amax ! min, max values of input vector - character (char_len) :: & - dimname ! dimension name -! work_g(:) = c0 if (my_task == master_task) then @@ -2194,10 +2193,15 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) varname ! field name in netcdf file integer (kind=int_kind), intent(out) :: & recsize ! Number of records in file + + ! local variables + +#ifdef ncdf integer (kind=int_kind) :: & ndims, i, status character (char_len) :: & cvar +#endif character(len=*), parameter :: subname = '(ice_get_ncvarsize)' #ifdef ncdf diff --git a/cicecore/cicedynB/infrastructure/ice_restoring.F90 b/cicecore/cicedynB/infrastructure/ice_restoring.F90 index 799b263d6..a7851ef07 100644 --- a/cicecore/cicedynB/infrastructure/ice_restoring.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restoring.F90 @@ -57,7 +57,7 @@ subroutine ice_HaloRestore_init use ice_domain, only: ew_boundary_type, ns_boundary_type, & nblocks, blocks_ice use ice_grid, only: tmask, hm - use ice_flux, only: sst, Tf, Tair, salinz, Tmltz + use ice_flux, only: Tf, Tair, salinz, Tmltz use ice_restart_shared, only: restart_ext integer (int_kind) :: & diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 index 73540f5a9..d1564fab4 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 +++ 
b/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 @@ -56,7 +56,7 @@ subroutine ice_write_hist(ns) ! local variables - integer (kind=int_kind) :: i,k,n,nn,nrec,nbits + integer (kind=int_kind) :: k,n,nn,nrec,nbits character (char_len) :: title character (char_len_long) :: ncfile(max_nstrm), hdrfile diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 index a9aaef525..4d13540e5 100644 --- a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 @@ -636,12 +636,10 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & ! local variables integer (kind=int_kind) :: & - n, & ! number of dimensions for variable - varid, & ! variable id - status ! status variable from netCDF routine + n ! number of dimensions for variable real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & - work2 ! input array (real, 8-byte) + work2 ! input array (real, 8-byte) character(len=*), parameter :: subname = '(read_restart_field)' @@ -699,12 +697,10 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) ! local variables integer (kind=int_kind) :: & - n, & ! dimension counter - varid, & ! variable id - status ! status variable from netCDF routine + n ! dimension counter real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & - work2 ! input array (real, 8-byte) + work2 ! input array (real, 8-byte) character(len=*), parameter :: subname = '(write_restart_field)' @@ -735,8 +731,7 @@ subroutine final_restart() tr_pond_topo, tr_pond_lvl, tr_brine integer (kind=int_kind) :: & - nbtrcr, & ! number of bgc tracers - status + nbtrcr ! 
number of bgc tracers character(len=*), parameter :: subname = '(final_restart)' diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index 83f9bee1a..29efa67f9 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -134,9 +134,9 @@ subroutine init_restart_write(filename_spec) tr_bgc_hum integer (kind=int_kind) :: & - k, n, & ! index + k, n, & ! index nx, ny, & ! global array size - iyear, imonth, iday, & ! year, month, day + iyear, & ! year nbtrcr ! number of bgc tracers character(len=char_len_long) :: filename @@ -643,11 +643,6 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & ! local variables - integer (kind=int_kind) :: & - n, & ! number of dimensions for variable - varid, & ! variable id - status ! status variable from netCDF routine - real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & work2 ! 
input array (real, 8-byte) diff --git a/cicecore/cicedynB/infrastructure/io/io_pio/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio/ice_history_write.F90 index ff3d2d6fd..43e5dab74 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio/ice_history_write.F90 @@ -57,7 +57,7 @@ subroutine ice_write_hist (ns) use ice_restart_shared, only: runid, lcdf64 use netcdf #endif - use ice_pio + use ice_pio use pio integer (kind=int_kind), intent(in) :: ns @@ -161,7 +161,7 @@ subroutine ice_write_hist (ns) File%fh=-1 call ice_pio_init(mode='write', filename=trim(filename), File=File, & - clobber=.true., cdf64=lcdf64) + clobber=.true., cdf64=lcdf64) call ice_pio_initdecomp(iodesc=iodesc2d) call ice_pio_initdecomp(ndim3=ncat_hist, iodesc=iodesc3dc) @@ -349,7 +349,7 @@ subroutine ice_write_hist (ns) dimidex(4)=kmtidb dimidex(5)=kmtida - do i = 1, nvarz + do i = 1, nvarz if (igrdz(i)) then status = pio_def_var(File, trim(var_nz(i)%short_name), pio_real, & (/dimidex(i)/), varid) @@ -782,7 +782,7 @@ subroutine ice_write_hist (ns) ! Extra dimensions (NCAT, VGRD*) - do i = 1, nvarz + do i = 1, nvarz if (igrdz(i)) then status = pio_inq_varid(File, var_nz(i)%short_name, varid) SELECT CASE (var_nz(i)%short_name) diff --git a/cicecore/drivers/cesm/ice_comp_esmf.F90 b/cicecore/drivers/cesm/ice_comp_esmf.F90 index 8f682ebe5..8ae80abdc 100644 --- a/cicecore/drivers/cesm/ice_comp_esmf.F90 +++ b/cicecore/drivers/cesm/ice_comp_esmf.F90 @@ -16,7 +16,7 @@ module ice_comp_esmf use shr_sys_mod, only : shr_sys_abort, shr_sys_flush ! 
use shr_mem_mod, only : shr_get_memusage, shr_init_memusage use shr_file_mod, only : shr_file_getlogunit, shr_file_getloglevel, & - shr_file_setloglevel, shr_file_setlogunit + shr_file_setloglevel, shr_file_setlogunit use mct_mod #ifdef USE_ESMF_LIB use esmf @@ -26,10 +26,10 @@ module ice_comp_esmf use seq_flds_mod use seq_infodata_mod,only : seq_infodata_start_type_cont, & - seq_infodata_start_type_brnch, seq_infodata_start_type_start + seq_infodata_start_type_brnch, seq_infodata_start_type_start use seq_timemgr_mod, only : seq_timemgr_eclockgetdata, & seq_timemgr_restartalarmison, & - seq_timemgr_eclockdateinsync, & + seq_timemgr_eclockdateinsync, & seq_timemgr_stopalarmison use seq_comm_mct, only : seq_comm_suffix, seq_comm_inst, seq_comm_name use perf_mod, only : t_startf, t_stopf, t_barrierf @@ -44,12 +44,12 @@ module ice_comp_esmf use ice_domain, only : nblocks, blocks_ice, halo_info, distrb_info use ice_blocks, only : block, get_block, nx_block, ny_block use ice_grid, only : tlon, tlat, tarea, tmask, anglet, hm, & - grid_type, t2ugrid_vector, gridcpl_file, ocn_gridcell_frac + grid_type, t2ugrid_vector, gridcpl_file, ocn_gridcell_frac use ice_constants, only : c0, c1, spval_dbl, rad_to_deg, radius, secday use ice_communicate, only : my_task, master_task, MPI_COMM_ICE use ice_calendar, only : istep, istep1, force_restart_now, write_ic,& idate, idate0, mday, time, month, daycal, & - sec, dt, dt_dyn, calendar, & + sec, dt, dt_dyn, calendar, & calendar_type, nextsw_cday, days_per_year, & nyr, new_year, time2sec, year_init use icepack_orbital, only : eccen, obliqr, lambm0, mvelpp @@ -694,7 +694,7 @@ subroutine ice_run_esmf(comp, import_state, export_state, EClock, rc) force_restart_now = seq_timemgr_RestartAlarmIsOn(EClock) - if (calendar_type .eq. "GREGORIAN") then + if (calendar_type .eq. "GREGORIAN") then nyrp = nyr nyr = (curr_ymd/10000)+1 ! 
integer year of basedate if (nyr /= nyrp) then diff --git a/cicecore/drivers/cesm/ice_comp_mct.F90 b/cicecore/drivers/cesm/ice_comp_mct.F90 index da86f91be..7162d6397 100644 --- a/cicecore/drivers/cesm/ice_comp_mct.F90 +++ b/cicecore/drivers/cesm/ice_comp_mct.F90 @@ -14,7 +14,7 @@ module ice_comp_mct use shr_sys_mod, only : shr_sys_abort, shr_sys_flush ! use shr_mem_mod, only : shr_get_memusage, shr_init_memusage use shr_file_mod, only : shr_file_getlogunit, shr_file_getloglevel, & - shr_file_setloglevel, shr_file_setlogunit + shr_file_setloglevel, shr_file_setlogunit use shr_const_mod use mct_mod #ifdef USE_ESMF_LIB @@ -26,11 +26,11 @@ module ice_comp_mct use seq_flds_mod use seq_cdata_mod, only : seq_cdata, seq_cdata_setptrs use seq_infodata_mod,only : seq_infodata_type, seq_infodata_getdata, & - seq_infodata_putdata, seq_infodata_start_type_cont, & - seq_infodata_start_type_brnch, seq_infodata_start_type_start + seq_infodata_putdata, seq_infodata_start_type_cont, & + seq_infodata_start_type_brnch, seq_infodata_start_type_start use seq_timemgr_mod, only : seq_timemgr_eclockgetdata, & seq_timemgr_restartalarmison, & - seq_timemgr_eclockdateinsync, & + seq_timemgr_eclockdateinsync, & seq_timemgr_stopalarmison use seq_comm_mct, only : seq_comm_suffix, seq_comm_inst, seq_comm_name use perf_mod, only : t_startf, t_stopf, t_barrierf @@ -42,13 +42,13 @@ module ice_comp_mct use ice_domain, only : nblocks, blocks_ice, halo_info, distrb_info use ice_blocks, only : block, get_block, nx_block, ny_block use ice_grid, only : tlon, tlat, tarea, tmask, anglet, hm, & - grid_type, t2ugrid_vector, gridcpl_file, ocn_gridcell_frac + grid_type, t2ugrid_vector, gridcpl_file, ocn_gridcell_frac use ice_constants, only : c0, c1, spval_dbl, radius use ice_constants, only : ice_init_constants use ice_communicate, only : my_task, master_task, MPI_COMM_ICE use ice_calendar, only : istep, istep1, force_restart_now, write_ic,& idate, idate0, mday, time, month, daycal, & - sec, dt, dt_dyn, 
calendar, & + sec, dt, dt_dyn, calendar, & calendar_type, nextsw_cday, days_per_year, & nyr, new_year, time2sec, year_init use ice_timers @@ -579,7 +579,7 @@ subroutine ice_run_mct( EClock, cdata_i, x2i_i, i2x_i ) force_restart_now = seq_timemgr_RestartAlarmIsOn(EClock) -! if (calendar_type .eq. "GREGORIAN") then +! if (calendar_type .eq. "GREGORIAN") then ! nyrp = nyr ! nyr = (curr_ymd/10000)+1 ! integer year of basedate ! if (nyr /= nyrp) then diff --git a/cicecore/drivers/cesm/ice_prescribed_mod.F90 b/cicecore/drivers/cesm/ice_prescribed_mod.F90 index 10c71cc84..eeb217577 100644 --- a/cicecore/drivers/cesm/ice_prescribed_mod.F90 +++ b/cicecore/drivers/cesm/ice_prescribed_mod.F90 @@ -138,7 +138,7 @@ subroutine ice_prescribed_init(compid, gsmap, dom) namelist /ice_prescribed_nml/ & prescribed_ice, & model_year_align, & - stream_year_first , & + stream_year_first , & stream_year_last , & stream_fldVarName , & stream_fldFileName, & diff --git a/cicecore/drivers/cesm/ice_scam.F90 b/cicecore/drivers/cesm/ice_scam.F90 index 86a56d19c..f5280b259 100644 --- a/cicecore/drivers/cesm/ice_scam.F90 +++ b/cicecore/drivers/cesm/ice_scam.F90 @@ -12,4 +12,3 @@ module ice_scam end module ice_scam - diff --git a/cicecore/drivers/cice/CICE_InitMod.F90 b/cicecore/drivers/cice/CICE_InitMod.F90 index 1acbcb873..ddc966b55 100644 --- a/cicecore/drivers/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/cice/CICE_InitMod.F90 @@ -72,12 +72,12 @@ subroutine cice_init use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & - get_forcing_atmo, get_forcing_ocn, alloc_forcing + get_forcing_atmo, get_forcing_ocn use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & faero_default, faero_optics, alloc_forcing_bgc use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist - use ice_restart_shared, only: restart, runid, runtype + use 
ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers use ice_kinds_mod diff --git a/cicecore/drivers/cice/CICE_RunMod.F90 b/cicecore/drivers/cice/CICE_RunMod.F90 index a9f5a9707..af3e681b9 100644 --- a/cicecore/drivers/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/cice/CICE_RunMod.F90 @@ -44,7 +44,7 @@ module CICE_RunMod subroutine CICE_Run use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar - use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, atm_data_type + use ice_forcing, only: get_forcing_atmo, get_forcing_ocn use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & faero_default use ice_flux, only: init_flux_atm, init_flux_ocn diff --git a/cicecore/shared/ice_arrays_column.F90 b/cicecore/shared/ice_arrays_column.F90 index 71d03ed94..2ccd2d754 100644 --- a/cicecore/shared/ice_arrays_column.F90 +++ b/cicecore/shared/ice_arrays_column.F90 @@ -279,13 +279,12 @@ module ice_arrays_column subroutine alloc_arrays_column ! Allocate column arrays use ice_exit, only: abort_ice - integer (int_kind) :: nspint, max_nbtrcr, max_algae, max_aero, & + integer (int_kind) :: max_nbtrcr, max_algae, max_aero, & nmodal1, nmodal2, max_don integer (int_kind) :: ierr, ntrcr character(len=*),parameter :: subname='(alloc_arrays_column)' -! 
call icepack_query_parameters(nspint_out=nspint) call icepack_query_tracer_numbers(ntrcr_out=ntrcr) call icepack_query_tracer_sizes( max_nbtrcr_out=max_nbtrcr, & max_algae_out=max_algae, max_aero_out=max_aero, & diff --git a/cicecore/shared/ice_calendar.F90 b/cicecore/shared/ice_calendar.F90 index a767bdfd7..e7107f42a 100644 --- a/cicecore/shared/ice_calendar.F90 +++ b/cicecore/shared/ice_calendar.F90 @@ -54,10 +54,10 @@ module ice_calendar data daycal366/ 0,31, 60, 91,121,152,182,213,244,274,305,335,366/ real (kind=dbl_kind), parameter :: & - days_per_4c = 146097.0_dbl_kind, & - days_per_c = 36524.0_dbl_kind, & - days_per_4y = 1461.0_dbl_kind, & - days_per_y = 365.0_dbl_kind + days_per_4c = 146097.0_dbl_kind, & + days_per_c = 36524.0_dbl_kind, & + days_per_4y = 1461.0_dbl_kind, & + days_per_y = 365.0_dbl_kind integer (kind=int_kind), public :: & istep , & ! local step counter for time loop diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 3f3a5bfe6..f86984195 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -444,11 +444,11 @@ subroutine init_shortwave !---------------------------------------------------------------- if (runtype == 'initial' .and. .not. restart) then scale_factor(i,j,iblk) = & - swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & - + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & - + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & - + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) - endif + swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & + + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & + + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & + + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) + endif enddo ! i enddo ! j @@ -895,8 +895,7 @@ subroutine input_zbgc integer (kind=int_kind) :: & nml_error, & ! namelist i/o error flag - k, mm , & ! loop index - ierr, abort_flag + abort_flag character(len=*), parameter :: subname='(input_zbgc)' @@ -1626,7 +1625,6 @@ subroutine count_tracers ! 
local variables integer (kind=int_kind) :: & - n , & ! loop index k, mm , & ! loop index nk , & ! layer index nk_bgc ! layer index @@ -2116,7 +2114,6 @@ end subroutine count_tracers subroutine init_zbgc - use ice_broadcast, only: broadcast_scalar use ice_state, only: trcr_base, trcr_depend, n_trcr_strata, & nt_strata use ice_arrays_column, only: R_C2N, R_chl2N, R_C2N_DON, R_Si2N @@ -2186,7 +2183,7 @@ subroutine init_zbgc tr_bgc_DMS, tr_bgc_PON, & tr_bgc_N, tr_bgc_C, tr_bgc_chl, & tr_bgc_DON, tr_bgc_Fe, tr_zaero, & - tr_bgc_hum, tr_aero + tr_bgc_hum real (kind=dbl_kind) :: & initbio_frac, & diff --git a/cicecore/shared/ice_restart_column.F90 b/cicecore/shared/ice_restart_column.F90 index 63d095f11..b46e6fdcd 100644 --- a/cicecore/shared/ice_restart_column.F90 +++ b/cicecore/shared/ice_restart_column.F90 @@ -786,9 +786,6 @@ subroutine write_restart_bgc() type (block) :: & this_block ! block information for current block - integer (kind=int_kind) :: & - ipoint - character(len=*),parameter :: subname='(write_restart_bgc)' call icepack_query_parameters(skl_bgc_out=skl_bgc, solve_zsal_out=solve_zsal) diff --git a/icepack b/icepack index 0b442b3fe..423c81679 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 0b442b3fe96d65b3c54c5facbc69653e1b87b740 +Subproject commit 423c81679147ede18b6ae9645ba3bdb74e492552 From 9da39b69a5a28f14e9c3fc0dffb6b8cdc6494db6 Mon Sep 17 00:00:00 2001 From: Alice DuVivier Date: Wed, 18 Sep 2019 19:42:38 -0600 Subject: [PATCH 07/14] Updating a few documentation issues (#364) * adding COREII citation * modifying PR template for RTD change --- .github/PULL_REQUEST_TEMPLATE | 2 +- doc/source/master_list.bib | 8 ++++++++ doc/source/user_guide/ug_case_settings.rst | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE b/.github/PULL_REQUEST_TEMPLATE index 5195feed8..6256f9bc6 100644 --- a/.github/PULL_REQUEST_TEMPLATE +++ b/.github/PULL_REQUEST_TEMPLATE @@ -21,7 +21,7 @@ please refer to: Date: 
Thu, 19 Sep 2019 17:50:24 -0500 Subject: [PATCH 08/14] =?UTF-8?q?Added=20new=20capability=20in=20ice=5Ffor?= =?UTF-8?q?cing.F90=20to=20test=20with=20JRA55=20atmospheri=E2=80=A6=20(#3?= =?UTF-8?q?50)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Added new capability in ice_forcing.F90 to test with JRA55 atmospheric forcing on gx1 grid. * Added JRA55 as an option under atm_data_type * Modified ice_forcing.F90 by replacing elseif (trim(atm_data_format) == 'nc') then ! netcdf with elseif (trim(atm_data_type) == 'JRA55') then ! netcdf Corrected comment regarding JRA55 forcing to state "E.g. record 1 gives conditions at 3 am GMT on 1 January" Modified ./scripts/options/set_nml.jra55 by setting use_restart_time = .false. setting year_init = 2005 setting fyear_init - 2005 and corrected units for precip_units = 'mks' edited ./scripts/tests/base_suite.ts by reinstating bgc restart test and added a jra55 "short" smoke test. * fix an incorrect test in the base suite * Add a check in init_forcing_atmo to check if 'use_leap_years' is set to true. If so, abort if the atm_data_type is not JRA55, default, or box2001 * Fix a mistake in diagnostic warning output for the use_leap_years check * set use_leap_years to false in the boxrestore namelist option file * add use_restart_time specification to test_nml.restart* files * Remove options in JRA55 namelist option file that are the same as the default in ice_in. Also remove the specification of use_restart_time to .false. * fix a bug in the newly-implemented use_leap_years check during initialization * add use_leap_years and use_restart_time specification to jra55 namelist option file * add new jra55_2008 and run90day namelist option files * modify the JRA55 smoke test to be a 90 day test and start in 2008. 
Also add a restart test for JRA55 in base_suite * update documentation to include citation for JRA55 --- cicecore/cicedynB/general/ice_forcing.F90 | 272 +++++++++++++++++- .../scripts/options/set_nml.boxrestore | 2 +- configuration/scripts/options/set_nml.jra55 | 17 ++ .../scripts/options/set_nml.jra55_2008 | 17 ++ .../scripts/options/set_nml.run90day | 5 + .../scripts/options/test_nml.restart1 | 1 + .../scripts/options/test_nml.restart2 | 2 +- configuration/scripts/tests/base_suite.ts | 2 + doc/source/master_list.bib | 9 + doc/source/user_guide/ug_case_settings.rst | 1 + 10 files changed, 324 insertions(+), 4 deletions(-) mode change 100644 => 100755 cicecore/cicedynB/general/ice_forcing.F90 create mode 100755 configuration/scripts/options/set_nml.jra55 create mode 100755 configuration/scripts/options/set_nml.jra55_2008 create mode 100644 configuration/scripts/options/set_nml.run90day mode change 100644 => 100755 configuration/scripts/tests/base_suite.ts diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 old mode 100644 new mode 100755 index f1a307ae0..53eb0ec49 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -30,7 +30,7 @@ module ice_forcing use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite, & timer_bound use ice_arrays_column, only: oceanmixed_ice, restore_bgc - use ice_constants, only: c0, c1, c2, c3, c4, c5, c10, c12, c15, c20, & + use ice_constants, only: c0, c1, c2, c3, c4, c5, c8, c10, c12, c15, c20, & c180, c360, c365, c1000, c3600 use ice_constants, only: p001, p01, p1, p2, p25, p5, p6 use ice_constants, only: cm_to_m @@ -115,7 +115,8 @@ module ice_forcing atm_data_format, & ! 'bin'=binary or 'nc'=netcdf ocn_data_format, & ! 'bin'=binary or 'nc'=netcdf atm_data_type, & ! 'default', 'monthly', 'ncar', - ! 'LYq' or 'hadgem' or 'oned' + ! 'LYq' or 'hadgem' or 'oned' or + ! 'JRA55' bgc_data_type, & ! 'default', 'clim' ocn_data_type, & ! 
'default', 'clim', 'ncar', 'oned', ! 'hadgem_sst' or 'hadgem_sst_uvocn' @@ -211,6 +212,8 @@ subroutine init_forcing_atmo ! Determine the current and final year of the forcing cycle based on ! namelist input; initialize the atmospheric forcing data filenames. + use ice_calendar, only: use_leap_years + character(len=*), parameter :: subname = '(init_forcing_atmo)' ! Allocate forcing arrays @@ -235,6 +238,14 @@ subroutine init_forcing_atmo file=__FILE__, line=__LINE__) endif + if (use_leap_years .and. (trim(atm_data_type) /= 'JRA55' .and. & + trim(atm_data_type) /= 'default' .and. & + trim(atm_data_type) /= 'box2001')) then + write(nu_diag,*) 'use_leap_years option is currently only supported for' + write(nu_diag,*) 'JRA55, default , and box2001 atmospheric data' + call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) + endif + !------------------------------------------------------------------- ! Get filenames for input forcing data !------------------------------------------------------------------- @@ -244,6 +255,8 @@ subroutine init_forcing_atmo call NCAR_files(fyear) elseif (trim(atm_data_type) == 'LYq') then call LY_files(fyear) + elseif (trim(atm_data_type) == 'JRA55') then + call JRA55_files(fyear) elseif (trim(atm_data_type) == 'hadgem') then call hadgem_files(fyear) elseif (trim(atm_data_type) == 'monthly') then @@ -539,6 +552,8 @@ subroutine get_forcing_atmo call ncar_data elseif (trim(atm_data_type) == 'LYq') then call LY_data + elseif (trim(atm_data_type) == 'JRA55') then + call JRA55_data(fyear) elseif (trim(atm_data_type) == 'hadgem') then call hadgem_data elseif (trim(atm_data_type) == 'monthly') then @@ -1376,6 +1391,10 @@ subroutine file_year (data_file, yr) i = index(data_file,'.nc') - 5 tmpname = data_file write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' + elseif (trim(atm_data_type) == 'JRA55') then ! netcdf + i = index(data_file,'.nc') - 5 + tmpname = data_file + write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' else ! 
LANL/NCAR naming convention i = index(data_file,'.dat') - 5 tmpname = data_file @@ -2002,6 +2021,22 @@ subroutine LY_files (yr) endif ! master_task end subroutine LY_files + subroutine JRA55_files(yr) +! + integer (kind=int_kind), intent(in) :: & + yr ! current forcing year + + character(len=*), parameter :: subname = '(JRA55_files)' + + uwind_file = & + trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_2005.nc' + call file_year(uwind_file,yr) + if (my_task == master_task) then + write (nu_diag,*) ' ' + write (nu_diag,*) 'Atmospheric data files:' + write (nu_diag,*) trim(uwind_file) + endif + end subroutine JRA55_files !======================================================================= ! @@ -2225,6 +2260,239 @@ subroutine LY_data end subroutine LY_data + subroutine JRA55_data (yr) + + use ice_blocks, only: block, get_block + use ice_global_reductions, only: global_minval, global_maxval + use ice_domain, only: nblocks, distrb_info, blocks_ice + use ice_flux, only: fsnow, Tair, uatm, vatm, Qa, fsw, flw + use ice_grid, only: hm, tlon, tlat, tmask, umask + use ice_state, only: aice + use ice_calendar, only: days_per_year, use_leap_years + + + integer (kind=int_kind) :: & + ncid , & ! netcdf file id + i, j , & + ixm,ixx,ixp , & ! record numbers for neighboring months + recnum , & ! record number + maxrec , & ! maximum record number + recslot , & ! spline slot for current record + midmonth , & ! middle day of month + dataloc , & ! = 1 for data located in middle of time interval + ! = 2 for date located at end of time interval + iblk , & ! block index + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + yr ! current forcing year + + real (kind=dbl_kind) :: & + sec3hr , & ! number of seconds in 3 hours + secday , & ! number of seconds in day + Tffresh , & + vmin, vmax + + logical (kind=log_kind) :: readm, read6,debug_n_d + + type (block) :: & + this_block ! 
block information for current block + + character(len=64) :: fieldname !netcdf field name + character(len=*), parameter :: subname = '(JRA55_data)' + + debug_n_d = .false. !usually false + + call icepack_query_parameters(Tffresh_out=Tffresh) + call icepack_query_parameters(secday_out=secday) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !------------------------------------------------------------------- + ! 3-hourly data + ! + ! Assume that the 3-hourly value is located at the end of the + ! 3-hour period. This is the convention for NCEP reanalysis data. + ! E.g. record 1 gives conditions at 3 am GMT on 1 January. + !------------------------------------------------------------------- + + dataloc = 2 ! data located at end of interval + sec3hr = secday/c8 ! seconds in 3 hours + !maxrec = 2920 ! 365*8; for leap years = 366*8 + + if(use_leap_years) days_per_year = 366 !overrides setting of 365 in ice_calendar + maxrec = days_per_year*8 + + if(days_per_year == 365 .and. (mod(yr, 4) == 0)) then + call abort_ice('days_per_year should be set to 366 for leap years') + end if + + ! current record number + recnum = 8*int(yday) - 7 + int(real(sec,kind=dbl_kind)/sec3hr) + + ! Compute record numbers for surrounding data (2 on each side) + + ixm = mod(recnum+maxrec-2,maxrec) + 1 + ixx = mod(recnum-1, maxrec) + 1 + + ! Compute interpolation coefficients + ! If data is located at the end of the time interval, then the + ! data value for the current record goes in slot 2 + + recslot = 2 + ixp = -99 + call interp_coeff (recnum, recslot, sec3hr, dataloc) + + ! Read + read6 = .false. + if (istep==1 .or. oldrecnum .ne. recnum) read6 = .true. + !------------------------------------------------------------------- + ! File is NETCDF with winds in NORTH and EAST direction + ! file variable names are: + ! glbrad (shortwave W/m^2) + ! dlwsfc (longwave W/m^2) + ! 
wndewd (eastward wind m/s) + ! wndnwd (northward wind m/s) + ! airtmp (air temperature K) + ! spchmd (specific humidity kg/kg) + ! ttlpcp (precipitation kg/m s-1) + !------------------------------------------------------------------- + call ice_open_nc(uwind_file,ncid) + + fieldname = 'airtmp' + call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'wndewd' + call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'wndnwd' + call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'spchmd' + call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'glbrad' + call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'dlwsfc' + call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,2,:),debug_n_d, & + 
field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'ttlpcp' + call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + call ice_close_nc(ncid) + + + ! Interpolate + call interpolate_data (Tair_data, Tair) + call interpolate_data (uatm_data, uatm) + call interpolate_data (vatm_data, vatm) + call interpolate_data (Qa_data, Qa) + call interpolate_data (fsw_data, fsw) + call interpolate_data (flw_data, flw) + call interpolate_data (fsnow_data, fsnow) + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + ! limit summer Tair values where ice is present + do j = 1, ny_block + do i = 1, nx_block + if (aice(i,j,iblk) > p1) Tair(i,j,iblk) = min(Tair(i,j,iblk), Tffresh+p1) + enddo + enddo + + do j = 1, ny_block + do i = 1, nx_block + Qa (i,j,iblk) = Qa (i,j,iblk) * hm(i,j,iblk) + Tair(i,j,iblk) = Tair(i,j,iblk) * hm(i,j,iblk) + uatm(i,j,iblk) = uatm(i,j,iblk) * hm(i,j,iblk) + vatm(i,j,iblk) = vatm(i,j,iblk) * hm(i,j,iblk) + fsw (i,j,iblk) = fsw (i,j,iblk) * hm(i,j,iblk) + flw (i,j,iblk) = flw (i,j,iblk) * hm(i,j,iblk) + fsnow(i,j,iblk) = fsnow (i,j,iblk) * hm(i,j,iblk) + enddo + enddo + + enddo ! iblk + !$OMP END PARALLEL DO + + ! 
Save record number + oldrecnum = recnum + + if (dbug) then + if (my_task == master_task) write (nu_diag,*) 'JRA55_bulk_data' + vmin = global_minval(fsw,distrb_info,tmask) + + vmax = global_maxval(fsw,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'fsw',vmin,vmax + vmin = global_minval(flw,distrb_info,tmask) + vmax = global_maxval(flw,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'flw',vmin,vmax + vmin =global_minval(fsnow,distrb_info,tmask) + vmax =global_maxval(fsnow,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'fsnow',vmin,vmax + vmin = global_minval(Tair,distrb_info,tmask) + vmax = global_maxval(Tair,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'Tair',vmin,vmax + vmin = global_minval(uatm,distrb_info,umask) + vmax = global_maxval(uatm,distrb_info,umask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'uatm',vmin,vmax + vmin = global_minval(vatm,distrb_info,umask) + vmax = global_maxval(vatm,distrb_info,umask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'vatm',vmin,vmax + vmin = global_minval(Qa,distrb_info,tmask) + vmax = global_maxval(Qa,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'Qa',vmin,vmax + if (my_task.eq.master_task) & + write (nu_diag,*) 'maxrec',maxrec + write (nu_diag,*) 'days_per_year', days_per_year + + endif ! dbug + + end subroutine JRA55_data + + !======================================================================= ! ! AOMIP shortwave forcing diff --git a/configuration/scripts/options/set_nml.boxrestore b/configuration/scripts/options/set_nml.boxrestore index bc913a3dc..6789e1ff8 100644 --- a/configuration/scripts/options/set_nml.boxrestore +++ b/configuration/scripts/options/set_nml.boxrestore @@ -2,7 +2,7 @@ nilyr = 1 ice_ic = 'default' restart = .false. restart_ext = .true. -use_leap_years = .true. +use_leap_years = .false. 
ndtd = 2 kcatbound = 1 ew_boundary_type = 'cyclic' diff --git a/configuration/scripts/options/set_nml.jra55 b/configuration/scripts/options/set_nml.jra55 new file mode 100755 index 000000000..d0112a857 --- /dev/null +++ b/configuration/scripts/options/set_nml.jra55 @@ -0,0 +1,17 @@ +year_init = 2005 +ice_ic = 'ICE_MACHINE_INPUTDATA/CICE_data/ic/gx1/iced_gx1_v5.nc' +grid_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/grid_gx1.bin' +kmt_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/kmt_gx1.bin' +bathymetry_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/global_gx1.bathy.nc' +use_leap_years = .true. +use_restart_time = .false. +maskhalo_dyn = .true. +maskhalo_remap = .true. +maskhalo_bound = .true. +fyear_init = 2005 +atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/JRA55' +atm_data_format = 'nc' +atm_data_type = 'JRA55' +precip_units = 'mks' +ocn_data_dir = 'default' +bgc_data_dir = 'default' diff --git a/configuration/scripts/options/set_nml.jra55_2008 b/configuration/scripts/options/set_nml.jra55_2008 new file mode 100755 index 000000000..042431fc0 --- /dev/null +++ b/configuration/scripts/options/set_nml.jra55_2008 @@ -0,0 +1,17 @@ +year_init = 2008 +ice_ic = 'ICE_MACHINE_INPUTDATA/CICE_data/ic/gx1/iced_gx1_v5.nc' +grid_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/grid_gx1.bin' +kmt_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/kmt_gx1.bin' +bathymetry_file = 'ICE_MACHINE_INPUTDATA/CICE_data/grid/gx1/global_gx1.bathy.nc' +use_leap_years = .true. +use_restart_time = .false. +maskhalo_dyn = .true. +maskhalo_remap = .true. +maskhalo_bound = .true. 
+fyear_init = 2008 +atm_data_dir = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx1/JRA55' +atm_data_format = 'nc' +atm_data_type = 'JRA55' +precip_units = 'mks' +ocn_data_dir = 'default' +bgc_data_dir = 'default' diff --git a/configuration/scripts/options/set_nml.run90day b/configuration/scripts/options/set_nml.run90day new file mode 100644 index 000000000..06db1a3d8 --- /dev/null +++ b/configuration/scripts/options/set_nml.run90day @@ -0,0 +1,5 @@ +npt = 2160 +dumpfreq = 'd' +dumpfreq_n = 30 +histfreq = 'd','x','x','x','x' +f_aice = 'd' diff --git a/configuration/scripts/options/test_nml.restart1 b/configuration/scripts/options/test_nml.restart1 index 4874c25a5..82f934720 100644 --- a/configuration/scripts/options/test_nml.restart1 +++ b/configuration/scripts/options/test_nml.restart1 @@ -2,3 +2,4 @@ npt = 240 dumpfreq = 'd' dumpfreq_n = 5 runtype = 'initial' +use_restart_time = .false. diff --git a/configuration/scripts/options/test_nml.restart2 b/configuration/scripts/options/test_nml.restart2 index 5981918a8..4ae10c5a6 100644 --- a/configuration/scripts/options/test_nml.restart2 +++ b/configuration/scripts/options/test_nml.restart2 @@ -3,4 +3,4 @@ dumpfreq = 'd' dumpfreq_n = 5 runtype = 'continue' restart = .true. - +use_restart_time = .true. 
diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts old mode 100644 new mode 100755 index 413047e7a..39f68a5f2 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -37,3 +37,5 @@ smoke gx3 8x1 bgcskl,debug #smoke gx3 4x1 bgcz,thread smoke_gx3_8x2_bgcz restart gx1 4x2 bgcsklclim,medium restart gx1 8x1 bgczclim,medium +smoke gx1 24x1 jra55_2008,medium,run90day +restart gx1 24x1 jra55,short diff --git a/doc/source/master_list.bib b/doc/source/master_list.bib index 9c4a4d905..11bb3e887 100644 --- a/doc/source/master_list.bib +++ b/doc/source/master_list.bib @@ -928,6 +928,15 @@ @Article{Roberts18 volume = {376}, url = {http://dx.doi.org/10.1098/rsta.2017.0344} } +@Article{Tsujino18, + author = "H. Tsujino and S. Urakawa and R.J. Small and W.M. Kim and S.G. Yeager and et al.", + title = "{JRA‐55 based surface dataset for driving ocean–sea‐ice models (JRA55‐do)}", + journal = OM, + year = {2018}, + volume = {130}, + pages = {79-139}, + url = {http://dx.doi.org/10.1016/j.ocemod.2018.07.002} +} % ********************************************** % For new entries, see example entry in BIB_TEMPLATE.txt % ********************************************** diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index beb7c7f1d..c0ee9331d 100755 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -338,6 +338,7 @@ Table of namelist options "","``restore_ice``", "true/false", "restore ice state along lateral boundaries", "" "\*","``atm_data_type``", "``default``", "constant values defined in the code", "" "","", "``LYq``", "COREII Large-Yeager (AOMIP) forcing data", ":cite:`Large09`" + "","", "``JRA55``", "JRA55 forcing data :cite:`Tsujino18`", "" "","", "``monthly``", "monthly forcing data", "" "","", "``ncar``", "NCAR bulk forcing data", "" "","", "``box2001``", "forcing data for :cite:`Hunke01` box 
problem", "" From 2d6710ab96e47825f5bf4237ea8c18543d1c25b2 Mon Sep 17 00:00:00 2001 From: Philippe Blain <44212482+phil-blain@users.noreply.github.com> Date: Thu, 19 Sep 2019 18:51:27 -0400 Subject: [PATCH 09/14] Uniformize MPI interface (remove preprocessor #include) (#365) --- cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 | 2 +- cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 index ab1c5969a..8f2992eb4 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 @@ -60,7 +60,7 @@ MODULE ice_reprosum !- include statements -------------------------------------------------- !----------------------------------------------------------------------- #ifndef SERIAL_REMOVE_MPI -#include +include 'mpif.h' #endif !----------------------------------------------------------------------- diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 index 490b16b14..d52e58add 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 @@ -61,7 +61,7 @@ MODULE ice_reprosum !- include statements -------------------------------------------------- !----------------------------------------------------------------------- #ifndef SERIAL_REMOVE_MPI -#include +include 'mpif.h' #endif !----------------------------------------------------------------------- From fb1dd12a8b656bbc52b72d77e5c07835771fad7c Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Tue, 24 Sep 2019 17:49:25 -0700 Subject: [PATCH 10/14] RASM test update, new bathymetry popfile method, minor refactor to ice_dyn_evp_1d (#367) * merge ice_dyn_evp_1d modules and update 
ice_dyn_evp_1d interface names. Add get_bathymetry_popfile from RASM * remove old code * remove old code --- .../cicedynB/analysis/ice_diagnostics.F90 | 2 + cicecore/cicedynB/dynamics/ice_dyn_evp.F90 | 9 +- cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 | 162 ++++++++---------- cicecore/cicedynB/infrastructure/ice_grid.F90 | 102 ++++++++++- 4 files changed, 182 insertions(+), 93 deletions(-) diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedynB/analysis/ice_diagnostics.F90 index 4557b4075..5f8c56264 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedynB/analysis/ice_diagnostics.F90 @@ -124,6 +124,7 @@ subroutine runtime_diags (dt) use ice_global_reductions, only: global_sum, global_sum_prod, global_maxval use ice_grid, only: lmask_n, lmask_s, tarean, tareas use ice_state ! everything +! tcraig, this is likely to cause circular dependency because ice_prescribed_mod is high level routine #ifdef CESMCOUPLED use ice_prescribed_mod, only: prescribed_ice #endif @@ -839,6 +840,7 @@ subroutine runtime_diags (dt) if (print_global) then ! global diags for conservations checks +! 
tcraig, this is likely to cause circular dependency because ice_prescribed_mod is high level routine #ifdef CESMCOUPLED if (prescribed_ice) then write (nu_diag,*) '----------------------------' diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 index 34ba5b002..002341667 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 @@ -92,7 +92,8 @@ subroutine evp (dt) aice_init, aice0, aicen, vicen, strength use ice_timers, only: timer_dynamics, timer_bound, & ice_timer_start, ice_timer_stop, timer_evp_1d, timer_evp_2d - use ice_dyn_evp_1d + use ice_dyn_evp_1d, only: ice_dyn_evp_1d_copyin, ice_dyn_evp_1d_kernel, & + ice_dyn_evp_1d_copyout use ice_dyn_shared, only: kevp_kernel real (kind=dbl_kind), intent(in) :: & @@ -353,7 +354,7 @@ subroutine evp (dt) call abort_ice(trim(subname)//' & & Kernel not tested on tripole grid. Set kevp_kernel=0') endif - call evp_copyin( & + call ice_dyn_evp_1d_copyin( & nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost, & HTE,HTN, & !v1 dxhy,dyhx,cyp,cxp,cym,cxm,tinyarea, & @@ -367,7 +368,7 @@ subroutine evp (dt) stress12_1,stress12_2,stress12_3,stress12_4 ) if (kevp_kernel == 2) then call ice_timer_start(timer_evp_1d) - call evp_kernel_v2() + call ice_dyn_evp_1d_kernel() call ice_timer_stop(timer_evp_1d) !v1 else if (kevp_kernel == 1) then !v1 call evp_kernel_v1() @@ -375,7 +376,7 @@ subroutine evp (dt) if (my_task == 0) write(nu_diag,*) subname,' ERROR: kevp_kernel = ',kevp_kernel call abort_ice(subname//' kevp_kernel not supported.') endif - call evp_copyout( & + call ice_dyn_evp_1d_copyout( & nx_block,ny_block,nblocks,nx_global+2*nghost,ny_global+2*nghost,& !strocn uvel,vvel, strocnx,strocny, strintx,strinty, & uvel,vvel, strintx,strinty, & diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 index e82b12fd1..0cb5d9102 100644 --- 
a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 @@ -1,9 +1,11 @@ ! ice_dyn_evp_1d ! -! Contains 3 Fortran modules, +! Contained 3 Fortran modules, ! * dmi_omp ! * bench_v2 ! * ice_dyn_evp_1d +! These were merged into one module, ice_dyn_evp_1d to support some +! coupled build systems. ! ! Modules used for: ! * convert 2D arrays into 1D vectors @@ -11,9 +13,9 @@ ! * convert 1D vectors into 2D matrices ! ! Call from ice_dyn_evp.F90: -! call evp_copyin(...) -! call evp_kernel() -! call evp_copyout(...) +! call ice_dyn_evp_1d_copyin(...) +! call ice_dyn_evp_1d_kernel() +! call ice_dyn_evp_1d_copyout(...) ! ! * REAL4 internal version: ! mv evp_kernel1d.F90 evp_kernel1d_r8.F90 @@ -27,7 +29,9 @@ !=============================================================================== !=============================================================================== -module dmi_omp + +!-- One dimension representation of EVP 2D arrays used for EVP kernels +module ice_dyn_evp_1d use ice_kinds_mod use ice_fileunits, only: nu_diag @@ -35,14 +39,59 @@ module dmi_omp implicit none private - public :: domp_init, domp_get_domain, domp_get_thread_no + public :: ice_dyn_evp_1d_copyin, ice_dyn_evp_1d_copyout, ice_dyn_evp_1d_kernel + + interface ice_dyn_evp_1d_copyin +! module procedure evp_copyin_v1 + module procedure evp_copyin_v2 + end interface + + interface ice_dyn_evp_1d_kernel +! module procedure evp_kernel_v1 + module procedure evp_kernel_v2 + end interface + + interface ice_dyn_evp_1d_copyout + module procedure evp_copyout + end interface + + interface convert_2d_1d +! 
module procedure convert_2d_1d_v1 + module procedure convert_2d_1d_v2 + end interface + + integer(kind=int_kind) :: & + NA_len, NAVEL_len + logical(kind=log_kind), dimension(:), allocatable :: & + skipucell + integer(kind=int_kind), dimension(:), allocatable :: & + ee,ne,se,nw,sw,sse,indi,indj,indij , halo_parent + real (kind=dbl_kind), dimension(:), allocatable :: & + cdn_ocn,aiu,uocn,vocn,forcex,forcey,Tbu,tarear, & + umassdti,fm,uarear,strintx,strinty,uvel_init,vvel_init + real (kind=dbl_kind), dimension(:), allocatable :: & + strength,uvel,vvel,dxt,dyt, & +!v1 dxhy,dyhx,cyp,cxp,cym,cxm,tinyarea, & +!v1 waterx,watery, & + stressp_1, stressp_2, stressp_3, stressp_4, & + stressm_1, stressm_2, stressm_3, stressm_4, & + stress12_1,stress12_2,stress12_3,stress12_4, & + divu,rdg_conv,rdg_shear,shear,taubx,tauby + real (kind=DBL_KIND), dimension(:), allocatable :: & + str1, str2, str3, str4, str5, str6, str7, str8 + real (kind=dbl_kind), dimension(:), allocatable :: & + HTE,HTN, & + HTEm1,HTNm1 + logical(kind=log_kind),parameter :: dbug = .false. + +!--- dmi_omp --------------------------- interface domp_get_domain module procedure domp_get_domain_rlu end interface INTEGER, PARAMETER :: JPIM = SELECTED_INT_KIND(9) - integer(int_kind), private :: domp_iam, domp_nt + integer(int_kind) :: domp_iam, domp_nt #if defined (_OPENMP) ! 
Please note, this constant will create a compiler info for a constant @@ -50,17 +99,29 @@ module dmi_omp real(kind=dbl_kind) :: rdomp_iam, rdomp_nt !$OMP THREADPRIVATE(domp_iam,domp_nt,rdomp_iam,rdomp_nt) #endif +!--- dmi_omp --------------------------- -contains +!--- bench_v2 -------------------------- + interface evp1d_stress + module procedure stress_i + module procedure stress_l + end interface + interface evp1d_stepu + module procedure stepu_iter + module procedure stepu_last + end interface +!--- bench_v2 -------------------------- -!---------------------------------------------------------------------------- + contains + +!=============================================================================== +!former module dmi_omp subroutine domp_init(nt_out) #if defined (_OPENMP) use omp_lib, only : omp_get_thread_num, omp_get_num_threads #endif - use ice_forcing, only : dbug integer(int_kind), intent(out) :: nt_out @@ -165,30 +226,11 @@ end subroutine domp_get_thread_no !---------------------------------------------------------------------------- -end module dmi_omp +!former end module dmi_omp !=============================================================================== -!=============================================================================== - -module bench_v2 - - use ice_fileunits, only: nu_diag - use ice_exit, only: abort_ice - - implicit none - private - public :: evp1d_stress, evp1d_stepu, evp1d_halo_update - interface evp1d_stress - module procedure stress_i - module procedure stress_l - end interface - interface evp1d_stepu - module procedure stepu_iter - module procedure stepu_last - end interface - - contains +!former module bench_v2 !---------------------------------------------------------------------------- @@ -201,7 +243,6 @@ subroutine stress_i(NA_len, & str6,str7,str8) use ice_kinds_mod - use dmi_omp, only : domp_get_domain use ice_constants, only: p027, p055, p111, p166, p222, p25, p333, p5, c1p5, c1 use icepack_parameters, only: puny 
use ice_dyn_shared, only: ecci, denom1, arlx1i, Ktens, revp @@ -467,7 +508,6 @@ subroutine stress_l(NA_len, tarear, & str1,str2,str3,str4,str5,str6,str7,str8 ) use ice_kinds_mod - use dmi_omp, only : domp_get_domain use ice_constants, only: p027, p055, p111, p166, p222, p25, p333, p5, c1p5, c0, c1 use icepack_parameters, only: puny use ice_dyn_shared, only: ecci, denom1, arlx1i, Ktens, revp @@ -736,7 +776,6 @@ subroutine stepu_iter(NA_len,rhow, & str1,str2,str3,str4,str5,str6,str7,str8, nw,sw,se,skipme) use ice_kinds_mod - use dmi_omp, only : domp_get_domain use ice_dyn_shared, only: brlx, revp use ice_constants, only: c0, c1 @@ -827,7 +866,6 @@ subroutine stepu_last(NA_len, rhow, & str1,str2,str3,str4,str5,str6,str7,str8, nw,sw,se,skipme) use ice_kinds_mod - use dmi_omp, only : domp_get_domain use ice_constants, only: c0, c1 use icepack_parameters, only: puny use ice_dyn_shared, only: brlx, revp, basalstress @@ -920,7 +958,6 @@ end subroutine stepu_last subroutine evp1d_halo_update(NAVEL_len,lb,ub,uvel,vvel, halo_parent) use ice_kinds_mod - use dmi_omp, only : domp_get_domain implicit none @@ -957,56 +994,9 @@ end subroutine evp1d_halo_update !---------------------------------------------------------------------------- -end module bench_v2 +!former end module bench_v2 !=============================================================================== -!=============================================================================== - -!-- One dimension representation of EVP 2D arrays used for EVP kernels -module ice_dyn_evp_1d - - use ice_kinds_mod - use ice_fileunits, only: nu_diag - use ice_exit, only: abort_ice - - implicit none - private - public :: evp_copyin, evp_copyout, evp_kernel_v2 - - interface evp_copyin -! module procedure evp_copyin_v1 - module procedure evp_copyin_v2 - end interface - interface convert_2d_1d -! 
module procedure convert_2d_1d_v1 - module procedure convert_2d_1d_v2 - end interface - - integer(kind=int_kind) :: & - NA_len, NAVEL_len - logical(kind=log_kind), dimension(:), allocatable :: & - skipucell - integer(kind=int_kind), dimension(:), allocatable :: & - ee,ne,se,nw,sw,sse,indi,indj,indij , halo_parent - real (kind=dbl_kind), dimension(:), allocatable :: & - cdn_ocn,aiu,uocn,vocn,forcex,forcey,Tbu,tarear, & - umassdti,fm,uarear,strintx,strinty,uvel_init,vvel_init - real (kind=dbl_kind), dimension(:), allocatable :: & - strength,uvel,vvel,dxt,dyt, & -!v1 dxhy,dyhx,cyp,cxp,cym,cxm,tinyarea, & -!v1 waterx,watery, & - stressp_1, stressp_2, stressp_3, stressp_4, & - stressm_1, stressm_2, stressm_3, stressm_4, & - stress12_1,stress12_2,stress12_3,stress12_4, & - divu,rdg_conv,rdg_shear,shear,taubx,tauby - real (kind=DBL_KIND), dimension(:), allocatable :: & - str1, str2, str3, str4, str5, str6, str7, str8 - real (kind=dbl_kind), dimension(:), allocatable :: & - HTE,HTN, & - HTEm1,HTNm1 - - contains - !---------------------------------------------------------------------------- subroutine alloc1d(na) @@ -1407,8 +1397,6 @@ subroutine evp_kernel_v2 use ice_constants, only : c0 use ice_dyn_shared, only: ndte - use bench_v2, only : evp1d_stress, evp1d_stepu, evp1d_halo_update - use dmi_omp, only : domp_init use icepack_intfc, only: icepack_query_parameters use ice_communicate, only: my_task, master_task implicit none @@ -2043,7 +2031,6 @@ end subroutine findXinY_halo subroutine numainit(l,u,uu) - use dmi_omp, only : domp_get_domain use ice_constants, only: c0 implicit none @@ -2127,6 +2114,7 @@ subroutine numainit(l,u,uu) end subroutine numainit !---------------------------------------------------------------------------- +!=============================================================================== end module ice_dyn_evp_1d diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index 35c3003a4..7aa967a6c 
100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -15,18 +15,21 @@ module ice_grid use ice_kinds_mod + use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_boundary, only: ice_HaloUpdate, ice_HaloExtrapolate use ice_communicate, only: my_task, master_task use ice_blocks, only: block, get_block, nx_block, ny_block, nghost use ice_domain_size, only: nx_global, ny_global, max_blocks use ice_domain, only: blocks_ice, nblocks, halo_info, distrb_info, & ew_boundary_type, ns_boundary_type, init_domain_distribution - use ice_fileunits, only: nu_diag, nu_grid, nu_kmt + use ice_fileunits, only: nu_diag, nu_grid, nu_kmt, & + get_fileunit, release_fileunit use ice_gather_scatter, only: gather_global, scatter_global use ice_read_write, only: ice_read, ice_read_nc, ice_read_global, & ice_read_global_nc, ice_open, ice_open_nc, ice_close_nc use ice_timers, only: timer_bound, ice_timer_start, ice_timer_stop use ice_exit, only: abort_ice + use ice_global_reductions, only: global_minval, global_maxval use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters @@ -540,7 +543,11 @@ subroutine init_grid2 ! bathymetry !----------------------------------------------------------------- +#ifdef RASM_MODS + call get_bathymetry_popfile +#else call get_bathymetry +#endif !---------------------------------------------------------------- ! Corner coordinates for CF compliant history files @@ -1662,7 +1669,6 @@ subroutine Tlatlon use ice_constants, only: c0, c1, c2, c4, & field_loc_center, field_type_scalar - use ice_global_reductions, only: global_minval, global_maxval integer (kind=int_kind) :: & i, j, iblk , & ! horizontal indices @@ -2357,6 +2363,98 @@ subroutine get_bathymetry end subroutine get_bathymetry +!======================================================================= +! 
with use_bathymetry = false, vertical depth profile generated for max KMT +! with use_bathymetry = true, expects to read in pop vert_grid file + + subroutine get_bathymetry_popfile + + integer (kind=int_kind) :: & + i, j, k, iblk ! loop indices + + integer (kind=int_kind) :: & + ntmp, nlevel , & ! number of levels (max KMT) + k1 , & ! levels + ierr , & ! error tag + fid ! fid unit number + + real (kind=dbl_kind), dimension(:),allocatable :: & + depth , & ! total depth, m + thick ! layer thickness, cm -> m + + character(len=*), parameter :: subname = '(get_bathymetry_popfile)' + + ntmp = maxval(KMT) + nlevel = global_maxval(ntmp,distrb_info) + + if (my_task==master_task) then + write(nu_diag,*) subname,' KMT max = ',nlevel + endif + + allocate(depth(nlevel),thick(nlevel)) + thick = -999999. + depth = -999999. + + if (use_bathymetry) then + + write (nu_diag,*) subname,' Bathymetry file = ', trim(bathymetry_file) + if (my_task == master_task) then + call get_fileunit(fid) + open(fid,file=bathymetry_file,form='formatted',iostat=ierr) + if (ierr/=0) call abort_ice(subname//' open error') + do k = 1,nlevel + read(fid,*,iostat=ierr) thick(k) + if (ierr/=0) call abort_ice(subname//' read error') + enddo + call release_fileunit(fid) + endif + + call broadcast_array(thick,master_task) + + else + + ! create thickness profile + k1 = min(5,nlevel) + do k = 1,k1 + thick(k) = max(10000._dbl_kind/float(nlevel),500.) + enddo + do k = k1+1,nlevel + thick(k) = min(thick(k-1)*1.2_dbl_kind,20000._dbl_kind) + enddo + + endif + + ! convert thick from cm to m + thick = thick / 100._dbl_kind + + ! convert to total depth + depth(1) = thick(1) + do k = 2, nlevel + depth(k) = depth(k-1) + thick(k) + if (depth(k) < 0.) 
call abort_ice(subname//' negative depth error') + enddo + + if (my_task==master_task) then + do k = 1,nlevel + write(nu_diag,'(2a,i6,2f13.7)') subname,' k, thick(m), depth(m) = ',k,thick(k),depth(k) + enddo + endif + + bathymetry = 0._dbl_kind + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + k = kmt(i,j,iblk) + if (k > nlevel) call abort_ice(subname//' kmt/nlevel error') + if (k > 0) bathymetry(i,j,iblk) = depth(k) + enddo + enddo + enddo + + deallocate(depth,thick) + + end subroutine get_bathymetry_popfile + !======================================================================= ! Read bathymetry data for basal stress calculation (grounding scheme for From 13c6b03f7787d90283625578a50023524c5e748b Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Fri, 11 Oct 2019 15:09:46 -0700 Subject: [PATCH 11/14] update cice version to 6.0.2 and update icepack to 1.1.2 (#368) --- cicecore/version.txt | 2 +- doc/source/conf.py | 4 ++-- icepack | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cicecore/version.txt b/cicecore/version.txt index de34d9d31..c6a84bfc7 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.0.1 +CICE 6.0.2 diff --git a/doc/source/conf.py b/doc/source/conf.py index f9b0b7b68..4404ecbb1 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.0.1' +version = u'6.0.2' # The full version, including alpha/beta/rc tags. -version = u'6.0.1' +version = u'6.0.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/icepack b/icepack index 423c81679..5af464922 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 423c81679147ede18b6ae9645ba3bdb74e492552 +Subproject commit 5af4649222c6f1529288b721ddaf62ef0e935375 From 333a5a28ccd0bfc3059a1b35a290509ee6513102 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Wed, 16 Oct 2019 08:35:36 -0700 Subject: [PATCH 12/14] update gordon, conrad, onyx serial launch command (#369) --- configuration/scripts/cice.launch.csh | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index b9ea5c806..b61921505 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -71,27 +71,15 @@ endif #======= else if (${ICE_MACHINE} =~ onyx*) then -if (${ICE_COMMDIR} =~ serial*) then -cat >> ${jobfile} << EOFR -./cice >&! \$ICE_RUNLOG_FILE -EOFR -else cat >> ${jobfile} << EOFR aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE EOFR -endif #======= else if (${ICE_MACHINE} =~ gordon* || ${ICE_MACHINE} =~ conrad*) then -if (${ICE_COMMDIR} =~ serial*) then -cat >> ${jobfile} << EOFR -./cice >&! \$ICE_RUNLOG_FILE -EOFR -else cat >> ${jobfile} << EOFR aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! 
\$ICE_RUNLOG_FILE EOFR -endif #======= else if (${ICE_MACHINE} =~ cori*) then From 2a2b1078f5229b13531af862c85507412be4f7c7 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Tue, 22 Oct 2019 08:37:23 -0700 Subject: [PATCH 13/14] Update zenodo json template and report_results script (#370) * update zenodo json template and report_results script * update .gitignore add testsuite* * update zenodo json file * update gitignore again and add ciceexe cleanup --- .gitignore | 4 ++ .zenodo.json | 10 ++-- cice.setup | 9 +++ .../scripts/tests/report_results.csh | 58 +------------------ 4 files changed, 19 insertions(+), 62 deletions(-) diff --git a/.gitignore b/.gitignore index 9574d2b2b..f32b5b0ea 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,7 @@ doc/build # Ignore macOS cache files .DS_Store + +# Ignore testsuite file/directories +testsuite* +caselist* diff --git a/.zenodo.json b/.zenodo.json index 5eebe93e6..6eb59f0d5 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -1,7 +1,7 @@ { "license": "other-open", "description": "No description provided", - "language": "English", + "language": "eng", "title": "CICE-Consortium/CICE: CICE Version m.n.p", "keywords": [ "sea ice model, CICE, Icepack" @@ -37,7 +37,7 @@ }, { "affiliation": "National Oceanographic and Atmospheric Administration (CTR)", - "name": "Tony Craig" + "name": "Anthony Craig" }, { "affiliation": "Environment and Climate Change Canada", @@ -80,7 +80,7 @@ "name": "Andrew Roberts" }, { - "affiliation": "Naval Research Laboratory Stennis Space Center (CTR)", + "affiliation": "Los Alamos National Laboratory", "name": "Matthew Turner" }, { @@ -91,14 +91,12 @@ "access_right": "open", "related_identifiers": [ { - "scheme": "url", "identifier": "https://github.com/CICE-Consortium/CICE/tree/CICE6.0.1", "relation": "isSupplementTo" }, { - "scheme": "doi", "identifier": "10.5281/zenodo.1205674", - "relation": "isVersionOf" + "relation": "isNewVersionOf" } ] } \ No newline at end of file diff --git a/cice.setup 
b/cice.setup index 13ba00f85..8cdf76c41 100755 --- a/cice.setup +++ b/cice.setup @@ -953,6 +953,15 @@ end if ( ${dosuite} == 1 ) then + # Delete reused ciceexe files at the end to save space +foreach file (${tsdir}/suite.run ${tsdir}/suite.submit) +cat >> $file << EOF0 + +set nonomatch && rm -f ciceexe.* && unset nonomatch + +EOF0 +end + # Add code to results.csh to count the number of failures cat >> ${tsdir}/results.csh << EOF cat ./results.log diff --git a/configuration/scripts/tests/report_results.csh b/configuration/scripts/tests/report_results.csh index 044198ef6..a29c218a3 100755 --- a/configuration/scripts/tests/report_results.csh +++ b/configuration/scripts/tests/report_results.csh @@ -52,21 +52,17 @@ set compilers = `grep -v "#" results.log | grep ${mach}_ | cut -d "_" -f 2 | sor #echo "debug ${fail}" #echo "debug ${cases}" -set xcdat = `echo $cdat | sed 's|-||g' | cut -c 3-` +set xcdat = `echo $cdat | cut -c 3-` set xctim = `echo $ctim | sed 's|:||g'` set shrepo = `echo $repo | tr '[A-Z]' '[a-z]'` set tsubdir = cice_master set hfile = "cice_by_hash" set mfile = "cice_by_mach" -set vfile = "cice_by_vers" -set bfile = "cice_by_bran" if ("${shrepo}" !~ "*cice-consortium*") then set tsubdir = cice_dev set hfile = {$hfile}_forks set mfile = {$mfile}_forks - set vfile = {$vfile}_forks - set bfile = {$bfile}_forks endif set noglob @@ -287,10 +283,8 @@ EOF set hashfile = "${wikiname}/${tsubdir}/${hfile}.md" set machfile = "${wikiname}/${tsubdir}/${mfile}.md" -set versfile = "${wikiname}/${tsubdir}/${vfile}.md" -set branfile = "${wikiname}/${tsubdir}/${bfile}.md" -foreach xfile ($hashfile $machfile $versfile $branfile) +foreach xfile ($hashfile $machfile) if (-e ${xfile}) then cp -f ${xfile} ${xfile}.prev endif @@ -319,29 +313,6 @@ else sed -i "$nline a | ${mach} | ${compiler} | ${vers} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | " ${hashfile} endif -#===================== -# update versfile 
-#===================== - -set chk = 0 -if (-e ${versfile}) set chk = `grep "\*\*${vers}" ${versfile} | wc -l` -if ($chk == 0) then -cat >! ${versfile} << EOF -**${vers}** : - -| machine | compiler | hash | date | test fail | comp fail | total | -| ------ | ------ | ------ | ------ | ------ | ------ | ------ | -| ${mach} | ${compiler} | ${shhash} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | - -EOF -if (-e ${versfile}.prev) cat ${versfile}.prev >> ${versfile} - -else - set oline = `grep -n "\*\*${vers}" ${versfile} | head -1 | cut -d : -f 1` - @ nline = ${oline} + 3 - sed -i "$nline a | ${mach} | ${compiler} | ${shhash} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | " ${versfile} -endif - #===================== # update machfile #===================== @@ -365,29 +336,6 @@ else sed -i "$nline a | ${vers} | ${shhash} | ${compiler} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | " ${machfile} endif -#===================== -# update branfile -#===================== - -set chk = 0 -if (-e ${branfile}) set chk = `grep "\*\*${bran}" ${branfile} | wc -l` -if ($chk == 0) then -cat >! 
${branfile} << EOF -**${bran}** **${repo}**: - -| machine | compiler | hash | date | test fail | comp fail | total | -| ------ | ------ | ------ | ------ | ------ | ------ | ------ | -| ${mach} | ${compiler} | ${shhash} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | - -EOF -if (-e ${branfile}.prev) cat ${branfile}.prev >> ${branfile} - -else - set oline = `grep -n "\*\*${bran}" ${branfile} | head -1 | cut -d : -f 1` - @ nline = ${oline} + 3 - sed -i "$nline a | ${mach} | ${compiler} | ${shhash} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | " ${branfile} -endif - #foreach compiler end @@ -400,8 +348,6 @@ git add ${tsubdir}/${shhash}.${mach}*.md git add ${tsubdir}/${ofile}.md git add ${tsubdir}/${hfile}.md git add ${tsubdir}/${mfile}.md -git add ${tsubdir}/${vfile}.md -git add ${tsubdir}/${bfile}.md git commit -a -m "update $hash $mach" git push origin master cd ../ From 85f7dac5a72ce07e69288618ada9729ff022dff4 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Fri, 1 Nov 2019 18:57:39 -0700 Subject: [PATCH 14/14] Add NUOPC cap and rearrange driver directory (#376) * NUOPC branch * initial CICE6 port to CESM+NUOPC * clean up nu_diag initialization due to complexity added by nuopc cap * add nu_diag_set flag to work around the nu_diag setting in the nuopc cap * update cice driver directories * update documentation * update documentation * update documentation * update documentation * fix minor bug in cice.setup that affects settings parsing without final carriage return * update documentation * rename drivers/subroutine to drivers/direct and update documentation --- cice.setup | 2 +- cicecore/cicedynB/general/ice_init.F90 | 18 +- .../io/io_pio2/ice_history_write.F90 | 1064 ++++++++++ .../infrastructure/io/io_pio2/ice_pio.F90 | 365 ++++ .../infrastructure/io/io_pio2/ice_restart.F90 | 876 ++++++++ .../drivers/{ => direct}/hadgem3/CICE.F90 | 0 .../{ => 
direct}/hadgem3/CICE_FinalMod.F90 | 0 .../{ => direct}/hadgem3/CICE_InitMod.F90 | 0 .../{ => direct}/hadgem3/CICE_RunMod.F90 | 0 .../{cesm => mct/cesm1}/CICE_FinalMod.F90 | 0 .../{cesm => mct/cesm1}/CICE_InitMod.F90 | 0 .../{cesm => mct/cesm1}/CICE_RunMod.F90 | 0 .../{cesm => mct/cesm1}/CICE_RunMod.F90_debug | 0 .../{cesm => mct/cesm1}/CICE_copyright.txt | 0 .../{cesm => mct/cesm1}/ice_comp_esmf.F90 | 0 .../{cesm => mct/cesm1}/ice_comp_mct.F90 | 0 .../{cesm => mct/cesm1}/ice_cpl_indices.F90 | 0 .../{cesm => mct/cesm1}/ice_import_export.F90 | 0 .../cesm1}/ice_prescribed_mod.F90 | 0 .../drivers/{cesm => mct/cesm1}/ice_scam.F90 | 0 .../drivers/nuopc/cmeps/CICE_FinalMod.F90 | 93 + cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 432 ++++ cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 659 ++++++ .../drivers/nuopc/cmeps/CICE_copyright.txt | 17 + .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 1352 +++++++++++++ .../drivers/nuopc/cmeps/ice_import_export.F90 | 1799 +++++++++++++++++ .../nuopc/cmeps/ice_prescribed_mod.F90 | 642 ++++++ cicecore/drivers/nuopc/cmeps/ice_scam.F90 | 14 + .../drivers/nuopc/cmeps/ice_shr_methods.F90 | 999 +++++++++ .../drivers/{ => standalone}/cice/CICE.F90 | 0 .../{ => standalone}/cice/CICE_FinalMod.F90 | 0 .../{ => standalone}/cice/CICE_InitMod.F90 | 0 .../{ => standalone}/cice/CICE_RunMod.F90 | 0 .../cice/CICE_RunMod.F90_debug | 0 cicecore/shared/ice_fileunits.F90 | 14 +- configuration/scripts/cice.settings | 2 +- doc/source/developer_guide/dg_driver.rst | 29 +- doc/source/developer_guide/dg_dynamics.rst | 23 +- doc/source/user_guide/ug_implementation.rst | 5 +- doc/source/user_guide/ug_running.rst | 19 +- 40 files changed, 8388 insertions(+), 36 deletions(-) create mode 100644 cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 create mode 100644 cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 create mode 100644 cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 rename cicecore/drivers/{ => direct}/hadgem3/CICE.F90 
(100%) rename cicecore/drivers/{ => direct}/hadgem3/CICE_FinalMod.F90 (100%) rename cicecore/drivers/{ => direct}/hadgem3/CICE_InitMod.F90 (100%) rename cicecore/drivers/{ => direct}/hadgem3/CICE_RunMod.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/CICE_FinalMod.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/CICE_InitMod.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/CICE_RunMod.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/CICE_RunMod.F90_debug (100%) rename cicecore/drivers/{cesm => mct/cesm1}/CICE_copyright.txt (100%) rename cicecore/drivers/{cesm => mct/cesm1}/ice_comp_esmf.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/ice_comp_mct.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/ice_cpl_indices.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/ice_import_export.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/ice_prescribed_mod.F90 (100%) rename cicecore/drivers/{cesm => mct/cesm1}/ice_scam.F90 (100%) create mode 100644 cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 create mode 100644 cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 create mode 100644 cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 create mode 100644 cicecore/drivers/nuopc/cmeps/CICE_copyright.txt create mode 100644 cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 create mode 100644 cicecore/drivers/nuopc/cmeps/ice_import_export.F90 create mode 100644 cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 create mode 100644 cicecore/drivers/nuopc/cmeps/ice_scam.F90 create mode 100644 cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 rename cicecore/drivers/{ => standalone}/cice/CICE.F90 (100%) rename cicecore/drivers/{ => standalone}/cice/CICE_FinalMod.F90 (100%) rename cicecore/drivers/{ => standalone}/cice/CICE_InitMod.F90 (100%) rename cicecore/drivers/{ => standalone}/cice/CICE_RunMod.F90 (100%) rename cicecore/drivers/{ => standalone}/cice/CICE_RunMod.F90_debug (100%) diff --git a/cice.setup b/cice.setup index 8cdf76c41..19b7fac8d 100755 
--- a/cice.setup +++ b/cice.setup @@ -849,7 +849,7 @@ EOF2 cat ${ICE_SCRIPTS}/options/set_env.${name} >> ${fsmods} -cat >> ${fimods} << EOF2 +cat >> ${fsmods} << EOF2 EOF2 diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 1d3aedf6e..239a8c867 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -16,7 +16,7 @@ module ice_init use ice_communicate, only: my_task, master_task, ice_barrier use ice_constants, only: c0, c1, c2, c3, p2, p5 use ice_exit, only: abort_ice - use ice_fileunits, only: nu_nml, nu_diag, nml_filename, diag_type, & + use ice_fileunits, only: nu_nml, nu_diag, nu_diag_set, nml_filename, diag_type, & ice_stdout, get_fileunit, release_fileunit, bfbflag, flush_fileunit, & ice_IOUnitsMinUnit, ice_IOUnitsMaxUnit #ifdef CESMCOUPLED @@ -466,11 +466,17 @@ subroutine input_data history_file = trim(runid) // ".cice" // trim(inst_suffix) //".h" restart_file = trim(runid) // ".cice" // trim(inst_suffix) //".r" incond_file = trim(runid) // ".cice" // trim(inst_suffix) //".i" - inquire(file='ice_modelio.nml'//trim(inst_suffix),exist=exists) - if (exists) then - call get_fileUnit(nu_diag) - call shr_file_setIO('ice_modelio.nml'//trim(inst_suffix),nu_diag) - end if + ! Note by tcraig - this if test is needed because the nuopc cap sets + ! nu_diag before this routine is called. This creates a conflict. + ! In addition, in the nuopc cap, shr_file_setIO will fail if the + ! needed namelist is missing (which it is in the CIME nuopc implementation) + if (.not. nu_diag_set) then + inquire(file='ice_modelio.nml'//trim(inst_suffix),exist=exists) + if (exists) then + call get_fileUnit(nu_diag) + call shr_file_setIO('ice_modelio.nml'//trim(inst_suffix),nu_diag) + end if + endif else ! 
each task gets unique ice log filename when if test is true, for debugging if (1 == 0) then diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 new file mode 100644 index 000000000..771d0e313 --- /dev/null +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -0,0 +1,1064 @@ +!======================================================================= +! +! Writes history in netCDF format +! +! authors Tony Craig and Bruce Briegleb, NCAR +! Elizabeth C. Hunke and William H. Lipscomb, LANL +! C. M. Bitz, UW +! +! 2004 WHL: Block structure added +! 2006 ECH: Accepted some CESM code into mainstream CICE +! Added ice_present, aicen, vicen; removed aice1...10, vice1...1. +! Added histfreq_n and histfreq='h' options, removed histfreq='w' +! Converted to free source form (F90) +! Added option for binary output instead of netCDF +! 2009 D Bailey and ECH: Generalized for multiple frequency output +! 2010 Alison McLaren and ECH: Added 3D capability +! + module ice_history_write + + use ice_kinds_mod + use ice_fileunits, only: nu_diag + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters + + implicit none + private + public :: ice_write_hist + +!======================================================================= + + contains + +!======================================================================= +! +! write average ice quantities or snapshots +! +! author: Elizabeth C. 
Hunke, LANL + + subroutine ice_write_hist (ns) + +#ifdef ncdf + use ice_blocks, only: nx_block, ny_block + use ice_broadcast, only: broadcast_scalar + use ice_calendar, only: time, sec, idate, idate0, write_ic, & + histfreq, dayyr, days_per_year, use_leap_years + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, c360, spval, spval_dbl + use ice_domain, only: distrb_info, nblocks + use ice_domain_size, only: nx_global, ny_global, max_blocks, max_nstrm + use ice_gather_scatter, only: gather_global + use ice_grid, only: TLON, TLAT, ULON, ULAT, hm, bm, tarea, uarea, & + dxu, dxt, dyu, dyt, HTN, HTE, ANGLE, ANGLET, tmask, & + lont_bounds, latt_bounds, lonu_bounds, latu_bounds + use ice_history_shared + use ice_arrays_column, only: hin_max + use ice_restart_shared, only: runid, lcdf64 + use netcdf +#endif + use ice_pio + use pio + + integer (kind=int_kind), intent(in) :: ns + + ! local variables + +#ifdef ncdf + integer (kind=int_kind) :: i,j,k,ic,n,nn, & + ncid,status,imtid,jmtid,kmtidi,kmtids,kmtidb, cmtid,timid, & + length,nvertexid,ivertex,kmtida + integer (kind=int_kind), dimension(2) :: dimid2 + integer (kind=int_kind), dimension(3) :: dimid3 + integer (kind=int_kind), dimension(4) :: dimidz + integer (kind=int_kind), dimension(5) :: dimidcz + integer (kind=int_kind), dimension(3) :: dimid_nverts + integer (kind=int_kind), dimension(5) :: dimidex + real (kind=real_kind) :: ltime + real (kind= dbl_kind) :: ltime2 + character (char_len) :: title + character (char_len_long) :: ncfile(max_nstrm) + + integer (kind=int_kind) :: iyear, imonth, iday + integer (kind=int_kind) :: icategory,ind,i_aice,boundid + + character (char_len) :: start_time,current_date,current_time + character (len=16) :: c_aice + character (len=8) :: cdate + + type(file_desc_t) :: File + type(io_desc_t) :: iodesc2d, & + iodesc3dc, iodesc3dv, iodesc3di, iodesc3db, iodesc3da, & + iodesc4di, iodesc4ds + type(var_desc_t) :: varid + + ! 
4 coordinate variables: TLON, TLAT, ULON, ULAT + INTEGER (kind=int_kind), PARAMETER :: ncoord = 4 + + ! 4 vertices in each grid cell + INTEGER (kind=int_kind), PARAMETER :: nverts = 4 + + ! 4 variables describe T, U grid boundaries: + ! lont_bounds, latt_bounds, lonu_bounds, latu_bounds + INTEGER (kind=int_kind), PARAMETER :: nvar_verts = 4 + + TYPE coord_attributes ! netcdf coordinate attributes + character (len=11) :: short_name + character (len=45) :: long_name + character (len=20) :: units + END TYPE coord_attributes + + TYPE req_attributes ! req'd netcdf attributes + type (coord_attributes) :: req + character (len=20) :: coordinates + END TYPE req_attributes + + TYPE(req_attributes), dimension(nvar) :: var + TYPE(coord_attributes), dimension(ncoord) :: coord_var + TYPE(coord_attributes), dimension(nvar_verts) :: var_nverts + TYPE(coord_attributes), dimension(nvarz) :: var_nz + CHARACTER (char_len), dimension(ncoord) :: coord_bounds + + real (kind=dbl_kind), allocatable :: workr2(:,:,:) + real (kind=dbl_kind), allocatable :: workr3(:,:,:,:) + real (kind=dbl_kind), allocatable :: workr4(:,:,:,:,:) + real (kind=dbl_kind), allocatable :: workr3v(:,:,:,:) + + character(len=char_len_long) :: & + filename + + integer (kind=int_kind), dimension(1) :: & + tim_start,tim_length ! dimension quantities for netCDF + + integer (kind=int_kind), dimension(2) :: & + bnd_start,bnd_length ! dimension quantities for netCDF + + real (kind=dbl_kind) :: secday + real (kind=dbl_kind) :: rad_to_deg + + character(len=*), parameter :: subname = '(ice_write_hist)' + + call icepack_query_parameters(secday_out=secday) + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (my_task == master_task) then + call construct_filename(ncfile(ns),'nc',ns) + + ! 
add local directory path name to ncfile + if (write_ic) then + ncfile(ns) = trim(incond_dir)//ncfile(ns) + else + ncfile(ns) = trim(history_dir)//ncfile(ns) + endif + filename = ncfile(ns) + end if + call broadcast_scalar(filename, master_task) + + ! create file + + File%fh=-1 + call ice_pio_init(mode='write', filename=trim(filename), File=File, & + clobber=.true., cdf64=lcdf64) + + call ice_pio_initdecomp(iodesc=iodesc2d) + call ice_pio_initdecomp(ndim3=ncat_hist, iodesc=iodesc3dc) + call ice_pio_initdecomp(ndim3=nzilyr, iodesc=iodesc3di) + call ice_pio_initdecomp(ndim3=nzblyr, iodesc=iodesc3db) + call ice_pio_initdecomp(ndim3=nzalyr, iodesc=iodesc3da) + call ice_pio_initdecomp(ndim3=nverts, inner_dim=.true., iodesc=iodesc3dv) + call ice_pio_initdecomp(ndim3=nzilyr, ndim4=ncat_hist, iodesc=iodesc4di) + call ice_pio_initdecomp(ndim3=nzslyr, ndim4=ncat_hist, iodesc=iodesc4ds) + + ltime2 = time/int(secday) + ltime = real(time/int(secday),kind=real_kind) + + !----------------------------------------------------------------- + ! define dimensions + !----------------------------------------------------------------- + + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_def_dim(File,'d2',2,boundid) + endif + + status = pio_def_dim(File,'ni',nx_global,imtid) + status = pio_def_dim(File,'nj',ny_global,jmtid) + status = pio_def_dim(File,'nc',ncat_hist,cmtid) + status = pio_def_dim(File,'nkice',nzilyr,kmtidi) + status = pio_def_dim(File,'nksnow',nzslyr,kmtids) + status = pio_def_dim(File,'nkbio',nzblyr,kmtidb) + status = pio_def_dim(File,'nkaer',nzalyr,kmtida) + status = pio_def_dim(File,'time',PIO_UNLIMITED,timid) + status = pio_def_dim(File,'nvertices',nverts,nvertexid) + + !----------------------------------------------------------------- + ! 
define coordinate variables: time, time_bounds + !----------------------------------------------------------------- + +!sgl status = pio_def_var(File,'time',pio_real,(/timid/),varid) + status = pio_def_var(File,'time',pio_double,(/timid/),varid) + status = pio_put_att(File,varid,'long_name','model time') + + write(cdate,'(i8.8)') idate0 + write(title,'(a,a,a,a,a,a,a)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + status = pio_put_att(File,varid,'units',trim(title)) + + if (days_per_year == 360) then + status = pio_put_att(File,varid,'calendar','360_day') + elseif (days_per_year == 365 .and. .not.use_leap_years ) then + status = pio_put_att(File,varid,'calendar','NoLeap') + elseif (use_leap_years) then + status = pio_put_att(File,varid,'calendar','Gregorian') + else + call abort_ice(subname//'ERROR: invalid calendar settings') + endif + + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'bounds','time_bounds') + endif + + ! Define attributes for time_bounds if hist_avg is true + if (hist_avg .and. histfreq(ns) /= '1') then + dimid2(1) = boundid + dimid2(2) = timid +!sgl status = pio_def_var(File,'time_bounds',pio_real,dimid2,varid) + status = pio_def_var(File,'time_bounds',pio_double,dimid2,varid) + status = pio_put_att(File,varid,'long_name', & + 'boundaries for time-averaging interval') + write(cdate,'(i8.8)') idate0 + write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + status = pio_put_att(File,varid,'units',trim(title)) + endif + + !----------------------------------------------------------------- + ! 
define information for required time-invariant variables + !----------------------------------------------------------------- + + ind = 0 + ind = ind + 1 + coord_var(ind) = coord_attributes('TLON', & + 'T grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lont_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('TLAT', & + 'T grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latt_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('ULON', & + 'U grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lonu_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('ULAT', & + 'U grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latu_bounds' + + var_nz(1) = coord_attributes('NCAT', 'category maximum thickness', 'm') + var_nz(2) = coord_attributes('VGRDi', 'vertical ice levels', '1') + var_nz(3) = coord_attributes('VGRDs', 'vertical snow levels', '1') + var_nz(4) = coord_attributes('VGRDb', 'vertical ice-bio levels', '1') + var_nz(5) = coord_attributes('VGRDa', 'vertical snow-ice-bio levels', '1') + + !----------------------------------------------------------------- + ! 
define information for optional time-invariant variables + !----------------------------------------------------------------- + + var(n_tmask)%req = coord_attributes('tmask', & + 'ocean grid mask', ' ') + var(n_tmask)%coordinates = 'TLON TLAT' + + var(n_blkmask)%req = coord_attributes('blkmask', & + 'ice grid block mask', ' ') + var(n_blkmask)%coordinates = 'TLON TLAT' + + var(n_tarea)%req = coord_attributes('tarea', & + 'area of T grid cells', 'm^2') + var(n_tarea)%coordinates = 'TLON TLAT' + + var(n_uarea)%req = coord_attributes('uarea', & + 'area of U grid cells', 'm^2') + var(n_uarea)%coordinates = 'ULON ULAT' + var(n_dxt)%req = coord_attributes('dxt', & + 'T cell width through middle', 'm') + var(n_dxt)%coordinates = 'TLON TLAT' + var(n_dyt)%req = coord_attributes('dyt', & + 'T cell height through middle', 'm') + var(n_dyt)%coordinates = 'TLON TLAT' + var(n_dxu)%req = coord_attributes('dxu', & + 'U cell width through middle', 'm') + var(n_dxu)%coordinates = 'ULON ULAT' + var(n_dyu)%req = coord_attributes('dyu', & + 'U cell height through middle', 'm') + var(n_dyu)%coordinates = 'ULON ULAT' + var(n_HTN)%req = coord_attributes('HTN', & + 'T cell width on North side','m') + var(n_HTN)%coordinates = 'TLON TLAT' + var(n_HTE)%req = coord_attributes('HTE', & + 'T cell width on East side', 'm') + var(n_HTE)%coordinates = 'TLON TLAT' + var(n_ANGLE)%req = coord_attributes('ANGLE', & + 'angle grid makes with latitude line on U grid', & + 'radians') + var(n_ANGLE)%coordinates = 'ULON ULAT' + var(n_ANGLET)%req = coord_attributes('ANGLET', & + 'angle grid makes with latitude line on T grid', & + 'radians') + var(n_ANGLET)%coordinates = 'TLON TLAT' + + ! These fields are required for CF compliance + ! 
dimensions (nx,ny,nverts) + var_nverts(n_lont_bnds) = coord_attributes('lont_bounds', & + 'longitude boundaries of T cells', 'degrees_east') + var_nverts(n_latt_bnds) = coord_attributes('latt_bounds', & + 'latitude boundaries of T cells', 'degrees_north') + var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds', & + 'longitude boundaries of U cells', 'degrees_east') + var_nverts(n_latu_bnds) = coord_attributes('latu_bounds', & + 'latitude boundaries of U cells', 'degrees_north') + + !----------------------------------------------------------------- + ! define attributes for time-invariant variables + !----------------------------------------------------------------- + + dimid2(1) = imtid + dimid2(2) = jmtid + + do i = 1, ncoord + status = pio_def_var(File, trim(coord_var(i)%short_name), pio_real, & + dimid2, varid) + status = pio_put_att(File,varid,'long_name',trim(coord_var(i)%long_name)) + status = pio_put_att(File, varid, 'units', trim(coord_var(i)%units)) + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + if (coord_var(i)%short_name == 'ULAT') then + status = pio_put_att(File,varid,'comment', & + trim('Latitude of NE corner of T grid cell')) + endif + if (f_bounds) then + status = pio_put_att(File, varid, 'bounds', trim(coord_bounds(i))) + endif + enddo + + ! Extra dimensions (NCAT, NZILYR, NZSLYR, NZBLYR, NZALYR) + dimidex(1)=cmtid + dimidex(2)=kmtidi + dimidex(3)=kmtids + dimidex(4)=kmtidb + dimidex(5)=kmtida + + do i = 1, nvarz + if (igrdz(i)) then + status = pio_def_var(File, trim(var_nz(i)%short_name), pio_real, & + (/dimidex(i)/), varid) + status = pio_put_att(File, varid, 'long_name', var_nz(i)%long_name) + status = pio_put_att(File, varid, 'units' , var_nz(i)%units) + endif + enddo + + ! 
Attributes for tmask defined separately, since it has no units + if (igrd(n_tmask)) then + status = pio_def_var(File, 'tmask', pio_real, dimid2, varid) + status = pio_put_att(File,varid, 'long_name', 'ocean grid mask') + status = pio_put_att(File, varid, 'coordinates', 'TLON TLAT') + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + status = pio_put_att(File,varid,'comment', '0 = land, 1 = ocean') + endif + if (igrd(n_blkmask)) then + status = pio_def_var(File, 'blkmask', pio_real, dimid2, varid) + status = pio_put_att(File,varid, 'long_name', 'ice grid block mask') + status = pio_put_att(File, varid, 'coordinates', 'TLON TLAT') + status = pio_put_att(File,varid,'comment', 'mytask + iblk/100') + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + endif + + do i = 3, nvar ! note: n_tmask=1, n_blkmask=2 + if (igrd(i)) then + status = pio_def_var(File, trim(var(i)%req%short_name), & + pio_real, dimid2, varid) + status = pio_put_att(File,varid, 'long_name', trim(var(i)%req%long_name)) + status = pio_put_att(File, varid, 'units', trim(var(i)%req%units)) + status = pio_put_att(File, varid, 'coordinates', trim(var(i)%coordinates)) + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + endif + enddo + + ! 
Fields with dimensions (nverts,nx,ny) + dimid_nverts(1) = nvertexid + dimid_nverts(2) = imtid + dimid_nverts(3) = jmtid + do i = 1, nvar_verts + if (f_bounds) then + status = pio_def_var(File, trim(var_nverts(i)%short_name), & + pio_real,dimid_nverts, varid) + status = & + pio_put_att(File,varid, 'long_name', trim(var_nverts(i)%long_name)) + status = & + pio_put_att(File, varid, 'units', trim(var_nverts(i)%units)) + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + endif + enddo + + !----------------------------------------------------------------- + ! define attributes for time-variant variables + !----------------------------------------------------------------- + + !----------------------------------------------------------------- + ! 2D + !----------------------------------------------------------------- + + dimid3(1) = imtid + dimid3(2) = jmtid + dimid3(3) = timid + + do n=1,num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + pio_real, dimid3, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + status = pio_put_att(File,varid,'missing_value',spval) + status = pio_put_att(File,varid,'_FillValue',spval) + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. 
histfreq(ns) /= '1') then + if (TRIM(avail_hist_fields(n)%vname)/='sig1' & + .or.TRIM(avail_hist_fields(n)%vname)/='sig2' & + .or.TRIM(avail_hist_fields(n)%vname)/='sistreave' & + .or.TRIM(avail_hist_fields(n)%vname)/='sistremax' & + .or.TRIM(avail_hist_fields(n)%vname)/='sigP') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg & + .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots + .or. n==n_sig1(ns) .or. n==n_sig2(ns) & + .or. n==n_sigP(ns) .or. n==n_trsig(ns) & + .or. n==n_sistreave(ns) .or. n==n_sistremax(ns) & + .or. n==n_mlt_onset(ns) .or. n==n_frz_onset(ns) & + .or. n==n_hisnap(ns) .or. n==n_aisnap(ns)) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_2D + + !----------------------------------------------------------------- + ! 3D (category) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = cmtid + dimidz(4) = timid + + do n = n2D + 1, n3Dccum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + pio_real, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + status = pio_put_att(File,varid,'missing_value',spval) + status = pio_put_att(File,varid,'_FillValue',spval) + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. 
hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Dc + + !----------------------------------------------------------------- + ! 3D (ice layers) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtidi + dimidz(4) = timid + + do n = n3Dccum + 1, n3Dzcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + pio_real, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + status = pio_put_att(File,varid,'missing_value',spval) + status = pio_put_att(File,varid,'_FillValue',spval) + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Dz + + !----------------------------------------------------------------- + ! 3D (biology ice layers) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtidb + dimidz(4) = timid + + do n = n3Dzcum + 1, n3Dbcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + pio_real, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + status = pio_put_att(File,varid,'missing_value',spval) + status = pio_put_att(File,varid,'_FillValue',spval) + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Db + + !----------------------------------------------------------------- + ! 3D (biology snow layers) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtida + dimidz(4) = timid + + do n = n3Dbcum + 1, n3Dacum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + pio_real, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + status = pio_put_att(File,varid,'missing_value',spval) + status = pio_put_att(File,varid,'_FillValue',spval) + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. 
histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Da + + !----------------------------------------------------------------- + ! define attributes for 4D variables + ! time coordinate is dropped + !----------------------------------------------------------------- + + !----------------------------------------------------------------- + ! 4D (ice categories) + !----------------------------------------------------------------- + + dimidcz(1) = imtid + dimidcz(2) = jmtid + dimidcz(3) = kmtidi + dimidcz(4) = cmtid + dimidcz(5) = timid + + do n = n3Dacum + 1, n4Dicum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + pio_real, dimidcz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + status = pio_put_att(File,varid,'missing_value',spval) + status = pio_put_att(File,varid,'_FillValue',spval) + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_4Di + + !----------------------------------------------------------------- + ! 
4D (snow layers) + !----------------------------------------------------------------- + + dimidcz(1) = imtid + dimidcz(2) = jmtid + dimidcz(3) = kmtids + dimidcz(4) = cmtid + dimidcz(5) = timid + + do n = n4Dicum + 1, n4Dscum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + pio_real, dimidcz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + status = pio_put_att(File,varid,'missing_value',spval) + status = pio_put_att(File,varid,'_FillValue',spval) + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_4Ds + + !----------------------------------------------------------------- + ! global attributes + !----------------------------------------------------------------- + ! ... the user should change these to something useful ... 
+ !----------------------------------------------------------------- +#ifdef CESMCOUPLED + status = pio_put_att(File,pio_global,'title',runid) +#else + title = 'sea ice model output for CICE' + status = pio_put_att(File,pio_global,'title',trim(title)) +#endif + title = 'Diagnostic and Prognostic Variables' + status = pio_put_att(File,pio_global,'contents',trim(title)) + + write(title,'(2a)') 'Los Alamos Sea Ice Model, ', trim(version_name) + status = pio_put_att(File,pio_global,'source',trim(title)) + + if (use_leap_years) then + write(title,'(a,i3,a)') 'This year has ',int(dayyr),' days' + else + write(title,'(a,i3,a)') 'All years have exactly ',int(dayyr),' days' + endif + status = pio_put_att(File,pio_global,'comment',trim(title)) + + write(title,'(a,i8.8)') 'File written on model date ',idate + status = pio_put_att(File,pio_global,'comment2',trim(title)) + + write(title,'(a,i6)') 'seconds elapsed into model date: ',sec + status = pio_put_att(File,pio_global,'comment3',trim(title)) + + title = 'CF-1.0' + status = & + pio_put_att(File,pio_global,'conventions',trim(title)) + + call date_and_time(date=current_date, time=current_time) + write(start_time,1000) current_date(1:4), current_date(5:6), & + current_date(7:8), current_time(1:2), & + current_time(3:4) +1000 format('This dataset was created on ', & + a,'-',a,'-',a,' at ',a,':',a) + status = pio_put_att(File,pio_global,'history',trim(start_time)) + + status = pio_put_att(File,pio_global,'io_flavor','io_pio') + + !----------------------------------------------------------------- + ! end define mode + !----------------------------------------------------------------- + + status = pio_enddef(File) + + !----------------------------------------------------------------- + ! 
write time variable + !----------------------------------------------------------------- + + status = pio_inq_varid(File,'time',varid) +!sgl status = pio_put_var(File,varid,(/1/),ltime) + status = pio_put_var(File,varid,(/1/),ltime2) + + !----------------------------------------------------------------- + ! write time_bounds info + !----------------------------------------------------------------- + + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_inq_varid(File,'time_bounds',varid) + time_bounds=(/time_beg(ns),time_end(ns)/) + bnd_start = (/1,1/) + bnd_length = (/2,1/) + status = pio_put_var(File,varid,ival=time_bounds, & + start=bnd_start(:),count=bnd_length(:)) + endif + + !----------------------------------------------------------------- + ! write coordinate variables + !----------------------------------------------------------------- + + allocate(workr2(nx_block,ny_block,nblocks)) + + do i = 1,ncoord + status = pio_inq_varid(File, coord_var(i)%short_name, varid) + SELECT CASE (coord_var(i)%short_name) + CASE ('TLON') + ! Convert T grid longitude from -180 -> 180 to 0 to 360 + workr2(:,:,:) = mod(tlon(:,:,1:nblocks)*rad_to_deg + c360, c360) + CASE ('TLAT') + workr2(:,:,:) = tlat(:,:,1:nblocks)*rad_to_deg + CASE ('ULON') + workr2(:,:,:) = ulon(:,:,1:nblocks)*rad_to_deg + CASE ('ULAT') + workr2(:,:,:) = ulat(:,:,1:nblocks)*rad_to_deg + END SELECT + call pio_write_darray(File, varid, iodesc2d, & + workr2, status, fillval=spval_dbl) + enddo + + ! 
Extra dimensions (NCAT, VGRD*) + + do i = 1, nvarz + if (igrdz(i)) then + status = pio_inq_varid(File, var_nz(i)%short_name, varid) + SELECT CASE (var_nz(i)%short_name) + CASE ('NCAT') + status = pio_put_var(File, varid, hin_max(1:ncat_hist)) + CASE ('VGRDi') + status = pio_put_var(File, varid, (/(k, k=1,nzilyr)/)) + CASE ('VGRDs') + status = pio_put_var(File, varid, (/(k, k=1,nzslyr)/)) + CASE ('VGRDb') + status = pio_put_var(File, varid, (/(k, k=1,nzblyr)/)) + CASE ('VGRDa') + status = pio_put_var(File, varid, (/(k, k=1,nzalyr)/)) + END SELECT + endif + enddo + + !----------------------------------------------------------------- + ! write grid masks, area and rotation angle + !----------------------------------------------------------------- + +! if (igrd(n_tmask)) then +! status = pio_inq_varid(File, 'tmask', varid) +! call pio_write_darray(File, varid, iodesc2d, & +! hm(:,:,1:nblocks), status, fillval=spval_dbl) +! endif +! if (igrd(n_blkmask)) then +! status = pio_inq_varid(File, 'blkmask', varid) +! call pio_write_darray(File, varid, iodesc2d, & +! bm(:,:,1:nblocks), status, fillval=spval_dbl) +! endif + + do i = 1, nvar ! 
note: n_tmask=1, n_blkmask=2 + if (igrd(i)) then + SELECT CASE (var(i)%req%short_name) + CASE ('tmask') + workr2 = hm(:,:,1:nblocks) + CASE ('blkmask') + workr2 = bm(:,:,1:nblocks) + CASE ('tarea') + workr2 = tarea(:,:,1:nblocks) + CASE ('uarea') + workr2 = uarea(:,:,1:nblocks) + CASE ('dxu') + workr2 = dxu(:,:,1:nblocks) + CASE ('dyu') + workr2 = dyu(:,:,1:nblocks) + CASE ('dxt') + workr2 = dxt(:,:,1:nblocks) + CASE ('dyt') + workr2 = dyt(:,:,1:nblocks) + CASE ('HTN') + workr2 = HTN(:,:,1:nblocks) + CASE ('HTE') + workr2 = HTE(:,:,1:nblocks) + CASE ('ANGLE') + workr2 = ANGLE(:,:,1:nblocks) + CASE ('ANGLET') + workr2 = ANGLET(:,:,1:nblocks) + END SELECT + status = pio_inq_varid(File, var(i)%req%short_name, varid) + call pio_write_darray(File, varid, iodesc2d, & + workr2, status, fillval=spval_dbl) + endif + enddo + + !---------------------------------------------------------------- + ! Write coordinates of grid box vertices + !---------------------------------------------------------------- + + if (f_bounds) then + allocate(workr3v(nverts,nx_block,ny_block,nblocks)) + workr3v (:,:,:,:) = c0 + do i = 1, nvar_verts + SELECT CASE (var_nverts(i)%short_name) + CASE ('lont_bounds') + do ivertex = 1, nverts + workr3v(ivertex,:,:,:) = lont_bounds(ivertex,:,:,1:nblocks) + enddo + CASE ('latt_bounds') + do ivertex = 1, nverts + workr3v(ivertex,:,:,:) = latt_bounds(ivertex,:,:,1:nblocks) + enddo + CASE ('lonu_bounds') + do ivertex = 1, nverts + workr3v(ivertex,:,:,:) = lonu_bounds(ivertex,:,:,1:nblocks) + enddo + CASE ('latu_bounds') + do ivertex = 1, nverts + workr3v(ivertex,:,:,:) = latu_bounds(ivertex,:,:,1:nblocks) + enddo + END SELECT + + status = pio_inq_varid(File, var_nverts(i)%short_name, varid) + call pio_write_darray(File, varid, iodesc3dv, & + workr3v, status, fillval=spval_dbl) + enddo + deallocate(workr3v) + endif ! f_bounds + + + !----------------------------------------------------------------- + ! 
write variable data + !----------------------------------------------------------------- + + ! 2D + do n=1,num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR getting varid for '//avail_hist_fields(n)%vname) + workr2(:,:,:) = a2D(:,:,n,1:nblocks) + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) + call pio_write_darray(File, varid, iodesc2d,& + workr2, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_2D + + deallocate(workr2) + + ! 3D (category) + allocate(workr3(nx_block,ny_block,nblocks,ncat_hist)) + do n = n2D + 1, n3Dccum + nn = n - n2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, ncat_hist + workr3(:,:,j,i) = a3Dc(:,:,i,nn,j) + enddo + enddo + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) + call pio_write_darray(File, varid, iodesc3dc,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Dc + deallocate(workr3) + + ! 3D (vertical ice) + allocate(workr3(nx_block,ny_block,nblocks,nzilyr)) + do n = n3Dccum+1, n3Dzcum + nn = n - n3Dccum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, nzilyr + workr3(:,:,j,i) = a3Dz(:,:,i,nn,j) + enddo + enddo + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) + call pio_write_darray(File, varid, iodesc3di,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Dz + deallocate(workr3) + + ! 
3D (vertical ice biology) + allocate(workr3(nx_block,ny_block,nblocks,nzblyr)) + do n = n3Dzcum+1, n3Dbcum + nn = n - n3Dzcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, nzblyr + workr3(:,:,j,i) = a3Db(:,:,i,nn,j) + enddo + enddo + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) + call pio_write_darray(File, varid, iodesc3db,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Db + deallocate(workr3) + + ! 3D (vertical snow biology) + allocate(workr3(nx_block,ny_block,nblocks,nzalyr)) + do n = n3Dbcum+1, n3Dacum + nn = n - n3Dbcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, nzalyr + workr3(:,:,j,i) = a3Da(:,:,i,nn,j) + enddo + enddo + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) + call pio_write_darray(File, varid, iodesc3da,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Db + deallocate(workr3) + + allocate(workr4(nx_block,ny_block,nblocks,ncat_hist,nzilyr)) + ! 4D (categories, vertical ice) + do n = n3Dacum+1, n4Dicum + nn = n - n3Dacum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, ncat_hist + do k = 1, nzilyr + workr4(:,:,j,i,k) = a4Di(:,:,k,i,nn,j) + enddo ! k + enddo ! i + enddo ! 
j + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) + call pio_write_darray(File, varid, iodesc4di,& + workr4, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_4Di + deallocate(workr4) + + allocate(workr4(nx_block,ny_block,nblocks,ncat_hist,nzslyr)) + ! 4D (categories, vertical ice) + do n = n4Dicum+1, n4Dscum + nn = n - n4Dicum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, ncat_hist + do k = 1, nzslyr + workr4(:,:,j,i,k) = a4Ds(:,:,k,i,nn,j) + enddo ! k + enddo ! i + enddo ! j + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) + call pio_write_darray(File, varid, iodesc4ds,& + workr4, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_4Di + deallocate(workr4) + +! similarly for num_avail_hist_fields_4Db (define workr4b, iodesc4db) + + + !----------------------------------------------------------------- + ! clean-up PIO descriptors + !----------------------------------------------------------------- + + call pio_freedecomp(File,iodesc2d) + call pio_freedecomp(File,iodesc3dv) + call pio_freedecomp(File,iodesc3dc) + call pio_freedecomp(File,iodesc3di) + call pio_freedecomp(File,iodesc3db) + call pio_freedecomp(File,iodesc3da) + call pio_freedecomp(File,iodesc4di) + call pio_freedecomp(File,iodesc4ds) + + !----------------------------------------------------------------- + ! 
close output dataset + !----------------------------------------------------------------- + + call pio_closefile(File) + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) + endif + +#endif + + end subroutine ice_write_hist + +!======================================================================= + + end module ice_history_write + +!======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 new file mode 100644 index 000000000..5fff64944 --- /dev/null +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 @@ -0,0 +1,365 @@ +!============================================================================ +! Writes netcdf files +! Created by Mariana Vertenstein, June 2009 + + module ice_pio + + use shr_kind_mod, only: r8 => shr_kind_r8, in=>shr_kind_in + use shr_kind_mod, only: cl => shr_kind_cl + use shr_sys_mod , only: shr_sys_flush + use ice_kinds_mod + use ice_blocks + use ice_broadcast + use ice_communicate + use ice_domain, only : nblocks, blocks_ice + use ice_domain_size + use ice_fileunits + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use pio + + implicit none + private + + interface ice_pio_initdecomp + module procedure ice_pio_initdecomp_2d + module procedure ice_pio_initdecomp_3d + module procedure ice_pio_initdecomp_4d + module procedure ice_pio_initdecomp_3d_inner + end interface + + public ice_pio_init + public ice_pio_initdecomp + + type(iosystem_desc_t), pointer, public :: ice_pio_subsystem + +!=============================================================================== + + contains + +!=============================================================================== + +! Initialize the io subsystem +! 2009-Feb-17 - J. 
Edwards - initial version + + subroutine ice_pio_init(mode, filename, File, clobber, cdf64) + + use shr_pio_mod, only: shr_pio_getiosys, shr_pio_getiotype + + implicit none + character(len=*) , intent(in), optional :: mode + character(len=*) , intent(in), optional :: filename + type(file_desc_t) , intent(inout), optional :: File + logical , intent(in), optional :: clobber + logical , intent(in), optional :: cdf64 + + ! local variables + + integer (int_kind) :: & + nml_error ! namelist read error flag + + integer :: pio_iotype + logical :: exists + logical :: lclobber + logical :: lcdf64 + integer :: status + integer :: nmode + character(len=*), parameter :: subname = '(ice_pio_init)' + logical, save :: first_call = .true. + + ice_pio_subsystem => shr_pio_getiosys(inst_name) + pio_iotype = shr_pio_getiotype(inst_name) + + if (present(mode) .and. present(filename) .and. present(File)) then + + if (trim(mode) == 'write') then + lclobber = .false. + if (present(clobber)) lclobber=clobber + + lcdf64 = .false. + if (present(cdf64)) lcdf64=cdf64 + + if (File%fh<0) then + ! filename not open + inquire(file=trim(filename),exist=exists) + if (exists) then + if (lclobber) then + nmode = pio_clobber + if (lcdf64) nmode = ior(nmode,PIO_64BIT_OFFSET) + status = pio_createfile(ice_pio_subsystem, File, pio_iotype, trim(filename), nmode) + if (my_task == master_task) then + write(nu_diag,*) subname,' create file ',trim(filename) + end if + else + nmode = pio_write + status = pio_openfile(ice_pio_subsystem, File, pio_iotype, trim(filename), nmode) + if (my_task == master_task) then + write(nu_diag,*) subname,' open file ',trim(filename) + end if + endif + else + nmode = pio_noclobber + if (lcdf64) nmode = ior(nmode,PIO_64BIT_OFFSET) + status = pio_createfile(ice_pio_subsystem, File, pio_iotype, trim(filename), nmode) + if (my_task == master_task) then + write(nu_diag,*) subname,' create file ',trim(filename) + end if + endif + else + ! 
filename is already open, just return + endif + end if + + if (trim(mode) == 'read') then + inquire(file=trim(filename),exist=exists) + if (exists) then + status = pio_openfile(ice_pio_subsystem, File, pio_iotype, trim(filename), pio_nowrite) + else + if(my_task==master_task) then + write(nu_diag,*) 'ice_pio_ropen ERROR: file invalid ',trim(filename) + end if + call abort_ice(subname//'ERROR: aborting with invalid file') + endif + end if + + end if + + end subroutine ice_pio_init + +!================================================================================ + + subroutine ice_pio_initdecomp_2d(iodesc) + + type(io_desc_t), intent(out) :: iodesc + + integer (kind=int_kind) :: & + iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k + + type(block) :: this_block + + integer(kind=int_kind), pointer :: dof2d(:) + character(len=*), parameter :: subname = '(ice_pio_initdecomp_2d)' + + allocate(dof2d(nx_block*ny_block*nblocks)) + + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j=1,ny_block + do i=1,nx_block + n = n+1 + if (j < jlo .or. j>jhi) then + dof2d(n) = 0 + else if (i < ilo .or. 
i > ihi) then + dof2d(n) = 0 + else + lon = this_block%i_glob(i) + lat = this_block%j_glob(j) + dof2d(n) = (lat-1)*nx_global + lon + endif + enddo !i + enddo !j + end do + + call pio_initdecomp(ice_pio_subsystem, pio_double, (/nx_global,ny_global/), & + dof2d, iodesc) + + deallocate(dof2d) + + end subroutine ice_pio_initdecomp_2d + +!================================================================================ + + subroutine ice_pio_initdecomp_3d (ndim3, iodesc, remap) + + integer(kind=int_kind), intent(in) :: ndim3 + type(io_desc_t), intent(out) :: iodesc + logical, optional :: remap + integer (kind=int_kind) :: & + iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k + + type(block) :: this_block + logical :: lremap + integer(kind=int_kind), pointer :: dof3d(:) + character(len=*), parameter :: subname = '(ice_pio_initdecomp_2d)' + + allocate(dof3d(nx_block*ny_block*nblocks*ndim3)) + lremap=.false. + if (present(remap)) lremap=remap + if (lremap) then + ! Reorder the ndim3 and nblocks loops to avoid a temporary array in restart read/write + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do k=1,ndim3 + do j=1,ny_block + do i=1,nx_block + n = n+1 + if (j < jlo .or. j>jhi) then + dof3d(n)=0 + else if (i < ilo .or. i > ihi) then + dof3d(n) = 0 + else + lon = this_block%i_glob(i) + lat = this_block%j_glob(j) + dof3d(n) = ((lat-1)*nx_global + lon) + (k-1)*nx_global*ny_global + endif + enddo !i + enddo !j + enddo !ndim3 + enddo ! iblk + else + n=0 + do k=1,ndim3 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j=1,ny_block + do i=1,nx_block + n = n+1 + if (j < jlo .or. j>jhi) then + dof3d(n)=0 + else if (i < ilo .or. 
i > ihi) then + dof3d(n) = 0 + else + lon = this_block%i_glob(i) + lat = this_block%j_glob(j) + dof3d(n) = ((lat-1)*nx_global + lon) + (k-1)*nx_global*ny_global + endif + enddo !i + enddo !j + enddo ! iblk + enddo !ndim3 + endif + + call pio_initdecomp(ice_pio_subsystem, pio_double, (/nx_global,ny_global,ndim3/), & + dof3d, iodesc) + + deallocate(dof3d) + + end subroutine ice_pio_initdecomp_3d + +!================================================================================ + + subroutine ice_pio_initdecomp_3d_inner(ndim3, inner_dim, iodesc) + + integer(kind=int_kind), intent(in) :: ndim3 + logical, intent(in) :: inner_dim + type(io_desc_t), intent(out) :: iodesc + + integer (kind=int_kind) :: & + iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k + + type(block) :: this_block + + integer(kind=int_kind), pointer :: dof3d(:) + + character(len=*), parameter :: subname = '(ice_pio_initdecomp_3d_inner)' + + allocate(dof3d(nx_block*ny_block*nblocks*ndim3)) + + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j=1,ny_block + do i=1,nx_block + do k=1,ndim3 + n = n+1 + if (j < jlo .or. j>jhi) then + dof3d(n) = 0 + else if (i < ilo .or. 
i > ihi) then + dof3d(n) = 0 + else + lon = this_block%i_glob(i) + lat = this_block%j_glob(j) + dof3d(n) = k + ((lon-1) + (lat-1)*nx_global)*ndim3 + endif + end do !ndim3 + enddo !i + enddo !j + end do !iblk + + call pio_initdecomp(ice_pio_subsystem, pio_double, (/ndim3,nx_global,ny_global/), & + dof3d, iodesc) + + deallocate(dof3d) + + end subroutine ice_pio_initdecomp_3d_inner + +!================================================================================ + + subroutine ice_pio_initdecomp_4d (ndim3, ndim4, iodesc) + + integer(kind=int_kind), intent(in) :: ndim3, ndim4 + type(io_desc_t), intent(out) :: iodesc + + integer (kind=int_kind) :: & + iblk,ilo,ihi,jlo,jhi,lon,lat,i,j,n,k,l + + type(block) :: this_block + + integer(kind=int_kind), pointer :: dof4d(:) + + character(len=*), parameter :: subname = '(ice_pio_initdecomp_4d)' + + allocate(dof4d(nx_block*ny_block*nblocks*ndim3*ndim4)) + + n=0 + do l=1,ndim4 + do k=1,ndim3 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j=1,ny_block + do i=1,nx_block + n = n+1 + if (j < jlo .or. j>jhi) then + dof4d(n)=0 + else if (i < ilo .or. i > ihi) then + dof4d(n) = 0 + else + lon = this_block%i_glob(i) + lat = this_block%j_glob(j) + dof4d(n) = ((lat-1)*nx_global + lon) & + + (k-1)*nx_global*ny_global & + + (l-1)*nx_global*ny_global*ndim3 + endif + enddo !i + enddo !j + enddo ! 
iblk + enddo !ndim3 + enddo !ndim4 + + call pio_initdecomp(ice_pio_subsystem, pio_double, & + (/nx_global,ny_global,ndim3,ndim4/), dof4d, iodesc) + + deallocate(dof4d) + + end subroutine ice_pio_initdecomp_4d + +!================================================================================ + + end module ice_pio + +!================================================================================ diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 new file mode 100644 index 000000000..f00501bfc --- /dev/null +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -0,0 +1,876 @@ +!======================================================================= +! +! Read and write ice model restart files using pio interfaces. +! authors David A Bailey, NCAR + + module ice_restart + + use ice_broadcast + use ice_exit, only: abort_ice + use ice_fileunits, only: nu_diag, nu_restart, nu_rst_pointer + use ice_kinds_mod + use ice_restart_shared, only: & + restart, restart_ext, restart_dir, restart_file, pointer_file, & + runid, runtype, use_restart_time, restart_format, lcdf64, lenstr + use ice_pio + use pio + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_indices + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_numbers + + implicit none + private + public :: init_restart_write, init_restart_read, & + read_restart_field, write_restart_field, final_restart + + type(file_desc_t) :: File + type(var_desc_t) :: vardesc + + type(io_desc_t) :: iodesc2d + type(io_desc_t) :: iodesc3d_ncat + +!======================================================================= + + contains + +!======================================================================= + +! Sets up restart file for reading. +! 
author David A Bailey, NCAR + + subroutine init_restart_read(ice_ic) + + use ice_calendar, only: istep0, istep1, time, time_forc, nyr, month, & + mday, sec, npt + use ice_communicate, only: my_task, master_task + use ice_domain_size, only: ncat + use ice_read_write, only: ice_open + + character(len=char_len_long), intent(in), optional :: ice_ic + + ! local variables + + character(len=char_len_long) :: & + filename, filename0 + + integer (kind=int_kind) :: status + + character(len=*), parameter :: subname = '(init_restart_read)' + + if (present(ice_ic)) then + filename = trim(ice_ic) + else + if (my_task == master_task) then + open(nu_rst_pointer,file=pointer_file) + read(nu_rst_pointer,'(a)') filename0 + filename = trim(filename0) + close(nu_rst_pointer) + write(nu_diag,*) 'Read ',pointer_file(1:lenstr(pointer_file)) + endif + call broadcast_scalar(filename, master_task) + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Using restart dump=', trim(filename) + end if + + if (restart_format == 'pio') then + File%fh=-1 + call ice_pio_init(mode='read', filename=trim(filename), File=File) + + call ice_pio_initdecomp(iodesc=iodesc2d) + call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat,remap=.true.) + + if (use_restart_time) then + status = pio_get_att(File, pio_global, 'istep1', istep0) + status = pio_get_att(File, pio_global, 'time', time) + status = pio_get_att(File, pio_global, 'time_forc', time_forc) + call pio_seterrorhandling(File, PIO_BCAST_ERROR) + status = pio_get_att(File, pio_global, 'nyr', nyr) + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) + if (status == PIO_noerr) then + status = pio_get_att(File, pio_global, 'month', month) + status = pio_get_att(File, pio_global, 'mday', mday) + status = pio_get_att(File, pio_global, 'sec', sec) + endif + endif ! 
use namelist values if use_restart_time = F + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc + endif + + call broadcast_scalar(istep0,master_task) + call broadcast_scalar(time,master_task) + call broadcast_scalar(time_forc,master_task) + call broadcast_scalar(nyr,master_task) + + istep1 = istep0 + + ! if runid is bering then need to correct npt for istep0 + if (trim(runid) == 'bering') then + npt = npt - istep0 + endif + + end subroutine init_restart_read + +!======================================================================= + +! Sets up restart file for writing. +! author David A Bailey, NCAR + + subroutine init_restart_write(filename_spec) + + use ice_calendar, only: sec, month, mday, nyr, istep1, & + time, time_forc, year_init + use ice_communicate, only: my_task, master_task + use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, nslyr, & + n_aero, nblyr, n_zaero, n_algae, n_doc, & + n_dic, n_don, n_fed, n_fep + use ice_dyn_shared, only: kdyn + use ice_arrays_column, only: oceanmixed_ice + + logical (kind=log_kind) :: & + solve_zsal, skl_bgc, z_tracers + + logical (kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_aero, tr_pond_cesm, & + tr_pond_topo, tr_pond_lvl, tr_brine, & + tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & + tr_bgc_Sil, tr_bgc_DMS, & + tr_bgc_chl, tr_bgc_Am, & + tr_bgc_PON, tr_bgc_DON, & + tr_zaero, tr_bgc_Fe, & + tr_bgc_hum + + integer (kind=int_kind) :: & + nbtrcr + + character(len=char_len_long), intent(in), optional :: filename_spec + + ! local variables + + integer (kind=int_kind) :: & + iyear, imonth, iday ! year, month, day + + character(len=char_len_long) :: filename + + integer (kind=int_kind) :: dimid_ni, dimid_nj, dimid_ncat, & + dimid_nilyr, dimid_nslyr, dimid_naero + + integer (kind=int_kind), allocatable :: dims(:) + + integer (kind=int_kind) :: & + k, n, & ! loop index + status ! 
status variable from netCDF routine + + character (len=3) :: nchar, ncharb + + character(len=*), parameter :: subname = '(init_restart_write)' + + call icepack_query_tracer_numbers(nbtrcr_out=nbtrcr) + call icepack_query_tracer_flags( & + tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & + tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & + tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & + tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & + tr_bgc_chl_out=tr_bgc_chl, tr_bgc_Am_out=tr_bgc_Am, & + tr_bgc_PON_out=tr_bgc_PON, tr_bgc_DON_out=tr_bgc_DON, & + tr_zaero_out=tr_zaero, tr_bgc_Fe_out=tr_bgc_Fe, & + tr_bgc_hum_out=tr_bgc_hum) + call icepack_query_parameters(solve_zsal_out=solve_zsal, skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! construct path/file + if (present(filename_spec)) then + filename = trim(filename_spec) + else + iyear = nyr + year_init - 1 + imonth = month + iday = mday + + write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & + restart_dir(1:lenstr(restart_dir)), & + restart_file(1:lenstr(restart_file)),'.', & + iyear,'-',month,'-',mday,'-',sec + end if + + if (restart_format /= 'bin') filename = trim(filename) // '.nc' + + ! 
write pointer (path/file) + if (my_task == master_task) then + open(nu_rst_pointer,file=pointer_file) + write(nu_rst_pointer,'(a)') filename + close(nu_rst_pointer) + endif + + if (restart_format == 'pio') then + + File%fh=-1 + call ice_pio_init(mode='write',filename=trim(filename), File=File, & + clobber=.true., cdf64=lcdf64 ) + + status = pio_put_att(File,pio_global,'istep1',istep1) + status = pio_put_att(File,pio_global,'time',time) + status = pio_put_att(File,pio_global,'time_forc',time_forc) + status = pio_put_att(File,pio_global,'nyr',nyr) + status = pio_put_att(File,pio_global,'month',month) + status = pio_put_att(File,pio_global,'mday',mday) + status = pio_put_att(File,pio_global,'sec',sec) + + status = pio_def_dim(File,'ni',nx_global,dimid_ni) + status = pio_def_dim(File,'nj',ny_global,dimid_nj) + status = pio_def_dim(File,'ncat',ncat,dimid_ncat) + + !----------------------------------------------------------------- + ! 2D restart fields + !----------------------------------------------------------------- + + allocate(dims(2)) + + dims(1) = dimid_ni + dims(2) = dimid_nj + + call define_rest_field(File,'uvel',dims) + call define_rest_field(File,'vvel',dims) + +#ifdef CESMCOUPLED + call define_rest_field(File,'coszen',dims) +#endif + call define_rest_field(File,'scale_factor',dims) + call define_rest_field(File,'swvdr',dims) + call define_rest_field(File,'swvdf',dims) + call define_rest_field(File,'swidr',dims) + call define_rest_field(File,'swidf',dims) + + call define_rest_field(File,'strocnxT',dims) + call define_rest_field(File,'strocnyT',dims) + + call define_rest_field(File,'stressp_1',dims) + call define_rest_field(File,'stressp_2',dims) + call define_rest_field(File,'stressp_3',dims) + call define_rest_field(File,'stressp_4',dims) + + call define_rest_field(File,'stressm_1',dims) + call define_rest_field(File,'stressm_2',dims) + call define_rest_field(File,'stressm_3',dims) + call define_rest_field(File,'stressm_4',dims) + + call 
define_rest_field(File,'stress12_1',dims) + call define_rest_field(File,'stress12_2',dims) + call define_rest_field(File,'stress12_3',dims) + call define_rest_field(File,'stress12_4',dims) + + call define_rest_field(File,'iceumask',dims) + + if (oceanmixed_ice) then + call define_rest_field(File,'sst',dims) + call define_rest_field(File,'frzmlt',dims) + endif + + if (tr_FY) then + call define_rest_field(File,'frz_onset',dims) + end if + + if (kdyn == 2) then + call define_rest_field(File,'a11_1',dims) + call define_rest_field(File,'a11_2',dims) + call define_rest_field(File,'a11_3',dims) + call define_rest_field(File,'a11_4',dims) + call define_rest_field(File,'a12_1',dims) + call define_rest_field(File,'a12_2',dims) + call define_rest_field(File,'a12_3',dims) + call define_rest_field(File,'a12_4',dims) + endif + + if (tr_pond_lvl) then + call define_rest_field(File,'fsnow',dims) + endif + + if (nbtrcr > 0) then + if (tr_bgc_N) then + do k=1,n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'algalN'//trim(nchar),dims) + enddo + endif + if (tr_bgc_C) then + do k=1,n_doc + write(nchar,'(i3.3)') k + call define_rest_field(File,'doc'//trim(nchar),dims) + enddo + do k=1,n_dic + write(nchar,'(i3.3)') k + call define_rest_field(File,'dic'//trim(nchar),dims) + enddo + endif + call define_rest_field(File,'nit' ,dims) + if (tr_bgc_Am) & + call define_rest_field(File,'amm' ,dims) + if (tr_bgc_Sil) & + call define_rest_field(File,'sil' ,dims) + if (tr_bgc_hum) & + call define_rest_field(File,'hum' ,dims) + if (tr_bgc_DMS) then + call define_rest_field(File,'dmsp' ,dims) + call define_rest_field(File,'dms' ,dims) + endif + if (tr_bgc_DON) then + do k=1,n_don + write(nchar,'(i3.3)') k + call define_rest_field(File,'don'//trim(nchar),dims) + enddo + endif + if (tr_bgc_Fe ) then + do k=1,n_fed + write(nchar,'(i3.3)') k + call define_rest_field(File,'fed'//trim(nchar),dims) + enddo + do k=1,n_fep + write(nchar,'(i3.3)') k + call 
define_rest_field(File,'fep'//trim(nchar),dims) + enddo + endif + if (tr_zaero) then + do k=1,n_zaero + write(nchar,'(i3.3)') k + call define_rest_field(File,'zaeros'//trim(nchar),dims) + enddo + endif + endif !nbtrcr + + if (solve_zsal) call define_rest_field(File,'sss',dims) + + deallocate(dims) + + !----------------------------------------------------------------- + ! 3D restart fields (ncat) + !----------------------------------------------------------------- + + allocate(dims(3)) + + dims(1) = dimid_ni + dims(2) = dimid_nj + dims(3) = dimid_ncat + + call define_rest_field(File,'aicen',dims) + call define_rest_field(File,'vicen',dims) + call define_rest_field(File,'vsnon',dims) + call define_rest_field(File,'Tsfcn',dims) + + if (tr_iage) then + call define_rest_field(File,'iage',dims) + end if + + if (tr_FY) then + call define_rest_field(File,'FY',dims) + end if + + if (tr_lvl) then + call define_rest_field(File,'alvl',dims) + call define_rest_field(File,'vlvl',dims) + end if + + if (tr_pond_cesm) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + end if + + if (tr_pond_topo) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + call define_rest_field(File,'ipnd',dims) + end if + + if (tr_pond_lvl) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + call define_rest_field(File,'ipnd',dims) + call define_rest_field(File,'dhs',dims) + call define_rest_field(File,'ffrac',dims) + end if + + if (tr_brine) then + call define_rest_field(File,'fbrn',dims) + call define_rest_field(File,'first_ice',dims) + endif + + if (skl_bgc) then + do k = 1, n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_N'//trim(nchar) ,dims) + enddo + if (tr_bgc_C) then + ! do k = 1, n_algae + ! write(nchar,'(i3.3)') k + ! call define_rest_field(File,'bgc_C'//trim(nchar) ,dims) + ! 
enddo + do k = 1, n_doc + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DOC'//trim(nchar) ,dims) + enddo + do k = 1, n_dic + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DIC'//trim(nchar) ,dims) + enddo + endif + if (tr_bgc_chl) then + do k = 1, n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_chl'//trim(nchar) ,dims) + enddo + endif + call define_rest_field(File,'bgc_Nit' ,dims) + if (tr_bgc_Am) & + call define_rest_field(File,'bgc_Am' ,dims) + if (tr_bgc_Sil) & + call define_rest_field(File,'bgc_Sil' ,dims) + if (tr_bgc_hum) & + call define_rest_field(File,'bgc_hum' ,dims) + if (tr_bgc_DMS) then + call define_rest_field(File,'bgc_DMSPp',dims) + call define_rest_field(File,'bgc_DMSPd',dims) + call define_rest_field(File,'bgc_DMS' ,dims) + endif + if (tr_bgc_PON) & + call define_rest_field(File,'bgc_PON' ,dims) + if (tr_bgc_DON) then + do k = 1, n_don + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DON'//trim(nchar) ,dims) + enddo + endif + if (tr_bgc_Fe ) then + do k = 1, n_fed + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fed'//trim(nchar) ,dims) + enddo + do k = 1, n_fep + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fep'//trim(nchar) ,dims) + enddo + endif + endif !skl_bgc + if (solve_zsal) & + call define_rest_field(File,'Rayleigh',dims) + + !----------------------------------------------------------------- + ! 
4D restart fields, written as layers of 3D + !----------------------------------------------------------------- + + do k=1,nilyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'sice'//trim(nchar),dims) + call define_rest_field(File,'qice'//trim(nchar),dims) + enddo + + do k=1,nslyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'qsno'//trim(nchar),dims) + enddo + + if (tr_aero) then + do k=1,n_aero + write(nchar,'(i3.3)') k + call define_rest_field(File,'aerosnossl'//nchar, dims) + call define_rest_field(File,'aerosnoint'//nchar, dims) + call define_rest_field(File,'aeroicessl'//nchar, dims) + call define_rest_field(File,'aeroiceint'//nchar, dims) + enddo + endif + + if (solve_zsal) then + do k = 1, nblyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'zSalinity'//trim(nchar),dims) + enddo + endif + if (z_tracers) then + if (tr_zaero) then + do n = 1, n_zaero + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'zaero'//trim(ncharb)//trim(nchar),dims) + enddo !k + enddo !n + endif !tr_zaero + if (tr_bgc_Nit) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Nit'//trim(nchar),dims) + enddo + endif + if (tr_bgc_N) then + do n = 1, n_algae + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_N'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_C) then + ! do n = 1, n_algae + ! write(ncharb,'(i3.3)') n + ! do k = 1, nblyr+3 + ! write(nchar,'(i3.3)') k + ! call + ! define_rest_field(File,'bgc_C'//trim(ncharb)//trim(nchar),dims) + ! enddo + ! 
enddo + do n = 1, n_doc + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DOC'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + do n = 1, n_dic + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DIC'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_chl) then + do n = 1, n_algae + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_chl'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_Am) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Am'//trim(nchar),dims) + enddo + endif + if (tr_bgc_Sil) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Sil'//trim(nchar),dims) + enddo + endif + if (tr_bgc_hum) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_hum'//trim(nchar),dims) + enddo + endif + if (tr_bgc_DMS) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DMSPp'//trim(nchar),dims) + call define_rest_field(File,'bgc_DMSPd'//trim(nchar),dims) + call define_rest_field(File,'bgc_DMS'//trim(nchar),dims) + enddo + endif + if (tr_bgc_PON) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_PON'//trim(nchar),dims) + enddo + endif + if (tr_bgc_DON) then + do n = 1, n_don + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DON'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_Fe ) then + do n = 1, n_fed + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fed'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + do n = 1, n_fep + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call 
define_rest_field(File,'bgc_Fep'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + do k = 1, nbtrcr + write(nchar,'(i3.3)') k + call define_rest_field(File,'zbgc_frac'//trim(nchar),dims) + enddo + endif !z_tracers + + deallocate(dims) + status = pio_enddef(File) + + call ice_pio_initdecomp(iodesc=iodesc2d) + call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat, remap=.true.) + + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) + endif + + end subroutine init_restart_write + +!======================================================================= + +! Reads a single restart field +! author David A Bailey, NCAR + + subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & + field_loc, field_type) + + use ice_blocks, only: nx_block, ny_block + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, field_loc_center + use ice_boundary, only: ice_HaloUpdate + use ice_domain, only: halo_info, distrb_info, nblocks + use ice_domain_size, only: max_blocks, ncat + use ice_global_reductions, only: global_minval, global_maxval, global_sum + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number (not used for netcdf) + ndim3 , & ! third dimension + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,ndim3,max_blocks), intent(inout) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (len=*), intent(in) :: vname + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + ! local variables + + integer (kind=int_kind) :: & + j, & ! dimension counter + n, & ! number of dimensions for variable + ndims, & ! 
number of variable dimensions + status ! status variable from netCDF routine + + real (kind=dbl_kind) :: amin,amax,asum + + character(len=*), parameter :: subname = '(read_restart_field)' + + if (restart_format == "pio") then + if (my_task == master_task) & + write(nu_diag,*)'Parallel restart file read: ',vname + + call pio_seterrorhandling(File, PIO_BCAST_ERROR) + + status = pio_inq_varid(File,trim(vname),vardesc) + + if (status /= 0) then + call abort_ice(subname//"ERROR: CICE restart? Missing variable: "//trim(vname)) + endif + + status = pio_inq_varndims(File, vardesc, ndims) + + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) + +! if (ndim3 == ncat .and. ncat>1) then + if (ndim3 == ncat .and. ndims == 3) then + call pio_read_darray(File, vardesc, iodesc3d_ncat, work, status) + if (present(field_loc)) then + do n=1,ndim3 + call ice_HaloUpdate (work(:,:,n,:), halo_info, & + field_loc, field_type) + enddo + endif +! elseif (ndim3 == 1) then + elseif (ndim3 == 1 .and. ndims == 2) then + call pio_read_darray(File, vardesc, iodesc2d, work, status) + if (present(field_loc)) then + call ice_HaloUpdate (work(:,:,1,:), halo_info, & + field_loc, field_type) + endif + else + write(nu_diag,*) "ndim3 not supported ",ndim3 + endif + + if (diag) then + if (ndim3 > 1) then + do n=1,ndim3 + amin = global_minval(work(:,:,n,:),distrb_info) + amax = global_maxval(work(:,:,n,:),distrb_info) + asum = global_sum(work(:,:,n,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + enddo + else + amin = global_minval(work(:,:,1,:),distrb_info) + amax = global_maxval(work(:,:,1,:),distrb_info) + asum = global_sum(work(:,:,1,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + write(nu_diag,*) '' + endif + endif + + endif + else + call abort_ice(subname//"ERROR: Invalid 
restart_format: "//trim(restart_format)) + endif + + end subroutine read_restart_field + +!======================================================================= + +! Writes a single restart field. +! author David A Bailey, NCAR + + subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) + + use ice_blocks, only: nx_block, ny_block + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, field_loc_center + use ice_domain, only: distrb_info, nblocks + use ice_domain_size, only: max_blocks, ncat + use ice_global_reductions, only: global_minval, global_maxval, global_sum + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + ndim3 , & ! third dimension + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,ndim3,max_blocks), intent(in) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (len=*), intent(in) :: vname + + ! local variables + + integer (kind=int_kind) :: & + j, & ! dimension counter + n, & ! dimension counter + ndims, & ! number of variable dimensions + status ! 
status variable from netCDF routine + + real (kind=dbl_kind) :: amin,amax,asum + + character(len=*), parameter :: subname = '(write_restart_field)' + + if (restart_format == "pio") then + if (my_task == master_task) & + write(nu_diag,*)'Parallel restart file write: ',vname + + status = pio_inq_varid(File,trim(vname),vardesc) + + status = pio_inq_varndims(File, vardesc, ndims) + + if (ndims==3) then + call pio_write_darray(File, vardesc, iodesc3d_ncat,work(:,:,:,1:nblocks), & + status, fillval=c0) + elseif (ndims == 2) then + call pio_write_darray(File, vardesc, iodesc2d, work(:,:,1,1:nblocks), & + status, fillval=c0) + else + write(nu_diag,*) "ndims not supported",ndims,ndim3 + endif + + if (diag) then + if (ndim3 > 1) then + do n=1,ndim3 + amin = global_minval(work(:,:,n,:),distrb_info) + amax = global_maxval(work(:,:,n,:),distrb_info) + asum = global_sum(work(:,:,n,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + enddo + else + amin = global_minval(work(:,:,1,:),distrb_info) + amax = global_maxval(work(:,:,1,:),distrb_info) + asum = global_sum(work(:,:,1,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + endif + endif + else + call abort_ice(subname//"ERROR: Invalid restart_format: "//trim(restart_format)) + endif + + end subroutine write_restart_field + +!======================================================================= + +! Finalize the restart file. +! 
author David A Bailey, NCAR + + subroutine final_restart() + + use ice_calendar, only: istep1, time, time_forc + use ice_communicate, only: my_task, master_task + + character(len=*), parameter :: subname = '(final_restart)' + + if (restart_format == 'pio') then + call PIO_freeDecomp(File,iodesc2d) + call PIO_freeDecomp(File,iodesc3d_ncat) + call pio_closefile(File) + endif + + if (my_task == master_task) & + write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc + + end subroutine final_restart + +!======================================================================= + +! Defines a restart field +! author David A Bailey, NCAR + + subroutine define_rest_field(File, vname, dims) + + type(file_desc_t) , intent(in) :: File + character (len=*) , intent(in) :: vname + integer (kind=int_kind), intent(in) :: dims(:) + + integer (kind=int_kind) :: & + status ! status variable from netCDF routine + + character(len=*), parameter :: subname = '(define_rest_field)' + + status = pio_def_var(File,trim(vname),pio_double,dims,vardesc) + + end subroutine define_rest_field + +!======================================================================= + + end module ice_restart + +!======================================================================= diff --git a/cicecore/drivers/hadgem3/CICE.F90 b/cicecore/drivers/direct/hadgem3/CICE.F90 similarity index 100% rename from cicecore/drivers/hadgem3/CICE.F90 rename to cicecore/drivers/direct/hadgem3/CICE.F90 diff --git a/cicecore/drivers/hadgem3/CICE_FinalMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 similarity index 100% rename from cicecore/drivers/hadgem3/CICE_FinalMod.F90 rename to cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 diff --git a/cicecore/drivers/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 similarity index 100% rename from cicecore/drivers/hadgem3/CICE_InitMod.F90 rename to cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 diff --git 
a/cicecore/drivers/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 similarity index 100% rename from cicecore/drivers/hadgem3/CICE_RunMod.F90 rename to cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 diff --git a/cicecore/drivers/cesm/CICE_FinalMod.F90 b/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 similarity index 100% rename from cicecore/drivers/cesm/CICE_FinalMod.F90 rename to cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 diff --git a/cicecore/drivers/cesm/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 similarity index 100% rename from cicecore/drivers/cesm/CICE_InitMod.F90 rename to cicecore/drivers/mct/cesm1/CICE_InitMod.F90 diff --git a/cicecore/drivers/cesm/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 similarity index 100% rename from cicecore/drivers/cesm/CICE_RunMod.F90 rename to cicecore/drivers/mct/cesm1/CICE_RunMod.F90 diff --git a/cicecore/drivers/cesm/CICE_RunMod.F90_debug b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90_debug similarity index 100% rename from cicecore/drivers/cesm/CICE_RunMod.F90_debug rename to cicecore/drivers/mct/cesm1/CICE_RunMod.F90_debug diff --git a/cicecore/drivers/cesm/CICE_copyright.txt b/cicecore/drivers/mct/cesm1/CICE_copyright.txt similarity index 100% rename from cicecore/drivers/cesm/CICE_copyright.txt rename to cicecore/drivers/mct/cesm1/CICE_copyright.txt diff --git a/cicecore/drivers/cesm/ice_comp_esmf.F90 b/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 similarity index 100% rename from cicecore/drivers/cesm/ice_comp_esmf.F90 rename to cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 diff --git a/cicecore/drivers/cesm/ice_comp_mct.F90 b/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 similarity index 100% rename from cicecore/drivers/cesm/ice_comp_mct.F90 rename to cicecore/drivers/mct/cesm1/ice_comp_mct.F90 diff --git a/cicecore/drivers/cesm/ice_cpl_indices.F90 b/cicecore/drivers/mct/cesm1/ice_cpl_indices.F90 similarity index 100% rename from 
cicecore/drivers/cesm/ice_cpl_indices.F90 rename to cicecore/drivers/mct/cesm1/ice_cpl_indices.F90 diff --git a/cicecore/drivers/cesm/ice_import_export.F90 b/cicecore/drivers/mct/cesm1/ice_import_export.F90 similarity index 100% rename from cicecore/drivers/cesm/ice_import_export.F90 rename to cicecore/drivers/mct/cesm1/ice_import_export.F90 diff --git a/cicecore/drivers/cesm/ice_prescribed_mod.F90 b/cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 similarity index 100% rename from cicecore/drivers/cesm/ice_prescribed_mod.F90 rename to cicecore/drivers/mct/cesm1/ice_prescribed_mod.F90 diff --git a/cicecore/drivers/cesm/ice_scam.F90 b/cicecore/drivers/mct/cesm1/ice_scam.F90 similarity index 100% rename from cicecore/drivers/cesm/ice_scam.F90 rename to cicecore/drivers/mct/cesm1/ice_scam.F90 diff --git a/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 new file mode 100644 index 000000000..c2331e4e5 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 @@ -0,0 +1,93 @@ +!======================================================================= +! +! This module contains routines for the final exit of the CICE model, +! including final output and clean exit from any message passing +! environments and frameworks. +! +! authors: Philip W. Jones, LANL +! 2006: Converted to free source form (F90) by Elizabeth Hunke +! 2008: E. Hunke moved ESMF code to its own driver + + module CICE_FinalMod + + use ice_kinds_mod + use ice_communicate, only: my_task, master_task + use ice_exit, only: end_run, abort_ice + use ice_fileunits, only: nu_diag, release_all_fileunits + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + + implicit none + private + public :: CICE_Finalize + +!======================================================================= + + contains + +!======================================================================= +! +! 
This routine shuts down CICE by exiting all relevent environments. + + subroutine CICE_Finalize + + use ice_restart_shared, only: runid + use ice_timers, only: ice_timer_stop, ice_timer_print_all, timer_total + + character(len=*), parameter :: subname = '(CICE_Finalize)' + + !------------------------------------------------------------------- + ! stop timers and print timer info + !------------------------------------------------------------------- + + call ice_timer_stop(timer_total) ! stop timing entire run + call ice_timer_print_all(stats=.false.) ! print timing information + +!echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output + call release_all_fileunits + + !------------------------------------------------------------------- + ! write 'finished' file if needed + !------------------------------------------------------------------- + + if (runid == 'bering') call writeout_finished_file() + + !------------------------------------------------------------------- + ! quit MPI + !------------------------------------------------------------------- + +#ifndef coupled + call end_run ! quit MPI +#endif + + end subroutine CICE_Finalize + +!======================================================================= +! +! Write a file indicating that this run finished cleanly. This is +! needed only for runs on machine 'bering' (set using runid = 'bering'). +! +! 
author: Adrian Turner, LANL + + subroutine writeout_finished_file() + + use ice_restart_shared, only: restart_dir + + character(len=char_len_long) :: filename + character(len=*), parameter :: subname = '(writeout_finished_file)' + + if (my_task == master_task) then + + filename = trim(restart_dir)//"finished" + open(11,file=filename) + write(11,*) "finished" + close(11) + + endif + + end subroutine writeout_finished_file + +!======================================================================= + + end module CICE_FinalMod + +!======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 new file mode 100644 index 000000000..56d30cb72 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -0,0 +1,432 @@ +!======================================================================= +! +! This module contains the CICE initialization routine that sets model +! parameters and initializes the grid and CICE state variables. +! +! authors Elizabeth C. Hunke, LANL +! William H. Lipscomb, LANL +! Philip W. Jones, LANL +! +! 2006: Converted to free form source (F90) by Elizabeth Hunke +! 2008: E. 
Hunke moved ESMF code to its own driver + + module CICE_InitMod + + use ice_kinds_mod + use ice_exit, only: abort_ice + use ice_fileunits, only: init_fileunits, nu_diag + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist + use icepack_intfc, only: icepack_configure + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & + icepack_query_tracer_indices, icepack_query_tracer_numbers + + implicit none + private + public :: CICE_Initialize, cice_init + +!======================================================================= + + contains + +!======================================================================= + +! Initialize the basic state, grid and all necessary parameters for +! running the CICE model. Return the initial state in routine +! export state. +! Note: This initialization driver is designed for standalone and +! CESM-coupled applications. For other +! applications (e.g., standalone CAM), this driver would be +! replaced by a different driver that calls subroutine cice_init, +! where most of the work is done. + + subroutine CICE_Initialize + + character(len=*), parameter :: subname='(CICE_Initialize)' + + !-------------------------------------------------------------------- + ! model initialization + !-------------------------------------------------------------------- + + call cice_init + + end subroutine CICE_Initialize + +!======================================================================= +! +! Initialize CICE model. 
+ + subroutine cice_init(mpicom_ice) + + use ice_arrays_column, only: hin_max, c_hi_range, alloc_arrays_column + use ice_state, only: alloc_state + use ice_flux_bgc, only: alloc_flux_bgc + use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & + init_calendar, calendar + use ice_communicate, only: init_communicate, my_task, master_task + use ice_diagnostics, only: init_diags + use ice_domain, only: init_domain_blocks + use ice_domain_size, only: ncat + use ice_dyn_eap, only: init_eap, alloc_dyn_eap + use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared + use ice_flux, only: init_coupler_flux, init_history_therm, & + init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux + use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & + get_forcing_atmo, get_forcing_ocn, alloc_forcing + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & + faero_default, faero_optics, alloc_forcing_bgc + use ice_grid, only: init_grid1, init_grid2, alloc_grid + use ice_history, only: init_hist, accum_hist + use ice_restart_shared, only: restart, runid, runtype + use ice_init, only: input_data, init_state + use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers + use ice_kinds_mod + use ice_restoring, only: ice_HaloRestore_init + use ice_timers, only: timer_total, init_ice_timers, ice_timer_start + use ice_transport_driver, only: init_transport +#ifdef popcice + use drv_forcing, only: sst_sss +#endif + + integer (kind=int_kind), optional, intent(in) :: & + mpicom_ice ! communicator for sequential ccsm + + logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers + character(len=*), parameter :: subname = '(cice_init)' + + call init_communicate(mpicom_ice) ! initial setup for message passing + call init_fileunits ! unit numbers + + call icepack_configure() ! 
initialize icepack + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + call input_data ! namelist variables + call input_zbgc ! vertical biogeochemistry namelist + call count_tracers ! count tracers + + call init_domain_blocks ! set up block decomposition + call init_grid1 ! domain distribution + call alloc_grid ! allocate grid arrays + call alloc_arrays_column ! allocate column arrays + call alloc_state ! allocate state arrays + call alloc_dyn_shared ! allocate dyn shared arrays + call alloc_flux_bgc ! allocate flux_bgc arrays + call alloc_flux ! allocate flux arrays + call init_ice_timers ! initialize all timers + call ice_timer_start(timer_total) ! start timing entire run + call init_grid2 ! grid variables + call init_zbgc ! vertical biogeochemistry initialization + + call init_calendar ! initialize some calendar stuff + call init_hist (dt) ! initialize output history file + + if (kdyn == 2) then + call alloc_dyn_eap ! allocate dyn_eap arrays + call init_eap (dt_dyn) ! define eap dynamics parameters, variables + else ! for both kdyn = 0 or 1 + call init_evp (dt_dyn) ! define evp dynamics parameters, variables + endif + + call init_coupler_flux ! initialize fluxes exchanged with coupler +#ifdef popcice + call sst_sss ! POP data for CICE initialization +#endif + call init_thermo_vertical ! initialize vertical thermodynamics + + call icepack_init_itd(ncat, hin_max) ! ice thickness distribution + if (my_task == master_task) then + call icepack_init_itd_hist(ncat, hin_max, c_hi_range) ! output + endif + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call calendar(time) ! determine the initial date + + call init_forcing_ocn(dt) ! initialize sss and sst from data + call init_state ! initialize the ice state + call init_transport ! 
initialize horizontal transport + call ice_HaloRestore_init ! restored boundary conditions + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays + + call init_restart ! initialize restart variables + call init_diags ! initialize diagnostic output points + call init_history_therm ! initialize thermo history variables + call init_history_dyn ! initialize dynamic history variables + + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + !property tables + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing + ! in prep_radiation. + if (trim(runtype) == 'continue' .or. restart) & + call init_shortwave ! initialize radiative transfer + + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- + + call init_forcing_atmo ! initialize atmospheric forcing (standalone) + +#ifndef coupled +#ifndef CESMCOUPLED + call get_forcing_atmo ! atmospheric forcing from data + call get_forcing_ocn(dt) ! ocean forcing from data + + ! aerosols + ! if (tr_aero) call faero_data ! data file + ! if (tr_zaero) call fzaero_data ! data file (gx1) + if (tr_aero .or. tr_zaero) call faero_default ! default values + if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry +#endif +#endif + if (z_tracers) call get_atm_bgc ! biogeochemistry + + if (runtype == 'initial' .and. .not. 
restart) & + call init_shortwave ! initialize radiative transfer using current swdn + + call init_flux_atm ! initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + +! if (write_ic) call accum_hist(dt) ! write initial conditions + + end subroutine cice_init + +!======================================================================= + + subroutine init_restart + + use ice_arrays_column, only: dhsn + use ice_blocks, only: nx_block, ny_block + use ice_calendar, only: time, calendar + use ice_constants, only: c0 + use ice_domain, only: nblocks + use ice_domain_size, only: ncat, n_aero + use ice_dyn_eap, only: read_restart_eap + use ice_dyn_shared, only: kdyn + use ice_grid, only: tmask + use ice_init, only: ice_ic + use ice_init_column, only: init_age, init_FY, init_lvl, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & + init_aerosol, init_hbrine, init_bgc + use ice_restart_column, only: restart_age, read_restart_age, & + restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & + restart_pond_lvl, read_restart_pond_lvl, & + restart_pond_topo, read_restart_pond_topo, & + restart_aero, read_restart_aero, & + restart_hbrine, read_restart_hbrine, & + restart_zsal, restart_bgc + use ice_restart_driver, only: restartfile + use ice_restart_shared, only: runtype, restart + use ice_state ! almost everything + + integer(kind=int_kind) :: & + i, j , & ! horizontal indices + iblk ! 
block index + logical(kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & + tr_pond_topo, tr_aero, tr_brine, & + skl_bgc, z_tracers, solve_zsal + integer(kind=int_kind) :: & + ntrcr + integer(kind=int_kind) :: & + nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_iage, nt_FY, nt_aero + + character(len=*), parameter :: subname = '(init_restart)' + + call icepack_query_tracer_numbers(ntrcr_out=ntrcr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine) + call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (trim(runtype) == 'continue') then + ! start from core restart file + call restartfile() ! given by pointer in ice_in + call calendar(time) ! update time parameters + if (kdyn == 2) call read_restart_eap ! EAP + else if (restart) then ! ice_ic = core restart file + call restartfile (ice_ic) ! or 'default' or 'none' + !!! uncomment to create netcdf + ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file + !!! uncomment if EAP restart data exists + ! if (kdyn == 2) call read_restart_eap + endif + + ! tracers + ! ice age tracer + if (tr_iage) then + if (trim(runtype) == 'continue') & + restart_age = .true. 
+ if (restart_age) then + call read_restart_age + else + do iblk = 1, nblocks + call init_age(trcrn(:,:,nt_iage,:,iblk)) + enddo ! iblk + endif + endif + ! first-year area tracer + if (tr_FY) then + if (trim(runtype) == 'continue') restart_FY = .true. + if (restart_FY) then + call read_restart_FY + else + do iblk = 1, nblocks + call init_FY(trcrn(:,:,nt_FY,:,iblk)) + enddo ! iblk + endif + endif + ! level ice tracer + if (tr_lvl) then + if (trim(runtype) == 'continue') restart_lvl = .true. + if (restart_lvl) then + call read_restart_lvl + else + do iblk = 1, nblocks + call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & + trcrn(:,:,nt_vlvl,:,iblk)) + enddo ! iblk + endif + endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif + ! level-ice melt ponds + if (tr_pond_lvl) then + if (trim(runtype) == 'continue') & + restart_pond_lvl = .true. + if (restart_pond_lvl) then + call read_restart_pond_lvl + else + do iblk = 1, nblocks + call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk), & + dhsn(:,:,:,iblk)) + enddo ! iblk + endif + endif + ! topographic melt ponds + if (tr_pond_topo) then + if (trim(runtype) == 'continue') & + restart_pond_topo = .true. + if (restart_pond_topo) then + call read_restart_pond_topo + else + do iblk = 1, nblocks + call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk)) + enddo ! iblk + endif ! .not. restart_pond + endif + if (tr_aero) then ! ice aerosol + if (trim(runtype) == 'continue') restart_aero = .true. + if (restart_aero) then + call read_restart_aero + else + do iblk = 1, nblocks + call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) + enddo ! 
iblk + endif ! .not. restart_aero + endif + + if (trim(runtype) == 'continue') then + if (tr_brine) & + restart_hbrine = .true. + if (solve_zsal) & + restart_zsal = .true. + if (skl_bgc .or. z_tracers) & + restart_bgc = .true. + endif + + if (tr_brine .or. skl_bgc) then ! brine height tracer + call init_hbrine + if (tr_brine .and. restart_hbrine) call read_restart_hbrine + endif + + if (solve_zsal .or. skl_bgc .or. z_tracers) call init_bgc ! biogeochemistry + + !----------------------------------------------------------------- + ! aggregate tracers + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j,iblk)) then + call icepack_aggregate (ncat, & + aicen(i,j,:,iblk), & + trcrn(i,j,:,:,iblk),& + vicen(i,j,:,iblk), & + vsnon(i,j,:,iblk), & + aice (i,j, iblk), & + trcr (i,j,:,iblk), & + vice (i,j, iblk), & + vsno (i,j, iblk), & + aice0(i,j, iblk), & + ntrcr, & + trcr_depend, & + trcr_base, & + n_trcr_strata, & + nt_strata) + else + ! tcraig, reset all tracer values on land to zero + trcrn(i,j,:,:,iblk) = c0 + endif + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine init_restart + +!======================================================================= + + end module CICE_InitMod + +!======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 new file mode 100644 index 000000000..9ed65826b --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -0,0 +1,659 @@ +!======================================================================= +! +! Main driver for time stepping of CICE. +! +! authors Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. 
Lipscomb, LANL +! +! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep +! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Converted to free source form (F90) +! 2007 BPB: Modified Delta-Eddington shortwave interface +! 2008 ECH: moved ESMF code to its own driver + + module CICE_RunMod + + use ice_kinds_mod + use perf_mod, only : t_startf, t_stopf, t_barrierf + use ice_fileunits, only: nu_diag + use ice_arrays_column, only: oceanmixed_ice + use ice_constants, only: c0, c1 + use ice_constants, only: field_loc_center, field_type_scalar + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_max_aero + use icepack_intfc, only: icepack_query_parameters + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_numbers + + implicit none + private + public :: CICE_Run, ice_step + +!======================================================================= + + contains + +!======================================================================= +! +! This is the main driver routine for advancing CICE forward in time. +! +! author Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. Lipscomb, LANL + + subroutine CICE_Run + + use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar + use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, atm_data_type + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & + faero_default + use ice_flux, only: init_flux_atm, init_flux_ocn + use ice_timers, only: ice_timer_start, ice_timer_stop, & + timer_couple, timer_step + + logical (kind=log_kind) :: & + tr_aero, tr_zaero, skl_bgc, z_tracers + character(len=*), parameter :: subname = '(CICE_Run)' + + !-------------------------------------------------------------------- + ! initialize error code and step timer + !-------------------------------------------------------------------- + + call ice_timer_start(timer_step) ! 
start timing entire run + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !-------------------------------------------------------------------- + ! timestep loop + !-------------------------------------------------------------------- + +! timeLoop: do + + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + + call ice_timer_start(timer_couple) ! atm/ocn coupling + +#ifndef coupled +#ifndef CESMCOUPLED + call get_forcing_atmo ! atmospheric forcing from data + call get_forcing_ocn(dt) ! ocean forcing from data + + ! aerosols + ! if (tr_aero) call faero_data ! data file + ! if (tr_zaero) call fzaero_data ! data file (gx1) + if (tr_aero .or. tr_zaero) call faero_default ! default values + + if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry +#endif +#endif + if (z_tracers) call get_atm_bgc ! biogeochemistry + + call init_flux_atm ! Initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + call calendar(time) ! at the end of the timestep + + call ice_timer_stop(timer_couple) ! atm/ocn coupling + + call ice_step + +! if (stop_now >= 1) exit timeLoop +! enddo timeLoop + + !-------------------------------------------------------------------- + ! end of timestep loop + !-------------------------------------------------------------------- + + call ice_timer_stop(timer_step) ! end timestepping loop timer + + end subroutine CICE_Run + +!======================================================================= +! +! Calls drivers for physics components, some initialization, and output +! +! author Elizabeth C. Hunke, LANL +! William H. 
Lipscomb, LANL + + subroutine ice_step + + use ice_boundary, only: ice_HaloUpdate + use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep, idate, sec + use ice_diagnostics, only: init_mass_diags, runtime_diags + use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags + use ice_domain, only: halo_info, nblocks + use ice_dyn_eap, only: write_restart_eap + use ice_dyn_shared, only: kdyn, kridge + use ice_flux, only: scale_factor, init_history_therm, & + daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + use ice_history, only: accum_hist + use ice_history_bgc, only: init_history_bgc + use ice_restart, only: final_restart + use ice_restart_column, only: write_restart_age, write_restart_FY, & + write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & + write_restart_pond_topo, write_restart_aero, & + write_restart_bgc, write_restart_hbrine + use ice_restart_driver, only: dumpfile + use ice_restoring, only: restore_ice, ice_HaloRestore + use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & + update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & + biogeochemistry, save_init + use ice_timers, only: ice_timer_start, ice_timer_stop, & + timer_diags, timer_column, timer_thermo, timer_bound, & + timer_hist, timer_readwrite + use ice_communicate, only: MPI_COMM_ICE + use ice_prescribed_mod + + integer (kind=int_kind) :: & + iblk , & ! block index + k , & ! dynamics supercycling index + ktherm ! thermodynamics is off when ktherm = -1 + + real (kind=dbl_kind) :: & + offset ! 
d(age)/dt time offset + + logical (kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & + calc_Tsfc, skl_bgc, solve_zsal, z_tracers + + character(len=*), parameter :: subname = '(ice_step)' + + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! restoring on grid boundaries + !----------------------------------------------------------------- + + if (restore_ice) call ice_HaloRestore + + !----------------------------------------------------------------- + ! initialize diagnostics and save initial state values + !----------------------------------------------------------------- + + call ice_timer_start(timer_diags) ! diagnostics/history + call init_mass_diags ! diagnostics per timestep + call init_history_therm + call init_history_bgc + call ice_timer_stop(timer_diags) ! diagnostics/history + + if (prescribed_ice) then ! read prescribed ice + call t_barrierf('cice_run_presc_BARRIER',MPI_COMM_ICE) + call t_startf ('cice_run_presc') + call ice_prescribed_run(idate, sec) + call t_stopf ('cice_run_presc') + endif + + call save_init + + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) then + + !----------------------------------------------------------------- + ! 
scale radiation fields + !----------------------------------------------------------------- + + if (calc_Tsfc) call prep_radiation (iblk) + + !----------------------------------------------------------------- + ! thermodynamics and biogeochemistry + !----------------------------------------------------------------- + + call step_therm1 (dt, iblk) ! vertical thermodynamics + call biogeochemistry (dt, iblk) ! biogeochemistry + if (.not.prescribed_ice) & + call step_therm2 (dt, iblk) ! ice thickness distribution thermo + + endif + + enddo ! iblk + !$OMP END PARALLEL DO + + ! clean up, update tendency diagnostics + offset = dt + call update_state (dt, daidtt, dvidtt, dagedtt, offset) + + call ice_timer_stop(timer_thermo) ! thermodynamics + call ice_timer_stop(timer_column) ! column physics + + !----------------------------------------------------------------- + ! dynamics, transport, ridging + !----------------------------------------------------------------- + + if (.not.prescribed_ice) then + do k = 1, ndtd + + ! momentum, stress, transport + call step_dyn_horiz (dt_dyn) + + ! ridging + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) + enddo + !$OMP END PARALLEL DO + + ! clean up, update tendency diagnostics + offset = c0 + call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + + enddo + endif + + !----------------------------------------------------------------- + ! albedo, shortwave radiation + !----------------------------------------------------------------- + + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) call step_radiation (dt, iblk) + + !----------------------------------------------------------------- + ! 
get ready for coupling and the next time step + !----------------------------------------------------------------- + + call coupling_prep (iblk) + + enddo ! iblk + !$OMP END PARALLEL DO + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (scale_factor, halo_info, & + field_loc_center, field_type_scalar) + call ice_timer_stop(timer_bound) + + call ice_timer_stop(timer_thermo) ! thermodynamics + call ice_timer_stop(timer_column) ! column physics + + !----------------------------------------------------------------- + ! write data + !----------------------------------------------------------------- + + call ice_timer_start(timer_diags) ! diagnostics + if (mod(istep,diagfreq) == 0) then + call runtime_diags(dt) ! log file + if (solve_zsal) call zsal_diags + if (skl_bgc .or. z_tracers) call bgc_diags + if (tr_brine) call hbrine_diags + endif + call ice_timer_stop(timer_diags) ! diagnostics + + call ice_timer_start(timer_hist) ! history + call accum_hist (dt) ! history file + call ice_timer_stop(timer_hist) ! history + + call ice_timer_start(timer_readwrite) ! reading/writing + if (write_restart == 1) then + call dumpfile ! core variables for restarting + if (tr_iage) call write_restart_age + if (tr_FY) call write_restart_FY + if (tr_lvl) call write_restart_lvl + if (tr_pond_cesm) call write_restart_pond_cesm + if (tr_pond_lvl) call write_restart_pond_lvl + if (tr_pond_topo) call write_restart_pond_topo + if (tr_aero) call write_restart_aero + if (solve_zsal .or. skl_bgc .or. z_tracers) & + call write_restart_bgc + if (tr_brine) call write_restart_hbrine + if (kdyn == 2) call write_restart_eap + call final_restart + endif + + call ice_timer_stop(timer_readwrite) ! reading/writing + + end subroutine ice_step + +!======================================================================= +! +! Prepare for coupling +! +! authors: Elizabeth C. 
Hunke, LANL + + subroutine coupling_prep (iblk) + + use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & + albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn + use ice_blocks, only: nx_block, ny_block, get_block, block + use ice_domain, only: blocks_ice + use ice_calendar, only: dt, nstreams + use ice_domain_size, only: ncat + use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & + albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & + alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & + fresh_ai, fsalt_ai, fsalt, & + fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & + fsens, flat, fswabs, flwout, evap, Tref, Qref, & + scale_fluxes, frzmlt_init, frzmlt, Uref, wind, fsurfn_f, flatn_f + use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & + fnit, fsil, famm, fdmsp, fdms, fhum, fdust, falgalN, & + fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn + use ice_grid, only: tmask + use ice_state, only: aicen, aice, aice_init + use ice_step_mod, only: ocean_mixed_layer + use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop + + integer (kind=int_kind), intent(in) :: & + iblk ! block index + + ! local variables + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + n , & ! thickness category index + i,j , & ! horizontal indices + k , & ! tracer index + nbtrcr ! + + type (block) :: & + this_block ! block information for current block + + logical (kind=log_kind) :: & + skl_bgc , & ! + calc_Tsfc ! + + real (kind=dbl_kind) :: & + cszn , & ! counter for history averaging + puny , & ! + rhofresh , & ! + netsw ! 
flag for shortwave radiation presence + + character(len=*), parameter :: subname = '(coupling_prep)' + + !----------------------------------------------------------------- + + call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) + call icepack_query_parameters(skl_bgc_out=skl_bgc) + call icepack_query_tracer_numbers(nbtrcr_out=nbtrcr) + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Save current value of frzmlt for diagnostics. + ! Update mixed layer with heat and radiation from ice. + !----------------------------------------------------------------- + + do j = 1, ny_block + do i = 1, nx_block + frzmlt_init (i,j,iblk) = frzmlt(i,j,iblk) + enddo + enddo + + call ice_timer_start(timer_couple,iblk) ! atm/ocn coupling + + if (oceanmixed_ice) & + call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst + + !----------------------------------------------------------------- + ! Aggregate albedos + !----------------------------------------------------------------- + + do j = 1, ny_block + do i = 1, nx_block + alvdf(i,j,iblk) = c0 + alidf(i,j,iblk) = c0 + alvdr(i,j,iblk) = c0 + alidr(i,j,iblk) = c0 + + albice(i,j,iblk) = c0 + albsno(i,j,iblk) = c0 + albpnd(i,j,iblk) = c0 + apeff_ai(i,j,iblk) = c0 + snowfrac(i,j,iblk) = c0 + + ! 
for history averaging + cszn = c0 + netsw = swvdr(i,j,iblk)+swidr(i,j,iblk)+swvdf(i,j,iblk)+swidf(i,j,iblk) + if (netsw > puny) cszn = c1 + do n = 1, nstreams + albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn + enddo + enddo + enddo + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do n = 1, ncat + do j = jlo, jhi + do i = ilo, ihi + if (aicen(i,j,n,iblk) > puny) then + + alvdf(i,j,iblk) = alvdf(i,j,iblk) & + + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) + alidf(i,j,iblk) = alidf(i,j,iblk) & + + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk) + alvdr(i,j,iblk) = alvdr(i,j,iblk) & + + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) + alidr(i,j,iblk) = alidr(i,j,iblk) & + + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) + + netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & + + swvdf(i,j,iblk) + swidf(i,j,iblk) + if (netsw > puny) then ! sun above horizon + albice(i,j,iblk) = albice(i,j,iblk) & + + albicen(i,j,n,iblk)*aicen(i,j,n,iblk) + albsno(i,j,iblk) = albsno(i,j,iblk) & + + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk) + albpnd(i,j,iblk) = albpnd(i,j,iblk) & + + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) + endif + + apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & ! for history + + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) + snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history + + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) + + endif ! aicen > puny + enddo + enddo + enddo + + do j = 1, ny_block + do i = 1, nx_block + + !----------------------------------------------------------------- + ! reduce fresh by fpond for coupling + !----------------------------------------------------------------- + + if (l_mpond_fresh) then + fpond(i,j,iblk) = fpond(i,j,iblk) * rhofresh/dt + fresh(i,j,iblk) = fresh(i,j,iblk) - fpond(i,j,iblk) + endif + + !---------------------------------------------------------------- + ! 
Store grid box mean albedos and fluxes before scaling by aice + !---------------------------------------------------------------- + + alvdf_ai (i,j,iblk) = alvdf (i,j,iblk) + alidf_ai (i,j,iblk) = alidf (i,j,iblk) + alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) + alidr_ai (i,j,iblk) = alidr (i,j,iblk) + fresh_ai (i,j,iblk) = fresh (i,j,iblk) + fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) + fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) + fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + + if (nbtrcr > 0) then + do k = 1, nbtrcr + flux_bio_ai (i,j,k,iblk) = flux_bio (i,j,k,iblk) + enddo + endif + + !----------------------------------------------------------------- + ! Save net shortwave for scaling factor in scale_factor + !----------------------------------------------------------------- + scale_factor(i,j,iblk) = & + swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & + + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & + + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & + + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) + + enddo + enddo + + !----------------------------------------------------------------- + ! Divide fluxes by ice area + ! - the CESM coupler assumes fluxes are per unit ice area + ! 
- also needed for global budget in diagnostics + !----------------------------------------------------------------- + + call scale_fluxes (nx_block, ny_block, & + tmask (:,:,iblk), nbtrcr, icepack_max_aero, & + aice (:,:,iblk), Tf (:,:,iblk), & + Tair (:,:,iblk), Qa (:,:,iblk), & + strairxT (:,:,iblk), strairyT(:,:,iblk), & + fsens (:,:,iblk), flat (:,:,iblk), & + fswabs (:,:,iblk), flwout (:,:,iblk), & + evap (:,:,iblk), & + Tref (:,:,iblk), Qref (:,:,iblk), & + fresh (:,:,iblk), fsalt (:,:,iblk), & + fhocn (:,:,iblk), fswthru (:,:,iblk), & + faero_ocn(:,:,:,iblk), & + alvdr (:,:,iblk), alidr (:,:,iblk), & + alvdf (:,:,iblk), alidf (:,:,iblk), & + fzsal (:,:,iblk), fzsal_g (:,:,iblk), & + flux_bio(:,:,1:nbtrcr,iblk), & + Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) ) + + !----------------------------------------------------------------- + ! Define ice-ocean bgc fluxes + !----------------------------------------------------------------- + + if (nbtrcr > 0 .or. skl_bgc) then + call bgcflux_ice_to_ocn (nx_block, ny_block, & + flux_bio(:,:,1:nbtrcr,iblk), & + fnit(:,:,iblk), fsil(:,:,iblk), & + famm(:,:,iblk), fdmsp(:,:,iblk), & + fdms(:,:,iblk), fhum(:,:,iblk), & + fdust(:,:,iblk), falgalN(:,:,:,iblk), & + fdoc(:,:,:,iblk), fdic(:,:,:,iblk), & + fdon(:,:,:,iblk), ffep(:,:,:,iblk), & + ffed(:,:,:,iblk)) + endif + +!echmod - comment this out for efficiency, if .not. calc_Tsfc + if (.not. calc_Tsfc) then + + !--------------------------------------------------------------- + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. + !--------------------------------------------------------------- + + call sfcflux_to_ocn & + (nx_block, ny_block, & + tmask (:,:,iblk), aice_init(:,:,iblk), & + fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & + fresh (:,:,iblk), fhocn (:,:,iblk)) + endif +!echmod + + call ice_timer_stop(timer_couple,iblk) ! 
 atm/ocn coupling + + end subroutine coupling_prep + +!======================================================================= +! +! If surface heat fluxes are provided to CICE instead of CICE calculating +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! be provided at points which do not have ice. (This could be due to +! the heat fluxes being calculated on a lower resolution grid or the +! heat fluxes not being recalculated at every CICE timestep.) At ice free points, +! conserve energy and water by passing these fluxes to the ocean. +! +! author: A. McLaren, Met Office + + subroutine sfcflux_to_ocn(nx_block, ny_block, & + tmask, aice, & + fsurfn_f, flatn_f, & + fresh, fhocn) + + use ice_domain_size, only: ncat + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block ! block dimensions + + logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & + tmask ! land/boundary mask, thickness (T-cell) + + real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in):: & + aice ! initial ice concentration + + real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), intent(in) :: & + fsurfn_f, & ! net surface heat flux (provided as forcing) + flatn_f ! latent heat flux (provided as forcing) + + real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout):: & + fresh , & ! fresh water flux to ocean (kg/m2/s) + fhocn ! actual ocn/ice heat flx (W/m**2) + +#ifdef CICE_IN_NEMO + + ! local variables + integer (kind=int_kind) :: & + i, j, n ! horizontal indices + + real (kind=dbl_kind) :: & + puny, & ! + rLsub ! 1/Lsub + + character(len=*), parameter :: subname = '(sfcflux_to_ocn)' + + call icepack_query_parameters(puny_out=puny) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + rLsub = c1 / Lsub + + do n = 1, ncat + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j) .and.
aice(i,j) <= puny) then + fhocn(i,j) = fhocn(i,j) & + + fsurfn_f(i,j,n) + flatn_f(i,j,n) + fresh(i,j) = fresh(i,j) & + + flatn_f(i,j,n) * rLsub + endif + enddo ! i + enddo ! j + enddo ! n + +#endif + + end subroutine sfcflux_to_ocn + +!======================================================================= + + end module CICE_RunMod + +!======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt b/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt new file mode 100644 index 000000000..959a3ce32 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt @@ -0,0 +1,17 @@ +! Copyright (c) 2019, Triad National Security, LLC +! All rights reserved. +! +! Copyright 2019. Triad National Security, LLC. This software was +! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! Alamos National Laboratory (LANL), which is operated by Triad +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse +! it with the version available from LANL. +! +! The full license and distribution policy are available from +! https://github.com/CICE-Consortium +! diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 new file mode 100644 index 000000000..29cd34320 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -0,0 +1,1352 @@ +module ice_comp_nuopc + + !---------------------------------------------------------------------------- + ! 
This is the NUOPC cap for CICE + !---------------------------------------------------------------------------- + + use ESMF + use NUOPC , only : NUOPC_CompDerive, NUOPC_CompSetEntryPoint, NUOPC_CompSpecialize + use NUOPC , only : NUOPC_CompFilterPhaseMap, NUOPC_IsUpdated, NUOPC_IsAtTime + use NUOPC , only : NUOPC_CompAttributeGet, NUOPC_Advertise + use NUOPC , only : NUOPC_SetAttribute, NUOPC_CompAttributeGet, NUOPC_CompAttributeSet + use NUOPC_Model , only : model_routine_SS => SetServices + use NUOPC_Model , only : model_label_Advance => label_Advance + use NUOPC_Model , only : model_label_DataInitialize => label_DataInitialize + use NUOPC_Model , only : model_label_SetRunClock => label_SetRunClock + use NUOPC_Model , only : model_label_Finalize => label_Finalize + use NUOPC_Model , only : NUOPC_ModelGet, SetVM + use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl, cs=>shr_kind_cs + use shr_sys_mod , only : shr_sys_abort, shr_sys_flush + use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit + use shr_string_mod , only : shr_string_listGetNum + use shr_orb_mod , only : shr_orb_decl + use shr_const_mod + use shr_cal_mod , only : shr_cal_noleap, shr_cal_gregorian, shr_cal_ymd2date + use ice_constants , only : ice_init_constants + use ice_shr_methods , only : chkerr, state_setscalar, state_getscalar, state_diagnose, alarmInit + use ice_shr_methods , only : set_component_logging, get_component_instance + use ice_shr_methods , only : state_flddebug + use ice_import_export , only : ice_import, ice_export + use ice_import_export , only : ice_advertise_fields, ice_realize_fields + use ice_domain_size , only : nx_global, ny_global + use ice_domain , only : nblocks, blocks_ice, distrb_info + use ice_blocks , only : block, get_block, nx_block, ny_block, nblocks_x, nblocks_y + use ice_blocks , only : nblocks_tot, get_block_parameter + use ice_distribution , only : ice_distributiongetblockloc + use ice_grid , only : tlon, tlat, hm, tarea, ULON, ULAT + use 
ice_communicate , only : my_task, master_task, mpi_comm_ice + use ice_calendar , only : force_restart_now, write_ic + use ice_calendar , only : idate, mday, time, month, daycal, time2sec, year_init + use ice_calendar , only : sec, dt, calendar, calendar_type, nextsw_cday, istep + use ice_kinds_mod , only : dbl_kind, int_kind, char_len + use ice_scam , only : scmlat, scmlon, single_column + use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name, inst_suffix, release_all_fileunits + use ice_restart_shared , only : runid, runtype, restart_dir, restart_file + use ice_history , only : accum_hist +#if (defined NEWCODE) + use ice_history_shared , only : model_doi_url ! TODO: add this functionality +#endif + use ice_prescribed_mod , only : ice_prescribed_init +#if (defined NEWCODE) + use ice_atmo , only : flux_convergence_tolerance, flux_convergence_max_iteration + use ice_atmo , only : use_coldair_outbreak_mod +#endif + use CICE_InitMod , only : CICE_Init + use CICE_RunMod , only : CICE_Run + use ice_exit , only : abort_ice + use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc , only : icepack_init_orbit, icepack_init_parameters + use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters + use perf_mod , only : t_startf, t_stopf, t_barrierf + use ice_timers + + implicit none + + public :: SetServices + public :: SetVM + + private :: InitializeP0 + private :: InitializeAdvertise + private :: InitializeRealize + private :: ModelAdvance + private :: ModelSetRunClock + private :: ModelFinalize + + character(len=CL) :: flds_scalar_name = '' + integer :: flds_scalar_num = 0 + integer :: flds_scalar_index_nx = 0 + integer :: flds_scalar_index_ny = 0 + integer :: flds_scalar_index_nextsw_cday = 0 + + integer , parameter :: dbug = 10 + integer , parameter :: debug_import = 0 ! internal debug level + integer , parameter :: debug_export = 0 ! 
internal debug level + character(*), parameter :: modName = "(ice_comp_nuopc)" + character(*), parameter :: u_FILE_u = & + __FILE__ + +!======================================================================= +contains +!=============================================================================== + + subroutine SetServices(gcomp, rc) + + ! Arguments + type(ESMF_GridComp) :: gcomp + integer, intent(out) :: rc + + ! Local variables + character(len=*),parameter :: subname=trim(modName)//':(SetServices) ' + !-------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + ! the NUOPC gcomp component will register the generic methods + call NUOPC_CompDerive(gcomp, model_routine_SS, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! switching to IPD versions + call ESMF_GridCompSetEntryPoint(gcomp, ESMF_METHOD_INITIALIZE, & + userRoutine=InitializeP0, phase=0, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! set entry point for methods that require specific implementation + call NUOPC_CompSetEntryPoint(gcomp, ESMF_METHOD_INITIALIZE, & + phaseLabelList=(/"IPDv01p1"/), userRoutine=InitializeAdvertise, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call NUOPC_CompSetEntryPoint(gcomp, ESMF_METHOD_INITIALIZE, & + phaseLabelList=(/"IPDv01p3"/), userRoutine=InitializeRealize, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
attach specializing method(s) + call NUOPC_CompSpecialize(gcomp, specLabel=model_label_Advance, & + specRoutine=ModelAdvance, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_MethodRemove(gcomp, label=model_label_SetRunClock, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call NUOPC_CompSpecialize(gcomp, specLabel=model_label_SetRunClock, & + specRoutine=ModelSetRunClock, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call NUOPC_CompSpecialize(gcomp, specLabel=model_label_Finalize, & + specRoutine=ModelFinalize, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + + end subroutine SetServices + + !=============================================================================== + + subroutine InitializeP0(gcomp, importState, exportState, clock, rc) + + ! Arguments + type(ESMF_GridComp) :: gcomp + type(ESMF_State) :: importState, exportState + type(ESMF_Clock) :: clock + integer, intent(out) :: rc + !-------------------------------- + + rc = ESMF_SUCCESS + + ! Switch to IPDv01 by filtering all other phaseMap entries + call NUOPC_CompFilterPhaseMap(gcomp, ESMF_METHOD_INITIALIZE, & + acceptStringList=(/"IPDv01p"/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + end subroutine InitializeP0 + + !=============================================================================== + + subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) + + ! Arguments + type(ESMF_GridComp) :: gcomp + type(ESMF_State) :: importState, exportState + type(ESMF_Clock) :: clock + integer, intent(out) :: rc + + ! 
Local variables + character(len=CL) :: cvalue + character(len=CL) :: logmsg + logical :: isPresent, isSet + character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' + !-------------------------------- + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldName", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + flds_scalar_name = trim(cvalue) + call ESMF_LogWrite(trim(subname)//' flds_scalar_name = '//trim(flds_scalar_name), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call shr_sys_abort(subname//'Need to set attribute ScalarFieldName') + endif + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldCount", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue, *) flds_scalar_num + write(logmsg,*) flds_scalar_num + call ESMF_LogWrite(trim(subname)//' flds_scalar_num = '//trim(logmsg), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call shr_sys_abort(subname//'Need to set attribute ScalarFieldCount') + endif + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxGridNX", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) flds_scalar_index_nx + write(logmsg,*) flds_scalar_index_nx + call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_nx = '//trim(logmsg), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call shr_sys_abort(subname//'Need to set attribute ScalarFieldIdxGridNX') + endif + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxGridNY", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. 
isSet) then + read(cvalue,*) flds_scalar_index_ny + write(logmsg,*) flds_scalar_index_ny + call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_ny = '//trim(logmsg), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call shr_sys_abort(subname//'Need to set attribute ScalarFieldIdxGridNY') + endif + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxNextSwCday", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) flds_scalar_index_nextsw_cday + write(logmsg,*) flds_scalar_index_nextsw_cday + call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_nextsw_cday = '//trim(logmsg), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call shr_sys_abort(subname//'Need to set attribute ScalarFieldIdxNextSwCday') + endif + + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + end subroutine InitializeAdvertise + + !=============================================================================== + + subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) + + ! Arguments + type(ESMF_GridComp) :: gcomp + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + type(ESMF_Clock) :: clock + integer, intent(out) :: rc + + ! 
Local variables + type(ESMF_DistGrid) :: distGrid + type(ESMF_Mesh) :: Emesh, EmeshTemp + integer :: spatialDim + integer :: numOwnedElements + real(R8), pointer :: ownedElemCoords(:) + real(r8), pointer :: lat(:), latMesh(:) + real(r8), pointer :: lon(:), lonMesh(:) + integer , allocatable :: gindex_ice(:) + integer , allocatable :: gindex_elim(:) + integer , allocatable :: gindex(:) + integer :: globalID + character(ESMF_MAXSTR) :: cvalue + real(r8) :: eccen, obliqr, lambm0, mvelpp + character(len=char_len) :: tfrz_option + character(ESMF_MAXSTR) :: convCIM, purpComp + type(ESMF_VM) :: vm + type(ESMF_Time) :: currTime ! Current time + type(ESMF_Time) :: startTime ! Start time + type(ESMF_Time) :: stopTime ! Stop time + type(ESMF_Time) :: refTime ! Ref time + type(ESMF_TimeInterval) :: timeStep ! Model timestep + type(ESMF_Calendar) :: esmf_calendar ! esmf calendar + type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type + integer :: start_ymd ! Start date (YYYYMMDD) + integer :: start_tod ! start time of day (s) + integer :: curr_ymd ! Current date (YYYYMMDD) + integer :: curr_tod ! Current time of day (s) + integer :: stop_ymd ! stop date (YYYYMMDD) + integer :: stop_tod ! stop time of day (sec) + integer :: ref_ymd ! Reference date (YYYYMMDD) + integer :: ref_tod ! reference time of day (s) + integer :: yy,mm,dd ! Temporaries for time query + integer :: iyear ! yyyy + integer :: dtime ! time step + integer :: lmpicom + integer :: shrlogunit ! original log unit + character(len=cs) :: starttype ! infodata start type + integer :: lsize ! local size of coupling array + character(len=512) :: diro + character(len=512) :: logfile + logical :: isPresent + integer :: localPet + integer :: n,c,g,i,j,m ! indices + integer :: iblk, jblk ! indices + integer :: ig, jg ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type(block) :: this_block ! block information for current block + integer :: compid ! 
component id + character(len=CL) :: tempc1,tempc2 + real(R8) :: diff_lon + integer :: npes + integer :: num_elim_global + integer :: num_elim_local + integer :: num_elim + integer :: num_ice + integer :: num_elim_gcells ! local number of eliminated gridcells + integer :: num_elim_blocks ! local number of eliminated blocks + integer :: num_total_blocks + integer :: my_elim_start, my_elim_end + real(dbl_kind) :: rad_to_deg + integer(int_kind) :: ktherm + character(*), parameter :: F00 = "('(ice_comp_nuopc) ',2a,1x,d21.14)" + character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' + !-------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + !---------------------------------------------------------------------------- + ! generate local mpi comm + !---------------------------------------------------------------------------- + + call ESMF_GridCompGet(gcomp, vm=vm, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_VMGet(vm, mpiCommunicator=lmpicom, localPet=localPet, PetCount=npes, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + !---------------------------------------------------------------------------- + ! determine instance information + !---------------------------------------------------------------------------- + + call get_component_instance(gcomp, inst_suffix, inst_index, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + inst_name = "ICE"//trim(inst_suffix) + + !---------------------------------------------------------------------------- + ! start cice timers + !---------------------------------------------------------------------------- + + call t_startf ('cice_init_total') + + !---------------------------------------------------------------------------- + ! 
Initialize constants + !---------------------------------------------------------------------------- + + call ice_init_constants(omega_in=SHR_CONST_OMEGA, radius_in=SHR_CONST_REARTH, & + spval_dbl_in=SHR_CONST_SPVAL) + call icepack_init_parameters( & + secday_in = SHR_CONST_CDAY, & + rhoi_in = SHR_CONST_RHOICE, & + rhow_in = SHR_CONST_RHOSW, & + cp_air_in = SHR_CONST_CPDAIR, & + cp_ice_in = SHR_CONST_CPICE, & + cp_ocn_in = SHR_CONST_CPSW, & + gravit_in = SHR_CONST_G, & + rhofresh_in = SHR_CONST_RHOFW, & + zvir_in = SHR_CONST_ZVIR, & + vonkar_in = SHR_CONST_KARMAN, & + cp_wv_in = SHR_CONST_CPWV, & + stefan_boltzmann_in = SHR_CONST_STEBOL, & + Tffresh_in= SHR_CONST_TKFRZ, & + Lsub_in = SHR_CONST_LATSUB, & + Lvap_in = SHR_CONST_LATVAP, & +! Lfresh_in = SHR_CONST_LATICE, & ! computed in init_parameters as Lsub-Lvap + Timelt_in = SHR_CONST_TKFRZ-SHR_CONST_TKFRZ, & + Tsmelt_in = SHR_CONST_TKFRZ-SHR_CONST_TKFRZ, & + ice_ref_salinity_in = SHR_CONST_ICE_REF_SAL, & + depressT_in = 0.054_dbl_kind, & + Tocnfrz_in= -34.0_dbl_kind*0.054_dbl_kind, & + pi_in = SHR_CONST_PI, & + snowpatch_in = 0.005_dbl_kind, & + dragio_in = 0.00962_dbl_kind) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !---------------------------------------------------------------------------- + ! Determine attributes - also needed in realize phase to get grid information + !---------------------------------------------------------------------------- + + ! Get orbital values + ! Note that these values are obtained in a call to init_orbit in ice_shortwave.F90 + ! 
if CESMCOUPLED is not defined + call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) eccen + end if + call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) obliqr + end if + call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) lambm0 + end if + call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) mvelpp + end if + + call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, & + lambm0_in=lambm0, obliqr_in=obliqr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! Determine runtype and possibly nextsw_cday + call NUOPC_CompAttributeGet(gcomp, name='start_type', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) starttype + if (trim(starttype) == trim('startup')) then + runtype = "initial" + else if (trim(starttype) == trim('continue') ) then + runtype = "continue" + else if (trim(starttype) == trim('branch')) then + runtype = "continue" + else + call shr_sys_abort( subname//' ERROR: unknown starttype' ) + end if + + ! Note that in the mct version the atm was initialized first so that nextsw_cday could be passed to the other + ! components - this assumed that cam or datm was ALWAYS initialized first. + ! In the nuopc version it will be easier to assume that on startup - nextsw_cday is just the current time + + ! 
 TODO (mvertens, 2019-03-21): need to get the perpetual run working + + if (trim(runtype) /= 'initial') then + ! Set nextsw_cday to -1 (this will skip an orbital calculation on initialization) + nextsw_cday = -1.0_r8 + else + call ESMF_ClockGet( clock, currTime=currTime, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeGet( currTime, dayOfYear_r8=nextsw_cday, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + else + ! This would be the NEMS branch + ! Note that in NEMS - nextsw_cday is not needed in ice_orbital.F90 and what is needed is + ! simply a CPP variable declaration of NEMSCOUPLED + + runtype = 'initial' ! determined from the namelist in ice_init if CESMCOUPLED is not defined + end if + + ! Determine single column info + call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) single_column + else + single_column = .false. + end if + if (single_column) then + ! Must have these attributes present + call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlon + call NUOPC_CompAttributeGet(gcomp, name='scmlat', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlat + end if + + ! Determine runid + call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) runid + else + runid = 'unknown' ! read in from the namelist in ice_init.F90 if CESMCOUPLED is not defined + end if + + ! Determine tfreeze_option, flux convergence before call to cice_init + call NUOPC_CompAttributeGet(gcomp, name="tfreeze_option", value=tfrz_option, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (.not. isPresent) then + tfrz_option = 'linear_salt' ! TODO: is this right?
This must be the same as mom is using for the calculation. + end if + call icepack_init_parameters(tfrz_option_in=tfrz_option) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + +#if (defined NEWCODE) + call NUOPC_CompAttributeGet(gcomp, name="flux_convergence", value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) flux_convergence_tolerance + else + flux_convergence_tolerance = 0._r8 + end if + + call NUOPC_CompAttributeGet(gcomp, name="flux_max_iteration", value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) flux_convergence_max_iteration + else + flux_convergence_max_iteration = 5 + end if + + call NUOPC_CompAttributeGet(gcomp, name="coldair_outbreak_mod", value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) use_coldair_outbreak_mod + else + use_coldair_outbreak_mod = .false. + end if +#endif + + ! 
Get clock information before call to cice_init + + call ESMF_ClockGet( clock, & + currTime=currTime, startTime=startTime, stopTime=stopTime, refTime=RefTime, & + timeStep=timeStep, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_TimeGet( currTime, yy=yy, mm=mm, dd=dd, s=curr_tod, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call shr_cal_ymd2date(yy,mm,dd,curr_ymd) + + call ESMF_TimeGet( startTime, yy=yy, mm=mm, dd=dd, s=start_tod, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call shr_cal_ymd2date(yy,mm,dd,start_ymd) + + call ESMF_TimeGet( stopTime, yy=yy, mm=mm, dd=dd, s=stop_tod, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call shr_cal_ymd2date(yy,mm,dd,stop_ymd) + + call ESMF_TimeGet( refTime, yy=yy, mm=mm, dd=dd, s=ref_tod, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call shr_cal_ymd2date(yy,mm,dd,ref_ymd) + + call ESMF_TimeIntervalGet( timeStep, s=dtime, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + dt = real(dtime) + + call ESMF_TimeGet( currTime, calkindflag=esmf_caltype, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (esmf_caltype == ESMF_CALKIND_NOLEAP) then + calendar_type = shr_cal_noleap + else if (esmf_caltype == ESMF_CALKIND_GREGORIAN) then + calendar_type = shr_cal_gregorian + else + call shr_sys_abort( subname//'ERROR:: bad calendar for ESMF' ) + end if + + !---------------------------------------------------------------------------- + ! Set cice logging + !---------------------------------------------------------------------------- + ! Note that sets the nu_diag module variable in ice_fileunits + ! Set the nu_diag_set flag so it's not reset later + + call set_component_logging(gcomp, my_task==master_task, nu_diag, shrlogunit, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + nu_diag_set = .true. + + call shr_file_setLogUnit (shrlogunit) + + !---------------------------------------------------------------------------- + ! 
Initialize cice + !---------------------------------------------------------------------------- + + ! Note that cice_init also sets time manager info as well as mpi communicator info, + ! including master_task and my_task + + call t_startf ('cice_init') + call cice_init( lmpicom ) + call t_stopf ('cice_init') + + !---------------------------------------------------------------------------- + ! reset shr logging to my log file + !---------------------------------------------------------------------------- + + call icepack_query_parameters(ktherm_out=ktherm) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! Now write output to nu_diag - this must happen AFTER call to cice_init + if (localPet == 0) then + write(nu_diag,F00) trim(subname),' cice init nextsw_cday = ',nextsw_cday + write(nu_diag,*) trim(subname),' tfrz_option = ',trim(tfrz_option) + if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then + write(nu_diag,*) trim(subname),' Warning: Using ktherm = 2 and tfrz_option = ', trim(tfrz_option) + endif + write(nu_diag,*) trim(subname),' inst_name = ',trim(inst_name) + write(nu_diag,*) trim(subname),' inst_index = ',inst_index + write(nu_diag,*) trim(subname),' inst_suffix = ',trim(inst_suffix) +#if (defined NEWCODE) + write(nu_diag,*) trim(subname),' flux_convergence = ', flux_convergence_tolerance + write(nu_diag,*) trim(subname),' flux_convergence_max_iteration = ', flux_convergence_max_iteration +#endif + endif + + !--------------------------------------------------------------------------- + ! use EClock to reset calendar information on initial start + !--------------------------------------------------------------------------- + + ! - on initial run + ! - iyear, month and mday obtained from sync clock + ! - time determined from iyear, month and mday + ! - istep0 and istep1 are set to 0 + ! - on restart run + ! 
- istep0, time and time_forc are read from restart file + ! - istep1 is set to istep0 + ! - idate is determined from time via the call to calendar (see below) + + if (runtype == 'initial') then + if (ref_ymd /= start_ymd .or. ref_tod /= start_tod) then + if (my_task == master_task) then + write(nu_diag,*) trim(subname),': ref_ymd ',ref_ymd, ' must equal start_ymd ',start_ymd + write(nu_diag,*) trim(subname),': ref_ymd ',ref_tod, ' must equal start_ymd ',start_tod + end if + end if + + if (my_task == master_task) then + write(nu_diag,*) trim(subname),' idate from sync clock = ', start_ymd + write(nu_diag,*) trim(subname),' tod from sync clock = ', start_tod + write(nu_diag,*) trim(subname),' resetting idate to match sync clock' + end if + idate = curr_ymd + + if (idate < 0) then + if (my_task == master_task) then + write(nu_diag,*) trim(subname),' ERROR curr_ymd,year_init =',curr_ymd,year_init + write(nu_diag,*) trim(subname),' ERROR idate lt zero',idate + end if + call shr_sys_abort(subname//' :: ERROR idate lt zero') + endif + iyear = (idate/10000) ! integer year of basedate + month = (idate-iyear*10000)/100 ! integer month of basedate + mday = idate-iyear*10000-month*100 ! day of month of basedate + + if (my_task == master_task) then + write(nu_diag,*) trim(subname),' curr_ymd = ',curr_ymd + write(nu_diag,*) trim(subname),' cice year_init = ',year_init + write(nu_diag,*) trim(subname),' cice start date = ',idate + write(nu_diag,*) trim(subname),' cice start ymds = ',iyear,month,mday,start_tod + endif + + if (calendar_type /= "GREGORIAN") then + call time2sec(iyear-year_init,month,mday,time) + else + call time2sec(iyear-(year_init-1),month,mday,time) + endif + time = time+start_tod + end if + + call calendar(time) ! update calendar info + if (write_ic) then + call accum_hist(dt) ! write initial conditions + end if + + !--------------------------------------------------------------------------- + ! 
Determine the global index space needed for the distgrid + !--------------------------------------------------------------------------- + + ! number the local grid to get allocation size for gindex_ice + lsize = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + lsize = lsize + 1 + enddo + enddo + enddo + + ! set global index array + allocate(gindex_ice(lsize)) + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + ig = this_block%i_glob(i) + jg = this_block%j_glob(j) + gindex_ice(n) = (jg-1)*nx_global + ig + enddo + enddo + enddo + + ! Determine total number of eliminated blocks globally + globalID = 0 + num_elim_global = 0 ! number of eliminated blocks + num_total_blocks = 0 + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + num_total_blocks = num_total_blocks + 1 + if (distrb_info%blockLocation(globalID) == 0) then + num_elim_global = num_elim_global + 1 + end if + end do + end do + + if (num_elim_global > 0) then + + ! Distribute the eliminated blocks in a round robin fashion amoung processors + num_elim_local = num_elim_global / npes + my_elim_start = num_elim_local*localPet + min(localPet, mod(num_elim_global, npes)) + 1 + if (localPet < mod(num_elim_global, npes)) then + num_elim_local = num_elim_local + 1 + end if + my_elim_end = my_elim_start + num_elim_local - 1 + + ! Determine the number of eliminated gridcells locally + globalID = 0 + num_elim_blocks = 0 ! local number of eliminated blocks + num_elim_gcells = 0 + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + if (distrb_info%blockLocation(globalID) == 0) then + num_elim_blocks = num_elim_blocks + 1 + if (num_elim_blocks >= my_elim_start .and. 
num_elim_blocks <= my_elim_end) then + this_block = get_block(globalID, globalID) + num_elim_gcells = num_elim_gcells + & + (this_block%jhi-this_block%jlo+1) * (this_block%ihi-this_block%ilo+1) + end if + end if + end do + end do + + ! Determine the global index space of the eliminated gridcells + allocate(gindex_elim(num_elim_gcells)) + globalID = 0 + num_elim_gcells = 0 ! local number of eliminated gridcells + num_elim_blocks = 0 ! local number of eliminated blocks + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + if (distrb_info%blockLocation(globalID) == 0) then + this_block = get_block(globalID, globalID) + num_elim_blocks = num_elim_blocks + 1 + if (num_elim_blocks >= my_elim_start .and. num_elim_blocks <= my_elim_end) then + do j=this_block%jlo,this_block%jhi + do i=this_block%ilo,this_block%ihi + num_elim_gcells = num_elim_gcells + 1 + ig = this_block%i_glob(i) + jg = this_block%j_glob(j) + gindex_elim(num_elim_gcells) = (jg-1)*nx_global + ig + end do + end do + end if + end if + end do + end do + + ! create a global index that includes both active and eliminated gridcells + num_ice = size(gindex_ice) + num_elim = size(gindex_elim) + allocate(gindex(num_elim + num_ice)) + do n = 1,num_ice + gindex(n) = gindex_ice(n) + end do + do n = num_ice+1,num_ice+num_elim + gindex(n) = gindex_elim(n-num_ice) + end do + + deallocate(gindex_elim) + + else + + ! No eliminated land blocks + num_ice = size(gindex_ice) + allocate(gindex(num_ice)) + do n = 1,num_ice + gindex(n) = gindex_ice(n) + end do + + end if + + !--------------------------------------------------------------------------- + ! Create distGrid from global index array + !--------------------------------------------------------------------------- + + DistGrid = ESMF_DistGridCreate(arbSeqIndexList=gindex, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + !--------------------------------------------------------------------------- + ! 
Create the CICE mesh + !--------------------------------------------------------------------------- + + ! read in the mesh + call NUOPC_CompAttributeGet(gcomp, name='mesh_ice', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + EMeshTemp = ESMF_MeshCreate(filename=trim(cvalue), fileformat=ESMF_FILEFORMAT_ESMFMESH, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (my_task == master_task) then + write(nu_diag,*)'mesh file for cice domain is ',trim(cvalue) + end if + + ! recreate the mesh using the above distGrid + EMesh = ESMF_MeshCreate(EMeshTemp, elementDistgrid=Distgrid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! obtain mesh lats and lons + call ESMF_MeshGet(Emesh, spatialDim=spatialDim, numOwnedElements=numOwnedElements, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(ownedElemCoords(spatialDim*numOwnedElements)) + allocate(lonMesh(numOwnedElements), latMesh(numOwnedElements)) + call ESMF_MeshGet(Emesh, ownedElemCoords=ownedElemCoords) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + do n = 1,numOwnedElements + lonMesh(n) = ownedElemCoords(2*n-1) + latMesh(n) = ownedElemCoords(2*n) + end do + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! obtain internally generated cice lats and lons for error checks + allocate(lon(lsize)) + allocate(lat(lsize)) + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + lon(n) = tlon(i,j,iblk)*rad_to_deg + lat(n) = tlat(i,j,iblk)*rad_to_deg + enddo + enddo + enddo + + ! error check differences between internally generated lons and those read in + do n = 1,lsize + diff_lon = abs(lonMesh(n) - lon(n)) + if ( (diff_lon > 1.e2 .and. 
abs(diff_lon - 360_r8) > 1.e-1) .or.& + (diff_lon > 1.e-3 .and. diff_lon < 1._r8) ) then + !write(6,100)n,lonMesh(n),lon(n), diff_lon +100 format('ERROR: CICE n, lonmesh(n), lon(n), diff_lon = ',i6,2(f21.13,3x),d21.5) + !call shr_sys_abort() + end if + if (abs(latMesh(n) - lat(n)) > 1.e-1) then + !write(6,101)n,latMesh(n),lat(n), abs(latMesh(n)-lat(n)) +101 format('ERROR: CICE n, latmesh(n), lat(n), diff_lat = ',i6,2(f21.13,3x),d21.5) + !call shr_sys_abort() + end if + end do + + ! deallocate memory + deallocate(ownedElemCoords) + deallocate(lon, lonMesh) + deallocate(lat, latMesh) + + !----------------------------------------------------------------- + ! Realize the actively coupled fields + !----------------------------------------------------------------- + + call ice_realize_fields(gcomp, mesh=Emesh, & + flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + +#ifdef CESMCOUPLED + !----------------------------------------------------------------- + ! Prescribed ice initialization - first get compid + !----------------------------------------------------------------- + + call NUOPC_CompAttributeGet(gcomp, name='MCTID', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) compid ! convert from string to integer + + ! Having this if-defd means that MCT does not need to be build in a NEMS configuration + call ice_prescribed_init(lmpicom, compid, gindex_ice) +#endif + + !----------------------------------------------------------------- + ! 
Create cice export state + !----------------------------------------------------------------- + + call ice_export (exportstate, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call State_SetScalar(dble(nx_global), flds_scalar_index_nx, exportState, & + flds_scalar_name, flds_scalar_num, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call State_SetScalar(dble(ny_global), flds_scalar_index_ny, exportState, & + flds_scalar_name, flds_scalar_num, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! TODO (mvertens, 2018-12-21): fill in iceberg_prognostic as .false. + + if (debug_export > 0 .and. my_task==master_task) then + call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & + idate, sec, nu_diag, rc=rc) + end if + + !-------------------------------- + ! diagnostics + !-------------------------------- + + if (dbug > 1) then + call State_diagnose(exportState,subname//':ES',rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + endif + +#ifdef USE_ESMF_METADATA + convCIM = "CIM" + purpComp = "Model Component Simulation Description" + call ESMF_AttributeAdd(comp, convention=convCIM, purpose=purpComp, rc=rc) + call ESMF_AttributeSet(comp, "ShortName", "CICE", convention=convCIM, purpose=purpComp, rc=rc) + call ESMF_AttributeSet(comp, "LongName", "CICE Model", convention=convCIM, purpose=purpComp, rc=rc) + call ESMF_AttributeSet(comp, "Description", "CICE5", convention=convCIM, purpose=purpComp, rc=rc) + call ESMF_AttributeSet(comp, "ReleaseDate", "TBD", convention=convCIM, purpose=purpComp, rc=rc) + call ESMF_AttributeSet(comp, "ModelType", "Sea Ice", convention=convCIM, purpose=purpComp, rc=rc) + call ESMF_AttributeSet(comp, "Name", "David Bailey", convention=convCIM, purpose=purpComp, rc=rc) + call ESMF_AttributeSet(comp, "EmailAddress", "dbailey@ucar.edu", convention=convCIM, purpose=purpComp, rc=rc) + call ESMF_AttributeSet(comp, "ResponsiblePartyRole", "contact", convention=convCIM, purpose=purpComp, rc=rc) +#endif + + if (dbug > 5) call 
ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + + call t_stopf ('cice_init_total') + + deallocate(gindex_ice) + deallocate(gindex) + + call shr_sys_flush(nu_diag) + + end subroutine InitializeRealize + + !=============================================================================== + + subroutine ModelAdvance(gcomp, rc) + + !--------------------------------------------------------------------------- + ! Run CICE + !--------------------------------------------------------------------------- + + ! Arguments + type(ESMF_GridComp) :: gcomp + integer, intent(out) :: rc + + ! Local variables + type(ESMF_Clock) :: clock + type(ESMF_Alarm) :: alarm + type(ESMF_Time) :: currTime + type(ESMF_Time) :: nextTime + type(ESMF_State) :: importState, exportState + character(ESMF_MAXSTR) :: cvalue + real(r8) :: eccen, obliqr, lambm0, mvelpp + integer :: shrlogunit ! original log unit + integer :: k,n ! index + logical :: stop_now ! .true. ==> stop at the end of this run phase + integer :: ymd ! Current date (YYYYMMDD) + integer :: tod ! Current time of day (sec) + integer :: curr_ymd ! Current date (YYYYMMDD) + integer :: curr_tod ! Current time of day (s) + integer :: yy,mm,dd ! year, month, day, time of day + integer :: ymd_sync ! Sync date (YYYYMMDD) + integer :: yr_sync ! Sync current year + integer :: mon_sync ! Sync current month + integer :: day_sync ! Sync current day + integer :: tod_sync ! Sync current time of day (sec) + character(CL) :: restart_date + character(CL) :: restart_filename + logical :: isPresent + character(*) , parameter :: F00 = "('(ice_comp_nuopc) ',2a,i8,d21.14)" + character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' + !-------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + !-------------------------------- + ! Turn on timers + !-------------------------------- + + call ice_timer_start(timer_total) ! 
time entire run + call t_barrierf('cice_run_total_BARRIER',mpi_comm_ice) + call t_startf ('cice_run_total') + + !-------------------------------- + ! Reset shr logging to my log file + !-------------------------------- + + call shr_file_getLogUnit (shrlogunit) + call shr_file_setLogUnit (nu_diag) + + !-------------------------------- + ! Query the Component for its clock, importState and exportState + !-------------------------------- + + call NUOPC_ModelGet(gcomp, modelClock=clock, importState=importState, exportState=exportState, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + !-------------------------------- + ! Determine time of next atmospheric shortwave calculation + !-------------------------------- + + call State_GetScalar(importState, flds_scalar_index_nextsw_cday, nextsw_cday, & + flds_scalar_name, flds_scalar_num, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (my_task == master_task) then + write(nu_diag,F00) trim(subname),' cice istep, nextsw_cday = ',istep, nextsw_cday + end if + + !-------------------------------- + ! 
Obtain orbital values + !-------------------------------- + call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) eccen + end if + call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) obliqr + end if + call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) lambm0 + end if + call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) mvelpp + end if + + call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, & + lambm0_in=lambm0, obliqr_in=obliqr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !-------------------------------- + ! check that cice internal time is in sync with master clock before timestep update + !-------------------------------- + + ! cice clock + tod = sec + ymd = idate + + ! model clock + call ESMF_ClockGet( clock, currTime=currTime, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeGet( currTime, yy=yr_sync, mm=mon_sync, dd=day_sync, s=tod_sync, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call shr_cal_ymd2date(yr_sync, mon_sync, day_sync, ymd_sync) + + ! error check + if ( (ymd /= ymd_sync) .or. 
(tod /= tod_sync) ) then + if (my_task == master_task) then + write(nu_diag,*)' cice ymd=',ymd ,' cice tod= ',tod + write(nu_diag,*)' sync ymd=',ymd_sync,' sync tod= ',tod_sync + end if + call ESMF_LogWrite(subname//" CICE clock not in sync with ESMF model clock",ESMF_LOGMSG_ERROR) + rc = ESMF_FAILURE + return + end if + + !-------------------------------- + ! Determine if time to write restart + !-------------------------------- + + ! Note this logic triggers off of the component clock rather than the internal cice time + ! The component clock does not get advanced until the end of the loop - not at the beginning + + call ESMF_ClockGetAlarm(clock, alarmname='alarm_restart', alarm=alarm, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (ESMF_AlarmIsRinging(alarm, rc=rc)) then + if (ChkErr(rc,__LINE__,u_FILE_u)) return + force_restart_now = .true. + call ESMF_AlarmRingerOff( alarm, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_ClockGetNextTime(clock, nextTime=nextTime, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeGet(nexttime, yy=yy, mm=mm, dd=dd, s=tod, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + write(restart_date,"(i4.4,a,i2.2,a,i2.2,a,i5.5)") yy, '-', mm, '-',dd,'-',tod + write(restart_filename,'(4a)') trim(restart_dir), trim(restart_file), '.', trim(restart_date) + else + force_restart_now = .false. + endif + + !-------------------------------- + ! Unpack import state + !-------------------------------- + + call t_barrierf('cice_run_import_BARRIER',mpi_comm_ice) + call t_startf ('cice_run_import') + call ice_timer_start(timer_cplrecv) + + call ice_import(importState, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ice_timer_stop(timer_cplrecv) + call t_stopf ('cice_run_import') + + ! write Debug output + if (debug_import > 0 .and. 
my_task==master_task) then + call State_fldDebug(importState, flds_scalar_name, 'cice_import:', & + idate, sec, nu_diag, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + !-------------------------------- + ! Advance cice and timestep update + !-------------------------------- + +!tcraig if (force_restart_now) then +! call CICE_Run(restart_filename=restart_filename) +! else + call CICE_Run() +! end if + + !-------------------------------- + ! Create export state + !-------------------------------- + + call t_barrierf('cice_run_export_BARRIER',mpi_comm_ice) + call t_startf ('cice_run_export') + call ice_timer_start(timer_cplsend) + + call ice_export(exportState, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ice_timer_stop(timer_cplsend) + call t_stopf ('cice_run_export') + + if (debug_export > 0 .and. my_task==master_task) then + call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & + idate, sec, nu_diag, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! reset shr logging to my original values + call shr_file_setLogUnit (shrlogunit) + + !-------------------------------- + ! stop timers and print timer info + !-------------------------------- + ! Need to have this logic here instead of in finalize phase + ! since the finalize phase will still be called even in aqua-planet mode + + !-------------------------------- + ! Determine if time to stop + !-------------------------------- + + call ESMF_ClockGetAlarm(clock, alarmname='alarm_stop', alarm=alarm, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (ESMF_AlarmIsRinging(alarm, rc=rc)) then + if (ChkErr(rc,__LINE__,u_FILE_u)) return + stop_now = .true. + call ESMF_AlarmRingerOff( alarm, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + stop_now = .false. + endif + + call t_stopf ('cice_run_total') + + ! Need to stop this at the end of every run phase in a coupled run. 
+ call ice_timer_stop(timer_total)
+ if (stop_now) then
+ call ice_timer_print_all(stats=.true.) ! print timing information
+ call release_all_fileunits
+ endif
+
+ 105 format( A, 2i8, A, f10.2, A, f10.2, A)
+
+ if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO)
+
+ end subroutine ModelAdvance
+
+ !===============================================================================
+
+ subroutine ModelSetRunClock(gcomp, rc)
+
+ ! input/output variables
+ type(ESMF_GridComp) :: gcomp
+ integer, intent(out) :: rc
+
+ ! local variables
+ type(ESMF_Clock) :: mclock, dclock
+ type(ESMF_Time) :: mcurrtime, dcurrtime
+ type(ESMF_Time) :: mstoptime
+ type(ESMF_TimeInterval) :: mtimestep, dtimestep
+ character(len=256) :: cvalue
+ character(len=256) :: restart_option ! Restart option units
+ integer :: restart_n ! Number until restart interval
+ integer :: restart_ymd ! Restart date (YYYYMMDD)
+ type(ESMF_ALARM) :: restart_alarm
+ character(len=256) :: stop_option ! Stop option units
+ integer :: stop_n ! Number until stop interval
+ integer :: stop_ymd ! Stop date (YYYYMMDD)
+ type(ESMF_ALARM) :: stop_alarm
+ character(len=128) :: name
+ integer :: alarmcount
+ character(len=*),parameter :: subname=trim(modName)//':(ModelSetRunClock) '
+ !--------------------------------
+
+ rc = ESMF_SUCCESS
+ if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO)
+
+ ! query the Component for its clocks
+ call NUOPC_ModelGet(gcomp, driverClock=dclock, modelClock=mclock, rc=rc)
+ if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+ call ESMF_ClockGet(dclock, currTime=dcurrtime, timeStep=dtimestep, rc=rc)
+ if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+ call ESMF_ClockGet(mclock, currTime=mcurrtime, timeStep=mtimestep, rc=rc)
+ if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+ !--------------------------------
+ ! 
force model clock currtime and timestep to match driver and set stoptime + !-------------------------------- + + mstoptime = mcurrtime + dtimestep + call ESMF_ClockSet(mclock, currTime=dcurrtime, timeStep=dtimestep, stopTime=mstoptime, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + !-------------------------------- + ! set restart and stop alarms + !-------------------------------- + + call ESMF_ClockGetAlarmList(mclock, alarmlistflag=ESMF_ALARMLIST_ALL, alarmCount=alarmCount, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (alarmCount == 0) then + + call ESMF_GridCompGet(gcomp, name=name, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_LogWrite(subname//'setting alarms for' // trim(name), ESMF_LOGMSG_INFO) + + !---------------- + ! Restart alarm + !---------------- + call NUOPC_CompAttributeGet(gcomp, name="restart_option", value=restart_option, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call NUOPC_CompAttributeGet(gcomp, name="restart_n", value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) restart_n + + call NUOPC_CompAttributeGet(gcomp, name="restart_ymd", value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) restart_ymd + + call alarmInit(mclock, restart_alarm, restart_option, & + opt_n = restart_n, & + opt_ymd = restart_ymd, & + RefTime = mcurrTime, & + alarmname = 'alarm_restart', rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_AlarmSet(restart_alarm, clock=mclock, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + !---------------- + ! 
Stop alarm + !---------------- + call NUOPC_CompAttributeGet(gcomp, name="stop_option", value=stop_option, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call NUOPC_CompAttributeGet(gcomp, name="stop_n", value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) stop_n + + call NUOPC_CompAttributeGet(gcomp, name="stop_ymd", value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) stop_ymd + + call alarmInit(mclock, stop_alarm, stop_option, & + opt_n = stop_n, & + opt_ymd = stop_ymd, & + RefTime = mcurrTime, & + alarmname = 'alarm_stop', rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_AlarmSet(stop_alarm, clock=mclock, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + end if + + !-------------------------------- + ! Advance model clock to trigger alarms then reset model clock back to currtime + !-------------------------------- + + call ESMF_ClockAdvance(mclock,rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_ClockSet(mclock, currTime=dcurrtime, timeStep=dtimestep, stopTime=mstoptime, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + + end subroutine ModelSetRunClock + + !=============================================================================== + + subroutine ModelFinalize(gcomp, rc) + type(ESMF_GridComp) :: gcomp + integer, intent(out) :: rc + + ! local variables + character(*), parameter :: F00 = "('(ice_comp_nuopc) ',8a)" + character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" + character(len=*),parameter :: subname=trim(modName)//':(ModelFinalize) ' + !-------------------------------- + + !-------------------------------- + ! 
Finalize routine + !-------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + if (my_task == master_task) then + write(nu_diag,F91) + write(nu_diag,F00) 'CICE: end of main integration loop' + write(nu_diag,F91) + end if + + if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + + end subroutine ModelFinalize + +end module ice_comp_nuopc diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 new file mode 100644 index 000000000..0fe2510aa --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -0,0 +1,1799 @@ +module ice_import_export + + use ESMF + use NUOPC + use NUOPC_Model + use shr_sys_mod , only : shr_sys_abort, shr_sys_flush + use shr_frz_mod , only : shr_frz_freezetemp + use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl, cs=>shr_kind_cs + use ice_kinds_mod , only : int_kind, dbl_kind, char_len_long, log_kind + use ice_constants , only : c0, c1, spval_dbl + use ice_constants , only : field_loc_center, field_type_scalar, field_type_vector + use ice_blocks , only : block, get_block, nx_block, ny_block + use ice_domain , only : nblocks, blocks_ice, halo_info, distrb_info + use ice_domain_size , only : nx_global, ny_global, block_size_x, block_size_y, max_blocks, ncat + use ice_exit , only : abort_ice + use ice_flux , only : strairxt, strairyt, strocnxt, strocnyt + use ice_flux , only : alvdr, alidr, alvdf, alidf, Tref, Qref, Uref + use ice_flux , only : flat, fsens, flwout, evap, fswabs, fhocn, fswthru +#if (defined NEWCODE) + use ice_flux , only : fswthruvdr, fswthruvdf, fswthruidr, fswthruidf + use ice_flux , only : send_i2x_per_cat, fswthrun_ai +#endif + use ice_flux , only : fresh, fsalt, zlvl, uatm, vatm, potT, Tair, Qa + use ice_flux , only : rhoa, swvdr, swvdf, swidr, swidf, flw, frain + use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt + use ice_flux , 
only : sss, tf, wind, fsw +#if (defined NEWCODE) + use ice_flux , only : faero_atm, faero_ocn + use ice_flux , only : fiso_atm, fiso_ocn, fiso_rain, fiso_evap + use ice_flux , only : Qa_iso, Qref_iso, HDO_ocn, H2_18O_ocn, H2_16O_ocn +#endif + use ice_state , only : vice, vsno, aice, aicen_init, trcr + use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm, ocn_gridcell_frac + use ice_grid , only : grid_type, t2ugrid_vector + use ice_boundary , only : ice_HaloUpdate + use ice_fileunits , only : nu_diag + use ice_communicate , only : my_task, master_task, MPI_COMM_ICE + use ice_prescribed_mod , only : prescribed_ice + use ice_shr_methods , only : chkerr, state_reset + use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags + use perf_mod , only : t_startf, t_stopf, t_barrierf + + implicit none + public + + public :: ice_advertise_fields + public :: ice_realize_fields + public :: ice_import + public :: ice_export + + private :: fldlist_add + private :: fldlist_realize + private :: state_FldChk + + interface state_getfldptr + module procedure state_getfldptr_1d + module procedure state_getfldptr_2d + module procedure state_getfldptr_3d + module procedure state_getfldptr_4d + end interface state_getfldptr + private :: state_getfldptr + + interface state_getimport + module procedure state_getimport_4d_output + module procedure state_getimport_3d_output + end interface state_getimport + private :: state_getimport + + interface state_setexport + module procedure state_setexport_4d_input + module procedure state_setexport_3d_input + end interface state_setexport + private :: state_setexport + + ! 
Private module data + + type fld_list_type + character(len=128) :: stdname + integer :: ungridded_lbound = 0 + integer :: ungridded_ubound = 0 + end type fld_list_type + + integer, parameter :: fldsMax = 100 + integer :: fldsToIce_num = 0 + integer :: fldsFrIce_num = 0 + type (fld_list_type) :: fldsToIce(fldsMax) + type (fld_list_type) :: fldsFrIce(fldsMax) + type(ESMF_GeomType_Flag) :: geomtype + + integer , parameter :: dbug = 10 ! i/o debug messages + character(*), parameter :: u_FILE_u = & + __FILE__ + +!============================================================================== +contains +!============================================================================== + + subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + + ! input/output variables + type(ESMF_GridComp) :: gcomp + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(out) :: rc + + ! local variables + integer :: n + character(CS) :: stdname + character(CS) :: cvalue + logical :: flds_wiso ! use case + logical :: flds_i2o_per_cat ! .true. => select per ice thickness category + character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)' + !------------------------------------------------------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) flds_wiso + call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) + +#if (defined NEWCODE) + call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) send_i2x_per_cat + call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) +#endif + + !----------------- + ! 
advertise import fields + !----------------- + + call fldlist_add(fldsToIce_num, fldsToIce, trim(flds_scalar_name)) + + ! from ocean + call fldlist_add(fldsToIce_num, fldsToIce, 'sea_surface_slope_zonal' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'sea_surface_slope_merid' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'sea_surface_temperature' ) + call fldlist_add(fldsToIce_num, fldsToIce, 's_surf' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'ocn_current_zonal' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'ocn_current_merid' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'freezing_melting_potential' ) + if (flds_wiso) then + call fldlist_add(fldsToIce_num, fldsToIce, 'So_roce_wiso', ungridded_lbound=1, ungridded_ubound=3) + end if + + ! from atmosphere + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_zonal_wind_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_merid_wind_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_spec_humid_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_temp_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_vis_dir_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_ir_dir_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_vis_dif_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_ir_dif_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_lw_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_prec_rate' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_fprec_rate' ) + + ! from atm - black carbon deposition fluxes (3) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) + + ! 
from atm - wet dust deposition fluxes (4 sizes)
+    call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstwet', ungridded_lbound=1, ungridded_ubound=4)
+
+    ! from atm - dry dust deposition fluxes (4 sizes)
+    call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4)
+
+    do n = 1,fldsToIce_num
+       call NUOPC_Advertise(importState, standardName=fldsToIce(n)%stdname, &
+            TransferOfferGeomObject='will provide', rc=rc)
+       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    end do
+
+    !-----------------
+    ! advertise export fields
+    !-----------------
+
+    call fldlist_add(fldsFrIce_num, fldsFrIce, trim(flds_scalar_name))
+
+    ! ice states
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_mask'                 )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction'             )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'sea_ice_temperature'      )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_ice_volume'          )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_snow_volume'         )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_tref'                  )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref'                  )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_snowh'                 )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_u10'                   )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dir_albedo'  )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dir_albedo'   )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo'  )
+    call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo'   )
+#if (defined NEWCODE)
+    if (send_i2x_per_cat) then
+       call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction_n', &
+            ungridded_lbound=1, ungridded_ubound=ncat)
+    end if
+#endif
+
+    ! 
ice/atm fluxes computed by ice + call fldlist_add(fldsFrIce_num, fldsFrIce, 'stress_on_air_ice_zonal' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'stress_on_air_ice_merid' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_laten_heat_flx_atm_into_ice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sensi_heat_flx_atm_into_ice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_up_lw_flx_ice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_swnet' ) + + ! ice/ocn fluxes computed by ice + call fldlist_add(fldsFrIce_num, fldsFrIce, 'net_heat_flx_to_ocn' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_vis_dir_flx' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_vis_dif_flx' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dir_flx' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dif_flx' ) +#if (defined NEWCODE) + if (send_i2x_per_cat) then + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ifrac_n', & + ungridded_lbound=1, ungridded_ubound=ncat) + end if +#endif + call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_fresh_water_to_ocean_rate' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_salt_rate' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_zonal' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_merid' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcpho' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcphi' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_flxdst' ) + if (flds_wiso) then + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_fresh_water_to_ocean_rate_wiso', & + ungridded_lbound=1, ungridded_ubound=3) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & + ungridded_lbound=1, ungridded_ubound=3) + call 
fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref_wiso', & + ungridded_lbound=1, ungridded_ubound=3) + end if + + do n = 1,fldsFrIce_num + call NUOPC_Advertise(exportState, standardName=fldsFrIce(n)%stdname, & + TransferOfferGeomObject='will provide', rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + enddo + + if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + + end subroutine ice_advertise_fields + +!============================================================================== + + subroutine ice_realize_fields(gcomp, mesh, grid, flds_scalar_name, flds_scalar_num, rc) + + ! input/output variables + type(ESMF_GridComp) :: gcomp + type(ESMF_Mesh) , optional , intent(in) :: mesh + type(ESMF_Grid) , optional , intent(in) :: grid + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(in) :: flds_scalar_num + integer , intent(out) :: rc + + ! local variables + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + character(len=*), parameter :: subname='(ice_import_export:realize_fields)' + !--------------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + call NUOPC_ModelGet(gcomp, importState=importState, exportState=exportState, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (present(mesh)) then + + geomtype = ESMF_GEOMTYPE_MESH + + call fldlist_realize( & + state=ExportState, & + fldList=fldsFrIce, & + numflds=fldsFrIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Export',& + mesh=mesh, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call fldlist_realize( & + state=importState, & + fldList=fldsToIce, & + numflds=fldsToIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Import',& + mesh=mesh, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + else if (present(grid)) then + + geomtype = ESMF_GEOMTYPE_GRID + + call fldlist_realize( & + 
state=ExportState, & + fldList=fldsFrIce, & + numflds=fldsFrIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Export',& + grid=grid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call fldlist_realize( & + state=importState, & + fldList=fldsToIce, & + numflds=fldsToIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Import',& + grid=grid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + end if + + end subroutine ice_realize_fields + + !============================================================================== + + subroutine ice_import( importState, rc ) + + ! input/output variables + type(ESMF_State) , intent(in) :: importState + integer , intent(out) :: rc + + ! local variables + integer,parameter :: nflds=15 + integer,parameter :: nfldv=6 + integer :: i, j, iblk, n + integer :: ilo, ihi, jlo, jhi !beginning and end of physical domain + type(block) :: this_block ! block information for current block + real (kind=dbl_kind),allocatable :: aflds(:,:,:,:) + real (kind=dbl_kind) :: workx, worky + real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP + real (kind=dbl_kind) :: tffresh + character(len=*), parameter :: subname = 'ice_import' + !----------------------------------------------------- + + call icepack_query_parameters(Tffresh_out=Tffresh) +! call icepack_query_parameters(tfrz_option_out=tfrz_option, & +! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & +! Tffresh_out=Tffresh) +! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & +! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & +! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=u_FILE_u, line=__LINE__) + + ! Note that the precipitation fluxes received from the mediator + ! 
are in units of kg/s/m^2 which is what CICE requires.
+    ! Note also that the read in below includes only values needed
+    ! by the thermodynamic component of CICE. Variables uocn, vocn,
+    ! ss_tltx, and ss_tlty are excluded. Also, because the SOM and
+    ! DOM don't compute SSS, SSS is not read in and is left at
+    ! the initialized value (see ice_flux.F init_coupler_flux) of
+    ! 34 ppt
+
+    ! Use aflds to gather the halo updates of multiple fields
+    ! Need to separate the scalar from the vector halo updates
+
+    allocate(aflds(nx_block,ny_block,nflds,nblocks))
+    aflds = c0
+
+    ! import ocean states
+
+    call state_getimport(importState, 'sea_surface_temperature', output=aflds, index=1, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call state_getimport(importState, 's_surf', output=aflds, index=2, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! import atm states
+
+    call state_getimport(importState, 'inst_height_lowest', output=aflds, index=3, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call state_getimport(importState, 'Sa_ptem', output=aflds, index=4, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call state_getimport(importState, 'inst_temp_height_lowest', output=aflds, index=5, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call state_getimport(importState, 'inst_spec_humid_height_lowest', output=aflds, index=6, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call state_getimport(importState, 'air_density_height_lowest', output=aflds, index=7, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! import ocn/ice fluxes
+
+    call state_getimport(importState, 'freezing_melting_potential', output=aflds, index=8, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! 
import atm fluxes + + call state_getimport(importState, 'mean_down_sw_vis_dir_flx', output=aflds, index=9, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_down_sw_ir_dir_flx', output=aflds, index=10, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_down_sw_vis_dif_flx', output=aflds, index=11, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_down_sw_ir_dif_flx', output=aflds, index=12, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_down_lw_flx', output=aflds, index=13, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_prec_rate', output=aflds, index=14, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_fprec_rate', output=aflds, index=15, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! perform a halo update + + if (.not.prescribed_ice) then + call t_startf ('cice_imp_halo') + call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_scalar) + call t_stopf ('cice_imp_halo') + endif + + ! 
now fill in the ice internal data types + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + sst (i,j,iblk) = aflds(i,j, 1,iblk) + sss (i,j,iblk) = aflds(i,j, 2,iblk) + zlvl (i,j,iblk) = aflds(i,j, 3,iblk) + potT (i,j,iblk) = aflds(i,j, 4,iblk) + Tair (i,j,iblk) = aflds(i,j, 5,iblk) + Qa (i,j,iblk) = aflds(i,j, 6,iblk) + rhoa (i,j,iblk) = aflds(i,j, 7,iblk) + frzmlt (i,j,iblk) = aflds(i,j, 8,iblk) + swvdr(i,j,iblk) = aflds(i,j, 9,iblk) + swidr(i,j,iblk) = aflds(i,j,10,iblk) + swvdf(i,j,iblk) = aflds(i,j,11,iblk) + swidf(i,j,iblk) = aflds(i,j,12,iblk) + flw (i,j,iblk) = aflds(i,j,13,iblk) + frain(i,j,iblk) = aflds(i,j,14,iblk) + fsnow(i,j,iblk) = aflds(i,j,15,iblk) + enddo !i + enddo !j + enddo !iblk + !$OMP END PARALLEL DO + + deallocate(aflds) + allocate(aflds(nx_block,ny_block,nfldv,nblocks)) + aflds = c0 + + ! Get velocity fields from ocean and atm and slope fields from ocean + + call state_getimport(importState, 'ocn_current_zonal', output=aflds, index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'ocn_current_merid', output=aflds, index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'inst_zonal_wind_height_lowest', output=aflds, index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'inst_merid_wind_height_lowest', output=aflds, index=4, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'sea_surface_slope_zonal', output=aflds, index=5, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'sea_surface_slope_merid', output=aflds, index=6, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + + if (.not.prescribed_ice) then + call t_startf ('cice_imp_halo') + call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_vector) + call t_stopf ('cice_imp_halo') + endif + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do 
iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + uocn (i,j,iblk) = aflds(i,j, 1,iblk) + vocn (i,j,iblk) = aflds(i,j, 2,iblk) + uatm (i,j,iblk) = aflds(i,j, 3,iblk) + vatm (i,j,iblk) = aflds(i,j, 4,iblk) + ss_tltx(i,j,iblk) = aflds(i,j, 5,iblk) + ss_tlty(i,j,iblk) = aflds(i,j, 6,iblk) + enddo !i + enddo !j + enddo !iblk + !$OMP END PARALLEL DO + + deallocate(aflds) + + !------------------------------------------------------- + ! Get aerosols from mediator + !------------------------------------------------------- + +#if (defined NEWCODE) + if (State_FldChk(importState, 'Faxa_bcph')) then + ! the following indices are based on what the atmosphere is sending + ! bcphidry ungridded_index=1 + ! bcphodry ungridded_index=2 + ! bcphiwet ungridded_index=3 + + ! bcphodry + call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=1, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! bcphidry + bcphiwet + call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=2, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=2, do_sum=.true., ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! Sum over all dry and wet dust fluxes from ath atmosphere + if (State_FldChk(importState, 'Faxa_dstwet') .and. 
State_FldChk(importState, 'Faxa_dstdry')) then + call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=4, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=4, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if +#endif + + !------------------------------------------------------- + ! Water isotopes from the mediator + !------------------------------------------------------- + + ! 16O => ungridded_index=1 + ! 18O => ungridded_index=2 + ! 
HDO => ungridded_index=3 + +#if (defined NEWCODE) + if (State_FldChk(importState, 'shum_wiso')) then + call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=1, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=2, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=3, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=1, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=2, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=3, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'So_roce_wiso', output=HDO_ocn , ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'So_roce_wiso', output=H2_16O_ocn, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'So_roce_wiso', output=H2_18O_ocn, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if +#endif + + 
!----------------------------------------------------------------- + ! rotate zonal/meridional vectors to local coordinates + ! compute data derived quantities + !----------------------------------------------------------------- + + ! Vector fields come in on T grid, but are oriented geographically + ! need to rotate to pop-grid FIRST using ANGLET + ! then interpolate to the U-cell centers (otherwise we + ! interpolate across the pole) + ! use ANGLET which is on the T grid ! + + call t_startf ('cice_imp_ocn') + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) + do iblk = 1, nblocks + + do j = 1,ny_block + do i = 1,nx_block + ! ocean + workx = uocn (i,j,iblk) ! currents, m/s + worky = vocn (i,j,iblk) + uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + + worky*sin(ANGLET(i,j,iblk)) + vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & + - workx*sin(ANGLET(i,j,iblk)) + + workx = ss_tltx (i,j,iblk) ! sea sfc tilt, m/m + worky = ss_tlty (i,j,iblk) + ss_tltx(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + + worky*sin(ANGLET(i,j,iblk)) + ss_tlty(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & + - workx*sin(ANGLET(i,j,iblk)) + + sst(i,j,iblk) = sst(i,j,iblk) - Tffresh ! sea sfc temp (C) + + sss(i,j,iblk) = max(sss(i,j,iblk),c0) + enddo + enddo + + ! Use shr_frz_mod for this + Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) + + enddo + !$OMP END PARALLEL DO + call t_stopf ('cice_imp_ocn') + + ! Interpolate ocean dynamics variables from T-cell centers to + ! U-cell centers. + + if (.not.prescribed_ice) then + call t_startf ('cice_imp_t2u') + call t2ugrid_vector(uocn) + call t2ugrid_vector(vocn) + call t2ugrid_vector(ss_tltx) + call t2ugrid_vector(ss_tlty) + call t_stopf ('cice_imp_t2u') + end if + + ! Atmosphere variables are needed in T cell centers in + ! subroutine stability and are interpolated to the U grid + ! later as necessary. 
+ + call t_startf ('cice_imp_atm') + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + + ! atmosphere + workx = uatm(i,j,iblk) ! wind velocity, m/s + worky = vatm(i,j,iblk) + uatm (i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + + worky*sin(ANGLET(i,j,iblk)) ! note uatm, vatm, wind + vatm (i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & ! are on the T-grid here + - workx*sin(ANGLET(i,j,iblk)) + + wind (i,j,iblk) = sqrt(uatm(i,j,iblk)**2 + vatm(i,j,iblk)**2) + fsw (i,j,iblk) = swvdr(i,j,iblk) + swvdf(i,j,iblk) & + + swidr(i,j,iblk) + swidf(i,j,iblk) + enddo + enddo + enddo + !$OMP END PARALLEL DO + call t_stopf ('cice_imp_atm') + + end subroutine ice_import + + !=============================================================================== + + subroutine ice_export( exportState, rc ) + + ! input/output variables + type(ESMF_State), intent(inout) :: exportState + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: i, j, iblk, n ! incides + integer :: n2 ! thickness category index + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + real (kind=dbl_kind) :: workx, worky ! tmps for converting grid + integer (kind=int_kind) :: icells ! number of ocean/ice cells + logical :: flag + integer (kind=int_kind) :: indxi (nx_block*ny_block) ! compressed indices in i + integer (kind=int_kind) :: indxj (nx_block*ny_block) ! compressed indices in i + real (kind=dbl_kind) :: Tsrf (nx_block,ny_block,max_blocks) ! surface temperature + real (kind=dbl_kind) :: tauxa (nx_block,ny_block,max_blocks) ! atmo/ice stress + real (kind=dbl_kind) :: tauya (nx_block,ny_block,max_blocks) ! atm/ice stress + real (kind=dbl_kind) :: tauxo (nx_block,ny_block,max_blocks) ! ice/ocean stress + real (kind=dbl_kind) :: tauyo (nx_block,ny_block,max_blocks) ! 
ice/ocean stress + real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) ! fractional ice area + real (kind=dbl_kind), allocatable :: tempfld(:,:,:) + real (kind=dbl_kind) :: tffresh + character(len=*),parameter :: subname = 'ice_export' + !----------------------------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + call icepack_query_parameters(Tffresh_out=Tffresh) +! call icepack_query_parameters(tfrz_option_out=tfrz_option, & +! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & +! Tffresh_out=Tffresh) +! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & +! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & +! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=u_FILE_u, line=__LINE__) + + !calculate ice thickness from aice and vice. Also + !create Tsrf from the first tracer (trcr) in ice_state.F + + ailohi(:,:,:) = c0 + Tsrf(:,:,:) = c0 + tauxa(:,:,:) = c0 + tauya(:,:,:) = c0 + tauxo(:,:,:) = c0 + tauyo(:,:,:) = c0 + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky, this_block, ilo, ihi, jlo, jhi) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo,jhi + do i = ilo,ihi + ! ice fraction + ailohi(i,j,iblk) = min(aice(i,j,iblk), c1) + + ! surface temperature + Tsrf(i,j,iblk) = Tffresh + trcr(i,j,1,iblk) !Kelvin (original ???) + + ! wind stress (on POP T-grid: convert to lat-lon) + workx = strairxT(i,j,iblk) ! N/m^2 + worky = strairyT(i,j,iblk) ! N/m^2 + tauxa(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) - worky*sin(ANGLET(i,j,iblk)) + tauya(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) + workx*sin(ANGLET(i,j,iblk)) + + ! 
ice/ocean stress (on POP T-grid: convert to lat-lon) + workx = -strocnxT(i,j,iblk) ! N/m^2 + worky = -strocnyT(i,j,iblk) ! N/m^2 + tauxo(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) - worky*sin(ANGLET(i,j,iblk)) + tauyo(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) + workx*sin(ANGLET(i,j,iblk)) + enddo + enddo + enddo + !$OMP END PARALLEL DO + + flag=.false. + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + if (tmask(i,j,iblk) .and. ailohi(i,j,iblk) < c0 ) then + flag = .true. + endif + end do + end do + end do + if (flag) then + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + if (tmask(i,j,iblk) .and. ailohi(i,j,iblk) < c0 ) then + write(nu_diag,*) & + ' (ice) send: ERROR ailohi < 0.0 ',i,j,ailohi(i,j,iblk) + call shr_sys_flush(nu_diag) + endif + end do + end do + end do + endif + + !--------------------------------- + ! Create the export state + !--------------------------------- + + ! Zero out fields with tmask for proper coupler accumulation in ice free areas + call state_reset(exportState, c0, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Create a temporary field + allocate(tempfld(nx_block,ny_block,nblocks)) + + ! 
Fractions and mask + call state_setexport(exportState, 'ice_fraction', input=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (trim(grid_type) == 'latlon') then + call state_setexport(exportState, 'ice_mask', input=ocn_gridcell_frac, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + tempfld(i,j,iblk) = real(nint(hm(i,j,iblk)),kind=dbl_kind) + end do + end do + end do + call state_setexport(exportState, 'ice_mask', input=tempfld, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! ---- + ! States from ice + ! ---- + + ! surface temperature of ice covered portion (degK) + call state_setexport(exportState, 'sea_ice_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! albedo vis dir + call state_setexport(exportState, 'inst_ice_vis_dir_albedo', input=alvdr, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! albedo nir dir + call state_setexport(exportState, 'inst_ice_ir_dir_albedo', input=alidr, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! albedo vis dif + call state_setexport(exportState, 'inst_ice_vis_dif_albedo', input=alvdf, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! albedo nir dif + call state_setexport(exportState, 'inst_ice_ir_dif_albedo', input=alidf, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 10m atm reference wind speed (m/s) + call state_setexport(exportState, 'Si_u10' , input=Uref , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
2m atm reference temperature (K) + call state_setexport(exportState, 'Si_tref' , input=Tref , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 2m atm reference spec humidity (kg/kg) + call state_setexport(exportState, 'Si_qref' , input=Qref , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Snow volume + call state_setexport(exportState, 'mean_snow_volume' , input=vsno , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Ice volume + call state_setexport(exportState, 'mean_ice_volume' , input=vice , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Snow height + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + if ( tmask(i,j,iblk) .and. ailohi(i,j,iblk) > c0 ) then + tempfld(i,j,iblk) = vsno(i,j,iblk)/ailohi(i,j,iblk) + end if + end do + end do + end do + call state_setexport(exportState, 'Si_snowh' , input=tempfld , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! ------ + ! ice/atm fluxes computed by ice + ! ------ + + ! Zonal air/ice stress + call state_setexport(exportState, 'stress_on_air_ice_zonal' , input=tauxa, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Meridional air/ice stress + call state_setexport(exportState, 'stress_on_air_ice_merid' , input=tauya, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Latent heat flux (atm into ice) + call state_setexport(exportState, 'mean_laten_heat_flx_atm_into_ice' , input=flat, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
Sensible heat flux (atm into ice) + call state_setexport(exportState, 'mean_sensi_heat_flx_atm_into_ice' , input=fsens, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! longwave outgoing (upward), average over ice fraction only + call state_setexport(exportState, 'mean_up_lw_flx_ice' , input=flwout, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Evaporative water flux (kg/m^2/s) + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice' , input=evap, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Shortwave flux absorbed in ice and ocean (W/m^2) + call state_setexport(exportState, 'Faii_swnet' , input=fswabs, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! ------ + ! ice/ocn fluxes computed by ice + ! ------ + + ! flux of shortwave through ice to ocean + call state_setexport(exportState, 'mean_sw_pen_to_ocn' , input=fswthru, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + +#if (defined NEWCODE) + ! flux of vis dir shortwave through ice to ocean + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthruvdr, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! flux of vis dif shortwave through ice to ocean + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dif_flx' , input=fswthruvdf, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! flux of ir dir shortwave through ice to ocean + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dir_flx' , input=fswthruidr, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! flux of ir dif shortwave through ice to ocean + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthruidf, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return +#endif + + ! 
heat exchange with ocean + call state_setexport(exportState, 'net_heat_flx_to_ocn' , input=fhocn, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! fresh water to ocean (h2o flux from melting) + call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate' , input=fresh, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! salt to ocean (salt flux from melting) + call state_setexport(exportState, 'mean_salt_rate' , input=fsalt, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! stress n i/o zonal + call state_setexport(exportState, 'stress_on_ocn_ice_zonal' , input=tauxo, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! stress n i/o meridional + call state_setexport(exportState, 'stress_on_ocn_ice_merid' , input=tauyo, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + +#if (defined NEWCODE) + ! ------ + ! optional aerosol fluxes to ocean + ! ------ + + ! hydrophobic bc + if (State_FldChk(exportState, 'Fioi_bcpho')) then + call state_setexport(exportState, 'Fioi_bcpho' , input=faero_ocn, index=1, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! hydrophilic bc + if (State_FldChk(exportState, 'Fioi_bcphi')) then + call state_setexport(exportState, 'Fioi_bcphi' , input=faero_ocn, index=2, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! dust + if (State_FldChk(exportState, 'Fioi_flxdst')) then + call state_setexport(exportState, 'Fioi_flxdst' , input=faero_ocn, index=3, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! ------ + ! optional water isotope fluxes to ocean + ! ------ + + if (State_FldChk(exportState, 'mean_fresh_water_to_ocean_rate_wiso')) then + ! 16O => ungridded_index=1 + ! 18O => ungridded_index=2 + ! 
HDO => ungridded_index=3 + + call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=1, & + lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=2, & + lmask=tmask, ifrac=ailohi, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=3, & + lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! ------ + ! optional water isotope fluxes to atmospehre + ! ------ + + if (State_FldChk(exportState, 'mean_evap_rate_atm_into_ice_wiso')) then + ! Isotope evap to atm + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=1, & + lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=2, & + lmask=tmask, ifrac=ailohi, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=3, & + lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Isotope evap to atm + call state_setexport(exportState, 'Si_qref_wiso' , input=Qref_iso, index=1, & + lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'Si_qref_wiso' , input=Qref_iso, index=2, & + lmask=tmask, ifrac=ailohi, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'Si_qref_wiso' , input=Qref_iso, index=3, & + lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + endif +#endif + + ! 
------ + ! optional short wave penetration to ocean ice category + ! ------ + + ! ice fraction by category + if ( State_FldChk(exportState, 'ice_fraction_n') .and. & + State_FldChk(exportState, 'mean_sw_pen_to_ocn_ifrac_n')) then + do n = 1,ncat + call state_setexport(exportState, 'ice_fraction_n', input=aicen_init, index=n, & + ungridded_index=n, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! penetrative shortwave by category + ! Note: no need zero out pass-through fields over land for benefit of x2oacc fields in cpl hist files since + ! the export state has been zeroed out at the beginning + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ifrac_n', input=aicen_init, index=n, & + lmask=tmask, ifrac=ailohi, ungridded_index=n, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end do + end if + + end subroutine ice_export + + !=============================================================================== + + subroutine fldlist_add(num, fldlist, stdname, ungridded_lbound, ungridded_ubound) + + ! input/output variables + integer , intent(inout) :: num + type(fld_list_type) , intent(inout) :: fldlist(:) + character(len=*) , intent(in) :: stdname + integer, optional , intent(in) :: ungridded_lbound + integer, optional , intent(in) :: ungridded_ubound + + ! local variables + character(len=*), parameter :: subname='(fldlist_add)' + !------------------------------------------------------------------------------- + + ! Set up a list of field information + + num = num + 1 + if (num > fldsMax) then + call shr_sys_abort(trim(subname)//": ERROR num > fldsMax "//trim(stdname)) + endif + fldlist(num)%stdname = trim(stdname) + + if (present(ungridded_lbound) .and. 
present(ungridded_ubound)) then + fldlist(num)%ungridded_lbound = ungridded_lbound + fldlist(num)%ungridded_ubound = ungridded_ubound + end if + + end subroutine fldlist_add + + !=============================================================================== + + subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scalar_num, mesh, grid, tag, rc) + + use NUOPC, only : NUOPC_IsConnected, NUOPC_Realize + use ESMF , only : ESMF_MeshLoc_Element, ESMF_FieldCreate, ESMF_TYPEKIND_R8 + use ESMF , only : ESMF_MAXSTR, ESMF_Field, ESMF_State, ESMF_Mesh, ESMF_StateRemove + use ESMF , only : ESMF_LogFoundError, ESMF_LOGMSG_INFO, ESMF_SUCCESS + use ESMF , only : ESMF_LogWrite, ESMF_LOGMSG_ERROR, ESMF_LOGERR_PASSTHRU + use ESMF , only : ESMF_VM + + ! input/output variables + type(ESMF_State) , intent(inout) :: state + type(fld_list_type) , intent(in) :: fldList(:) + integer , intent(in) :: numflds + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(in) :: flds_scalar_num + character(len=*) , intent(in) :: tag + type(ESMF_Mesh), optional , intent(in) :: mesh + type(ESMF_Grid), optional , intent(in) :: grid + integer , intent(inout) :: rc + + ! local variables + integer :: n + type(ESMF_Field) :: field + character(len=80) :: stdname + character(len=*),parameter :: subname='(ice_import_export:fld_list_realize)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + do n = 1, numflds + stdname = fldList(n)%stdname + if (NUOPC_IsConnected(state, fieldName=stdname)) then + if (stdname == trim(flds_scalar_name)) then + call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected on root pe", & + ESMF_LOGMSG_INFO) + ! 
Create the scalar field + call SetScalarField(field, flds_scalar_name, flds_scalar_num, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + if (present(mesh)) then + call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected using mesh", & + ESMF_LOGMSG_INFO) + ! Create the field + if (fldlist(n)%ungridded_lbound > 0 .and. fldlist(n)%ungridded_ubound > 0) then + field = ESMF_FieldCreate(mesh, ESMF_TYPEKIND_R8, name=stdname, meshloc=ESMF_MESHLOC_ELEMENT, & + ungriddedLbound=(/fldlist(n)%ungridded_lbound/), & + ungriddedUbound=(/fldlist(n)%ungridded_ubound/), & + gridToFieldMap=(/2/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + field = ESMF_FieldCreate(mesh, ESMF_TYPEKIND_R8, name=stdname, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + else if (present(grid)) then + call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected using grid", & + ESMF_LOGMSG_INFO) + if (fldlist(n)%ungridded_lbound > 0 .and. fldlist(n)%ungridded_ubound > 0) then + field = ESMF_FieldCreate(grid, ESMF_TYPEKIND_R8, name=stdname, indexflag=ESMF_INDEX_DELOCAL, & + ungriddedLBound=(/1,1/), ungriddedUBound=(/max_blocks,fldlist(n)%ungridded_ubound/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + field = ESMF_FieldCreate(grid, ESMF_TYPEKIND_R8, name=stdname, indexflag=ESMF_INDEX_DELOCAL, & + ungriddedLBound=(/1/), ungriddedUBound=(/max_blocks/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + else + call ESMF_LogWrite(subname // 'input must be grid or mesh', ESMF_LOGMSG_INFO) + rc = ESMF_FAILURE + return + end if + end if ! if not scalar field + + ! 
NOW call NUOPC_Realize + call NUOPC_Realize(state, field=field, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + if (stdname /= trim(flds_scalar_name)) then + call ESMF_LogWrite(subname // trim(tag) // " Field = "// trim(stdname) // " is not connected.", & + ESMF_LOGMSG_INFO) + call ESMF_StateRemove(state, (/stdname/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + end if + end do + + contains !- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + subroutine SetScalarField(field, flds_scalar_name, flds_scalar_num, rc) + ! ---------------------------------------------- + ! create a field with scalar data on the root pe + ! ---------------------------------------------- + use ESMF, only : ESMF_Field, ESMF_DistGrid, ESMF_Grid + use ESMF, only : ESMF_DistGridCreate, ESMF_GridCreate, ESMF_LogFoundError, ESMF_LOGERR_PASSTHRU + use ESMF, only : ESMF_FieldCreate, ESMF_GridCreate, ESMF_TYPEKIND_R8 + + type(ESMF_Field) , intent(inout) :: field + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(in) :: flds_scalar_num + integer , intent(inout) :: rc + + ! local variables + type(ESMF_Distgrid) :: distgrid + type(ESMF_Grid) :: grid + character(len=*), parameter :: subname='(ice_import_export:SetScalarField)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + ! create a DistGrid with a single index space element, which gets mapped onto DE 0. 
+ distgrid = ESMF_DistGridCreate(minIndex=(/1/), maxIndex=(/1/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + grid = ESMF_GridCreate(distgrid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + field = ESMF_FieldCreate(name=trim(flds_scalar_name), grid=grid, typekind=ESMF_TYPEKIND_R8, & + ungriddedLBound=(/1/), ungriddedUBound=(/flds_scalar_num/), gridToFieldMap=(/2/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + end subroutine SetScalarField + + end subroutine fldlist_realize + + !=============================================================================== + + logical function State_FldChk(State, fldname) + ! ---------------------------------------------- + ! Determine if field is in state + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: State + character(len=*) , intent(in) :: fldname + + ! local variables + type(ESMF_StateItem_Flag) :: itemType + ! ---------------------------------------------- + + call ESMF_StateGet(State, trim(fldname), itemType) + + State_FldChk = (itemType /= ESMF_STATEITEM_NOTFOUND) + + end function State_FldChk + + !=============================================================================== + + subroutine state_getimport_4d_output(state, fldname, output, index, do_sum, ungridded_index, rc) + + ! ---------------------------------------------- + ! Map import state field to output array + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: state + character(len=*) , intent(in) :: fldname + real (kind=dbl_kind) , intent(inout) :: output(:,:,:,:) + integer , intent(in) :: index + logical, optional , intent(in) :: do_sum + integer, optional , intent(in) :: ungridded_index + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: i, j, iblk, n, i1, j1 ! 
incides + real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid + real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid + character(len=*), parameter :: subname='(ice_import_export:state_getimport)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + if (geomtype == ESMF_GEOMTYPE_MESH) then + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! set values of output array + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(do_sum)) then ! do sum + if (present(ungridded_index)) then + output(i,j,index,iblk) = output(i,j,index, iblk) + dataPtr2d(ungridded_index,n) + else + output(i,j,index,iblk) = output(i,j,index, iblk) + dataPtr1d(n) + end if + else ! do not do sum + if (present(ungridded_index)) then + output(i,j,index,iblk) = dataPtr2d(ungridded_index,n) + else + output(i,j,index,iblk) = dataPtr1d(n) + end if + end if + end do + end do + end do + + else if (geomtype == ESMF_GEOMTYPE_GRID) then + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataptr4d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataptr3d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! 
set values of output array + do iblk = 1,nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + i1 = i - ilo + 1 + j1 = j - jlo + 1 + if (present(do_sum)) then + if (present(ungridded_index)) then + output(i,j,index,iblk) = output(i,j,index,iblk) + dataPtr4d(i1,j1,iblk,ungridded_index) + else + output(i,j,index,iblk) = output(i,j,index,iblk) + dataPtr3d(i1,j1,iblk) + end if + else + if (present(ungridded_index)) then + output(i,j,index,iblk) = dataPtr4d(i1,j1,iblk,ungridded_index) + else + output(i,j,index,iblk) = dataPtr3d(i1,j1,iblk) + end if + end if + end do + end do + end do + + end if + + end subroutine state_getimport_4d_output + + !=============================================================================== + + subroutine state_getimport_3d_output(state, fldname, output, do_sum, ungridded_index, rc) + + ! ---------------------------------------------- + ! Map import state field to output array + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: state + character(len=*) , intent(in) :: fldname + real (kind=dbl_kind) , intent(inout) :: output(:,:,:) + logical, optional , intent(in) :: do_sum + integer, optional , intent(in) :: ungridded_index + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: i, j, iblk, n, i1, j1 ! incides + real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid + real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid + character(len=*) , parameter :: subname='(ice_import_export:state_getimport)' + ! 
---------------------------------------------- + + rc = ESMF_SUCCESS + + if (geomtype == ESMF_GEOMTYPE_MESH) then + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! determine output array + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(do_sum) .and. present(ungridded_index)) then + output(i,j,iblk) = output(i,j,iblk) + dataPtr2d(ungridded_index,n) + else if (present(do_sum)) then + output(i,j,iblk) = output(i,j,iblk) + dataPtr1d(n) + else if (present(ungridded_index)) then + output(i,j,iblk) = dataPtr2d(ungridded_index,n) + else + output(i,j,iblk) = dataPtr1d(n) + end if + end do + end do + end do + + else if (geomtype == ESMF_GEOMTYPE_GRID) then + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataptr4d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataptr3d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! set values of output array + do iblk = 1,nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + i1 = i - ilo + 1 + j1 = j - jlo + 1 + if (present(do_sum) .and. 
present(ungridded_index)) then + output(i,j,iblk) = output(i,j,iblk) + dataPtr4d(i1,j1,iblk,ungridded_index) + else if (present(do_sum)) then + output(i,j,iblk) = output(i,j,iblk) + dataPtr3d(i1,j1,iblk) + else if (present(ungridded_index)) then + output(i,j,iblk) = dataPtr4d(i1,j1,iblk, ungridded_index) + else + output(i,j,iblk) = dataPtr3d(i1,j1,iblk) + end if + end do + end do + end do + + end if + + end subroutine state_getimport_3d_output + + !=============================================================================== + + subroutine state_setexport_4d_input(state, fldname, input, index, lmask, ifrac, ungridded_index, rc) + + ! ---------------------------------------------- + ! Map 4d input array to export state field + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(inout) :: state + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , intent(in) :: input(:,:,:,:) + integer , intent(in) :: index + logical , optional, intent(in) :: lmask(:,:,:) + real(kind=dbl_kind) , optional, intent(in) :: ifrac(:,:,:) + integer , optional, intent(in) :: ungridded_index + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: i, j, iblk, n, i1, j1 ! indices + real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid + real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid + character(len=*), parameter :: subname='(ice_import_export:state_setexport)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + if (geomtype == ESMF_GEOMTYPE_MESH) then + + if (present(ungridded_index)) then + write(6,*)'DEBUG: fldname = ',trim(fldname),' has ungridded index= ',ungridded_index + end if + + ! 
get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! set values of field pointer + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(lmask) .and. present(ifrac)) then + if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then + if (present(ungridded_index)) then + dataPtr2d(ungridded_index,n) = input(i,j,index,iblk) + else + dataPtr1d(n) = input(i,j,index,iblk) + end if + end if + else + if (present(ungridded_index)) then + dataPtr2d(ungridded_index,n) = input(i,j,index,iblk) + else + dataPtr1d(n) = input(i,j,index,iblk) + end if + end if + end do + end do + end do + + else if (geomtype == ESMF_GEOMTYPE_GRID) then + + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataptr4d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataptr3d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + do iblk = 1,nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + i1 = i - ilo + 1 + j1 = j - jlo + 1 + if (present(lmask) .and. present(ifrac)) then + if ( lmask(i,j,iblk) .and. 
ifrac(i,j,iblk) > c0 ) then + if (present(ungridded_index)) then + dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,index,iblk) + end if + else + dataPtr3d(i1,j1,iblk) = input(i,j,index,iblk) + end if + else + if (present(ungridded_index)) then + dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,index,iblk) + else + dataPtr3d(i1,j1,iblk) = input(i,j,index,iblk) + end if + end if + end do + end do + end do + + end if + + end subroutine state_setexport_4d_input + + !=============================================================================== + + subroutine state_setexport_3d_input(state, fldname, input, lmask, ifrac, ungridded_index, rc) + + ! ---------------------------------------------- + ! Map 3d input array to export state field + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(inout) :: state + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , intent(in) :: input(:,:,:) + logical , optional , intent(in) :: lmask(:,:,:) + real(kind=dbl_kind) , optional , intent(in) :: ifrac(:,:,:) + integer , optional , intent(in) :: ungridded_index + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: i, j, iblk, n, i1, j1 ! incides + real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid + real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid + character(len=*), parameter :: subname='(ice_import_export:state_setexport)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + if (geomtype == ESMF_GEOMTYPE_MESH) then + + ! 
get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(lmask) .and. present(ifrac)) then + if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then + if (present(ungridded_index)) then + dataPtr2d(ungridded_index,n) = input(i,j,iblk) + else + dataPtr1d(n) = input(i,j,iblk) + end if + end if + else + if (present(ungridded_index)) then + dataPtr2d(ungridded_index,n) = input(i,j,iblk) + else + dataPtr1d(n) = input(i,j,iblk) + end if + end if + end do + end do + end do + + else if (geomtype == ESMF_GEOMTYPE_GRID) then + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataptr4d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataptr3d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + do iblk = 1,nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + i1 = i - ilo + 1 + j1 = j - jlo + 1 + if (present(lmask) .and. present(ifrac)) then + if ( lmask(i,j,iblk) .and. 
ifrac(i,j,iblk) > c0 ) then + if (present(ungridded_index)) then + dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,iblk) + else + dataPtr3d(i1,j1,iblk) = input(i,j,iblk) + end if + end if + else + if (present(ungridded_index)) then + dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,iblk) + else + dataPtr3d(i1,j1,iblk) = input(i,j,iblk) + end if + end if + end do + end do + end do + + end if + + end subroutine state_setexport_3d_input + + !=============================================================================== + + subroutine State_GetFldPtr_1d(State, fldname, fldptr, rc) + ! ---------------------------------------------- + ! Get pointer to a state field + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: State + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:) + integer, optional , intent(out) :: rc + + ! local variables + type(ESMF_Field) :: lfield + character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_1d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end subroutine State_GetFldPtr_1d + + !=============================================================================== + + subroutine State_GetFldPtr_2d(State, fldname, fldptr, rc) + ! ---------------------------------------------- + ! Get pointer to a state field + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: State + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:,:) + integer , optional , intent(out) :: rc + + ! 
local variables + type(ESMF_Field) :: lfield + character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_2d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end subroutine State_GetFldPtr_2d + + !=============================================================================== + + subroutine State_GetFldPtr_3d(State, fldname, fldptr, rc) + ! ---------------------------------------------- + ! Get pointer to a state field + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: State + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:,:,:) + integer , optional , intent(out) :: rc + + ! local variables + type(ESMF_Field) :: lfield + character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_3d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end subroutine State_GetFldPtr_3d + + !=============================================================================== + + subroutine State_GetFldPtr_4d(State, fldname, fldptr, rc) + ! ---------------------------------------------- + ! Get pointer to a state field + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: State + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:,:,:,:) + integer , optional , intent(out) :: rc + + ! 
local variables + type(ESMF_Field) :: lfield + character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_3d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end subroutine State_GetFldPtr_4d + +end module ice_import_export diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 new file mode 100644 index 000000000..8d994e690 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -0,0 +1,642 @@ +module ice_prescribed_mod + +#ifdef CESMCOUPLED + + ! !DESCRIPTION: + ! The prescribed ice model reads in ice concentration data from a netCDF + ! file. Ice thickness, temperature, the ice temperature profile are + ! prescribed. Air/ice fluxes are computed to get surface temperature, + ! Ice/ocean fluxes are set to zero, and ice dynamics are not calculated. + ! Regridding and data cycling capabilities are included. + + ! 
!USES: + use shr_nl_mod, only : shr_nl_find_group_name + use shr_strdata_mod + use shr_dmodel_mod + use shr_string_mod + use shr_ncread_mod + use shr_sys_mod + use shr_mct_mod + use mct_mod + use pio + + use ice_broadcast + use ice_communicate , only : my_task, master_task, MPI_COMM_ICE + use ice_kinds_mod + use ice_fileunits + use ice_exit , only : abort_ice + use ice_domain_size , only : nx_global, ny_global, ncat, nilyr, nslyr, max_blocks + use ice_constants + use ice_blocks , only : nx_block, ny_block, block, get_block + use ice_domain , only : nblocks, distrb_info, blocks_ice + use ice_grid , only : TLAT, TLON, hm, tmask, tarea, grid_type, ocn_gridcell_frac + use ice_calendar , only : idate, sec, calendar_type + use ice_arrays_column, only : hin_max + use ice_read_write + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_tracer_numbers + use icepack_intfc, only: icepack_query_parameters + + implicit none + private ! except + + ! MEMBER FUNCTIONS: + public :: ice_prescribed_init ! initialize input data stream + public :: ice_prescribed_run ! get time slices and time interp + public :: ice_prescribed_phys ! set prescribed ice state and fluxes + + ! !PUBLIC DATA MEMBERS: + logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice + + integer(SHR_KIND_IN),parameter :: nFilesMaximum = 400 ! max number of files + integer(kind=int_kind) :: stream_year_first ! first year in stream to use + integer(kind=int_kind) :: stream_year_last ! last year in stream to use + integer(kind=int_kind) :: model_year_align ! align stream_year_first + ! 
with this model year + + character(len=char_len_long) :: stream_fldVarName + character(len=char_len_long) :: stream_fldFileName(nFilesMaximum) + character(len=char_len_long) :: stream_domTvarName + character(len=char_len_long) :: stream_domXvarName + character(len=char_len_long) :: stream_domYvarName + character(len=char_len_long) :: stream_domAreaName + character(len=char_len_long) :: stream_domMaskName + character(len=char_len_long) :: stream_domFileName + character(len=char_len_long) :: stream_mapread + logical(kind=log_kind) :: prescribed_ice_fill ! true if data fill required + + type(shr_strdata_type) :: sdat ! prescribed data stream + character(len=char_len_long) :: fldList ! list of fields in data stream + real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! ice cover + +! real (kind=dbl_kind), parameter :: & +! cp_sno = 0.0_dbl_kind & ! specific heat of snow (J/kg/K) +! , rLfi = Lfresh*rhoi & ! latent heat of fusion ice (J/m^3) +! , rLfs = Lfresh*rhos & ! latent heat of fusion snow (J/m^3) +! , rLvi = Lvap*rhoi & ! latent heat of vapor*rhoice (J/m^3) +! , rLvs = Lvap*rhos & ! latent heat of vapor*rhosno (J/m^3) +! , rcpi = cp_ice*rhoi & ! heat capacity of fresh ice (J/m^3) +! , rcps = cp_sno*rhos & ! heat capacity of snow (J/m^3) +! , rcpidepressT = rcpi*depressT & ! param for finding T(z) from q (J/m^3) +! , rLfidepressT = rLfi*depressT ! param for heat capacity (J deg/m^3) +! ! heat capacity of sea ice, rhoi*C=rcpi+rLfidepressT*salinity/T^2 + +!======================================================================= +contains +!=============================================================================== + + subroutine ice_prescribed_init(mpicom, compid, gindex) + + use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys, shr_pio_getioformat + ! !DESCRIPTION: + ! Prescribed ice initialization - needed to + ! work with new shr_strdata module derived type + + ! 
!INPUT/OUTPUT PARAMETERS: + implicit none + include 'mpif.h' + + integer(kind=int_kind), intent(in) :: mpicom + integer(kind=int_kind), intent(in) :: compid + integer(kind=int_kind), intent(in) :: gindex(:) + + !----- Local ------ + type(mct_gsMap) :: gsmap_ice + type(mct_gGrid) :: dom_ice + integer(kind=int_kind) :: lsize + integer(kind=int_kind) :: gsize + integer(kind=int_kind) :: nml_error ! namelist i/o error flag + integer(kind=int_kind) :: n, nFile, ierr + character(len=8) :: fillalgo + character(*),parameter :: subName = '(ice_prescribed_init)' + + namelist /ice_prescribed_nml/ & + prescribed_ice, & + model_year_align, & + stream_year_first , & + stream_year_last , & + stream_fldVarName , & + stream_fldFileName, & + stream_domTvarName, & + stream_domXvarName, & + stream_domYvarName, & + stream_domAreaName, & + stream_domMaskName, & + stream_domFileName, & + stream_mapread, & + prescribed_ice_fill + + ! default values for namelist + prescribed_ice = .false. ! if true, prescribe ice + stream_year_first = 1 ! first year in pice stream to use + stream_year_last = 1 ! last year in pice stream to use + model_year_align = 1 ! align stream_year_first with this model year + stream_fldVarName = 'ice_cov' + stream_fldFileName(:) = ' ' + stream_domTvarName = 'time' + stream_domXvarName = 'lon' + stream_domYvarName = 'lat' + stream_domAreaName = 'area' + stream_domMaskName = 'mask' + stream_domFileName = ' ' + stream_mapread = 'NOT_SET' + prescribed_ice_fill = .false. ! true if pice data fill required + + ! 
read from input file + call get_fileunit(nu_nml) + if (my_task == master_task) then + open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + call shr_nl_find_group_name(nu_nml, 'ice_prescribed_nml', status=nml_error) + if (nml_error == 0) then + read(nu_nml, ice_prescribed_nml, iostat=nml_error) + if (nml_error > 0) then + call shr_sys_abort( 'problem on read of ice_prescribed namelist in ice_prescribed_mod' ) + endif + endif + end if + call release_fileunit(nu_nml) + call broadcast_scalar(prescribed_ice, master_task) + + ! *** If not prescribed ice then return *** + if (.not. prescribed_ice) RETURN + + call broadcast_scalar(model_year_align,master_task) + call broadcast_scalar(stream_year_first,master_task) + call broadcast_scalar(stream_year_last,master_task) + call broadcast_scalar(stream_fldVarName,master_task) + call broadcast_scalar(stream_domTvarName,master_task) + call broadcast_scalar(stream_domXvarName,master_task) + call broadcast_scalar(stream_domYvarName,master_task) + call broadcast_scalar(stream_domAreaName,master_task) + call broadcast_scalar(stream_domMaskName,master_task) + call broadcast_scalar(stream_domFileName,master_task) + call broadcast_scalar(stream_mapread,master_task) + call broadcast_scalar(prescribed_ice_fill,master_task) + call mpi_bcast(stream_fldFileName, len(stream_fldFileName(1))*NFilesMaximum, & + MPI_CHARACTER, 0, MPI_COMM_ICE, ierr) + + nFile = 0 + do n=1,nFilesMaximum + if (stream_fldFileName(n) /= ' ') nFile = nFile + 1 + end do + + ! Read shr_strdata_nml namelist + if (prescribed_ice_fill) then + fillalgo='nn' + else + fillalgo='none' + endif + + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,*) 'This is the prescribed ice coverage option.' 
+ write(nu_diag,*) ' stream_year_first = ',stream_year_first + write(nu_diag,*) ' stream_year_last = ',stream_year_last + write(nu_diag,*) ' model_year_align = ',model_year_align + write(nu_diag,*) ' stream_fldVarName = ',trim(stream_fldVarName) + do n = 1,nFile + write(nu_diag,*) ' stream_fldFileName = ',trim(stream_fldFileName(n)),n + end do + write(nu_diag,*) ' stream_domTvarName = ',trim(stream_domTvarName) + write(nu_diag,*) ' stream_domXvarName = ',trim(stream_domXvarName) + write(nu_diag,*) ' stream_domYvarName = ',trim(stream_domYvarName) + write(nu_diag,*) ' stream_domFileName = ',trim(stream_domFileName) + write(nu_diag,*) ' stream_mapread = ',trim(stream_mapread) + write(nu_diag,*) ' stream_fillalgo = ',trim(fillalgo) + write(nu_diag,*) ' ' + endif + + gsize = nx_global*ny_global + lsize = size(gindex) + call mct_gsMap_init( gsmap_ice, gindex, MPI_COMM_ICE, compid, lsize, gsize) + call ice_prescribed_set_domain( lsize, MPI_COMM_ICE, gsmap_ice, dom_ice ) + + call shr_strdata_create(sdat,name="prescribed_ice", & + mpicom=MPI_COMM_ICE, compid=compid, & + gsmap=gsmap_ice, ggrid=dom_ice, & + nxg=nx_global,nyg=ny_global, & + yearFirst=stream_year_first, & + yearLast=stream_year_last, & + yearAlign=model_year_align, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_domFileName), & + domTvarName=stream_domTvarName, & + domXvarName=stream_domXvarName, & + domYvarName=stream_domYvarName, & + domAreaName=stream_domAreaName, & + domMaskName=stream_domMaskName, & + filePath='', & + filename=stream_fldFileName(1:nFile), & + fldListFile=stream_fldVarName, & + fldListModel=stream_fldVarName, & + fillalgo=trim(fillalgo), & + calendar=trim(calendar_type), & + mapread=trim(stream_mapread)) + + if (my_task == master_task) then + call shr_strdata_print(sdat,'SPRESICE data') + endif + + !----------------------------------------------------------------- + ! 
For one ice category, set hin_max(1) to something big
+ !-----------------------------------------------------------------
+ if (ncat == 1) then
+ hin_max(1) = 999._dbl_kind
+ end if
+ end subroutine ice_prescribed_init
+
+ !=======================================================================
+
+ subroutine ice_prescribed_run(mDateIn, secIn)
+
+ ! !DESCRIPTION:
+ ! Finds two time slices bounding current model time, remaps if necessary
+
+ implicit none
+
+ ! !INPUT/OUTPUT PARAMETERS:
+ integer(kind=int_kind), intent(in) :: mDateIn ! Current model date (yyyymmdd)
+ integer(kind=int_kind), intent(in) :: secIn ! Elapsed seconds on model date
+
+ ! local variables
+ integer(kind=int_kind) :: i,j,n,iblk ! loop indices and counter
+ integer(kind=int_kind) :: ilo,ihi,jlo,jhi ! beginning and end of physical domain
+ type (block) :: this_block
+ real(kind=dbl_kind) :: aice_max ! maximum ice concentration
+ logical, save :: first_time = .true.
+ character(*),parameter :: subName = '(ice_prescribed_run)'
+ character(*),parameter :: F00 = "(a,2g20.13)"
+
+ !------------------------------------------------------------------------
+ ! Interpolate to new ice coverage
+ !------------------------------------------------------------------------
+
+ call shr_strdata_advance(sdat,mDateIn,SecIn,MPI_COMM_ICE,'cice_pice')
+
+ if (first_time) then
+ allocate(ice_cov(nx_block,ny_block,max_blocks))
+ endif
+
+ ice_cov(:,:,:) = c0 ! This initializes ghost cells as well
+
+ n=0
+ do iblk = 1, nblocks
+ this_block = get_block(blocks_ice(iblk),iblk)
+ ilo = this_block%ilo
+ ihi = this_block%ihi
+ jlo = this_block%jlo
+ jhi = this_block%jhi
+
+ do j = jlo, jhi
+ do i = ilo, ihi
+ n = n+1
+ ice_cov(i,j,iblk) = sdat%avs(1)%rAttr(1,n)
+ end do
+ end do
+ end do
+
+ !--------------------------------------------------------------------
+ ! 
Check to see that ice concentration is in fraction, not percent + !-------------------------------------------------------------------- + if (first_time) then + aice_max = maxval(ice_cov) + + if (aice_max > c10) then + write(nu_diag,F00) subname//" ERROR: Ice conc data must be in fraction, aice_max= ",& + aice_max + call abort_ice(subName) + end if + first_time = .false. + end if + + !----------------------------------------------------------------- + ! Set prescribed ice state and fluxes + !----------------------------------------------------------------- + + call ice_prescribed_phys() + + end subroutine ice_prescribed_run + + !=============================================================================== + !BOP =========================================================================== + ! + ! !IROUTINE: ice_prescribed_phys -- set prescribed ice state and fluxes + ! + ! !DESCRIPTION: + ! + ! Set prescribed ice state using input ice concentration; + ! set surface ice temperature to atmospheric value; use + ! linear temperature gradient in ice to ocean temperature. + ! + ! !REVISION HISTORY: + ! 2005-May-23 - J. Schramm - Updated with data models + ! 2004-July - J. Schramm - Modified to allow variable snow cover + ! 2001-May - B. P. Briegleb - Original version + ! + ! !INTERFACE: ------------------------------------------------------------------ + + subroutine ice_prescribed_phys + + ! !USES: + use ice_flux + use ice_state + use icepack_intfc, only : icepack_aggregate + use ice_dyn_evp + implicit none + + !----- Local ------ + integer(kind=int_kind) :: layer ! level index + integer(kind=int_kind) :: nc ! ice category index + integer(kind=int_kind) :: i,j,k ! longitude, latitude and level indices + integer(kind=int_kind) :: iblk + integer(kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, ntrcr + + real(kind=dbl_kind) :: slope ! diff in underlying ocean tmp and ice surface tmp + real(kind=dbl_kind) :: Ti ! ice level temperature + real(kind=dbl_kind) :: Tmlt ! 
ice level melt temperature
+ real(kind=dbl_kind) :: qin_save(nilyr)
+ real(kind=dbl_kind) :: qsn_save(nslyr)
+ real(kind=dbl_kind) :: hi ! ice prescribed (hemispheric) ice thickness
+ real(kind=dbl_kind) :: hs ! snow thickness
+ real(kind=dbl_kind) :: zn ! normalized ice thickness
+ real(kind=dbl_kind) :: salin(nilyr) ! salinity (ppt)
+ real(kind=dbl_kind) :: rad_to_deg, pi, puny
+ real(kind=dbl_kind) :: rhoi, rhos, cp_ice, cp_ocn, lfresh, depressT
+
+ real(kind=dbl_kind), parameter :: nsal = 0.407_dbl_kind
+ real(kind=dbl_kind), parameter :: msal = 0.573_dbl_kind
+ real(kind=dbl_kind), parameter :: saltmax = 3.2_dbl_kind ! max salinity at ice base (ppm)
+ character(*),parameter :: subName = '(ice_prescribed_phys)'
+
+ call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, &
+ nt_qice_out=nt_qice, nt_qsno_out=nt_qsno)
+ call icepack_query_tracer_numbers(ntrcr_out=ntrcr)
+ call icepack_query_parameters(rad_to_deg_out=rad_to_deg, pi_out=pi, &
+ puny_out=puny, rhoi_out=rhoi, rhos_out=rhos, cp_ice_out=cp_ice, cp_ocn_out=cp_ocn, &
+ lfresh_out=lfresh, depressT_out=depressT)
+ call icepack_warnings_flush(nu_diag)
+ if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+ file=__FILE__, line=__LINE__)
+
+ !-----------------------------------------------------------------
+ ! Initialize ice state
+ !-----------------------------------------------------------------
+
+ ! TODO - can we now get rid of the following???
+
+ ! aicen(:,:,:,:) = c0
+ ! vicen(:,:,:,:) = c0
+ ! eicen(:,:,:,:) = c0
+
+ ! do nc=1,ncat
+ ! trcrn(:,:,nt_Tsfc,nc,:) = Tf(:,:,:)
+ ! enddo
+
+ !-----------------------------------------------------------------
+ ! Set ice cover over land to zero, not sure if this should be
+ ! done earlier, before time/spatial interp??????
+ !-----------------------------------------------------------------
+ do iblk = 1,nblocks
+ do j = 1,ny_block
+ do i = 1,nx_block
+ if (tmask(i,j,iblk)) then
+ if (ice_cov(i,j,iblk) .lt. 
eps04) ice_cov(i,j,iblk) = c0 + if (ice_cov(i,j,iblk) .gt. c1) ice_cov(i,j,iblk) = c1 + else + ice_cov(i,j,iblk) = c0 + end if + enddo + enddo + enddo + + do iblk = 1,nblocks + do j = 1,ny_block + do i = 1,nx_block + + if (tmask(i,j,iblk)) then ! Over ocean points + + !-------------------------------------------------------------- + ! Place ice where ice concentration > .0001 + !-------------------------------------------------------------- + + if (ice_cov(i,j,iblk) >= eps04) then + + hi = 0.0_dbl_kind + !---------------------------------------------------------- + ! Set ice thickness in each hemisphere + !---------------------------------------------------------- + if(TLAT(i,j,iblk)*rad_to_deg > 40.0_dbl_kind) then + hi = 2.0_dbl_kind + else if(TLAT(i,j,iblk)*rad_to_deg < -40.0_dbl_kind) then + hi = 1.0_dbl_kind + end if + + !---------------------------------------------------------- + ! All ice in appropriate thickness category + !---------------------------------------------------------- + do nc = 1,ncat + + if(hin_max(nc-1) < hi .and. hi < hin_max(nc)) then + + if (aicen(i,j,nc,iblk) > c0) then + hs = vsnon(i,j,nc,iblk) / aicen(i,j,nc,iblk) + else + hs = c0 + endif + + aicen(i,j,nc,iblk) = ice_cov(i,j,iblk) + vicen(i,j,nc,iblk) = hi*aicen(i,j,nc,iblk) + vsnon(i,j,nc,iblk) = hs*aicen(i,j,nc,iblk) + + !--------------------------------------------------------- + ! 
make linear temp profile and compute enthalpy + !--------------------------------------------------------- + + if (abs(trcrn(i,j,nt_qice,nc,iblk)) < puny) then + + if (aice(i,j,iblk) < puny) & + trcrn(i,j,nt_Tsfc,nc,iblk) = Tf(i,j,iblk) + + slope = Tf(i,j,iblk) - trcrn(i,j,nt_Tsfc,nc,iblk) + do k = 1, nilyr + zn = (real(k,kind=dbl_kind)-p5) / real(nilyr,kind=dbl_kind) + Ti = trcrn(i,j,nt_Tsfc,nc,iblk) + slope*zn + salin(k) = (saltmax/c2)*(c1-cos(pi*zn**(nsal/(msal+zn)))) + Tmlt = -salin(k)*depressT + trcrn(i,j,nt_sice+k-1,nc,iblk) = salin(k) + trcrn(i,j,nt_qice+k-1,nc,iblk) = & + -(rhoi * (cp_ice*(Tmlt-Ti) & + + Lfresh*(c1-Tmlt/Ti) - cp_ocn*Tmlt)) + enddo + + do k=1,nslyr + trcrn(i,j,nt_qsno+k-1,nc,iblk) = & + -rhos*(Lfresh - cp_ice*trcrn(i,j,nt_Tsfc,nc,iblk)) + enddo + + endif ! aice < puny + end if ! hin_max + enddo ! ncat + else + trcrn(i,j,nt_Tsfc,:,iblk) = Tf(i,j,iblk) + aicen(i,j,:,iblk) = c0 + vicen(i,j,:,iblk) = c0 + vsnon(i,j,:,iblk) = c0 + trcrn(i,j,nt_sice:nt_sice+nilyr-1,:,iblk) = c0 + trcrn(i,j,nt_qice:nt_qice+nilyr-1,:,iblk) = c0 + trcrn(i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk) = c0 + end if ! ice_cov >= eps04 + + !-------------------------------------------------------------------- + ! compute aggregate ice state and open water area + !-------------------------------------------------------------------- + call icepack_aggregate (ncat, & + aicen(i,j,:,iblk), & + trcrn(i,j,1:ntrcr,:,iblk), & + vicen(i,j,:,iblk), vsnon(i,j, :,iblk), & + aice (i,j, iblk), & + trcr (i,j,1:ntrcr, iblk), & + vice (i,j, iblk), vsno (i,j, iblk), & + aice0(i,j, iblk), & + ntrcr, & + trcr_depend(1:ntrcr), & + trcr_base(1:ntrcr,:), & + n_trcr_strata(1:ntrcr), & + nt_strata(1:ntrcr,:)) + + end if ! tmask + enddo ! i + enddo ! j + enddo ! iblk + + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + aice_init(i,j,iblk) = aice(i,j,iblk) + enddo + enddo + enddo + + !-------------------------------------------------------------------- + ! 
set non-computed fluxes, ice velocities, ice-ocn stresses to zero + !-------------------------------------------------------------------- + + frzmlt (:,:,:) = c0 + uvel (:,:,:) = c0 + vvel (:,:,:) = c0 + strocnxT (:,:,:) = c0 + strocnyT (:,:,:) = c0 + + !----------------------------------------------------------------- + ! other atm and ocn fluxes + !----------------------------------------------------------------- + call init_flux_atm + call init_flux_ocn + + end subroutine ice_prescribed_phys + + !=============================================================================== + + subroutine ice_prescribed_set_domain( lsize, mpicom, gsmap_i, dom_i ) + + ! Arguments + integer , intent(in) :: lsize + integer , intent(in) :: mpicom + type(mct_gsMap), intent(in) :: gsMap_i + type(mct_ggrid), intent(inout) :: dom_i + + ! Local Variables + integer :: i, j, iblk, n ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + real(dbl_kind), pointer :: data1(:) ! temporary + real(dbl_kind), pointer :: data2(:) ! temporary + real(dbl_kind), pointer :: data3(:) ! temporary + real(dbl_kind), pointer :: data4(:) ! temporary + real(dbl_kind), pointer :: data5(:) ! temporary + real(dbl_kind), pointer :: data6(:) ! temporary + integer , pointer :: idata(:) ! temporary + real(kind=dbl_kind) :: rad_to_deg + type(block) :: this_block ! block information for current block + character(*),parameter :: subName = '(ice_prescribed_set_domain)' + !-------------------------------- + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! Initialize mct domain type + call mct_gGrid_init(GGrid=dom_i, & + CoordChars='lat:lon:hgt', OtherChars='area:aream:mask:frac', lsize=lsize ) + call mct_aVect_zero(dom_i%data) + + ! 
Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT + call mct_gsMap_orderedPoints(gsMap_i, my_task, idata) + call mct_gGrid_importIAttr(dom_i,'GlobGridNum',idata,lsize) + deallocate(idata) + + ! Determine domain (numbering scheme is: West to East and South to North to South pole) + ! Initialize attribute vector with special value + + allocate(data1(lsize)) + allocate(data2(lsize)) + allocate(data3(lsize)) + allocate(data4(lsize)) + allocate(data5(lsize)) + allocate(data6(lsize)) + + data1(:) = -9999.0_dbl_kind + data2(:) = -9999.0_dbl_kind + data3(:) = -9999.0_dbl_kind + data4(:) = -9999.0_dbl_kind + call mct_gGrid_importRAttr(dom_i,"lat" ,data1,lsize) + call mct_gGrid_importRAttr(dom_i,"lon" ,data2,lsize) + call mct_gGrid_importRAttr(dom_i,"area" ,data3,lsize) + call mct_gGrid_importRAttr(dom_i,"aream",data4,lsize) + data5(:) = 0.0_dbl_kind + data6(:) = 0.0_dbl_kind + call mct_gGrid_importRAttr(dom_i,"mask" ,data5,lsize) + call mct_gGrid_importRAttr(dom_i,"frac" ,data6,lsize) + + ! Fill in correct values for domain components + ! 
lat/lon in degrees, area in radians^2, mask is 1 (ocean), 0 (non-ocean) + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + + data1(n) = TLON(i,j,iblk)*rad_to_deg + data2(n) = TLAT(i,j,iblk)*rad_to_deg + data3(n) = tarea(i,j,iblk)/(radius*radius) + + data5(n) = real(nint(hm(i,j,iblk)),kind=dbl_kind) + if (trim(grid_type) == 'latlon') then + data6(n) = ocn_gridcell_frac(i,j,iblk) + else + data6(n) = real(nint(hm(i,j,iblk)),kind=dbl_kind) + end if + + enddo !i + enddo !j + enddo !iblk + call mct_gGrid_importRattr(dom_i,"lon" ,data1,lsize) + call mct_gGrid_importRattr(dom_i,"lat" ,data2,lsize) + call mct_gGrid_importRattr(dom_i,"area",data3,lsize) + call mct_gGrid_importRattr(dom_i,"mask",data5,lsize) + call mct_gGrid_importRattr(dom_i,"frac",data6,lsize) + + deallocate(data1, data2, data3, data4, data5, data6) + + end subroutine ice_prescribed_set_domain + +#endif + +end module ice_prescribed_mod diff --git a/cicecore/drivers/nuopc/cmeps/ice_scam.F90 b/cicecore/drivers/nuopc/cmeps/ice_scam.F90 new file mode 100644 index 000000000..f5280b259 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/ice_scam.F90 @@ -0,0 +1,14 @@ +module ice_scam + + use ice_kinds_mod + + implicit none + + ! single column control variables (only used for latlon grid) + + logical :: single_column ! true => single column mode + real (kind=dbl_kind) scmlat ! single column latitude (degrees) + real (kind=dbl_kind) scmlon ! 
single column longitude (degrees) + +end module ice_scam + diff --git a/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 b/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 new file mode 100644 index 000000000..24a4226e5 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/ice_shr_methods.F90 @@ -0,0 +1,999 @@ +module ice_shr_methods + + use ESMF , only : operator(<), operator(/=), operator(+) + use ESMF , only : operator(-), operator(*) , operator(>=) + use ESMF , only : operator(<=), operator(>), operator(==) + use ESMF , only : ESMF_LOGERR_PASSTHRU, ESMF_LogFoundError, ESMF_LOGMSG_ERROR, ESMF_MAXSTR + use ESMF , only : ESMF_SUCCESS, ESMF_LogWrite, ESMF_LOGMSG_INFO, ESMF_FAILURE + use ESMF , only : ESMF_State, ESMF_StateGet + use ESMF , only : ESMF_Field, ESMF_FieldGet + use ESMF , only : ESMF_GridComp, ESMF_GridCompGet, ESMF_GridCompSet + use ESMF , only : ESMF_GeomType_Flag, ESMF_FieldStatus_Flag + use ESMF , only : ESMF_Mesh, ESMF_MeshGet + use ESMF , only : ESMF_GEOMTYPE_MESH, ESMF_GEOMTYPE_GRID, ESMF_FIELDSTATUS_COMPLETE + use ESMF , only : ESMF_Clock, ESMF_ClockCreate, ESMF_ClockGet, ESMF_ClockSet + use ESMF , only : ESMF_ClockPrint, ESMF_ClockAdvance + use ESMF , only : ESMF_Alarm, ESMF_AlarmCreate, ESMF_AlarmGet, ESMF_AlarmSet + use ESMF , only : ESMF_Calendar, ESMF_CALKIND_NOLEAP, ESMF_CALKIND_GREGORIAN + use ESMF , only : ESMF_Time, ESMF_TimeGet, ESMF_TimeSet + use ESMF , only : ESMF_TimeInterval, ESMF_TimeIntervalSet, ESMF_TimeIntervalGet + use ESMF , only : ESMF_VM, ESMF_VMGet, ESMF_VMBroadcast, ESMF_VMGetCurrent + use NUOPC , only : NUOPC_CompAttributeGet + use NUOPC_Model , only : NUOPC_ModelGet + use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl, cs=>shr_kind_cs + use shr_sys_mod , only : shr_sys_abort + use shr_file_mod , only : shr_file_setlogunit, shr_file_getLogUnit + + implicit none + private + + public :: memcheck + public :: get_component_instance + public :: set_component_logging + public :: log_clock_advance + public :: 
state_getscalar + public :: state_setscalar + public :: state_reset + public :: state_flddebug + public :: state_diagnose + public :: alarmInit + public :: chkerr + + private :: timeInit + private :: field_getfldptr + + ! Clock and alarm options + character(len=*), private, parameter :: & + optNONE = "none" , & + optNever = "never" , & + optNSteps = "nsteps" , & + optNStep = "nstep" , & + optNSeconds = "nseconds" , & + optNSecond = "nsecond" , & + optNMinutes = "nminutes" , & + optNMinute = "nminute" , & + optNHours = "nhours" , & + optNHour = "nhour" , & + optNDays = "ndays" , & + optNDay = "nday" , & + optNMonths = "nmonths" , & + optNMonth = "nmonth" , & + optNYears = "nyears" , & + optNYear = "nyear" , & + optMonthly = "monthly" , & + optYearly = "yearly" , & + optDate = "date" , & + optIfdays0 = "ifdays0" + + ! Module data + integer, parameter :: SecPerDay = 86400 ! Seconds per day + integer, parameter :: memdebug_level=1 + character(len=1024) :: msgString + character(len=*), parameter :: u_FILE_u = & + __FILE__ + +!=============================================================================== +contains +!=============================================================================== + + subroutine memcheck(string, level, mastertask) + + ! input/output variables + character(len=*) , intent(in) :: string + integer , intent(in) :: level + logical , intent(in) :: mastertask + + ! local variables + integer :: ierr + integer, external :: GPTLprint_memusage + character(len=*), parameter :: subname='(memcheck)' + !----------------------------------------------------------------------- + + if ((mastertask .and. memdebug_level > level) .or. memdebug_level > level+1) then + ierr = GPTLprint_memusage(string) + endif + + end subroutine memcheck + +!=============================================================================== + + subroutine get_component_instance(gcomp, inst_suffix, inst_index, rc) + + ! 
input/output variables + type(ESMF_GridComp) :: gcomp + character(len=*) , intent(out) :: inst_suffix + integer , intent(out) :: inst_index + integer , intent(out) :: rc + + ! local variables + logical :: isPresent + character(len=4) :: cvalue + character(len=*), parameter :: subname='(get_component_instance)' + !----------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + call NUOPC_CompAttributeGet(gcomp, name="inst_suffix", isPresent=isPresent, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (isPresent) then + call NUOPC_CompAttributeGet(gcomp, name="inst_suffix", value=inst_suffix, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + cvalue = inst_suffix(2:) + read(cvalue, *) inst_index + else + inst_suffix = "" + inst_index=1 + endif + + end subroutine get_component_instance + +!=============================================================================== + + subroutine set_component_logging(gcomp, mastertask, logunit, shrlogunit, rc) + + ! input/output variables + type(ESMF_GridComp) :: gcomp + logical, intent(in) :: mastertask + integer, intent(out) :: logunit + integer, intent(out) :: shrlogunit + integer, intent(out) :: rc + + ! 
local variables + character(len=CL) :: diro + character(len=CL) :: logfile + character(len=*), parameter :: subname='(set_component_logging)' + !----------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + shrlogunit = 6 + + if (mastertask) then + call NUOPC_CompAttributeGet(gcomp, name="diro", value=diro, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call NUOPC_CompAttributeGet(gcomp, name="logfile", value=logfile, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + open(newunit=logunit,file=trim(diro)//"/"//trim(logfile)) + else + logUnit = 6 + endif + + call shr_file_setLogUnit (logunit) + + end subroutine set_component_logging + +!=============================================================================== + + subroutine log_clock_advance(clock, component, logunit, rc) + + ! input/output variables + type(ESMF_Clock) :: clock + character(len=*) , intent(in) :: component + integer , intent(in) :: logunit + integer , intent(out) :: rc + + ! local variables + character(len=CL) :: cvalue, prestring + character(len=*), parameter :: subname='(log_clock_advance)' + !----------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + write(prestring, *) "------>Advancing ",trim(component)," from: " + call ESMF_ClockPrint(clock, options="currTime", unit=cvalue, preString=trim(prestring), rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + write(logunit, *) trim(cvalue) + + call ESMF_ClockPrint(clock, options="stopTime", unit=cvalue, & + preString="--------------------------------> to: ", rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + write(logunit, *) trim(cvalue) + + end subroutine log_clock_advance + +!=============================================================================== + + subroutine state_getscalar(state, scalar_id, scalar_value, flds_scalar_name, flds_scalar_num, rc) + + ! ---------------------------------------------- + ! 
Get scalar data from State for a particular name and broadcast it to all other pets + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State), intent(in) :: state + integer, intent(in) :: scalar_id + real(r8), intent(out) :: scalar_value + character(len=*), intent(in) :: flds_scalar_name + integer, intent(in) :: flds_scalar_num + integer, intent(inout) :: rc + + ! local variables + integer :: mytask, ierr, len + type(ESMF_VM) :: vm + type(ESMF_Field) :: field + real(r8), pointer :: farrayptr(:,:) + real(r8) :: tmp(1) + character(len=*), parameter :: subname='(state_getscalar)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_VMGetCurrent(vm, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + call ESMF_VMGet(vm, localPet=mytask, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + call ESMF_StateGet(State, itemName=trim(flds_scalar_name), field=field, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (mytask == 0) then + call ESMF_FieldGet(field, farrayPtr = farrayptr, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (scalar_id < 0 .or. scalar_id > flds_scalar_num) then + call ESMF_LogWrite(trim(subname)//": ERROR in scalar_id", ESMF_LOGMSG_INFO, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=u_FILE_u)) return + endif + tmp(:) = farrayptr(scalar_id,:) + endif + call ESMF_VMBroadCast(vm, tmp, 1, 0, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + scalar_value = tmp(1) + + end subroutine state_getscalar + +!================================================================================ + + subroutine state_setscalar(scalar_value, scalar_id, State, flds_scalar_name, flds_scalar_num, rc) + + ! ---------------------------------------------- + ! Set scalar data from State for a particular name + ! ---------------------------------------------- + + ! 
input/output arguments
+ real(r8), intent(in) :: scalar_value
+ integer, intent(in) :: scalar_id
+ type(ESMF_State), intent(inout) :: State
+ character(len=*), intent(in) :: flds_scalar_name
+ integer, intent(in) :: flds_scalar_num
+ integer, intent(inout) :: rc
+
+ ! local variables
+ integer :: mytask
+ type(ESMF_Field) :: lfield
+ type(ESMF_VM) :: vm
+ real(r8), pointer :: farrayptr(:,:)
+ character(len=*), parameter :: subname='(state_setscalar)'
+ ! ----------------------------------------------
+
+ rc = ESMF_SUCCESS
+
+ call ESMF_VMGetCurrent(vm, rc=rc)
+ if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+ call ESMF_VMGet(vm, localPet=mytask, rc=rc)
+ if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+ call ESMF_StateGet(State, itemName=trim(flds_scalar_name), field=lfield, rc=rc)
+ if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+ if (mytask == 0) then
+ call ESMF_FieldGet(lfield, farrayPtr = farrayptr, rc=rc)
+ if (chkerr(rc,__LINE__,u_FILE_u)) return
+ if (scalar_id < 0 .or. scalar_id > flds_scalar_num) then
+ call ESMF_LogWrite(trim(subname)//": ERROR in scalar_id", ESMF_LOGMSG_INFO)
+ rc = ESMF_FAILURE
+ return
+ endif
+ farrayptr(scalar_id,1) = scalar_value
+ endif
+
+ end subroutine state_setscalar
+
+!===============================================================================
+
+ subroutine state_reset(State, reset_value, rc)
+
+ ! ----------------------------------------------
+ ! Set all fields in State to value
+ ! ----------------------------------------------
+
+ ! input/output variables
+ type(ESMF_State) , intent(inout) :: State
+ real(R8) , intent(in) :: reset_value
+ integer , intent(out) :: rc
+
+ ! local variables
+ integer :: i,j,n
+ type(ESMF_Field) :: lfield
+ integer :: fieldCount
+ integer :: lrank
+ character(ESMF_MAXSTR), allocatable :: lfieldnamelist(:)
+ real(R8), pointer :: fldptr1(:)
+ real(R8), pointer :: fldptr2(:,:)
+ real(R8), parameter :: czero = 0.0_R8
+ character(len=*),parameter :: subname='(state_reset)'
+ ! 
---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_StateGet(State, itemCount=fieldCount, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(lfieldnamelist(fieldCount)) + call ESMF_StateGet(State, itemNameList=lfieldnamelist, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + do n = 1, fieldCount + call ESMF_StateGet(State, itemName=trim(lfieldnamelist(n)), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call field_getfldptr(lfield, fldptr1=fldptr1, fldptr2=fldptr2, rank=lrank, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (lrank == 0) then + ! no local data + elseif (lrank == 1) then + fldptr1 = reset_value + elseif (lrank == 2) then + fldptr2 = reset_value + else + call ESMF_LogWrite(trim(subname)//": ERROR in rank "//trim(lfieldnamelist(n)), ESMF_LOGMSG_ERROR) + rc = ESMF_FAILURE + return + endif + enddo + + deallocate(lfieldnamelist) + + end subroutine state_reset + +!=============================================================================== + + subroutine state_flddebug(state, flds_scalar_name, prefix, ymd, tod, logunit, rc) + + ! input/output variables + type(ESMF_State) :: state + character(len=*) , intent(in) :: flds_scalar_name + character(len=*) , intent(in) :: prefix + integer , intent(in) :: ymd + integer , intent(in) :: tod + integer , intent(in) :: logunit + integer , intent(out) :: rc + + ! local variables + integer :: n, nfld, ungridded_index + integer :: lsize + real(R8), pointer :: dataPtr1d(:) + real(R8), pointer :: dataPtr2d(:,:) + integer :: fieldCount + integer :: ungriddedUBound(1) + integer :: gridToFieldMap(1) + character(len=ESMF_MAXSTR) :: string + type(ESMF_Field) , allocatable :: lfields(:) + integer , allocatable :: dimCounts(:) + character(len=ESMF_MAXSTR) , allocatable :: fieldNameList(:) + character(len=*), parameter :: subname='(state_flddebug)' + !----------------------------------------------------- + + ! 
Determine the list of fields and the dimension count for each field + call ESMF_StateGet(state, itemCount=fieldCount, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + allocate(fieldNameList(fieldCount)) + allocate(lfields(fieldCount)) + allocate(dimCounts(fieldCount)) + + call ESMF_StateGet(state, itemNameList=fieldNameList, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + do nfld=1, fieldCount + call ESMF_StateGet(state, itemName=trim(fieldNameList(nfld)), field=lfields(nfld), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfields(nfld), dimCount=dimCounts(nfld), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end do + + ! Determine local size of field + do nfld=1, fieldCount + if (dimCounts(nfld) == 1) then + call ESMF_FieldGet(lfields(nfld), farrayPtr=dataPtr1d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + lsize = size(dataPtr1d) + exit + end if + end do + + ! Write out debug output + do n = 1,lsize + do nfld=1, fieldCount + if (dimCounts(nfld) == 1) then + call ESMF_FieldGet(lfields(nfld), farrayPtr=dataPtr1d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (trim(fieldNameList(nfld)) /= flds_scalar_name .and. dataPtr1d(n) /= 0.) then + string = trim(prefix) // ' ymd, tod, index, '// trim(fieldNameList(nfld)) //' = ' + write(logunit,100) trim(string), ymd, tod, n, dataPtr1d(n) + end if + else if (dimCounts(nfld) == 2) then + call ESMF_FieldGet(lfields(nfld), ungriddedUBound=ungriddedUBound, gridtoFieldMap=gridToFieldMap, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfields(nfld), farrayPtr=dataPtr2d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + do ungridded_index = 1,ungriddedUBound(1) + if (trim(fieldNameList(nfld)) /= flds_scalar_name) then + string = trim(prefix) // ' ymd, tod, lev, index, '// trim(fieldNameList(nfld)) //' = ' + if (gridToFieldMap(1) == 1) then + if (dataPtr2d(n,ungridded_index) /= 0.) 
then + write(logunit,101) trim(string), ymd, tod, ungridded_index, n, dataPtr2d(n,ungridded_index) + end if + else if (gridToFieldMap(1) == 2) then + if (dataPtr2d(ungridded_index,n) /= 0.) then + write(logunit,101) trim(string), ymd, tod, ungridded_index, n, dataPtr2d(ungridded_index,n) + end if + end if + end if + end do + end if + end do + end do +100 format(a60,3(i8,2x),d21.14) +101 format(a60,4(i8,2x),d21.14) + + deallocate(fieldNameList) + deallocate(lfields) + deallocate(dimCounts) + + end subroutine state_flddebug + +!=============================================================================== + + subroutine state_diagnose(State, string, rc) + + ! ---------------------------------------------- + ! Diagnose status of State + ! ---------------------------------------------- + + type(ESMF_State), intent(in) :: state + character(len=*), intent(in) :: string + integer , intent(out) :: rc + + ! local variables + integer :: i,j,n + type(ESMf_Field) :: lfield + integer :: fieldCount, lrank + character(ESMF_MAXSTR) ,pointer :: lfieldnamelist(:) + real(r8), pointer :: dataPtr1d(:) + real(r8), pointer :: dataPtr2d(:,:) + character(len=*),parameter :: subname='(state_diagnose)' + ! ---------------------------------------------- + + call ESMF_StateGet(state, itemCount=fieldCount, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + allocate(lfieldnamelist(fieldCount)) + + call ESMF_StateGet(state, itemNameList=lfieldnamelist, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + do n = 1, fieldCount + + call ESMF_StateGet(state, itemName=lfieldnamelist(n), field=lfield, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + call field_getfldptr(lfield, fldptr1=dataPtr1d, fldptr2=dataPtr2d, rank=lrank, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (lrank == 0) then + ! 
no local data + elseif (lrank == 1) then + if (size(dataPtr1d) > 0) then + write(msgString,'(A,3g14.7,i8)') trim(string)//': '//trim(lfieldnamelist(n)), & + minval(dataPtr1d), maxval(dataPtr1d), sum(dataPtr1d), size(dataPtr1d) + else + write(msgString,'(A,a)') trim(string)//': '//trim(lfieldnamelist(n))," no data" + endif + elseif (lrank == 2) then + if (size(dataPtr2d) > 0) then + write(msgString,'(A,3g14.7,i8)') trim(string)//': '//trim(lfieldnamelist(n)), & + minval(dataPtr2d), maxval(dataPtr2d), sum(dataPtr2d), size(dataPtr2d) + else + write(msgString,'(A,a)') trim(string)//': '//trim(lfieldnamelist(n))," no data" + endif + else + call ESMF_LogWrite(trim(subname)//": ERROR rank not supported ", ESMF_LOGMSG_ERROR) + rc = ESMF_FAILURE + return + endif + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + enddo + + deallocate(lfieldnamelist) + + end subroutine state_diagnose + +!=============================================================================== + + subroutine field_getfldptr(field, fldptr1, fldptr2, rank, abort, rc) + + ! ---------------------------------------------- + ! for a field, determine rank and return fldptr1 or fldptr2 + ! abort is true by default and will abort if fldptr is not yet allocated in field + ! rank returns 0, 1, or 2. 0 means fldptr not allocated and abort=false + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_Field) , intent(in) :: field + real(r8), pointer , intent(inout), optional :: fldptr1(:) + real(r8), pointer , intent(inout), optional :: fldptr2(:,:) + integer , intent(out) , optional :: rank + logical , intent(in) , optional :: abort + integer , intent(out) , optional :: rc + + ! local variables + type(ESMF_GeomType_Flag) :: geomtype + type(ESMF_FieldStatus_Flag) :: status + type(ESMF_Mesh) :: lmesh + integer :: lrank, nnodes, nelements + logical :: labort + character(len=*), parameter :: subname='(field_getfldptr)' + ! 
---------------------------------------------- + + if (.not.present(rc)) then + call ESMF_LogWrite(trim(subname)//": ERROR rc not present ", & + ESMF_LOGMSG_ERROR, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + return + endif + + rc = ESMF_SUCCESS + + labort = .true. + if (present(abort)) then + labort = abort + endif + lrank = -99 + + call ESMF_FieldGet(field, status=status, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (status /= ESMF_FIELDSTATUS_COMPLETE) then + lrank = 0 + if (labort) then + call ESMF_LogWrite(trim(subname)//": ERROR data not allocated ", ESMF_LOGMSG_INFO, rc=rc) + rc = ESMF_FAILURE + return + else + call ESMF_LogWrite(trim(subname)//": WARNING data not allocated ", ESMF_LOGMSG_INFO, rc=rc) + endif + else + + call ESMF_FieldGet(field, geomtype=geomtype, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (geomtype == ESMF_GEOMTYPE_GRID) then + call ESMF_FieldGet(field, rank=lrank, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + elseif (geomtype == ESMF_GEOMTYPE_MESH) then + call ESMF_FieldGet(field, rank=lrank, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(field, mesh=lmesh, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_MeshGet(lmesh, numOwnedNodes=nnodes, numOwnedElements=nelements, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (nnodes == 0 .and. nelements == 0) lrank = 0 + else + call ESMF_LogWrite(trim(subname)//": ERROR geomtype not supported ", & + ESMF_LOGMSG_INFO, rc=rc) + rc = ESMF_FAILURE + return + endif ! 
geomtype + + if (lrank == 0) then + call ESMF_LogWrite(trim(subname)//": no local nodes or elements ", & + ESMF_LOGMSG_INFO) + elseif (lrank == 1) then + if (.not.present(fldptr1)) then + call ESMF_LogWrite(trim(subname)//": ERROR missing rank=1 array ", & + ESMF_LOGMSG_ERROR, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + return + endif + call ESMF_FieldGet(field, farrayPtr=fldptr1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + elseif (lrank == 2) then + if (.not.present(fldptr2)) then + call ESMF_LogWrite(trim(subname)//": ERROR missing rank=2 array ", & + ESMF_LOGMSG_ERROR, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + return + endif + call ESMF_FieldGet(field, farrayPtr=fldptr2, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + else + call ESMF_LogWrite(trim(subname)//": ERROR in rank ", & + ESMF_LOGMSG_ERROR, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + return + endif + + endif ! status + + if (present(rank)) then + rank = lrank + endif + + end subroutine field_getfldptr + +!=============================================================================== + + subroutine alarmInit( clock, alarm, option, & + opt_n, opt_ymd, opt_tod, RefTime, alarmname, rc) + + ! Setup an alarm in a clock + ! Notes: The ringtime sent to AlarmCreate MUST be the next alarm + ! time. If you send an arbitrary but proper ringtime from the + ! past and the ring interval, the alarm will always go off on the + ! next clock advance and this will cause serious problems. Even + ! if it makes sense to initialize an alarm with some reference + ! time and the alarm interval, that reference time has to be + ! advance forward to be >= the current time. In the logic below + ! we set an appropriate "NextAlarm" and then we make sure to + ! advance it properly based on the ring interval. + + ! input/output variables + type(ESMF_Clock) , intent(inout) :: clock ! clock + type(ESMF_Alarm) , intent(inout) :: alarm ! alarm + character(len=*) , intent(in) :: option ! 
alarm option + integer , optional , intent(in) :: opt_n ! alarm freq + integer , optional , intent(in) :: opt_ymd ! alarm ymd + integer , optional , intent(in) :: opt_tod ! alarm tod (sec) + type(ESMF_Time) , optional , intent(in) :: RefTime ! ref time + character(len=*) , optional , intent(in) :: alarmname ! alarm name + integer , intent(inout) :: rc ! Return code + + ! local variables + type(ESMF_Calendar) :: cal ! calendar + integer :: lymd ! local ymd + integer :: ltod ! local tod + integer :: cyy,cmm,cdd,csec ! time info + character(len=64) :: lalarmname ! local alarm name + logical :: update_nextalarm ! update next alarm + type(ESMF_Time) :: CurrTime ! Current Time + type(ESMF_Time) :: NextAlarm ! Next restart alarm time + type(ESMF_TimeInterval) :: AlarmInterval ! Alarm interval + integer :: sec + character(len=*), parameter :: subname = '(alarmInit): ' + !------------------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + lalarmname = 'alarm_unknown' + if (present(alarmname)) lalarmname = trim(alarmname) + ltod = 0 + if (present(opt_tod)) ltod = opt_tod + lymd = -1 + if (present(opt_ymd)) lymd = opt_ymd + + call ESMF_ClockGet(clock, CurrTime=CurrTime, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + call ESMF_TimeGet(CurrTime, yy=cyy, mm=cmm, dd=cdd, s=csec, rc=rc ) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + ! initial guess of next alarm, this will be updated below + if (present(RefTime)) then + NextAlarm = RefTime + else + NextAlarm = CurrTime + endif + + ! Determine calendar + call ESMF_ClockGet(clock, calendar=cal) + + ! Determine inputs for call to create alarm + selectcase (trim(option)) + + case (optNONE) + call ESMF_TimeIntervalSet(AlarmInterval, yy=9999, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeSet( NextAlarm, yy=9999, mm=12, dd=1, s=0, calendar=cal, rc=rc ) + if (chkerr(rc,__LINE__,u_FILE_u)) return + update_nextalarm = .false. 
+ + case (optNever) + call ESMF_TimeIntervalSet(AlarmInterval, yy=9999, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeSet( NextAlarm, yy=9999, mm=12, dd=1, s=0, calendar=cal, rc=rc ) + if (chkerr(rc,__LINE__,u_FILE_u)) return + update_nextalarm = .false. + + case (optDate) + if (.not. present(opt_ymd)) then + call shr_sys_abort(subname//trim(option)//' requires opt_ymd') + end if + if (lymd < 0 .or. ltod < 0) then + call shr_sys_abort(subname//trim(option)//'opt_ymd, opt_tod invalid') + end if + call ESMF_TimeIntervalSet(AlarmInterval, yy=9999, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call timeInit(NextAlarm, lymd, cal, ltod, rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + update_nextalarm = .false. + + case (optIfdays0) + if (.not. present(opt_ymd)) then + call shr_sys_abort(subname//trim(option)//' requires opt_ymd') + end if + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeSet( NextAlarm, yy=cyy, mm=cmm, dd=opt_n, s=0, calendar=cal, rc=rc ) + if (chkerr(rc,__LINE__,u_FILE_u)) return + update_nextalarm = .true. + + case (optNSteps) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_ClockGet(clock, TimeStep=AlarmInterval, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. 
+ + case (optNStep) + if (.not.present(opt_n)) call shr_sys_abort(subname//trim(option)//' requires opt_n') + if (opt_n <= 0) call shr_sys_abort(subname//trim(option)//' invalid opt_n') + call ESMF_ClockGet(clock, TimeStep=AlarmInterval, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNSeconds) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, s=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNSecond) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, s=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNMinutes) + call ESMF_TimeIntervalSet(AlarmInterval, s=60, rc=rc) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNMinute) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, s=60, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. 
+ + case (optNHours) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, s=3600, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNHour) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, s=3600, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNDays) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, d=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNDay) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, d=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNMonths) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. 
+ + case (optNMonth) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optMonthly) + call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeSet( NextAlarm, yy=cyy, mm=cmm, dd=1, s=0, calendar=cal, rc=rc ) + if (chkerr(rc,__LINE__,u_FILE_u)) return + update_nextalarm = .true. + + case (optNYears) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, yy=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optNYear) + if (.not.present(opt_n)) then + call shr_sys_abort(subname//trim(option)//' requires opt_n') + end if + if (opt_n <= 0) then + call shr_sys_abort(subname//trim(option)//' invalid opt_n') + end if + call ESMF_TimeIntervalSet(AlarmInterval, yy=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + AlarmInterval = AlarmInterval * opt_n + update_nextalarm = .true. + + case (optYearly) + call ESMF_TimeIntervalSet(AlarmInterval, yy=1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeSet( NextAlarm, yy=cyy, mm=1, dd=1, s=0, calendar=cal, rc=rc ) + if (chkerr(rc,__LINE__,u_FILE_u)) return + update_nextalarm = .true. + + case default + call shr_sys_abort(subname//'unknown option '//trim(option)) + + end select + + ! -------------------------------------------------------------------------------- + ! --- AlarmInterval and NextAlarm should be set --- + ! 
-------------------------------------------------------------------------------- + + ! --- advance Next Alarm so it won't ring on first timestep for + ! --- most options above. go back one alarminterval just to be careful + + if (update_nextalarm) then + NextAlarm = NextAlarm - AlarmInterval + do while (NextAlarm <= CurrTime) + NextAlarm = NextAlarm + AlarmInterval + enddo + endif + + alarm = ESMF_AlarmCreate( name=lalarmname, clock=clock, ringTime=NextAlarm, & + ringInterval=AlarmInterval, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + end subroutine alarmInit + +!=============================================================================== + + subroutine timeInit( Time, ymd, cal, tod, rc) + + ! Create the ESMF_Time object corresponding to the given input time, + ! given in YMD (Year Month Day) and TOD (Time-of-day) format. + ! Set the time by an integer as YYYYMMDD and integer seconds in the day + + ! input/output parameters: + type(ESMF_Time) , intent(inout) :: Time ! ESMF time + integer , intent(in) :: ymd ! year, month, day YYYYMMDD + type(ESMF_Calendar) , intent(in) :: cal ! ESMF calendar + integer , intent(in) :: tod ! time of day in seconds + integer , intent(out) :: rc + + ! local variables + integer :: year, mon, day ! year, month, day as integers + integer :: tdate ! temporary date + integer :: date ! coded-date (yyyymmdd) + character(len=*), parameter :: subname='(timeInit)' + !------------------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + if ( (ymd < 0) .or. (tod < 0) .or. 
(tod > SecPerDay) )then + call shr_sys_abort( subname//'ERROR yymmdd is a negative number or time-of-day out of bounds' ) + end if + + tdate = abs(date) + year = int(tdate/10000) + if (date < 0) year = -year + mon = int( mod(tdate,10000)/ 100) + day = mod(tdate, 100) + + call ESMF_TimeSet( Time, yy=year, mm=mon, dd=day, s=tod, calendar=cal, rc=rc ) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + end subroutine timeInit + +!=============================================================================== + + logical function chkerr(rc, line, file) + + integer, intent(in) :: rc + integer, intent(in) :: line + character(len=*), intent(in) :: file + + integer :: lrc + character(len=*), parameter :: subname='(chkerr)' + + chkerr = .false. + lrc = rc + if (ESMF_LogFoundError(rcToCheck=lrc, msg=ESMF_LOGERR_PASSTHRU, line=line, file=file)) then + chkerr = .true. + endif + end function chkerr + +end module ice_shr_methods diff --git a/cicecore/drivers/cice/CICE.F90 b/cicecore/drivers/standalone/cice/CICE.F90 similarity index 100% rename from cicecore/drivers/cice/CICE.F90 rename to cicecore/drivers/standalone/cice/CICE.F90 diff --git a/cicecore/drivers/cice/CICE_FinalMod.F90 b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 similarity index 100% rename from cicecore/drivers/cice/CICE_FinalMod.F90 rename to cicecore/drivers/standalone/cice/CICE_FinalMod.F90 diff --git a/cicecore/drivers/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 similarity index 100% rename from cicecore/drivers/cice/CICE_InitMod.F90 rename to cicecore/drivers/standalone/cice/CICE_InitMod.F90 diff --git a/cicecore/drivers/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 similarity index 100% rename from cicecore/drivers/cice/CICE_RunMod.F90 rename to cicecore/drivers/standalone/cice/CICE_RunMod.F90 diff --git a/cicecore/drivers/cice/CICE_RunMod.F90_debug b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug similarity index 100% rename from 
cicecore/drivers/cice/CICE_RunMod.F90_debug rename to cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90 index 2620aa499..6914c6b1d 100644 --- a/cicecore/shared/ice_fileunits.F90 +++ b/cicecore/shared/ice_fileunits.F90 @@ -62,8 +62,7 @@ module ice_fileunits nu_restart_eap, & ! restart input file for eap dynamics nu_rst_pointer, & ! pointer to latest restart file nu_history , & ! binary history output file - nu_hdr , & ! header file for binary history output - nu_diag ! diagnostics output file + nu_hdr ! header file for binary history output character (32), public :: & nml_filename = 'ice_in' ! namelist input file name @@ -73,6 +72,12 @@ module ice_fileunits ice_stdout = 6, & ! reserved unit for standard output ice_stderr = 6 ! reserved unit for standard error + integer (kind=int_kind), public :: & + nu_diag = ice_stdout ! diagnostics output file, unit number may be overwritten + + logical (kind=log_kind), public :: & + nu_diag_set = .false. ! flag to indicate whether nu_diag is already set + integer (kind=int_kind), public :: & ice_IOUnitsMinUnit = 11, & ! do not use unit numbers below ice_IOUnitsMaxUnit = 99 ! or above, set by setup_nml @@ -99,13 +104,14 @@ subroutine init_fileunits character(len=*),parameter :: subname='(init_fileunits)' - nu_diag = ice_stdout ! default - allocate(ice_IOUnitsInUse(ice_IOUnitsMaxUnit)) ice_IOUnitsInUse = .false. + ice_IOUnitsInUse(ice_stdin) = .true. ! reserve unit 5 ice_IOUnitsInUse(ice_stdout) = .true. ! reserve unit 6 ice_IOUnitsInUse(ice_stderr) = .true. + if (nu_diag >= 1 .and. nu_diag <= ice_IOUnitsMaxUnit) & + ice_IOUnitsInUse(nu_diag) = .true. ! 
reserve unit nu_diag call get_fileunit(nu_grid) call get_fileunit(nu_kmt) diff --git a/configuration/scripts/cice.settings b/configuration/scripts/cice.settings index 0513f1dd1..8bb860916 100755 --- a/configuration/scripts/cice.settings +++ b/configuration/scripts/cice.settings @@ -12,7 +12,7 @@ setenv ICE_OBJDIR ${ICE_RUNDIR}/compile setenv ICE_RSTDIR ${ICE_RUNDIR}/restart setenv ICE_HSTDIR ${ICE_RUNDIR}/history setenv ICE_LOGDIR ${ICE_CASEDIR}/logs -setenv ICE_DRVOPT cice +setenv ICE_DRVOPT standalone/cice setenv ICE_IOTYPE netcdf # set to none if netcdf library is unavailable setenv ICE_CLEANBUILD true setenv ICE_QUIETMODE false diff --git a/doc/source/developer_guide/dg_driver.rst b/doc/source/developer_guide/dg_driver.rst index c5ce1ab6e..db922e48f 100755 --- a/doc/source/developer_guide/dg_driver.rst +++ b/doc/source/developer_guide/dg_driver.rst @@ -7,22 +7,37 @@ Driver and Coupling ==================== The driver and coupling layer is found in **cicecore/drivers/**. The standalone driver is found -under **cicecore/drivers/cice/** and other high level coupling layers are found in other directories. -In general, CICE will build with only one of these drivers, depending how the model is run and -coupled. Within the **cicecore/drivers/cice/** directory, the following files are found, +under **cicecore/drivers/standalone/cice/** and other high level coupling layers are found in other directories. +CICE is designed to build with only one of these drivers at a time, depending how the model is run and coupled. Within the **cicecore/drivers/standalone/cice/** directory, the following files are found, **CICE.F90** is the top level program file and that calls CICE_Initialize, CICE_Run, and CICE_Finalize methods. **CICE_InitMod.F90** contains the CICE_Initialize method and other next level source code. **CICE_RunMod.F90** contains the CICE_Run method and other next level source code. 
**CICE_FinalMod.F90** contains the CICE_Finalize method and other next level source code. -Other **cicecore/drivers/** directories are similarly implemented with a top level coupling layer, -that is largely specified by an external coupled system and then some version of the **CICE_InitMod.F90**, -**CICE_RunMod.F90**, and **CICE_FinalMod.F90** files. +The files provide the top level sequencing for calling the standalone CICE model. + +Adding a New Driver +------------------------ + +The drivers directory contains two levels of subdirectories. The first layer indicates the coupling infrastructure or strategy and the second layer indicates the application or coupler the driver is written for. At the present time, the directory structure is:: + + drivers/direct/hadgem3 + drivers/mct/cesm1 + drivers/nuopc/cmeps + drivers/standalone/cice + +The standalone driver is **drivers/standalone/cice**, and this is the driver used when running with the CICE scripts in standalone mode. New drivers can be added as needed when coupling to new infrastructure or in new applications. We encourage the community to use the drivers directory to facilitate reuse with the understanding that the driver code could also reside in the application. Users should follow the naming strategy as best as possible. Drivers should be added under the appropriate subdirectory indicative of the coupling infrastructure. New subdirectories (such as oasis or esmf) can be added in the future as needed. The community will have to decide when it's appropriate to share drivers between different applications, when to update drivers, and when to create new drivers. There are a number of trade-offs to consider including backwards compatibility with earlier versions of applications, code reuse, and independence. As a general rule, driver directories should not be deleted and names should not be reused to avoid confusion with prior versions that were fundamentally different. 
The number of drivers will likely increase over time as new infrastructure and applications are added and as versions evolve in time. + +The current drivers subdirectories are mct, nuopc, standalone, and direct. The standalone subdirectory contains drivers to run the model in standalone mode as a standalone program. The direct subdirectory contains coupling interfaces that support calling the ice model directly from other models as subroutines. The subdirectory mct contains subdirectories for applications/couplers that provide coupling via mct interfaces. And the subdirectory nuopc contains subdirectories for applications/couplers that provide coupling via nuopc interfaces. + +The varied **cicecore/drivers/** directories are generally implemented similar to the standalone cice case with versions of **CICE_InitMod.F90**, **CICE_RunMod.F90**, and **CICE_FinalMod.F90** files in addition to files consistent with the coupling layer. + +As features are added to the CICE model over time that require changes in the calling sequence, it's possible that all drivers will need to be updated. These kinds of changes are impactful and not taken lightly. It will be up to the community as a whole to work together to maintain the various drivers in these situations. Calling Sequence -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +------------------------ The initialize calling sequence looks something like:: diff --git a/doc/source/developer_guide/dg_dynamics.rst b/doc/source/developer_guide/dg_dynamics.rst index 21790f199..d9cc08841 100755 --- a/doc/source/developer_guide/dg_dynamics.rst +++ b/doc/source/developer_guide/dg_dynamics.rst @@ -3,7 +3,7 @@ .. _dev_dynamics: -Dynamics and Infrastructure +Dynamics ============================ The CICE **cicecore/** directory consists of the non icepack source code. Within that @@ -27,11 +27,8 @@ coupling layers. kinds, and restart capabilities. 
-Dynamics -~~~~~~~~~~~~~~ - Dynamical Solvers -************************ +-------------------- The dynamics solvers are found in **cicecore/cicedynB/dynamics/**. A couple of different solvers are available including EVP, revised EVP, and EAP. The dynamics solver is specified in namelist with the @@ -52,30 +49,30 @@ abort if set. To override the abort, use value 102 for testing. Transport -************** +----------------- The transport (advection) methods are found in **cicecore/cicedynB/dynamics/**. Two methods are supported, upwind and remap. These are set in namelist via the advection variable. Infrastructure -~~~~~~~~~~~~~~~~~~~~ +======================= Kinds -********* +------------------ **cicecore/shared/ice_kinds_mod.F90** defines the kinds datatypes used in CICE. These kinds are used throughout CICE code to define variable types. The CICE kinds are adopted from the kinds defined in Icepack for consistency in interfaces. Constants -************* +------------------ **cicecore/shared/ice_constants.F90** defines several model constants. Some are hardwired parameters while others have internal defaults and can be set thru namelist. Dynamic Array Allocation -************************** +------------------ CICE v5 and earlier was implemented using mainly static arrays and required several CPPs to be set to define grid size, blocks sizes, tracer numbers, and so forth. With CICE v6 and later, arrays are dynamically allocated and those @@ -89,7 +86,7 @@ as they have been migrated to :ref:`tabnamelist` Time Manager -**************** +------------------ Time manager data is module data in **cicecore/shared/ice_calendar.F90**. Much of the time manager data is public and operated on during the model timestepping. 
The model timestepping actually takes @@ -103,7 +100,7 @@ place in the **CICE_RunMod.F90** file which is part of the driver code and tends Communication -******************** +------------------ Two low-level communications packages, mpi and serial, are provided as part of CICE. This software provides a middle layer between the model and the underlying libraries. Only the CICE mpi or @@ -119,7 +116,7 @@ if the number of MPI tasks is set to 1. The serial library allows the model to core or with OpenMP parallelism only without requiring an MPI library. I/O -*********** +------------------ There are three low-level IO packages in CICE, io_netcdf, io_binary, and io_pio. This software provides a middle layer between the model and the underlying IO writing. diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index eea7d8310..bba0b5f46 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -48,7 +48,7 @@ as follows **cicecore/cicedynB/** routines associated with the dynamics core -**cicecore/driver/** +**cicecore/drivers/** top-level CICE drivers and coupling layers **cicecore/shared/** @@ -66,6 +66,9 @@ as follows **cice.setup** main CICE script for creating cases +**dot files** + various files that begin with . and store information about the git repository or other tools. + A case (compile) directory is created upon initial execution of the script **cice.setup** at the user-specified location provided after the -c flag. Executing the command ``./cice.setup -h`` provides helpful information for diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index 4abd52d0a..4dad350e3 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -350,9 +350,19 @@ be modified. 
Porting ------- +There are four basic issues that need to be addressed when porting, and these are addressed in four separate files in the script system, + +- setup of the environment such as compilers, environment variables, and other support software (in **env.[machine]_[environment]**) + +- setup of the Macros file to support the model build (in **Macros.[machine]_[environment]**) + +- setup of the batch submission scripts (in **cice.batch.csh**) + +- setup of the model launch command (in **cice.launch.csh**) + To port, an **env.[machine]_[environment]** and **Macros.[machine]_[environment]** file have to be added to the **configuration/scripts/machines/** directory and the -**configuration/scripts/cice.batch.csh** file needs to be modified. +**configuration/scripts/cice.batch.csh** and **configuration/scripts/cice.launch.csh** files need to be modified. In general, the machine is specified in ``cice.setup`` with ``--mach`` and the environment (compiler) is specified with ``--env``. @@ -365,7 +375,10 @@ and the environment (compiler) is specified with ``--env``. - cd .. to **configuration/scripts/** - Edit the **cice.batch.csh** script to add a section for your machine - with batch settings and job launch settings + with batch settings + +- Edit the **cice.launch.csh** script to add a section for your machine + with job launch settings - Download and untar a forcing dataset to the location defined by ``ICE_MACHINE_INPUTDATA`` in the env file @@ -375,7 +388,7 @@ to carry this out is to create an initial set of changes as described above, the create a case and manually modify the **env.[machine]** file and **Macros.[machine]** file until the case can build and run. 
Then copy the files from the case directory back to **configuration/scripts/machines/** and update -the **configuration/scripts/cice.batch.csh** file, retest, +the **configuration/scripts/cice.batch.csh** and **configuration/scripts/cice.launch.csh** files, retest, and then add and commit the updated machine files to the repository. .. _machvars: