From d6a1d80edea33928118d9e013fb9984432bab4bc Mon Sep 17 00:00:00 2001 From: "David A. Bailey" Date: Thu, 9 Jul 2020 15:25:09 -0600 Subject: [PATCH 01/13] CICE addition of fswthru by components. (#479) * Start of fswthru by components * Remove CESM2 MCT drivers * Updated fswthru by components * Update documentation --- cicecore/cicedynB/general/ice_flux.F90 | 32 ++++++++++++++++- cicecore/cicedynB/general/ice_step_mod.F90 | 34 +++++++++++++++---- .../drivers/nuopc/cmeps/ice_import_export.F90 | 10 +++--- .../drivers/standalone/cice/CICE_RunMod.F90 | 11 ++++-- cicecore/shared/ice_arrays_column.F90 | 8 +++++ cicecore/shared/ice_init_column.F90 | 14 ++++++-- doc/source/cice_index.rst | 4 +++ 7 files changed, 97 insertions(+), 16 deletions(-) diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index 6b16edb77..16abbe162 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -217,7 +217,11 @@ module ice_flux fresh , & ! fresh water flux to ocean (kg/m^2/s) fsalt , & ! salt flux to ocean (kg/m^2/s) fhocn , & ! net heat flux to ocean (W/m^2) - fswthru ! shortwave penetrating to ocean (W/m^2) + fswthru , & ! shortwave penetrating to ocean (W/m^2) + fswthru_vdr , & ! vis dir shortwave penetrating to ocean (W/m^2) + fswthru_vdf , & ! vis dif shortwave penetrating to ocean (W/m^2) + fswthru_idr , & ! nir dir shortwave penetrating to ocean (W/m^2) + fswthru_idf ! nir dif shortwave penetrating to ocean (W/m^2) ! internal @@ -438,6 +442,10 @@ subroutine alloc_flux fsalt (nx_block,ny_block,max_blocks), & ! salt flux to ocean (kg/m^2/s) fhocn (nx_block,ny_block,max_blocks), & ! net heat flux to ocean (W/m^2) fswthru (nx_block,ny_block,max_blocks), & ! shortwave penetrating to ocean (W/m^2) + fswthru_vdr (nx_block,ny_block,max_blocks), & ! vis dir shortwave penetrating to ocean (W/m^2) + fswthru_vdf (nx_block,ny_block,max_blocks), & ! vis dif shortwave penetrating to ocean (W/m^2) + fswthru_idr (nx_block,ny_block,max_blocks), & ! nir dir shortwave penetrating to ocean (W/m^2) + fswthru_idf (nx_block,ny_block,max_blocks), & ! nir dif shortwave penetrating to ocean (W/m^2) scale_factor (nx_block,ny_block,max_blocks), & ! scaling factor for shortwave components strairx_ocn(nx_block,ny_block,max_blocks), & ! stress on ocean by air, x-direction strairy_ocn(nx_block,ny_block,max_blocks), & ! stress on ocean by air, y-direction @@ -684,6 +692,10 @@ subroutine init_coupler_flux fpond (:,:,:) = c0 fhocn (:,:,:) = c0 fswthru (:,:,:) = c0 + fswthru_vdr (:,:,:) = c0 + fswthru_vdf (:,:,:) = c0 + fswthru_idr (:,:,:) = c0 + fswthru_idf (:,:,:) = c0 fresh_da(:,:,:) = c0 ! data assimilation fsalt_da(:,:,:) = c0 flux_bio (:,:,:,:) = c0 ! bgc @@ -783,6 +795,10 @@ subroutine init_flux_ocn fpond (:,:,:) = c0 fhocn (:,:,:) = c0 fswthru (:,:,:) = c0 + fswthru_vdr (:,:,:) = c0 + fswthru_vdf (:,:,:) = c0 + fswthru_idr (:,:,:) = c0 + fswthru_idf (:,:,:) = c0 faero_ocn (:,:,:,:) = c0 fiso_ocn (:,:,:,:) = c0 @@ -978,6 +994,8 @@ subroutine scale_fluxes (nx_block, ny_block, & Tref, Qref, & fresh, fsalt, & fhocn, fswthru, & + fswthru_vdr, fswthru_vdf, & + fswthru_idr, fswthru_idf, & faero_ocn, & alvdr, alidr, & alvdf, alidf, & @@ -1022,6 +1040,10 @@ subroutine scale_fluxes (nx_block, ny_block, & fsalt , & ! salt flux to ocean (kg/m2/s) fhocn , & ! actual ocn/ice heat flx (W/m**2) fswthru , & ! sw radiation through ice bot (W/m**2) + fswthru_vdr , & ! vis dir sw radiation through ice bot (W/m**2) + fswthru_vdf , & ! 
vis dif sw radiation through ice bot (W/m**2) + fswthru_idr , & ! nir dir sw radiation through ice bot (W/m**2) + fswthru_idf , & ! nir dif sw radiation through ice bot (W/m**2) alvdr , & ! visible, direct (fraction) alidr , & ! near-ir, direct (fraction) alvdf , & ! visible, diffuse (fraction) @@ -1090,6 +1112,10 @@ subroutine scale_fluxes (nx_block, ny_block, & fsalt (i,j) = fsalt (i,j) * ar fhocn (i,j) = fhocn (i,j) * ar fswthru (i,j) = fswthru (i,j) * ar + fswthru_vdr (i,j) = fswthru_vdr (i,j) * ar + fswthru_vdf (i,j) = fswthru_vdf (i,j) * ar + fswthru_idr (i,j) = fswthru_idr (i,j) * ar + fswthru_idf (i,j) = fswthru_idf (i,j) * ar alvdr (i,j) = alvdr (i,j) * ar alidr (i,j) = alidr (i,j) * ar alvdf (i,j) = alvdf (i,j) * ar @@ -1118,6 +1144,10 @@ subroutine scale_fluxes (nx_block, ny_block, & fsalt (i,j) = c0 fhocn (i,j) = c0 fswthru (i,j) = c0 + fswthru_vdr (i,j) = c0 + fswthru_vdf (i,j) = c0 + fswthru_idr (i,j) = c0 + fswthru_idf (i,j) = c0 alvdr (i,j) = c0 ! zero out albedo where ice is absent alidr (i,j) = c0 alvdf (i,j) = c0 diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index 2f1a1c75b..333c22cd3 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -78,7 +78,8 @@ subroutine prep_radiation (iblk) use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & alvdr_ai, alvdf_ai, alidr_ai, alidf_ai, & alvdr_init, alvdf_init, alidr_init, alidf_init - use ice_arrays_column, only: fswsfcn, fswintn, fswthrun, & + use ice_arrays_column, only: fswsfcn, fswintn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & fswpenln, Sswabsn, Iswabsn use ice_state, only: aice, aicen use ice_timers, only: ice_timer_start, ice_timer_stop, timer_sw @@ -130,7 +131,12 @@ subroutine prep_radiation (iblk) alvdr_ai = alvdr_ai(i,j, iblk), alvdf_ai = alvdf_ai(i,j, iblk), & alidr_ai = alidr_ai(i,j, iblk), alidf_ai = alidf_ai(i,j, iblk), & fswsfcn = fswsfcn (i,j, :,iblk), fswintn = fswintn (i,j, :,iblk), & - fswthrun = fswthrun(i,j, :,iblk), fswpenln = fswpenln(i,j,:,:,iblk), & + fswthrun = fswthrun(i,j, :,iblk), & + fswthrun_vdr = fswthrun_vdr(i,j, :,iblk), & + fswthrun_vdf = fswthrun_vdf(i,j, :,iblk), & + fswthrun_idr = fswthrun_idr(i,j, :,iblk), & + fswthrun_idf = fswthrun_idf(i,j, :,iblk), & + fswpenln = fswpenln(i,j,:,:,iblk), & Sswabsn = Sswabsn (i,j,:,:,iblk), Iswabsn = Iswabsn (i,j,:,:,iblk)) enddo ! 
i @@ -157,7 +163,8 @@ subroutine step_therm1 (dt, iblk) Cdn_ocn, Cdn_ocn_skin, Cdn_ocn_floe, Cdn_ocn_keel, Cdn_atm_ratio, & Cdn_atm, Cdn_atm_skin, Cdn_atm_floe, Cdn_atm_rdg, Cdn_atm_pond, & hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & - fswsfcn, fswintn, fswthrun, Sswabsn, Iswabsn + fswsfcn, fswintn, Sswabsn, Iswabsn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf use ice_blocks, only: block, get_block, nx_block, ny_block use ice_calendar, only: yday use ice_domain, only: blocks_ice @@ -168,7 +175,8 @@ subroutine step_therm1 (dt, iblk) flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, & frain, Tair, strairxT, strairyT, fsurf, fcondtop, fsens, & flat, fswabs, flwout, evap, evaps, evapi, Tref, Qref, Uref, fresh, fsalt, fhocn, & - fswthru, meltt, melts, meltb, congel, snoice, & + fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + meltt, melts, meltb, congel, snoice, & flatn_f, fsensn_f, fsurfn_f, fcondtopn_f use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn, fiso_atm, fiso_ocn, & Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn @@ -389,6 +397,10 @@ subroutine step_therm1 (dt, iblk) fswsfcn = fswsfcn (i,j,:,iblk), & fswintn = fswintn (i,j,:,iblk), & fswthrun = fswthrun (i,j,:,iblk), & + fswthrun_vdr = fswthrun_vdr (i,j,:,iblk),& + fswthrun_vdf = fswthrun_vdf (i,j,:,iblk),& + fswthrun_idr = fswthrun_idr (i,j,:,iblk),& + fswthrun_idf = fswthrun_idf (i,j,:,iblk),& fswabs = fswabs (i,j, iblk), & flwout = flwout (i,j, iblk), & Sswabsn = Sswabsn (i,j,:,:,iblk), & @@ -405,6 +417,10 @@ subroutine step_therm1 (dt, iblk) fsalt = fsalt (i,j, iblk), & fhocn = fhocn (i,j, iblk), & fswthru = fswthru (i,j, iblk), & + fswthru_vdr = fswthru_vdr (i,j, iblk),& + fswthru_vdf = fswthru_vdf (i,j, iblk),& + fswthru_idr = fswthru_idr (i,j, iblk),& + fswthru_idf = fswthru_idf (i,j, iblk),& flatn_f = flatn_f (i,j,:,iblk), & fsensn_f = fsensn_f (i,j,:,iblk), & fsurfn_f = fsurfn_f (i,j,:,iblk), & @@ -985,7 +1001,8 @@ end subroutine step_dyn_ridge subroutine step_radiation (dt, iblk) use ice_arrays_column, only: ffracn, dhsn, & - fswsfcn, fswintn, fswthrun, fswpenln, Sswabsn, Iswabsn, & + fswsfcn, fswintn, fswpenln, Sswabsn, Iswabsn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & albicen, albsnon, albpndn, & alvdrn, alidrn, alvdfn, alidfn, apeffn, trcrn_sw, snowfracn, & kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, & @@ -1122,7 +1139,12 @@ subroutine step_radiation (dt, iblk) alvdrn =alvdrn (i,j,: ,iblk), alvdfn =alvdfn (i,j,: ,iblk), & alidrn =alidrn (i,j,: ,iblk), alidfn =alidfn (i,j,: ,iblk), & fswsfcn =fswsfcn (i,j,: ,iblk), fswintn =fswintn (i,j,: ,iblk), & - fswthrun =fswthrun (i,j,: ,iblk), fswpenln=fswpenln(i,j,:,:,iblk), & + fswthrun =fswthrun (i,j,: ,iblk), & + fswthrun_vdr =fswthrun_vdr (i,j,: ,iblk), & + fswthrun_vdf =fswthrun_vdf (i,j,: ,iblk), & + fswthrun_idr =fswthrun_idr (i,j,: ,iblk), & + fswthrun_idf =fswthrun_idf (i,j,: ,iblk), & + fswpenln=fswpenln(i,j,:,:,iblk), & Sswabsn =Sswabsn (i,j,:,:,iblk), Iswabsn =Iswabsn (i,j,:,:,iblk), & albicen =albicen (i,j,: ,iblk), albsnon =albsnon (i,j,: ,iblk), & albpndn =albpndn (i,j,: ,iblk), apeffn =apeffn (i,j,: ,iblk), & diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 083283895..765bc3dd8 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -14,7 +14,7 @@ module ice_import_export use ice_flux , only 
: alvdr, alidr, alvdf, alidf, Tref, Qref, Uref use ice_flux , only : flat, fsens, flwout, evap, fswabs, fhocn, fswthru #if (defined NEWCODE) - use ice_flux , only : fswthruvdr, fswthruvdf, fswthruidr, fswthruidf + use ice_flux , only : fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf use ice_flux , only : send_i2x_per_cat, fswthrun_ai use ice_flux , only : faero_atm, faero_ocn use ice_flux , only : fiso_atm, fiso_ocn, fiso_rain, fiso_evap @@ -1007,19 +1007,19 @@ subroutine ice_export( exportState, rc ) #if (defined NEWCODE) ! flux of vis dir shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthruvdr, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of vis dif shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dif_flx' , input=fswthruvdf, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dif_flx' , input=fswthru_vdf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of ir dir shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dir_flx' , input=fswthruidr, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dir_flx' , input=fswthru_idr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of ir dif shortwave through ice to ocean - call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthruidf, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthru_idf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return #endif diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 7645c43f3..363749e8f 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -356,7 +356,9 @@ subroutine coupling_prep (iblk) albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_ai, fhocn, & + fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + scale_factor, snowfrac, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt @@ -550,7 +552,12 @@ subroutine coupling_prep (iblk) evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), fswthru (:,:,iblk), & + fhocn (:,:,iblk), & + fswthru (:,:,iblk), & + fswthru_vdr (:,:,iblk), & + fswthru_vdf (:,:,iblk), & + fswthru_idr (:,:,iblk), & + fswthru_idf (:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & diff --git a/cicecore/shared/ice_arrays_column.F90 b/cicecore/shared/ice_arrays_column.F90 index 64c4de612..06efd6e94 100644 --- a/cicecore/shared/ice_arrays_column.F90 +++ b/cicecore/shared/ice_arrays_column.F90 @@ -106,6 +106,10 @@ module ice_arrays_column public :: & fswsfcn , & ! SW absorbed at ice/snow surface (W m-2) fswthrun , & ! SW through ice to ocean (W/m^2) + fswthrun_vdr , & ! vis dir SW through ice to ocean (W/m^2) + fswthrun_vdf , & ! 
vis dif SW through ice to ocean (W/m^2) + fswthrun_idr , & ! nir dir SW through ice to ocean (W/m^2) + fswthrun_idf , & ! nir dif SW through ice to ocean (W/m^2) fswintn ! SW absorbed in ice interior, below surface (W m-2) real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable, & @@ -359,6 +363,10 @@ subroutine alloc_arrays_column snowfracn (nx_block,ny_block,ncat,max_blocks), & ! Category snow fraction used in radiation fswsfcn (nx_block,ny_block,ncat,max_blocks), & ! SW absorbed at ice/snow surface (W m-2) fswthrun (nx_block,ny_block,ncat,max_blocks), & ! SW through ice to ocean (W/m^2) + fswthrun_vdr (nx_block,ny_block,ncat,max_blocks), & ! vis dir SW through ice to ocean (W/m^2) + fswthrun_vdf (nx_block,ny_block,ncat,max_blocks), & ! vis dif SW through ice to ocean (W/m^2) + fswthrun_idr (nx_block,ny_block,ncat,max_blocks), & ! nir dir SW through ice to ocean (W/m^2) + fswthrun_idf (nx_block,ny_block,ncat,max_blocks), & ! nir dif SW through ice to ocean (W/m^2) fswintn (nx_block,ny_block,ncat,max_blocks), & ! SW absorbed in ice interior, below surface (W m-2) first_ice_real & (nx_block,ny_block,ncat,max_blocks), & ! .true. = c1, .false. = c0 diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 9e4838087..0370a0d7e 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -181,7 +181,8 @@ end subroutine init_thermo_vertical subroutine init_shortwave use ice_arrays_column, only: fswpenln, Iswabsn, Sswabsn, albicen, & - albsnon, alvdrn, alidrn, alvdfn, alidfn, fswsfcn, fswthrun, & + albsnon, alvdrn, alidrn, alvdfn, alidfn, fswsfcn, & + fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & fswintn, albpndn, apeffn, trcrn_sw, dhsn, ffracn, snowfracn, & kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, gaer_bc_tab, bcenh, & swgrid, igrid @@ -304,6 +305,10 @@ subroutine init_shortwave fswsfcn(i,j,n,iblk) = c0 fswintn(i,j,n,iblk) = c0 fswthrun(i,j,n,iblk) = c0 + fswthrun_vdr(i,j,n,iblk) = c0 + fswthrun_vdf(i,j,n,iblk) = c0 + fswthrun_idr(i,j,n,iblk) = c0 + fswthrun_idf(i,j,n,iblk) = c0 enddo ! ncat enddo @@ -363,7 +368,12 @@ subroutine init_shortwave alvdrn=alvdrn(i,j,:,iblk), alvdfn=alvdfn(i,j,:,iblk), & alidrn=alidrn(i,j,:,iblk), alidfn=alidfn(i,j,:,iblk), & fswsfcn=fswsfcn(i,j,:,iblk), fswintn=fswintn(i,j,:,iblk), & - fswthrun=fswthrun(i,j,:,iblk), fswpenln=fswpenln(i,j,:,:,iblk), & + fswthrun=fswthrun(i,j,:,iblk), & + fswthrun_vdr=fswthrun_vdr(i,j,:,iblk), & + fswthrun_vdf=fswthrun_vdf(i,j,:,iblk), & + fswthrun_idr=fswthrun_idr(i,j,:,iblk), & + fswthrun_idf=fswthrun_idf(i,j,:,iblk), & + fswpenln=fswpenln(i,j,:,:,iblk), & Sswabsn=Sswabsn(i,j,:,:,iblk), Iswabsn=Iswabsn(i,j,:,:,iblk), & albicen=albicen(i,j,:,iblk), albsnon=albsnon(i,j,:,iblk), & albpndn=albpndn(i,j,:,iblk), apeffn=apeffn(i,j,:,iblk), & diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 1884d03f1..8cadc5073 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -252,6 +252,10 @@ either Celsius or Kelvin units). 
"fswint", "shortwave absorbed in ice interior", "W/m\ :math:`^2`" "fswpenl", "shortwave penetrating through ice layers", "W/m\ :math:`^2`" "fswthru", "shortwave penetrating to ocean", "W/m\ :math:`^2`" + "fswthru_vdr", "visible direct shortwave penetrating to ocean", "W/m\ :math:`^2`" + "fswthru_vdf", "visible diffuse shortwave penetrating to ocean", "W/m\ :math:`^2`" + "fswthru_idr", "near IR direct shortwave penetrating to ocean", "W/m\ :math:`^2`" + "fswthru_idf", "near IR diffuse shortwave penetrating to ocean", "W/m\ :math:`^2`" "fswthru_ai", "grid-box-mean shortwave penetrating to ocean (fswthru)", "W/m\ :math:`^2`" "fyear", "current data year", "" "fyear_final", "last data year", "" From 15a884cd816896115780f1d2ef607329ab7a45e9 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Mon, 13 Jul 2020 11:45:07 -0700 Subject: [PATCH 02/13] Update README.md Update Version Index to Release Table --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0c5940a7a..36913d5f5 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ Head over to our [Contribution guide][contributing] to learn more about how you Information about the CICE model -* **CICE Version Index**: https://github.com/CICE-Consortium/CICE/wiki/CICE-Version-Index +* **CICE Release Table**: https://github.com/CICE-Consortium/CICE/wiki/CICE-Release-Table Numbered CICE releases since version 6 with associated documentation and DOIs. From 78b7a1220476685b439e8d81995e401923c8aeb1 Mon Sep 17 00:00:00 2001 From: Elizabeth Hunke Date: Mon, 13 Jul 2020 16:33:46 -0600 Subject: [PATCH 03/13] move Ktens out of basalstress conditional in diagnostic output (#488) --- cicecore/cicedynB/general/ice_init.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index f43c08793..9b4e24722 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -1174,8 +1174,8 @@ subroutine input_data write(nu_diag,1007) ' k2 = ', k2, ' free parameter for landfast ice' write(nu_diag,1007) ' alphab = ', alphab, ' factor for landfast ice' write(nu_diag,1007) ' threshold_hw = ', threshold_hw, ' max water depth for grounding ice' - write(nu_diag,1007) ' Ktens = ', Ktens, ' tensile strength factor' endif + write(nu_diag,1007) ' Ktens = ', Ktens, ' tensile strength factor' endif ! kdyn enabled write(nu_diag,*) ' ' From c22c6d501407136bfbf866e9549c2de50caadb84 Mon Sep 17 00:00:00 2001 From: Elizabeth Hunke Date: Mon, 13 Jul 2020 16:34:14 -0600 Subject: [PATCH 04/13] clarify CICE citation info in doc (#487) --- doc/source/intro/citing.rst | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/doc/source/intro/citing.rst b/doc/source/intro/citing.rst index 8f4e142c8..c128bc4e6 100644 --- a/doc/source/intro/citing.rst +++ b/doc/source/intro/citing.rst @@ -5,14 +5,26 @@ Citing the CICE code ==================== -If you use the CICE code, please cite the version you are using with the CICE -Digital Object Identifier (DOI): +Each individual release has its own Digital Object Identifier (DOI), +e.g. CICE v6.1.2 has DOI 10.5281/zenodo.3888653. All versions of +this lineage (e.g. CICE6) can be cited by using the DOI +10.5281/zenodo.1205674 (https://zenodo.org/record/1205674). This DOI +represents all v6 releases, and will always resolve to the latest one. +More information can be found by following the DOI link to zenodo. 
-DOI:10.5281/zenodo.1205674 (https://zenodo.org/record/1205674) +If you use CICE, please cite the version number of the code you +are using or modifying. -This DOI can be used to cite all CICE versions and the URL will default to the most recent version. -However, each released version of CICE will also receive its own, unique DOI that can be -used for citations as well. +If using code from the CICE-Consortium repository ``master`` branch +that includes modifications +that have not yet been released with a version number, then in +addition to the most recent version number, the hash at time of +download can be cited, determined by executing the command ``git log`` +in your clone. -Please also make the CICE Consortium aware of any publications and model use. +A hash can also be cited for your own modifications, once they have +been committed to a repository branch. + +Please also make the CICE Consortium aware of any publications and +model use. From ee5a0e965738307aaa7d6ad60a1a03828ffebef1 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Wed, 15 Jul 2020 12:30:11 -0700 Subject: [PATCH 05/13] Update README.md --- README.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 36913d5f5..a584e8ac9 100644 --- a/README.md +++ b/README.md @@ -11,21 +11,19 @@ CICE is a computationally efficient model for simulating the growth, melting, an This repository contains the files and code needed to run the CICE sea ice numerical model starting with version 6. CICE is maintained by the CICE Consortium. Versions prior to v6 are found in the [CICE-svn-trunk repository](https://github.com/CICE-Consortium/CICE-svn-trunk). -CICE consists of a top level driver and dynamical core plus the [Icepack column physics code][icepack], which is included in CICE as a Git submodule. Because Icepack is a submodule of CICE, Icepack and CICE development are handled independently with respect to the GitHub repositories even though development and testing may be done together. +CICE consists of a top level driver and dynamical core plus the [Icepack][icepack] column physics code], which is included in CICE as a Git submodule. Because Icepack is a submodule of CICE, Icepack and CICE development are handled independently with respect to the GitHub repositories even though development and testing may be done together. [icepack]: https://github.com/CICE-Consortium/Icepack -The first point of contact with the CICE Consortium is the [Consortium Community Forum][forum]. +The first point of contact with the CICE Consortium is the Consortium Community [Forum][forum]. This forum is monitored by Consortium members and also opened to the whole community. Please do not use our issue tracker for general support questions. -[doc-resources]: https://github.com/CICE-Consortium/About-Us/wiki/Resource-Index#model-documentation -[doc-running]: https://cice-consortium-cice.readthedocs.io/en/master/user_guide/ug_running.html [forum]: https://xenforo.cgd.ucar.edu/cesm/forums/cice-consortium.146/ If you expect to make any changes to the code, we recommend that you first fork both the CICE and Icepack repositories. In order to incorporate your developments into the Consortium code it is imperative you follow the guidance for Pull Requests and requisite testing. -Head over to our [Contribution guide][contributing] to learn more about how you can help improve CICE. +Head over to our [Contributing][contributing] guide to learn more about how you can help improve CICE. 
[contributing]: https://github.com/CICE-Consortium/About-Us/wiki/Contributing From b055c7f01ad4ad8f5d946d4456188a8f8b8eb55d Mon Sep 17 00:00:00 2001 From: "David A. Bailey" Date: Fri, 17 Jul 2020 09:58:10 -0600 Subject: [PATCH 06/13] Add restart_coszen namelist option (#480) * updated orbital calculations needed for cesm * fixed problems in updated orbital calculations needed for cesm * update CICE6 to support coupling with UFS * put in changes so that both ufsatm and cesm requirements for potential temperature and density are satisfied * update icepack submodule * Revert "update icepack submodule" This reverts commit e70d1abcbeb4351195a2b81c6ce3f623c936426c. * update comp_ice.backend with temporary ice_timers fix * Fix threading problem in init_bgc * Fix additional OMP problems * changes for coldstart running * Move the forapps directory * remove cesmcoupled ifdefs * Fix logging issues for NUOPC * removal of many cpp-ifdefs * fix compile errors * fixes to get cesm working * fixed white space issue * Add restart_coszen namelist option * Move restart_coszen to forcing_nml * Update documentation on restart_coszen Co-authored-by: Mariana Vertenstein Co-authored-by: apcraig Co-authored-by: Denise Worthen --- .../cicedynB/analysis/ice_history_shared.F90 | 2 - cicecore/cicedynB/general/ice_init.F90 | 7 +- .../infrastructure/ice_restart_driver.F90 | 17 +- .../io/io_netcdf/ice_restart.F90 | 8 +- .../infrastructure/io/io_pio2/ice_restart.F90 | 8 +- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 81 +-- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 50 +- .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 35 ++ .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 511 ++++++------------ .../drivers/nuopc/cmeps/ice_import_export.F90 | 130 ++--- .../nuopc/cmeps/ice_prescribed_mod.F90 | 149 ++--- cicecore/shared/ice_restart_shared.F90 | 1 + .../forapps/ufs/comp_ice.backend.libcice | 9 +- configuration/scripts/ice_in | 1 + doc/source/cice_index.rst | 1 + doc/source/user_guide/ug_case_settings.rst | 3 +- doc/source/user_guide/ug_implementation.rst | 4 + 17 files changed, 385 insertions(+), 632 deletions(-) create mode 100644 cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedynB/analysis/ice_history_shared.F90 index b5f2226fa..ce177ad1e 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedynB/analysis/ice_history_shared.F90 @@ -672,9 +672,7 @@ subroutine construct_filename(ncfile,suffix,ns) iday = mday isec = sec - dt -#ifdef CESMCOUPLED if (write_ic) isec = sec -#endif ! 
construct filename if (write_ic) then write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 9b4e24722..c0f7a4eaa 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -73,7 +73,7 @@ subroutine input_data restart_pond_cesm, restart_pond_lvl, restart_pond_topo, restart_aero, & restart_fsd, restart_iso use ice_restart_shared, only: & - restart, restart_ext, restart_dir, restart_file, pointer_file, & + restart, restart_ext, restart_coszen, restart_dir, restart_file, pointer_file, & runid, runtype, use_restart_time, restart_format, lcdf64 use ice_history_shared, only: hist_avg, history_dir, history_file, & incond_dir, incond_file, version_name, & @@ -212,7 +212,7 @@ subroutine input_data oceanmixed_ice, restore_ice, restore_ocn, trestore, & precip_units, default_season, wave_spec_type,nfreq, & atm_data_type, ocn_data_type, bgc_data_type, fe_data_type, & - ice_data_type, wave_spec_file, & + ice_data_type, wave_spec_file, restart_coszen, & fyear_init, ycycle, & atm_data_dir, ocn_data_dir, bgc_data_dir, & atm_data_format, ocn_data_format, rotate_wind, & @@ -269,6 +269,7 @@ subroutine input_data restart_dir = './' ! write to executable dir for default restart_file = 'iced' ! restart file name prefix restart_ext = .false. ! if true, read/write ghost cells + restart_coszen = .false. ! if true, read/write coszen use_restart_time = .true. ! if true, use time info written in file pointer_file = 'ice.restart_file' restart_format = 'default' ! restart file format @@ -563,6 +564,7 @@ subroutine input_data call broadcast_scalar(restart, master_task) call broadcast_scalar(restart_dir, master_task) call broadcast_scalar(restart_ext, master_task) + call broadcast_scalar(restart_coszen, master_task) call broadcast_scalar(use_restart_time, master_task) call broadcast_scalar(restart_format, master_task) call broadcast_scalar(lcdf64, master_task) @@ -1458,6 +1460,7 @@ subroutine input_data write(nu_diag,*) ' restart_dir = ', & trim(restart_dir) write(nu_diag,*) ' restart_ext = ', restart_ext + write(nu_diag,*) ' restart_coszen = ', restart_coszen write(nu_diag,*) ' restart_format = ', & trim(restart_format) write(nu_diag,*) ' lcdf64 = ', & diff --git a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 index d3829b9c4..5a6c79503 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 @@ -23,7 +23,7 @@ module ice_restart_driver field_loc_center, field_loc_NEcorner, & field_type_scalar, field_type_vector use ice_restart_shared, only: restart_dir, pointer_file, & - runid, use_restart_time, lenstr + runid, use_restart_time, lenstr, restart_coszen use ice_restart use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag, nu_rst_pointer, nu_restart, nu_dump @@ -58,9 +58,7 @@ subroutine dumpfile(filename_spec) stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 -#ifdef CESMCOUPLED use ice_flux, only: coszen -#endif use ice_state, only: aicen, vicen, vsnon, trcrn, uvel, vvel character(len=char_len_long), intent(in), optional :: filename_spec @@ -132,9 +130,9 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! 
radiation fields !----------------------------------------------------------------- -#ifdef CESMCOUPLED - call write_restart_field(nu_dump,0,coszen,'ruf8','coszen',1,diag) -#endif + + if (restart_coszen) call write_restart_field(nu_dump,0,coszen,'ruf8','coszen',1,diag) + call write_restart_field(nu_dump,0,scale_factor,'ruf8','scale_factor',1,diag) call write_restart_field(nu_dump,0,swvdr,'ruf8','swvdr',1,diag) @@ -209,9 +207,7 @@ subroutine restartfile (ice_ic) stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 -#ifdef CESMCOUPLED use ice_flux, only: coszen -#endif use ice_grid, only: tmask, grid_type use ice_state, only: trcr_depend, aice, vice, vsno, trcr, & aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & @@ -310,11 +306,8 @@ subroutine restartfile (ice_ic) if (my_task == master_task) & write(nu_diag,*) 'radiation fields' -#ifdef CESMCOUPLED - call read_restart_field(nu_restart,0,coszen,'ruf8', & -! 'coszen',1,diag, field_loc_center, field_type_scalar) + if (restart_coszen) call read_restart_field(nu_restart,0,coszen,'ruf8', & 'coszen',1,diag) -#endif call read_restart_field(nu_restart,0,scale_factor,'ruf8', & 'scale_factor',1,diag, field_loc_center, field_type_scalar) call read_restart_field(nu_restart,0,swvdr,'ruf8', & diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index d4decf6f7..8bb09398e 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -11,7 +11,7 @@ module ice_restart use netcdf use ice_restart_shared, only: & restart_ext, restart_dir, restart_file, pointer_file, & - runid, use_restart_time, lcdf64, lenstr + runid, use_restart_time, lcdf64, lenstr, restart_coszen use ice_fileunits, only: nu_diag, nu_rst_pointer use ice_exit, only: abort_ice use icepack_intfc, only: icepack_query_parameters @@ -84,7 +84,6 @@ subroutine init_restart_read(ice_ic) endif endif ! 
use namelist values if use_restart_time = F - write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc endif call broadcast_scalar(istep0,master_task) @@ -227,10 +226,9 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'uvel',dims) call define_rest_field(ncid,'vvel',dims) + + if (restart_coszen) call define_rest_field(ncid,'coszen',dims) -#ifdef CESMCOUPLED - call define_rest_field(ncid,'coszen',dims) -#endif call define_rest_field(ncid,'scale_factor',dims) call define_rest_field(ncid,'swvdr',dims) call define_rest_field(ncid,'swvdf',dims) diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index 5bb880dc5..b11dcf0d0 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -11,7 +11,8 @@ module ice_restart use ice_kinds_mod use ice_restart_shared, only: & restart, restart_ext, restart_dir, restart_file, pointer_file, & - runid, runtype, use_restart_time, restart_format, lcdf64, lenstr + runid, runtype, use_restart_time, restart_format, lcdf64, lenstr, & + restart_coszen use ice_pio use pio use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -245,10 +246,7 @@ subroutine init_restart_write(filename_spec) call define_rest_field(File,'uvel',dims) call define_rest_field(File,'vvel',dims) - -#ifdef CESMCOUPLED - call define_rest_field(File,'coszen',dims) -#endif + if (restart_coszen) call define_rest_field(File,'coszen',dims) call define_rest_field(File,'scale_factor',dims) call define_rest_field(File,'swvdr',dims) call define_rest_field(File,'swvdf',dims) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 3dcd8fb2f..917774908 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -25,40 +25,21 @@ module CICE_InitMod implicit none private - public :: CICE_Initialize, cice_init + public :: cice_init !======================================================================= contains -!======================================================================= - -! Initialize the basic state, grid and all necessary parameters for -! running the CICE model. Return the initial state in routine -! export state. -! Note: This initialization driver is designed for standalone and -! CESM-coupled applications. For other -! applications (e.g., standalone CAM), this driver would be -! replaced by a different driver that calls subroutine cice_init, -! where most of the work is done. - - subroutine CICE_Initialize - - character(len=*), parameter :: subname='(CICE_Initialize)' - !-------------------------------------------------------------------- - ! model initialization - !-------------------------------------------------------------------- - - call cice_init - - end subroutine CICE_Initialize - !======================================================================= ! ! Initialize CICE model. subroutine cice_init + ! Initialize the basic state, grid and all necessary parameters for + ! running the CICE model. 
+ use ice_arrays_column, only: hin_max, c_hi_range, alloc_arrays_column use ice_arrays_column, only: floe_rad_l, floe_rad_c, & floe_binwidth, c_fsd_range @@ -74,8 +55,7 @@ subroutine cice_init use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux - use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & - get_forcing_atmo, get_forcing_ocn, get_wave_spec + use ice_forcing, only: init_forcing_ocn use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & faero_default, faero_optics, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid @@ -87,9 +67,6 @@ subroutine cice_init use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & tr_iso, tr_fsd, wave_spec @@ -129,10 +106,6 @@ subroutine cice_init endif call init_coupler_flux ! initialize fluxes exchanged with coupler - -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution @@ -158,7 +131,9 @@ subroutine cice_init call calendar(time) ! determine the initial date + ! TODO: - why is this being called when you are using CMEPS? call init_forcing_ocn(dt) ! initialize sss and sst from data + call init_state ! initialize the ice state call init_transport ! initialize horizontal transport call ice_HaloRestore_init ! restored boundary conditions @@ -182,51 +157,31 @@ subroutine cice_init if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical - !property tables + if (tr_aero .or. tr_zaero) then + call faero_optics !initialize aerosol optical property tables + end if ! Initialize shortwave components using swdn from previous timestep ! if restarting. These components will be scaled to current forcing ! in prep_radiation. - if (trim(runtype) == 'continue' .or. restart) & - call init_shortwave ! initialize radiative transfer - -! istep = istep + 1 ! update time step counters -! istep1 = istep1 + 1 -! time = time + dt ! determine the time and date -! call calendar(time) ! at the end of the first timestep - !-------------------------------------------------------------------- - ! coupler communication or forcing data initialization - !-------------------------------------------------------------------- - -#ifndef coupled - call init_forcing_atmo ! initialize atmospheric forcing (standalone) + if (trim(runtype) == 'continue' .or. restart) then + call init_shortwave ! initialize radiative transfer + end if -#ifndef CESMCOUPLED - if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- - ! isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. 
tr_zaero) call faero_default ! default values - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry - if (runtype == 'initial' .and. .not. restart) & + if (runtype == 'initial' .and. .not. restart) then call init_shortwave ! initialize radiative transfer using current swdn + end if call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler -! if (write_ic) call accum_hist(dt) ! write initial conditions - end subroutine cice_init !======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index aed00a9a0..486c36dcc 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -15,9 +15,7 @@ module CICE_RunMod use ice_kinds_mod -#ifdef CESMCOUPLED - use perf_mod, only : t_startf, t_stopf, t_barrierf -#endif + use cice_wrapper_mod, only : t_startf, t_stopf, t_barrierf use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 @@ -79,48 +77,22 @@ subroutine CICE_Run ! timestep loop !-------------------------------------------------------------------- -! timeLoop: do - -! call ice_step - - istep = istep + 1 ! update time step counters - istep1 = istep1 + 1 - time = time + dt ! determine the time and date - -! call calendar(time) ! at the end of the timestep + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date - call ice_timer_start(timer_couple) ! atm/ocn coupling - -#ifndef coupled -#ifndef CESMCOUPLED -! for now, wave_spectrum is constant in time -! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data - - ! isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif - if (z_tracers) call get_atm_bgc ! biogeochemistry + call ice_timer_start(timer_couple) ! atm/ocn coupling - call init_flux_atm ! Initialize atmosphere fluxes sent to coupler - call init_flux_ocn ! initialize ocean fluxes sent to coupler + if (z_tracers) call get_atm_bgc ! biogeochemistry - call calendar(time) ! at the end of the timestep + call init_flux_atm ! Initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler - call ice_timer_stop(timer_couple) ! atm/ocn coupling + call calendar(time) ! at the end of the timestep - call ice_step + call ice_timer_stop(timer_couple) ! atm/ocn coupling -! if (stop_now >= 1) exit timeLoop -! enddo timeLoop + call ice_step !-------------------------------------------------------------------- ! 
end of timestep loop diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 new file mode 100644 index 000000000..0da2ed491 --- /dev/null +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -0,0 +1,35 @@ +module cice_wrapper_mod + +#ifdef CESMCOUPLED + use perf_mod , only : t_startf, t_stopf, t_barrierf + use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit + +#else +contains + + ! These are just stub routines put in place to remove + + subroutine shr_file_setLogUnit(nunit) + integer, intent(in) :: nunit + ! do nothing for this stub - its just here to replace + ! having cppdefs in the main program + end subroutine shr_file_setLogUnit + subroutine shr_file_getLogUnit(nunit) + integer, intent(in) :: nunit + ! do nothing for this stub - its just here to replace + ! having cppdefs in the main program + end subroutine shr_file_getLogUnit + + subroutine t_startf(string) + character(len=*) :: string + end subroutine t_startf + subroutine t_stopf(string) + character(len=*) :: string + end subroutine t_stopf + subroutine t_barrierf(string, comm) + character(len=*) :: string + integer:: comm + end subroutine t_barrierf +#endif + +end module cice_wrapper_mod diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 81fb1a308..aff4b5099 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -15,12 +15,6 @@ module ice_comp_nuopc use NUOPC_Model , only : model_label_SetRunClock => label_SetRunClock use NUOPC_Model , only : model_label_Finalize => label_Finalize use NUOPC_Model , only : NUOPC_ModelGet, SetVM -#ifdef CESMCOUPLED - use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit - use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT - use shr_const_mod - use shr_cal_mod , only : shr_cal_noleap, shr_cal_gregorian -#endif use ice_constants , only : ice_init_constants use ice_shr_methods , only : chkerr, state_setscalar, state_getscalar, state_diagnose, alarmInit use ice_shr_methods , only : set_component_logging, get_component_instance @@ -33,36 +27,30 @@ module ice_comp_nuopc use ice_blocks , only : nblocks_tot, get_block_parameter use ice_distribution , only : ice_distributiongetblockloc use ice_grid , only : tlon, tlat, hm, tarea, ULON, ULAT - use ice_communicate , only : my_task, master_task, mpi_comm_ice + use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice use ice_calendar , only : force_restart_now, write_ic use ice_calendar , only : idate, mday, time, month, daycal, time2sec, year_init use ice_calendar , only : sec, dt, calendar, calendar_type, nextsw_cday, istep use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long use ice_scam , only : scmlat, scmlon, single_column - use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name, inst_suffix, release_all_fileunits, flush_fileunit + use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name + use ice_fileunits , only : inst_suffix, release_all_fileunits, flush_fileunit use ice_restart_shared , only : runid, runtype, restart_dir, restart_file use ice_history , only : accum_hist -#if (defined NEWCODE) - use ice_history_shared , only : model_doi_url ! 
TODO: add this functionality -#endif -#ifdef CESMCOUPLED - use ice_prescribed_mod , only : ice_prescribed_init -#endif -#if (defined NEWCODE) - use ice_atmo , only : flux_convergence_tolerance, flux_convergence_max_iteration - use ice_atmo , only : use_coldair_outbreak_mod -#endif - use CICE_InitMod , only : CICE_Init - use CICE_RunMod , only : CICE_Run + use CICE_InitMod , only : cice_init + use CICE_RunMod , only : cice_run use ice_exit , only : abort_ice use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_init_orbit, icepack_init_parameters, icepack_query_orbit use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters + use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf + use cice_wrapper_mod , only : shr_file_getlogunit, shr_file_setlogunit #ifdef CESMCOUPLED - use perf_mod , only : t_startf, t_stopf, t_barrierf + use shr_const_mod + use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT #endif use ice_timers - use ice_communicate, only: init_communicate + use ice_prescribed_mod , only : ice_prescribed_init implicit none private @@ -76,32 +64,33 @@ module ice_comp_nuopc private :: ModelAdvance private :: ModelSetRunClock private :: ModelFinalize -#ifdef CESMCOUPLED - private :: ice_orbital_init ! only for cesm -#endif + private :: ice_orbital_init ! only valid for cesm character(len=char_len_long) :: flds_scalar_name = '' - integer :: flds_scalar_num = 0 - integer :: flds_scalar_index_nx = 0 - integer :: flds_scalar_index_ny = 0 - integer :: flds_scalar_index_nextsw_cday = 0 + integer :: flds_scalar_num = 0 + integer :: flds_scalar_index_nx = 0 + integer :: flds_scalar_index_ny = 0 + integer :: flds_scalar_index_nextsw_cday = 0 character(len=char_len_long) :: orb_mode ! attribute - orbital mode - integer :: orb_iyear ! attribute - orbital year - integer :: orb_iyear_align ! attribute - associated with model year - real(dbl_kind) :: orb_obliq ! attribute - obliquity in degrees - real(dbl_kind) :: orb_mvelp ! attribute - moving vernal equinox longitude - real(dbl_kind) :: orb_eccen ! attribute and update- orbital eccentricity + integer :: orb_iyear ! attribute - orbital year + integer :: orb_iyear_align ! attribute - associated with model year + real(dbl_kind) :: orb_obliq ! attribute - obliquity in degrees + real(dbl_kind) :: orb_mvelp ! attribute - moving vernal equinox longitude + real(dbl_kind) :: orb_eccen ! attribute and update- orbital eccentricity character(len=*) , parameter :: orb_fixed_year = 'fixed_year' character(len=*) , parameter :: orb_variable_year = 'variable_year' character(len=*) , parameter :: orb_fixed_parameters = 'fixed_parameters' - integer , parameter :: dbug = 10 - integer , parameter :: debug_import = 0 ! internal debug level - integer , parameter :: debug_export = 0 ! internal debug level - character(*), parameter :: modName = "(ice_comp_nuopc)" - character(*), parameter :: u_FILE_u = & + character(len=*),parameter :: shr_cal_noleap = 'NO_LEAP' + character(len=*),parameter :: shr_cal_gregorian = 'GREGORIAN' + + integer , parameter :: dbug = 10 + integer , parameter :: debug_import = 0 ! internal debug level + integer , parameter :: debug_export = 0 ! 
internal debug level + character(*), parameter :: modName = "(ice_comp_nuopc)" + character(*), parameter :: u_FILE_u = & __FILE__ !======================================================================= @@ -191,7 +180,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) ! Local variables character(len=char_len_long) :: cvalue character(len=char_len_long) :: logmsg - logical :: isPresent, isSet + logical :: isPresent, isSet character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- @@ -245,8 +234,6 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) write(logmsg,*) flds_scalar_index_nextsw_cday call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_nextsw_cday = '//trim(logmsg), ESMF_LOGMSG_INFO) if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call abort_ice(subname//'Need to set attribute ScalarFieldIdxNextSwCday') endif call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) @@ -266,70 +253,70 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) integer, intent(out) :: rc ! Local variables - type(ESMF_DistGrid) :: distGrid - type(ESMF_Mesh) :: Emesh, EmeshTemp - integer :: spatialDim - integer :: numOwnedElements - real(dbl_kind), pointer :: ownedElemCoords(:) - real(dbl_kind), pointer :: lat(:), latMesh(:) - real(dbl_kind), pointer :: lon(:), lonMesh(:) - integer , allocatable :: gindex_ice(:) - integer , allocatable :: gindex_elim(:) - integer , allocatable :: gindex(:) - integer :: globalID - character(ESMF_MAXSTR) :: cvalue - real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp - character(len=char_len) :: tfrz_option - character(ESMF_MAXSTR) :: convCIM, purpComp - type(ESMF_VM) :: vm - type(ESMF_Time) :: currTime ! Current time - type(ESMF_Time) :: startTime ! Start time - type(ESMF_Time) :: stopTime ! Stop time - type(ESMF_Time) :: refTime ! Ref time - type(ESMF_TimeInterval) :: timeStep ! Model timestep - type(ESMF_Calendar) :: esmf_calendar ! esmf calendar - type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type - integer :: start_ymd ! Start date (YYYYMMDD) - integer :: start_tod ! start time of day (s) - integer :: curr_ymd ! Current date (YYYYMMDD) - integer :: curr_tod ! Current time of day (s) - integer :: stop_ymd ! stop date (YYYYMMDD) - integer :: stop_tod ! stop time of day (sec) - integer :: ref_ymd ! Reference date (YYYYMMDD) - integer :: ref_tod ! reference time of day (s) - integer :: yy,mm,dd ! Temporaries for time query - integer :: iyear ! yyyy - integer :: dtime ! time step - integer :: lmpicom - integer :: shrlogunit ! original log unit - character(len=char_len) :: starttype ! infodata start type - integer :: lsize ! local size of coupling array - character(len=512) :: diro - character(len=512) :: logfile - logical :: isPresent - integer :: localPet - integer :: n,c,g,i,j,m ! indices - integer :: iblk, jblk ! indices - integer :: ig, jg ! indices - integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain - type(block) :: this_block ! block information for current block - integer :: compid ! 
component id + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + type(ESMF_DistGrid) :: distGrid + type(ESMF_Mesh) :: Emesh, EmeshTemp + integer :: spatialDim + integer :: numOwnedElements + real(dbl_kind), pointer :: ownedElemCoords(:) + real(dbl_kind), pointer :: lat(:), latMesh(:) + real(dbl_kind), pointer :: lon(:), lonMesh(:) + integer , allocatable :: gindex_ice(:) + integer , allocatable :: gindex_elim(:) + integer , allocatable :: gindex(:) + integer :: globalID + character(ESMF_MAXSTR) :: cvalue + character(len=char_len) :: tfrz_option + character(ESMF_MAXSTR) :: convCIM, purpComp + type(ESMF_VM) :: vm + type(ESMF_Time) :: currTime ! Current time + type(ESMF_Time) :: startTime ! Start time + type(ESMF_Time) :: stopTime ! Stop time + type(ESMF_Time) :: refTime ! Ref time + type(ESMF_TimeInterval) :: timeStep ! Model timestep + type(ESMF_Calendar) :: esmf_calendar ! esmf calendar + type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type + integer :: start_ymd ! Start date (YYYYMMDD) + integer :: start_tod ! start time of day (s) + integer :: curr_ymd ! Current date (YYYYMMDD) + integer :: curr_tod ! Current time of day (s) + integer :: stop_ymd ! stop date (YYYYMMDD) + integer :: stop_tod ! stop time of day (sec) + integer :: ref_ymd ! Reference date (YYYYMMDD) + integer :: ref_tod ! reference time of day (s) + integer :: yy,mm,dd ! Temporaries for time query + integer :: iyear ! yyyy + integer :: dtime ! time step + integer :: lmpicom + integer :: shrlogunit ! original log unit + character(len=char_len) :: starttype ! infodata start type + integer :: lsize ! local size of coupling array + logical :: isPresent + logical :: isSet + integer :: localPet + integer :: n,c,g,i,j,m ! indices + integer :: iblk, jblk ! indices + integer :: ig, jg ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type(block) :: this_block ! block information for current block + integer :: compid ! component id character(len=char_len_long) :: tempc1,tempc2 - real(dbl_kind) :: diff_lon - integer :: npes - integer :: num_elim_global - integer :: num_elim_local - integer :: num_elim - integer :: num_ice - integer :: num_elim_gcells ! local number of eliminated gridcells - integer :: num_elim_blocks ! local number of eliminated blocks - integer :: num_total_blocks - integer :: my_elim_start, my_elim_end - real(dbl_kind) :: rad_to_deg - integer(int_kind) :: ktherm - character(*), parameter :: F00 = "('(ice_comp_nuopc) ',2a,1x,d21.14)" - character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' - logical :: mastertask + real(dbl_kind) :: diff_lon + integer :: npes + integer :: num_elim_global + integer :: num_elim_local + integer :: num_elim + integer :: num_ice + integer :: num_elim_gcells ! local number of eliminated gridcells + integer :: num_elim_blocks ! 
local number of eliminated blocks + integer :: num_total_blocks + integer :: my_elim_start, my_elim_end + real(dbl_kind) :: rad_to_deg + integer(int_kind) :: ktherm + logical :: mastertask + character(len=char_len_long) :: diag_filename = 'unset' + character(len=*), parameter :: F00 = "('(ice_comp_nuopc) ',2a,1x,d21.14)" + character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' !-------------------------------- rc = ESMF_SUCCESS @@ -345,6 +332,14 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ESMF_VMGet(vm, mpiCommunicator=lmpicom, localPet=localPet, PetCount=npes, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + !---------------------------------------------------------------------------- + ! Initialize cice communicators + !---------------------------------------------------------------------------- + + call init_communicate(lmpicom) ! initial setup for message passing + mastertask = .false. + if (my_task == master_task) mastertask = .true. + !---------------------------------------------------------------------------- ! determine instance information !---------------------------------------------------------------------------- @@ -358,9 +353,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! start cice timers !---------------------------------------------------------------------------- -#ifdef CESMCOUPLED call t_startf ('cice_init_total') -#endif !---------------------------------------------------------------------------- ! Initialize constants @@ -395,6 +388,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) pi_in = SHR_CONST_PI, & snowpatch_in = 0.005_dbl_kind, & dragio_in = 0.00962_dbl_kind) + call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -407,44 +401,9 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Get orbital values ! Note that these values are obtained in a call to init_orbit in ice_shortwave.F90 ! if CESMCOUPLED is not defined -#ifdef CESMCOUPLED - mastertask = .false. - if (my_task == master_task) mastertask = .true. + call ice_orbital_init(gcomp, clock, nu_diag, mastertask, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#else - ! 
Start with icepack values then update with values defined in configure file if they exist - call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, lambm0_out=lambm0, obliqr_out=obliqr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) eccen - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) obliqr - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) lambm0 - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) mvelpp - end if - - call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, lambm0_in=lambm0, obliqr_in=obliqr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) -#endif ! Determine runtype and possibly nextsw_cday call NUOPC_CompAttributeGet(gcomp, name='start_type', value=cvalue, isPresent=isPresent, rc=rc) @@ -461,12 +420,8 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call abort_ice( subname//' ERROR: unknown starttype' ) end if - ! Note that in the mct version the atm was initialized first so that nextsw_cday could be passed to the other - ! components - this assumed that cam or datm was ALWAYS initialized first. - ! In the nuopc version it will be easier to assume that on startup - nextsw_cday is just the current time - + ! We assume here that on startup - nextsw_cday is just the current time ! TOOD (mvertens, 2019-03-21): need to get the perpetual run working - if (trim(runtype) /= 'initial') then ! Set nextsw_cday to -1 (this will skip an orbital calculation on initialization nextsw_cday = -1.0_dbl_kind @@ -477,82 +432,36 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if else - ! This would be the NEMS branch - ! Note that in NEMS - nextsw_cday is not needed in ice_orbital.F90 and what is needed is - ! simply a CPP variable declaratino of NEMSCOUPLED - runtype = 'initial' ! determined from the namelist in ice_init if CESMCOUPLED is not defined end if - single_column = .false. -#ifdef CESMCOUPLED - ! Determine single column info - call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, rc=rc) + ! Determine if single column + call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then + if (isPresent .and. isSet) then read(cvalue,*) single_column - end if -#endif - if (single_column) then - ! 
Must have these attributes present - call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlon - call NUOPC_CompAttributeGet(gcomp, name='scmlat', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlat + if (single_column) then + call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlon + call NUOPC_CompAttributeGet(gcomp, name='scmlat', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlat + end if + else + single_column = .false. end if ! Determine runid - call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, rc=rc) - if (isPresent) then + call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (isPresent .and. isSet) then read(cvalue,*) runid else - runid = 'unknown' ! read in from the namelist in ice_init.F90 if CESMCOUPLED is not defined + ! read in from the namelist in ice_init.F90 if this is not an attribute passed from the driver + runid = 'unknown' end if -#ifdef CESMCOUPLED - ! Determine tfreeze_option, flux convertence before call to cice_init - ! tcx, what is going on here? if not present, set it? if present, ignore it? - call NUOPC_CompAttributeGet(gcomp, name="tfreeze_option", value=tfrz_option, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (.not. isPresent) then - tfrz_option = 'linear_salt' ! TODO: is this right? This must be the same as mom is using for the calculation. - end if - call icepack_init_parameters(tfrz_option_in=tfrz_option) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) -#endif - -#if (defined NEWCODE) - call NUOPC_CompAttributeGet(gcomp, name="flux_convergence", value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) flux_convergence_tolerance - else - flux_convergence_tolerance = 0._dbl_kind - end if - - call NUOPC_CompAttributeGet(gcomp, name="flux_max_iteration", value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) flux_convergence_max_iteration - else - flux_convergence_max_iteration = 5 - end if - - call NUOPC_CompAttributeGet(gcomp, name="coldair_outbreak_mod", value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) use_coldair_outbreak_mod - else - use_coldair_outbreak_mod = .false. - end if -#endif - ! Get clock information before call to cice_init - call ESMF_ClockGet( clock, & currTime=currTime, startTime=startTime, stopTime=stopTime, refTime=RefTime, & timeStep=timeStep, rc=rc) @@ -580,8 +489,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ESMF_TimeGet( currTime, calkindflag=esmf_caltype, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return - -#ifdef CESMCOUPLED if (esmf_caltype == ESMF_CALKIND_NOLEAP) then calendar_type = shr_cal_noleap else if (esmf_caltype == ESMF_CALKIND_GREGORIAN) then @@ -589,25 +496,32 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) else call abort_ice( subname//'ERROR:: bad calendar for ESMF' ) end if -#endif !---------------------------------------------------------------------------- ! 
Set cice logging !---------------------------------------------------------------------------- + ! Note - this must be done AFTER the communicators are set ! Note that sets the nu_diag module variable in ice_fileunits ! Set the nu_diag_set flag so it's not reset later - call init_communicate(lmpicom) ! initial setup for message passing + call shr_file_setLogUnit (shrlogunit) - mastertask = .false. - if (my_task == master_task) mastertask = .true. - call set_component_logging(gcomp, mastertask, nu_diag, shrlogunit, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - nu_diag_set = .true. + call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + diag_filename = trim(cvalue) + end if + call NUOPC_CompAttributeGet(gcomp, name="logfile", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + diag_filename = trim(diag_filename) // '/' // trim(cvalue) + end if -#ifdef CESMCOUPLED - call shr_file_setLogUnit (shrlogunit) -#endif + if (trim(diag_filename) /= 'unset') then + call set_component_logging(gcomp, mastertask, nu_diag, shrlogunit, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + nu_diag_set = .true. + end if !---------------------------------------------------------------------------- ! Initialize cice @@ -616,13 +530,9 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! Note that cice_init also sets time manager info as well as mpi communicator info, ! including master_task and my_task -#ifdef CESMCOUPLED call t_startf ('cice_init') -#endif call cice_init -#ifdef CESMCOUPLED call t_stopf ('cice_init') -#endif !---------------------------------------------------------------------------- ! reset shr logging to my log file @@ -634,7 +544,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) file=__FILE__, line=__LINE__) ! Now write output to nu_diag - this must happen AFTER call to cice_init - if (localPet == 0) then + if (mastertask) then write(nu_diag,F00) trim(subname),' cice init nextsw_cday = ',nextsw_cday write(nu_diag,*) trim(subname),' tfrz_option = ',trim(tfrz_option) if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then @@ -643,10 +553,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) write(nu_diag,*) trim(subname),' inst_name = ',trim(inst_name) write(nu_diag,*) trim(subname),' inst_index = ',inst_index write(nu_diag,*) trim(subname),' inst_suffix = ',trim(inst_suffix) -#if (defined NEWCODE) - write(nu_diag,*) trim(subname),' flux_convergence = ', flux_convergence_tolerance - write(nu_diag,*) trim(subname),' flux_convergence_max_iteration = ', flux_convergence_max_iteration -#endif endif !--------------------------------------------------------------------------- @@ -931,18 +837,18 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#ifdef CESMCOUPLED !----------------------------------------------------------------- ! 
Prescribed ice initialization - first get compid !----------------------------------------------------------------- - call NUOPC_CompAttributeGet(gcomp, name='MCTID', value=cvalue, rc=rc) + call NUOPC_CompAttributeGet(gcomp, name='MCTID', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) compid ! convert from string to integer - - ! Having this if-defd means that MCT does not need to be build in a NEMS configuration + if (isPresent .and. isSet) then + read(cvalue,*) compid ! convert from string to integer + else + compid = 0 + end if call ice_prescribed_init(lmpicom, compid, gindex_ice) -#endif !----------------------------------------------------------------- ! Create cice export state @@ -959,7 +865,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! TODO (mvertens, 2018-12-21): fill in iceberg_prognostic as .false. - if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & idate, sec, nu_diag, rc=rc) @@ -974,25 +879,9 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return endif -#ifdef USE_ESMF_METADATA - convCIM = "CIM" - purpComp = "Model Component Simulation Description" - call ESMF_AttributeAdd(comp, convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "ShortName", "CICE", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "LongName", "CICE Model", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "Description", "CICE5", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "ReleaseDate", "TBD", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "ModelType", "Sea Ice", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "Name", "David Bailey", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "EmailAddress", "dbailey@ucar.edu", convention=convCIM, purpose=purpComp, rc=rc) - call ESMF_AttributeSet(comp, "ResponsiblePartyRole", "contact", convention=convCIM, purpose=purpComp, rc=rc) -#endif - if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) -#ifdef CESMCOUPLED call t_stopf ('cice_init_total') -#endif deallocate(gindex_ice) deallocate(gindex) @@ -1036,7 +925,7 @@ subroutine ModelAdvance(gcomp, rc) integer :: tod_sync ! Sync current time of day (sec) character(char_len_long) :: restart_date character(char_len_long) :: restart_filename - logical :: isPresent + logical :: isPresent, isSet character(*) , parameter :: F00 = "('(ice_comp_nuopc) ',2a,i8,d21.14)" character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' !-------------------------------- @@ -1049,19 +938,15 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- call ice_timer_start(timer_total) ! time entire run -#ifdef CESMCOUPLED call t_barrierf('cice_run_total_BARRIER',mpi_comm_ice) call t_startf ('cice_run_total') -#endif !-------------------------------- ! Reset shr logging to my log file !-------------------------------- -#ifdef CESMCOUPLED call shr_file_getLogUnit (shrlogunit) call shr_file_setLogUnit (nu_diag) -#endif !-------------------------------- ! Query the Component for its clock, importState and exportState @@ -1074,10 +959,18 @@ subroutine ModelAdvance(gcomp, rc) ! 
Determine time of next atmospheric shortwave calculation !-------------------------------- - call State_GetScalar(importState, flds_scalar_index_nextsw_cday, nextsw_cday, & - flds_scalar_name, flds_scalar_num, rc) + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxNextSwCday", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - + if (isPresent .and. isSet) then + call State_GetScalar(importState, flds_scalar_index_nextsw_cday, nextsw_cday, & + flds_scalar_name, flds_scalar_num, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call ESMF_ClockGetNextTime(clock, nextTime, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeGet(nextTime, dayOfYear_r8=nextsw_cday, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if if (my_task == master_task) then write(nu_diag,F00) trim(subname),' cice istep, nextsw_cday = ',istep, nextsw_cday end if @@ -1085,44 +978,8 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- ! Obtain orbital values !-------------------------------- -#ifdef CESMCOUPLED call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#else - ! Start with icepack values then update with values defined in configure file if they exist - ! tcx, This should be identical with initialization, why do it again? Get rid of it - call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, lambm0_out=lambm0, obliqr_out=obliqr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - call NUOPC_CompAttributeGet(gcomp, name='orb_eccen', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) eccen - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_obliqr', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) obliqr - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_lambm0', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) lambm0 - end if - call NUOPC_CompAttributeGet(gcomp, name='orb_mvelpp', value=cvalue, isPresent=isPresent, rc=rc) - !if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) mvelpp - end if - - call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, & - lambm0_in=lambm0, obliqr_in=obliqr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) -#endif !-------------------------------- ! check that cice internal time is in sync with master clock before timestep update @@ -1181,19 +1038,11 @@ subroutine ModelAdvance(gcomp, rc) ! Unpack import state !-------------------------------- -#ifdef CESMCOUPLED call t_barrierf('cice_run_import_BARRIER',mpi_comm_ice) call t_startf ('cice_run_import') - call ice_timer_start(timer_cplrecv) -#endif - call ice_import(importState, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - -#ifdef CESMCOUPLED - call ice_timer_stop(timer_cplrecv) call t_stopf ('cice_run_import') -#endif ! write Debug output if (debug_import > 0 .and. my_task==master_task) then @@ -1206,29 +1055,17 @@ subroutine ModelAdvance(gcomp, rc) ! Advance cice and timestep update !-------------------------------- -!tcraig if (force_restart_now) then -! call CICE_Run(restart_filename=restart_filename) -! 
else - call CICE_Run() -! end if + call CICE_Run() !-------------------------------- ! Create export state !-------------------------------- -#ifdef CESMCOUPLED call t_barrierf('cice_run_export_BARRIER',mpi_comm_ice) call t_startf ('cice_run_export') - call ice_timer_start(timer_cplsend) -#endif - call ice_export(exportState, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - -#ifdef CESMCOUPLED - call ice_timer_stop(timer_cplsend) call t_stopf ('cice_run_export') -#endif if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & @@ -1236,10 +1073,8 @@ subroutine ModelAdvance(gcomp, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if -#ifdef CESMCOUPLED ! reset shr logging to my original values call shr_file_setLogUnit (shrlogunit) -#endif !-------------------------------- ! stop timers and print timer info @@ -1263,9 +1098,7 @@ subroutine ModelAdvance(gcomp, rc) stop_now = .false. endif -#ifdef CESMCOUPLED call t_stopf ('cice_run_total') -#endif ! Need to stop this at the end of every run phase in a coupled run. call ice_timer_stop(timer_total) @@ -1339,7 +1172,7 @@ subroutine ModelSetRunClock(gcomp, rc) call ESMF_GridCompGet(gcomp, name=name, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ESMF_LogWrite(subname//'setting alarms for' // trim(name), ESMF_LOGMSG_INFO) + call ESMF_LogWrite(subname//'setting alarms for ' // trim(name), ESMF_LOGMSG_INFO) !---------------- ! Restart alarm @@ -1436,7 +1269,6 @@ end subroutine ModelFinalize !=============================================================================== -#ifdef CESMCOUPLED subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) !---------------------------------------------------------- @@ -1451,19 +1283,22 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) integer , intent(out) :: rc ! output error ! local variables - real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp character(len=char_len_long) :: msgstr ! temporary character(len=char_len_long) :: cvalue ! temporary - type(ESMF_Time) :: CurrTime ! current time - integer :: year ! model year at current time - integer :: orb_year ! orbital year for current orbital computation - logical :: lprint - logical :: first_time = .true. + type(ESMF_Time) :: CurrTime ! current time + integer :: year ! model year at current time + integer :: orb_year ! orbital year for current orbital computation + logical :: lprint + logical :: first_time = .true. character(len=*) , parameter :: subname = "(cice_orbital_init)" !------------------------------------------------------------------------------- rc = ESMF_SUCCESS +#ifndef CESMCOUPLED + return +#else if (first_time) then ! Determine orbital attributes from input @@ -1570,23 +1405,18 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) file=__FILE__, line=__LINE__) first_time = .false. - - end subroutine ice_orbital_init #endif - !=============================================================================== + end subroutine ice_orbital_init + !=============================================================================== subroutine ice_cal_ymd2date(year, month, day, date) - implicit none - - ! !INPUT/OUTPUT PARAMETERS: - + ! input/output parameters: integer,intent(in ) :: year,month,day ! calendar year,month,day integer,intent(out) :: date ! 
coded (yyyymmdd) calendar date !--- local --- character(*),parameter :: subName = "(ice_cal_ymd2date)" - !------------------------------------------------------------------------------- ! NOTE: ! this calendar has a year zero (but no day or month zero) @@ -1599,4 +1429,5 @@ end subroutine ice_cal_ymd2date !=============================================================================== + end module ice_comp_nuopc diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 765bc3dd8..11cfcfbab 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -35,9 +35,9 @@ module ice_import_export use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags use icepack_intfc , only : icepack_liquidus_temperature + use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf #ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp - use perf_mod , only : t_startf, t_stopf, t_barrierf #endif implicit none @@ -105,30 +105,35 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam integer , intent(out) :: rc ! local variables - integer :: n + integer :: n character(char_len) :: stdname character(char_len) :: cvalue - logical :: flds_wiso ! use case - logical :: flds_i2o_per_cat ! .true. => select per ice thickness category + logical :: flds_wiso ! use case + logical :: flds_i2o_per_cat ! .true. => select per ice thickness category + logical :: isPresent, isSet character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)' !------------------------------------------------------------------------------- rc = ESMF_SUCCESS if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + ! Determine if the following attributes are sent by the driver and if so read them in flds_wiso = .false. - flds_i2o_per_cat = .false. -#ifdef CESMCOUPLED - call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, rc=rc) + call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) flds_wiso - call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) + if (isPresent .and. isSet) then + read(cvalue,*) flds_wiso + call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) + end if + #if (defined NEWCODE) - call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, rc=rc) + flds_i2o_per_cat = .false. + call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) send_i2x_per_cat - call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) -#endif + if (isPresent .and. isSet) then + read(cvalue,*) send_i2x_per_cat + call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) + end if #endif !----------------- @@ -166,16 +171,14 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) !cesm call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) !cesm -#ifdef CESMCOUPLED + ! the following are advertised but might not be connected if they are not present + ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific ! 
from atm - black carbon deposition fluxes (3) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) - ! from atm - wet dust deposition frluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstwet', ungridded_lbound=1, ungridded_ubound=4) - ! from - atm dry dust deposition frluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4) -#endif do n = 1,fldsToIce_num call NUOPC_Advertise(importState, standardName=fldsToIce(n)%stdname, & @@ -190,20 +193,23 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, trim(flds_scalar_name)) ! ice states - call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_mask' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'sea_ice_temperature' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_ice_volume' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_snow_volume' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_tref' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_snowh' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_u10' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dir_albedo' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dir_albedo' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_mask' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'sea_ice_surface_temperature' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_ice_volume' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_snow_volume' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_tref' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_snowh' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_u10' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dir_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dir_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo' ) + #if (defined NEWCODE) + ! the following are advertised but might not be connected if they are not present + ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction_n', & ungridded_lbound=1, ungridded_ubound=ncat) @@ -226,6 +232,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_vis_dif_flx' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dir_flx' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dif_flx' ) + #if (defined NEWCODE) if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ifrac_n', & @@ -236,16 +243,18 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_salt_rate' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_zonal' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_merid' ) -#ifdef CESMCOUPLED + + ! 
the following are advertised but might not be connected if they are not present + ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcpho' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcphi' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_flxdst' ) -#endif + if (flds_wiso) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_fresh_water_to_ocean_rate_wiso', & ungridded_lbound=1, ungridded_ubound=3) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & - ungridded_lbound=1, ungridded_ubound=3) + !call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & + ! ungridded_lbound=1, ungridded_ubound=3) call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref_wiso', & ungridded_lbound=1, ungridded_ubound=3) end if @@ -446,13 +455,9 @@ subroutine ice_import( importState, rc ) ! perform a halo update if (.not.prescribed_ice) then -#ifdef CESMCOUPLED call t_startf ('cice_imp_halo') -#endif call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_scalar) -#ifdef CESMCOUPLED call t_stopf ('cice_imp_halo') -#endif endif ! now fill in the ice internal data types @@ -537,13 +542,9 @@ subroutine ice_import( importState, rc ) if (.not.prescribed_ice) then -#ifdef CESMCOUPLED call t_startf ('cice_imp_halo') -#endif call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_vector) -#ifdef CESMCOUPLED call t_stopf ('cice_imp_halo') -#endif endif !$OMP PARALLEL DO PRIVATE(iblk,i,j) @@ -656,9 +657,8 @@ subroutine ice_import( importState, rc ) ! interpolate across the pole) ! use ANGLET which is on the T grid ! -#ifdef CESMCOUPLED call t_startf ('cice_imp_ocn') -#endif + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks @@ -667,14 +667,16 @@ subroutine ice_import( importState, rc ) ! ocean workx = uocn (i,j,iblk) ! currents, m/s worky = vocn (i,j,iblk) - uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + + uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! rotate to align with model i,j + worky*sin(ANGLET(i,j,iblk)) vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & - workx*sin(ANGLET(i,j,iblk)) workx = ss_tltx (i,j,iblk) ! sea sfc tilt, m/m worky = ss_tlty (i,j,iblk) - ss_tltx(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + + ss_tltx(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! rotate to align with model i,j + worky*sin(ANGLET(i,j,iblk)) ss_tlty(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & - workx*sin(ANGLET(i,j,iblk)) @@ -682,47 +684,46 @@ subroutine ice_import( importState, rc ) sst(i,j,iblk) = sst(i,j,iblk) - Tffresh ! sea sfc temp (C) sss(i,j,iblk) = max(sss(i,j,iblk),c0) -#ifndef CESMCOUPLED -!tcx should this be icepack_sea_freezing_temperature? - Tf (i,j,iblk) = icepack_liquidus_temperature(sss(i,j,iblk)) -#endif + enddo enddo + end do #ifdef CESMCOUPLED - ! Use shr_frz_mod for this, overwrite Tf computed above - Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) + ! Use shr_frz_mod for this + Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) +#else + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + !TODO: tcx should this be icepack_sea_freezing_temperature? + Tf (i,j,iblk) = icepack_liquidus_temperature(sss(i,j,iblk)) + end do + end do + end do + !$OMP END PARALLEL DO #endif - enddo - !$OMP END PARALLEL DO -#ifdef CESMCOUPLED call t_stopf ('cice_imp_ocn') -#endif ! Interpolate ocean dynamics variables from T-cell centers to ! U-cell centers. 
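    ! The freezing-temperature hunk above selects between two implementations:
    ! with CESMCOUPLED defined, shr_frz_freezetemp is applied to a whole 2-d
    ! block slice of sss at once, while the standalone build keeps the
    ! point-wise icepack_liquidus_temperature call inside its own OpenMP loop.
    ! An illustrative sketch of that selection (not a verbatim hunk), assuming
    ! iblk, i and j are the block and grid indices declared earlier in
    ! ice_import:
    !
    ! #ifdef CESMCOUPLED
    !    Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk))        ! whole slice per block
    ! #else
    !    do j = 1, ny_block
    !    do i = 1, nx_block
    !       Tf(i,j,iblk) = icepack_liquidus_temperature(sss(i,j,iblk))
    !    enddo
    !    enddo
    ! #endif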
if (.not.prescribed_ice) then -#ifdef CESMCOUPLED call t_startf ('cice_imp_t2u') -#endif call t2ugrid_vector(uocn) call t2ugrid_vector(vocn) call t2ugrid_vector(ss_tltx) call t2ugrid_vector(ss_tlty) -#ifdef CESMCOUPLED call t_stopf ('cice_imp_t2u') -#endif end if ! Atmosphere variables are needed in T cell centers in ! subroutine stability and are interpolated to the U grid ! later as necessary. -#ifdef CESMCOUPLED call t_startf ('cice_imp_atm') -#endif !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks do j = 1, ny_block @@ -743,9 +744,7 @@ subroutine ice_import( importState, rc ) enddo enddo !$OMP END PARALLEL DO -#ifdef CESMCOUPLED call t_stopf ('cice_imp_atm') -#endif end subroutine ice_import @@ -908,7 +907,8 @@ subroutine ice_export( exportState, rc ) ! ---- ! surface temperature of ice covered portion (degK) - call state_setexport(exportState, 'sea_ice_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) + !call state_setexport(exportState, 'sea_ice_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'sea_ice_surface_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! albedo vis dir diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index dd56ac441..78ea39b4e 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -1,5 +1,12 @@ module ice_prescribed_mod + ! !DESCRIPTION: + ! The prescribed ice model reads in ice concentration data from a netCDF + ! file. Ice thickness, temperature, the ice temperature profile are + ! prescribed. Air/ice fluxes are computed to get surface temperature, + ! Ice/ocean fluxes are set to zero, and ice dynamics are not calculated. + ! Regridding and data cycling capabilities are included. + #ifndef CESMCOUPLED use ice_kinds_mod @@ -7,19 +14,21 @@ module ice_prescribed_mod implicit none private ! except + public :: ice_prescribed_init ! initialize input data stream logical(kind=log_kind), parameter, public :: prescribed_ice = .false. ! true if prescribed ice -#else +contains + ! This is a stub routine for now + subroutine ice_prescribed_init(mpicom, compid, gindex) + integer(kind=int_kind), intent(in) :: mpicom + integer(kind=int_kind), intent(in) :: compid + integer(kind=int_kind), intent(in) :: gindex(:) + ! do nothing + end subroutine ice_prescribed_init - ! !DESCRIPTION: - ! The prescribed ice model reads in ice concentration data from a netCDF - ! file. Ice thickness, temperature, the ice temperature profile are - ! prescribed. Air/ice fluxes are computed to get surface temperature, - ! Ice/ocean fluxes are set to zero, and ice dynamics are not calculated. - ! Regridding and data cycling capabilities are included. +#else - ! 
!USES: - use shr_nl_mod, only : shr_nl_find_group_name + use shr_nl_mod , only : shr_nl_find_group_name use shr_strdata_mod use shr_dmodel_mod use shr_string_mod @@ -28,24 +37,23 @@ module ice_prescribed_mod use shr_mct_mod use mct_mod use pio - use ice_broadcast - use ice_communicate , only : my_task, master_task, MPI_COMM_ICE + use ice_communicate , only : my_task, master_task, MPI_COMM_ICE use ice_kinds_mod use ice_fileunits - use ice_exit , only : abort_ice - use ice_domain_size , only : nx_global, ny_global, ncat, nilyr, nslyr, max_blocks + use ice_exit , only : abort_ice + use ice_domain_size , only : nx_global, ny_global, ncat, nilyr, nslyr, max_blocks use ice_constants - use ice_blocks , only : nx_block, ny_block, block, get_block - use ice_domain , only : nblocks, distrb_info, blocks_ice - use ice_grid , only : TLAT, TLON, hm, tmask, tarea, grid_type, ocn_gridcell_frac - use ice_calendar , only : idate, sec, calendar_type - use ice_arrays_column, only : hin_max + use ice_blocks , only : nx_block, ny_block, block, get_block + use ice_domain , only : nblocks, distrb_info, blocks_ice + use ice_grid , only : TLAT, TLON, hm, tmask, tarea, grid_type, ocn_gridcell_frac + use ice_calendar , only : idate, sec, calendar_type + use ice_arrays_column , only : hin_max use ice_read_write - use ice_exit, only: abort_ice - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_tracer_sizes - use icepack_intfc, only: icepack_query_parameters + use ice_exit , only: abort_ice + use icepack_intfc , only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc , only: icepack_query_tracer_indices, icepack_query_tracer_sizes + use icepack_intfc , only: icepack_query_parameters implicit none private ! except @@ -56,56 +64,38 @@ module ice_prescribed_mod public :: ice_prescribed_phys ! set prescribed ice state and fluxes ! !PUBLIC DATA MEMBERS: - logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice - + logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice integer(kind=int_kind),parameter :: nFilesMaximum = 400 ! max number of files - integer(kind=int_kind) :: stream_year_first ! first year in stream to use - integer(kind=int_kind) :: stream_year_last ! last year in stream to use - integer(kind=int_kind) :: model_year_align ! align stream_year_first - ! with this model year - - character(len=char_len_long) :: stream_fldVarName - character(len=char_len_long) :: stream_fldFileName(nFilesMaximum) - character(len=char_len_long) :: stream_domTvarName - character(len=char_len_long) :: stream_domXvarName - character(len=char_len_long) :: stream_domYvarName - character(len=char_len_long) :: stream_domAreaName - character(len=char_len_long) :: stream_domMaskName - character(len=char_len_long) :: stream_domFileName - character(len=char_len_long) :: stream_mapread - logical(kind=log_kind) :: prescribed_ice_fill ! true if data fill required - - type(shr_strdata_type) :: sdat ! prescribed data stream - character(len=char_len_long) :: fldList ! list of fields in data stream - real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! ice cover - -! real (kind=dbl_kind), parameter :: & -! cp_sno = 0.0_dbl_kind & ! specific heat of snow (J/kg/K) -! , rLfi = Lfresh*rhoi & ! latent heat of fusion ice (J/m^3) -! , rLfs = Lfresh*rhos & ! latent heat of fusion snow (J/m^3) -! , rLvi = Lvap*rhoi & ! latent heat of vapor*rhoice (J/m^3) -! , rLvs = Lvap*rhos & ! 
latent heat of vapor*rhosno (J/m^3) -! , rcpi = cp_ice*rhoi & ! heat capacity of fresh ice (J/m^3) -! , rcps = cp_sno*rhos & ! heat capacity of snow (J/m^3) -! , rcpidepressT = rcpi*depressT & ! param for finding T(z) from q (J/m^3) -! , rLfidepressT = rLfi*depressT ! param for heat capacity (J deg/m^3) -! ! heat capacity of sea ice, rhoi*C=rcpi+rLfidepressT*salinity/T^2 - -!======================================================================= + integer(kind=int_kind) :: stream_year_first ! first year in stream to use + integer(kind=int_kind) :: stream_year_last ! last year in stream to use + integer(kind=int_kind) :: model_year_align ! align stream_year_first with this model year + character(len=char_len_long) :: stream_fldVarName + character(len=char_len_long) :: stream_fldFileName(nFilesMaximum) + character(len=char_len_long) :: stream_domTvarName + character(len=char_len_long) :: stream_domXvarName + character(len=char_len_long) :: stream_domYvarName + character(len=char_len_long) :: stream_domAreaName + character(len=char_len_long) :: stream_domMaskName + character(len=char_len_long) :: stream_domFileName + character(len=char_len_long) :: stream_mapread + logical(kind=log_kind) :: prescribed_ice_fill ! true if data fill required + type(shr_strdata_type) :: sdat ! prescribed data stream + character(len=char_len_long) :: fldList ! list of fields in data stream + real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! ice cover + contains -!=============================================================================== subroutine ice_prescribed_init(mpicom, compid, gindex) - use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys, shr_pio_getioformat - ! !DESCRIPTION: ! Prescribed ice initialization - needed to ! work with new shr_strdata module derived type - ! !INPUT/OUTPUT PARAMETERS: + use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys, shr_pio_getioformat + implicit none include 'mpif.h' + ! !nput/output parameters: integer(kind=int_kind), intent(in) :: mpicom integer(kind=int_kind), intent(in) :: compid integer(kind=int_kind), intent(in) :: gindex(:) @@ -257,7 +247,6 @@ subroutine ice_prescribed_init(mpicom, compid, gindex) end subroutine ice_prescribed_init !======================================================================= - subroutine ice_prescribed_run(mDateIn, secIn) ! !DESCRIPTION: @@ -329,25 +318,12 @@ subroutine ice_prescribed_run(mDateIn, secIn) end subroutine ice_prescribed_run !=============================================================================== - !BOP =========================================================================== - ! - ! !IROUTINE: ice_prescribed_phys -- set prescribed ice state and fluxes - ! - ! !DESCRIPTION: - ! - ! Set prescribed ice state using input ice concentration; - ! set surface ice temperature to atmospheric value; use - ! linear temperature gradient in ice to ocean temperature. - ! - ! !REVISION HISTORY: - ! 2005-May-23 - J. Schramm - Updated with data models - ! 2004-July - J. Schramm - Modified to allow variable snow cover - ! 2001-May - B. P. Briegleb - Original version - ! - ! !INTERFACE: ------------------------------------------------------------------ - subroutine ice_prescribed_phys + ! Set prescribed ice state using input ice concentration; + ! set surface ice temperature to atmospheric value; use + ! linear temperature gradient in ice to ocean temperature. + ! 
!USES: use ice_flux use ice_state @@ -389,20 +365,6 @@ subroutine ice_prescribed_phys if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - !----------------------------------------------------------------- - ! Initialize ice state - !----------------------------------------------------------------- - - ! TODO - can we now get rid of the following??? - - ! aicen(:,:,:,:) = c0 - ! vicen(:,:,:,:) = c0 - ! eicen(:,:,:,:) = c0 - - ! do nc=1,ncat - ! trcrn(:,:,nt_Tsfc,nc,:) = Tf(:,:,:) - ! enddo - !----------------------------------------------------------------- ! Set ice cover over land to zero, not sure if this should be ! be done earier, before time/spatial interp?????? @@ -548,7 +510,6 @@ subroutine ice_prescribed_phys end subroutine ice_prescribed_phys !=============================================================================== - subroutine ice_prescribed_set_domain( lsize, mpicom, gsmap_i, dom_i ) ! Arguments diff --git a/cicecore/shared/ice_restart_shared.F90 b/cicecore/shared/ice_restart_shared.F90 index 82b82c5ce..6578ef3ad 100644 --- a/cicecore/shared/ice_restart_shared.F90 +++ b/cicecore/shared/ice_restart_shared.F90 @@ -11,6 +11,7 @@ module ice_restart_shared logical (kind=log_kind), public :: & restart , & ! if true, initialize using restart file instead of defaults restart_ext, & ! if true, read/write extended grid (with ghost cells) + restart_coszen, & ! if true, read/write coszen use_restart_time ! if true, use time written in core restart file character (len=char_len), public :: & diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice index eb1b8a4e7..ca718548a 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -18,10 +18,10 @@ setenv THRD no # set to yes for OpenMP threading if (${SITE} =~ cheyenne*) then setenv ARCH cheyenne_intel -#else if (${SITE} =~ Orion*) then -# setenv ARCH orion_intel -#else if (${SITE} =~ hera*) then -# setenv ARCH hera_intel +else if (${SITE} =~ Orion*) then + setenv ARCH orion_intel +else if (${SITE} =~ hera*) then + setenv ARCH hera_intel else echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" exit -2 @@ -129,6 +129,7 @@ endif mkdir -p ${BINDIR} cp -f ${OBJDIR}/libcice6.a ${BINDIR}/ cp -f ${OBJDIR}/ice_comp_nuopc.mod ${BINDIR}/ +cp -f ${OBJDIR}/ice_timers.mod ${BINDIR}/ cat >! ${BINDIR}/cice6.mk << EOF # ESMF self-describing build dependency makefile fragment diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 9d35b4366..feb08eb09 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -181,6 +181,7 @@ wave_spec_type = 'none' wave_spec_file = 'unknown_wave_spec_file' nfreq = 25 + restart_coszen = .false. restore_ice = .false. restore_ocn = .false. trestore = 90 diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 8cadc5073..2bbf4e231 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -527,6 +527,7 @@ either Celsius or Kelvin units). 
"restart_format", ":math:`\bullet` restart file format", "" "restart_[tracer]", ":math:`\bullet` if true, read tracer restart file", "" "restart_ext", ":math:`\bullet` if true, read/write halo cells in restart file", "" + "restart_coszen", ":math:`\bullet` if true, read/write coszen in restart file", "" "restore_bgc", ":math:`\bullet` if true, restore nitrate/silicate to data", "" "restore_ice", ":math:`\bullet` if true, restore ice state along lateral boundaries", "" "restore_ocn", ":math:`\bullet` restore sst to data", "" diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 84d3633b1..619acce44 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -126,7 +126,7 @@ setup_nml "``histfreq_n``", "integer array", "frequency history output is written with ``histfreq``", "1,1,1,1,1" "``history_dir``", "string", "path to history output directory", "'./'" "``history_file``", "string", "output file for history", "'iceh'" - "``history_format``", "``default``", "read/write restart files in default format", "``default``" + "``history_format``", "``default``", "read/write history files in default format", "``default``" "", "``pio_pnetcdf``", "read/write restart files with pnetcdf in pio", "" "``history_precision``", "integer", "history file precision: 4 or 8 byte", "4" "``ice_ic``", "``default``", "latitude and sst dependent initial condition", "``default``" @@ -451,6 +451,7 @@ forcing_nml "", "``mm_per_month``", "", "" "", "``mm_per_sec``", "(same as MKS units)", "" "", "``m_per_sec``", "", "" + "``restart_coszen``", "logical", "read/write coszen in restart files", "``.false.``" "``restore_ocn``", "logical", "restore sst to data", "``.false.``" "``restore_ice``", "logical", "restore ice state along lateral boundaries", "``.false.``" "``tfrz_option``", "``linear_salt``", "linear functino of salinity (ktherm=1)", "``mushy``" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index b7d9c0f47..44d4ef1d6 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -569,6 +569,10 @@ An additional namelist option, ``restart_ext`` specifies whether halo cells are included in the restart files. This option is useful for tripole and regional grids, but can not be used with PIO. +An additional namelist option, ``restart_coszen`` specifies whether the +cosine of the zenith angle is included in the restart files. This is mainly +used in coupled models. + MPI is initialized in *init\_communicate* for both coupled and stand-alone MPI runs. 
The ice component communicates with a flux coupler or other climate components via external routines that handle the From 819eeddaacdd354c35db1be41ddd05bbb95fcf11 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Fri, 31 Jul 2020 09:11:38 -0700 Subject: [PATCH 07/13] Update CPP implementation (#490) * Update CPPs Rename ncdf to USE_NETCDF (ncdf still works) and update USE_NETCDF implementation Update NO_I8 Update NO_R16 Remove popcice * Convert ORCA_GRID CPP to orca_halogrid namelist Convert a RASM_MODS CPP to bathymetry_format namelist to support reading a pop vertical grid file Convert gather_scatter_barrier CPP to add_mpi_barriers namelist Document * update documentation * update documentation, remove coupled macros as this is going away soon and change cpp directive to cpp macro Co-authored-by: Philippe Blain --- cicecore/cicedynB/general/ice_forcing.F90 | 91 +++------ cicecore/cicedynB/general/ice_forcing_bgc.F90 | 24 +-- cicecore/cicedynB/general/ice_init.F90 | 28 ++- .../comm/mpi/ice_communicate.F90 | 7 + .../comm/mpi/ice_gather_scatter.F90 | 69 +++---- .../infrastructure/comm/mpi/ice_reprosum.F90 | 4 +- .../comm/serial/ice_communicate.F90 | 3 + .../comm/serial/ice_reprosum.F90 | 4 +- .../cicedynB/infrastructure/ice_domain.F90 | 24 ++- cicecore/cicedynB/infrastructure/ice_grid.F90 | 34 +++- .../infrastructure/ice_read_write.F90 | 173 +++++++++--------- .../io/io_netcdf/ice_history_write.F90 | 11 +- .../io/io_netcdf/ice_restart.F90 | 39 ++++ .../io/io_pio2/ice_history_write.F90 | 6 - .../infrastructure/io/io_pio2/ice_restart.F90 | 2 +- cicecore/drivers/direct/hadgem3/CICE.F90 | 2 - .../drivers/direct/hadgem3/CICE_InitMod.F90 | 6 - cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 6 - cicecore/drivers/nuopc/dmi/CICE.F90 | 2 - cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 6 - cicecore/drivers/standalone/cice/CICE.F90 | 2 - .../drivers/standalone/cice/CICE_InitMod.F90 | 6 - configuration/scripts/cice.build | 4 +- .../forapps/ufs/comp_ice.backend.libcice | 4 +- configuration/scripts/ice_in | 3 + doc/source/cice_index.rst | 1 + doc/source/user_guide/ug_case_settings.rst | 54 +++++- doc/source/user_guide/ug_implementation.rst | 6 + doc/source/user_guide/ug_running.rst | 22 ++- 29 files changed, 362 insertions(+), 281 deletions(-) diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedynB/general/ice_forcing.F90 index 66a7d9ef3..4c88037ed 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedynB/general/ice_forcing.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! ! Reads and interpolates forcing data for atmosphere and ocean quantities. @@ -300,9 +303,6 @@ subroutine init_forcing_ocn(dt) use ice_domain, only: nblocks use ice_domain_size, only: max_blocks use ice_flux, only: sss, sst, Tf -#ifdef ncdf - use netcdf -#endif real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -866,7 +866,6 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data_nc)' -#ifdef ncdf integer (kind=int_kind) :: & nrec , & ! record number to read n2, n4 , & ! like ixm and ixp, but @@ -967,9 +966,6 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & call ice_timer_stop(timer_readwrite) ! reading/writing -#else - field_data = c0 ! 
to satisfy intent(out) attribute -#endif end subroutine read_data_nc !======================================================================= @@ -1007,7 +1003,6 @@ subroutine read_data_nc_hycom (flag, recd, & intent(out) :: & field_data ! 2 values needed for interpolation -#ifdef ncdf ! local variables integer (kind=int_kind) :: & fid ! file id for netCDF routines @@ -1040,11 +1035,6 @@ subroutine read_data_nc_hycom (flag, recd, & call ice_timer_stop(timer_readwrite) ! reading/writing -#else - field_data = c0 ! to satisfy intent(out) attribute - write(*,*)'ERROR: CICE not compiled with NetCDF' - stop -#endif end subroutine read_data_nc_hycom !======================================================================= @@ -3342,9 +3332,6 @@ subroutine oned_data use ice_flux, only: uatm, vatm, Tair, fsw, fsnow, Qa, rhoa, frain -#ifdef ncdf - use netcdf - ! local parameters character (char_len_long) :: & @@ -3402,7 +3389,7 @@ subroutine oned_data Temp = work Tair(:,:,:) = Temp - if (my_task == master_task) status = nf90_close(fid) + call ice_close_nc(fid) ! hourly solar data beginning Jan 1, 1989, 01:00 met_file = fsw_file @@ -3412,7 +3399,7 @@ subroutine oned_data call ice_read_nc(fid,istep1,fieldname,work,diag) fsw(:,:,:) = work - if (my_task == master_task) status = nf90_close(fid) + call ice_close_nc(fid) ! hourly interpolated monthly data beginning Jan 1, 1989, 01:00 met_file = humid_file @@ -3426,7 +3413,7 @@ subroutine oned_data call ice_read_nc(fid,istep1,fieldname,work,diag) fsnow(:,:,:) = work - if (my_task == master_task) status = nf90_close(fid) + call ice_close_nc(fid) !------------------------------------------------------------------- ! Find specific humidity using Hyland-Wexler formulation @@ -3447,8 +3434,6 @@ subroutine oned_data cldf (:,:,:) = p25 ! cloud fraction frain(:,:,:) = c0 ! this is available in hourlymet_rh file -#endif - end subroutine oned_data !======================================================================= @@ -3648,7 +3633,7 @@ subroutine ocn_data_ncar_init use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: max_blocks -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif @@ -3664,7 +3649,6 @@ subroutine ocn_data_ncar_init 'T', 'S', 'hblt', 'U', 'V', & 'dhdx', 'dhdy', 'qdp' / -#ifdef ncdf integer (kind=int_kind) :: & fid , & ! file id dimid ! dimension id @@ -3673,7 +3657,6 @@ subroutine ocn_data_ncar_init status , & ! status flag nlat , & ! number of longitudes of data nlon ! number of latitudes of data -#endif real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & work1 @@ -3701,7 +3684,7 @@ subroutine ocn_data_ncar_init endif ! master_task if (trim(ocn_data_format) == 'nc') then -#ifdef ncdf +#ifdef USE_NETCDF if (my_task == master_task) then call ice_open_nc(sst_file, fid) @@ -3741,7 +3724,10 @@ subroutine ocn_data_ncar_init enddo ! month loop enddo ! field loop - if (my_task == master_task) status = nf90_close(fid) + if (my_task == master_task) call ice_close_nc(fid) +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(sst_file), & + file=__FILE__, line=__LINE__) #endif else ! binary format @@ -3803,11 +3789,11 @@ subroutine ocn_data_ncar_init_3D use ice_domain_size, only: max_blocks use ice_grid, only: to_ugrid, ANGLET use ice_read_write, only: ice_read_nc_uv -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif -#ifdef ncdf +#ifdef USE_NETCDF integer (kind=int_kind) :: & n , & ! field index m , & ! month index @@ -3856,7 +3842,7 @@ subroutine ocn_data_ncar_init_3D endif ! 
master_task if (trim(ocn_data_format) == 'nc') then -#ifdef ncdf +#ifdef USE_NETCDF if (my_task == master_task) then call ice_open_nc(sst_file, fid) @@ -3902,7 +3888,7 @@ subroutine ocn_data_ncar_init_3D enddo ! month loop enddo ! field loop - if (my_task == master_task) status = nf90_close(fid) + if (my_task == master_task) call ice_close_nc(fid) ! Rotate vector quantities and shift to U-grid do n=4,6,2 @@ -3923,6 +3909,9 @@ subroutine ocn_data_ncar_init_3D enddo ! month loop enddo ! field loop +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif else ! binary format @@ -4327,9 +4316,6 @@ subroutine ocn_data_hycom_init use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_flux, only: sss, sst, Tf -#ifdef ncdf - use netcdf -#endif integer (kind=int_kind) :: & i, j, iblk , & ! horizontal indices @@ -4611,7 +4597,6 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data_nc_point)' -#ifdef ncdf integer (kind=int_kind) :: & nrec , & ! record number to read n2, n4 , & ! like ixm and ixp, but @@ -4723,9 +4708,6 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & call ice_timer_stop(timer_readwrite) ! reading/writing -#else - field_data = c0 ! to satisfy intent(out) attribute -#endif end subroutine read_data_nc_point !======================================================================= @@ -4779,13 +4761,9 @@ subroutine ISPOL_data ! use ice_flux, only: uatm, vatm, Tair, fsw, Qa, rhoa, & frain, fsnow, flw -#ifdef ncdf - use netcdf -#endif !local parameters -#ifdef ncdf character (char_len_long) :: & met_file, & ! netcdf filename fieldname ! field name in netcdf file @@ -4822,7 +4800,6 @@ subroutine ISPOL_data sec1hr ! number of seconds in 1 hour logical (kind=log_kind) :: read1 -#endif integer (kind=int_kind) :: & recnum , & ! record number @@ -4830,7 +4807,6 @@ subroutine ISPOL_data character(len=*), parameter :: subname = '(ISPOL_data)' -#ifdef ncdf call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -4965,14 +4941,6 @@ subroutine ISPOL_data flw(:,:,:) = c1intp * flw_data_p(1) & + c2intp * flw_data_p(2) endif !nc -#else - - uatm(:,:,:) = c0 !wind velocity (m/s) - vatm(:,:,:) = c0 - fsw(:,:,:) = c0 - fsnow (:,:,:) = c0 - -#endif !flw given cldf and Tair calculated in prepare_forcing @@ -5015,11 +4983,7 @@ subroutine ocn_data_ispol_init ! use ice_gather_scatter use ice_read_write -#ifdef ncdf - use netcdf -#endif -#ifdef ncdf integer (kind=int_kind) :: & n , & ! field index m ! month index @@ -5038,7 +5002,6 @@ subroutine ocn_data_ispol_init integer (kind=int_kind) :: & status ! status flag -#endif character(len=*), parameter :: subname = '(ocn_data_ispol_init)' @@ -5058,7 +5021,6 @@ subroutine ocn_data_ispol_init endif ! master_task if (trim(ocn_data_format) == 'nc') then -#ifdef ncdf if (my_task == master_task) then call ice_open_nc(sst_file, fid) endif ! master_task @@ -5078,8 +5040,7 @@ subroutine ocn_data_ispol_init enddo ! month loop enddo ! field loop - if (my_task == master_task) status = nf90_close(fid) -#endif + if (my_task == master_task) call ice_close_nc(fid) else ! 
binary format call abort_ice (error_message=subname//'new ocean forcing is netcdf only', & @@ -5188,9 +5149,6 @@ subroutine get_wave_spec use ice_constants, only: c0 use ice_domain_size, only: nfreq use ice_timers, only: ice_timer_start, ice_timer_stop, timer_fsd -#ifdef ncdf - use netcdf -#endif ! local variables integer (kind=int_kind) :: & @@ -5228,16 +5186,19 @@ subroutine get_wave_spec ! read more realistic data from a file if ((trim(wave_spec_type) == 'constant').OR.(trim(wave_spec_type) == 'random')) then if (trim(wave_spec_file(1:4)) == 'unkn') then - call abort_ice (subname//'ERROR: wave_spec_file '//trim(wave_spec_file)) + call abort_ice (subname//'ERROR: wave_spec_file '//trim(wave_spec_file), & + file=__FILE__, line=__LINE__) else -#ifdef ncdf +#ifdef USE_NETCDF call ice_open_nc(wave_spec_file,fid) call ice_read_nc_xyf (fid, 1, 'efreq', wave_spectrum(:,:,:,:), dbug, & field_loc_center, field_type_scalar) call ice_close_nc(fid) #else - write (nu_diag,*) "wave spectrum file not available, requires ncdf" + write (nu_diag,*) "wave spectrum file not available, requires cpp USE_NETCDF" write (nu_diag,*) "wave spectrum file not available, using default profile" + call abort_ice (subname//'ERROR: wave_spec_file '//trim(wave_spec_file), & + file=__FILE__, line=__LINE__) #endif endif endif diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedynB/general/ice_forcing_bgc.F90 index 4eedcfb80..e5ef851fa 100644 --- a/cicecore/cicedynB/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedynB/general/ice_forcing_bgc.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! ! Reads and interpolates forcing data for biogeochemistry @@ -587,7 +590,6 @@ subroutine faero_data use ice_flux_bgc, only: faero_atm use ice_forcing, only: interp_coeff_monthly, read_clim_data_nc, interpolate_data -#ifdef ncdf ! local parameters real (kind=dbl_kind), dimension(:,:,:,:), allocatable, & @@ -672,7 +674,6 @@ subroutine faero_data where (faero_atm(:,:,:,:) > 1.e20) faero_atm(:,:,:,:) = c0 deallocate( aero1_data, aero2_data, aero3_data ) -#endif end subroutine faero_data @@ -688,7 +689,6 @@ subroutine fzaero_data use ice_flux_bgc, only: faero_atm use ice_forcing, only: interp_coeff_monthly, read_clim_data_nc, interpolate_data -#ifdef ncdf ! local parameters real (kind=dbl_kind), dimension(:,:,:,:), allocatable, & @@ -766,7 +766,6 @@ subroutine fzaero_data where (faero_atm(:,:,nlt_zaero(1),:) > 1.e20) faero_atm(:,:,nlt_zaero(1),:) = c0 deallocate( aero_data ) -#endif end subroutine fzaero_data @@ -780,10 +779,6 @@ subroutine init_bgc_data (fed1,fep1) use ice_read_write, only: ice_open_nc, ice_read_nc, ice_close_nc -#ifdef ncdf - use netcdf -#endif - real (kind=dbl_kind), dimension(nx_block, ny_block, max_blocks), intent(inout) :: & fed1, & ! first dissolved iron pool (nM) fep1 ! first particulate iron pool (nM) @@ -868,7 +863,7 @@ subroutine faero_optics gaer_bc_tab, & ! BC aerosol asymmetry parameter (cos(theta)) bcenh ! BC absorption enhancement facto -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif @@ -876,7 +871,6 @@ subroutine faero_optics logical (kind=log_kind) :: modal_aero -#ifdef ncdf integer (kind=int_kind) :: & varid , & ! variable id status , & ! status output from netcdf routines @@ -891,7 +885,6 @@ subroutine faero_optics character (char_len_long) :: & optics_file, & ! netcdf filename fieldname ! 
field name in netcdf file -#endif character(len=*), parameter :: subname = '(faero_optics)' @@ -968,8 +961,8 @@ subroutine faero_optics if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) -#ifdef ncdf if (modal_aero) then +#ifdef USE_NETCDF optics_file = & '/usr/projects/climate/njeffery/DATA/CAM/snicar/snicar_optics_5bnd_mam_c140303.nc' @@ -1004,12 +997,11 @@ subroutine faero_optics call broadcast_array(bcenh(n,:,k), master_task) enddo enddo - endif ! modal_aero #else - if (modal_aero) then - call abort_ice(subname//'ERROR: netcdf required for modal_aero') - endif + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif + endif ! modal_aero end subroutine faero_optics diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index c0f7a4eaa..19c729826 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -59,7 +59,7 @@ subroutine input_data use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_diagnostics, only: diag_file, print_global, print_points, latpnt, lonpnt - use ice_domain, only: close_boundaries, ns_boundary_type + use ice_domain, only: close_boundaries, ns_boundary_type, orca_halogrid use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd, nfreq, & n_iso, n_aero, n_zaero, n_algae, & n_doc, n_dic, n_don, n_fed, n_fep, & @@ -92,6 +92,7 @@ subroutine input_data use ice_arrays_column, only: bgc_data_dir, fe_data_type use ice_grid, only: grid_file, gridcpl_file, kmt_file, & bathymetry_file, use_bathymetry, & + bathymetry_format, & grid_type, grid_format, & dxrect, dyrect use ice_dyn_shared, only: ndte, kdyn, revised_evp, yield_curve, & @@ -160,10 +161,10 @@ subroutine input_data namelist /grid_nml/ & grid_format, grid_type, grid_file, kmt_file, & - bathymetry_file, use_bathymetry, nfsd, & + bathymetry_file, use_bathymetry, nfsd, bathymetry_format, & ncat, nilyr, nslyr, nblyr, & kcatbound, gridcpl_file, dxrect, dyrect, & - close_boundaries + close_boundaries, orca_halogrid namelist /tracer_nml/ & tr_iage, restart_age, & @@ -279,8 +280,10 @@ subroutine input_data grid_type = 'rectangular' ! define rectangular grid internally grid_file = 'unknown_grid_file' gridcpl_file = 'unknown_gridcpl_file' - bathymetry_file = 'unknown_bathymetry_file' - use_bathymetry = .false. + orca_halogrid = .false. ! orca haloed grid + bathymetry_file = 'unknown_bathymetry_file' + bathymetry_format = 'default' + use_bathymetry = .false. kmt_file = 'unknown_kmt_file' version_name = 'unknown_version_name' ncat = 0 ! number of ice thickness categories @@ -577,7 +580,9 @@ subroutine input_data call broadcast_scalar(grid_type, master_task) call broadcast_scalar(grid_file, master_task) call broadcast_scalar(gridcpl_file, master_task) + call broadcast_scalar(orca_halogrid, master_task) call broadcast_scalar(bathymetry_file, master_task) + call broadcast_scalar(bathymetry_format, master_task) call broadcast_scalar(use_bathymetry, master_task) call broadcast_scalar(kmt_file, master_task) call broadcast_scalar(kitd, master_task) @@ -777,16 +782,6 @@ subroutine input_data abort_list = trim(abort_list)//":1" endif -#ifndef ncdf - if (grid_format /= 'bin' .or. atm_data_format /= 'bin' .or. 
ocn_data_format /= 'bin') then - if (my_task == master_task) then - write(nu_diag,*) subname//' ERROR: ncdf CPP flag unset, data formats must be bin' - write(nu_diag,*) subname//' ERROR: check grid_format, atm_data_format, ocn_data_format or set ncdf CPP' - endif - abort_list = trim(abort_list)//":2" - endif -#endif - if (advection /= 'remap' .and. advection /= 'upwind' .and. advection /= 'none') then if (my_task == master_task) write(nu_diag,*) subname//' ERROR: invalid advection=',trim(advection) abort_list = trim(abort_list)//":3" @@ -1082,6 +1077,7 @@ subroutine input_data tmpstr2 = ' bathymetric input data is not used' endif write(nu_diag,1012) ' use_bathymetry = ', use_bathymetry,trim(tmpstr2) + write(nu_diag,*) ' bathymetry_format= ', trim(bathymetry_format) endif write(nu_diag,1022) ' nilyr = ', nilyr, ' number of ice layers (equal thickness)' write(nu_diag,1022) ' nslyr = ', nslyr, ' number of snow layers (equal thickness)' @@ -1485,6 +1481,8 @@ subroutine input_data endif write(nu_diag,1010) ' close_boundaries = ', & close_boundaries + write(nu_diag,1010) ' orca_halogrid = ', & + orca_halogrid write(nu_diag,1010) ' conserv_check = ', conserv_check diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 index d574ebdfe..a7d186083 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 @@ -45,6 +45,9 @@ module ice_communicate mpitagHalo = 1, &! MPI tags for various mpitag_gs = 1000 ! communication patterns + logical (log_kind), public :: & + add_mpi_barriers = .false. ! turn on mpi barriers for throttling + !*********************************************************************** contains @@ -98,7 +101,11 @@ subroutine init_communicate(mpicom) master_task = 0 call MPI_COMM_RANK (MPI_COMM_ICE, my_task, ierr) +#if (defined NO_R16) + mpiR16 = MPI_REAL8 +#else mpiR16 = MPI_REAL16 +#endif mpiR8 = MPI_REAL8 mpiR4 = MPI_REAL4 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 index ba6476904..010a5c8c4 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 @@ -16,7 +16,8 @@ module ice_gather_scatter use mpi ! 
MPI Fortran module use ice_kinds_mod - use ice_communicate, only: my_task, mpiR8, mpiR4, mpitag_gs, MPI_COMM_ICE + use ice_communicate, only: my_task, mpiR8, mpiR4, mpitag_gs, MPI_COMM_ICE, & + ice_barrier, add_mpi_barriers use ice_constants, only: spval_dbl, c0, & field_loc_center, field_loc_NEcorner, field_loc_Nface, field_loc_Eface, & field_loc_noupdate, & @@ -233,9 +234,9 @@ subroutine gather_global_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -400,9 +401,9 @@ subroutine gather_global_real(ARRAY_G, ARRAY, dst_task, src_dist) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -567,9 +568,9 @@ subroutine gather_global_int(ARRAY_G, ARRAY, dst_task, src_dist) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -961,9 +962,9 @@ subroutine gather_global_ext_dbl(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -1284,9 +1285,9 @@ subroutine gather_global_ext_int(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -1607,9 +1608,9 @@ subroutine gather_global_ext_log(ARRAY_G, ARRAY, dst_task, src_dist, spc_val) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -1983,9 +1984,9 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & enddo endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -2372,9 +2373,9 @@ subroutine scatter_global_real(ARRAY, ARRAY_G, src_task, dst_dist, & enddo endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -2761,9 +2762,9 @@ subroutine scatter_global_int(ARRAY, ARRAY_G, src_task, dst_dist, & enddo endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -3093,9 +3094,9 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) deallocate(rcv_request, rcv_status) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- @@ -3379,9 +3380,9 @@ subroutine scatter_global_stress(ARRAY, 
ARRAY_G1, ARRAY_G2, & deallocate(rcv_request, rcv_status) endif -#ifdef gather_scatter_barrier - call MPI_BARRIER(MPI_COMM_ICE, ierr) -#endif + if (add_mpi_barriers) then + call ice_barrier() + endif !----------------------------------------------------------------------- diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 b/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 index f85109339..27f66f712 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 +++ b/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 @@ -39,7 +39,7 @@ MODULE ice_reprosum #ifndef SERIAL_REMOVE_MPI use mpi ! MPI Fortran module #endif -#if ( defined noI8 ) +#if defined (NO_I8) ! Workaround for when shr_kind_i8 is not supported. use ice_kinds_mod, only: r8 => dbl_kind, i8 => int_kind #else @@ -1032,7 +1032,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & #ifdef SERIAL_REMOVE_MPI i8_arr_gsum_level = i8_arr_lsum_level #else -#if ( defined noI8 ) +#if defined (NO_I8) ! Workaround for when i8 is not supported. ! if (detailed_timing) call xicex_timer_start("repro_sum_allr_i4") call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 index 2468f485b..dbcf78899 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 @@ -27,6 +27,9 @@ module ice_communicate my_task, &! MPI task number for this task master_task ! task number of master task + logical (log_kind), public :: & + add_mpi_barriers = .false. ! turn on mpi barriers for throttling + !*********************************************************************** contains diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 index ec852e2c3..1e4307535 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 @@ -40,7 +40,7 @@ MODULE ice_reprosum #ifndef SERIAL_REMOVE_MPI use mpi ! MPI Fortran module #endif -#if ( defined noI8 ) +#if defined (NO_I8) ! Workaround for when shr_kind_i8 is not supported. use ice_kinds_mod, only: r8 => dbl_kind, i8 => int_kind #else @@ -1033,7 +1033,7 @@ subroutine ice_reprosum_int (arr, arr_gsum, nsummands, dsummands, nflds, & #ifdef SERIAL_REMOVE_MPI i8_arr_gsum_level = i8_arr_lsum_level #else -#if ( defined noI8 ) +#if defined (NO_I8) ! Workaround for when i8 is not supported. ! 
if (detailed_timing) call xicex_timer_start("repro_sum_allr_i4") call mpi_allreduce (i8_arr_lsum_level, i8_arr_gsum_level, & diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedynB/infrastructure/ice_domain.F90 index 3916039b5..cc57ea585 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedynB/infrastructure/ice_domain.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||| module ice_domain @@ -14,7 +17,8 @@ module ice_domain use ice_kinds_mod use ice_constants, only: shlat, nhlat - use ice_communicate, only: my_task, master_task, get_num_procs + use ice_communicate, only: my_task, master_task, get_num_procs, & + add_mpi_barriers use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_blocks, only: block, get_block, create_blocks, nghost, & nblocks_x, nblocks_y, nblocks_tot, nx_block, ny_block @@ -26,7 +30,7 @@ module ice_domain use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif @@ -58,7 +62,8 @@ module ice_domain logical (kind=log_kind), public :: & maskhalo_dyn , & ! if true, use masked halo updates for dynamics maskhalo_remap , & ! if true, use masked halo updates for transport - maskhalo_bound ! if true, use masked halo updates for bound_state + maskhalo_bound , & ! if true, use masked halo updates for bound_state + orca_halogrid ! if true, input fields are haloed as defined by orca grid !----------------------------------------------------------------------- ! @@ -128,7 +133,8 @@ subroutine init_domain_blocks ns_boundary_type, & maskhalo_dyn, & maskhalo_remap, & - maskhalo_bound + maskhalo_bound, & + add_mpi_barriers !---------------------------------------------------------------------- ! @@ -146,6 +152,7 @@ subroutine init_domain_blocks maskhalo_dyn = .false. ! if true, use masked halos for dynamics maskhalo_remap = .false. ! if true, use masked halos for transport maskhalo_bound = .false. ! if true, use masked halos for bound_state + add_mpi_barriers = .false. ! if true, throttle communication max_blocks = -1 ! max number of blocks per processor block_size_x = -1 ! size of block in first horiz dimension block_size_y = -1 ! size of block in second horiz dimension @@ -182,6 +189,7 @@ subroutine init_domain_blocks call broadcast_scalar(maskhalo_dyn, master_task) call broadcast_scalar(maskhalo_remap, master_task) call broadcast_scalar(maskhalo_bound, master_task) + call broadcast_scalar(add_mpi_barriers, master_task) if (my_task == master_task) then if (max_blocks < 1) then max_blocks=int( & @@ -259,6 +267,7 @@ subroutine init_domain_blocks write(nu_diag,'(a,l6)') ' maskhalo_dyn = ', maskhalo_dyn write(nu_diag,'(a,l6)') ' maskhalo_remap = ', maskhalo_remap write(nu_diag,'(a,l6)') ' maskhalo_bound = ', maskhalo_bound + write(nu_diag,'(a,l6)') ' add_mpi_barriers = ', add_mpi_barriers write(nu_diag,'(a,2i6)') ' block_size_x,_y = ', block_size_x, block_size_y write(nu_diag,'(a,i6)') ' max_blocks = ', max_blocks write(nu_diag,'(a,i6,/)')' Number of ghost cells = ', nghost @@ -303,7 +312,7 @@ subroutine init_domain_distribution(KMTG,ULATG) i,j,n ,&! dummy loop indices ig,jg ,&! global indices work_unit ,&! size of quantized work unit -#ifdef ncdf +#ifdef USE_NETCDF fid ,&! file id varid ,&! var id status ,&! 
netcdf return code @@ -439,7 +448,7 @@ subroutine init_domain_distribution(KMTG,ULATG) allocate(wght(nx_global,ny_global)) if (my_task == master_task) then ! cannot use ice_read_write due to circular dependency -#ifdef ncdf +#ifdef USE_NETCDF write(nu_diag,*) 'read ',trim(distribution_wght_file),minval(wght),maxval(wght) status = nf90_open(distribution_wght_file, NF90_NOWRITE, fid) if (status /= nf90_noerr) then @@ -449,7 +458,8 @@ subroutine init_domain_distribution(KMTG,ULATG) status = nf90_get_var(fid, varid, wght) status = nf90_close(fid) #else - call abort_ice (subname//'ERROR: distribution_wght file needs ncdf cpp ') + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif endif call broadcast_array(wght, master_task) diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 index f4b5fef6e..34b37cf29 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! Spatial grids, masks, and boundary conditions @@ -45,6 +48,7 @@ module ice_grid grid_file , & ! input file for POP grid info kmt_file , & ! input file for POP grid info bathymetry_file, & ! input bathymetry for basalstress + bathymetry_format, & ! bathymetry file format (default or pop) grid_spacing , & ! default of 30.e3m or set by user in namelist grid_type ! current options are rectangular (default), ! displaced_pole, tripole, regional @@ -541,11 +545,14 @@ subroutine init_grid2 ! bathymetry !----------------------------------------------------------------- -#ifdef RASM_MODS - call get_bathymetry_popfile -#else - call get_bathymetry -#endif + if (trim(bathymetry_format) == 'default') then + call get_bathymetry + elseif (trim(bathymetry_format) == 'pop') then + call get_bathymetry_popfile + else + call abort_ice(subname//'ERROR: bathymetry_format value must be default or pop', & + file=__FILE__, line=__LINE__) + endif !---------------------------------------------------------------- ! Corner coordinates for CF compliant history files @@ -713,13 +720,14 @@ end subroutine popgrid subroutine popgrid_nc -#ifdef ncdf use ice_blocks, only: nx_block, ny_block use ice_constants, only: c0, c1, & field_loc_center, field_loc_NEcorner, & field_type_scalar, field_type_angle use ice_domain_size, only: max_blocks +#ifdef USE_NETCDF use netcdf +#endif integer (kind=int_kind) :: & i, j, iblk, & @@ -752,6 +760,7 @@ subroutine popgrid_nc character(len=*), parameter :: subname = '(popgrid_nc)' +#ifdef USE_NETCDF call icepack_query_parameters(pi_out=pi) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -866,7 +875,11 @@ subroutine popgrid_nc call ice_close_nc(fid_grid) call ice_close_nc(fid_kmt) endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif + end subroutine popgrid_nc #ifdef CESMCOUPLED @@ -879,13 +892,14 @@ end subroutine popgrid_nc subroutine latlongrid -#ifdef ncdf ! 
use ice_boundary use ice_domain_size use ice_scam, only : scmlat, scmlon, single_column use ice_constants, only: c0, c1, p5, p25, & field_loc_center, field_type_scalar, radius +#ifdef USE_NETCDF use netcdf +#endif integer (kind=int_kind) :: & i, j, iblk @@ -927,6 +941,7 @@ subroutine latlongrid character(len=*), parameter :: subname = '(lonlatgrid)' +#ifdef USE_NETCDF !----------------------------------------------------------------- ! - kmt file is actually clm fractional land file ! - Determine consistency of dimensions @@ -1139,6 +1154,9 @@ subroutine latlongrid !$OMP END PARALLEL DO call makemask +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine latlongrid @@ -2510,11 +2528,9 @@ subroutine read_basalstress_bathy character(len=*), parameter :: subname = '(read_basalstress_bathy)' if (my_task == master_task) then - write (nu_diag,*) ' ' write (nu_diag,*) 'Bathymetry file: ', trim(bathymetry_file) call icepack_warnings_flush(nu_diag) - endif call ice_open_nc(bathymetry_file,fid_init) diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90 index f497db49b..87d0813cc 100644 --- a/cicecore/cicedynB/infrastructure/ice_read_write.F90 +++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! Routines for opening, reading and writing external files @@ -15,13 +18,13 @@ module ice_read_write field_loc_noupdate, field_type_noupdate use ice_communicate, only: my_task, master_task use ice_broadcast, only: broadcast_scalar - use ice_domain, only: distrb_info + use ice_domain, only: distrb_info, orca_halogrid use ice_domain_size, only: max_blocks, nx_global, ny_global, ncat use ice_blocks, only: nx_block, ny_block, nghost use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag -#ifdef ncdf +#ifdef USE_NETCDF use netcdf #endif @@ -1044,7 +1047,7 @@ subroutine ice_open_nc(filename, fid) character(len=*), parameter :: subname = '(ice_open_nc)' -#ifdef ncdf +#ifdef USE_NETCDF integer (kind=int_kind) :: & status ! status variable from netCDF routine @@ -1058,6 +1061,8 @@ subroutine ice_open_nc(filename, fid) endif ! my_task = master_task #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(filename), & + file=__FILE__, line=__LINE__) fid = -999 ! to satisfy intent(out) attribute #endif end subroutine ice_open_nc @@ -1101,7 +1106,7 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_xy)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid , & ! variable id @@ -1121,18 +1126,17 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: nx, ny -#ifdef ORCA_GRID real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g2 - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1)) else allocate(work_g2(1,1)) ! to save memory endif + work_g2(:,:) = c0 endif -#endif nx = nx_global ny = ny_global @@ -1166,22 +1170,16 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & ! 
Read global array !-------------------------------------------------------------- -#ifndef ORCA_GRID - status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,nrec/), & - count=(/nx,ny,1/) ) -#else - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & start=(/1,1,nrec/), & count=(/nx_global+2,ny_global+1,1/) ) work_g1 = work_g2(2:nx_global+1,1:ny_global) else status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,nrec/), & - count=(/nx,ny,1/) ) + start=(/1,1,nrec/), & + count=(/nx,ny,1/) ) endif -#endif endif ! my_task = master_task @@ -1225,11 +1223,11 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & endif deallocate(work_g1) -#ifdef ORCA_GRID - if (.not. present(restart_ext)) deallocate(work_g2) -#endif + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_xy @@ -1273,7 +1271,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_xyz)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & n, & ! ncat index @@ -1294,18 +1292,17 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: nx, ny -#ifdef ORCA_GRID real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g2 - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1,ncat)) else allocate(work_g2(1,1,ncat)) ! to save memory endif + work_g2(:,:,:) = c0 endif -#endif nx = nx_global ny = ny_global @@ -1339,12 +1336,7 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & ! Read global array !-------------------------------------------------------------- -#ifndef ORCA_GRID - status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & - count=(/nx,ny,ncat,1/) ) -#else - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & start=(/1,1,1,nrec/), & count=(/nx_global+2,ny_global+1,ncat,1/) ) @@ -1354,7 +1346,6 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & start=(/1,1,1,nrec/), & count=(/nx,ny,ncat,1/) ) endif -#endif endif ! my_task = master_task @@ -1407,11 +1398,11 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & endif deallocate(work_g1) -#ifdef ORCA_GRID - if (.not. present(restart_ext)) deallocate(work_g2) -#endif + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_xyz @@ -1458,7 +1449,6 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ! local variables -#ifdef ncdf ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! variable id @@ -1480,18 +1470,20 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & integer (kind=int_kind) :: nx, ny -#ifdef ORCA_GRID + character(len=*), parameter :: subname = '(ice_read_nc_xyf)' + +#ifdef USE_NETCDF real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g2 - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. 
present(restart_ext)) then if (my_task == master_task) then allocate(work_g2(nx_global+2,ny_global+1,nfreq)) else allocate(work_g2(1,1,nfreq)) ! to save memory endif + work_g2(:,:,:) = c0 endif -#endif nx = nx_global ny = ny_global @@ -1526,13 +1518,7 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ! Read global array !-------------------------------------------------------------- -#ifndef ORCA_GRID - status = nf90_get_var( fid, varid, work_g1, & - start=(/1,1,1,nrec/), & - count=(/nx,ny,nfreq,1/) ) -#else - print *, 'restart_ext',restart_ext - if (.not. present(restart_ext)) then + if (orca_halogrid .and. .not. present(restart_ext)) then status = nf90_get_var( fid, varid, work_g2, & start=(/1,1,1,nrec/), & count=(/nx_global+2,ny_global+1,nfreq,1/) ) @@ -1542,8 +1528,6 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & start=(/1,1,1,nrec/), & count=(/nx,ny,nfreq,1/) ) endif - print *, 'fid',fid ,' varid',varid -#endif status = nf90_get_att(fid, varid, "missing_value", missingvalue) endif ! my_task = master_task @@ -1601,11 +1585,11 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & where (work > 1.0e+30_dbl_kind) work = c0 deallocate(work_g1) -#ifdef ORCA_GRID - if (.not. present(restart_ext)) deallocate(work_g2) -#endif + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif @@ -1640,7 +1624,7 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_point)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! netcdf id for field @@ -1699,6 +1683,8 @@ subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & work = workg(1) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_point @@ -1731,7 +1717,7 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & ! local variables -#ifdef ncdf +#ifdef USE_NETCDF real (kind=dbl_kind), dimension(:), allocatable :: & work_z @@ -1749,7 +1735,7 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_z)' -#ifdef ncdf +#ifdef USE_NETCDF allocate(work_z(nilyr)) @@ -1795,6 +1781,8 @@ subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & deallocate(work_z) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif end subroutine ice_read_nc_z @@ -1831,7 +1819,7 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_xy)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & status ! status output from netcdf routines @@ -1915,7 +1903,11 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & deallocate(work_g1) +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif + end subroutine ice_write_nc_xy !======================================================================= @@ -1950,7 +1942,7 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_xyz)' -#ifdef ncdf +#ifdef USE_NETCDF ! 
netCDF file diagnostics: integer (kind=int_kind) :: & n, & ! ncat index @@ -2045,7 +2037,11 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & deallocate(work_g1) +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif + end subroutine ice_write_nc_xyz !======================================================================= @@ -2076,7 +2072,7 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) character(len=*), parameter :: subname = '(ice_read_global_nc)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! netcdf id for field @@ -2091,18 +2087,18 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) ! character (char_len) :: & ! dimname ! dimension name ! -#ifdef ORCA_GRID real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g3 - if (my_task == master_task) then - allocate(work_g3(nx_global+2,ny_global+1)) - else - allocate(work_g3(1,1)) ! to save memory - endif + if (orca_halogrid) then + if (my_task == master_task) then + allocate(work_g3(nx_global+2,ny_global+1)) + else + allocate(work_g3(1,1)) ! to save memory + endif + work_g3(:,:) = c0 + endif - work_g3(:,:) = c0 -#endif work_g(:,:) = c0 if (my_task == master_task) then @@ -2121,16 +2117,16 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) ! Read global array !-------------------------------------------------------------- -#ifndef ORCA_GRID - status = nf90_get_var( fid, varid, work_g, & - start=(/1,1,nrec/), & - count=(/nx_global,ny_global,1/) ) -#else - status = nf90_get_var( fid, varid, work_g3, & - start=(/1,1,nrec/), & - count=(/nx_global+2,ny_global+1,1/) ) - work_g=work_g3(2:nx_global+1,1:ny_global) -#endif + if (orca_halogrid) then + status = nf90_get_var( fid, varid, work_g3, & + start=(/1,1,nrec/), & + count=(/nx_global+2,ny_global+1,1/) ) + work_g=work_g3(2:nx_global+1,1:ny_global) + else + status = nf90_get_var( fid, varid, work_g, & + start=(/1,1,nrec/), & + count=(/nx_global,ny_global,1/) ) + endif endif ! my_task = master_task !------------------------------------------------------------------- @@ -2153,13 +2149,14 @@ subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) write(nu_diag,*) 'min, max, sum = ', amin, amax, asum, trim(varname) endif -#ifdef ORCA_GRID - deallocate(work_g3) -#endif + if (orca_halogrid) deallocate(work_g3) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work_g = c0 ! to satisfy intent(out) attribute #endif + end subroutine ice_read_global_nc !======================================================================= @@ -2176,13 +2173,16 @@ subroutine ice_close_nc(fid) character(len=*), parameter :: subname = '(ice_close_nc)' -#ifdef ncdf +#ifdef USE_NETCDF integer (kind=int_kind) :: & status ! status variable from netCDF routine if (my_task == master_task) then status = nf90_close(fid) endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_close_nc @@ -2227,7 +2227,7 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & character(len=*), parameter :: subname = '(ice_read_nc_uv)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid , & ! 
variable id @@ -2318,8 +2318,11 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & deallocate(work_g1) #else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work = c0 ! to satisfy intent(out) attribute #endif + end subroutine ice_read_nc_uv !======================================================================= @@ -2350,7 +2353,7 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) character(len=*), parameter :: subname = '(ice_read_vec_nc)' -#ifdef ncdf +#ifdef USE_NETCDF ! netCDF file diagnostics: integer (kind=int_kind) :: & varid, & ! netcdf id for field @@ -2393,9 +2396,11 @@ subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) endif #else - write(*,*) 'ERROR: ncdf not defined during compilation' + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) work_g = c0 ! to satisfy intent(out) attribute #endif + end subroutine ice_read_vec_nc !======================================================================= @@ -2411,7 +2416,7 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) ! local variables -#ifdef ncdf +#ifdef USE_NETCDF integer (kind=int_kind) :: & ndims, i, status character (char_len) :: & @@ -2419,7 +2424,7 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) #endif character(len=*), parameter :: subname = '(ice_get_ncvarsize)' -#ifdef ncdf +#ifdef USE_NETCDF if (my_task == master_task) then status=nf90_inquire(fid, nDimensions = nDims) if (status /= nf90_noerr) then @@ -2437,9 +2442,11 @@ subroutine ice_get_ncvarsize(fid,varname,recsize) endif endif #else - write(*,*) 'ERROR: ncdf not defined during compilation' + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) recsize = 0 ! to satisfy intent(out) attribute #endif + end subroutine ice_get_ncvarsize !======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 index 5b6aa0dd8..b3024302e 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! ! Writes history in netCDF format @@ -41,7 +44,6 @@ module ice_history_write subroutine ice_write_hist (ns) use ice_kinds_mod -#ifdef ncdf use ice_arrays_column, only: hin_max, floe_rad_c use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar @@ -56,6 +58,7 @@ subroutine ice_write_hist (ns) lont_bounds, latt_bounds, lonu_bounds, latu_bounds use ice_history_shared use ice_restart_shared, only: runid, lcdf64 +#ifdef USE_NETCDF use netcdf #endif @@ -63,7 +66,6 @@ subroutine ice_write_hist (ns) ! 
local variables -#ifdef ncdf real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 real (kind=real_kind), dimension(:,:), allocatable :: work_gr real (kind=real_kind), dimension(:,:,:), allocatable :: work_gr3 @@ -120,6 +122,7 @@ subroutine ice_write_hist (ns) character(len=*), parameter :: subname = '(ice_write_hist)' +#ifdef USE_NETCDF call icepack_query_parameters(secday_out=secday, rad_to_deg_out=rad_to_deg) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -1571,6 +1574,10 @@ subroutine ice_write_hist (ns) write(nu_diag,*) ' ' write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) endif + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) #endif end subroutine ice_write_hist diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 index 8bb09398e..53c7dac60 100644 --- a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 @@ -1,3 +1,6 @@ +#ifdef ncdf +#define USE_NETCDF +#endif !======================================================================= ! Read and write ice model restart files using netCDF or binary @@ -8,7 +11,9 @@ module ice_restart use ice_broadcast use ice_kinds_mod +#ifdef USE_NETCDF use netcdf +#endif use ice_restart_shared, only: & restart_ext, restart_dir, restart_file, pointer_file, & runid, use_restart_time, lcdf64, lenstr, restart_coszen @@ -52,6 +57,7 @@ subroutine init_restart_read(ice_ic) character(len=*), parameter :: subname = '(init_restart_read)' +#ifdef USE_NETCDF if (present(ice_ic)) then filename = trim(ice_ic) else @@ -97,6 +103,10 @@ subroutine init_restart_read(ice_ic) if (trim(runid) == 'bering') then npt = npt - istep0 endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(ice_ic), & + file=__FILE__, line=__LINE__) +#endif end subroutine init_restart_read @@ -153,6 +163,7 @@ subroutine init_restart_write(filename_spec) character(len=*), parameter :: subname = '(init_restart_write)' +#ifdef USE_NETCDF call icepack_query_parameters( & solve_zsal_out=solve_zsal, skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) call icepack_query_tracer_sizes( & @@ -619,6 +630,11 @@ subroutine init_restart_write(filename_spec) write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif ! 
master_task +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(filename_spec), & + file=__FILE__, line=__LINE__) +#endif + end subroutine init_restart_write !======================================================================= @@ -661,6 +677,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & character(len=*), parameter :: subname = '(read_restart_field)' +#ifdef USE_NETCDF if (present(field_loc)) then if (ndim3 == ncat) then if (restart_ext) then @@ -699,6 +716,11 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & endif endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + end subroutine read_restart_field !======================================================================= @@ -740,6 +762,7 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) character(len=*), parameter :: subname = '(write_restart_field)' +#ifdef USE_NETCDF status = nf90_inq_varid(ncid,trim(vname),varid) if (ndim3 == ncat) then if (restart_ext) then @@ -758,6 +781,11 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) write(nu_diag,*) 'ndim3 not supported',ndim3 endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + end subroutine write_restart_field !======================================================================= @@ -774,11 +802,17 @@ subroutine final_restart() character(len=*), parameter :: subname = '(final_restart)' +#ifdef USE_NETCDF status = nf90_close(ncid) if (my_task == master_task) & write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + end subroutine final_restart !======================================================================= @@ -799,7 +833,12 @@ subroutine define_rest_field(ncid, vname, dims) character(len=*), parameter :: subname = '(define_rest_field)' +#ifdef USE_NETCDF status = nf90_def_var(ncid,trim(vname),nf90_double,dims,varid) +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif end subroutine define_rest_field diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 index d030b439b..7e16f2591 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -39,7 +39,6 @@ module ice_history_write subroutine ice_write_hist (ns) -#ifdef ncdf use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_calendar, only: time, sec, idate, idate0, write_ic, & @@ -55,8 +54,6 @@ subroutine ice_write_hist (ns) use ice_history_shared use ice_arrays_column, only: hin_max, floe_rad_c use ice_restart_shared, only: runid, lcdf64 - use netcdf -#endif use ice_pio use pio @@ -64,7 +61,6 @@ subroutine ice_write_hist (ns) ! 
local variables -#ifdef ncdf integer (kind=int_kind) :: i,j,k,ic,n,nn, & ncid,status,imtid,jmtid,kmtidi,kmtids,kmtidb, cmtid,timid, & length,nvertexid,ivertex,kmtida,fmtid @@ -1300,8 +1296,6 @@ subroutine ice_write_hist (ns) write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) endif -#endif - end subroutine ice_write_hist !======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 index b11dcf0d0..eb703abcd 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -662,7 +662,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & use ice_global_reductions, only: global_minval, global_maxval, global_sum integer (kind=int_kind), intent(in) :: & - nu , & ! unit number (not used for netcdf) + nu , & ! unit number ndim3 , & ! third dimension nrec ! record number (0 for sequential access) diff --git a/cicecore/drivers/direct/hadgem3/CICE.F90 b/cicecore/drivers/direct/hadgem3/CICE.F90 index b38c1aa29..72bf1b747 100644 --- a/cicecore/drivers/direct/hadgem3/CICE.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE.F90 @@ -17,7 +17,6 @@ ! https://github.com/CICE-Consortium ! !======================================================================= -#ifndef popcice ! ! Main driver routine for CICE. Initializes and steps through the model. ! This program should be compiled if CICE is run as a separate executable, @@ -56,7 +55,6 @@ program icemodel end program icemodel -#endif !======================================================================= ! ! Wrapper for the print_state debugging routine. diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index b208bcbef..ab528cee7 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -87,9 +87,6 @@ subroutine cice_init use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & tr_fsd, wave_spec @@ -131,9 +128,6 @@ subroutine cice_init endif call init_coupler_flux ! initialize fluxes exchanged with coupler -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index b72745e30..28419a238 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -87,9 +87,6 @@ subroutine cice_init(mpicom_ice) use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif integer (kind=int_kind), optional, intent(in) :: & mpicom_ice ! communicator for sequential ccsm @@ -134,9 +131,6 @@ subroutine cice_init(mpicom_ice) call init_coupler_flux ! initialize fluxes exchanged with coupler -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! 
initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution diff --git a/cicecore/drivers/nuopc/dmi/CICE.F90 b/cicecore/drivers/nuopc/dmi/CICE.F90 index 56dffc6b7..ec1963d38 100644 --- a/cicecore/drivers/nuopc/dmi/CICE.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE.F90 @@ -17,7 +17,6 @@ ! https://github.com/CICE-Consortium ! !======================================================================= -#ifndef popcice ! ! Main driver routine for CICE. Initializes and steps through the model. ! This program should be compiled if CICE is run as a separate executable, @@ -57,7 +56,6 @@ program icemodel end program icemodel -#endif !======================================================================= ! ! Wrapper for the print_state debugging routine. diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index adafb3d36..4e236bb11 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -92,9 +92,6 @@ subroutine cice_init(mpi_comm) use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif integer (kind=int_kind), optional, intent(in) :: & mpi_comm ! communicator for sequential ccsm @@ -146,9 +143,6 @@ subroutine cice_init(mpi_comm) call init_coupler_flux ! initialize fluxes exchanged with coupler -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution diff --git a/cicecore/drivers/standalone/cice/CICE.F90 b/cicecore/drivers/standalone/cice/CICE.F90 index 56dffc6b7..ec1963d38 100644 --- a/cicecore/drivers/standalone/cice/CICE.F90 +++ b/cicecore/drivers/standalone/cice/CICE.F90 @@ -17,7 +17,6 @@ ! https://github.com/CICE-Consortium ! !======================================================================= -#ifndef popcice ! ! Main driver routine for CICE. Initializes and steps through the model. ! This program should be compiled if CICE is run as a separate executable, @@ -57,7 +56,6 @@ program icemodel end program icemodel -#endif !======================================================================= ! ! Wrapper for the print_state debugging routine. diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 59bbca31c..9dbd2713d 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -87,9 +87,6 @@ subroutine cice_init use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport -#ifdef popcice - use drv_forcing, only: sst_sss -#endif logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & tr_iso, tr_fsd, wave_spec @@ -134,9 +131,6 @@ subroutine cice_init call init_coupler_flux ! initialize fluxes exchanged with coupler -#ifdef popcice - call sst_sss ! POP data for CICE initialization -#endif call init_thermo_vertical ! initialize vertical thermodynamics call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! 
ice thickness distribution diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build index b51484201..b9aed44fe 100755 --- a/configuration/scripts/cice.build +++ b/configuration/scripts/cice.build @@ -117,10 +117,10 @@ cd ${ICE_OBJDIR} if (${ICE_IOTYPE} == 'netcdf') then set IODIR = io_netcdf - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" else if (${ICE_IOTYPE} =~ pio*) then set IODIR = io_pio2 - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" else set IODIR = io_binary endif diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice index ca718548a..ecd7494c0 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -57,10 +57,10 @@ if !($?IO_TYPE) then endif if ($IO_TYPE == 'netcdf3' || $IO_TYPE == 'netcdf4') then setenv IODIR io_netcdf - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" else if ($IO_TYPE == 'pio') then setenv IODIR io_pio - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dncdf" + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" else setenv IODIR io_binary endif diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index feb08eb09..45a37a464 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -52,6 +52,7 @@ grid_file = 'grid' kmt_file = 'kmt' bathymetry_file = 'unknown_bathymetry_file' + bathymetry_format = 'default' use_bathymetry = .false. gridcpl_file = 'unknown_gridcpl_file' kcatbound = 0 @@ -63,6 +64,7 @@ nilyr = 7 nslyr = 1 nblyr = 7 + orca_halogrid = .false. / &tracer_nml @@ -217,6 +219,7 @@ maskhalo_dyn = .false. maskhalo_remap = .false. maskhalo_bound = .false. + add_mpi_barriers = .false. / &zbgc_nml diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 2bbf4e231..229fa92d5 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -29,6 +29,7 @@ either Celsius or Kelvin units). "a4Df", "history field accumulations, 4D categories, fsd", "" "a_min", "minimum area concentration for computing velocity", "0.001" "a_rapid_mode", ":math:`{\bullet}` brine channel diameter", "" + "add_mpi_barriers", ":math:`\bullet` turns on MPI barriers for communication throttling", "" "advection", ":math:`\bullet` type of advection algorithm used (‘remap’ or ‘upwind’)", "remap" "afsd(n)", "floe size distribution (in category n)", "" "ahmax", ":math:`\bullet` thickness above which ice albedo is constant", "0.3m" diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 619acce44..29b3ca15a 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -8,7 +8,51 @@ Case Settings There are two important files that define the case, **cice.settings** and **ice_in**. **cice.settings** is a list of env variables that define many values used to setup, build and run the case. **ice_in** is the input namelist file -for CICE. Variables in both files are described below. +for CICE. Variables in both files are described below. In addition, the first +table lists available preprocessor macros to activate or deactivate various +features when compiling. + +.. 
_tabcpps:
+
+Table of C Preprocessor (CPP) Macros
+---------------------------------------------------
+
+The CICE model supports a number of C Preprocessor (CPP) Macros. These
+can be turned on during compilation to activate different pieces of source
+code. The main purpose is to introduce build-time code modifications to
+include or exclude certain libraries or Fortran language features. More information
+can be found in :ref:`cicecpps`. The following CPPs are available.
+
+.. csv-table:: **CPP Macros**
+   :header: "CPP name", "description"
+   :widths: 15, 60
+
+   "",""
+   "**General Macros**", ""
+   "CESM1_PIO", "Provide backwards compatible support for PIO interfaces/version released with CESM1 in about 2010"
+   "ESMF_INTERFACE", "Turns on ESMF support in a subset of driver code. Also USE_ESMF_LIB and USE_ESMF_METADATA"
+   "FORTRANUNDERSCORE", "Used in ice_shr_reprosum86.c to support Fortran-C interfaces. This should generally be turned on at all times. There are other CPPs (FORTRANDOUBLEUNDERSCORE, FORTRANCAPS, etc.) in ice_shr_reprosum.c that are generally not used in CICE but could be useful if problems arise in the Fortran-C interfaces"
+   "GPTL", "Turns on GPTL initialization if needed for PIO"
+   "key_oasis3", "Leverages Oasis CPPs to define the local MPI communicator"
+   "key_oasis3mct", "Leverages Oasis CPPs to define the local MPI communicator"
+   "key_oasis4", "Leverages Oasis CPPs to define the local MPI communicator"
+   "key_iomput", "Leverages Oasis CPPs to define the local MPI communicator"
+   "NO_F2003", "Turns off some Fortran 2003 features"
+   "NO_I8", "Converts integer*8 to integer*4. This could have adverse effects for certain algorithms including the ddpdd implementation associated with the ``bfbflag``"
+   "NO_R16", "Converts real*16 to real*8. This could have adverse effects for certain algorithms including the lsum16 implementation associated with the ``bfbflag``"
+   "USE_NETCDF", "Turns on netcdf code. This is normally on and is needed for released configurations. An older value, ncdf, is still supported"
+   "",""
+   "**Application Macros**", ""
+   "CESMCOUPLED", "Turns on code changes for the CESM coupled application "
+   "CICE_IN_NEMO", "Turns on code changes for coupling in the NEMO ocean model"
+   "CICE_DMI", "Turns on code changes for the DMI coupled model application"
+   "ICE_DA", "Turns on code changes in the hadgem driver"
+   "RASM_MODS", "Turns on code changes for the RASM coupled application"
+   "",""
+   "**Library Macros**", ""
+   "_OPENMP", "Automatically defined when compiling with OpenMP "
+   "_OPENACC", "Automatically defined when compiling with OpenACC "
+
 .. _tabsettings:
 
@@ -37,7 +81,7 @@ can be modified as needed.
    
"ICE_RSTDIR", "string", "unused", "${ICE_RUNDIR}/restart" "ICE_HSTDIR", "string", "unused", "${ICE_RUNDIR}/history" "ICE_LOGDIR", "string", "log directory", "${ICE_CASEDIR}/logs" - "ICE_DRVOPT", "string", "unused", "cice" + "ICE_DRVOPT", "string", "unused", "standalone/cice" "ICE_IOTYPE", "string", "I/O format", "set by cice.setup" " ", "netcdf", "serial netCDF" " ", "pio", "parallel netCDF" @@ -170,6 +214,8 @@ grid_nml "", "", "", "" "``bathymetry_file``", "string", "name of bathymetry file to be read", "‘unknown_bathymetry_file’" + "``bathymetry_format``", "``default``", "NetCDF depth field", "‘default’" + "", "``pop``", "pop thickness file in cm in ascii format", "" "``close_boundaries``", "logical", "set land on edges of grid", "``.false.``" "``dxrect``", "real", "x-direction grid spacing for rectangular grid in cm", "0.0" "``dyrect``", "real", "y-direction grid spacing for rectangular grid in cm", "0.0" @@ -192,6 +238,7 @@ grid_nml "``nfsd``", "integer", "number of floe size categories", "1" "``nilyr``", "integer", "number of vertical layers in ice", "0" "``nslyr``", "integer", "number of vertical layers in snow", "0" + "``orca_halogrid``", "logical", "use orca haloed grid for data/grid read", "``.false.``" "``use_bathymetry``", "logical", "use read in bathymetry file for basalstress option", "``.false.``" "", "", "", "" @@ -203,6 +250,7 @@ domain_nml :widths: 15, 15, 30, 15 "", "", "", "" + "``add_mpi_barriers``", "logical", "throttle communication", "``.false.``" "``block_size_x``", "integer", "block size in x direction", "-1" "``block_size_y``", "integer", "block size in y direction", "-1" "``distribution_type``", "``cartesian``", "2D cartesian block distribution method", "``cartesian``" @@ -651,5 +699,3 @@ icefields_nml "", "``md``", "*e.g.,* write both monthly and daily files", "" "", "", "", "" - - diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index 44d4ef1d6..cbfe37b0c 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -181,6 +181,12 @@ that prints out the variable ``blkmask`` to the history file and which labels the blocks in the grid decomposition according to ``blkmask = my_task + iblk/100``. +The namelist ``add_mpi_barriers`` can be set to ``.true.`` to help +throttle communication for communication intensive configurations. This +may slow the code down a bit. These barriers have been added to +a few select locations, but it's possible others may be needed. As a general +rule, ``add_mpi_barriers`` should be ``.false.``. + ************* Tripole grids ************* diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index 8befee9cb..c4971e235 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -14,9 +14,10 @@ Software Requirements To run stand-alone, CICE requires +- bash and csh - gmake (GNU Make) - Fortran and C compilers (Intel, PGI, GNU, Cray, and NAG have been tested) -- NetCDF +- NetCDF (this is actually optional but required to test out of the box configurations) - MPI (this is actually optional but without it you can only run on 1 processor) Below are lists of software versions that the Consortium has tested at some point. There is no @@ -350,6 +351,25 @@ automatically clean the prior build. If incremental builds are desired to save time during development, the ``ICE_CLEANBUILD`` setting in **cice.settings** should be modified. +.. 
_cicecpps: + +C Preprocessor (CPP) Macros +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +There are a number of C Preprocessing Macros supported in the CICE model. These +allow certain coding features like NetCDF, MPI, or specific Fortran features to be +excluded or included during the compile. + +The CPPs are defined by the `CPPDEFS` variable in the Makefile. They are defined +by passing the -D[CPP] to the C and Fortran compilers (ie. -DUSE_NETCDF) and this +is what needs to be set in the `CPPDEFS` variable. The value of `ICE_CPPDEFS` in +**cice.settings** is copied into the Makefile `CPPDEFS` variable as are settings +hardwired into the **Macros.[machine]_[environment]** file. + +In general, ``-DFORTRANUNDERSCORE`` should always be set to support the Fortran/C +interfaces in **ice_shr_reprosum.c**. In addition, if NetCDF is used, ``-DUSE_NETCDF`` +should also be defined. A list of available CPPs can be found in +:ref:`tabcpps`. .. _porting: From 06b3be50c60880d117411b9160b4c4d2839d39ea Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Fri, 31 Jul 2020 09:12:54 -0700 Subject: [PATCH 08/13] Remove "coupled" CPP flag, add ssh_stress namelist (#496) * Remove "coupled" CPP flag and replace as needed Remove "fake MPI" capability in serial/ice_communicate.F90 Add ssh_stress variable/namelist with options 'geostrophic' or 'slope' to specify the computation of the sea surface height stress term. Add ssh_stress to ice_in, set to 'geostrophic'. Add ssh_stress to namelist documentation Change "coupled" to CESMCOUPLED in ice_init on check for oceanmixed_ice setting Clean up drivers remove "coupled" and CESMCOUPLED from standalone/cice as if neither were set remove "coupled" macro end_run call in nuopc/cmeps, CESMCOUPLED left mostly alone remove "coupled" and CESMCOUPLED from mct/cesm1 as if both were set remove "coupled" and CESMCOUPLED from direct/hadgem as if both were set no changes to nuopc/dmi, prefer dmi make those changes Fix error in coriolis namelist documentation where Cstar was in the wrong place NOTE: Coupled systems should leverage the new ssh_stress namelist to turn on the 'slope' option. The coupled CPP flag is no longer in use. * rename ssh_stress option from slope to coupled and update documentation * update documentation * update warning message for oceanmixed_ice --- cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 39 +++++----- cicecore/cicedynB/general/ice_init.F90 | 29 +++---- .../comm/serial/ice_communicate.F90 | 77 +------------------ .../drivers/direct/hadgem3/CICE_FinalMod.F90 | 5 +- .../drivers/direct/hadgem3/CICE_InitMod.F90 | 24 +++--- .../drivers/direct/hadgem3/CICE_RunMod.F90 | 20 +++-- cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 | 5 +- cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 26 +++---- cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 23 +++--- .../drivers/nuopc/cmeps/CICE_FinalMod.F90 | 5 +- .../drivers/standalone/cice/CICE_FinalMod.F90 | 2 - .../drivers/standalone/cice/CICE_InitMod.F90 | 4 - .../drivers/standalone/cice/CICE_RunMod.F90 | 4 - .../standalone/cice/CICE_RunMod.F90_debug | 4 - configuration/scripts/ice_in | 1 + doc/source/user_guide/ug_case_settings.rst | 4 +- 16 files changed, 90 insertions(+), 182 deletions(-) diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 index df50dd99e..c3dc83a24 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -35,10 +35,11 @@ module ice_dyn_shared ndte ! 
number of subcycles: ndte=dt/dte character (len=char_len), public :: & - coriolis ! 'constant', 'zero', or 'latitude' + coriolis , & ! 'constant', 'zero', or 'latitude' + ssh_stress ! 'geostrophic' or 'coupled' logical (kind=log_kind), public :: & - revised_evp ! if true, use revised evp procedure + revised_evp ! if true, use revised evp procedure integer (kind=int_kind), public :: & kevp_kernel ! 0 = 2D org version @@ -475,9 +476,7 @@ subroutine dyn_prep2 (nx_block, ny_block, & integer (kind=int_kind) :: & i, j, ij -#ifdef coupled real (kind=dbl_kind) :: gravit -#endif logical (kind=log_kind), dimension(nx_block,ny_block) :: & iceumask_old ! old-time iceumask @@ -577,12 +576,12 @@ subroutine dyn_prep2 (nx_block, ny_block, & ! Define variables for momentum equation !----------------------------------------------------------------- -#ifdef coupled - call icepack_query_parameters(gravit_out=gravit) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) -#endif + if (trim(ssh_stress) == 'coupled') then + call icepack_query_parameters(gravit_out=gravit) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif do ij = 1, icellu i = indxui(ij) @@ -597,14 +596,18 @@ subroutine dyn_prep2 (nx_block, ny_block, & watery(i,j) = vocn(i,j)*cosw + uocn(i,j)*sinw*sign(c1,fm(i,j)) ! combine tilt with wind stress -#ifndef coupled - ! calculate tilt from geostrophic currents if needed - strtltx(i,j) = -fm(i,j)*vocn(i,j) - strtlty(i,j) = fm(i,j)*uocn(i,j) -#else - strtltx(i,j) = -gravit*umass(i,j)*ss_tltx(i,j) - strtlty(i,j) = -gravit*umass(i,j)*ss_tlty(i,j) -#endif + if (trim(ssh_stress) == 'geostrophic') then + ! calculate tilt from geostrophic currents if needed + strtltx(i,j) = -fm(i,j)*vocn(i,j) + strtlty(i,j) = fm(i,j)*uocn(i,j) + elseif (trim(ssh_stress) == 'coupled') then + strtltx(i,j) = -gravit*umass(i,j)*ss_tltx(i,j) + strtlty(i,j) = -gravit*umass(i,j)*ss_tlty(i,j) + else + call abort_ice(subname//' ERROR: unknown ssh_stress='//trim(ssh_stress), & + file=__FILE__, line=__LINE__) + endif + forcex(i,j) = strairx(i,j) + strtltx(i,j) forcey(i,j) = strairy(i,j) + strtlty(i,j) enddo diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 19c729826..27008670a 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -98,7 +98,7 @@ subroutine input_data use ice_dyn_shared, only: ndte, kdyn, revised_evp, yield_curve, & kevp_kernel, & basalstress, k1, k2, alphab, threshold_hw, & - Ktens, e_ratio, coriolis, & + Ktens, e_ratio, coriolis, ssh_stress, & kridge, ktransport, brlx, arlx use ice_transport_driver, only: advection, conserv_check use ice_restoring, only: restore_ice @@ -187,7 +187,7 @@ subroutine input_data namelist /dynamics_nml/ & kdyn, ndte, revised_evp, yield_curve, & kevp_kernel, & - brlx, arlx, & + brlx, arlx, ssh_stress, & advection, coriolis, kridge, ktransport, & kstrength, krdg_partic, krdg_redist, mu_rdg, & e_ratio, Ktens, Cf, basalstress, & @@ -327,6 +327,7 @@ subroutine input_data ktherm = 1 ! -1 = OFF, 0 = 0-layer, 1 = BL99, 2 = mushy thermo conduct = 'bubbly' ! 'MU71' or 'bubbly' (Pringle et al 2007) coriolis = 'latitude' ! latitude dependent, or 'constant' + ssh_stress = 'geostrophic' ! 'geostrophic' or 'coupled' kridge = 1 ! -1 = off, 1 = on ktransport = 1 ! -1 = off, 1 = on calc_Tsfc = .true. ! 
calculate surface temperature @@ -616,6 +617,7 @@ subroutine input_data call broadcast_scalar(albedo_type, master_task) call broadcast_scalar(ktherm, master_task) call broadcast_scalar(coriolis, master_task) + call broadcast_scalar(ssh_stress, master_task) call broadcast_scalar(kridge, master_task) call broadcast_scalar(ktransport, master_task) call broadcast_scalar(conduct, master_task) @@ -1148,6 +1150,13 @@ subroutine input_data endif write(nu_diag,*) 'coriolis = ',trim(coriolis),trim(tmpstr2) + if (trim(ssh_stress) == 'geostrophic') then + tmpstr2 = ': from ocean velocity' + elseif (trim(ssh_stress) == 'coupled') then + tmpstr2 = ': from coupled sea surface height gradients' + endif + write(nu_diag,*) 'ssh_stress = ',trim(ssh_stress),trim(tmpstr2) + if (ktransport == 1) then tmpstr2 = ' transport enabled' if (trim(advection) == 'remap') then @@ -1305,6 +1314,11 @@ subroutine input_data tmpstr2 = ' ocean mixed layer calculation (SST) disabled' endif write(nu_diag,1012) ' oceanmixed_ice = ', oceanmixed_ice,trim(tmpstr2) + if (oceanmixed_ice) then + write(nu_diag,*) ' WARNING: ocean mixed layer ON' + write(nu_diag,*) ' WARNING: will impact ocean forcing interaction' + write(nu_diag,*) ' WARNING: coupled forcing will be modified by mixed layer routine' + endif if (trim(tfrz_option) == 'minus1p8') then tmpstr2 = ': constant ocean freezing temperature (-1.8C)' elseif (trim(tfrz_option) == 'linear_salt') then @@ -1535,17 +1549,6 @@ subroutine input_data if (restore_ice .or. restore_ocn) & write(nu_diag,1020) ' trestore = ', trestore -#ifdef coupled - if( oceanmixed_ice ) then - write(nu_diag,*) subname//' WARNING ** WARNING ** WARNING ** WARNING ' - write(nu_diag,*) subname//' WARNING: coupled CPP and oceanmixed_ice namelist are BOTH ON' - write(nu_diag,*) subname//' WARNING: Ocean data received from coupler will' - write(nu_diag,*) subname//' WARNING: be altered by mixed layer routine!' - write(nu_diag,*) subname//' WARNING ** WARNING ** WARNING ** WARNING ' - write(nu_diag,*) ' ' - endif -#endif - write(nu_diag,*) ' ' write(nu_diag,'(a30,2f8.2)') 'Diagnostic point 1: lat, lon =', & latpnt(1), lonpnt(1) diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 b/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 index dbcf78899..c9df264dd 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 +++ b/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 @@ -46,12 +46,6 @@ subroutine init_communicate ! !----------------------------------------------------------------------- -#ifdef coupled - use mpi ! MPI Fortran module - - integer (int_kind) :: ierr ! MPI error flag -#endif - character(len=*), parameter :: subname = '(init_communicate)' !----------------------------------------------------------------------- @@ -61,27 +55,9 @@ subroutine init_communicate ! !----------------------------------------------------------------------- -#ifdef coupled - call MPI_INIT(ierr) - call MPI_COMM_RANK (MPI_COMM_ICE, my_task, ierr) -#else my_task = 0 -#endif - master_task = 0 -#ifdef coupled -!----------------------------------------------------------------------- -! -! On some 64-bit machines where real_kind and dbl_kind are -! identical, the MPI implementation uses MPI_REAL for both. -! In these cases, set MPI_DBL to MPI_REAL. -! 
-!----------------------------------------------------------------------- - - MPI_DBL = MPI_DOUBLE_PRECISION - -#endif !----------------------------------------------------------------------- end subroutine init_communicate @@ -139,11 +115,6 @@ subroutine create_communicator(new_comm, num_procs) ! this routine should be called from init_domain1 when the ! domain configuration (e.g. nprocs_btrop) has been determined -#ifdef coupled - - use mpi ! MPI Fortran module - -#endif ! !INPUT PARAMETERS: integer (int_kind), intent(in) :: & @@ -154,54 +125,8 @@ subroutine create_communicator(new_comm, num_procs) integer (int_kind), intent(out) :: & new_comm ! new communicator for this distribution -#ifdef coupled -!----------------------------------------------------------------------- -! -! local variables -! -!----------------------------------------------------------------------- - - integer (int_kind) :: & - MPI_GROUP_ICE, &! group of processors assigned to ice - MPI_GROUP_NEW ! group of processors assigned to new dist - - integer (int_kind) :: & - ierr ! error flag for MPI comms - - integer (int_kind), dimension(3) :: & - range ! range of tasks assigned to new dist - ! (assumed 0,num_procs-1) - - character(len=*), parameter :: subname = '(create_communicator)' - -!----------------------------------------------------------------------- -! -! determine group of processes assigned to distribution -! -!----------------------------------------------------------------------- - - call MPI_COMM_GROUP (MPI_COMM_ICE, MPI_GROUP_ICE, ierr) - - range(1) = 0 - range(2) = num_procs-1 - range(3) = 1 - -!----------------------------------------------------------------------- -! -! create subroup and communicator for new distribution -! note: MPI_COMM_CREATE must be called by all procs in MPI_COMM_ICE -! -!----------------------------------------------------------------------- - - call MPI_GROUP_RANGE_INCL(MPI_GROUP_ICE, 1, range, & - MPI_GROUP_NEW, ierr) - - call MPI_COMM_CREATE (MPI_COMM_ICE, MPI_GROUP_NEW, & - new_comm, ierr) - -#else new_comm = MPI_COMM_ICE -#endif + !----------------------------------------------------------------------- end subroutine create_communicator diff --git a/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 index 6b5a53abe..397950023 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_FinalMod.F90 @@ -58,9 +58,8 @@ subroutine CICE_Finalize ! quit MPI !------------------------------------------------------------------- -#ifndef coupled - call end_run ! quit MPI -#endif +! standalone +! call end_run ! quit MPI end subroutine CICE_Finalize diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index ab528cee7..dc41ff9fd 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -196,19 +196,17 @@ subroutine cice_init call init_forcing_atmo ! initialize atmospheric forcing (standalone) #endif -#ifndef coupled -#ifndef CESMCOUPLED - if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data - - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! 
biogeochemistry -#endif -#endif +! standalone +! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data + +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + if (z_tracers) call get_atm_bgc ! biogeochemistry if (runtype == 'initial' .and. .not. restart) & diff --git a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 index 90af92122..e43b4a24d 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 @@ -92,21 +92,19 @@ subroutine CICE_Run call ice_timer_start(timer_couple) ! atm/ocn coupling -#ifndef coupled -#ifndef CESMCOUPLED +! standalone ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values + +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! initialize atmosphere fluxes sent to coupler diff --git a/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 b/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 index c2331e4e5..943787498 100644 --- a/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_FinalMod.F90 @@ -55,9 +55,8 @@ subroutine CICE_Finalize ! quit MPI !------------------------------------------------------------------- -#ifndef coupled - call end_run ! quit MPI -#endif +! standalone +! call end_run ! quit MPI end subroutine CICE_Finalize diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index 28419a238..80bb2570e 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -200,21 +200,19 @@ subroutine cice_init(mpicom_ice) call init_forcing_atmo ! initialize atmospheric forcing (standalone) -#ifndef coupled -#ifndef CESMCOUPLED - if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data +! for standalone +! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data + +! ! isotopes +! if (tr_iso) call fiso_default ! default values +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry - ! 
isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry if (runtype == 'initial' .and. .not. restart) & diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index f5e7de02f..ee217712b 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -89,23 +89,20 @@ subroutine CICE_Run call ice_timer_start(timer_couple) ! atm/ocn coupling -#ifndef coupled -#ifndef CESMCOUPLED +! for standalone ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice - call get_forcing_atmo ! atmospheric forcing from data - call get_forcing_ocn(dt) ! ocean forcing from data +! call get_forcing_atmo ! atmospheric forcing from data +! call get_forcing_ocn(dt) ! ocean forcing from data - ! isotopes - if (tr_iso) call fiso_default ! default values - ! aerosols - ! if (tr_aero) call faero_data ! data file - ! if (tr_zaero) call fzaero_data ! data file (gx1) - if (tr_aero .or. tr_zaero) call faero_default ! default values +! ! isotopes +! if (tr_iso) call fiso_default ! default values +! ! aerosols +! ! if (tr_aero) call faero_data ! data file +! ! if (tr_zaero) call fzaero_data ! data file (gx1) +! if (tr_aero .or. tr_zaero) call faero_default ! default values +! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry - if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! Initialize atmosphere fluxes sent to coupler diff --git a/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 index c2331e4e5..943787498 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_FinalMod.F90 @@ -55,9 +55,8 @@ subroutine CICE_Finalize ! quit MPI !------------------------------------------------------------------- -#ifndef coupled - call end_run ! quit MPI -#endif +! standalone +! call end_run ! quit MPI end subroutine CICE_Finalize diff --git a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 index 0cd1ff177..dd0ca0b20 100644 --- a/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_FinalMod.F90 @@ -65,9 +65,7 @@ subroutine CICE_Finalize ! quit MPI !------------------------------------------------------------------- -#ifndef coupled call end_run ! quit MPI -#endif end subroutine CICE_Finalize diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 9dbd2713d..0a8614eb2 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -200,8 +200,6 @@ subroutine cice_init call init_forcing_atmo ! initialize atmospheric forcing (standalone) -#ifndef coupled -#ifndef CESMCOUPLED if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data @@ -213,8 +211,6 @@ subroutine cice_init ! if (tr_zaero) call fzaero_data ! data file (gx1) if (tr_aero .or. 
tr_zaero) call faero_default ! default values if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry if (runtype == 'initial' .and. .not. restart) & diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 363749e8f..b45db2514 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -94,8 +94,6 @@ subroutine CICE_Run call ice_timer_start(timer_couple) ! atm/ocn coupling -#ifndef coupled -#ifndef CESMCOUPLED ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice call get_forcing_atmo ! atmospheric forcing from data @@ -109,8 +107,6 @@ subroutine CICE_Run if (tr_aero .or. tr_zaero) call faero_default ! default values if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! Initialize atmosphere fluxes sent to coupler diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug index 7ca555433..c7ae7601f 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90_debug @@ -94,8 +94,6 @@ call ice_timer_start(timer_couple) ! atm/ocn coupling -#ifndef coupled -#ifndef CESMCOUPLED ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice call get_forcing_atmo ! atmospheric forcing from data @@ -109,8 +107,6 @@ if (tr_aero .or. tr_zaero) call faero_default ! default values if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry -#endif -#endif if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! 
Initialize atmosphere fluxes sent to coupler diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 45a37a464..860d6c95b 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -135,6 +135,7 @@ coriolis = 'latitude' kridge = 1 ktransport = 1 + ssh_stress = 'geostrophic' / &shortwave_nml diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 29b3ca15a..eb648e847 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -362,9 +362,9 @@ dynamics_nml "``basalstress``", "logical", "use basal stress parameterization for landfast ice", "``.false.``" "``Cf``", "real", "ratio of ridging work to PE change in ridging", "17.0" "``coriolis``", "``constant``", "constant coriolis value = 1.46e-4", "``latitude``" - "``Cstar``", "real", "constant in Hibler strength formula", "20" "", "``latitude``", "coriolis variable by latitude", "" "", "``zero``", "zero coriolis", "" + "``Cstar``", "real", "constant in Hibler strength formula", "20" "``e_ratio``", "real", "EVP ellipse aspect ratio", "2.0" "``kdyn``", "``-1``", "dynamics algorithm OFF", "1" "", "``0``", "dynamics OFF", "" @@ -389,6 +389,8 @@ dynamics_nml "``ndte``", "integer", "number of EVP subcycles", "120" "``Pstar``", "real", "constant in Hibler strength formula (N/m\ :math:`^2`)", "2.75e4" "``revised_evp``", "logical", "use revised EVP formulation", "``.false.``" + "``ssh_stress``", "``coupled``", "computed from coupled sea surface height gradient", "``geostrophic``" + "", "``geostropic``", "computed from ocean velocity", "" "``threshold_hw``", "real", "Max water depth for grounding (see :cite:`Amundrud04`)", "30." "``yield_curve``", "``ellipse``", "elliptical yield curve", "``ellipse``" "", "", "", "" From 9e8a30036855fa63850a44e0eee1733921c4e510 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Fri, 31 Jul 2020 09:26:29 -0700 Subject: [PATCH 09/13] Add support for .cice_set, git clone --depth=1, atmbndy namelist output, script warning messages for --set conflicts (#494) * Add support for .cice_set (#464) Add warning messages when multiple set options overlap (#243) Fix namelist output diagnostic for atmbndy (#493) Modify git clone in script to use --depth=1 (#492) * update doc precedence * update documentation, add info about .cice_set format Co-authored-by: Philippe Blain --- cice.setup | 19 ++++++++++++- cicecore/cicedynB/general/ice_init.F90 | 4 +-- configuration/scripts/parse_namelist.sh | 27 +++++++++++++++++- configuration/scripts/parse_settings.sh | 28 +++++++++++++++++++ configuration/scripts/tests/cice.lcov.csh | 2 +- .../scripts/tests/cice_test_codecov.csh | 4 +-- .../scripts/tests/report_results.csh | 2 +- doc/source/user_guide/ug_running.rst | 15 ++++++++-- doc/source/user_guide/ug_testing.rst | 6 ++-- 9 files changed, 94 insertions(+), 13 deletions(-) diff --git a/cice.setup b/cice.setup index 43fdd836c..3efe94827 100755 --- a/cice.setup +++ b/cice.setup @@ -40,6 +40,7 @@ set suitebuild = true set suitereuse = true set suiterun = false set suitesubmit = true +set ignoreuserset = false if ($#argv < 1) then set helpheader = 1 @@ -98,6 +99,7 @@ DESCRIPTION --acct : account number for the batch submission --grid, -g : grid, grid (default = ${grid}) --set, -s : case option setting(s), comma separated (default = " ") + --ignore-user-set: ignore ~/.cice_set if it exists --queue : queue for the batch submission For testing @@ -112,7 +114,7 @@ DESCRIPTION --diff : generate comparison against 
another case --report : automatically post results when tests are complete --coverage : generate and report test coverage metrics when tests are complete, - requires GNU compiler (ie. normally ``--env gnu``) + requires GNU compiler (ie. normally --env gnu) --setup-only : for suite, setup testcases, no build, no submission --setup-build : for suite, setup and build testcases, no submission --setup-build-run : for suite, setup, build, and run interactively @@ -263,6 +265,10 @@ while (1) set suitesubmit = true shift argv + else if ("$option" == "--ignore-user-set") then + set ignoreuserset = true + shift argv + # arguments with settings else shift argv @@ -412,6 +418,17 @@ set vers = ${ICE_VERSION} set shhash = `echo ${hash} | cut -c 1-10` if ( ${dosuite} == 0 ) then + # grab user defined default sets + if ("${ignoreuserset}" == "false" && -e ~/.cice_set) then + set setsu1 = `cat ~/.cice_set` + # get rid of spaces if they exist! + set setsuser = `echo ${setsu1} | sed 's/ //g'` + if ( ${sets} == "" ) then + set sets = "${setsuser}" + else + set sets = "${setsuser},${sets}" + endif + endif set teststring = "${test} ${grid} ${pesx} ${sets}" if ( $bfbcomp != ${spval} ) then if ( ${sets} == "" ) then diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index 27008670a..e9e0ade69 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -1295,12 +1295,12 @@ subroutine input_data write(nu_diag,1012) ' calc_strair = ', calc_strair,' calculate wind stress and speed' write(nu_diag,1012) ' rotate_wind = ', rotate_wind,' rotate wind/stress to computational grid' write(nu_diag,1012) ' formdrag = ', formdrag,' use form drag parameterization' - if (trim(atmbndy) == 'constant') then + if (trim(atmbndy) == 'default') then tmpstr2 = ': stability-based boundary layer' write(nu_diag,1012) ' highfreq = ', highfreq,' high-frequency atmospheric coupling' write(nu_diag,1022) ' natmiter = ', natmiter,' number of atmo boundary layer iterations' write(nu_diag,1006) ' atmiter_conv = ', atmiter_conv,' convergence criterion for ustar' - elseif (trim(atmbndy) == 'default') then + elseif (trim(atmbndy) == 'constant') then tmpstr2 = ': boundary layer uses bulk transfer coefficients' endif write(nu_diag,*) 'atmbndy = ', trim(atmbndy),trim(tmpstr2) diff --git a/configuration/scripts/parse_namelist.sh b/configuration/scripts/parse_namelist.sh index c94420f6e..ea539a2d0 100755 --- a/configuration/scripts/parse_namelist.sh +++ b/configuration/scripts/parse_namelist.sh @@ -5,12 +5,15 @@ if [[ "$#" -ne 2 ]]; then exit -1 fi +scriptname=`basename "$0"` filename=$1 filemods=$2 #echo "$0 $1 $2" echo "running parse_namelist.sh" foundstring="FoundSTRING" +vnamearray=() +valuearray=() while read -r line do @@ -24,17 +27,39 @@ do value=`echo $line | sed "s|^[[:space:]]*\([^[:space:]]*\)[[:space:]]*=[[:space:]]*\([^[:space:]]*\).*$|\2|g"` # echo "$line $vname $value" + found=${foundstring} + for i in "${!vnamearray[@]}"; do + if [[ "${found}" == "${foundstring}" ]]; then + vn=${vnamearray[$i]} + vv=${valuearray[$i]} +# echo "names/values $i ${vname} ${vn} ${value} ${vv}" + if [[ "$vname" == "$vn" ]]; then + found=$i + if [[ "$value" != "${vv}" ]]; then +# echo "names/values $i ${vname} ${vn} ${value} ${vv}" + echo "${scriptname} WARNING: re-overriding $vname from ${vv} to ${value}" + fi + fi + fi + done + #sed -i 's|\(^\s*'"$vname"'\s*\=\s*\)\(.*$\)|\1'"$value"'|g' $filename cp ${filename} ${filename}.check sed -i.sedbak -e 
's|\(^[[:space:]]*'"$vname"'[[:space:]]*=[[:space:]]*\)\(.*$\)|\1'"$foundstring"'|g' ${filename}.check grep -q ${foundstring} ${filename}.check if [ $? -eq 0 ]; then sed -i.sedbak -e 's|\(^[[:space:]]*'"$vname"'[[:space:]]*=[[:space:]]*\)\(.*$\)|\1'"$value"'|g' ${filename} + if [[ "${found}" == "${foundstring}" ]]; then + vnamearray+=($vname) + valuearray+=($value) + else + valuearray[$found]=${value} + fi if [[ -e "${filename}.sedbak" ]]; then rm ${filename}.sedbak fi else - echo "$0 ERROR: parsing error for ${vname}" + echo "${scriptname} ERROR: parsing error for ${vname}" exit -99 fi rm ${filename}.check ${filename}.check.sedbak diff --git a/configuration/scripts/parse_settings.sh b/configuration/scripts/parse_settings.sh index f797dbebe..d6ed31c15 100755 --- a/configuration/scripts/parse_settings.sh +++ b/configuration/scripts/parse_settings.sh @@ -5,11 +5,15 @@ if [[ "$#" -ne 2 ]]; then exit -1 fi +scriptname=`basename "$0"` filename=$1 filemods=$2 #echo "$0 $1 $2" echo "running parse_settings.sh" +foundstring="FoundSTRING" +vnamearray=() +valuearray=() while read -r line do @@ -23,8 +27,32 @@ do value=`echo $line | sed "s|\(^[[:space:]]*set[^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\).*$|\3|g"` # echo "$line $vname $value" + found=${foundstring} + for i in "${!vnamearray[@]}"; do + if [[ "${found}" == "${foundstring}" ]]; then + vn=${vnamearray[$i]} + vv=${valuearray[$i]} +# echo "names/values $i ${vname} ${vn} ${value} ${vv}" + if [[ "$vname" == "$vn" ]]; then + found=$i + if [[ "$value" != "${vv}" ]]; then +# echo "names/values $i ${vname} ${vn} ${value} ${vv}" + echo "${scriptname} WARNING: re-overriding $vname from ${vv} to ${value}" + fi + fi + fi + done + #sed -i 's|\(^\s*set.* '"$vname"' \)[^#]*\(#*.*$\)|\1 '"$value"' \2|g' $filename sed -i.sedbak -e 's|\(^[[:space:]]*set.* '"$vname"' \)[^#]*\(#*.*$\)|\1 '"$value"' \2|g' $filename + + if [[ "${found}" == "${foundstring}" ]]; then + vnamearray+=($vname) + valuearray+=($value) + else + valuearray[$found]=${value} + fi + if [[ -e "${filename}.sedbak" ]]; then rm ${filename}.sedbak fi diff --git a/configuration/scripts/tests/cice.lcov.csh b/configuration/scripts/tests/cice.lcov.csh index 8107778d9..5772833d1 100644 --- a/configuration/scripts/tests/cice.lcov.csh +++ b/configuration/scripts/tests/cice.lcov.csh @@ -9,7 +9,7 @@ set lcovhtmldir = lcov_cice_${report_name} genhtml -o ./${lcovhtmldir} --precision 2 -t "${report_name}" total.info rm -r -f ${lcovrepo} -git clone https://github.com/apcraig/${lcovrepo} +git clone --depth=1 https://github.com/apcraig/${lcovrepo} cp -p -r ${lcovhtmldir} ${lcovrepo}/ cd ${lcovrepo} diff --git a/configuration/scripts/tests/cice_test_codecov.csh b/configuration/scripts/tests/cice_test_codecov.csh index be9399f1b..d9a69e898 100755 --- a/configuration/scripts/tests/cice_test_codecov.csh +++ b/configuration/scripts/tests/cice_test_codecov.csh @@ -29,7 +29,7 @@ cd ${testdir} # Check out current cice master echo " " echo "*** checkout current cice master ***" -git clone https://github.com/cice-consortium/cice cice.master.${date} --recursive +git clone --depth=1 https://github.com/cice-consortium/cice cice.master.${date} --recursive cd cice.master.${date} set hash = `git rev-parse --short HEAD ` cd ../ @@ -40,7 +40,7 @@ cd ../ # This also copies in all dot file at the root that do not start with .g (ie. 
.git*) echo " " echo "*** checkout current test_cice_master ***" -git clone https://github.com/apcraig/test_cice_icepack test_cice_icepack.${date} +git clone --depth=1 https://github.com/apcraig/test_cice_icepack test_cice_icepack.${date} cd test_cice_icepack.${date} echo " " echo "*** remove current files and copy in cice master files ***" diff --git a/configuration/scripts/tests/report_results.csh b/configuration/scripts/tests/report_results.csh index e3f8eed70..2eb3731d5 100755 --- a/configuration/scripts/tests/report_results.csh +++ b/configuration/scripts/tests/report_results.csh @@ -25,7 +25,7 @@ set wikirepo = "https://github.com/CICE-Consortium/Test-Results.wiki.git" set wikiname = Test-Results.wiki rm -r -f ${wikiname} -git clone ${wikirepo} ${wikiname} +git clone --depth=1 ${wikirepo} ${wikiname} if ($status != 0) then echo " " echo "${0}: ERROR git clone failed" diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index c4971e235..957cfc4fc 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -203,7 +203,10 @@ specifies the compilation environment associated with the machine. This should specifies the grid. This is a string and for the current CICE driver, gx1, gx3, and tx1 are supported. (default = gx3) ``--set``, ``-s`` SET1,SET2,SET3 - specifies the optional settings for the case. The settings for ``--suite`` are defined in the suite file. Multiple settings can be specified by providing a comma deliminated set of values without spaces between settings. The available settings are in **configurations/scripts/options** and ``cice.setup --help`` will also list them. These settings files can change either the namelist values or overall case settings (such as the debug flag). + specifies the optional settings for the case. The settings for ``--suite`` are defined in the suite file. Multiple settings can be specified by providing a comma deliminated set of values without spaces between settings. The available settings are in **configurations/scripts/options** and ``cice.setup --help`` will also list them. These settings files can change either the namelist values or overall case settings (such as the debug flag). For cases and tests (not suites), settings defined in **~/.cice_set** (if it exists) will be included in the --set options. This behaviour can be overridden with the `--ignore-user-set`` command line option. + +``--ignore-user-set`` + ignores settings defined in **~/.cice.set** (if it exists) for cases and tests. **~/.cice_set** is always ignored for test suites. For CICE, when setting up cases, the ``--case`` and ``--mach`` must be specified. It's also recommended that ``--env`` be set explicitly as well. @@ -229,7 +232,13 @@ settings (options), the set_env.setting and set_nml.setting will be used to change the defaults. This is done as part of the ``cice.setup`` and the modifications are resolved in the **cice.settings** and **ice_in** file placed in the case directory. If multiple options are chosen that conflict, then the last -option chosen takes precedent. Not all options are compatible with each other. +option chosen takes precedence. Not all options are compatible with each other. + +Settings defined in **~/.cice_set** (if it exists) will be included in the ``--set`` +options. This behaviour can be overridden with the `--ignore-user-set`` command +line option. The format of the **~/.cice_set** file is a identical to the +``--set`` option, a single comma-delimited line of options. 
Settings on the +command line will take precedence over settings defined in **~/.cice_set**. Some of the options are @@ -473,7 +482,7 @@ the **env.[machine]** file. The easiest way to change a user's default is to create a file in your home directory called **.cice\_proj** and add your preferred account name to the first line. There is also an option (``--acct``) in **cice.setup** to define the account number. -The order of precedent is **cice.setup** command line option, +The order of precedence is **cice.setup** command line option, **.cice\_proj** setting, and then value in the **env.[machine]** file. .. _queue: diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index 8f8fe9441..5369efe5f 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -56,6 +56,8 @@ For individual tests, the following command line options can be set ``--set`` SET1,SET2,SET3 (see :ref:`case_options`) +``--ignore-user-set`` (see :ref:`case_options`) + ``--acct`` ACCOUNT (see :ref:`case_options`) ``--grid`` GRID (see :ref:`case_options`) @@ -312,7 +314,7 @@ If a user adds ``--set`` to the suite, all tests in that suite will add that opt ./cice.setup --suite base_suite,decomp_suite --mach wolf --env gnu --testid myid -s debug -The option settings defined in the suite have precendent over the command line +The option settings defined in the suite have precendence over the command line values if there are conflicts. The predefined test suites are defined under **configuration/scripts/tests** and @@ -459,7 +461,7 @@ Test Suite Examples ./results.csh If there are conflicts between the ``--set`` options in the suite and on the command line, - the suite will take precedent. + the suite will take precedence. 5) **Multiple test suites from a single command line** From 6a3e60cfc4747403b87f0bc5ca49f80caf1ac2d8 Mon Sep 17 00:00:00 2001 From: "David A. Bailey" Date: Fri, 31 Jul 2020 11:53:01 -0600 Subject: [PATCH 10/13] Send swpen by category (#495) Send broadband fswthrun by categories to coupler. --- cicecore/cicedynB/general/ice_flux.F90 | 14 ++++++++++++++ cicecore/cicedynB/general/ice_step_mod.F90 | 21 +++++++++++++++++++-- 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedynB/general/ice_flux.F90 index 16abbe162..97b726fdb 100644 --- a/cicecore/cicedynB/general/ice_flux.F90 +++ b/cicecore/cicedynB/general/ice_flux.F90 @@ -311,6 +311,11 @@ module ice_flux fresh_da, & ! fresh water flux to ocean due to data assim (kg/m^2/s) fsalt_da ! salt flux to ocean due to data assimilation(kg/m^2/s) + real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & + fswthrun_ai ! per-category fswthru * ai (W/m^2) + + logical (kind=log_kind), public :: send_i2x_per_cat = .false. + !----------------------------------------------------------------- ! internal !----------------------------------------------------------------- @@ -713,6 +718,11 @@ subroutine init_coupler_flux ffep (:,:,:,:)= c0 ffed (:,:,:,:)= c0 + if (send_i2x_per_cat) then + allocate(fswthrun_ai(nx_block,ny_block,ncat,max_blocks)) + fswthrun_ai(:,:,:,:) = c0 + endif + !----------------------------------------------------------------- ! 
derived or computed fields !----------------------------------------------------------------- @@ -806,6 +816,10 @@ subroutine init_flux_ocn H2_16O_ocn (:,:,:) = c0 H2_18O_ocn (:,:,:) = c0 + if (send_i2x_per_cat) then + fswthrun_ai(:,:,:,:) = c0 + endif + end subroutine init_flux_ocn !======================================================================= diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedynB/general/ice_step_mod.F90 index 333c22cd3..7a2493d58 100644 --- a/cicecore/cicedynB/general/ice_step_mod.F90 +++ b/cicecore/cicedynB/general/ice_step_mod.F90 @@ -177,7 +177,8 @@ subroutine step_therm1 (dt, iblk) flat, fswabs, flwout, evap, evaps, evapi, Tref, Qref, Uref, fresh, fsalt, fhocn, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & meltt, melts, meltb, congel, snoice, & - flatn_f, fsensn_f, fsurfn_f, fcondtopn_f + flatn_f, fsensn_f, fsurfn_f, fcondtopn_f, & + send_i2x_per_cat, fswthrun_ai use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn, fiso_atm, fiso_ocn, & Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn use ice_grid, only: lmask_n, lmask_s, tmask @@ -310,7 +311,8 @@ subroutine step_therm1 (dt, iblk) enddo endif ! tr_aero - if (tmask(i,j,iblk)) & + if (tmask(i,j,iblk)) then + call icepack_step_therm1(dt=dt, ncat=ncat, & nilyr=nilyr, nslyr=nslyr, & aicen_init = aicen_init (i,j,:,iblk), & @@ -452,6 +454,21 @@ subroutine step_therm1 (dt, iblk) frz_onset = frz_onset (i,j, iblk), & yday=yday, prescribed_ice=prescribed_ice) + !----------------------------------------------------------------- + ! handle per-category i2x fields, no merging + !----------------------------------------------------------------- + + if (send_i2x_per_cat) then + do n = 1, ncat + ! TODO (mvertens, 2018-12-22): do we need to add the band separated quantities + ! for MOM6 here also? + + fswthrun_ai(i,j,n,iblk) = fswthrun(i,j,n,iblk)*aicen_init(i,j,n,iblk) + enddo ! ncat + endif + + endif + if (tr_iso) then do n = 1, ncat if (vicen(i,j,n,iblk) > puny) & From 003aae07e2da475e9ae75c713bb0c1f5a701b823 Mon Sep 17 00:00:00 2001 From: "David A. Bailey" Date: Fri, 31 Jul 2020 16:51:42 -0600 Subject: [PATCH 11/13] Add sw_redist option to CICE (#497) This integrates the sw_redist changes from Icepack into CICE. Add three new namelist, sw_redist, sw_frac, and sw_dtemp. The alt03 and alt04 test cases have been updated so that ktherm=1 is bfb. The default settings are such that ktherm=2 is bfb. This is the other piece to addressing issue #485 and CICE-Consortium/Icepack#280. 
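
For reference, a minimal thermo_nml excerpt showing how a ktherm=1 configuration
would turn the redistribution on (the values mirror the updated alt03/alt04
options in this change; the released defaults keep sw_redist = .false.):

    &thermo_nml
      ktherm    = 1
      sw_redist = .true.    ! redistribute internal shortwave to the surface
      sw_frac   = 0.9d0     ! fraction redistributed
      sw_dtemp  = 0.02d0    ! temperature difference from freezing at which
                            ! redistribution begins
    /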
--- cicecore/cicedynB/general/ice_init.F90 | 29 ++++++++++++++++++--- configuration/scripts/ice_in | 3 +++ configuration/scripts/options/set_nml.alt03 | 3 +++ configuration/scripts/options/set_nml.alt04 | 3 +++ doc/source/user_guide/ug_case_settings.rst | 3 +++ icepack | 2 +- 6 files changed, 38 insertions(+), 5 deletions(-) diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 index e9e0ade69..d3b096eb3 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -118,7 +118,8 @@ subroutine input_data ahmax, R_ice, R_pnd, R_snw, dT_mlt, rsnw_mlt, emissivity, & mu_rdg, hs0, dpscale, rfracmin, rfracmax, pndaspect, hs1, hp1, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, dSdt_slow_mode, & - phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar + phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar, & + sw_frac, sw_dtemp integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, & kitd, kcatbound @@ -126,7 +127,8 @@ subroutine input_data character (len=char_len) :: shortwave, albedo_type, conduct, fbot_xfer_type, & tfrz_option, frzpnd, atmbndy, wave_spec_type - logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec + logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, & + sw_redist logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd @@ -182,7 +184,8 @@ subroutine input_data namelist /thermo_nml/ & kitd, ktherm, conduct, ksno, & a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, & - dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy + dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy, & + sw_redist, sw_frac, sw_dtemp namelist /dynamics_nml/ & kdyn, ndte, revised_evp, yield_curve, & @@ -439,6 +442,11 @@ subroutine input_data phi_c_slow_mode = 0.05_dbl_kind ! critical liquid fraction porosity cutoff phi_i_mushy = 0.85_dbl_kind ! liquid fraction of congelation ice + ! shortwave redistribution in the thermodynamics + sw_redist = .false. + sw_frac = 0.9_dbl_kind + sw_dtemp = 0.02_dbl_kind + !----------------------------------------------------------------- ! read from input file !----------------------------------------------------------------- @@ -723,6 +731,9 @@ subroutine input_data call broadcast_scalar(dSdt_slow_mode, master_task) call broadcast_scalar(phi_c_slow_mode, master_task) call broadcast_scalar(phi_i_mushy, master_task) + call broadcast_scalar(sw_redist, master_task) + call broadcast_scalar(sw_frac, master_task) + call broadcast_scalar(sw_dtemp, master_task) #ifdef CESMCOUPLED pointer_file = trim(pointer_file) // trim(inst_suffix) @@ -966,6 +977,12 @@ subroutine input_data endif endif !tcraig + if (ktherm == 1 .and. .not.sw_redist) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and sw_redist = ',sw_redist + write(nu_diag,*) subname//' WARNING: For consistency, set sw_redist = .true.' 
+ endif + endif if (formdrag) then if (trim(atmbndy) == 'constant') then @@ -1244,6 +1261,9 @@ subroutine input_data write(nu_diag,1007) ' ksno = ', ksno,' snow thermal conductivity' if (ktherm == 1) & write(nu_diag,*) 'conduct = ', trim(conduct),' ice thermal conductivity' + write(nu_diag,1012) ' sw_redist = ', sw_redist,' redistribute internal shortwave to surface' + write(nu_diag,1002) ' sw_frac = ', sw_frac,' fraction redistributed' + write(nu_diag,1002) ' sw_dtemp = ', sw_dtemp,' temperature difference from freezing to redistribute' if (ktherm == 2) then write(nu_diag,1002) ' a_rapid_mode = ', a_rapid_mode,' brine channel diameter' write(nu_diag,1007) ' Rac_rapid_mode = ', Rac_rapid_mode,' critical Rayleigh number' @@ -1630,7 +1650,8 @@ subroutine input_data wave_spec_type_in = wave_spec_type, & wave_spec_in=wave_spec, nfreq_in=nfreq, & tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & - Pstar_in=Pstar, Cstar_in=Cstar) + Pstar_in=Pstar, Cstar_in=Cstar, & + sw_redist_in=sw_redist, sw_frac_in=sw_frac, sw_dtemp_in=sw_dtemp) call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & tr_fsd_in=tr_fsd, tr_pond_in=tr_pond, & diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 860d6c95b..a26579df1 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -108,6 +108,9 @@ dSdt_slow_mode = -5.0e-8 phi_c_slow_mode = 0.05 phi_i_mushy = 0.85 + sw_redist = .false. + sw_frac = 0.9d0 + sw_dtemp = 0.02d0 / &dynamics_nml diff --git a/configuration/scripts/options/set_nml.alt03 b/configuration/scripts/options/set_nml.alt03 index 43681ab9d..f82491d9d 100644 --- a/configuration/scripts/options/set_nml.alt03 +++ b/configuration/scripts/options/set_nml.alt03 @@ -14,6 +14,9 @@ tr_aero = .true. calc_Tsfc = .false. kdyn = 2 ktherm = 1 +sw_redist = .true. +sw_frac = 0.9d0 +sw_dtemp = 0.02d0 tfrz_option = 'linear_salt' revised_evp = .false. Ktens = 0. diff --git a/configuration/scripts/options/set_nml.alt04 b/configuration/scripts/options/set_nml.alt04 index 786decae6..937704294 100644 --- a/configuration/scripts/options/set_nml.alt04 +++ b/configuration/scripts/options/set_nml.alt04 @@ -13,6 +13,9 @@ tr_pond_lvl = .true. tr_aero = .true. kitd = 0 ktherm = 1 +sw_redist = .true. 
+sw_frac = 0.9d0 +sw_dtemp = 0.02d0 conduct = 'MU71' kdyn = 1 kevp_kernel = 102 diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index eb648e847..550162515 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -344,6 +344,9 @@ thermo_nml "``phi_c_slow_mode``", ":math:`0<\phi_c < 1`", "critical liquid fraction", "0.05" "``phi_i_mushy``", ":math:`0<\phi_i < 1`", "solid fraction at lower boundary", "0.85" "``Rac_rapid_mode``", "real", "critical Rayleigh number", "10.0" + "``sw_redist``", "logical", "redistribute internal shortwave to surface", "``.false.``" + "``sw_frac``", "real", "fraction redistributed", "0.9" + "``sw_dtemp``", "real", "temperature difference from melt to start redistributing", "0.02" "", "", "", "" dynamics_nml diff --git a/icepack b/icepack index b1e41d9f1..aecc9b7d2 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit b1e41d9f12a59390aacdb933889c3c4a87c9e8d2 +Subproject commit aecc9b7d200ba95d3a76a953bc5098419731076a From 12d16ed9e6d8f8e750264afb324a92627b845bff Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Fri, 7 Aug 2020 17:17:01 -0700 Subject: [PATCH 12/13] Update Icepack to 4c42a82, non bit-for-bit changes (#499) * update icepack to 4c42a82 * remove two failing test configurations with iobinary due to bathymetry netcdf file requirements --- configuration/scripts/tests/io_suite.ts | 5 +++-- icepack | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/configuration/scripts/tests/io_suite.ts b/configuration/scripts/tests/io_suite.ts index c1edec292..3e98642e9 100755 --- a/configuration/scripts/tests/io_suite.ts +++ b/configuration/scripts/tests/io_suite.ts @@ -1,8 +1,9 @@ # Test Grid PEs Sets BFB-compare +# some iobinary configurations fail due to bathymetry netcdf file requirement, remove them restart gx3 8x4 debug,histall,iobinary,precision8 -restart gx3 12x2 alt01,histall,iobinary +#restart gx3 12x2 alt01,histall,iobinary restart gx3 16x2 alt02,histall,iobinary,precision8 -restart gx3 4x2 alt03,histall,iobinary +#restart gx3 4x2 alt03,histall,iobinary restart gx3 8x4 alt04,histall,iobinary,precision8 restart gx3 4x4 alt05,histall,iobinary restart gx3 32x1 bgcz,histall,iobinary,precision8 diff --git a/icepack b/icepack index aecc9b7d2..4c42a82e3 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit aecc9b7d200ba95d3a76a953bc5098419731076a +Subproject commit 4c42a82e3d92f191a9c52bca3831e8d242e2e4c0 From eb775174709983bbae9f09d4abd9d442f53e4c49 Mon Sep 17 00:00:00 2001 From: "David A. Bailey" Date: Wed, 12 Aug 2020 14:59:33 -0600 Subject: [PATCH 13/13] Latest NUOPC caps (#500) * updated orbital calculations needed for cesm * fixed problems in updated orbital calculations needed for cesm * update CICE6 to support coupling with UFS * put in changes so that both ufsatm and cesm requirements for potential temperature and density are satisfied * update icepack submodule * Revert "update icepack submodule" This reverts commit e70d1abcbeb4351195a2b81c6ce3f623c936426c. 
* update comp_ice.backend with temporary ice_timers fix * Fix threading problem in init_bgc * Fix additional OMP problems * changes for coldstart running * Move the forapps directory * remove cesmcoupled ifdefs * Fix logging issues for NUOPC * removal of many cpp-ifdefs * fix compile errors * fixes to get cesm working * fixed white space issue * Add restart_coszen namelist option * Update NUOPC cap to work with latest CICE6 master * nuopc,cmeps or s2s build updates * fixes for dbug_flag * Update nuopc2 to latest CICE master * Fix some merge problems * Fix dbug variable * Manual merge of UFS changes Co-authored-by: Mariana Vertenstein Co-authored-by: apcraig Co-authored-by: Denise Worthen --- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 8 ++- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 48 +++++++++++++- .../drivers/nuopc/cmeps/ice_import_export.F90 | 65 +++++++++---------- .../forapps/ufs/comp_ice.backend.clean | 8 +-- .../forapps/ufs/comp_ice.backend.libcice | 8 ++- .../scripts/machines/Macros.cheyenne_intel | 2 +- .../scripts/machines/Macros.hera_intel | 2 +- .../scripts/machines/Macros.orion_intel | 2 +- 8 files changed, 93 insertions(+), 50 deletions(-) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 486c36dcc..644ef72fa 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -346,6 +346,7 @@ subroutine coupling_prep (iblk) alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & + fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind @@ -543,7 +544,12 @@ subroutine coupling_prep (iblk) evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), fswthru (:,:,iblk), & + fhocn (:,:,iblk), & + fswthru (:,:,iblk), & + fswthru_vdr(:,:,iblk), & + fswthru_vdf(:,:,iblk), & + fswthru_idr(:,:,iblk), & + fswthru_idf(:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index aff4b5099..da3d95369 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -86,7 +86,7 @@ module ice_comp_nuopc character(len=*),parameter :: shr_cal_noleap = 'NO_LEAP' character(len=*),parameter :: shr_cal_gregorian = 'GREGORIAN' - integer , parameter :: dbug = 10 + integer :: dbug = 0 integer , parameter :: debug_import = 0 ! internal debug level integer , parameter :: debug_export = 0 ! internal debug level character(*), parameter :: modName = "(ice_comp_nuopc)" @@ -236,6 +236,14 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return endif + call NUOPC_CompAttributeGet(gcomp, name='dbug_flag', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. 
isSet) then + read(cvalue,*) dbug + end if + write(logmsg,'(i6)') dbug + call ESMF_LogWrite('CICE_cap: dbug = '//trim(logmsg), ESMF_LOGMSG_INFO) + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -539,6 +547,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) !---------------------------------------------------------------------------- call icepack_query_parameters(ktherm_out=ktherm) + call icepack_query_parameters(tfrz_option_out=tfrz_option) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -602,6 +611,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) write(nu_diag,*) trim(subname),' cice calendar_type = ',trim(calendar_type) endif +#ifdef CESMCOUPLED if (calendar_type == "GREGORIAN" .or. & calendar_type == "Gregorian" .or. & calendar_type == "gregorian") then @@ -609,6 +619,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) else call time2sec(iyear-year_init,month,mday,time) endif +#endif time = time+start_tod end if @@ -874,8 +885,8 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) ! diagnostics !-------------------------------- - if (dbug > 1) then - call State_diagnose(exportState,subname//':ES',rc=rc) + if (dbug > 0) then + call state_diagnose(exportState,subname//':ES',rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return endif @@ -905,8 +916,10 @@ subroutine ModelAdvance(gcomp, rc) ! Local variables type(ESMF_Clock) :: clock type(ESMF_Alarm) :: alarm + type(ESMF_Time) :: startTime type(ESMF_Time) :: currTime type(ESMF_Time) :: nextTime + type(ESMF_TimeInterval) :: timeStep type(ESMF_State) :: importState, exportState character(ESMF_MAXSTR) :: cvalue real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp @@ -928,11 +941,31 @@ subroutine ModelAdvance(gcomp, rc) logical :: isPresent, isSet character(*) , parameter :: F00 = "('(ice_comp_nuopc) ',2a,i8,d21.14)" character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' + character(char_len_long) :: msgString !-------------------------------- rc = ESMF_SUCCESS if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + ! query the Component for its clock, importState and exportState + call ESMF_GridCompGet(gcomp, clock=clock, importState=importState, & + exportState=exportState, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_ClockPrint(clock, options="currTime", & + preString="------>Advancing ICE from: ", unit=msgString, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_LogWrite(subname//trim(msgString), ESMF_LOGMSG_INFO) + + call ESMF_ClockGet(clock, startTime=startTime, currTime=currTime, & + timeStep=timeStep, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_TimePrint(currTime + timeStep, & + preString="--------------------------------> to: ", unit=msgString, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + !-------------------------------- ! Turn on timers !-------------------------------- @@ -1050,6 +1083,10 @@ subroutine ModelAdvance(gcomp, rc) idate, sec, nu_diag, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if + if (dbug > 0) then + call state_diagnose(importState,subname//':IS',rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if !-------------------------------- ! 
Advance cice and timestep update @@ -1067,11 +1104,16 @@ subroutine ModelAdvance(gcomp, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return call t_stopf ('cice_run_export') + ! write Debug output if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & idate, sec, nu_diag, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if + if (dbug > 0) then + call state_diagnose(exportState,subname//':ES',rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if ! reset shr logging to my original values call shr_file_setLogUnit (shrlogunit) diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 11cfcfbab..b32085143 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -13,13 +13,11 @@ module ice_import_export use ice_flux , only : strairxt, strairyt, strocnxt, strocnyt use ice_flux , only : alvdr, alidr, alvdf, alidf, Tref, Qref, Uref use ice_flux , only : flat, fsens, flwout, evap, fswabs, fhocn, fswthru -#if (defined NEWCODE) use ice_flux , only : fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf use ice_flux , only : send_i2x_per_cat, fswthrun_ai - use ice_flux , only : faero_atm, faero_ocn - use ice_flux , only : fiso_atm, fiso_ocn, fiso_rain, fiso_evap - use ice_flux , only : Qa_iso, Qref_iso, HDO_ocn, H2_18O_ocn, H2_16O_ocn -#endif + use ice_flux_bgc , only : faero_atm, faero_ocn + use ice_flux_bgc , only : fiso_atm, fiso_ocn, fiso_evap + use ice_flux_bgc , only : Qa_iso, Qref_iso, HDO_ocn, H2_18O_ocn, H2_16O_ocn use ice_flux , only : fresh, fsalt, zlvl, uatm, vatm, potT, Tair, Qa use ice_flux , only : rhoa, swvdr, swvdf, swidr, swidf, flw, frain use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt @@ -35,6 +33,7 @@ module ice_import_export use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags use icepack_intfc , only : icepack_liquidus_temperature + use icepack_intfc , only : icepack_sea_freezing_temperature use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf #ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp @@ -87,7 +86,7 @@ module ice_import_export type (fld_list_type) :: fldsFrIce(fldsMax) type(ESMF_GeomType_Flag) :: geomtype - integer , parameter :: dbug = 10 ! i/o debug messages + integer , parameter :: io_dbug = 10 ! i/o debug messages character(*), parameter :: u_FILE_u = & __FILE__ @@ -115,7 +114,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam !------------------------------------------------------------------------------- rc = ESMF_SUCCESS - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) ! Determine if the following attributes are sent by the driver and if so read them in flds_wiso = .false. @@ -126,7 +125,6 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) end if -#if (defined NEWCODE) flds_i2o_per_cat = .false. 
call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -134,7 +132,6 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam read(cvalue,*) send_i2x_per_cat call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) end if -#endif !----------------- ! advertise import fields @@ -207,14 +204,12 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo' ) -#if (defined NEWCODE) ! the following are advertised but might not be connected if they are not present ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction_n', & ungridded_lbound=1, ungridded_ubound=ncat) end if -#endif ! ice/atm fluxes computed by ice call fldlist_add(fldsFrIce_num, fldsFrIce, 'stress_on_air_ice_zonal' ) @@ -233,12 +228,10 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dir_flx' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dif_flx' ) -#if (defined NEWCODE) if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ifrac_n', & ungridded_lbound=1, ungridded_ubound=ncat) end if -#endif call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_fresh_water_to_ocean_rate' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_salt_rate' ) call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_zonal' ) @@ -253,8 +246,8 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam if (flds_wiso) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_fresh_water_to_ocean_rate_wiso', & ungridded_lbound=1, ungridded_ubound=3) - !call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & - ! 
ungridded_lbound=1, ungridded_ubound=3) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & + ungridded_lbound=1, ungridded_ubound=3) call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref_wiso', & ungridded_lbound=1, ungridded_ubound=3) end if @@ -265,7 +258,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam if (ChkErr(rc,__LINE__,u_FILE_u)) return enddo - if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + if (io_dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) end subroutine ice_advertise_fields @@ -361,12 +354,22 @@ subroutine ice_import( importState, rc ) real (kind=dbl_kind),allocatable :: aflds(:,:,:,:) real (kind=dbl_kind) :: workx, worky real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP - real (kind=dbl_kind) :: tffresh + real (kind=dbl_kind) :: Tffresh real (kind=dbl_kind) :: inst_pres_height_lowest + character(len=char_len) :: tfrz_option + integer(int_kind) :: ktherm character(len=*), parameter :: subname = 'ice_import' + character(len=1024) :: msgString !----------------------------------------------------- call icepack_query_parameters(Tffresh_out=Tffresh) + call icepack_query_parameters(tfrz_option_out=tfrz_option) + call icepack_query_parameters(ktherm_out=ktherm) + if (io_dbug > 5) then + write(msgString,'(A,i8)')trim(subname)//' tfrz_option = ' & + // trim(tfrz_option)//', ktherm = ',ktherm + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + end if ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & ! Tffresh_out=Tffresh) @@ -568,7 +571,6 @@ subroutine ice_import( importState, rc ) ! Get aerosols from mediator !------------------------------------------------------- -#if (defined NEWCODE) if (State_FldChk(importState, 'Faxa_bcph')) then ! the following indices are based on what the atmosphere is sending ! bcphidry ungridded_index=1 @@ -604,7 +606,6 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=4, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if -#endif !------------------------------------------------------- ! Water isotopes from the mediator @@ -614,7 +615,6 @@ subroutine ice_import( importState, rc ) ! 18O => ungridded_index=2 ! HDO => ungridded_index=3 -#if (defined NEWCODE) if (State_FldChk(importState, 'shum_wiso')) then call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=1, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -623,12 +623,12 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=3, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return +! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return +! 
call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return +! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=1, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -644,7 +644,6 @@ subroutine ice_import( importState, rc ) call state_getimport(importState, 'So_roce_wiso', output=H2_18O_ocn, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if -#endif !----------------------------------------------------------------- ! rotate zonal/meridional vectors to local coordinates @@ -697,8 +696,7 @@ subroutine ice_import( importState, rc ) do iblk = 1, nblocks do j = 1,ny_block do i = 1,nx_block - !TODO: tcx should this be icepack_sea_freezing_temperature? - Tf (i,j,iblk) = icepack_liquidus_temperature(sss(i,j,iblk)) + Tf(i,j,iblk) = icepack_sea_freezing_temperature(sss(i,j,iblk)) end do end do end do @@ -773,12 +771,12 @@ subroutine ice_export( exportState, rc ) real (kind=dbl_kind) :: tauyo (nx_block,ny_block,max_blocks) ! ice/ocean stress real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) ! fractional ice area real (kind=dbl_kind), allocatable :: tempfld(:,:,:) - real (kind=dbl_kind) :: tffresh + real (kind=dbl_kind) :: Tffresh character(len=*),parameter :: subname = 'ice_export' !----------------------------------------------------- rc = ESMF_SUCCESS - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) call icepack_query_parameters(Tffresh_out=Tffresh) ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & @@ -907,7 +905,6 @@ subroutine ice_export( exportState, rc ) ! ---- ! surface temperature of ice covered portion (degK) - !call state_setexport(exportState, 'sea_ice_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) call state_setexport(exportState, 'sea_ice_surface_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -1005,7 +1002,6 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'mean_sw_pen_to_ocn' , input=fswthru, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#if (defined NEWCODE) ! flux of vis dir shortwave through ice to ocean call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -1021,7 +1017,6 @@ subroutine ice_export( exportState, rc ) ! flux of ir dif shortwave through ice to ocean call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthru_idf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#endif ! heat exchange with ocean call state_setexport(exportState, 'net_heat_flx_to_ocn' , input=fhocn, lmask=tmask, ifrac=ailohi, rc=rc) @@ -1043,7 +1038,6 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'stress_on_ocn_ice_merid' , input=tauyo, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#if (defined NEWCODE) ! ------ ! optional aerosol fluxes to ocean ! 
------ @@ -1134,7 +1128,6 @@ subroutine ice_export( exportState, rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return end do end if -#endif end subroutine ice_export diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.clean b/configuration/scripts/forapps/ufs/comp_ice.backend.clean index 7eef2ed1a..823f1f586 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.clean +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.clean @@ -10,10 +10,10 @@ setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR if (${SITE} =~ cheyenne*) then setenv ARCH cheyenne_intel -#else if (${SITE} =~ Orion*) then -# setenv ARCH orion_intel -#else if (${SITE} =~ hera*) then -# setenv ARCH hera_intel +else if (${SITE} =~ orion*) then + setenv ARCH orion_intel +else if (${SITE} =~ hera*) then + setenv ARCH hera_intel else echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" exit -2 diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice index ecd7494c0..ea38e048b 100755 --- a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -18,7 +18,7 @@ setenv THRD no # set to yes for OpenMP threading if (${SITE} =~ cheyenne*) then setenv ARCH cheyenne_intel -else if (${SITE} =~ Orion*) then +else if (${SITE} =~ orion*) then setenv ARCH orion_intel else if (${SITE} =~ hera*) then setenv ARCH hera_intel @@ -68,9 +68,11 @@ endif # Build in debug mode. If DEBUG=Y, enable DEBUG compilation. This # flag is set in ${ROOTDIR}/coupledFV3_MOM6_CICE_debug.appBuilder file. if (! $?DEBUG) then - setenv ICE_BLDDEBUG true + setenv ICE_BLDDEBUG false else - if ($DEBUG != "Y") then + if ($DEBUG == "Y") then + setenv ICE_BLDDEBUG true + else setenv ICE_BLDDEBUG false endif endif diff --git a/configuration/scripts/machines/Macros.cheyenne_intel b/configuration/scripts/machines/Macros.cheyenne_intel index 902224766..243295487 100644 --- a/configuration/scripts/machines/Macros.cheyenne_intel +++ b/configuration/scripts/machines/Macros.cheyenne_intel @@ -12,7 +12,7 @@ FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -trace FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.hera_intel b/configuration/scripts/machines/Macros.hera_intel index 519e3a5ba..230f43e70 100644 --- a/configuration/scripts/machines/Macros.hera_intel +++ b/configuration/scripts/machines/Macros.hera_intel @@ -12,7 +12,7 @@ FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -trace FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.orion_intel b/configuration/scripts/machines/Macros.orion_intel index aae839f4e..6dffdd0a2 100644 --- a/configuration/scripts/machines/Macros.orion_intel +++ b/configuration/scripts/machines/Macros.orion_intel @@ -12,7 +12,7 @@ FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -trace FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds 
-check pointers -fpe0 -check noarg_temp_created + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif
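Note on the new runtime debug control (a sketch, not part of the patch): the cap replaces the hard-coded "dbug = 10" parameter with a dbug level read from the driver-provided dbug_flag attribute, and calls state_diagnose on the import and export states whenever dbug > 0. The fragment below is a minimal, self-contained illustration of that attribute-read pattern. The helper name get_int_attr, the character lengths, and the fall-back-to-default behavior are assumptions made for this sketch only; the patch itself reads dbug_flag inline in InitializeAdvertise.

      subroutine get_int_attr(gcomp, attr_name, attr_value, default_value, rc)
         ! Sketch: read an optional integer component attribute (e.g. dbug_flag),
         ! falling back to a default when the driver does not provide or set it.
         use ESMF , only : ESMF_GridComp, ESMF_LogWrite, ESMF_LOGMSG_INFO, ESMF_SUCCESS
         use NUOPC, only : NUOPC_CompAttributeGet
         implicit none
         type(ESMF_GridComp) , intent(inout) :: gcomp
         character(len=*)    , intent(in)    :: attr_name
         integer             , intent(out)   :: attr_value
         integer             , intent(in)    :: default_value
         integer             , intent(out)   :: rc

         character(len=256) :: cvalue      ! raw attribute string from the driver
         character(len=256) :: logmsg
         logical            :: isPresent, isSet

         rc = ESMF_SUCCESS
         attr_value = default_value
         call NUOPC_CompAttributeGet(gcomp, name=attr_name, value=cvalue, &
              isPresent=isPresent, isSet=isSet, rc=rc)
         if (rc /= ESMF_SUCCESS) return
         if (isPresent .and. isSet) then
            read(cvalue,*) attr_value      ! attribute strings hold integers, e.g. "0"
         end if
         write(logmsg,'(a,i6)') trim(attr_name)//' = ', attr_value
         call ESMF_LogWrite('CICE_cap sketch: '//trim(logmsg), ESMF_LOGMSG_INFO)
      end subroutine get_int_attr

A call such as "call get_int_attr(gcomp, 'dbug_flag', dbug, 0, rc)" reproduces the behavior added here: dbug stays 0 (no extra state diagnostics) unless the driver supplies dbug_flag. How the attribute reaches the cap is driver-specific; in CMEPS-based systems it is typically set through the run-time configuration attributes rather than in CICE namelists.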