From e628a9a696bd146a0036e946884b9f9102ebd8e4 Mon Sep 17 00:00:00 2001 From: Denise Worthen Date: Mon, 23 Jan 2023 07:58:06 -0500 Subject: [PATCH] Update CICE for latest Consortium/main (#56) --- .github/workflows/test-cice.yml | 14 + cice.setup | 17 +- .../analysis/ice_diagnostics.F90 | 45 +- .../analysis/ice_diagnostics_bgc.F90 | 0 .../analysis/ice_history.F90 | 65 +- .../analysis/ice_history_bgc.F90 | 31 +- .../analysis/ice_history_drag.F90 | 30 +- .../analysis/ice_history_fsd.F90 | 30 +- .../analysis/ice_history_mechred.F90 | 30 +- .../analysis/ice_history_pond.F90 | 36 +- .../analysis/ice_history_shared.F90 | 2 +- .../analysis/ice_history_snow.F90 | 34 +- .../dynamics/ice_dyn_eap.F90 | 129 ++-- .../dynamics/ice_dyn_evp.F90 | 617 ++++++++++-------- .../dynamics/ice_dyn_evp_1d.F90 | 2 +- .../dynamics/ice_dyn_shared.F90 | 45 +- .../dynamics/ice_dyn_vp.F90 | 20 +- .../dynamics/ice_transport_driver.F90 | 0 .../dynamics/ice_transport_remap.F90 | 0 .../general/ice_flux.F90 | 0 .../general/ice_flux_bgc.F90 | 0 .../general/ice_forcing.F90 | 16 +- .../general/ice_forcing_bgc.F90 | 0 .../general/ice_init.F90 | 271 +++++--- .../general/ice_state.F90 | 0 .../general/ice_step_mod.F90 | 0 .../infrastructure/comm/mpi/ice_boundary.F90 | 3 - .../infrastructure/comm/mpi/ice_broadcast.F90 | 0 .../comm/mpi/ice_communicate.F90 | 0 .../infrastructure/comm/mpi/ice_exit.F90 | 0 .../comm/mpi/ice_gather_scatter.F90 | 0 .../comm/mpi/ice_global_reductions.F90 | 0 .../infrastructure/comm/mpi/ice_reprosum.F90 | 0 .../infrastructure/comm/mpi/ice_timers.F90 | 0 .../comm/serial/ice_boundary.F90 | 3 - .../comm/serial/ice_broadcast.F90 | 0 .../comm/serial/ice_communicate.F90 | 0 .../infrastructure/comm/serial/ice_exit.F90 | 0 .../comm/serial/ice_gather_scatter.F90 | 0 .../comm/serial/ice_global_reductions.F90 | 0 .../comm/serial/ice_reprosum.F90 | 0 .../infrastructure/comm/serial/ice_timers.F90 | 0 .../infrastructure/ice_blocks.F90 | 0 .../infrastructure/ice_domain.F90 | 34 +- 
.../infrastructure/ice_grid.F90 | 109 ++-- .../infrastructure/ice_memusage.F90 | 0 .../infrastructure/ice_memusage_gptl.c | 2 +- .../infrastructure/ice_read_write.F90 | 0 .../infrastructure/ice_restart_driver.F90 | 156 +++-- .../infrastructure/ice_restoring.F90 | 0 .../infrastructure/ice_shr_reprosum86.c | 0 .../io/io_binary/ice_history_write.F90 | 0 .../io/io_binary/ice_restart.F90 | 0 .../io/io_netcdf/ice_history_write.F90 | 0 .../io/io_netcdf/ice_restart.F90 | 0 .../io/io_pio2/ice_history_write.F90 | 0 .../infrastructure/io/io_pio2/ice_pio.F90 | 0 .../infrastructure/io/io_pio2/ice_restart.F90 | 15 +- cicecore/cicedynB | 1 + .../drivers/direct/hadgem3/CICE_InitMod.F90 | 14 +- .../direct/nemo_concepts/CICE_InitMod.F90 | 14 +- cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 12 +- cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 13 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 6 +- .../drivers/nuopc/cmeps/ice_import_export.F90 | 4 +- cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 31 +- .../drivers/standalone/cice/CICE_InitMod.F90 | 12 +- .../unittest/gridavgchk/CICE_InitMod.F90 | 12 +- .../drivers/unittest/sumchk/CICE_InitMod.F90 | 13 +- cicecore/shared/ice_fileunits.F90 | 64 +- cicecore/shared/ice_init_column.F90 | 9 + cicecore/version.txt | 2 +- configuration/scripts/cice.build | 12 +- configuration/scripts/ice_in | 2 + configuration/scripts/options/set_nml.dyneap | 1 + .../scripts/options/set_nml.saltflux | 2 + configuration/scripts/tests/base_suite.ts | 3 +- configuration/scripts/tests/decomp_suite.ts | 2 + configuration/scripts/tests/first_suite.ts | 15 +- configuration/scripts/tests/gridsys_suite.ts | 13 +- configuration/scripts/tests/omp_suite.ts | 112 ++-- configuration/scripts/tests/perf_suite.ts | 33 +- doc/source/cice_index.rst | 3 +- doc/source/conf.py | 4 +- doc/source/developer_guide/dg_dynamics.rst | 22 +- doc/source/developer_guide/dg_forcing.rst | 2 +- doc/source/science_guide/sg_horiztrans.rst | 9 +- doc/source/science_guide/sg_tracers.rst | 2 +- 
doc/source/user_guide/ug_case_settings.rst | 21 +- doc/source/user_guide/ug_implementation.rst | 2 +- doc/source/user_guide/ug_testing.rst | 83 ++- doc/source/user_guide/ug_troubleshooting.rst | 20 + icepack | 2 +- 93 files changed, 1446 insertions(+), 882 deletions(-) rename cicecore/{cicedynB => cicedyn}/analysis/ice_diagnostics.F90 (98%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_diagnostics_bgc.F90 (100%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history.F90 (98%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_bgc.F90 (99%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_drag.F90 (92%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_fsd.F90 (95%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_mechred.F90 (94%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_pond.F90 (92%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_shared.F90 (99%) rename cicecore/{cicedynB => cicedyn}/analysis/ice_history_snow.F90 (93%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_eap.F90 (98%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_evp.F90 (83%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_evp_1d.F90 (99%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_shared.F90 (99%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_dyn_vp.F90 (99%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_transport_driver.F90 (100%) rename cicecore/{cicedynB => cicedyn}/dynamics/ice_transport_remap.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_flux.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_flux_bgc.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_forcing.F90 (99%) rename cicecore/{cicedynB => cicedyn}/general/ice_forcing_bgc.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_init.F90 (94%) rename cicecore/{cicedynB => cicedyn}/general/ice_state.F90 (100%) rename cicecore/{cicedynB => cicedyn}/general/ice_step_mod.F90 (100%) 
rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_boundary.F90 (99%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_broadcast.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_communicate.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_exit.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_gather_scatter.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_global_reductions.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_reprosum.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/mpi/ice_timers.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_boundary.F90 (99%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_broadcast.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_communicate.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_exit.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_gather_scatter.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_global_reductions.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_reprosum.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/comm/serial/ice_timers.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_blocks.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_domain.F90 (97%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_grid.F90 (99%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_memusage.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_memusage_gptl.c (98%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_read_write.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_restart_driver.F90 (92%) rename cicecore/{cicedynB => 
cicedyn}/infrastructure/ice_restoring.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/ice_shr_reprosum86.c (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_binary/ice_history_write.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_binary/ice_restart.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_netcdf/ice_history_write.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_netcdf/ice_restart.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_pio2/ice_history_write.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_pio2/ice_pio.F90 (100%) rename cicecore/{cicedynB => cicedyn}/infrastructure/io/io_pio2/ice_restart.F90 (98%) create mode 120000 cicecore/cicedynB create mode 100644 configuration/scripts/options/set_nml.dyneap create mode 100644 configuration/scripts/options/set_nml.saltflux diff --git a/.github/workflows/test-cice.yml b/.github/workflows/test-cice.yml index c60a07721..70fdc4c14 100644 --- a/.github/workflows/test-cice.yml +++ b/.github/workflows/test-cice.yml @@ -74,6 +74,20 @@ jobs: ln -s ${GITHUB_WORKSPACE}/../CICE ${HOME}/cice # ls -al ${HOME}/ # ls -al ${GITHUB_WORKSPACE}/ + - name: check for tabs + run: | + cd $HOME/cice/cicecore + set cnt = 0 + set ffiles = `find -P . -iname "*.f*"` + set cfiles = `find -P . 
-iname "*.c*"` + foreach file ($ffiles $cfiles) + set fcnt = `sed -n '/\t/p' $file | wc -l` + @ cnt = $cnt + $fcnt + if ($fcnt > 0) then + echo "TAB found: $fcnt $file" + endif + end + exit $cnt - name: setup conda env shell: /bin/bash {0} run: | diff --git a/cice.setup b/cice.setup index 586fe3464..30da0ed2e 100755 --- a/cice.setup +++ b/cice.setup @@ -455,7 +455,22 @@ if ( ${dosuite} == 0 ) then set sets = "" else - set tarrays = `echo ${testsuite} | sed 's/,/ /g' | fmt -1 | sort -u` + # generate unique set of suites in tarrays in order they are set + set tarrays0 = `echo ${testsuite} | sed 's/,/ /g' | fmt -1 ` + #echo "${0}: tarrays0 = ${tarrays0}" + set tarrays = $tarrays0[1] + foreach t1 ( ${tarrays0} ) + set found = 0 + foreach t2 ( ${tarrays} ) + if ( ${t1} == ${t2} ) then + set found = 1 + endif + end + if ( ${found} == 0 ) then + set tarrays = ( ${tarrays} ${t1} ) + endif + end + #echo "${0}: tarrays = ${tarrays}" set testsuitecnt = 0 foreach tarray ( ${tarrays} ) @ testsuitecnt = ${testsuitecnt} + 1 diff --git a/cicecore/cicedynB/analysis/ice_diagnostics.F90 b/cicecore/cicedyn/analysis/ice_diagnostics.F90 similarity index 98% rename from cicecore/cicedynB/analysis/ice_diagnostics.F90 rename to cicecore/cicedyn/analysis/ice_diagnostics.F90 index 8879d6632..53631b2d4 100644 --- a/cicecore/cicedynB/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedyn/analysis/ice_diagnostics.F90 @@ -87,6 +87,8 @@ module ice_diagnostics totms , & ! total ice/snow water mass (sh) totmin , & ! total ice water mass (nh) totmis , & ! total ice water mass (sh) + totsn , & ! total salt mass (nh) + totss , & ! total salt mass (sh) toten , & ! total ice/snow energy (J) totes ! total ice/snow energy (J) @@ -154,7 +156,7 @@ subroutine runtime_diags (dt) rhofresh, lfresh, lvap, ice_ref_salinity, Tffresh character (len=char_len) :: & - snwredist + snwredist, saltflux_option ! 
hemispheric state quantities real (kind=dbl_kind) :: & @@ -162,6 +164,8 @@ subroutine runtime_diags (dt) umaxs, hmaxs, shmaxs, areas, snwmxs, extents, shmaxst, & etotn, mtotn, micen, msnwn, pmaxn, ketotn, & etots, mtots, mices, msnws, pmaxs, ketots, & + stotn, & + stots, & urmsn, albtotn, arean_alb, mpndn, ptotn, spondn, & urmss, albtots, areas_alb, mpnds, ptots, sponds @@ -226,7 +230,7 @@ subroutine runtime_diags (dt) awtvdr_out=awtvdr, awtidr_out=awtidr, awtvdf_out=awtvdf, awtidf_out=awtidf, & rhofresh_out=rhofresh, lfresh_out=lfresh, lvap_out=lvap, & ice_ref_salinity_out=ice_ref_salinity,snwredist_out=snwredist, & - snwgrain_out=snwgrain) + snwgrain_out=snwgrain, saltflux_option_out=saltflux_option) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -512,6 +516,15 @@ subroutine runtime_diags (dt) etots = global_sum(work1, distrb_info, & field_loc_center, tareas) + ! total salt volume + call total_salt (work2) + + stotn = global_sum(work2, distrb_info, & + field_loc_center, tarean) + stots = global_sum(work2, distrb_info, & + field_loc_center, tareas) + + !----------------------------------------------------------------- ! various fluxes !----------------------------------------------------------------- @@ -785,12 +798,22 @@ subroutine runtime_diags (dt) swerrs = (fswnets - fswdns) / (fswnets - c1) ! salt mass - msltn = micen*ice_ref_salinity*p001 - mslts = mices*ice_ref_salinity*p001 + if (saltflux_option == 'prognostic') then + ! compute the total salt mass + msltn = stotn*rhoi*p001 + mslts = stots*rhoi*p001 + + ! change in salt mass + delmsltn = rhoi*(stotn-totsn)*p001 + delmslts = rhoi*(stots-totss)*p001 + else + msltn = micen*ice_ref_salinity*p001 + mslts = mices*ice_ref_salinity*p001 - ! change in salt mass - delmsltn = delmxn*ice_ref_salinity*p001 - delmslts = delmxs*ice_ref_salinity*p001 + ! 
change in salt mass + delmsltn = delmxn*ice_ref_salinity*p001 + delmslts = delmxs*ice_ref_salinity*p001 + endif ! salt error serrn = (sfsaltn + delmsltn) / (msltn + c1) @@ -1275,7 +1298,7 @@ subroutine init_mass_diags rhoi, rhos, rhofresh real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1 + work1, work2 character(len=*), parameter :: subname = '(init_mass_diags)' @@ -1310,6 +1333,12 @@ subroutine init_mass_diags toten = global_sum(work1, distrb_info, field_loc_center, tarean) totes = global_sum(work1, distrb_info, field_loc_center, tareas) + ! north/south salt + call total_salt (work2) + totsn = global_sum(work2, distrb_info, field_loc_center, tarean) + totss = global_sum(work2, distrb_info, field_loc_center, tareas) + + if (print_points) then do n = 1, npnt if (my_task == pmloc(n)) then diff --git a/cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 b/cicecore/cicedyn/analysis/ice_diagnostics_bgc.F90 similarity index 100% rename from cicecore/cicedynB/analysis/ice_diagnostics_bgc.F90 rename to cicecore/cicedyn/analysis/ice_diagnostics_bgc.F90 diff --git a/cicecore/cicedynB/analysis/ice_history.F90 b/cicecore/cicedyn/analysis/ice_history.F90 similarity index 98% rename from cicecore/cicedynB/analysis/ice_history.F90 rename to cicecore/cicedyn/analysis/ice_history.F90 index f5e7d0d16..f19158f6a 100644 --- a/cicecore/cicedynB/analysis/ice_history.F90 +++ b/cicecore/cicedyn/analysis/ice_history.F90 @@ -81,6 +81,7 @@ subroutine init_hist (dt) use ice_history_fsd, only: init_hist_fsd_2D, init_hist_fsd_3Df, & init_hist_fsd_4Df, f_afsd, f_afsdn use ice_restart_shared, only: restart + use ice_fileunits, only: goto_nml real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -102,7 +103,11 @@ subroutine init_hist (dt) character (len=25) :: & cstr_gat, cstr_gau, cstr_gav, & ! mask area name for t, u, v atm grid (ga) cstr_got, cstr_gou, cstr_gov ! 
mask area name for t, u, v ocn grid (go) - character(len=char_len) :: description + character (len=25) :: & + gridstr2D, gridstr ! temporary string names + character(len=char_len) :: description + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! text namelist name character(len=*), parameter :: subname = '(init_hist)' @@ -226,24 +231,39 @@ subroutine init_hist (dt) file=__FILE__, line=__LINE__) if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_nml' + nml_name = 'icefields_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! seek to this namelist + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif @@ -418,10 +438,6 @@ subroutine init_hist (dt) f_taubyE = f_tauby endif -#ifndef ncdf - f_bounds = .false. -#endif - ! write dimensions for 3D or 4D history variables ! note: list of variables checked here is incomplete if (f_aicen(1:1) /= 'x' .or. f_vicen(1:1) /= 'x' .or. 
& @@ -1307,21 +1323,25 @@ subroutine init_hist (dt) select case (grid_ice) case('B') description = ", on U grid (NE corner values)" + gridstr2d = trim(ustr2D) + gridstr = trim(ucstr) case ('CD','C') description = ", on T grid" + gridstr2d = trim(tstr2D) + gridstr = trim(tcstr) end select - call define_hist_field(n_sig1,"sig1","1",ustr2D, ucstr, & + call define_hist_field(n_sig1,"sig1","1",gridstr2d, gridstr, & "norm. principal stress 1", & "sig1 is instantaneous" // trim(description), c1, c0, & ns1, f_sig1) - call define_hist_field(n_sig2,"sig2","1",ustr2D, ucstr, & + call define_hist_field(n_sig2,"sig2","1",gridstr2d, gridstr, & "norm. principal stress 2", & "sig2 is instantaneous" // trim(description), c1, c0, & ns1, f_sig2) - call define_hist_field(n_sigP,"sigP","1",ustr2D, ucstr, & + call define_hist_field(n_sigP,"sigP","1",gridstr2d, gridstr, & "ice pressure", & "sigP is instantaneous" // trim(description), c1, c0, & ns1, f_sigP) @@ -2162,12 +2182,13 @@ subroutine accum_hist (dt) real (kind=dbl_kind) :: awtvdr, awtidr, awtvdf, awtidf, puny, secday, rad_to_deg real (kind=dbl_kind) :: Tffresh, rhoi, rhos, rhow, ice_ref_salinity - real (kind=dbl_kind) :: rho_ice, rho_ocn, Tice, Sbr, phi, rhob, dfresh, dfsalt + real (kind=dbl_kind) :: rho_ice, rho_ocn, Tice, Sbr, phi, rhob, dfresh, dfsalt, sicen logical (kind=log_kind) :: formdrag, skl_bgc logical (kind=log_kind) :: tr_pond, tr_aero, tr_brine, tr_snow integer (kind=int_kind) :: ktherm integer (kind=int_kind) :: nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY, nt_Tsfc, & nt_alvl, nt_vlvl + character (len=char_len) :: saltflux_option type (block) :: & this_block ! 
block information for current block @@ -2179,6 +2200,7 @@ subroutine accum_hist (dt) call icepack_query_parameters(Tffresh_out=Tffresh, rhoi_out=rhoi, rhos_out=rhos, & rhow_out=rhow, ice_ref_salinity_out=ice_ref_salinity) call icepack_query_parameters(formdrag_out=formdrag, skl_bgc_out=skl_bgc, ktherm_out=ktherm) + call icepack_query_parameters(saltflux_option_out=saltflux_option) call icepack_query_tracer_flags(tr_pond_out=tr_pond, tr_aero_out=tr_aero, & tr_brine_out=tr_brine, tr_snow_out=tr_snow) call icepack_query_tracer_indices(nt_sice_out=nt_sice, nt_qice_out=nt_qice, & @@ -2263,7 +2285,7 @@ subroutine accum_hist (dt) !--------------------------------------------------------------- !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & - !$OMP k,n,qn,ns,sn,rho_ocn,rho_ice,Tice,Sbr,phi,rhob,dfresh,dfsalt, & + !$OMP k,n,qn,ns,sn,rho_ocn,rho_ice,Tice,Sbr,phi,rhob,dfresh,dfsalt,sicen, & !$OMP worka,workb,worka3,Tinz4d,Sinz4d,Tsnz4d) do iblk = 1, nblocks @@ -3222,7 +3244,16 @@ subroutine accum_hist (dt) dfresh = -rhoi*frazil(i,j,iblk)/dt endif endif - dfsalt = ice_ref_salinity*p001*dfresh + if (saltflux_option == 'prognostic') then + sicen = c0 + do k = 1, nzilyr + sicen = sicen + trcr(i,j,nt_sice+k-1,iblk)*vice(i,j,iblk) & + / real(nzilyr,kind=dbl_kind) + enddo + dfsalt = sicen*p001*dfresh + else + dfsalt = ice_ref_salinity*p001*dfresh + endif worka(i,j) = aice(i,j,iblk)*(fsalt(i,j,iblk)+dfsalt) endif enddo diff --git a/cicecore/cicedynB/analysis/ice_history_bgc.F90 b/cicecore/cicedyn/analysis/ice_history_bgc.F90 similarity index 99% rename from cicecore/cicedynB/analysis/ice_history_bgc.F90 rename to cicecore/cicedyn/analysis/ice_history_bgc.F90 index 003e76120..6974a087b 100644 --- a/cicecore/cicedynB/analysis/ice_history_bgc.F90 +++ b/cicecore/cicedyn/analysis/ice_history_bgc.F90 @@ -271,6 +271,7 @@ subroutine init_hist_bgc_2D use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field, & f_fsalt, 
f_fsalt_ai, f_sice + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: n, ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag @@ -283,6 +284,9 @@ subroutine init_hist_bgc_2D tr_bgc_DON, tr_bgc_Fe, tr_bgc_hum, & skl_bgc, solve_zsal, z_tracers + character(len=char_len) :: nml_name ! for namelist check + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=*), parameter :: subname = '(init_hist_bgc_2D)' call icepack_query_parameters(skl_bgc_out=skl_bgc, & @@ -305,24 +309,39 @@ subroutine init_hist_bgc_2D !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_bgc_nml' + nml_name = 'icefields_bgc_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! check if can open file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_bgc_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! seek to namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_bgc_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! 
backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_bgc_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_drag.F90 b/cicecore/cicedyn/analysis/ice_history_drag.F90 similarity index 92% rename from cicecore/cicedynB/analysis/ice_history_drag.F90 rename to cicecore/cicedyn/analysis/ice_history_drag.F90 index fba19b364..dd9e3cb59 100644 --- a/cicecore/cicedynB/analysis/ice_history_drag.F90 +++ b/cicecore/cicedyn/analysis/ice_history_drag.F90 @@ -64,10 +64,13 @@ subroutine init_hist_drag_2D use ice_calendar, only: nstreams use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag logical (kind=log_kind) :: formdrag + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! for namelist check character(len=*), parameter :: subname = '(init_hist_drag_2D)' @@ -81,24 +84,39 @@ subroutine init_hist_drag_2D !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_drag_nml' + nml_name = 'icefields_drag_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_drag_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! 
go to this namelist + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_drag_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_drag_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_fsd.F90 b/cicecore/cicedyn/analysis/ice_history_fsd.F90 similarity index 95% rename from cicecore/cicedynB/analysis/ice_history_fsd.F90 rename to cicecore/cicedyn/analysis/ice_history_fsd.F90 index b52db4e05..610f56608 100644 --- a/cicecore/cicedynB/analysis/ice_history_fsd.F90 +++ b/cicecore/cicedyn/analysis/ice_history_fsd.F90 @@ -76,10 +76,13 @@ subroutine init_hist_fsd_2D use ice_calendar, only: nstreams use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag logical (kind=log_kind) :: tr_fsd, wave_spec + character (len=char_len_long) :: tmpstr2 ! test namelist + character(len=char_len) :: nml_name ! 
text namelist name character(len=*), parameter :: subname = '(init_hist_fsd_2D)' @@ -96,24 +99,39 @@ subroutine init_hist_fsd_2D !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_fsd_nml' + nml_name = 'icefields_fsd_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_fsd_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! goto this namelist + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_fsd_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! 
backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_fsd_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_mechred.F90 b/cicecore/cicedyn/analysis/ice_history_mechred.F90 similarity index 94% rename from cicecore/cicedynB/analysis/ice_history_mechred.F90 rename to cicecore/cicedyn/analysis/ice_history_mechred.F90 index 98c58bc39..e0d15fcf2 100644 --- a/cicecore/cicedynB/analysis/ice_history_mechred.F90 +++ b/cicecore/cicedyn/analysis/ice_history_mechred.F90 @@ -84,11 +84,14 @@ subroutine init_hist_mechred_2D use ice_calendar, only: nstreams, histfreq use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag real (kind=dbl_kind) :: secday logical (kind=log_kind) :: tr_lvl + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! for namelist check character(len=*), parameter :: subname = '(init_hist_mechred_2D)' @@ -103,24 +106,39 @@ subroutine init_hist_mechred_2D !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_mechred_nml' + nml_name = 'icefields_mechred_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! 
open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_mechred_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! goto this namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_mechred_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_mechred_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_pond.F90 b/cicecore/cicedyn/analysis/ice_history_pond.F90 similarity index 92% rename from cicecore/cicedynB/analysis/ice_history_pond.F90 rename to cicecore/cicedyn/analysis/ice_history_pond.F90 index 8818ff94e..d209e6db6 100644 --- a/cicecore/cicedynB/analysis/ice_history_pond.F90 +++ b/cicecore/cicedyn/analysis/ice_history_pond.F90 @@ -69,10 +69,13 @@ subroutine init_hist_pond_2D use ice_calendar, only: nstreams, histfreq use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field + use ice_fileunits, only: goto_nml integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag logical (kind=log_kind) :: tr_pond + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! 
text namelist name character(len=*), parameter :: subname = '(init_hist_pond_2D)' @@ -86,24 +89,39 @@ subroutine init_hist_pond_2D !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_pond_nml' + nml_name = 'icefields_pond_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_pond_nml open file '// & - trim(nml_filename), & - file=__FILE__, line=__LINE__) + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif - + + ! goto this namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_pond_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! 
backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_pond_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) endif diff --git a/cicecore/cicedynB/analysis/ice_history_shared.F90 b/cicecore/cicedyn/analysis/ice_history_shared.F90 similarity index 99% rename from cicecore/cicedynB/analysis/ice_history_shared.F90 rename to cicecore/cicedyn/analysis/ice_history_shared.F90 index d9c62edde..70aa5e14c 100644 --- a/cicecore/cicedynB/analysis/ice_history_shared.F90 +++ b/cicecore/cicedyn/analysis/ice_history_shared.F90 @@ -40,7 +40,7 @@ module ice_history_shared logical (kind=log_kind), public :: & hist_avg ! if true, write averaged data instead of snapshots - character (len=char_len), public :: & + character (len=char_len_long), public :: & history_file , & ! output file for history incond_file ! output file for snapshot initial conditions diff --git a/cicecore/cicedynB/analysis/ice_history_snow.F90 b/cicecore/cicedyn/analysis/ice_history_snow.F90 similarity index 93% rename from cicecore/cicedynB/analysis/ice_history_snow.F90 rename to cicecore/cicedyn/analysis/ice_history_snow.F90 index 0ec4144bf..62e65b5a3 100644 --- a/cicecore/cicedynB/analysis/ice_history_snow.F90 +++ b/cicecore/cicedyn/analysis/ice_history_snow.F90 @@ -69,6 +69,7 @@ subroutine init_hist_snow_2D (dt) use ice_history_shared, only: tstr2D, tcstr, define_hist_field use ice_fileunits, only: nu_nml, nml_filename, & get_fileunit, release_fileunit + use ice_fileunits, only: goto_nml real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -76,7 +77,10 @@ subroutine init_hist_snow_2D (dt) integer (kind=int_kind) :: ns integer (kind=int_kind) :: nml_error ! 
namelist i/o error flag real (kind=dbl_kind) :: rhofresh, secday - logical (kind=log_kind) :: tr_snow + logical (kind=log_kind) :: tr_snow + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=char_len) :: nml_name ! for namelist check + character(len=*), parameter :: subname = '(init_hist_snow_2D)' call icepack_query_tracer_flags(tr_snow_out=tr_snow) @@ -92,26 +96,42 @@ subroutine init_hist_snow_2D (dt) !----------------------------------------------------------------- if (my_task == master_task) then - write(nu_diag,*) subname,' Reading icefields_snow_nml' + nml_name = 'icefields_snow_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_snow_nml open file '// & + call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & trim(nml_filename), & file=__FILE__, line=__LINE__) endif + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_snow_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: icefields_snow_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) + endif else ! .not. 
tr_snow diff --git a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 b/cicecore/cicedyn/dynamics/ice_dyn_eap.F90 similarity index 98% rename from cicecore/cicedynB/dynamics/ice_dyn_eap.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_eap.F90 index 28a047c4e..e240fc8f1 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_eap.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_eap.F90 @@ -25,6 +25,7 @@ module ice_dyn_eap p001, p027, p055, p111, p166, p222, p25, p333 use ice_fileunits, only: nu_diag, nu_dump_eap, nu_restart_eap use ice_exit, only: abort_ice + use ice_flux, only: rdg_shear ! use ice_timers, only: & ! ice_timer_start, ice_timer_stop, & ! timer_tmp1, timer_tmp2, timer_tmp3, timer_tmp4, & @@ -36,8 +37,7 @@ module ice_dyn_eap implicit none private - public :: eap, init_eap, write_restart_eap, read_restart_eap, & - alloc_dyn_eap + public :: eap, init_eap, write_restart_eap, read_restart_eap ! Look-up table needed for calculating structure tensor integer (int_kind), parameter :: & @@ -71,42 +71,16 @@ module ice_dyn_eap real (kind=dbl_kind) :: & puny, pi, pi2, piq, pih -!======================================================================= - - contains - -!======================================================================= -! Allocate space for all variables -! 
- subroutine alloc_dyn_eap + real (kind=dbl_kind), parameter :: & + kfriction = 0.45_dbl_kind - integer (int_kind) :: ierr + real (kind=dbl_kind), save :: & + invdx, invdy, invda, invsin - character(len=*), parameter :: subname = '(alloc_dyn_eap)' - allocate( a11_1 (nx_block,ny_block,max_blocks), & - a11_2 (nx_block,ny_block,max_blocks), & - a11_3 (nx_block,ny_block,max_blocks), & - a11_4 (nx_block,ny_block,max_blocks), & - a12_1 (nx_block,ny_block,max_blocks), & - a12_2 (nx_block,ny_block,max_blocks), & - a12_3 (nx_block,ny_block,max_blocks), & - a12_4 (nx_block,ny_block,max_blocks), & - e11 (nx_block,ny_block,max_blocks), & - e12 (nx_block,ny_block,max_blocks), & - e22 (nx_block,ny_block,max_blocks), & - yieldstress11(nx_block,ny_block,max_blocks), & - yieldstress12(nx_block,ny_block,max_blocks), & - yieldstress22(nx_block,ny_block,max_blocks), & - s11 (nx_block,ny_block,max_blocks), & - s12 (nx_block,ny_block,max_blocks), & - s22 (nx_block,ny_block,max_blocks), & - a11 (nx_block,ny_block,max_blocks), & - a12 (nx_block,ny_block,max_blocks), & - stat=ierr) - if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory') +!======================================================================= - end subroutine alloc_dyn_eap + contains !======================================================================= ! Elastic-anisotropic-plastic dynamics driver @@ -134,7 +108,8 @@ subroutine eap (dt) dyn_prep1, dyn_prep2, stepu, dyn_finish, & seabed_stress_factor_LKD, seabed_stress_factor_prob, & seabed_stress_method, seabed_stress, & - stack_fields, unstack_fields, iceTmask, iceUmask + stack_fields, unstack_fields, iceTmask, iceUmask, & + fld2, fld3, fld4 use ice_flux, only: rdg_conv, strairxT, strairyT, & strairxU, strairyU, uocn, vocn, ss_tltx, ss_tlty, fmU, & strtltxU, strtltyU, strocnxU, strocnyU, strintxU, strintyU, taubxU, taubyU, & @@ -186,11 +161,6 @@ subroutine eap (dt) umass , & ! total mass of ice and snow (u grid) umassdti ! 
mass of U-cell/dte (kg/m^2 s) - real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:), & ! temporary for stacking fields for halo update - fld3(:,:,:,:), & ! temporary for stacking fields for halo update - fld4(:,:,:,:) ! temporary for stacking fields for halo update - real (kind=dbl_kind), dimension(nx_block,ny_block,8):: & strtmp ! stress combinations for momentum equation @@ -214,10 +184,6 @@ subroutine eap (dt) ! Initialize !----------------------------------------------------------------- - allocate(fld2(nx_block,ny_block,2,max_blocks)) - allocate(fld3(nx_block,ny_block,3,max_blocks)) - allocate(fld4(nx_block,ny_block,4,max_blocks)) - ! This call is needed only if dt changes during runtime. ! call set_evp_parameters (dt) @@ -226,7 +192,7 @@ subroutine eap (dt) do j = 1, ny_block do i = 1, nx_block rdg_conv (i,j,iblk) = c0 -! rdg_shear(i,j,iblk) = c0 + rdg_shear(i,j,iblk) = c0 ! always zero. Could be moved divu (i,j,iblk) = c0 shear(i,j,iblk) = c0 e11(i,j,iblk) = c0 @@ -554,7 +520,6 @@ subroutine eap (dt) enddo ! subcycling - deallocate(fld2,fld3,fld4) if (maskhalo_dyn) call ice_HaloDestroy(halo_info_mask) !----------------------------------------------------------------- @@ -588,6 +553,8 @@ subroutine init_eap use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks + use ice_calendar, only: dt_dyn + use ice_dyn_shared, only: init_dyn_shared ! local variables @@ -599,7 +566,7 @@ subroutine init_eap eps6 = 1.0e-6_dbl_kind integer (kind=int_kind) :: & - ix, iy, iz, ia + ix, iy, iz, ia, ierr integer (kind=int_kind), parameter :: & nz = 100 @@ -609,6 +576,8 @@ subroutine init_eap da, dx, dy, dz, & phi + real (kind=dbl_kind) :: invstressconviso + character(len=*), parameter :: subname = '(init_eap)' call icepack_query_parameters(puny_out=puny, & @@ -619,6 +588,31 @@ subroutine init_eap phi = pi/c12 ! 
diamond shaped floe smaller angle (default phi = 30 deg) + call init_dyn_shared(dt_dyn) + + allocate( a11_1 (nx_block,ny_block,max_blocks), & + a11_2 (nx_block,ny_block,max_blocks), & + a11_3 (nx_block,ny_block,max_blocks), & + a11_4 (nx_block,ny_block,max_blocks), & + a12_1 (nx_block,ny_block,max_blocks), & + a12_2 (nx_block,ny_block,max_blocks), & + a12_3 (nx_block,ny_block,max_blocks), & + a12_4 (nx_block,ny_block,max_blocks), & + e11 (nx_block,ny_block,max_blocks), & + e12 (nx_block,ny_block,max_blocks), & + e22 (nx_block,ny_block,max_blocks), & + yieldstress11(nx_block,ny_block,max_blocks), & + yieldstress12(nx_block,ny_block,max_blocks), & + yieldstress22(nx_block,ny_block,max_blocks), & + s11 (nx_block,ny_block,max_blocks), & + s12 (nx_block,ny_block,max_blocks), & + s22 (nx_block,ny_block,max_blocks), & + a11 (nx_block,ny_block,max_blocks), & + a12 (nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory') + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) SCHEDULE(runtime) do iblk = 1, nblocks do j = 1, ny_block @@ -640,6 +634,7 @@ subroutine init_eap a12_2 (i,j,iblk) = c0 a12_3 (i,j,iblk) = c0 a12_4 (i,j,iblk) = c0 + rdg_shear (i,j,iblk) = c0 enddo ! i enddo ! j enddo ! iblk @@ -657,6 +652,9 @@ subroutine init_eap zinit = -pih dy = pi/real(ny_yield-1,kind=dbl_kind) yinit = -dy + invdx = c1/dx + invdy = c1/dy + invda = c1/da do ia=1,na_yield do ix=1,nx_yield @@ -712,6 +710,12 @@ subroutine init_eap enddo enddo + ! Factor to maintain the same stress as in EVP (see Section 3) + ! 
Can be set to 1 otherwise + + invstressconviso = c1/(c1+kfriction*kfriction) + invsin = c1/sin(pi2/c12) * invstressconviso + end subroutine init_eap !======================================================================= @@ -1590,22 +1594,12 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & rotstemp11s, rotstemp12s, rotstemp22s, & sig11, sig12, sig22, & sgprm11, sgprm12, sgprm22, & - invstressconviso, & Angle_denom_gamma, Angle_denom_alpha, & Tany_1, Tany_2, & x, y, dx, dy, da, & dtemp1, dtemp2, atempprime, & kxw, kyw, kaw - real (kind=dbl_kind), save :: & - invdx, invdy, invda, invsin - - logical (kind=log_kind), save :: & - first_call = .true. - - real (kind=dbl_kind), parameter :: & - kfriction = 0.45_dbl_kind - ! tcraig, temporary, should be moved to namelist ! turns on interpolation in stress_rdg logical(kind=log_kind), parameter :: & @@ -1613,14 +1607,6 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & character(len=*), parameter :: subname = '(update_stress_rdg)' - ! Factor to maintain the same stress as in EVP (see Section 3) - ! Can be set to 1 otherwise - - if (first_call) then - invstressconviso = c1/(c1+kfriction*kfriction) - invsin = c1/sin(pi2/c12) * invstressconviso - endif - ! compute eigenvalues, eigenvectors and angles for structure tensor, strain rates ! 1) structure tensor @@ -1717,17 +1703,6 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & if (y > pi) y = y - pi if (y < 0) y = y + pi - ! Now calculate updated stress tensor - - if (first_call) then - dx = pi/real(nx_yield-1,kind=dbl_kind) - dy = pi/real(ny_yield-1,kind=dbl_kind) - da = p5/real(na_yield-1,kind=dbl_kind) - invdx = c1/dx - invdy = c1/dy - invda = c1/da - endif - if (interpolate_stress_rdg) then ! Interpolated lookup @@ -1869,8 +1844,6 @@ subroutine update_stress_rdg (ksub, ndte, divu, tension, & + rotstemp22s*dtemp22 endif - first_call = .false. 
- end subroutine update_stress_rdg !======================================================================= diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 b/cicecore/cicedyn/dynamics/ice_dyn_evp.F90 similarity index 83% rename from cicecore/cicedynB/dynamics/ice_dyn_evp.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_evp.F90 index 8eab5e260..cf111cccf 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_evp.F90 @@ -55,7 +55,61 @@ module ice_dyn_evp implicit none private - public :: evp +! all c or cd + real (kind=dbl_kind), allocatable :: & + uocnN (:,:,:) , & ! i ocean current (m/s) + vocnN (:,:,:) , & ! j ocean current (m/s) + ss_tltxN (:,:,:) , & ! sea surface slope, x-direction (m/m) + ss_tltyN (:,:,:) , & ! sea surface slope, y-direction (m/m) + cdn_ocnN (:,:,:) , & ! ocn drag coefficient + waterxN (:,:,:) , & ! for ocean stress calculation, x (m/s) + wateryN (:,:,:) , & ! for ocean stress calculation, y (m/s) + forcexN (:,:,:) , & ! work array: combined atm stress and ocn tilt, x + forceyN (:,:,:) , & ! work array: combined atm stress and ocn tilt, y + aiN (:,:,:) , & ! ice fraction on N-grid + nmass (:,:,:) , & ! total mass of ice and snow (N grid) + nmassdti (:,:,:) ! mass of N-cell/dte (kg/m^2 s) +! all c or d + real (kind=dbl_kind), allocatable :: & + uocnE (:,:,:) , & ! i ocean current (m/s) + vocnE (:,:,:) , & ! j ocean current (m/s) + ss_tltxE (:,:,:) , & ! sea surface slope, x-direction (m/m) + ss_tltyE (:,:,:) , & ! sea surface slope, y-direction (m/m) + cdn_ocnE (:,:,:) , & ! ocn drag coefficient + waterxE (:,:,:) , & ! for ocean stress calculation, x (m/s) + wateryE (:,:,:) , & ! for ocean stress calculation, y (m/s) + forcexE (:,:,:) , & ! work array: combined atm stress and ocn tilt, x + forceyE (:,:,:) , & ! work array: combined atm stress and ocn tilt, y + aiE (:,:,:) , & ! ice fraction on E-grid + emass (:,:,:) , & ! total mass of ice and snow (E grid) + emassdti (:,:,:) ! 
mass of E-cell/dte (kg/m^2 s) + + real (kind=dbl_kind), allocatable :: & + strengthU(:,:,:) , & ! strength averaged to U points + divergU (:,:,:) , & ! div array on U points, differentiate from divu + tensionU (:,:,:) , & ! tension array on U points + shearU (:,:,:) , & ! shear array on U points + deltaU (:,:,:) , & ! delta array on U points + zetax2T (:,:,:) , & ! zetax2 = 2*zeta (bulk viscosity) + zetax2U (:,:,:) , & ! zetax2T averaged to U points + etax2T (:,:,:) , & ! etax2 = 2*eta (shear viscosity) + etax2U (:,:,:) ! etax2T averaged to U points + + real (kind=dbl_kind), allocatable :: & + uocnU (:,:,:) , & ! i ocean current (m/s) + vocnU (:,:,:) , & ! j ocean current (m/s) + ss_tltxU (:,:,:) , & ! sea surface slope, x-direction (m/m) + ss_tltyU (:,:,:) , & ! sea surface slope, y-direction (m/m) + cdn_ocnU (:,:,:) , & ! ocn drag coefficient + tmass (:,:,:) , & ! total mass of ice and snow (kg/m^2) + waterxU (:,:,:) , & ! for ocean stress calculation, x (m/s) + wateryU (:,:,:) , & ! for ocean stress calculation, y (m/s) + forcexU (:,:,:) , & ! work array: combined atm stress and ocn tilt, x + forceyU (:,:,:) , & ! work array: combined atm stress and ocn tilt, y + umass (:,:,:) , & ! total mass of ice and snow (u grid) + umassdti (:,:,:) ! mass of U-cell/dte (kg/m^2 s) + + public :: evp, init_evp !======================================================================= @@ -64,6 +118,84 @@ module ice_dyn_evp !======================================================================= ! Elastic-viscous-plastic dynamics driver ! + subroutine init_evp + use ice_blocks, only: nx_block, ny_block + use ice_domain_size, only: max_blocks + use ice_grid, only: grid_ice + use ice_calendar, only: dt_dyn + use ice_dyn_shared, only: init_dyn_shared + +!allocate c and cd grid var. Follow structucre of eap + integer (int_kind) :: ierr + + character(len=*), parameter :: subname = '(alloc_dyn_evp)' + + call init_dyn_shared(dt_dyn) + + allocate( uocnU (nx_block,ny_block,max_blocks), & ! 
i ocean current (m/s) + vocnU (nx_block,ny_block,max_blocks), & ! j ocean current (m/s) + ss_tltxU (nx_block,ny_block,max_blocks), & ! sea surface slope, x-direction (m/m) + ss_tltyU (nx_block,ny_block,max_blocks), & ! sea surface slope, y-direction (m/m) + cdn_ocnU (nx_block,ny_block,max_blocks), & ! ocn drag coefficient + tmass (nx_block,ny_block,max_blocks), & ! total mass of ice and snow (kg/m^2) + waterxU (nx_block,ny_block,max_blocks), & ! for ocean stress calculation, x (m/s) + wateryU (nx_block,ny_block,max_blocks), & ! for ocean stress calculation, y (m/s) + forcexU (nx_block,ny_block,max_blocks), & ! work array: combined atm stress and ocn tilt, x + forceyU (nx_block,ny_block,max_blocks), & ! work array: combined atm stress and ocn tilt, y + umass (nx_block,ny_block,max_blocks), & ! total mass of ice and snow (u grid) + umassdti (nx_block,ny_block,max_blocks), & ! mass of U-cell/dte (kg/m^2 s) + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory B-Grid evp') + + + if (grid_ice == 'CD' .or. 
grid_ice == 'C') then + + allocate( strengthU(nx_block,ny_block,max_blocks), & + divergU (nx_block,ny_block,max_blocks), & + tensionU (nx_block,ny_block,max_blocks), & + shearU (nx_block,ny_block,max_blocks), & + deltaU (nx_block,ny_block,max_blocks), & + zetax2T (nx_block,ny_block,max_blocks), & + zetax2U (nx_block,ny_block,max_blocks), & + etax2T (nx_block,ny_block,max_blocks), & + etax2U (nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory U evp') + + allocate( uocnN (nx_block,ny_block,max_blocks), & + vocnN (nx_block,ny_block,max_blocks), & + ss_tltxN (nx_block,ny_block,max_blocks), & + ss_tltyN (nx_block,ny_block,max_blocks), & + cdn_ocnN (nx_block,ny_block,max_blocks), & + waterxN (nx_block,ny_block,max_blocks), & + wateryN (nx_block,ny_block,max_blocks), & + forcexN (nx_block,ny_block,max_blocks), & + forceyN (nx_block,ny_block,max_blocks), & + aiN (nx_block,ny_block,max_blocks), & + nmass (nx_block,ny_block,max_blocks), & + nmassdti (nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory N evp') + + allocate( uocnE (nx_block,ny_block,max_blocks), & + vocnE (nx_block,ny_block,max_blocks), & + ss_tltxE (nx_block,ny_block,max_blocks), & + ss_tltyE (nx_block,ny_block,max_blocks), & + cdn_ocnE (nx_block,ny_block,max_blocks), & + waterxE (nx_block,ny_block,max_blocks), & + wateryE (nx_block,ny_block,max_blocks), & + forcexE (nx_block,ny_block,max_blocks), & + forceyE (nx_block,ny_block,max_blocks), & + aiE (nx_block,ny_block,max_blocks), & + emass (nx_block,ny_block,max_blocks), & + emassdti (nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory E evp') + + endif + + end subroutine init_evp + #ifdef CICE_IN_NEMO ! Wind stress is set during this routine from the values supplied ! via NEMO (unless calc_strair is true). 
These values are supplied @@ -116,7 +248,7 @@ subroutine evp (dt) DminTarea, visc_method, deformations, deformationsC_T, deformationsCD_T, & strain_rates_U, & iceTmask, iceUmask, iceEmask, iceNmask, & - dyn_haloUpdate + dyn_haloUpdate, fld2, fld3, fld4 real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -145,64 +277,6 @@ subroutine evp (dt) indxUi , & ! compressed index in i-direction indxUj ! compressed index in j-direction - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - uocnU , & ! i ocean current (m/s) - vocnU , & ! j ocean current (m/s) - ss_tltxU , & ! sea surface slope, x-direction (m/m) - ss_tltyU , & ! sea surface slope, y-direction (m/m) - cdn_ocnU , & ! ocn drag coefficient - tmass , & ! total mass of ice and snow (kg/m^2) - waterxU , & ! for ocean stress calculation, x (m/s) - wateryU , & ! for ocean stress calculation, y (m/s) - forcexU , & ! work array: combined atm stress and ocn tilt, x - forceyU , & ! work array: combined atm stress and ocn tilt, y - umass , & ! total mass of ice and snow (u grid) - umassdti ! mass of U-cell/dte (kg/m^2 s) - - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - uocnN , & ! i ocean current (m/s) - vocnN , & ! j ocean current (m/s) - ss_tltxN , & ! sea surface slope, x-direction (m/m) - ss_tltyN , & ! sea surface slope, y-direction (m/m) - cdn_ocnN , & ! ocn drag coefficient - waterxN , & ! for ocean stress calculation, x (m/s) - wateryN , & ! for ocean stress calculation, y (m/s) - forcexN , & ! work array: combined atm stress and ocn tilt, x - forceyN , & ! work array: combined atm stress and ocn tilt, y - aiN , & ! ice fraction on N-grid - nmass , & ! total mass of ice and snow (N grid) - nmassdti ! mass of N-cell/dte (kg/m^2 s) - - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - uocnE , & ! i ocean current (m/s) - vocnE , & ! j ocean current (m/s) - ss_tltxE , & ! sea surface slope, x-direction (m/m) - ss_tltyE , & ! 
sea surface slope, y-direction (m/m) - cdn_ocnE , & ! ocn drag coefficient - waterxE , & ! for ocean stress calculation, x (m/s) - wateryE , & ! for ocean stress calculation, y (m/s) - forcexE , & ! work array: combined atm stress and ocn tilt, x - forceyE , & ! work array: combined atm stress and ocn tilt, y - aiE , & ! ice fraction on E-grid - emass , & ! total mass of ice and snow (E grid) - emassdti ! mass of E-cell/dte (kg/m^2 s) - - real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:), & ! 2 bundled fields - fld3(:,:,:,:), & ! 3 bundled fields - fld4(:,:,:,:) ! 4 bundled fields - - real (kind=dbl_kind), allocatable :: & - strengthU(:,:,:), & ! strength averaged to U points - divergU (:,:,:), & ! div array on U points, differentiate from divu - tensionU (:,:,:), & ! tension array on U points - shearU (:,:,:), & ! shear array on U points - deltaU (:,:,:), & ! delta array on U points - zetax2T (:,:,:), & ! zetax2 = 2*zeta (bulk viscosity) - zetax2U (:,:,:), & ! zetax2T averaged to U points - etax2T (:,:,:), & ! etax2 = 2*eta (shear viscosity) - etax2U (:,:,:) ! etax2T averaged to U points - real (kind=dbl_kind), dimension(nx_block,ny_block,8):: & strtmp ! stress combinations for momentum equation @@ -218,9 +292,6 @@ subroutine evp (dt) type (block) :: & this_block ! block information for current block - logical (kind=log_kind), save :: & - first_time = .true. ! first time logical - character(len=*), parameter :: subname = '(evp)' call ice_timer_start(timer_dynamics) ! dynamics @@ -229,21 +300,8 @@ subroutine evp (dt) ! Initialize !----------------------------------------------------------------- - allocate(fld2(nx_block,ny_block,2,max_blocks)) - allocate(fld3(nx_block,ny_block,3,max_blocks)) - allocate(fld4(nx_block,ny_block,4,max_blocks)) - if (grid_ice == 'CD' .or. 
grid_ice == 'C') then - allocate(strengthU(nx_block,ny_block,max_blocks)) - allocate(divergU (nx_block,ny_block,max_blocks)) - allocate(tensionU (nx_block,ny_block,max_blocks)) - allocate(shearU (nx_block,ny_block,max_blocks)) - allocate(deltaU (nx_block,ny_block,max_blocks)) - allocate(zetax2T (nx_block,ny_block,max_blocks)) - allocate(zetax2U (nx_block,ny_block,max_blocks)) - allocate(etax2T (nx_block,ny_block,max_blocks)) - allocate(etax2U (nx_block,ny_block,max_blocks)) strengthU(:,:,:) = c0 divergU (:,:,:) = c0 tensionU (:,:,:) = c0 @@ -383,20 +441,20 @@ subroutine evp (dt) endif endif - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) SCHEDULE(runtime) - do iblk = 1, nblocks + if (trim(grid_ice) == 'B') then + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) SCHEDULE(runtime) + do iblk = 1, nblocks - !----------------------------------------------------------------- - ! more preparation for dynamics - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! 
more preparation for dynamics + !----------------------------------------------------------------- - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi - if (trim(grid_ice) == 'B') then call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & icellT (iblk), icellU (iblk), & @@ -409,7 +467,7 @@ subroutine evp (dt) strairxU (:,:,iblk), strairyU (:,:,iblk), & ss_tltxU (:,:,iblk), ss_tltyU (:,:,iblk), & iceTmask (:,:,iblk), iceUmask (:,:,iblk), & - fmU (:,:,iblk), dt, & + fmU (:,:,iblk), dt , & strtltxU (:,:,iblk), strtltyU (:,:,iblk), & strocnxU (:,:,iblk), strocnyU (:,:,iblk), & strintxU (:,:,iblk), strintyU (:,:,iblk), & @@ -426,7 +484,35 @@ subroutine evp (dt) uvel (:,:,iblk), vvel (:,:,iblk), & TbU (:,:,iblk)) - elseif (trim(grid_ice) == 'CD' .or. grid_ice == 'C') then + !----------------------------------------------------------------- + ! ice strength + !----------------------------------------------------------------- + + strength(:,:,iblk) = c0 ! initialize + do ij = 1, icellT(iblk) + i = indxTi(ij, iblk) + j = indxTj(ij, iblk) + call icepack_ice_strength(ncat = ncat, & + aice = aice (i,j, iblk), & + vice = vice (i,j, iblk), & + aice0 = aice0 (i,j, iblk), & + aicen = aicen (i,j,:,iblk), & + vicen = vicen (i,j,:,iblk), & + strength = strength(i,j, iblk)) + enddo ! ij + + enddo ! iblk + !$OMP END PARALLEL DO + elseif (trim(grid_ice) == 'CD' .or. 
grid_ice == 'C') then + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,ij,i,j) SCHEDULE(runtime) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + call dyn_prep2 (nx_block, ny_block, & ilo, ihi, jlo, jhi, & icellT (iblk), icellU (iblk), & @@ -455,122 +541,108 @@ subroutine evp (dt) uvel_init (:,:,iblk), vvel_init (:,:,iblk), & uvel (:,:,iblk), vvel (:,:,iblk), & TbU (:,:,iblk)) - endif - !----------------------------------------------------------------- - ! ice strength - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! ice strength + !----------------------------------------------------------------- - strength(:,:,iblk) = c0 ! initialize - do ij = 1, icellT(iblk) - i = indxTi(ij, iblk) - j = indxTj(ij, iblk) - call icepack_ice_strength(ncat = ncat, & - aice = aice (i,j, iblk), & - vice = vice (i,j, iblk), & - aice0 = aice0 (i,j, iblk), & - aicen = aicen (i,j,:,iblk), & - vicen = vicen (i,j,:,iblk), & - strength = strength(i,j, iblk) ) - enddo ! ij - - enddo ! iblk - !$OMP END PARALLEL DO + strength(:,:,iblk) = c0 ! initialize + do ij = 1, icellT(iblk) + i = indxTi(ij, iblk) + j = indxTj(ij, iblk) + call icepack_ice_strength(ncat = ncat, & + aice = aice (i,j, iblk), & + vice = vice (i,j, iblk), & + aice0 = aice0 (i,j, iblk), & + aicen = aicen (i,j,:,iblk), & + vicen = vicen (i,j,:,iblk), & + strength = strength(i,j, iblk) ) + enddo ! ij - if (grid_ice == 'CD' .or. grid_ice == 'C') then - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,i,j) SCHEDULE(runtime) - do iblk = 1, nblocks + !----------------------------------------------------------------- + ! more preparation for dynamics on N grid + !----------------------------------------------------------------- - !----------------------------------------------------------------- - ! 
more preparation for dynamics on N grid - !----------------------------------------------------------------- + call dyn_prep2 (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + icellT (iblk), icellN (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxNi (:,iblk), indxNj (:,iblk), & + aiN (:,:,iblk), nmass (:,:,iblk), & + nmassdti (:,:,iblk), fcorN_blk (:,:,iblk), & + nmask (:,:,iblk), & + uocnN (:,:,iblk), vocnN (:,:,iblk), & + strairxN (:,:,iblk), strairyN (:,:,iblk), & + ss_tltxN (:,:,iblk), ss_tltyN (:,:,iblk), & + iceTmask (:,:,iblk), iceNmask (:,:,iblk), & + fmN (:,:,iblk), dt , & + strtltxN (:,:,iblk), strtltyN (:,:,iblk), & + strocnxN (:,:,iblk), strocnyN (:,:,iblk), & + strintxN (:,:,iblk), strintyN (:,:,iblk), & + taubxN (:,:,iblk), taubyN (:,:,iblk), & + waterxN (:,:,iblk), wateryN (:,:,iblk), & + forcexN (:,:,iblk), forceyN (:,:,iblk), & + stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & + stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & + stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & + stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & + stress12_1(:,:,iblk), stress12_2(:,:,iblk), & + stress12_3(:,:,iblk), stress12_4(:,:,iblk), & + uvelN_init(:,:,iblk), vvelN_init(:,:,iblk), & + uvelN (:,:,iblk), vvelN (:,:,iblk), & + TbN (:,:,iblk)) - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi + !----------------------------------------------------------------- + ! 
more preparation for dynamics on E grid + !----------------------------------------------------------------- - call dyn_prep2 (nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - icellT (iblk), icellN (iblk), & - indxTi (:,iblk), indxTj (:,iblk), & - indxNi (:,iblk), indxNj (:,iblk), & - aiN (:,:,iblk), nmass (:,:,iblk), & - nmassdti (:,:,iblk), fcorN_blk (:,:,iblk), & - nmask (:,:,iblk), & - uocnN (:,:,iblk), vocnN (:,:,iblk), & - strairxN (:,:,iblk), strairyN (:,:,iblk), & - ss_tltxN (:,:,iblk), ss_tltyN (:,:,iblk), & - iceTmask (:,:,iblk), iceNmask (:,:,iblk), & - fmN (:,:,iblk), dt, & - strtltxN (:,:,iblk), strtltyN (:,:,iblk), & - strocnxN (:,:,iblk), strocnyN (:,:,iblk), & - strintxN (:,:,iblk), strintyN (:,:,iblk), & - taubxN (:,:,iblk), taubyN (:,:,iblk), & - waterxN (:,:,iblk), wateryN (:,:,iblk), & - forcexN (:,:,iblk), forceyN (:,:,iblk), & - stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & - stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & - stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & - stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & - stress12_1(:,:,iblk), stress12_2(:,:,iblk), & - stress12_3(:,:,iblk), stress12_4(:,:,iblk), & - uvelN_init(:,:,iblk), vvelN_init(:,:,iblk), & - uvelN (:,:,iblk), vvelN (:,:,iblk), & - TbN (:,:,iblk)) + call dyn_prep2 (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + icellT (iblk), icellE (iblk), & + indxTi (:,iblk), indxTj (:,iblk), & + indxEi (:,iblk), indxEj (:,iblk), & + aiE (:,:,iblk), emass (:,:,iblk), & + emassdti (:,:,iblk), fcorE_blk (:,:,iblk), & + emask (:,:,iblk), & + uocnE (:,:,iblk), vocnE (:,:,iblk), & + strairxE (:,:,iblk), strairyE (:,:,iblk), & + ss_tltxE (:,:,iblk), ss_tltyE (:,:,iblk), & + iceTmask (:,:,iblk), iceEmask (:,:,iblk), & + fmE (:,:,iblk), dt , & + strtltxE (:,:,iblk), strtltyE (:,:,iblk), & + strocnxE (:,:,iblk), strocnyE (:,:,iblk), & + strintxE (:,:,iblk), strintyE (:,:,iblk), & + taubxE (:,:,iblk), taubyE (:,:,iblk), & + waterxE (:,:,iblk), wateryE (:,:,iblk), & + forcexE (:,:,iblk), forceyE 
(:,:,iblk), & + stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & + stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & + stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & + stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & + stress12_1(:,:,iblk), stress12_2(:,:,iblk), & + stress12_3(:,:,iblk), stress12_4(:,:,iblk), & + uvelE_init(:,:,iblk), vvelE_init(:,:,iblk), & + uvelE (:,:,iblk), vvelE (:,:,iblk), & + TbE (:,:,iblk)) - !----------------------------------------------------------------- - ! more preparation for dynamics on E grid - !----------------------------------------------------------------- - call dyn_prep2 (nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - icellT (iblk), icellE (iblk), & - indxTi (:,iblk), indxTj (:,iblk), & - indxEi (:,iblk), indxEj (:,iblk), & - aiE (:,:,iblk), emass (:,:,iblk), & - emassdti (:,:,iblk), fcorE_blk (:,:,iblk), & - emask (:,:,iblk), & - uocnE (:,:,iblk), vocnE (:,:,iblk), & - strairxE (:,:,iblk), strairyE (:,:,iblk), & - ss_tltxE (:,:,iblk), ss_tltyE (:,:,iblk), & - iceTmask (:,:,iblk), iceEmask (:,:,iblk), & - fmE (:,:,iblk), dt, & - strtltxE (:,:,iblk), strtltyE (:,:,iblk), & - strocnxE (:,:,iblk), strocnyE (:,:,iblk), & - strintxE (:,:,iblk), strintyE (:,:,iblk), & - taubxE (:,:,iblk), taubyE (:,:,iblk), & - waterxE (:,:,iblk), wateryE (:,:,iblk), & - forcexE (:,:,iblk), forceyE (:,:,iblk), & - stressp_1 (:,:,iblk), stressp_2 (:,:,iblk), & - stressp_3 (:,:,iblk), stressp_4 (:,:,iblk), & - stressm_1 (:,:,iblk), stressm_2 (:,:,iblk), & - stressm_3 (:,:,iblk), stressm_4 (:,:,iblk), & - stress12_1(:,:,iblk), stress12_2(:,:,iblk), & - stress12_3(:,:,iblk), stress12_4(:,:,iblk), & - uvelE_init(:,:,iblk), vvelE_init(:,:,iblk), & - uvelE (:,:,iblk), vvelE (:,:,iblk), & - TbE (:,:,iblk)) - - - do i=1,nx_block - do j=1,ny_block - if (.not.iceUmask(i,j,iblk)) then - stresspU (i,j,iblk) = c0 - stressmU (i,j,iblk) = c0 - stress12U(i,j,iblk) = c0 - endif - if (.not.iceTmask(i,j,iblk)) then - stresspT (i,j,iblk) = c0 - stressmT (i,j,iblk) = c0 - 
stress12T(i,j,iblk) = c0 - endif - enddo - enddo - enddo ! iblk - !$OMP END PARALLEL DO + do i=1,nx_block + do j=1,ny_block + if (.not.iceUmask(i,j,iblk)) then + stresspU (i,j,iblk) = c0 + stressmU (i,j,iblk) = c0 + stress12U(i,j,iblk) = c0 + endif + if (.not.iceTmask(i,j,iblk)) then + stresspT (i,j,iblk) = c0 + stressmT (i,j,iblk) = c0 + stress12T(i,j,iblk) = c0 + endif + enddo + enddo + enddo ! iblk + !$OMP END PARALLEL DO endif ! grid_ice @@ -721,10 +793,6 @@ subroutine evp (dt) if (evp_algorithm == "shared_mem_1d" ) then - if (first_time .and. my_task == master_task) then - write(nu_diag,'(3a)') subname,' Entering evp_algorithm version ',evp_algorithm - first_time = .false. - endif if (trim(grid_type) == 'tripole') then call abort_ice(trim(subname)//' & & Kernel not tested on tripole grid. Set evp_algorithm=standard_2d') @@ -877,7 +945,8 @@ subroutine evp (dt) uarea (:,:,iblk), DminTarea (:,:,iblk), & strength (:,:,iblk), shearU (:,:,iblk), & zetax2T (:,:,iblk), etax2T (:,:,iblk), & - stresspT (:,:,iblk), stressmT (:,:,iblk)) + stresspT (:,:,iblk), stressmT (:,:,iblk), & + stress12T (:,:,iblk)) enddo !$OMP END PARALLEL DO @@ -1211,12 +1280,6 @@ subroutine evp (dt) call ice_timer_stop(timer_evp_2d) endif ! evp_algorithm - deallocate(fld2,fld3,fld4) - if (grid_ice == 'CD' .or. grid_ice == 'C') then - deallocate(strengthU, divergU, tensionU, shearU, deltaU) - deallocate(zetax2T, zetax2U, etax2T, etax2U) - endif - if (maskhalo_dyn) then call ice_HaloDestroy(halo_info_mask) endif @@ -1658,17 +1721,18 @@ end subroutine stress ! Kimmritz, M., S. Danilov and M. Losch (2016). The adaptive EVP method ! for solving the sea ice momentum equation. Ocean Model., 101, 59-67. 
- subroutine stressC_T (nx_block, ny_block , & - icellT , & - indxTi , indxTj , & - uvelE , vvelE , & - uvelN , vvelN , & - dxN , dyE , & - dxT , dyT , & - uarea , DminTarea, & - strength, shearU , & - zetax2T , etax2T , & - stressp , stressm ) + subroutine stressC_T (nx_block, ny_block , & + icellT , & + indxTi , indxTj , & + uvelE , vvelE , & + uvelN , vvelN , & + dxN , dyE , & + dxT , dyT , & + uarea , DminTarea , & + strength , shearU , & + zetax2T , etax2T , & + stresspT , stressmT , & + stress12T) use ice_dyn_shared, only: strain_rates_T, capping, & visc_replpress, e_factor @@ -1691,15 +1755,16 @@ subroutine stressC_T (nx_block, ny_block , & dxT , & ! width of T-cell through the middle (m) dyT , & ! height of T-cell through the middle (m) strength , & ! ice strength (N/m) - shearU , & ! shearU + shearU , & ! shearU local for this routine uarea , & ! area of u cell DminTarea ! deltaminEVP*tarea real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & zetax2T , & ! zetax2 = 2*zeta (bulk viscosity) etax2T , & ! etax2 = 2*eta (shear viscosity) - stressp , & ! sigma11+sigma22 - stressm ! sigma11-sigma22 + stresspT , & ! sigma11+sigma22 + stressmT , & ! sigma11-sigma22 + stress12T ! sigma12 ! local variables @@ -1712,7 +1777,9 @@ subroutine stressC_T (nx_block, ny_block , & real (kind=dbl_kind) :: & shearTsqr , & ! strain rates squared at T point + shearT , & ! strain rate at T point DeltaT , & ! delt at T point + uareaavgr , & ! 1 / uarea avg rep_prsT ! replacement pressure at T point character(len=*), parameter :: subname = '(stressC_T)' @@ -1728,7 +1795,7 @@ subroutine stressC_T (nx_block, ny_block , & uvelN (:,:), vvelN (:,:), & dxN (:,:), dyE (:,:), & dxT (:,:), dyT (:,:), & - divT (:,:), tensionT(:,:) ) + divT (:,:), tensionT(:,:)) do ij = 1, icellT i = indxTi(ij) @@ -1739,11 +1806,19 @@ subroutine stressC_T (nx_block, ny_block , & ! 
U point values (Bouillon et al., 2013, Kimmritz et al., 2016 !----------------------------------------------------------------- + uareaavgr = c1/(uarea(i,j)+uarea(i,j-1)+uarea(i-1,j-1)+uarea(i-1,j)) + shearTsqr = (shearU(i ,j )**2 * uarea(i ,j ) & + shearU(i ,j-1)**2 * uarea(i ,j-1) & + shearU(i-1,j-1)**2 * uarea(i-1,j-1) & + shearU(i-1,j )**2 * uarea(i-1,j )) & - / (uarea(i,j)+uarea(i,j-1)+uarea(i-1,j-1)+uarea(i-1,j)) + * uareaavgr + + shearT = (shearU(i ,j ) * uarea(i ,j ) & + + shearU(i ,j-1) * uarea(i ,j-1) & + + shearU(i-1,j-1) * uarea(i-1,j-1) & + + shearU(i-1,j ) * uarea(i-1,j )) & + * uareaavgr DeltaT = sqrt(divT(i,j)**2 + e_factor*(tensionT(i,j)**2 + shearTsqr)) @@ -1752,7 +1827,7 @@ subroutine stressC_T (nx_block, ny_block , & !----------------------------------------------------------------- call visc_replpress (strength(i,j), DminTarea(i,j), DeltaT, & - zetax2T (i,j), etax2T (i,j), rep_prsT, capping) + zetax2T (i,j), etax2T(i,j), rep_prsT, capping) !----------------------------------------------------------------- ! the stresses ! kg/s^2 @@ -1760,11 +1835,14 @@ subroutine stressC_T (nx_block, ny_block , & ! NOTE: for comp. efficiency 2 x zeta and 2 x eta are used in the code - stressp(i,j) = (stressp(i,j)*(c1-arlx1i*revp) & - + arlx1i*(zetax2T(i,j)*divT(i,j) - rep_prsT)) * denom1 + stresspT(i,j) = (stresspT (i,j)*(c1-arlx1i*revp) & + + arlx1i*(zetax2T(i,j)*divT(i,j) - rep_prsT)) * denom1 - stressm(i,j) = (stressm(i,j)*(c1-arlx1i*revp) & - + arlx1i*etax2T(i,j)*tensionT(i,j)) * denom1 + stressmT(i,j) = (stressmT (i,j)*(c1-arlx1i*revp) & + + arlx1i*etax2T(i,j)*tensionT(i,j)) * denom1 + + stress12T(i,j) = (stress12T(i,j)*(c1-arlx1i*revp) & + + arlx1i*p5*etax2T(i,j)*shearT ) * denom1 enddo ! ij @@ -1783,13 +1861,13 @@ end subroutine stressC_T ! Kimmritz, M., S. Danilov and M. Losch (2016). The adaptive EVP method ! for solving the sea ice momentum equation. Ocean Model., 101, 59-67. 
- subroutine stressC_U (nx_block , ny_block, & - icellU, & - indxUi , indxUj, & + subroutine stressC_U (nx_block , ny_block ,& + icellU ,& + indxUi , indxUj ,& uarea , & - etax2U , deltaU, & - strengthU, shearU, & - stress12 ) + etax2U , deltaU ,& + strengthU, shearU ,& + stress12U) use ice_dyn_shared, only: visc_replpress, & visc_method, deltaminEVP, capping @@ -1810,7 +1888,7 @@ subroutine stressC_U (nx_block , ny_block, & strengthU ! ice strength at the U point real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & - stress12 ! sigma12 + stress12U ! sigma12 ! local variables @@ -1829,15 +1907,15 @@ subroutine stressC_U (nx_block , ny_block, & ! viscosities and replacement pressure at U point ! avg_zeta: Bouillon et al. 2013, C1 method of Kimmritz et al. 2016 ! avg_strength: C2 method of Kimmritz et al. 2016 - ! if outside do and stress12 equation repeated in each loop for performance + ! if outside do and stress12U equation repeated in each loop for performance !----------------------------------------------------------------- if (visc_method == 'avg_zeta') then do ij = 1, icellU i = indxUi(ij) j = indxUj(ij) - stress12(i,j) = (stress12(i,j)*(c1-arlx1i*revp) & - + arlx1i*p5*etax2U(i,j)*shearU(i,j)) * denom1 + stress12U(i,j) = (stress12U(i,j)*(c1-arlx1i*revp) & + + arlx1i*p5*etax2U(i,j)*shearU(i,j)) * denom1 enddo elseif (visc_method == 'avg_strength') then @@ -1847,10 +1925,10 @@ subroutine stressC_U (nx_block , ny_block, & DminUarea = deltaminEVP*uarea(i,j) ! only need etax2U here, but other terms are calculated with etax2U ! 
minimal extra calculations here even though it seems like there is - call visc_replpress (strengthU(i,j), DminUarea, DeltaU(i,j), & + call visc_replpress (strengthU(i,j), DminUarea, deltaU(i,j), & lzetax2U , letax2U , lrep_prsU , capping) - stress12(i,j) = (stress12(i,j)*(c1-arlx1i*revp) & - + arlx1i*p5*letax2U*shearU(i,j)) * denom1 + stress12U(i,j) = (stress12U(i,j)*(c1-arlx1i*revp) & + + arlx1i*p5*letax2U*shearU(i,j)) * denom1 enddo endif @@ -1863,18 +1941,18 @@ end subroutine stressC_U ! author: JF Lemieux, ECCC ! Nov 2021 - subroutine stressCD_T (nx_block, ny_block, & - icellT, & - indxTi, indxTj, & - uvelE, vvelE, & - uvelN, vvelN, & - dxN, dyE, & - dxT, dyT, & - DminTarea, & - strength, & - zetax2T, etax2T, & - stresspT, stressmT, & - stress12T ) + subroutine stressCD_T (nx_block, ny_block , & + icellT , & + indxTi , indxTj , & + uvelE , vvelE , & + uvelN , vvelN , & + dxN , dyE , & + dxT , dyT , & + DminTarea, & + strength, & + zetax2T , etax2T , & + stresspT, stressmT , & + stress12T) use ice_dyn_shared, only: strain_rates_T, capping, & visc_replpress @@ -1914,7 +1992,7 @@ subroutine stressCD_T (nx_block, ny_block, & real (kind=dbl_kind), dimension (nx_block,ny_block) :: & divT , & ! divergence at T point tensionT , & ! tension at T point - shearT , & ! sheat at T point + shearT , & ! shear at T point DeltaT ! delt at T point real (kind=dbl_kind) :: & @@ -1935,7 +2013,7 @@ subroutine stressCD_T (nx_block, ny_block, & dxN (:,:), dyE (:,:), & dxT (:,:), dyT (:,:), & divT (:,:), tensionT(:,:), & - shearT(:,:), DeltaT (:,:) ) + shearT(:,:), DeltaT (:,:)) do ij = 1, icellT i = indxTi(ij) @@ -1946,7 +2024,7 @@ subroutine stressCD_T (nx_block, ny_block, & !----------------------------------------------------------------- call visc_replpress (strength(i,j), DminTarea(i,j), DeltaT(i,j), & - zetax2T (i,j), etax2T (i,j), rep_prsT , capping) + zetax2T (i,j), etax2T(i,j), rep_prsT , capping) !----------------------------------------------------------------- ! 
the stresses ! kg/s^2 @@ -1973,19 +2051,18 @@ end subroutine stressCD_T ! author: JF Lemieux, ECCC ! Nov 2021 - subroutine stressCD_U (nx_block, ny_block, & - icellU, & - indxUi, indxUj, & - uarea, & - zetax2U, etax2U, & - strengthU, & - divergU, tensionU, & - shearU, DeltaU, & - stresspU, stressmU, & - stress12U ) - - use ice_dyn_shared, only: strain_rates_U, & - visc_replpress, & + subroutine stressCD_U (nx_block, ny_block, & + icellU , & + indxUi , indxUj , & + uarea , & + zetax2U , etax2U , & + strengthU , & + divergU , tensionU, & + shearU , deltaU , & + stresspU , stressmU, & + stress12U) + + use ice_dyn_shared, only: visc_replpress, & visc_method, deltaminEVP, capping integer (kind=int_kind), intent(in) :: & @@ -2043,7 +2120,7 @@ subroutine stressCD_U (nx_block, ny_block, & DminUarea = deltaminEVP*uarea(i,j) ! only need etax2U here, but other terms are calculated with etax2U ! minimal extra calculations here even though it seems like there is - call visc_replpress (strengthU(i,j), DminUarea, DeltaU(i,j), & + call visc_replpress (strengthU(i,j), DminUarea, deltaU(i,j), & lzetax2U , letax2U , lrep_prsU , capping) endif diff --git a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 b/cicecore/cicedyn/dynamics/ice_dyn_evp_1d.F90 similarity index 99% rename from cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_evp_1d.F90 index e874611bd..b7daab0a0 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_evp_1d.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_evp_1d.F90 @@ -889,7 +889,7 @@ subroutine evp1d_halo_update(NAVEL_len, lb, ub, uvel, vvel, & #ifdef _OPENACC !$acc parallel & - !$acc present(uvel, vvel) & + !$acc present(uvel, vvel) !$acc loop do iw = 1, NAVEL_len if (halo_parent(iw) == 0) cycle diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedyn/dynamics/ice_dyn_shared.F90 similarity index 99% rename from cicecore/cicedynB/dynamics/ice_dyn_shared.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_shared.F90 
index 187ec55cc..a12e6fddd 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_shared.F90 @@ -24,9 +24,8 @@ module ice_dyn_shared implicit none private public :: set_evp_parameters, stepu, stepuv_CD, stepu_C, stepv_C, & - principal_stress, init_dyn, dyn_prep1, dyn_prep2, dyn_finish, & + principal_stress, init_dyn_shared, dyn_prep1, dyn_prep2, dyn_finish, & seabed_stress_factor_LKD, seabed_stress_factor_prob, & - alloc_dyn_shared, & deformations, deformationsC_T, deformationsCD_T, & strain_rates, strain_rates_T, strain_rates_U, & visc_replpress, & @@ -94,6 +93,11 @@ module ice_dyn_shared fcorE_blk(:,:,:), & ! Coriolis parameter at E points (1/s) fcorN_blk(:,:,:) ! Coriolis parameter at N points (1/s) + real (kind=dbl_kind), allocatable, public :: & + fld2(:,:,:,:), & ! 2 bundled fields + fld3(:,:,:,:), & ! 3 bundled fields + fld4(:,:,:,:) ! 4 bundled fields + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & uvel_init , & ! x-component of velocity (m/s), beginning of timestep vvel_init ! y-component of velocity (m/s), beginning of timestep @@ -176,6 +180,15 @@ subroutine alloc_dyn_shared vvel_init (nx_block,ny_block,max_blocks), & ! y-component of velocity (m/s), beginning of timestep iceTmask (nx_block,ny_block,max_blocks), & ! T mask for dynamics iceUmask (nx_block,ny_block,max_blocks), & ! U mask for dynamics + fcor_blk (nx_block,ny_block,max_blocks), & ! Coriolis + DminTarea (nx_block,ny_block,max_blocks), & ! + stat=ierr) + if (ierr/=0) call abort_ice(subname//': Out of memory') + + allocate( & + fld2(nx_block,ny_block,2,max_blocks), & + fld3(nx_block,ny_block,3,max_blocks), & + fld4(nx_block,ny_block,4,max_blocks), & stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') @@ -187,6 +200,8 @@ subroutine alloc_dyn_shared vvelN_init (nx_block,ny_block,max_blocks), & ! y-component of velocity (m/s), beginning of timestep iceEmask (nx_block,ny_block,max_blocks), & ! 
T mask for dynamics iceNmask (nx_block,ny_block,max_blocks), & ! U mask for dynamics + fcorE_blk (nx_block,ny_block,max_blocks), & ! Coriolis + fcorN_blk (nx_block,ny_block,max_blocks), & ! Coriolis stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') endif @@ -197,18 +212,18 @@ end subroutine alloc_dyn_shared ! Initialize parameters and variables needed for the dynamics ! author: Elizabeth C. Hunke, LANL - subroutine init_dyn (dt) + subroutine init_dyn_shared (dt) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks, halo_dynbundle use ice_domain_size, only: max_blocks - use ice_flux, only: rdg_conv, rdg_shear, & + use ice_flux, only: & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & stresspT, stressmT, stress12T, & stresspU, stressmU, stress12U - use ice_state, only: uvel, vvel, uvelE, vvelE, uvelN, vvelN, divu, shear + use ice_state, only: uvel, vvel, uvelE, vvelE, uvelN, vvelN use ice_grid, only: ULAT, NLAT, ELAT, tarea real (kind=dbl_kind), intent(in) :: & @@ -221,10 +236,11 @@ subroutine init_dyn (dt) nprocs, & ! number of processors iblk ! block index - character(len=*), parameter :: subname = '(init_dyn)' + character(len=*), parameter :: subname = '(init_dyn_shared)' call set_evp_parameters (dt) - + ! allocate dyn shared (init_uvel,init_vvel) + call alloc_dyn_shared ! Set halo_dynbundle, this is empirical at this point, could become namelist halo_dynbundle = .true. nprocs = get_num_procs() @@ -237,14 +253,6 @@ subroutine init_dyn (dt) write(nu_diag,*) 'halo_dynbundle =', halo_dynbundle endif - allocate(fcor_blk(nx_block,ny_block,max_blocks)) - allocate(DminTarea(nx_block,ny_block,max_blocks)) - - if (grid_ice == 'CD' .or. 
grid_ice == 'C') then - allocate(fcorE_blk(nx_block,ny_block,max_blocks)) - allocate(fcorN_blk(nx_block,ny_block,max_blocks)) - endif - !$OMP PARALLEL DO PRIVATE(iblk,i,j) SCHEDULE(runtime) do iblk = 1, nblocks do j = 1, ny_block @@ -260,11 +268,6 @@ subroutine init_dyn (dt) vvelN(i,j,iblk) = c0 endif - ! strain rates - divu (i,j,iblk) = c0 - shear(i,j,iblk) = c0 - rdg_conv (i,j,iblk) = c0 - rdg_shear(i,j,iblk) = c0 ! Coriolis parameter if (trim(coriolis) == 'constant') then @@ -330,7 +333,7 @@ subroutine init_dyn (dt) enddo ! iblk !$OMP END PARALLEL DO - end subroutine init_dyn + end subroutine init_dyn_shared !======================================================================= ! Set parameters needed for the evp dynamics. diff --git a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 b/cicecore/cicedyn/dynamics/ice_dyn_vp.F90 similarity index 99% rename from cicecore/cicedynB/dynamics/ice_dyn_vp.F90 rename to cicecore/cicedyn/dynamics/ice_dyn_vp.F90 index 6534e7568..3915004b4 100644 --- a/cicecore/cicedynB/dynamics/ice_dyn_vp.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_vp.F90 @@ -48,7 +48,7 @@ module ice_dyn_vp use ice_dyn_shared, only: dyn_prep1, dyn_prep2, dyn_finish, & cosw, sinw, fcor_blk, uvel_init, vvel_init, & seabed_stress_factor_LKD, seabed_stress_factor_prob, seabed_stress_method, & - seabed_stress, Ktens, stack_fields, unstack_fields + seabed_stress, Ktens, stack_fields, unstack_fields, fld2, fld3, fld4 use ice_fileunits, only: nu_diag use ice_flux, only: fmU use ice_global_reductions, only: global_sum @@ -88,7 +88,7 @@ module ice_dyn_vp reltol_andacc ! relative tolerance for Anderson acceleration character (len=char_len), public :: & - precond , & ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), + precond , & ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), ! 'pgmres' (Jacobi-preconditioned GMRES) algo_nonlin , & ! nonlinear algorithm: 'picard' (Picard iteration), 'anderson' (Anderson acceleration) ortho_type ! 
type of orthogonalization for FGMRES ('cgs' or 'mgs') @@ -105,11 +105,6 @@ module ice_dyn_vp indxUi(:,:) , & ! compressed index in i-direction indxUj(:,:) ! compressed index in j-direction - real (kind=dbl_kind), allocatable :: & - fld2(:,:,:,:), & ! work array for boundary updates - fld3(:,:,:,:), & ! work array for boundary updates - fld4(:,:,:,:) ! work array for boundary updates - !======================================================================= contains @@ -126,6 +121,8 @@ subroutine init_vp use ice_constants, only: c1, & field_loc_center, field_type_scalar use ice_domain, only: blocks_ice, halo_info + use ice_calendar, only: dt_dyn + use ice_dyn_shared, only: init_dyn_shared ! use ice_grid, only: tarea ! local variables @@ -137,15 +134,14 @@ subroutine init_vp type (block) :: & this_block ! block information for current block + call init_dyn_shared(dt_dyn) + ! Initialize module variables allocate(icellT(max_blocks), icellU(max_blocks)) allocate(indxTi(nx_block*ny_block, max_blocks), & indxTj(nx_block*ny_block, max_blocks), & indxUi(nx_block*ny_block, max_blocks), & indxUj(nx_block*ny_block, max_blocks)) - allocate(fld2(nx_block,ny_block,2,max_blocks)) - allocate(fld3(nx_block,ny_block,3,max_blocks)) - allocate(fld4(nx_block,ny_block,4,max_blocks)) end subroutine init_vp @@ -3348,7 +3344,7 @@ subroutine pgmres (zetax2 , etax2 , & ! Update workspace with boundary values ! NOTE: skipped for efficiency since this is just a preconditioner - ! unless bfbflag is active + ! unless bfbflag is active if (bfbflag /= 'off') then call stack_fields(workspace_x, workspace_y, fld2) call ice_timer_start(timer_bound) @@ -3569,7 +3565,7 @@ subroutine precondition(zetax2 , etax2, & type (ice_halo), intent(in) :: & halo_info_mask ! ghost cell update info for masked halo - + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(in) :: & vx , & ! input vector (x components) vy ! 
input vector (y components) diff --git a/cicecore/cicedynB/dynamics/ice_transport_driver.F90 b/cicecore/cicedyn/dynamics/ice_transport_driver.F90 similarity index 100% rename from cicecore/cicedynB/dynamics/ice_transport_driver.F90 rename to cicecore/cicedyn/dynamics/ice_transport_driver.F90 diff --git a/cicecore/cicedynB/dynamics/ice_transport_remap.F90 b/cicecore/cicedyn/dynamics/ice_transport_remap.F90 similarity index 100% rename from cicecore/cicedynB/dynamics/ice_transport_remap.F90 rename to cicecore/cicedyn/dynamics/ice_transport_remap.F90 diff --git a/cicecore/cicedynB/general/ice_flux.F90 b/cicecore/cicedyn/general/ice_flux.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_flux.F90 rename to cicecore/cicedyn/general/ice_flux.F90 diff --git a/cicecore/cicedynB/general/ice_flux_bgc.F90 b/cicecore/cicedyn/general/ice_flux_bgc.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_flux_bgc.F90 rename to cicecore/cicedyn/general/ice_flux_bgc.F90 diff --git a/cicecore/cicedynB/general/ice_forcing.F90 b/cicecore/cicedyn/general/ice_forcing.F90 similarity index 99% rename from cicecore/cicedynB/general/ice_forcing.F90 rename to cicecore/cicedyn/general/ice_forcing.F90 index ff79778c5..541efb282 100755 --- a/cicecore/cicedynB/general/ice_forcing.F90 +++ b/cicecore/cicedyn/general/ice_forcing.F90 @@ -118,7 +118,7 @@ module ice_forcing real (kind=dbl_kind), dimension(:,:,:,:,:), allocatable :: & wave_spectrum_data ! field values at 2 temporal data points - + character(char_len), public :: & atm_data_format, & ! 'bin'=binary or 'nc'=netcdf ocn_data_format, & ! 
'bin'=binary or 'nc'=netcdf @@ -5650,7 +5650,7 @@ subroutine get_wave_spec file=__FILE__, line=__LINE__) else #ifdef USE_NETCDF - call wave_spec_data + call wave_spec_data #else write (nu_diag,*) "wave spectrum file not available, requires cpp USE_NETCDF" write (nu_diag,*) "wave spectrum file not available, using default profile" @@ -5682,9 +5682,9 @@ subroutine wave_spec_data use ice_grid, only: hm, tlon, tlat, tmask, umask use ice_calendar, only: days_per_year, use_leap_years - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ncid , & ! netcdf file id - i, j, freq , & + i, j, freq , & ixm,ixx,ixp , & ! record numbers for neighboring months recnum , & ! record number maxrec , & ! maximum record number @@ -5710,7 +5710,7 @@ subroutine wave_spec_data wave_spectrum_profile ! wave spectrum character(len=64) :: fieldname !netcdf field name - character(char_len_long) :: spec_file + character(char_len_long) :: spec_file character(char_len) :: wave_spec_type logical (kind=log_kind) :: wave_spec character(len=*), parameter :: subname = '(wave_spec_data)' @@ -5736,7 +5736,7 @@ subroutine wave_spec_data yr = fyear ! current year !------------------------------------------------------------------- ! 6-hourly data - ! + ! ! Assume that the 6-hourly value is located at the end of the ! 6-hour period. This is the convention for NCEP reanalysis data. ! E.g. record 1 gives conditions at 6 am GMT on 1 January. @@ -5785,9 +5785,9 @@ subroutine wave_spec_data call ice_read_nc_xyf(ncid,recnum,'efreq',wave_spectrum_data(:,:,:,2,:),debug_n_d, & field_loc=field_loc_center, & field_type=field_type_scalar) - call ice_close_nc(ncid) + call ice_close_nc(ncid) + - ! 
Interpolate call interpolate_wavespec_data (wave_spectrum_data, wave_spectrum) diff --git a/cicecore/cicedynB/general/ice_forcing_bgc.F90 b/cicecore/cicedyn/general/ice_forcing_bgc.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_forcing_bgc.F90 rename to cicecore/cicedyn/general/ice_forcing_bgc.F90 diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedyn/general/ice_init.F90 similarity index 94% rename from cicecore/cicedynB/general/ice_init.F90 rename to cicecore/cicedyn/general/ice_init.F90 index 45ae58d8b..d56ad002e 100644 --- a/cicecore/cicedynB/general/ice_init.F90 +++ b/cicecore/cicedyn/general/ice_init.F90 @@ -17,11 +17,11 @@ module ice_init use ice_constants, only: c0, c1, c2, c3, c5, c12, p2, p3, p5, p75, p166, & cm_to_m use ice_exit, only: abort_ice - use ice_fileunits, only: nu_nml, nu_diag, nu_diag_set, nml_filename, diag_type, & + use ice_fileunits, only: nu_nml, nu_diag, nml_filename, diag_type, & ice_stdout, get_fileunit, release_fileunit, bfbflag, flush_fileunit, & ice_IOUnitsMinUnit, ice_IOUnitsMaxUnit #ifdef CESMCOUPLED - use ice_fileunits, only: inst_suffix + use ice_fileunits, only: inst_suffix, nu_diag_set #endif use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_aggregate @@ -124,6 +124,8 @@ subroutine input_data use ice_restoring, only: restore_ice use ice_timers, only: timer_stats use ice_memusage, only: memory_stats + use ice_fileunits, only: goto_nml + #ifdef CESMCOUPLED use shr_file_mod, only: shr_file_setIO #endif @@ -151,7 +153,7 @@ subroutine input_data kitd, kcatbound, ktransport character (len=char_len) :: shortwave, albedo_type, conduct, fbot_xfer_type, & - tfrz_option, frzpnd, atmbndy, wave_spec_type, snwredist, snw_aging_table, & + tfrz_option, saltflux_option, frzpnd, atmbndy, wave_spec_type, snwredist, snw_aging_table, & capping_method logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, & @@ -163,9 +165,11 @@ 
subroutine input_data integer (kind=int_kind) :: numin, numax ! unit number limits integer (kind=int_kind) :: rplvl, rptopo - real (kind=dbl_kind) :: Cf, ksno, puny + real (kind=dbl_kind) :: Cf, ksno, puny, ice_ref_salinity + character (len=char_len) :: abort_list - character (len=128) :: tmpstr2 + character (len=char_len) :: nml_name ! namelist name + character (len=char_len_long) :: tmpstr2 character(len=*), parameter :: subname='(input_data)' @@ -260,6 +264,7 @@ subroutine input_data highfreq, natmiter, atmiter_conv, calc_dragio, & ustar_min, emissivity, iceruf, iceruf_ocn, & fbot_xfer_type, update_ocn_f, l_mpond_fresh, tfrz_option, & + saltflux_option,ice_ref_salinity, & oceanmixed_ice, restore_ice, restore_ocn, trestore, & precip_units, default_season, wave_spec_type,nfreq, & atm_data_type, ocn_data_type, bgc_data_type, fe_data_type, & @@ -497,6 +502,8 @@ subroutine input_data precip_units = 'mks' ! 'mm_per_month' or ! 'mm_per_sec' = 'mks' = kg/m^2 s tfrz_option = 'mushy' ! freezing temp formulation + saltflux_option = 'constant' ! saltflux calculation + ice_ref_salinity = 4.0_dbl_kind ! Ice reference salinity for coupling oceanmixed_ice = .false. ! if true, use internal ocean mixed layer wave_spec_type = 'none' ! type of wave spectrum forcing nfreq = 25 ! number of wave frequencies @@ -584,6 +591,7 @@ subroutine input_data if (my_task == master_task) then + ! open namelist file call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then @@ -592,141 +600,228 @@ subroutine input_data file=__FILE__, line=__LINE__) endif - write(nu_diag,*) subname,' Reading setup_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read setup_nml + nml_name = 'setup_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! 
goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: setup_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=setup_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: setup_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading grid_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read grid_nml + nml_name = 'grid_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: grid_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=grid_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' //trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: grid_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading tracer_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read tracer_nml + nml_name = 'tracer_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! 
goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: tracer_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=tracer_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' //trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: tracer_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading thermo_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read thermo_nml + nml_name = 'thermo_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: thermo_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=thermo_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: thermo_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading dynamics_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read dynamics_nml + nml_name = 'dynamics_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! 
goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: dynamics_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=dynamics_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: dynamics_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading shortwave_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read shortwave_nml + nml_name = 'shortwave_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: shortwave_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=shortwave_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '//& + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: shortwave_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading ponds_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read ponds_nml + nml_name = 'ponds_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! 
goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: ponds_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=ponds_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: ponds_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading snow_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read snow_nml + nml_name = 'snow_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: snow_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=snow_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: snow_nml reading ', & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) subname,' Reading forcing_nml' - rewind(unit=nu_nml, iostat=nml_error) + ! read forcing_nml + nml_name = 'forcing_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! 
goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: forcing_nml rewind ', & + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & file=__FILE__, line=__LINE__) endif + + ! read namelist nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=forcing_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '// trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: forcing_nml reading ', & - file=__FILE__, line=__LINE__) - endif + ! done reading namelist. close(nu_nml) call release_fileunit(nu_nml) endif @@ -758,8 +853,8 @@ subroutine input_data ! each task gets unique ice log filename when if test is true, for debugging if (1 == 0) then call get_fileUnit(nu_diag) - write(tmpstr,'(a,i4.4)') "ice.log.task_",my_task - open(nu_diag,file=tmpstr) + write(tmpstr2,'(a,i4.4)') "ice.log.task_",my_task + open(nu_diag,file=tmpstr2) endif end if if (trim(ice_ic) /= 'default' .and. & @@ -979,6 +1074,8 @@ subroutine input_data call broadcast_scalar(wave_spec_file, master_task) call broadcast_scalar(nfreq, master_task) call broadcast_scalar(tfrz_option, master_task) + call broadcast_scalar(saltflux_option, master_task) + call broadcast_scalar(ice_ref_salinity, master_task) call broadcast_scalar(ocn_data_format, master_task) call broadcast_scalar(bgc_data_type, master_task) call broadcast_scalar(fe_data_type, master_task) @@ -1414,6 +1511,12 @@ subroutine input_data write(nu_diag,*) subname//' WARNING: For consistency, set tfrz_option = mushy' endif endif + if (ktherm == 1 .and. 
trim(saltflux_option) /= 'constant') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and saltflux_option = ',trim(saltflux_option) + write(nu_diag,*) subname//' WARNING: For consistency, set saltflux_option = constant' + endif + endif !tcraig if (ktherm == 1 .and. .not.sw_redist) then if (my_task == master_task) then @@ -1532,7 +1635,7 @@ subroutine input_data write(nu_diag,*) subname//' WARNING: tr_fsd=T but wave_spec=F - not recommended' endif end if - + ! compute grid locations for thermo, u and v fields grid_ice_thrm = 'T' @@ -1974,6 +2077,10 @@ subroutine input_data write(nu_diag,*) ' WARNING: will impact ocean forcing interaction' write(nu_diag,*) ' WARNING: coupled forcing will be modified by mixed layer routine' endif + write(nu_diag,1030) ' saltflux_option = ', trim(saltflux_option) + if (trim(saltflux_option) == 'constant') then + write(nu_diag,1002) ' ice_ref_salinity = ',ice_ref_salinity + endif if (trim(tfrz_option) == 'minus1p8') then tmpstr2 = ' : constant ocean freezing temperature (-1.8C)' elseif (trim(tfrz_option) == 'linear_salt') then @@ -2027,11 +2134,14 @@ subroutine input_data if (trim(wave_spec_type) == 'none') then tmpstr2 = ' : no wave data provided, no wave-ice interactions' elseif (trim(wave_spec_type) == 'profile') then - tmpstr2 = ' : use fixed dummy wave spectrum for testing, sea surface height generated using constant phase (1 iteration of wave fracture)' + tmpstr2 = ' : use fixed dummy wave spectrum for testing, sea surface height generated '// & + 'using constant phase (1 iteration of wave fracture)' elseif (trim(wave_spec_type) == 'constant') then - tmpstr2 = ' : wave spectrum data file provided, sea surface height generated using constant phase (1 iteration of wave fracture)' + tmpstr2 = ' : wave spectrum data file provided, sea surface height generated '// & + 'using constant phase (1 iteration of wave fracture)' elseif (trim(wave_spec_type) == 'random') then - tmpstr2 = ' : wave 
spectrum data file provided, sea surface height generated using random number (multiple iterations of wave fracture to convergence)' + tmpstr2 = ' : wave spectrum data file provided, sea surface height generated using '// & + 'random number (multiple iterations of wave fracture to convergence)' else tmpstr2 = ' : unknown value' endif @@ -2312,8 +2422,7 @@ subroutine input_data grid_type /= 'rectangular' .and. & grid_type /= 'cpom_grid' .and. & grid_type /= 'regional' .and. & - grid_type /= 'latlon' .and. & - grid_type /= 'setmask' ) then + grid_type /= 'latlon') then if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown grid_type=',trim(grid_type) abort_list = trim(abort_list)//":20" endif @@ -2375,6 +2484,7 @@ subroutine input_data wave_spec_type_in = wave_spec_type, & wave_spec_in=wave_spec, nfreq_in=nfreq, & tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & + saltflux_option_in=saltflux_option, ice_ref_salinity_in=ice_ref_salinity, & Pstar_in=Pstar, Cstar_in=Cstar, iceruf_in=iceruf, iceruf_ocn_in=iceruf_ocn, calc_dragio_in=calc_dragio, & windmin_in=windmin, drhosdwind_in=drhosdwind, & rsnw_fall_in=rsnw_fall, rsnw_tmax_in=rsnw_tmax, rhosnew_in=rhosnew, & @@ -2790,7 +2900,7 @@ subroutine set_state_var (nx_block, ny_block, & indxi, indxj ! compressed indices for cells with aicen > puny real (kind=dbl_kind) :: & - Tsfc, sum, hbar, abar, puny, rhos, Lfresh, rad_to_deg, rsnw_fall, dist_ratio + Tsfc, sum, hbar, abar, puny, rhos, Lfresh, rad_to_deg, rsnw_fall, dist_ratio, Tffresh real (kind=dbl_kind), dimension(ncat) :: & ainit, hinit ! 
initial area, thickness @@ -2832,7 +2942,7 @@ subroutine set_state_var (nx_block, ny_block, & nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) call icepack_query_parameters(rhos_out=rhos, Lfresh_out=Lfresh, puny_out=puny, & - rad_to_deg_out=rad_to_deg, rsnw_fall_out=rsnw_fall) + rad_to_deg_out=rad_to_deg, rsnw_fall_out=rsnw_fall, Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday, pi_out=pi) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -3070,7 +3180,12 @@ subroutine set_state_var (nx_block, ny_block, & do i = ilo, ihi if (tmask(i,j)) then ! place ice in high latitudes where ocean sfc is cold +#ifdef CESMCOUPLED + ! Option to use Tair instead. + if ( (Tair (i,j) <= Tffresh) .and. & +#else if ( (sst (i,j) <= Tf(i,j)+p2) .and. & +#endif (TLAT(i,j) < edge_init_sh/rad_to_deg .or. & TLAT(i,j) > edge_init_nh/rad_to_deg) ) then icells = icells + 1 diff --git a/cicecore/cicedynB/general/ice_state.F90 b/cicecore/cicedyn/general/ice_state.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_state.F90 rename to cicecore/cicedyn/general/ice_state.F90 diff --git a/cicecore/cicedynB/general/ice_step_mod.F90 b/cicecore/cicedyn/general/ice_step_mod.F90 similarity index 100% rename from cicecore/cicedynB/general/ice_step_mod.F90 rename to cicecore/cicedyn/general/ice_step_mod.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 similarity index 99% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 index 9fda67dad..68436cd0f 100644 --- a/cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 @@ -6915,9 +6915,6 @@ subroutine primary_grid_lengths_global_ext( & ! 
This subroutine adds ghost cells to global primary grid lengths array ! ARRAY_I and outputs result to array ARRAY_O -! Note duplicate implementation of this subroutine in: -! cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 - use ice_constants, only: c0 use ice_domain_size, only: nx_global, ny_global diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_broadcast.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_broadcast.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_broadcast.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_broadcast.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_communicate.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_communicate.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_communicate.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_exit.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_exit.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_exit.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_exit.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_gather_scatter.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_gather_scatter.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_gather_scatter.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_global_reductions.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_global_reductions.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_global_reductions.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_reprosum.F90 similarity index 100% 
rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_reprosum.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_reprosum.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_timers.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/mpi/ice_timers.F90 rename to cicecore/cicedyn/infrastructure/comm/mpi/ice_timers.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 similarity index 99% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 index f10a9f432..2b81c4441 100644 --- a/cicecore/cicedynB/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 @@ -4686,9 +4686,6 @@ subroutine primary_grid_lengths_global_ext( & ! This subroutine adds ghost cells to global primary grid lengths array ! ARRAY_I and outputs result to array ARRAY_O -! Note duplicate implementation of this subroutine in: -! 
cicecore/cicedynB/infrastructure/comm/mpi/ice_boundary.F90 - use ice_constants, only: c0 use ice_domain_size, only: nx_global, ny_global diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_broadcast.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_broadcast.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_broadcast.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_broadcast.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_communicate.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_communicate.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_communicate.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_exit.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_exit.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_exit.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_exit.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_gather_scatter.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_gather_scatter.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_gather_scatter.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_global_reductions.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_global_reductions.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_global_reductions.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_reprosum.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_reprosum.F90 rename to 
cicecore/cicedyn/infrastructure/comm/serial/ice_reprosum.F90 diff --git a/cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_timers.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/comm/serial/ice_timers.F90 rename to cicecore/cicedyn/infrastructure/comm/serial/ice_timers.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_blocks.F90 b/cicecore/cicedyn/infrastructure/ice_blocks.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_blocks.F90 rename to cicecore/cicedyn/infrastructure/ice_blocks.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_domain.F90 b/cicecore/cicedyn/infrastructure/ice_domain.F90 similarity index 97% rename from cicecore/cicedynB/infrastructure/ice_domain.F90 rename to cicecore/cicedyn/infrastructure/ice_domain.F90 index 10254aa93..ff1fac723 100644 --- a/cicecore/cicedynB/infrastructure/ice_domain.F90 +++ b/cicecore/cicedyn/infrastructure/ice_domain.F90 @@ -104,7 +104,7 @@ subroutine init_domain_blocks use ice_distribution, only: processor_shape use ice_domain_size, only: ncat, nilyr, nslyr, max_blocks, & nx_global, ny_global, block_size_x, block_size_y - + use ice_fileunits, only: goto_nml !---------------------------------------------------------------------- ! ! local variables @@ -114,6 +114,9 @@ subroutine init_domain_blocks integer (int_kind) :: & nml_error ! namelist read error flag + character(len=char_len) :: nml_name ! text namelist name + character(len=char_len_long) :: tmpstr2 ! for namelist check + character(len=*), parameter :: subname = '(init_domain_blocks)' !---------------------------------------------------------------------- @@ -167,26 +170,39 @@ subroutine init_domain_blocks landblockelim = .true. ! 
on by default if (my_task == master_task) then - write(nu_diag,*) subname,' Reading domain_nml' - + nml_name = 'domain_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + call get_fileunit(nu_nml) open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then call abort_ice(subname//'ERROR: domain_nml open file '// & - trim(nml_filename), & - file=__FILE__, line=__LINE__) + trim(nml_filename), & + file=__FILE__, line=__LINE__) endif + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=domain_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: domain_nml reading ', & - file=__FILE__, line=__LINE__) - endif + close(nu_nml) call release_fileunit(nu_nml) + endif call broadcast_scalar(nprocs, master_task) diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedyn/infrastructure/ice_grid.F90 similarity index 99% rename from cicecore/cicedynB/infrastructure/ice_grid.F90 rename to cicecore/cicedyn/infrastructure/ice_grid.F90 index dfccdd413..b775c21f2 100644 --- a/cicecore/cicedynB/infrastructure/ice_grid.F90 +++ b/cicecore/cicedyn/infrastructure/ice_grid.F90 @@ -507,23 +507,24 @@ subroutine init_grid2 ! Diagnose OpenMP thread schedule, force order in output !----------------------------------------------------------------- +! This code does not work in CESM. Needs to be investigated further. 
#if defined (_OPENMP) - !$OMP PARALLEL DO ORDERED PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks - if (my_task == master_task) then - !$OMP ORDERED - if (iblk == 1) then - call omp_get_schedule(ompsk,ompcs) - write(nu_diag,*) '' - write(nu_diag,*) subname,' OpenMP runtime thread schedule:' - write(nu_diag,*) subname,' omp schedule = ',ompsk,ompcs - endif - write(nu_diag,*) subname,' block, thread = ',iblk,OMP_GET_THREAD_NUM() - call flush_fileunit(nu_diag) - !$OMP END ORDERED - endif - enddo - !$OMP END PARALLEL DO + !$OMP PARALLEL DO ORDERED PRIVATE(iblk) SCHEDULE(runtime) + do iblk = 1, nblocks + if (my_task == master_task) then + !$OMP ORDERED + if (iblk == 1) then + call omp_get_schedule(ompsk,ompcs) +! write(nu_diag,*) '' + write(nu_diag,*) subname,' OpenMP runtime thread schedule:' + write(nu_diag,*) subname,' omp schedule = ',ompsk,ompcs + endif + write(nu_diag,*) subname,' block, thread = ',iblk,OMP_GET_THREAD_NUM() + !$OMP END ORDERED + endif + enddo + !$OMP END PARALLEL DO + call flush_fileunit(nu_diag) #endif !----------------------------------------------------------------- @@ -1393,15 +1394,15 @@ subroutine rectgrid ! original rectgrid defines latlon first call rectgrid_scale_dxdy else - ! rectgrid no grid spacing. + ! rectgrid no grid spacing. ! original method with addition to use namelist lat/lon reference - + if (my_task == master_task) then work_g1 = c0 length = dxrect*cm_to_m/radius*rad_to_deg - + work_g1(1,:) = lonrefrect ! reference lon from namelist - + do j = 1, ny_global do i = 2, nx_global work_g1(i,j) = work_g1(i-1,j) + length ! ULON @@ -1413,13 +1414,13 @@ subroutine rectgrid field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & ew_boundary_type, ns_boundary_type) - + if (my_task == master_task) then work_g1 = c0 length = dyrect*cm_to_m/radius*rad_to_deg - + work_g1(:,1) = latrefrect ! 
reference latitude from namelist - + do i = 1, nx_global do j = 2, ny_global work_g1(i,j) = work_g1(i,j-1) + length ! ULAT @@ -1532,32 +1533,32 @@ subroutine rectgrid end subroutine rectgrid !======================================================================= - + subroutine rectgrid_scale_dxdy - + ! generate a variable spaced rectangluar grid. ! extend spacing from center of grid outward. use ice_constants, only: c0, c1, c2, radius, cm_to_m, & field_loc_center, field_loc_NEcorner, field_type_scalar - + integer (kind=int_kind) :: & i, j, iblk, & imid, jmid, & center1, center2 ! array centers for expanding dx, dy - + real (kind=dbl_kind) :: & length, & rad_to_deg real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 - + character(len=*), parameter :: subname = '(rectgrid_scale_dxdy)' - + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) allocate(work_g1(nx_global,ny_global)) - + ! determine dx spacing ! strategy: initialize with dxrect. ! if want to scale the grid, work from center outwards, @@ -1565,51 +1566,51 @@ subroutine rectgrid_scale_dxdy ! this assumes dx varies in x direction only. ! (i.e, dx is the same across same y location) if (my_task == master_task) then - + ! initialize with initial dxrect work_g1(:,:) = dxrect - + ! check if nx is even or odd ! if even, middle 2 columns are center ! of odd, middle 1 column is center if (mod(nx_global,2) == 0) then ! nx_global is even - + ! with even number of x locatons, ! the center two y columns are center center1 = nx_global/2 ! integer math center2 = center1 + 1 ! integer math - + else ! nx_global = odd ! only one center index. set center2=center1 center1 = ceiling(real(nx_global/2),int_kind) center2 = center1 endif - + ! note loop over only half the x grid points (center1)-1 ! working from the center outward. do j = 1, ny_global do i = 1, center1-1 ! work from center1 to left work_g1(center1-i,j) = dxscale*work_g1(center1-i+1,j) - + ! 
work from center2 to right work_g1(center2+i,j) = dxscale*work_g1(center2+i-1,j) enddo ! i enddo ! j - + endif ! my_task == master_task - - + + ! note work_g1 is converted to meters in primary_grid_lengths_HTN call primary_grid_lengths_HTN(work_g1) ! dxU, dxT, dxN, dxE - + ! make ULON array if (my_task == master_task) then - + ! make first column reference lon in radians. ! the remaining work_g1 is still dx in meters work_g1(1,:) = lonrefrect/rad_to_deg ! radians - + ! loop over remaining points and add spacing to successive ! x locations do j = 1, ny_global @@ -1623,7 +1624,7 @@ subroutine rectgrid_scale_dxdy field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & ew_boundary_type, ns_boundary_type) - + ! determine dy spacing ! strategy: initialize with dyrect. ! if want to scale the grid, work from center outwards, @@ -1631,7 +1632,7 @@ subroutine rectgrid_scale_dxdy ! this assumes dy varies in y direction only. ! (i.e, dy is the same across same x location) if (my_task == master_task) then - + ! initialize with initial dxrect work_g1(:,:) = dyrect @@ -1639,25 +1640,25 @@ subroutine rectgrid_scale_dxdy ! if even, middle 2 rows are center ! of odd, middle 1 row is center if (mod(ny_global,2) == 0) then ! ny_global is even - + ! with even number of x locatons, ! the center two y columns are center center1 = ny_global/2 ! integer math center2 = center1 + 1 ! integer math - + else ! ny_global = odd ! only one center index. set center2=center1 center1 = ceiling(real(ny_global/2),int_kind) center2 = center1 endif - + ! note loop over only half the y grid points (center1)-1 ! working from the center outward. do i = 1, nx_global do j = 1, center1-1 ! work from center1 to bottom work_g1(i,center1-j) = dyscale*work_g1(i,center1-j+1) - + ! work from center2 to top work_g1(i,center2+j) = dyscale*work_g1(i,center2+j-1) enddo ! i @@ -1665,15 +1666,15 @@ subroutine rectgrid_scale_dxdy endif ! mytask == master_task ! 
note work_g1 is converted to meters primary_grid_lengths_HTE call primary_grid_lengths_HTE(work_g1) ! dyU, dyT, dyN, dyE - + ! make ULAT array if (my_task == master_task) then - + ! make first row reference lat in radians. ! the remaining work_g1 is still dy in meters work_g1(:,1) = latrefrect/rad_to_deg ! radians - - + + ! loop over remaining points and add spacing to successive ! x locations do j = 2, ny_global ! start from j=2. j=1 is latrefrect @@ -1687,10 +1688,10 @@ subroutine rectgrid_scale_dxdy field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULAT, distrb_info, & ew_boundary_type, ns_boundary_type) - + deallocate(work_g1) - + end subroutine rectgrid_scale_dxdy !======================================================================= diff --git a/cicecore/cicedynB/infrastructure/ice_memusage.F90 b/cicecore/cicedyn/infrastructure/ice_memusage.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_memusage.F90 rename to cicecore/cicedyn/infrastructure/ice_memusage.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_memusage_gptl.c b/cicecore/cicedyn/infrastructure/ice_memusage_gptl.c similarity index 98% rename from cicecore/cicedynB/infrastructure/ice_memusage_gptl.c rename to cicecore/cicedyn/infrastructure/ice_memusage_gptl.c index 309c8824b..32b31171d 100644 --- a/cicecore/cicedynB/infrastructure/ice_memusage_gptl.c +++ b/cicecore/cicedyn/infrastructure/ice_memusage_gptl.c @@ -196,7 +196,7 @@ int ice_memusage_gptl (int *size, int *rss, int *share, int *text, int *datastac */ ret = fscanf (fd, "%d %d %d %d %d %d %d", - size, rss, share, text, datastack, &dum, &dum); + size, rss, share, text, datastack, &dum, &dum); ret = fclose (fd); return 0; diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedyn/infrastructure/ice_read_write.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_read_write.F90 rename to cicecore/cicedyn/infrastructure/ice_read_write.F90 diff --git 
a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 b/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 similarity index 92% rename from cicecore/cicedynB/infrastructure/ice_restart_driver.F90 rename to cicecore/cicedyn/infrastructure/ice_restart_driver.F90 index bd5a49eaf..ffe9ec587 100644 --- a/cicecore/cicedynB/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 @@ -55,7 +55,7 @@ subroutine dumpfile(filename_spec) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_domain_size, only: nilyr, nslyr, ncat, max_blocks - use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask + use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask, kdyn use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & @@ -215,45 +215,52 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! ice mask for dynamics !----------------------------------------------------------------- - - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - if (iceUmask(i,j,iblk)) work1(i,j,iblk) = c1 - enddo - enddo - enddo - !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) - - if (grid_ice == 'CD' .or. 
grid_ice == 'C') then - + if (kdyn > 0) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block work1(i,j,iblk) = c0 - if (iceNmask(i,j,iblk)) work1(i,j,iblk) = c1 + if (iceUmask(i,j,iblk)) work1(i,j,iblk) = c1 enddo enddo enddo !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','icenmask',1,diag) + call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - if (iceEmask(i,j,iblk)) work1(i,j,iblk) = c1 - enddo + if (grid_ice == 'CD' .or. grid_ice == 'C') then + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + if (iceNmask(i,j,iblk)) work1(i,j,iblk) = c1 + enddo + enddo enddo - enddo - !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','iceemask',1,diag) + !$OMP END PARALLEL DO + call write_restart_field(nu_dump,0,work1,'ruf8','icenmask',1,diag) + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + if (iceEmask(i,j,iblk)) work1(i,j,iblk) = c1 + enddo + enddo + enddo + !$OMP END PARALLEL DO + call write_restart_field(nu_dump,0,work1,'ruf8','iceemask',1,diag) + endif + else + work1(:,:,:) = c0 + call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) + if (grid_ice == 'CD' .or. grid_ice == 'C') then + call write_restart_field(nu_dump,0,work1,'ruf8','icenmask',1,diag) + call write_restart_field(nu_dump,0,work1,'ruf8','iceemask',1,diag) + endif endif ! 
for mixed layer model @@ -277,7 +284,7 @@ subroutine restartfile (ice_ic) use ice_domain, only: nblocks, halo_info use ice_domain_size, only: nilyr, nslyr, ncat, & max_blocks - use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask + use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask,kdyn use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & stressp_1, stressp_2, stressp_3, stressp_4, & @@ -524,57 +531,76 @@ subroutine restartfile (ice_ic) !----------------------------------------------------------------- ! ice mask for dynamics !----------------------------------------------------------------- - if (my_task == master_task) & - write(nu_diag,*) 'ice mask for dynamics' - - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'iceumask',1,diag,field_loc_center, field_type_scalar) - - iceUmask(:,:,:) = .false. - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceUmask(i,j,iblk) = .true. - enddo - enddo - enddo - !$OMP END PARALLEL DO + if (kdyn > 0) then - if (grid_ice == 'CD' .or. grid_ice == 'C') then - - if (query_field(nu_restart,'icenmask')) then + if (my_task == master_task) & + write(nu_diag,*) 'ice mask for dynamics' + if (query_field(nu_restart,'iceumask')) then call read_restart_field(nu_restart,0,work1,'ruf8', & - 'icenmask',1,diag,field_loc_center, field_type_scalar) + 'iceumask',1,diag,field_loc_center, field_type_scalar) - iceNmask(:,:,:) = .false. + iceUmask(:,:,:) = .false. !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceNmask(i,j,iblk) = .true. + if (work1(i,j,iblk) > p5) iceUmask(i,j,iblk) = .true. enddo enddo enddo !$OMP END PARALLEL DO endif - - if (query_field(nu_restart,'iceemask')) then - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'iceemask',1,diag,field_loc_center, field_type_scalar) - - iceEmask(:,:,:) = .false. 
- !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceEmask(i,j,iblk) = .true. + if (grid_ice == 'CD' .or. grid_ice == 'C') then + + if (query_field(nu_restart,'icenmask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'icenmask',1,diag,field_loc_center, field_type_scalar) + + iceNmask(:,:,:) = .false. + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (work1(i,j,iblk) > p5) iceNmask(i,j,iblk) = .true. + enddo + enddo enddo + !$OMP END PARALLEL DO + endif + + if (query_field(nu_restart,'iceemask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'iceemask',1,diag,field_loc_center, field_type_scalar) + + iceEmask(:,:,:) = .false. + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (work1(i,j,iblk) > p5) iceEmask(i,j,iblk) = .true. + enddo + enddo enddo - enddo - !$OMP END PARALLEL DO + !$OMP END PARALLEL DO + endif + endif + else + if (my_task == master_task) & + write(nu_diag,*) 'ice mask for dynamics - not used, however mandatory to read in binary files' + if (query_field(nu_restart,'iceumask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'iceumask',1,diag,field_loc_center, field_type_scalar) + endif + if (grid_ice == 'CD' .or. grid_ice == 'C') then + if (query_field(nu_restart,'icenmask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'icenmask',1,diag,field_loc_center, field_type_scalar) + endif + if (query_field(nu_restart,'iceemask')) then + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'iceemask',1,diag,field_loc_center, field_type_scalar) + endif endif - endif ! 
set Tsfcn to c0 on land diff --git a/cicecore/cicedynB/infrastructure/ice_restoring.F90 b/cicecore/cicedyn/infrastructure/ice_restoring.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_restoring.F90 rename to cicecore/cicedyn/infrastructure/ice_restoring.F90 diff --git a/cicecore/cicedynB/infrastructure/ice_shr_reprosum86.c b/cicecore/cicedyn/infrastructure/ice_shr_reprosum86.c similarity index 100% rename from cicecore/cicedynB/infrastructure/ice_shr_reprosum86.c rename to cicecore/cicedyn/infrastructure/ice_shr_reprosum86.c diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_binary/ice_history_write.F90 rename to cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_binary/ice_restart.F90 rename to cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 rename to cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_netcdf/ice_restart.F90 rename to cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 similarity index 100% 
rename from cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 rename to cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_pio.F90 similarity index 100% rename from cicecore/cicedynB/infrastructure/io/io_pio2/ice_pio.F90 rename to cicecore/cicedyn/infrastructure/io/io_pio2/ice_pio.F90 diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 similarity index 98% rename from cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 rename to cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 index 679a2b6e6..7019f7128 100644 --- a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 @@ -749,10 +749,6 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & ! if (ndim3 == ncat .and. ncat>1) then if (ndim3 == ncat .and. ndims == 3) then call pio_read_darray(File, vardesc, iodesc3d_ncat, work, status) -!#ifndef CESM1_PIO -!! This only works for PIO2 -! where (work == PIO_FILL_DOUBLE) work = c0 -!#endif if (present(field_loc)) then do n=1,ndim3 call ice_HaloUpdate (work(:,:,n,:), halo_info, & @@ -762,10 +758,6 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & ! elseif (ndim3 == 1) then elseif (ndim3 == 1 .and. ndims == 2) then call pio_read_darray(File, vardesc, iodesc2d, work, status) -!#ifndef CESM1_PIO -!! This only works for PIO2 -! where (work == PIO_FILL_DOUBLE) work = c0 -!#endif if (present(field_loc)) then call ice_HaloUpdate (work(:,:,1,:), halo_info, & field_loc, field_type) @@ -942,11 +934,8 @@ logical function query_field(nu,vname) query_field = .false. - if (my_task == master_task) then - status = pio_inq_varid(File,trim(vname),vardesc) - if (status == PIO_noerr) query_field = .true. 
- endif - call broadcast_scalar(query_field,master_task) + status = pio_inq_varid(File,trim(vname),vardesc) + if (status == PIO_noerr) query_field = .true. end function query_field diff --git a/cicecore/cicedynB b/cicecore/cicedynB new file mode 120000 index 000000000..70695ca4b --- /dev/null +++ b/cicecore/cicedynB @@ -0,0 +1 @@ +cicedyn \ No newline at end of file diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index 0b8ed689e..85050d8c9 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -70,9 +70,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -111,7 +112,6 @@ subroutine cice_init call alloc_grid ! allocate grid call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state - call alloc_dyn_shared ! allocate dyn shared (init_uvel,init_vvel) call alloc_flux_bgc ! allocate flux_bgc call alloc_flux ! allocate flux call init_ice_timers ! initialize all timers @@ -122,9 +122,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! 
define vp dynamics parameters, variables @@ -262,7 +262,7 @@ subroutine init_restart nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & nt_iage, nt_FY, nt_aero, nt_fsd - character(len=*),parameter :: subname = '(init_restart)' + character(len=*), parameter :: subname = '(init_restart)' call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_warnings_flush(nu_diag) diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 index 0b8ed689e..85050d8c9 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 @@ -70,9 +70,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -111,7 +112,6 @@ subroutine cice_init call alloc_grid ! allocate grid call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state - call alloc_dyn_shared ! allocate dyn shared (init_uvel,init_vvel) call alloc_flux_bgc ! allocate flux_bgc call alloc_flux ! allocate flux call init_ice_timers ! initialize all timers @@ -122,9 +122,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! 
define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables @@ -262,7 +262,7 @@ subroutine init_restart nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & nt_iage, nt_FY, nt_aero, nt_fsd - character(len=*),parameter :: subname = '(init_restart)' + character(len=*), parameter :: subname = '(init_restart)' call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_warnings_flush(nu_diag) diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index a8bf96ad2..5efa18a28 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -72,9 +72,10 @@ subroutine cice_init(mpicom_ice) use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -125,7 +126,6 @@ subroutine cice_init(mpicom_ice) call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -135,9 +135,9 @@ subroutine cice_init(mpicom_ice) call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! 
allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index 5fbde9cce..0ba672f3d 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -36,7 +36,6 @@ subroutine cice_init1() use ice_domain , only: init_domain_blocks use ice_arrays_column , only: alloc_arrays_column use ice_state , only: alloc_state - use ice_dyn_shared , only: alloc_dyn_shared use ice_flux_bgc , only: alloc_flux_bgc use ice_flux , only: alloc_flux use ice_timers , only: timer_total, init_ice_timers, ice_timer_start @@ -59,7 +58,6 @@ subroutine cice_init1() call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -79,9 +77,10 @@ subroutine cice_init2() use ice_communicate , only: my_task, master_task use ice_diagnostics , only: init_diags use ice_domain_size , only: ncat, nfsd, nfreq - use ice_dyn_eap , only: init_eap, alloc_dyn_eap - use ice_dyn_shared , only: kdyn, init_dyn + use ice_dyn_eap , only: init_eap + use ice_dyn_evp , only: init_evp use ice_dyn_vp , only: init_vp + use ice_dyn_shared , only: kdyn use ice_flux , only: init_coupler_flux, init_history_therm use ice_flux , only: init_history_dyn, init_flux_atm, init_flux_ocn use ice_forcing , only: init_snowtable @@ -107,9 +106,9 @@ subroutine cice_init2() call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! 
define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp ! define evp dynamics parameters, variables + elseif (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index afdee5590..b94fcff05 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -21,7 +21,7 @@ module ice_comp_nuopc use ice_import_export , only : ice_import, ice_export, ice_advertise_fields, ice_realize_fields use ice_domain_size , only : nx_global, ny_global - use ice_grid , only : grid_type, init_grid2 + use ice_grid , only : grid_format, init_grid2 use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice use ice_calendar , only : force_restart_now, write_ic, init_calendar use ice_calendar , only : idate, mday, mmonth, myear, year_init @@ -576,7 +576,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call t_startf ('cice_init1') call cice_init1 call t_stopf ('cice_init1') - + !----------------------------------------------------------------- ! Advertise fields !----------------------------------------------------------------- @@ -684,7 +684,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Initialize the cice mesh and the cice mask - if (trim(grid_type) == 'setmask') then + if (trim(grid_format) == 'meshnc') then ! 
In this case cap code determines the mask file call ice_mesh_setmask_from_maskfile(ice_maskfile, ice_mesh, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index e4db010de..60059e39a 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -27,7 +27,7 @@ module ice_import_export use ice_arrays_column , only : floe_rad_c, wave_spectrum use ice_state , only : vice, vsno, aice, aicen_init, trcr, trcrn use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm - use ice_grid , only : grid_type + use ice_grid , only : grid_format use ice_mesh_mod , only : ocn_gridcell_frac use ice_boundary , only : ice_HaloUpdate use ice_fileunits , only : nu_diag, flush_fileunit @@ -1059,7 +1059,7 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'ice_fraction', input=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (trim(grid_type) == 'setmask') then + if (trim(grid_format) == 'meshnc') then call state_setexport(exportState, 'ice_mask', input=ocn_gridcell_frac, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return else diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index 22596429d..dc83c7703 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -15,6 +15,7 @@ module CICE_InitMod use ice_kinds_mod use ice_exit, only: abort_ice use ice_fileunits, only: init_fileunits, nu_diag + use ice_memusage, only: ice_memusage_init, ice_memusage_print use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave @@ -47,9 +48,9 @@ subroutine CICE_Initialize(mpi_comm) integer (kind=int_kind), optional, intent(in) :: mpi_comm ! 
communicator from nuopc character(len=*), parameter :: subname='(CICE_Initialize)' - !-------------------------------------------------------------------- - ! model initialization - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! model initialization + !-------------------------------------------------------------------- if (present(mpi_comm)) then call cice_init(mpi_comm) @@ -70,15 +71,16 @@ subroutine cice_init(mpi_comm) floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & + use ice_calendar, only: dt, dt_dyn, write_ic, & init_calendar, advance_timestep, calc_timesteps use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -122,12 +124,17 @@ subroutine cice_init(mpi_comm) call input_zbgc ! vertical biogeochemistry namelist call count_tracers ! count tracers + ! Call this as early as possible, must be after memory_stats is read + if (my_task == master_task) then + call ice_memusage_init(nu_diag) + call ice_memusage_print(nu_diag,subname//':start') + endif + call init_domain_blocks ! set up block decomposition call init_grid1 ! domain distribution call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! 
allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -137,9 +144,9 @@ subroutine cice_init(mpi_comm) call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables @@ -254,6 +261,10 @@ subroutine cice_init(mpi_comm) if (write_ic) call accum_hist(dt) ! write initial conditions + if (my_task == master_task) then + call ice_memusage_print(nu_diag,subname//':end') + endif + end subroutine cice_init !======================================================================= diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 9c30b15a3..8de05a121 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -72,9 +72,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -122,7 +123,6 @@ subroutine cice_init call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! 
allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -132,9 +132,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables diff --git a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 index 84d1a3a60..9ed1c5cbc 100644 --- a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 @@ -70,9 +70,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & @@ -113,7 +114,6 @@ subroutine cice_init call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -123,9 +123,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! 
initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! define vp dynamics parameters, variables diff --git a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 index 84d1a3a60..8a5070d25 100644 --- a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 @@ -70,8 +70,10 @@ subroutine cice_init use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap, alloc_dyn_eap - use ice_dyn_shared, only: kdyn, init_dyn, alloc_dyn_shared + use ice_dyn_eap, only: init_eap + use ice_dyn_evp, only: init_evp + use ice_dyn_vp, only: init_vp + use ice_dyn_shared, only: kdyn use ice_dyn_vp, only: init_vp use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux @@ -113,7 +115,6 @@ subroutine cice_init call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays - call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -123,9 +124,9 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - call init_dyn (dt_dyn) ! define dynamics parameters, variables - if (kdyn == 2) then - call alloc_dyn_eap ! allocate dyn_eap arrays + if (kdyn == 1) then + call init_evp + else if (kdyn == 2) then call init_eap ! define eap dynamics parameters, variables else if (kdyn == 3) then call init_vp ! 
define vp dynamics parameters, variables diff --git a/cicecore/shared/ice_fileunits.F90 b/cicecore/shared/ice_fileunits.F90 index 1854dda64..72a40f513 100644 --- a/cicecore/shared/ice_fileunits.F90 +++ b/cicecore/shared/ice_fileunits.F90 @@ -28,7 +28,8 @@ module ice_fileunits implicit none private public :: init_fileunits, get_fileunit, flush_fileunit, & - release_fileunit, release_all_fileunits + release_fileunit, release_all_fileunits, & + goto_nml character (len=char_len), public :: & diag_type ! 'stdout' or 'file' @@ -116,7 +117,11 @@ subroutine init_fileunits ice_IOUnitsInUse(ice_stdout) = .true. ! reserve unit 6 ice_IOUnitsInUse(ice_stderr) = .true. if (nu_diag >= 1 .and. nu_diag <= ice_IOUnitsMaxUnit) & - ice_IOUnitsInUse(nu_diag) = .true. ! reserve unit nu_diag + ice_IOUnitsInUse(nu_diag) = .true. ! reserve unit nu_diag +#ifdef CESMCOUPLED + ! CESM can have negative unit numbers. + if (nu_diag < 0) nu_diag_set = .true. +#endif call get_fileunit(nu_grid) call get_fileunit(nu_kmt) @@ -239,7 +244,12 @@ subroutine release_all_fileunits call release_fileunit(nu_rst_pointer) call release_fileunit(nu_history) call release_fileunit(nu_hdr) +#ifdef CESMCOUPLED + ! CESM can have negative unit numbers + if (nu_diag > 0 .and. nu_diag /= ice_stdout) call release_fileunit(nu_diag) +#else if (nu_diag /= ice_stdout) call release_fileunit(nu_diag) +#endif end subroutine release_all_fileunits @@ -311,6 +321,56 @@ subroutine flush_fileunit(iunit) end subroutine flush_fileunit +!======================================================================= + +!======================================================= + + subroutine goto_nml(iunit, nml, status) + ! Search to namelist group within ice_in file. + ! for compilers that do not allow optional namelists + + ! passed variables + integer(kind=int_kind), intent(in) :: & + iunit ! namelist file unit + + character(len=*), intent(in) :: & + nml ! namelist to search for + + integer(kind=int_kind), intent(out) :: & + status ! 
status of subrouine + + ! local variables + character(len=char_len) :: & + file_str, & ! string in file + nml_str ! namelist string to test + + integer(kind=int_kind) :: & + i, n ! dummy integers + + + ! rewind file + rewind(iunit) + + ! define test string with ampersand + nml_str = '&' // trim(adjustl(nml)) + + ! search for the record containing the namelist group we're looking for + do + read(iunit, '(a)', iostat=status) file_str + if (status /= 0) then + exit ! e.g. end of file + else + if (index(adjustl(file_str), nml_str) == 1) then + exit ! i.e. found record we're looking for + end if + end if + end do + + ! backspace to namelist name in file + backspace(iunit) + + end subroutine goto_nml + !======================================================================= end module ice_fileunits diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 06ab79cdb..0d06b0aac 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -1477,6 +1477,14 @@ subroutine input_zbgc restart_zsal = .false. endif + if (solve_zsal) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: solve_zsal=T deprecated' + endif + abort_flag = 101 + endif + +#ifdef UNDEPRECATE_ZSAL if (solve_zsal .and. nblyr < 1) then if (my_task == master_task) then write(nu_diag,*) subname,' ERROR: solve_zsal=T but 0 zsalinity tracers' @@ -1490,6 +1498,7 @@ subroutine input_zbgc endif abort_flag = 102 endif +#endif if (tr_brine .and. 
nblyr < 1 ) then if (my_task == master_task) then diff --git a/cicecore/version.txt b/cicecore/version.txt index 154cda3d7..953395fa1 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.4.0 +CICE 6.4.1 diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build index d75d74253..66b7b1321 100755 --- a/configuration/scripts/cice.build +++ b/configuration/scripts/cice.build @@ -128,12 +128,12 @@ endif ### List of source code directories (in order of importance). cat >! Filepath << EOF ${ICE_SANDBOX}/cicecore/drivers/${ICE_DRVOPT} -${ICE_SANDBOX}/cicecore/cicedynB/dynamics -${ICE_SANDBOX}/cicecore/cicedynB/general -${ICE_SANDBOX}/cicecore/cicedynB/analysis -${ICE_SANDBOX}/cicecore/cicedynB/infrastructure -${ICE_SANDBOX}/cicecore/cicedynB/infrastructure/io/$IODIR -${ICE_SANDBOX}/cicecore/cicedynB/infrastructure/comm/${ICE_COMMDIR} +${ICE_SANDBOX}/cicecore/cicedyn/dynamics +${ICE_SANDBOX}/cicecore/cicedyn/general +${ICE_SANDBOX}/cicecore/cicedyn/analysis +${ICE_SANDBOX}/cicecore/cicedyn/infrastructure +${ICE_SANDBOX}/cicecore/cicedyn/infrastructure/io/$IODIR +${ICE_SANDBOX}/cicecore/cicedyn/infrastructure/comm/${ICE_COMMDIR} ${ICE_SANDBOX}/cicecore/shared ${ICE_SANDBOX}/icepack/columnphysics EOF diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 8262f34ec..32db0270b 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -254,6 +254,8 @@ update_ocn_f = .false. l_mpond_fresh = .false. tfrz_option = 'mushy' + saltflux_option = 'constant' + ice_ref_salinity = 4.0 oceanmixed_ice = .true. 
wave_spec_type = 'none' wave_spec_file = 'unknown_wave_spec_file' diff --git a/configuration/scripts/options/set_nml.dyneap b/configuration/scripts/options/set_nml.dyneap new file mode 100644 index 000000000..6ebab625e --- /dev/null +++ b/configuration/scripts/options/set_nml.dyneap @@ -0,0 +1 @@ +kdyn = 2 diff --git a/configuration/scripts/options/set_nml.saltflux b/configuration/scripts/options/set_nml.saltflux new file mode 100644 index 000000000..d50ddc4e3 --- /dev/null +++ b/configuration/scripts/options/set_nml.saltflux @@ -0,0 +1,2 @@ + ktherm = 2 + saltflux_option = 'prognostic' diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index 3007380ab..8685ab9a8 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -22,6 +22,7 @@ restart gx3 4x4 alt04 restart gx3 4x4 alt05 restart gx3 8x2 alt06 restart gx3 8x3 alt07 +restart gx3 8x3 saltflux restart gx3 18x2 debug,maskhalo restart gx3 6x2 alt01,debug,short restart gx3 8x2 alt02,debug,short @@ -70,8 +71,6 @@ restart gx3 4x4 histall,precision8,cdf64 smoke gx3 30x1 bgcz,histall smoke gx3 14x2 fsd12,histall smoke gx3 4x1 dynpicard -smoke gx3 8x2 diag24,run5day,zsal,debug -restart gx3 8x2 zsal restart gx3 8x2 gx3ncarbulk,debug restart gx3 4x4 gx3ncarbulk,diag1 smoke gx3 4x1 calcdragio diff --git a/configuration/scripts/tests/decomp_suite.ts b/configuration/scripts/tests/decomp_suite.ts index c39c3ddfe..8d47506d6 100644 --- a/configuration/scripts/tests/decomp_suite.ts +++ b/configuration/scripts/tests/decomp_suite.ts @@ -3,6 +3,8 @@ restart gx3 4x2x25x29x4 dslenderX2 restart gx1 64x1x16x16x10 dwghtfile restart gbox180 16x1x6x6x60 dspacecurve,debugblocks decomp gx3 4x2x25x29x5 none +decomp gx3 4x2x25x29x5 dynpicard,reprosum +decomp gx3 4x2x25x29x5 dyneap restart gx3 1x1x50x58x4 droundrobin,thread restart_gx3_4x2x25x29x4_dslenderX2 restart gx3 4x1x25x116x1 dslenderX1,thread restart_gx3_4x2x25x29x4_dslenderX2 restart gx3 
6x2x4x29x18 dspacecurve restart_gx3_4x2x25x29x4_dslenderX2 diff --git a/configuration/scripts/tests/first_suite.ts b/configuration/scripts/tests/first_suite.ts index b42d917ea..bef24d9eb 100644 --- a/configuration/scripts/tests/first_suite.ts +++ b/configuration/scripts/tests/first_suite.ts @@ -1,6 +1,19 @@ # Test Grid PEs Sets BFB-compare smoke gx3 8x2 diag1,run5day +# decomp_suite restart gx3 4x2x25x29x4 dslenderX2 smoke gx3 4x2x25x29x4 debug,run2day,dslenderX2 -smoke gx3 4x2x25x29x4 dslenderX2,diag1,reprosum,cmplog +# reprosum_suite +smoke gx3 4x2x25x29x4 dslenderX2,diag1,reprosum +# travis_suite smoke gx3 1x2 run2day +# gridsys_suite +smoke gx3 1x1x100x116x1 reprosum,run10day +smoke gx1 32x1x16x16x32 reprosum,run10day +smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd +smoke gx1 32x1x16x16x32 reprosum,run10day,gridcd +smoke gx3 1x1x100x116x1 reprosum,run10day,gridc +smoke gx1 32x1x16x16x32 reprosum,run10day,gridc +# perf_suite +smoke gx1 32x1x16x16x15 run2day,droundrobin +smoke gx1 64x1x16x16x8 run2day,droundrobin,thread diff --git a/configuration/scripts/tests/gridsys_suite.ts b/configuration/scripts/tests/gridsys_suite.ts index d9752073f..faf01344a 100644 --- a/configuration/scripts/tests/gridsys_suite.ts +++ b/configuration/scripts/tests/gridsys_suite.ts @@ -1,4 +1,11 @@ # Test Grid PEs Sets BFB-compare +smoke gx3 1x1x100x116x1 reprosum,run10day +smoke gx1 32x1x16x16x32 reprosum,run10day +smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd +smoke gx1 32x1x16x16x32 reprosum,run10day,gridcd +smoke gx3 1x1x100x116x1 reprosum,run10day,gridc +smoke gx1 32x1x16x16x32 reprosum,run10day,gridc + smoke gx3 8x2 diag1,run5day smoke gx3 8x4 diag1,run5day,debug restart gx3 4x2 debug,diag1 @@ -12,11 +19,9 @@ smoke gbox80 4x2 boxclosed,boxforcee,run1day smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid -smoke gx3 1x1x100x116x1 reprosum,run10day smoke gx3 
1x1x25x29x16 reprosum,run10day,dwblockall smoke_gx3_1x1x100x116x1_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall smoke_gx3_1x1x100x116x1_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day smoke_gx3_1x1x100x116x1_reprosum_run10day -smoke gx1 32x1x16x16x32 reprosum,run10day smoke gx1 32x1x16x16x32 reprosum,run10day,cmplogrest,dwblockall smoke_gx1_32x1x16x16x32_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,dwblockall smoke_gx1_32x1x16x16x32_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest smoke_gx1_32x1x16x16x32_reprosum_run10day @@ -34,11 +39,9 @@ smoke gbox80 4x2 boxclosed,boxforcee,run1day,gridcd smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands,gridcd smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day,gridcd smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid,gridcd -smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd smoke gx3 1x1x25x29x16 reprosum,run10day,dwblockall,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day -smoke gx1 32x1x16x16x32 reprosum,run10day,gridcd smoke gx1 32x1x16x16x32 reprosum,run10day,cmplogrest,dwblockall,gridcd smoke_gx1_32x1x16x16x32_gridcd_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,dwblockall,gridcd smoke_gx1_32x1x16x16x32_gridcd_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,gridcd smoke_gx1_32x1x16x16x32_gridcd_reprosum_run10day @@ -56,11 +59,9 @@ smoke gbox80 4x2 boxclosed,boxforcee,run1day,gridc smoke gbox80 4x1 boxclosed,boxforcene,run1day,kmtislands,gridc smoke gbox80 4x2 boxopen,kmtislands,boxforcee,run1day,gridc smoke gbox80 2x2 boxclosed,boxforcen,run1day,vargrid,gridc -smoke gx3 1x1x100x116x1 reprosum,run10day,gridc smoke gx3 1x1x25x29x16 reprosum,run10day,dwblockall,gridc 
smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,dwblockall,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day smoke gx3 1x1x5x4x580 reprosum,run10day,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day -smoke gx1 32x1x16x16x32 reprosum,run10day,gridc smoke gx1 32x1x16x16x32 reprosum,run10day,cmplogrest,dwblockall,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,dwblockall,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day smoke gx1 32x1x16x12x40 reprosum,run10day,cmplogrest,gridc smoke_gx1_32x1x16x16x32_gridc_reprosum_run10day diff --git a/configuration/scripts/tests/omp_suite.ts b/configuration/scripts/tests/omp_suite.ts index 686fa72db..62630e874 100644 --- a/configuration/scripts/tests/omp_suite.ts +++ b/configuration/scripts/tests/omp_suite.ts @@ -1,5 +1,7 @@ # Test Grid PEs Sets BFB-compare +#gridB + smoke gx3 8x4 diag1,reprosum,run10day smoke gx3 6x2 alt01,reprosum,run10day smoke gx3 8x2 alt02,reprosum,run10day @@ -15,7 +17,6 @@ smoke gx3 14x2 fsd12,reprosum,run10day smoke gx3 11x2 isotope,reprosum,run10day smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day smoke gx3 6x4 dynpicard,reprosum,run10day -smoke gx3 8x3 zsal,reprosum,run10day smoke gx3 1x1x100x116x1 reprosum,run10day,thread smoke gbox128 8x2 reprosum,run10day @@ -25,6 +26,62 @@ smoke gbox128 14x2 boxrestore,reprosum,run10day smoke gbox80 4x5 box2001,reprosum,run10day smoke gbox80 11x3 boxslotcyl,reprosum,run10day +#gridC + +smoke gx3 8x4 diag1,reprosum,run10day,gridc +smoke gx3 6x2 alt01,reprosum,run10day,gridc +smoke gx3 8x2 alt02,reprosum,run10day,gridc +#smoke gx3 12x2 alt03,droundrobin,reprosum,run10day,gridc +smoke gx3 4x4 alt04,reprosum,run10day,gridc +smoke gx3 4x4 alt05,reprosum,run10day,gridc +smoke gx3 8x2 alt06,reprosum,run10day,gridc +smoke gx3 7x2 alt07,reprosum,run10day,gridc +smoke gx3 8x2 bgczm,reprosum,run10day,gridc +smoke gx1 15x2 reprosum,run10day,gridc +smoke gx1 
15x2 seabedprob,reprosum,run10day,gridc +smoke gx3 14x2 fsd12,reprosum,run10day,gridc +smoke gx3 11x2 isotope,reprosum,run10day,gridc +smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day,gridc +#smoke gx3 6x4 dynpicard,reprosum,run10day,gridc +smoke gx3 1x1x100x116x1 reprosum,run10day,gridc,thread + +smoke gbox128 8x2 reprosum,run10day,gridc +smoke gbox128 12x2 boxnodyn,reprosum,run10day,gridc +#smoke gbox128 9x2 boxadv,reprosum,run10day,gridc +smoke gbox128 14x2 boxrestore,reprosum,run10day,gridc +smoke gbox80 4x5 box2001,reprosum,run10day,gridc +smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridc + +#gridCD + +smoke gx3 8x4 diag1,reprosum,run10day,gridcd +smoke gx3 6x2 alt01,reprosum,run10day,gridcd +smoke gx3 8x2 alt02,reprosum,run10day,gridcd +#smoke gx3 12x2 alt03,droundrobin,reprosum,run10day,gridcd +smoke gx3 4x4 alt04,reprosum,run10day,gridcd +smoke gx3 4x4 alt05,reprosum,run10day,gridcd +smoke gx3 8x2 alt06,reprosum,run10day,gridcd +smoke gx3 7x2 alt07,reprosum,run10day,gridcd +smoke gx3 8x2 bgczm,reprosum,run10day,gridcd +smoke gx1 15x2 reprosum,run10day,gridcd +smoke gx1 15x2 seabedprob,reprosum,run10day,gridcd +smoke gx3 14x2 fsd12,reprosum,run10day,gridcd +smoke gx3 11x2 isotope,reprosum,run10day,gridcd +smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day,gridcd +#smoke gx3 6x4 dynpicard,reprosum,run10day,gridcd +smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd,thread + +smoke gbox128 8x2 reprosum,run10day,gridcd +smoke gbox128 12x2 boxnodyn,reprosum,run10day,gridcd +#smoke gbox128 9x2 boxadv,reprosum,run10day,gridcd +smoke gbox128 14x2 boxrestore,reprosum,run10day,gridcd +smoke gbox80 4x5 box2001,reprosum,run10day,gridcd +smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridcd + +sleep 180 + +#gridB + smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest smoke_gx3_8x4_diag1_reprosum_run10day smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread smoke_gx3_8x4_diag1_reprosum_run10day smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread 
smoke_gx3_6x2_alt01_reprosum_run10day @@ -41,7 +98,6 @@ smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread smoke_gx3_11x2_isotope_reprosum_run10day smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread smoke_gx3_8x4_icdefault_reprosum_run10day_snwgrain_snwitdrdg smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread smoke_gx3_6x4_dynpicard_reprosum_run10day -smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread smoke_gx3_8x3_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day smoke_gx3_1x1x100x116x1_reprosum_run10day_thread smoke gx3 8x4x5x4x80 reprosum,run10day smoke_gx3_1x1x100x116x1_reprosum_run10day_thread @@ -54,31 +110,6 @@ smoke gbox80 8x1 boxslotcyl,reprosum,run10day,cmplogrest,thread #gridC -smoke gx3 8x4 diag1,reprosum,run10day,gridc -smoke gx3 6x2 alt01,reprosum,run10day,gridc -smoke gx3 8x2 alt02,reprosum,run10day,gridc -#smoke gx3 12x2 alt03,droundrobin,reprosum,run10day,gridc -smoke gx3 4x4 alt04,reprosum,run10day,gridc -smoke gx3 4x4 alt05,reprosum,run10day,gridc -smoke gx3 8x2 alt06,reprosum,run10day,gridc -smoke gx3 7x2 alt07,reprosum,run10day,gridc -smoke gx3 8x2 bgczm,reprosum,run10day,gridc -smoke gx1 15x2 reprosum,run10day,gridc -smoke gx1 15x2 seabedprob,reprosum,run10day,gridc -smoke gx3 14x2 fsd12,reprosum,run10day,gridc -smoke gx3 11x2 isotope,reprosum,run10day,gridc -smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day,gridc -#smoke gx3 6x4 dynpicard,reprosum,run10day,gridc -smoke gx3 8x3 zsal,reprosum,run10day,gridc -smoke gx3 1x1x100x116x1 reprosum,run10day,gridc,thread - -smoke gbox128 8x2 reprosum,run10day,gridc -smoke gbox128 12x2 boxnodyn,reprosum,run10day,gridc -#smoke gbox128 9x2 boxadv,reprosum,run10day,gridc -smoke gbox128 14x2 boxrestore,reprosum,run10day,gridc -smoke gbox80 4x5 box2001,reprosum,run10day,gridc -smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridc - smoke gx3 4x2 
diag1,reprosum,run10day,cmplogrest,gridc smoke_gx3_8x4_diag1_gridc_reprosum_run10day smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x4_diag1_gridc_reprosum_run10day smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_6x2_alt01_gridc_reprosum_run10day @@ -95,7 +126,6 @@ smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread,grid smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_11x2_gridc_isotope_reprosum_run10day smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x4_gridc_icdefault_reprosum_run10day_snwgrain_snwitdrdg #smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_6x4_dynpicard_gridc_reprosum_run10day -smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread,gridc smoke_gx3_8x3_gridc_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day_thread smoke gx3 8x4x5x4x80 reprosum,run10day,gridc smoke_gx3_1x1x100x116x1_gridc_reprosum_run10day_thread @@ -108,31 +138,6 @@ smoke gbox80 8x1 boxslotcyl,reprosum,run10day,cmplogrest,thread #gridCD -smoke gx3 8x4 diag1,reprosum,run10day,gridcd -smoke gx3 6x2 alt01,reprosum,run10day,gridcd -smoke gx3 8x2 alt02,reprosum,run10day,gridcd -#smoke gx3 12x2 alt03,droundrobin,reprosum,run10day,gridcd -smoke gx3 4x4 alt04,reprosum,run10day,gridcd -smoke gx3 4x4 alt05,reprosum,run10day,gridcd -smoke gx3 8x2 alt06,reprosum,run10day,gridcd -smoke gx3 7x2 alt07,reprosum,run10day,gridcd -smoke gx3 8x2 bgczm,reprosum,run10day,gridcd -smoke gx1 15x2 reprosum,run10day,gridcd -smoke gx1 15x2 seabedprob,reprosum,run10day,gridcd -smoke gx3 14x2 fsd12,reprosum,run10day,gridcd -smoke gx3 11x2 isotope,reprosum,run10day,gridcd -smoke gx3 8x4 snwitdrdg,snwgrain,icdefault,reprosum,run10day,gridcd -#smoke gx3 6x4 dynpicard,reprosum,run10day,gridcd -smoke gx3 8x3 zsal,reprosum,run10day,gridcd -smoke gx3 1x1x100x116x1 reprosum,run10day,gridcd,thread - 
-smoke gbox128 8x2 reprosum,run10day,gridcd -smoke gbox128 12x2 boxnodyn,reprosum,run10day,gridcd -#smoke gbox128 9x2 boxadv,reprosum,run10day,gridcd -smoke gbox128 14x2 boxrestore,reprosum,run10day,gridcd -smoke gbox80 4x5 box2001,reprosum,run10day,gridcd -smoke gbox80 11x3 boxslotcyl,reprosum,run10day,gridcd - smoke gx3 4x2 diag1,reprosum,run10day,cmplogrest,gridcd smoke_gx3_8x4_diag1_gridcd_reprosum_run10day smoke gx3 4x1 diag1,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x4_diag1_gridcd_reprosum_run10day smoke gx3 8x1 alt01,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_6x2_alt01_gridcd_reprosum_run10day @@ -149,7 +154,6 @@ smoke gx3 8x1 fsd12,reprosum,run10day,cmplogrest,thread,grid smoke gx3 8x1 isotope,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_11x2_gridcd_isotope_reprosum_run10day smoke gx3 8x1 snwitdrdg,snwgrain,icdefault,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x4_gridcd_icdefault_reprosum_run10day_snwgrain_snwitdrdg #smoke gx3 8x1 dynpicard,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_6x4_dynpicard_gridcd_reprosum_run10day -smoke gx3 8x1 zsal,reprosum,run10day,cmplogrest,thread,gridcd smoke_gx3_8x3_gridcd_reprosum_run10day_zsal smoke gx3 4x2x25x29x4 reprosum,run10day,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day_thread smoke gx3 8x4x5x4x80 reprosum,run10day,gridcd smoke_gx3_1x1x100x116x1_gridcd_reprosum_run10day_thread diff --git a/configuration/scripts/tests/perf_suite.ts b/configuration/scripts/tests/perf_suite.ts index 9a17d8a55..a4d8ef588 100644 --- a/configuration/scripts/tests/perf_suite.ts +++ b/configuration/scripts/tests/perf_suite.ts @@ -1,25 +1,24 @@ # Test Grid PEs Sets BFB-compare -smoke gx1 1x1x320x384x1 run2day,droundrobin +smoke gx1 32x1x16x16x15 run2day,droundrobin smoke gx1 64x1x16x16x8 run2day,droundrobin,thread -sleep 180 # -smoke gx1 1x1x320x384x1 run2day,droundrobin -smoke gx1 1x1x160x192x4 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 1x1x80x96x16 
run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 1x1x40x48x64 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 1x1x20x24x256 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 1x1x320x384x1 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 1x1x160x192x4 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 1x1x80x96x16 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 1x1x40x48x64 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 1x1x20x24x256 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day # -smoke gx1 1x1x16x16x480 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 2x1x16x16x240 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 4x1x16x16x120 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 8x1x16x16x60 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 16x1x16x16x30 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 32x1x16x16x15 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 128x1x16x16x4 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day +smoke gx1 1x1x16x16x480 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 2x1x16x16x240 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 4x1x16x16x120 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 8x1x16x16x60 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 16x1x16x16x30 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +#smoke gx1 32x1x16x16x15 run2day,droundrobin +smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +smoke gx1 128x1x16x16x4 
run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day # -smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_1x1x320x384x1_droundrobin_run2day -smoke gx1 64x1x16x16x8 run2day,droundrobin,thread +smoke gx1 64x1x16x16x8 run2day,droundrobin smoke_gx1_32x1x16x16x15_droundrobin_run2day +#smoke gx1 64x1x16x16x8 run2day,droundrobin,thread smoke gx1 32x2x16x16x16 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread smoke gx1 16x4x16x16x32 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread smoke gx1 8x8x16x16x64 run2day,droundrobin smoke_gx1_64x1x16x16x8_droundrobin_run2day_thread diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index a8b9d08f1..0e9d21517 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -350,7 +350,7 @@ either Celsius or Kelvin units). Deprecated parameters are listed at the end. "ice_ic", "choice of initial conditions (see :ref:`tab-ic`)", "" "ice_stdout", "unit number for standard output", "" "ice_stderr", "unit number for standard error output", "" - "ice_ref_salinity", "reference salinity for ice–ocean exchanges", "4. ppt" + "ice_ref_salinity", "reference salinity for ice–ocean exchanges", "" "icells", "number of grid cells with specified property (for vectorization)", "" "iceruf", "ice surface roughness at atmosphere interface", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m" "iceruf_ocn", "under-ice roughness (at ocean interface)", "0.03 m" @@ -677,6 +677,7 @@ either Celsius or Kelvin units). Deprecated parameters are listed at the end. "Tf", "freezing temperature", "C" "Tffresh", "freezing temp of fresh ice", "273.15 K" "tfrz_option", "form of ocean freezing temperature", "" + "saltflux_option", "form of coupled salt flux ", "" "thinS", "minimum ice thickness for brine tracer", "" "timer_stats", "logical to turn on extra timer statistics", ".false." 
"timesecs", "total elapsed time in seconds", "s" diff --git a/doc/source/conf.py b/doc/source/conf.py index a1b2871ae..88b98bc09 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -62,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.4.0' +version = u'6.4.1' # The full version, including alpha/beta/rc tags. -version = u'6.4.0' +version = u'6.4.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/developer_guide/dg_dynamics.rst b/doc/source/developer_guide/dg_dynamics.rst index 48dead1cb..1f1430e71 100644 --- a/doc/source/developer_guide/dg_dynamics.rst +++ b/doc/source/developer_guide/dg_dynamics.rst @@ -9,14 +9,14 @@ Dynamics The CICE **cicecore/** directory consists of the non icepack source code. Within that directory there are the following subdirectories -**cicecore/cicedynB/analysis** contains higher level history and diagnostic routines. +**cicecore/cicedyn/analysis** contains higher level history and diagnostic routines. -**cicecore/cicedynB/dynamics** contains all the dynamical evp, eap, and transport routines. +**cicecore/cicedyn/dynamics** contains all the dynamical evp, eap, and transport routines. -**cicecore/cicedynB/general** contains routines associated with forcing, flux calculation, +**cicecore/cicedyn/general** contains routines associated with forcing, flux calculation, initialization, and model timestepping. -**cicecore/cicedynB/infrastructure** contains most of the low-level infrastructure associated +**cicecore/cicedyn/infrastructure** contains most of the low-level infrastructure associated with communication (halo updates, gather, scatter, global sums, etc) and I/O reading and writing binary and netcdf files. @@ -29,7 +29,7 @@ coupling layers. Dynamical Solvers -------------------- -The dynamics solvers are found in **cicecore/cicedynB/dynamics/**. 
A couple of different solvers are +The dynamics solvers are found in **cicecore/cicedyn/dynamics/**. A couple of different solvers are available including EVP, EAP and VP. The dynamics solver is specified in namelist with the ``kdyn`` variable. ``kdyn=1`` is evp, ``kdyn=2`` is eap, ``kdyn=3`` is VP. @@ -41,7 +41,7 @@ with the tripole grid. Transport ----------------- -The transport (advection) methods are found in **cicecore/cicedynB/dynamics/**. Two methods are supported, +The transport (advection) methods are found in **cicecore/cicedyn/dynamics/**. Two methods are supported, upwind and remap. These are set in namelist via the ``advection`` variable. Transport can be disabled with the ``ktransport`` namelist variable. @@ -94,11 +94,11 @@ Two low-level communications packages, mpi and serial, are provided as part of C provides a middle layer between the model and the underlying libraries. Only the CICE mpi or serial directories are compiled with CICE, not both. -**cicedynB/infrastructure/comm/mpi/** +**cicedyn/infrastructure/comm/mpi/** is based on MPI and provides various methods to do halo updates, global sums, gather/scatter, broadcasts and similar using some fairly generic interfaces to isolate the MPI calls in the code. -**cicedynB/infrastructure/comm/serial/** support the same interfaces, but operates +**cicedyn/infrastructure/comm/serial/** support the same interfaces, but operates in shared memory mode with no MPI. The serial library will be used, by default in the CICE scripts, if the number of MPI tasks is set to 1. The serial library allows the model to be run on a single core or with OpenMP parallelism only without requiring an MPI library. @@ -112,15 +112,15 @@ Only one of the three IO directories can be built with CICE. The CICE scripts w by default, but other options can be selecting by setting ``ICE_IOTYPE`` in **cice.settings** in the case. This has to be set before CICE is built. 
-**cicedynB/infrastructure/io/io_netcdf/** is the +**cicedyn/infrastructure/io/io_netcdf/** is the default for the standalone CICE model, and it supports writing history and restart files in netcdf format using standard netcdf calls. It does this by writing from and reading to the root task and gathering and scattering fields from the root task to support model parallelism. -**cicedynB/infrastructure/io/io_binary/** supports files in binary format using a gather/scatter +**cicedyn/infrastructure/io/io_binary/** supports files in binary format using a gather/scatter approach and reading to and writing from the root task. -**cicedynB/infrastructure/io/io_pio/** support reading and writing through the pio interface. pio +**cicedyn/infrastructure/io/io_pio/** support reading and writing through the pio interface. pio is a parallel io library (https://github.com/NCAR/ParallelIO) that supports reading and writing of binary and netcdf file through various interfaces including netcdf and pnetcdf. pio is generally more parallel in memory even when using serial netcdf than the standard gather/scatter methods, diff --git a/doc/source/developer_guide/dg_forcing.rst b/doc/source/developer_guide/dg_forcing.rst index d3c406b43..0b90a9b2e 100644 --- a/doc/source/developer_guide/dg_forcing.rst +++ b/doc/source/developer_guide/dg_forcing.rst @@ -15,7 +15,7 @@ generally not been maintained by the Consortium and only a subset of the code is tested by the Consortium. The forcing implementation can be found in the file -**cicecore/cicedynB/general/ice_forcing.F90**. As noted above, only a subset of the +**cicecore/cicedyn/general/ice_forcing.F90**. As noted above, only a subset of the forcing modes are tested and supported. In many ways, the implemetation is fairly primitive, in part due to historical reasons and in part because standalone runs are discouraged for evaluating complex science. 
In general, most implementations diff --git a/doc/source/science_guide/sg_horiztrans.rst b/doc/source/science_guide/sg_horiztrans.rst index f85f13ee5..d66046465 100644 --- a/doc/source/science_guide/sg_horiztrans.rst +++ b/doc/source/science_guide/sg_horiztrans.rst @@ -35,8 +35,11 @@ versions but have not yet been implemented. Two transport schemes are available: upwind and the incremental remapping scheme of :cite:`Dukowicz00` as modified for sea ice by -:cite:`Lipscomb04`. The upwind scheme is naturally suited for a C grid discretization. As such, the C grid velocity components (i.e. :math:`uvelE=u` at the E point and :math:`vvelN=v` at the N point) are directly passed to the upwind transport scheme. On the other hand, if the B grid is used, :math:`uvel` and :math:`vvel` (respectively :math:`u` and :math:`v` at the U point) are interpolated to the E and N points such that the upwind advection can be performed. Conversely, as the remapping scheme was originally developed for B grid applications, :math:`uvel` and :math:`vvel` are directly used for the advection. If the remapping scheme is used for the C grid, :math:`uvelE` and :math:`vvelN` are first interpolated to the U points before performing the advection. +:cite:`Lipscomb04`. +- The upwind scheme uses velocity points at the East and North face (i.e. :math:`uvelE=u` at the E point and :math:`vvelN=v` at the N point) of a T gridcell. As such, the prognostic C grid velocity components (:math:`uvelE` and :math:`vvelN`) can be passed directly to the upwind transport scheme. If the upwind scheme is used with the B grid, the B grid velocities, :math:`uvelU` and :math:`vvelU` (respectively :math:`u` and :math:`v` at the U point) are interpolated to the E and N points first. (Note however that the upwind scheme does not transport all potentially available tracers.) + +- The remapping scheme uses :math:`uvelU` and :math:`vvelU` if l_fixed_area is false and :math:`uvelE` and :math:`vvelN` if l_fixed_area is true. 
l_fixed_area is hardcoded to false by default and further described below. As such, the B grid velocities (:math:`uvelU` and :math:`vvelU`) are used directly in the remapping scheme, while the C grid velocities (:math:`uvelE` and :math:`vvelN`) are interpolated to U points first. If l_fixed_area is changed to true, then the reverse is true. The C grid velocities are used directly and the B grid velocities are interpolated. The remapping scheme has several desirable features: @@ -464,14 +467,14 @@ In general, the fluxes in this expression are not equal to those implied by the above scheme for locating departure regions. For some applications it may be desirable to prescribe the divergence by prescribing the area of the departure region for each edge. This can be -done in CICE 4.0 by setting `l\_fixed\_area` = true in +done by setting `l\_fixed\_area` = true in **ice\_transport\_driver.F90** and passing the prescribed departure areas (`edgearea\_e` and `edgearea\_n`) into the remapping routine. An extra triangle is then constructed for each departure region to ensure that the total area is equal to the prescribed value. This idea was suggested and first implemented by Mats Bentsen of the Nansen Environmental and Remote Sensing Center (Norway), who applied an earlier version of the -CICE remapping scheme to an ocean model. The implementation in CICE v4.0 +CICE remapping scheme to an ocean model. The implementation in CICE is somewhat more general, allowing for departure regions lying on both sides of a cell edge. The extra triangle is constrained to lie in one but not both of the grid cells that share the edge. Since this option diff --git a/doc/source/science_guide/sg_tracers.rst b/doc/source/science_guide/sg_tracers.rst index b75edfb00..cbecb9310 100644 --- a/doc/source/science_guide/sg_tracers.rst +++ b/doc/source/science_guide/sg_tracers.rst @@ -92,7 +92,6 @@ is not in use. 
" ","nslyr","vsno","nt_rhos"," " " ","nslyr","vsno","nt_smice"," " " ","nslyr","vsno","nt_smliq"," " - "solve_zsal", "n_trzs", "fbri or (a,v)ice", "nt_bgc_S", " " "tr_bgc_N", "n_algae", "fbri or (a,v)ice", "nt_bgc_N", "nlt_bgc_N" "tr_bgc_Nit", " ", "fbri or (a,v)ice", "nt_bgc_Nit", "nlt_bgc_Nit" "tr_bgc_C", "n_doc", "fbri or (a,v)ice", "nt_bgc_DOC", "nlt_bgc_DOC" @@ -112,6 +111,7 @@ is not in use. " ", "1", "fbri", "nt_zbgc_frac", " " .. + "solve_zsal", "n_trzs", "fbri or (a,v)ice", "nt_bgc_S", " " "tr_pond_cesm", "2", "aice", "nt_apnd", " " " ", " ", "apnd", "nt_vpnd", " " diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index a34c69822..587adcd56 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -644,6 +644,7 @@ forcing_nml "", "``eastblock``", "ice block covering about 25 percent of domain at the east edge of the domain", "" "", "``latsst``", "ice dependent on latitude and ocean temperature", "" "", "``uniform``", "ice defined at all grid points", "" + "``ice_ref_salinity``", "real", "sea ice salinity for coupling fluxes (ppt)", "4.0" "``iceruf``", "real", "ice surface roughness at atmosphere interface in meters", "0.0005" "``l_mpond_fresh``", "``.false.``", "release pond water immediately to ocean", "``.false.``" "", "``true``", "retain (topo) pond water until ponds drain", "" @@ -666,6 +667,8 @@ forcing_nml "``restore_ocn``", "logical", "restore sst to data", "``.false.``" "``restore_ice``", "logical", "restore ice state along lateral boundaries", "``.false.``" "``rotate_wind``", "logical", "rotate wind from east/north to computation grid", "``.true.``" + "``saltflux_option``", "``constant``", "computed using ice_ref_salinity", "``constant``" + "", "``prognostic``", "computed using prognostic salinity", "" "``tfrz_option``", "``linear_salt``", "linear function of salinity (ktherm=1)", "``mushy``" "", "``minus1p8``", "constant ocean freezing temperature 
(:math:`-1.8^{\circ} C`)", "" "", "``mushy``", "matches mushy-layer thermo (ktherm=2)", "" @@ -804,14 +807,14 @@ zbgc_nml "``ratio_S2N_sp``", "real", "algal S to N in mol/mol small plankton", "0.03" "``restart_bgc``", "logical", "restart tracer values from file", "``.false.``" "``restart_hbrine``", "logical", "", "``.false.``" - "``restart_zsal``", "logical", "", "``.false.``" + "``restart_zsal``", "logical", "zsalinity DEPRECATED", "``.false.``" "``restore_bgc``", "logical", "restore bgc to data", "``.false.``" "``R_dFe2dust``", "real", "g/g :cite:`Tagliabue09`", "0.035" "``scale_bgc``", "logical", "", "``.false.``" "``silicatetype``", "real", "mobility type between stationary and mobile silicate", "-1.0" "``skl_bgc``", "logical", "biogeochemistry", "``.false.``" "``solve_zbgc``", "logical", "", "``.false.``" - "``solve_zsal``", "logical", "update salinity tracer profile", "``.false.``" + "``solve_zsal``", "logical", "zsalinity DEPRECATED, update salinity tracer profile", "``.false.``" "``tau_max``", "real", "long time mobile to stationary exchanges", "1.73e-5" "``tau_min``", "real", "rapid module to stationary exchanges", "5200." "``tr_bgc_Am``", "logical", "ammonium tracer", "``.false.``" @@ -847,13 +850,13 @@ icefields_nml There are several icefield namelist groups to control model history output. See the source code for a full list of supported output fields. 
-* ``icefields_nml`` is in **cicecore/cicedynB/analysis/ice_history_shared.F90** -* ``icefields_bgc_nml`` is in **cicecore/cicedynB/analysis/ice_history_bgc.F90** -* ``icefields_drag_nml`` is in **cicecore/cicedynB/analysis/ice_history_drag.F90** -* ``icefields_fsd_nml`` is in **cicecore/cicedynB/analysis/ice_history_fsd.F90** -* ``icefields_mechred_nml`` is in **cicecore/cicedynB/analysis/ice_history_mechred.F90** -* ``icefields_pond_nml`` is in **cicecore/cicedynB/analysis/ice_history_pond.F90** -* ``icefields_snow_nml`` is in **cicecore/cicedynB/analysis/ice_history_snow.F90** +* ``icefields_nml`` is in **cicecore/cicedyn/analysis/ice_history_shared.F90** +* ``icefields_bgc_nml`` is in **cicecore/cicedyn/analysis/ice_history_bgc.F90** +* ``icefields_drag_nml`` is in **cicecore/cicedyn/analysis/ice_history_drag.F90** +* ``icefields_fsd_nml`` is in **cicecore/cicedyn/analysis/ice_history_fsd.F90** +* ``icefields_mechred_nml`` is in **cicecore/cicedyn/analysis/ice_history_mechred.F90** +* ``icefields_pond_nml`` is in **cicecore/cicedyn/analysis/ice_history_pond.F90** +* ``icefields_snow_nml`` is in **cicecore/cicedyn/analysis/ice_history_snow.F90** .. 
csv-table:: **icefields_nml namelist options** :header: "variable", "options/format", "description", "default value" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index a7cc66948..5ed2092c0 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -47,7 +47,7 @@ as follows **cicecore/** CICE source code -**cicecore/cicedynB/** +**cicecore/cicedyn/** routines associated with the dynamics core **cicecore/drivers/** diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index 284de72f1..289f626a9 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -23,7 +23,8 @@ The testing scripts support several features - Ability to compare results to prior baselines to verify bit-for-bit (``--bcmp``) - Ability to define where baseline tests are stored (``--bdir``) - Ability to compare tests against each other (``--diff``) - - Ability to set account number (``--acct``), which is otherwise not set and may result in tests not being submitted + - Ability to set or override the batch account number (``--acct``) and queue name (``--queue``) + - Ability to control how test suites execute (``--setup-only``, ``--setup-build``, ``--setup-build-run``, ``--setup-build-submit``) .. _indtests: @@ -301,22 +302,6 @@ results.csh script in the testsuite.[testid]:: cd testsuite.[testid] ./results.csh -The script **create_fails.csh** will process the output from results.csh and generate a new -test suite file, **fails.ts**, from the failed tests. -**fails.ts** can then be edited and passed into ``cice.setup --suite fails.ts ...`` to rerun -subsets of failed tests to more efficiently move thru the development, testing, and -validation process. However, a full test suite should be run on the final development -version of the code. 
- -To report the test results, as is required for Pull Requests to be accepted into -the master the CICE Consortium code see :ref:`testreporting`. - -If using the ``--tdir`` option, that directory must not exist before the script is run. The tdir directory will be -created by the script and it will be populated by all tests as well as scripts that support the -test suite:: - - ./cice.setup --suite base_suite --mach wolf --env gnu --testid myid --tdir /scratch/$user/testsuite.myid - Multiple suites are supported on the command line as comma separated arguments:: ./cice.setup --suite base_suite,decomp_suite --mach wolf --env gnu --testid myid @@ -329,9 +314,48 @@ The option settings defined at the command line have precedence over the test su values if there are conflicts. The predefined test suites are defined under **configuration/scripts/tests** and -the files defining the suites -have a suffix of .ts in that directory. The format for the test suite file -is relatively simple. +the files defining the suites have a suffix of .ts in that directory. Some of the +available test suites are + +``quick_suite`` + consists of a handful of basic CICE tests + +``base_suite`` + consists of a much larger suite of tests covering much of the CICE functionality + +``decomp_suite`` + checks that different decompositions and pe counts produce bit-for-bit results + +``omp_suite`` + checks that OpenMP single thread and multi-thread cases are bit-for-bit identical + +``io_suite`` + tests the various IO options including binary, netcdf, and pio. PIO should be installed locally and accessible to the CICE build system to make full use of this suite. 
+ +``perf_suite`` + runs a series of tests to evaluate model scaling and performance + +``reprosum_suite`` + verifies that CICE log files are bit-for-bit with different decompositions and pe counts when the bfbflag is set to reprosum + +``gridsys_suite`` + tests B, C, and CD grid_ice configurations + +``prod_suite`` + consists of a handful of tests running 5 to 10 model years and includes some QC testing. These tests will be relatively expensive and take more time compared to other suites. + +``unittest_suite`` + runs unit tests in the CICE repository + +``travis_suite`` + consists of a small suite of tests suitable for running on low pe counts. This is the suite used with Github Actions for CI in the workflow. + +``first_suite`` + this small suite of tests is redundant with tests in other suites. It runs several of the critical baseline tests that other test compare to. It can improve testing turnaround if listed first in a series of test suites. + +When running multiple suites on the command line (i.e. ``--suite first_suite,base_suite,omp_suite``) the suites will be run in the order defined by the user and redundant tests across multiple suites will be created and executed only once. + +The format for the test suite file is relatively simple. It is a text file with white space delimited columns that define a handful of values in a specific order. The first column is the test name, the second the grid, the third the pe count, @@ -423,6 +447,22 @@ which means by default the test suite builds and submits the jobs. By defining By leveraging the **cice.setup** command line arguments ``--setup-only``, ``--setup-build``, and ``--setup-build-run`` as well as the environment variables SUITE_BUILD, SUITE_RUN, and SUITE_SUBMIT, users can run **cice.setup** and **suite.submit** in various combinations to quickly setup, setup and build, submit, resubmit, run interactively, or rebuild and resubmit full testsuites quickly and easily. See :ref:`examplesuites` for an example. 
+The script **create_fails.csh** will process the output from results.csh and generate a new +test suite file, **fails.ts**, from the failed tests. +**fails.ts** can then be edited and passed into ``cice.setup --suite fails.ts ...`` to rerun +subsets of failed tests to more efficiently move thru the development, testing, and +validation process. However, a full test suite should be run on the final development +version of the code. + +To report the test results, as is required for Pull Requests to be accepted into +the master the CICE Consortium code see :ref:`testreporting`. + +If using the ``--tdir`` option, that directory must not exist before the script is run. The tdir directory will be +created by the script and it will be populated by all tests as well as scripts that support the +test suite:: + + ./cice.setup --suite base_suite --mach wolf --env gnu --testid myid --tdir /scratch/$user/testsuite.myid + .. _examplesuites: @@ -695,9 +735,12 @@ The following are brief descriptions of some of the current unit tests, both sets of software are tested independently and correctness is verified. - **calchk** is a unit test that exercises the CICE calendar over 100,000 years and verifies correctness. This test does not depend on the CICE initialization. + - **gridavgchk** is a unit test that exercises the CICE grid_average_X2Y methods and verifies results. - **helloworld** is a simple test that writes out helloworld and uses no CICE infrastructure. This tests exists to demonstrate how to build a unit test by specifying the object files directly in the Makefile + - **optargs** is a unit test that tests passing optional arguments down a calling tree and verifying + that the optional attribute is preserved correctly. - **sumchk** is a unit test that exercises the methods in ice_global_reductions.F90. 
This test requires that a CICE grid and decomposition be initialized, so CICE_InitMod.F90 is leveraged to initialize the model prior to running a suite of unit validation tests to verify correctness. diff --git a/doc/source/user_guide/ug_troubleshooting.rst b/doc/source/user_guide/ug_troubleshooting.rst index d20b14ffc..315b2f869 100644 --- a/doc/source/user_guide/ug_troubleshooting.rst +++ b/doc/source/user_guide/ug_troubleshooting.rst @@ -7,6 +7,16 @@ Troubleshooting Check the FAQ: https://github.com/CICE-Consortium/CICE/wiki +.. _dirsetup: + +Directory Structure +--------------------- + +In November, 2022, the cicedynB directory was renamed to cicedyn. +A soft link was temporarily added to preserve the ability to use +cicedynB as a path when compiling CICE in other build systems. This +soft link will be removed in the future. + .. _setup: Initial setup @@ -221,6 +231,16 @@ be found in the `Icepack documentation `_. +VP dynamics results +---------------------------------------- + +The VP dynamics solver (`kdyn=3`) requires a global sum. This global sum +is computed by default via an efficient implementation that is not bit-for-bit +for different decompositions or pe counts. Bit-for-bit identical results +can be recovered for the VP dynamics solver by setting the namelist +`bfbflag = reprosum` or using the `-s reprosum` option when setting up a case. + + Proliferating subprocess parameterizations ------------------------------------------------------- diff --git a/icepack b/icepack index 18fc1c9b7..8f96707a9 160000 --- a/icepack +++ b/icepack @@ -1 +1 @@ -Subproject commit 18fc1c9b79d81604eafdb1fac4ddd039b78ad390 +Subproject commit 8f96707a90132ca119d81ed84e5a62ca0ff3ed96