From d3f2e6166bfd05e3d0fb36f571f8e500cf09934f Mon Sep 17 00:00:00 2001
From: Denise Worthen
Date: Wed, 22 Apr 2020 14:48:08 -0400
Subject: [PATCH] merge latest master (#4)

* Isotopes for CICE (#423)

Co-authored-by: apcraig
Co-authored-by: David Bailey
Co-authored-by: Elizabeth Hunke

Convergence on ustar for CICE. (#452) (#5)

* Add atmiter_conv to CICE
* Add documentation
* trigger build the docs

Co-authored-by: David A. Bailey

update icepack submodule
Add restart_coszen namelist option
updated orbital calculations needed for cesm
fixed problems in updated orbital calculations needed for cesm
update CICE6 to support coupling with UFS
put in changes so that both ufsatm and cesm requirements for potential temperature and density are satisfied
Fix threading problem in init_bgc
Fix additional OMP problems
Fix logging issues for NUOPC
Move the forapps directory
update icepack submodule
update comp_ice.backend with temporary ice_timers fix
changes for coldstart running
remove cesmcoupled ifdefs
removal of many cpp-ifdefs
fix compile errors
fixes to get cesm working
fixed white space issue
change Orion to orion in backend
remove duplicate print lines from ice_transport_driver
add -link_mpi=dbg to debug flags (#8)

cice6 compile (#6)

* enable debug build. fix to remove errors
* fix an error in comp_ice.backend.libcice
* change Orion to orion for machine identification

changes for consistency w/ current emc-cice5 (#13)

Update to emc/develop fork to current CICE consortium

Co-authored-by: David A. Bailey
Co-authored-by: Tony Craig
Co-authored-by: Elizabeth Hunke
Co-authored-by: Mariana Vertenstein
Co-authored-by: apcraig
Co-authored-by: Philippe Blain

Fixcommit (#14)

Align commit history between emc/develop and cice-consortium/master

Update CICE6 for integration to S2S

* add wcoss_dell_p3 compiler macro
* update to icepack w/ debug fix
* replace SITE with MACHINE_ID
* update compile scripts

Support TACC stampede (#19)
---
 .gitmodules | 2 +-
 LICENSE.pdf | Bin 55036 -> 92397 bytes
 README.md | 5 +-
 cice.setup | 156 +-
 cicecore/cicedyn/analysis/ice_diagnostics.F90 | 761 +++--
 cicecore/cicedyn/analysis/ice_history_bgc.F90 | 922 +++---
 .../cicedyn/analysis/ice_history_shared.F90 | 413 +--
 .../cicedyn/dynamics/ice_transport_driver.F90 | 1368 +++----
 cicecore/cicedyn/general/ice_flux.F90 | 375 +--
 cicecore/cicedyn/general/ice_flux_bgc.F90 | 68 +-
 cicecore/cicedyn/general/ice_forcing.F90 | 2609 +++++------
 cicecore/cicedyn/general/ice_forcing_bgc.F90 | 344 ++-
 cicecore/cicedyn/general/ice_step_mod.F90 | 632 ++--
 .../cicedyn/infrastructure/ice_domain.F90 | 267 +-
 .../infrastructure/ice_restart_driver.F90 | 311 +-
 .../io/io_binary/ice_restart.F90 | 222 +-
 .../io/io_netcdf/ice_restart.F90 | 602 ++--
 cicecore/cicedynB/dynamics/ice_dyn_shared.F90 | 998 ++++++
 cicecore/cicedynB/general/ice_init.F90 | 2431 ++++++++++
 cicecore/cicedynB/infrastructure/ice_grid.F90 | 2561 +++++++++++
 .../infrastructure/ice_read_write.F90 | 2456 +++++++++++
 .../io/io_netcdf/ice_history_write.F90 | 1589 ++++++
 .../infrastructure/io/io_pio/ice_restart.F90 | 884 ++++++
 .../io/io_pio2/ice_history_write.F90 | 1305 ++++++
 .../infrastructure/io/io_pio2/ice_restart.F90 | 882 ++++++
 cicecore/drivers/direct/hadgem3/CICE.F90 | 59 +-
 .../drivers/direct/hadgem3/CICE_InitMod.F90 | 144 +-
 .../direct/nemo_concepts/CICE_RunMod.F90 | 656 -----
 cicecore/drivers/mct/cesm1/CICE_InitMod.F90 | 161 +-
 cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 239 +-
 cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 | 867 +++---
 cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 202 +-
 .../drivers/nuopc/cmeps/cice_wrapper_mod.F90 | 84 +-
 .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 1000 +++---
 .../drivers/nuopc/cmeps/ice_import_export.F90 | 1354 ++++-----
 .../nuopc/cmeps/ice_prescribed_mod.F90 | 581 ++--
 cicecore/drivers/nuopc/dmi/CICE.F90 | 58 +-
 cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 | 171 +-
 cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 | 218 +-
 cicecore/drivers/standalone/cice/CICE.F90 | 58 +-
 .../drivers/standalone/cice/CICE_InitMod.F90 | 172 +-
 .../drivers/standalone/cice/CICE_RunMod.F90 | 213 +-
 .../drivers/unittest/opticep/CICE_RunMod.F90 | 234 +-
 cicecore/shared/ice_init_column.F90 | 569 ++--
 cicecore/shared/ice_restart_column.F90 | 340 ++-
 cicecore/version.txt | 2 +-
 configuration/scripts/Makefile | 43 +-
 configuration/scripts/cice.build | 23 +-
 configuration/scripts/cice.settings | 14 +-
 .../forapps/ufs/comp_ice.backend.clean | 46 +
 .../forapps/ufs/comp_ice.backend.libcice | 149 +
 configuration/scripts/ice_in | 185 +-
 .../scripts/machines/Macros.cheyenne_intel | 10 +-
 .../scripts/machines/Macros.gaffney_gnu | 11 +-
 .../scripts/machines/Macros.izumi_gnu | 11 +-
 .../scripts/machines/Macros.onyx_gnu | 13 +-
 .../scripts/machines/Macros.perlmutter_gnu | 29 +-
 .../scripts/machines/Macros.travisCI_gnu | 4 +-
 .../scripts/machines/env.badger_intel | 45 +
 configuration/scripts/options/set_nml.alt04 | 2 +-
 configuration/scripts/parse_namelist.sh | 18 +-
 configuration/scripts/parse_settings.sh | 34 +-
 configuration/scripts/tests/QC/cice.t-test.py | 322 +-
 configuration/scripts/tests/base_suite.ts | 85 +-
 configuration/scripts/tests/io_suite.ts | 117 +-
 configuration/scripts/tests/nothread_suite.ts | 40 +-
 .../scripts/tests/report_results.csh | 39 +-
 doc/source/cice_index.rst | 476 ++-
 doc/source/conf.py | 9 +-
 doc/source/developer_guide/dg_dynamics.rst | 120 +-
 doc/source/intro/citing.rst | 2 +-
 doc/source/science_guide/sg_horiztrans.rst | 2 +-
 doc/source/science_guide/sg_tracers.rst | 14 +-
 doc/source/user_guide/ug_case_settings.rst | 383 +--
 doc/source/user_guide/ug_implementation.rst | 1007 ++-----
 doc/source/user_guide/ug_running.rst | 172 +-
 doc/source/user_guide/ug_testing.rst | 329 +--
 77 files changed, 21372 insertions(+), 11927 deletions(-)
 create mode 100644 cicecore/cicedynB/dynamics/ice_dyn_shared.F90
 create mode 100644 cicecore/cicedynB/general/ice_init.F90
 create mode 100644 cicecore/cicedynB/infrastructure/ice_grid.F90
 create mode 100644 cicecore/cicedynB/infrastructure/ice_read_write.F90
 create mode 100644 cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90
 create mode 100644 cicecore/cicedynB/infrastructure/io/io_pio/ice_restart.F90
 create mode 100644 cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90
 create mode 100644 cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90
 delete mode 100644 cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90
 mode change 100644 => 100755 configuration/scripts/cice.settings
 create mode 100755 configuration/scripts/forapps/ufs/comp_ice.backend.clean
 create mode 100755 configuration/scripts/forapps/ufs/comp_ice.backend.libcice
 create mode 100755 configuration/scripts/machines/env.badger_intel
 mode change 100644 => 100755 configuration/scripts/tests/base_suite.ts
 mode change 100644 => 100755 configuration/scripts/tests/io_suite.ts

diff --git a/.gitmodules b/.gitmodules
index f14869a27..8a773d230 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,3 @@
 [submodule "icepack"]
 	path = icepack
-	url = https://github.com/cice-consortium/icepack
+	url = https://github.com/NOAA-EMC/Icepack
diff --git a/LICENSE.pdf b/LICENSE.pdf
index 80ae31d5183808e151adbeca14f21d7b66e4761a..da37344cf0234bb32481f59a2268ef9ae91516cd 100644
GIT binary patch
literal 92397
[base85-encoded binary contents of LICENSE.pdf omitted]
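Note on the .gitmodules hunk above: it repoints the icepack submodule from cice-consortium/icepack to NOAA-EMC/Icepack. The following is not part of the patch itself, just a minimal sketch (standard git submodule commands, assuming an existing clone with this patch applied, run from the repository root) of how such a clone would typically pick up the new URL:

    git submodule sync icepack            # copy the new URL from .gitmodules into the clone's local config
    git submodule update --init icepack   # fetch and check out the icepack commit recorded on this branch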
z?q8Spy{JC;z`D|bz>P~_=D5RR=@26oX&@q7&4#}6kUV(6GtRSf(6PPrslr;+j%QOE zxER^3z9`GDHmrX$%k8Ys8zYcI&3!s6K!arZh-{wBhm6kab>x-4ma$~mTvvlTY%xaf zH6IS{_0Vs!30{2=zMZ+X(7Nd-B+$$}Rc1Bl|liey%D&-JQ~ETy*iMN+5CX zL&F)?{+|H?NpI8UZ-JDx_3#eUZ(`SKM)2v;%f&`m6zKUSgWcrL844e#{>0A_j z_u$*#{_86a5{1Gy3%L3&MI7b~pX~16x7}&Da*TG$lJg!mhtALBxnmuoUpg+x%;eqk zTCOfPoI-z*{zxonER$a{w4ACJ6p_X@r*FqO#5*T)PK9h~2tBP_E)U2|cT{jGU_|D3 zbP?pD<3DK^JRLV9oc()O9E= zFJ`?e*t_Y6;BL3Po-|?ptP*BiB5IN^zMtRA`XelR_LwbcF5t-J5RkbjrF-TX4JORX z0$0s7k!yT$V9&NZMspYttG+}Y*%VWUaVo?`FzJZtoiU85HWw9mkr~-B^ageHKDfrG zy9gi<8d3mQsWL#mW1M6LdEQdY8q}M5;9keH_ku{KApX3wK+WB$jM$s2kNidTHIU)a zvBPYPQ4+hbfRXmZgj{!PQP5^?)COjkWCwcwi*JYkwfC+l<`aGrQk!$#K~jcYYWNJ`3Nxzh)Vb%~;C2YQ(1R(&&4-Srv}1O>2lC#RDJx_B;h@Y(y&Hjl{02z{ zeQYF};(f0hvrnnSEgyT=dM5TD8}Qi~tHTXmY!heWV7JY$P~@4!ZL_6zf<$igouEAj zO9kq}Hq*$f9NEPXdqvrkzIutO8GkiczX88l5d!Pj-AqO*UHQ9>m_;uMoR@WSR_|jT zJOQ4{*>$aWX#0b{%yxnZR)H459e+X32}1a*b+Ve+gwEB2^VhAKbCu}MRfHb#R6~7S z?t*KdXzbqVx=tYoqHRX+`!JmU`T1EDA=jtw(essYwIse-+>IKt!(Vnm)V9*GOs|;x z(Ze_0l=oZWkRYxKiO8nWz^}`5UdNOk;gbnlqe?63^^s?~FnC<%nlzJ*Kb2k$^v)>Z z=!ARMldhL|nNvKNVJ-o|~uCoU7o)u{dV^<<@c*mPdK) z48K~=)Q~z^VJ%W9$X!0~kpl2^IT4%7~`2?snA z&*K9DCK_i0Ym!68{7=t5cOKp6W$UY}W*ItEY__tCnpG@siKYtylTY`_>QH6PK z6?e$5yZt0-r26-hA)bR?lE>WrLS+550n!yRh^__gXd&Y12Mo0*&mb;VQ3&efbiEf$ zZgH(adF+nw;uypkeEM>fGnq=|hRDkGYSXdMGdSDj6blDJ$L(92mIsBnRgC&5(cZqU zyh_RsrZb9c#;%FoM;iLs81#q{K#4DCy&Q30{0jD{r&7INS<|-}K!Tg^gXemLNIJA5 z40V6?J+oXAe*0Otqe~}t4tKtAyF=MLE)Dw9I9{{m;Z6`k8!kWO?^co|jHFw{br$}& zku->M+s)`gPOr+QF373(5%tjgW&h)T=a0HO*@MlXnAGb(JU~WQj6fF7AParpkdnb_{*;lX_0wQth}H6D?>iU!_r z(Bd~?1?J|^I(K*|-}e;hCXFQY@yLPyLcuQ-iN#ItMTQv+`7+)snr%7E(i|8S~QM*bt+M_lwTBH9a%}-Y} zii2On9Sv#RGnW$&9X9oTdtONx-|sn1^|MzS#2%jN@B({%rF{2EnZUwPN%>2B>P4yQ z16ilxn*}miPvM{?nTh@utOrvfpK%q=(26myF@C=AptLd~2Bs(##%J*o*K+5$-&@m` zyeqss#f`hq-G~_*d7rZ~ zO}o%~gmydKuKWFkwtGPVjZ=Lh%n zfQbe1v2Ze=mgiBvgji&C~le))I2jD^Dd$|?JyG0-KxEkA0u zUqXoSSp${e@i~UMpe76u5$WC!<*s~5Y5Q@1BM5xux^IARj-b?%HJ)_~e7y2#@zZjx z(RsZRDCER^KwT(c)U#{n%Iv*2&5p1eKiMttwP~paK}aX=7IHP3mijd*%%nV*<`V-o z8v|mTUXG8o%2fD?*ZX(46N{P?7r0AKSI2TEt)orE?7YM@4F2e=^|TXco;9397jz6D z_3MxAejS~$FXu)MB`Oaa?LIKP%0ASV4lQ`&1al2vvYQ+ex~hkvWi9*MZG#T&F7ykS zLy1Q6MU?P;Coz{AWI#yZ48*6Fku2nAZ)MKbdXw0PWwNZ`e~lxXL_`7N{Wh7+hC_;S*#bzxVgZ|BgFM!X9q6MT$vuotkvkBrFN=4s*BWv_f*of8i z>N3i4n{pY7^bwI4u4%2?@{C~+kvzYIy9TM2+-$HEL38>Aa({aw?H>U<3sy}h#E6)E z{qgp<i?KbnS@+$go-D1!1 zhFFD$3?u2SS^a^Zre;4oAY1bVH4uf7!W|;;xuw)a37Dr)^dPN_&65hX;mYR&*;kVM zt;)Uuu}`W`ZR;X`8t3jx=7mp)mF6Nda1nAKv^sD^?|v0T0V|uPD7DjsyzH0P#qVmGcbidlN01<#Qq&tbqEI+>~@eiyRMFNH?`b#^`!MTqp#`ph}+svc{(m1Qw{3O?D?zLDm>PdX}xiQIX)-PTUxyuEJk` z;F|n!w|pGk^}Z3e5}dLTgn{E)@X{xZOjtItO;>r>c*`0lyEb`H{(==M;Q@h{U0@rt zgsaeW>ugSNKXjXlneb@Y=Q?dX?$>bVBj;>Y6gT2+TG4dkYFicWRNy|;3=q#kwS}J! 
z-CLY3{myotyvAXD(RoR~BM@h8YrM&~Q5JUO3+}tJOq1EFYC6C5Zh}yow?uW=0wPRW z=dAc3wv#BR3raY~H<8j6e>j?rd%A^|cN^rU?( z{AJ+CN}c`3C@uun7$NF0JjAom2BnQe&0V@Gmeg4y^L^h(#`Dl60&fWs`g&7cBWYDs zEAuS9A@_TZaZSKV3r@3c!jadfj(pj{Qr*sR3fh-OE?zV5>UFe) z=E^B`#H!_70PK`-SVGwy{+Xu1LCc>$LOC)BwDOKU*cPFH6Q7M1F6$qcBmo)+e;y72L{T8IY2$!3* z593EL+21E6&Nt<{Gu5=G@eI18GbQ~LilCB$)ibc#+r0i60&KxgfT6O+SmY=)TH9&n z-8^ZOdjoY#tRslpiUNZ1PiYgrUlDh4a%8loicmDr&*@A;Q$_7e!= z;-*T)h#9R5=Pw4!&at9my`o1+)?MdE#Nu}`mQ}-5qbJ%FV)`Gtn!+p*f3 zX!ae%%+9t*cvTgd*Q>Sh>tx>EmUhlV!uXjsmOj~4N{Q~)F8=H-#S2NjQUMvQi*NrtxrWwqnj#C1?{60G_VY54^a*jGK^2i|@XeE8ySSK{=paL5!Y`;8c zZ(iHZ?Ie=Hho)!QXXO2whyr&#plc(At>_2h7iB&Pif=kZpAl6@1160qgg*l@(aDOt zgEkYlVz=}Rm38^VPWq@WP?^6G_bh)QO{$Yg^ru^Hw13!>bWj5OFPJq#PRd3Rh1MtHwJjZI#E|EjrEHPYZC zx}Q0yz08Ael6fW`r=2|vY zP~i8~0L+8d+4e$M7g-UNREs>IwMIgfj4Fd@TX*q@w}ohSGwBmc+MS)ScH@=x6N@%S zvHJRwQw%W;rXSTA&(!tWb@voMxDAx^gptC|Ntd(qW6$S1N^h+jtQRh`3-0|Yvg|V9 zX;5iWsCXW=OvF(J09xKEo)~P-DSp6Ql$iD?HL>>f2QWc1E<^KLHi=Kp1}+E5ooXw6 z3rv|J)+lc7bfKGtdK{1AE{e>4SB>6sDAIQj>;;jI(_C`y)SQ z*JO-Qxr=LMZ5<;kv~|C73k5@4O`Q`G6Lu=ur!`tY5ezz(MqXSrQL{<&CXACrYbCkI z{Deqvvs7uPUR~$2R~UGxHkJe!ET&gcmyyX*-!#RTIr*ZQFkcyN!3?0qk8Vce8qO-o zp4h$N*2!vbShOTIQ?bcl31)AgIWfY2%T&>2IpftKd(>Z@T)@%1$I=1*bcg` zHd}R=H$2)faI7y_6V^sA%dh^hapC|)*$y$HwGQc)=uPZiwQ1|NS1g*TojNnRrl4Jd zKhz@P!T*3kIp7LxjqUniYR}G^7J-XFZTK?@u}+fTJ=xxC4U|Y1OC6IdUXbbmydV~?+Hs~Tmhqw=FYIF zJc$m&5#K}2XU&~WO6OCJ4%D+O6sO3xE@X>VUHnxSMgpUzph9K@U@q&V>4U(jEe`(q%ip^645^^!9LVp}5pr4&t-M2~~5 z|@;S2g()Uh7DEF9<+$`<12GB0b!yrje zC-Q3l#TX;^-rLAwT|r=vFW>W%)O$Sm$~_$3sMLO=4K_{KA%C;p-;bBP6CY({?)4HF z>uMS8bZX+z2rra$x+b^7Jqo2OVbi;r?uUO)A;ln9FIKegA?jV#wC{mkM|J8b*tZ7h zXHkAa+v%tbVn+E{+E}OxTFk?9M`k?xI3{oGenOBwp0N8qsPuSmGWngRZr+ZUJYS_k zpIhZx-OS3wYa`iwWb9M77}{mx{VGO6vD@V0O<21mm9iUAHt1VZ*r0p$yysMtCpTD_ z5(;y31j)ZdgQi1^aRQ%*=kA1GY^`v~a?@8l_TNL3A~Xnm3>P-lj8BdPElPr0z^S(; zD^fW5ki6l16wv~qLnhfXR7g+g?xy$?=tYCI!*3c#-G z@NgN0JSaKn=SlL)x)7Suk?y0FC+I85v#2>FxQJrw*^rmxIWr(3SN#IAxTn7h;*q^Y z^{mtTxS{q^qz9swf00^05Zf;xERvheGk>OUlIA< zO=lT5aCNg;B+ap_p{JpzPiGycRd-Y!Q~KtcrZo<#ynHdV5=8~Sx9yjRa6Du5-;?#lTxvpPDFXvKAMSgdg@bxDs*Vkkd%u6NkEdd*Y6U3hS;fL-N zXY6l$8IntG5G@-~kG#aBKNi=1_c*kvQd*C1m!`25c>2Mx7~Wu4JPWX_L`~d_n{uBH zy)Iqaw9}k-Wm>+lnj@^*=mHMs+^bmRP2>+}a^4L7(J}8#K9#}msgxna=)SIH%m&=u zJ;W4NOP3V>6@4xT3{hscOB*E0Q+~YQ@?psFHKl;HMCJJ9u-y!YWKW1R{j=5ug)qhP z3&s>89OZ0iV`65lx4`G(-JggBN4-foh6(k{{iLU#Dyf zp?=J>zqhgr`ol5US@NjF{b6^XYP$E><D-VIS>=EoR?BK?3NNh z_7OvK6G#q@gxGVa`@;jvo@)g#IT3IBf_U&0aB+%x5)|fmAf@WVb1FQUJJ~K5+9;zX zg^7G!z1EZ-RbOkG@|eQ3N8!%^%b8=ml3wk*l2RxM6>yOL?^S~9-Fx|U@VCl}HC3WY?A@+Zv45XCjy*jx`kk>q`f~3Jxo5A$_%U~dr|!nS z)GwcMNigEV#ezD?Ibt#wHd6t2r(gyAjC@K|pD#JZ&yIUzIug>5wK<^WOesf^oOQ!W z{f|HUGaocIQP+?q(|aupX_<-B(C86u_#D2vh%Nqr_d~BGQpX}5P6><5;HG(Tcj8Ok zClm^xSER)q3+7R-gTWtKWE@QO!18#n7@(eqUYlSL`1-;7-5MhO9@>S+7JptZMP)cU zWhc|H$DnhVpmQg^vV{j(eb)zKKJ}UImjtT2WYtKqPDQpAZ49T~{>%jHtxZ@zxEEya z_nKNkVxpk7nQ}REDGpH<@1H|lB{mjWA+;h8&avhqWWW1xxvF6*$sYIGsR5Z3X~?>t zKFE@1?1%2RR@?Gj8o<7XQExA-t{r+um4(e|YD=!oT+BqW#8; z{-^r|_+RZG$M;|D4bo%zZ{-M0m5$p&HsPg8!P-q1pTk}k1+pV?O&|G ze>G=Eqc@(x5$!GXUd7V$EiE0uq-hN&^M8#RS#NI4O z0L;Q%VsG{#05Ne9ZXq!tE_M+iPF4XvPG(^?aR94`h`2B(K$uyKTZD^Sm|0jXy*Fn6~e{=cw-peLYN($|8vsbU`+rU8YQKef;ig$0il6y&;S4c literal 55036 zcmaI6b8zHg(=HsF8+&5gw#|)=jWN;2wv*i?8*OZRW81cEJM-mv&wI{$>iqFlO;!K8 z`|7LjyK8Eurmvw?k&t3!W#T}foI5+&L0|>2031zi5CjC6HLP9jEC4hLvf`3T>XNhw z%qlL9=5A&dE&v*JTWb^MuR?%5D=QB`+R@SK3n}4f1_Y>CIG9_wSh&y%2_bwv{)dYF ze<;mN0IUeiVu}D}B}W%~6TANbas3ZS%G%ELs|mA|or$Z3goT--xdj5Vf`x;X>kj}I z3k#=^5CG`vVqs#B;F)=0pchZn>g<k)4kjA#tNqLh7UCB2+gd;%)bJq) 
zk~SO9+I*t)aLDp0J=)U41krbX!>*3WXyZg^waXvymNWbODGYkM^uH3D8C$3ZIpUmj zg5IwDJN=*guKc^+_HyPvwm?@^AVb+q2H!ZypxgBU$RY8M2-Tgrz3*dQ%Av@~RGLkq*uY`J}5?rvZYe)v}{HRk?Pxb3>dPawByWKy5B-3d+eTvxcH!{xO7cYDh z#uWFb|FwZH*E&~_Hfyk5f4tl*%S4*KT)LJu!^9&t5K}^~6Zv7X^HL;#|6~0-d!=85 zd=w1!)-j%7F6H8M?&w)~zuLBiY`FX))x3#2hi+=1@Ni2Y585M6+-4lhxHMzro&G*C zH+}|N!qg%__&xsSdCwUcKZ+ISJM`uCX70oaPAYeFKFp6`j!2Gu4f*7chLKxvu1@uP zXcz)3`q$#*Ev@uSF?3EFFS9?Jm!$o*S%3fFtkP$l_>v7VibX#;+ONumn-}#WQ0T9KzIu(?<&YJoDLu%Roji)X^W6 zh#)Q_o)M6XH)l>r>(a}j|JUqn5EWSFtHO`B)T+O2)2E|51UGy~t< z5kv?fV+|WDT|Gke82?_8eBG0pQHACT9^OWU8Kp|23CDsqqH3_e{c1gaX7H3vKIvwa zRrUHrTI874CIUJ?3)(5Mna;O_5TNRGe=5OGuB2U6Dh@OD!KrPP7ZFVin-@MK#EC(~ z)~9WH6#q<2;;*sBw`UH3}xTgt22GE%&VzenS`_!@-xI9ypG4RBR=z zCspnUADEgv%Q+vkRgvwNK%!JS5-UUwHd)NPn|Y!>mI1QGC{mII`OXwDtR$cp25rB{ ze%}@_T_H84uXnQB0f78{)No|m%Zbmjgo-U&K# z*=@vZC`5pZ`{lI3rC|!+6aqmwX$_{$u|L?2;G_FAk9dqRMp=Ug9?^G;`84==`iC}c zl??FE(sJ9kK#_f#?k@7`;{tRH-N&|(li~1KOXc@by|~$fpZ}1mZQXx# z4ac|y9$?A@{Gmu7B1&>(yO;tE<}NvvH+2{Os65(T#4)x}%yklFnF|NqfLjbETttRE zlL+eS#posw>1qxpe3nmr%f%LtMn@1wLb`SC{SDK%9RUM0S#w>5N9g^QQC8#2L#cCk z0aa~0t}C8M7NGI4V$biz8CC(4ftXT$FxUq}1^ShCb z;03EslRC<-`xGPH@4sZWzw(=cx|}6XI4gbA;Gn(62AS}rn8jK~RwO(QPMVlYuas;l zAVN*Fx*j>Sjv4#$1k_%b=^30l8h%5%y$H4`8WqmoiRr&aHI;5pQ^ghQ5mC~=p?&S0 zK@Mnh-xVNwrg(U{313(H{dkXpmRqRb))=?1kbR_S$0(yYta>F6$dU2ON{s~&tHS5O zP-v0u5Qv@WW$yi9E89>ew$7zOad8)*BS#^Wry?`%^PSk8{m9B58D z7Fr>rwEpVQjOkJ;lV%?L1hemU0Bdw`5fJzHk;QxwifosYRDea%Ovb&1{-=(&%C)bR2scFd+$w_wOP4BAxhs=01Z=<4w{+O9xD3u9VcbBQ9KPFqP{3P$pl316TJS z{eo05xdO_Y?21(ysLTi%`Hft6g3QwY%yKDD4XIE`f}8JczjTspQ2v6;damd~etc@|{U z?A%mHe>#XF<64&%9_Z$Kn-#VB`#*+h|P&gnv)PATYZjVXD z&A-IQw+b)vHT7F=?##D5}+Spc6C_%eHkQYWm3Te^`g(-*qoCFE{ODliet0 zbyZI-hp%ltTy7_voi1~XaCNwQfpBcSyN%>FEg)a_RYBeWG?b~noF3#sC*#ELY$ zmTfI~BAR50=3N|}sPQuiN!DjKV}`+FQkux~>HagO`-<;SfUOoj*Y?4JoCf`9(yR{y zj_okrKTr>WqVZ2%@04~DejeI*-wyXQhWOHP4R%@@TclBOC|#7lj{gXJ0R>SzC;QhQ zp$CO^g^J_2KwVop671`6XrBh?b{l4u@(qg=w1}Hi@s7+ttU$GWvpAH8ID2FE@dmUY zcB5{%%UB5#IgWVlpe;K^-Nk7+#S>`zlT|CZ6ePm&^4daP*f?JBkWY-2SHy)}7yqojp zfRS27FOmb;LW%=F91ceoC!TxFQ?(~|j+eTjsOIb%Yq9o}l0}-PW&KPG z(FEKK(?7wu}0Ef8FaS%hh^ z17g8Yo>HvZkY9ABR+N;F!bBWMu7eO=hzWs@Wr>J(j=u z-pv1Tl1zAb_gs%%NwEJD^iEox-&!M+qfgG;A9$qn07dOkw^~X2U4**5Naw-wBIC7k zSUc^Bzg$QYITkuS@40@WEH#@(mJ|H?yg}zdaF6Stfk%6VI;T>{Ku~Ez#+JVo*d?P}nW*f%1#$ZC{$cS(N!l06v(klCAgILG39D5;G-&yZ{NPi+ zU1HjH*|a3<`=sSB`=~36GOtB4mC$uP1}qA$17&fKnQH-4{8)Vt>dtsuPa9>P$?k9_ z<@?LIOeDs$(H0Ybx{kI|{-S1Ph@or;NlDyvKHmb;o#ZKEsdOo$JBfekh2(_~qNz|6 zR?M`@sq;RfhbMYpCbZe4^ISXaj9IKcBnD012Hje!!>CQ%jVwfQas^;)WT>cdHmIFC zebx@h2tOGMt17sBj|#|bgUre06&EBbz}d{&3xEK+lVv2x4TB9eSLS)aVrhj;_){T= zF4tSaiLY_pa`DMcRcK=HHnAW5bMigeb4c`dt>U_Gbp~1Ecq{N$>}>J`s00j++*`j(@_k`RyWqXazs@7S?|+} z?->_vq>Hp%pr4>UCGC3hy9Se7*99(2i4u&$WJbbJ|DItovHXT7=tJ*jw34MeAuWSHS=wEZp_Gk;z(tMW|Md*0(A`y=*rb8f| zKuqzLZg*0hP8Aj@$b+zkgFyV7<-ofg_}C`m@7#b#YjAl^8bf(8wPtjHmm)dvR6Su$ z3^F6WhQbqq8(#i3j44ew1)1njE%DHj-$#=l$@PiS*d(BI;kT*SFwG1VXh(NbVBrK2llsm5Q8$HMhkTLj&SIBVI{ zk+6%5$;Xl~GCecVBwMAiM?f1x6T)zyr*Ys&?q%uEVdp&ZbouSonJdy#ct~RFjae~W z*%$4_)KbWy5sCCg6xiKm=!QAUndpI51WZqNte!Cad(?#m)gN(Aa{zd`FvGCF8!EpD z*JwLW907vTc~{3a(lhDNq(yhw$KUPD&e2+~#)G_8(G6i_CHQTA4Ms>Tr{Wg2dZbz( znCCpefRvL7qE>YyQs8f%P+7YOlK(iz{iPFlRQOZ$&-M7PKA|=IO5Zqj>uAy;uiKwEX?*|{FYK7`FnMLWUSGbi4A7~cTBMg=*?(nTA&UmY7 zQ>8+r72OY^fAH1sC2H1IE)y}Y-@`m2pryD-#Z+AF+V{M+kG6@tyURLY3B{!?hHnW> zm>GSsp;o0T0@gw1Rb2i(PXzh|*26R)0m)@;AiU|eSkq7;cP5bC_ne*DI|C4`zi4kF zF4{EM`0a_D{HR&pev4>sdo(ba>|}~S%nNpaNVen0ge^RRq@&gxqxwi{+;RloO>ZQx zkY)}xXwv*m7c22}&aj9GPbT=vHN|#r+N!Ue@9L2sOC6JcCIJ9_28-K*ud9sBH^eAh 
zwvB5QVerigk!-}T$asSXcKlJ1f;7l#b8sVKVv_wPj^`@Q3^MEB4+Oe6aoa%?X5sxT zF3lfS1bJe3#;R^35~PXLmKH<#X`ZQx07c1ma{klEL*XM1l*RtR9{4~Mu#mRA#rjId zUpBgj3oGM)J2=hQv zYI?lV6sB%szqXgRh{PMit0-!MIT$p&6q#moZM15uOLae{ zm&cUw$?KdX0Y>8J3>}CkBb|Zd^h-yz>$cZaF&!uPE)}5;qK+vxSJ;q+)0ZRt_^$4F z;=#CbOfC|ZePb5vj!pVgbfjvsh>S~t4o{!xm6H^^gc67vDYZP8ipvgmiV3R7S1thA zs~1xV-9%Zg%NDSj2ME=v9wwQ%YEZ}KJD&<#(Ymx7yD2UqPevbsIEU~#4yV5<34Rpj z&q-yIKiQ@Gu|M306g&z1pBZLq0a3ZiKuMa#3@;wxh-Cu2Bc+xVV)%dlr$xD82_46gB2f~DH0)<(c?RKXtzR_tqSkbUtY!rpwyw* z&%1dlZd(p%8$u1X{QgVvL7E&2tHveXv>M4ndha$CVem676z)xaAKUYD$6UJNx*#V( zx67!Op8oa?^`z2%5f#4=GN{}?;^y_OhH;#)>)PIo^SVceXd&Jt7_4kY+UxfUw>r^l zL&$gf$2aUeA&qTy>14({&yloMPbWve1E#9lKDX9JY6+9j+YXCGX7huY3}#8#jW_!8o2HqdkG8>lrXvh%|iQnIIi=0Lbla_jRQ zl4JI5v*-GDDt=lI}a1%u{rte}iC9%?|_o?&y=NOx+ zr8KUsp3Ww@ePeg4$EcrF;dj61)3eSh`|9;;x2ohQUx*Rl?^kDA;*;j@_GybVS6X@< z9{pF=2e4u^j~gRDEOVV8M27BOdxIHss275!F1vWzkCD{|*JWEhcv5uH`)pVFp`LVF z8vg3Zwq~U!-F>!-LhZaHPv5V|gSH|#|6G%KlW&!jBcrwG!P8xlubApj<{GHYDqH9? z^E@b8Sj_6Xcz;5kXRYqQrakrs|F%83kj>45CuzBGdlv2r{)DRhSG$o;rY7|up&ATz zkJ+z8IAY@heAb_d3rkaEZPZH4#B;f(2)XRZI=`}hA| z@U#8T?q1`klLdfT#l-4sWv^xdbaZnuvj76V#Dp*Hz)ao36`=QDvH`P(g{LckS=Rmw zDE8l0{J*WN0Rpq6r>nHO>(~1KKc&*_fd5DcYF}ajvy`KQ>wjWW8~~30#J(b&0M7rF z{ZC%e!ra(okA#ZIC1nB)|0BQzb{r`I)JpXT*xQVNY zouk!%+E{(fI9Yi)IKD3T|09L4bMUfp{NGy0dB;y<4ap{9zcPob zvy037?EB3VimZYRyM*b4j95k*)us?q@m`cX3Ok6Peyj+@n7vN0xZtq>vt%$BCrsbd zl9KIX8YXg{prBEty=|0}tI|_T;9s;T<2#CRgrf z(xH5CeKl-^Jp{`J@5SgN$^*lX-gn3}|FZ_lXrGUpcx4D&9Fq)^@$)pd1Al=(!CB>$ zU;_a<7l{1b;5K%`fd>ORcfTj}`F166L=^G5SF=~g_1;amEnyjP(hFZYhlW*&}KohWM^dBF{EUF56e+ND;`SBJdm-KO$ihjCN2vI=cuIcI680qdBluTIvr8Pq1U?+>La1P*a5Wx_-AhKXQ~ z*b~2l&Jg#JYB>tc!fcctYynE+8@;X74@k`jc)%9JIYb?3!Oqh8NzgLGomc<of_Oj6Cd#3psCfz9h){N^N|t=zF5u#>fem^qs#q}sTX@uvoWr%@%UH1k;v9= zYCbK7lgcc1%Pe+;h4{nLHCdh8V1CH1f|WoFw`xe2d^&dUTdJ9l!0W^@mD!&N-|;~q zOS9Ow+Cjc^Gq#j%&olKb7NiY0*L-!~0*EfKQtsb3?Eh8G7$jsrO*)U63iYf!+;QAG!0=c|z2`Zk5Q+ zx_WB!H}pm-^O>8R(9r;|WayfNx|ePV1psU=K{8ZBz{#2s*pn*Dsb zoARg-MaJL<$uQiopP6U!x#GlzuZWGw03EWCOed9Okjh!SCzN)22~LY`FFVWiyZ*^8&P`zKgf9bF<)w%A;K_ev zV5|1a`!YwhK?mRRJ7wfW*Kl^?q$44LUAvuHeRXadBYSh~9#d4|9cwn=pf@p3ul9kA^)mHwjg*M= zEuYF)>SZc{KM`(_<)iG$XP5JC-ay>+4~0r~y6gN#jCV>SWyb?IJval*_}Nu@-QZ&` z)_X0o8ykP3Z^KA++&oYbU&Gv*-I|{Y{}sXbn$~!yPBUjl?s}t-r@Vb*{k1^+b83elXzk09MU}0XpGGBLEGrP}i>p2#Yul*; z``l|oYsy8Z+u~_>lQ)Ka*)A-{a2 z9s%yjH4p@OK6(-M_D72oz_^ozzis zB==GHlAR!R#ktnOL^f1=3>3EzX2tZy02q#M+ z89`#Rhe33efRA1}<_B%_=+@?b`_w?})T=k+qhy9S{{TX4`bHsSd{US=J zN51-NEEiGwlV))Pp=}t9Enu~j$Z9O^_q^x09&AP}$dE?_UlwsiJ{yN?Z0}>Cv?7J` zfmhcBdOC)D9d62uq*#V<%otTnM4RGn7<)v1Qa{|0hln@?@)|pC&n)xFaobxhV2b)) z_a7Z6>x1VhO`IgOVM7qJxV*iPZu<8=N0l<>?&xdOTMSLJk&n96{jmdTb&u`!!@P|Z zIJ262ZM!$@6gAZQojaz^!pP@qMwmqTkppvV9qCId-+kX(Ax#zGivlA!S(BXW#0oJc zWla-#E&*CkRe7pBMqbpaft60kttV$sctc99B>AZ8*v*00t=XOi;vDH?zt>u)7xtyrsfjyV<|%bZ|((A-5MY{ zJ)rerH$2(ivEtk??0TO3Pz*zb`;4~fQ^3%K<9?z%`cc|!DDJ!aT$1aJ-0f4|(!GTE zj1l8^Q^#vf+vrM%&*P9+#oqmq>-o?jq z=H=OS8rl_J?wEB|-eblCIvNc-D#Sfky+!%C<2vX%?m9ST6v2X3M~q99OWr-$uIrp} zMRY~krMg{INUu}$GpM(cMm^^HxaNAfr<}8bvu>#h7i}@!cbYBLA`KJsa^YR?tCr(u zLiKZM!~>nW!h5tT#PDfSBgMhJ7}{eVlWeE9Yw8yT7q#m|RGDTL=@znw2$mQ>=5cC_ zV;}{<^FXs#p#@9L9PuM0J1=Lf!kkc0>DIi9OnL4_j7amk9<;_*eh%}(Dxlj1tLaL) zB5(A~0nI_?1H5dXV3_m;<`w(dJiHVdWv9$g@v2A8ahCTU`^WEJBmP0WJXgxYsk=U? 
zg|;$**Ku}`gjhlMIr|i?1Uh@jY;xFnT?N0)5!o*Bb^|dAF!+WI0EvS8%mt`$KlMFh zVv6`uw31FoL>fpp(cbroM1!f>iVj=n<^CMY@)>$cIg8LxL+R`)*JQ#`#Qg(4&c$2J zr8`l_0*W~S8SRR4-QZ7vt(;T;FxIB`zwg-d_2Hpn0;Pkwzb^Zdw}^&0q@5KvW2kpG zN`h?xN>`y+rA%yDISp2Sz3vlR(a3ep<@w|6u`Y4A@I{z1gF5`;-$D3we&jC%`ied+ zb&q1%F$e2(wj!1`;-4MRIVaOmwXi!V11K+REMd#w=AEC&B?lxu%+6CUm~S3Cy!YRS z?NG-Moacqi50xrzj6rY-*R6Pu=m_-Amx z(aYO;5@bUvj*B}8pdhs~lU+ocZGvWK#`xoff;J2!2 zJllvqe(`%GbX$-cbx#TW(S4QVdAQXJ%wG5DOq z`Jhq@iRFJr!dMm`lHLhHX8dy5&t|$F=FY#|B+HQ9Sq8i? z%U{kTbhUw0Dpl$;mAYtsOqY3n)?60S5?bn%ozHR1nLub_{=OHk+@_mtZB#x$<;G`c zWHx`o!FR4$jOBK;HWV*bXu7oHmC2jQ!aGWW3>m#LEbUe0%iB5d@@R1q_rNEp6fIf} z)}b{|z)~=PS#OwQZB(dgtAW7epDtQC6Cf5Edn}5q746yS%XPNg2@zhdh^m+9uw~4G z`@om43NDqk@?#zkSvN)pX;9(K#?Gj6{s-n?)d>7b7$ce71>9QS@GPTZo7p}URz)=$ zw`UXQETx`wwiT`yt7pJ2*BKw375P14G4ZXQA#cUTO|KHCq5At7G962aUAr$M zS5oN3n@gsC03N4Fl1i!Mdrb>X(E$=y`9iiu(^{i(z4_XOW!r*X?F#VxQQ-{+og{Xq z1V+`K+tfnCDt`t=>e|}?wAwk6lC6_WJJ-2k2{cb9LSbC4cOgWn@3U-Ly)bMepX*F) zj_(6cULAocUMN+}Ba(oFb@2?mw>@3vmbQrB7*~(kr zT{#Y1V6tY&?5X9VicqJQd=gn5Rl94dXVnCu=5;zz1H8gNK*phJ%?dOKYx5|5c>Kb;Mz`G+DL` z%@9dsiWFJ;Baws8I8!sa{-BT6iXG=a1N3#jav1EdFGc12P}x zr~~nd`-dR5ugo(r27i}7VBk+Qi4bFJI+m`XnwqM=7Lkl5li`me;6Zoy^%1Q}B2@*8)2{$v%aK&^_FbtJkG)API zOiGL#g=u8qfJJ0~ngrRLUkezCho2C!z{SFgHt(9%)Ksi3iqR z_&{{DfsdQ`MFMGKu$-+e@UO%4L1SZKqqrQs?HSeLg5~NI^Wc!4#(5Cg5{uRffeRMS z!mg;rYl^&1VEeu8t}ZZ$y$wp3!B8zJ>+{x8!hR!y6jD0ox;LVSkz5K1UzPjEa0I%# zRLpRLk#`9r@9Zee#Y{$VLc7%K40n*r}KG1zmtdJn-{UJ#s?@8rAW!*wqoe-i zkGZ7a!yCsT4%R$g%=JddLW+qnbY*ol(~hcAMYF!jmgc3cOc@!Q`)&=`9KKk z-RsqF(C+B1?9D2%HQd48Sy~4%zB1kW2cLLW;Sdk<+;Lm(qW+z_y_zc63U?-WeX@M_ zX4#1S+<2!NrC{xcayWp5#BRI+b7&)RnpCo10m(k#COgbwxO+p zKMghNbUJ>i$vzMZ7=N>Kx2$F_Hs5$j)9u4V_Z1)HuB&7Byo@nyj4PZxb~`BWWpI;> z6-|R#`kS8+iYxX#tco~GA#D*O>Zb={YVR+)1f)$(*IzYqO>#`pOBi=RF?fz#gB|F4 z6)1RygsZG10OZ;{(J50Spv?EQMOTfx^x!U3vzJfw8i6%e7nfWCec!jVLvXZA?Y>|q zs%-XQRdKbzz4|dgD_i4SlTd|FD^p|GH>yQUje~X-lPHR5z-xdi)T_g^BdN)nzrjFP zbpEx$xw621cCBii%;eEQvgUrgl(HwK$W2d!mNNG0Rt%5P0 zNukrRy=lZ|UNI-N^Wa|i(llhR)10{&o~tIVC7cmfcf0BDT&@V&{z<948Pv7^?i=Qb zS2C27Ud+|M{y3ssAmOGMi7CR8F`*u$-m89FWcPalbrN-IDx*t>i;9OzfXZ>)Qpc@^ zPe1E>hYmL4U~g^vvSm8=F|zkvdxpiClrZOO>&%<@i9%Mt3x>`1e=`AwE1`gc%*rKziNs40-cb9cWLmQs7CQbP*N$yQh2}s=mU({2-n; z9?T;ANNSSm>W~sHlFBj4A6h)$h81jd(O{u4a%oR7%S}000&OpW=Bh#AMxX?vt&=6x zn|iO67WW5^==|TLtr>eXaji*<<^C(>ImZjA_oue^=8!#`xxe$n^D{_ayxMAsr&PR$ zmsH{z6SE~co8uv#WXh;jgAM(aO?kYmyuJ}kztvLT*j};k!}d0F+}bN2M>G%0DJ3Sd zVS}s&L9lr6deg#@pA7dTp-}MoptbCNw~BXiZIE9K2nK>q>(k`N?gLj+qQFP4!q?Ib zq1dz<^&XR{&+sZ-Z1YS|FjSi5VM#F5cBU=A*ooNd%&XTf0OR4JIn zf*ddF#2Knd0QF<%WE5F2v_ebp>~H-10~G9E9Fu&XFsR%}kE09t+tl$ufFl_}kk7OO z9B@k{#^>4#&dF#2qa8(0)ee>9#xHfb&ycW_bTAuvQ#q&SDI;K}D=AS>uif#&bZ%j9 z!)RH$m%qai=E>6t;UN)TaI+)Smu}p%swO^(Xw#l&M3zB*MCnRBw7y*#=I7-h;4rU* zn<}au6^i=j6{1=Dkhj4OyB)3*W#}AoxUyA5Lu>-OLjRU9yi_xA@HH*aA;&D|= z`g#q^UnOiMu5`#D?Kj(im z5Ro}AT3a7Ca-OqiWGrB9^}y#)dZ6qgioVKiwNq0gu5b&mmu<{n6`0rHt@zv$(Syl9 z5{^3#C6QLTy&SBa7}+{#ppe%F{F<9hBb@Ba+LGiyc@KLvXbAqvIMFujvby5LD^ERk zrZ9^Dp#n0fZcF84J9Ix+j1VD|MT{9rL<>Z`|Ri_txN_EEsFi413nx(-7u7 zu~sC@e(Jqi_!wsF?JIwkUw(?ud}@2q+Igz=kuj)MJ9w&X6wmdUaW3`ygXXL}@nK>3 z)l0)GhFIe)?KMKoGrww>z}`MGnQFXnB#infX7Jyls#GW^_N^)<=sYM?Wijj~X*{5oXvFON- z5N+s2;!oiJn5RD?l$cWBDBEuatSjwx;1Khn!clQ``i)Io(IZU*YEBFrSn#0_(}MD$ zYJQ~XS3HSeGr?pcV8VH6M-(%eFJQBS&ciJVP*)(}LVWjwL}(m*QC_~Uc3gMR4P4%< zef)(%&I`-OZYX6xXF^n+ngR2YAm6qA9Q!wCy!!(NeJ1D`!#ogv2EkH-z?^gy-5&~8 z8$HWd+SS=LnEV0KJQ$HRJRf#F!f;sXp42a%OAUoATN6EaTO5hh57y+G{7?*xwU-rD ztkm&_16ztAK);_x0u7Or`2iVIRLGPS6EMg#Ncx8l8uBk%EmRbmJnwH&h6iPua2`|W zp+FWeXS9@p&M-L|$%-JC{vJXMl|UY-8xj^w7&5IsfCPLQnk|~L7$cHYMKF}9Ad;w^ 
z$U8q$uucCCfHZEK=bMe1e_ux(b}jlTnzXp!F6DFrA9!jnOFt_MVQu7|1q;~&mOe^8 z+K>w&Y;44yiR-C+8dxeoX@42X3{CGZyz@8x->(pUJG6~YC|RnG17Ly z@DUd*=+1PztZ6V45pyQrc9EW;cNO}lW}-4;cnEaSkDM|Q)zxx(u%^R!P*>5b_dT%` z6p2D#SdeQWi^$!3k@}%o*xjJ8dl~vA332-D`YBmpZ*0DHXS56lEl7u?OyTtC)q&+| z{-Ice-^E~e8>b^%5Nhb5gxsb?7_4kJCm=+ec{ACaTm!yVf;d$;noA6 z_4-kb+28SRR9i4^SX&5goco`4;9FpCJe-(!v0G5*nq7Rp5qGC-^#w;LJ1g{xN30o( zp91c?rTPt*Lt2Q4rPjf_WIJH_chLKbc^`Wm(LH+dOFc}uhXk*2ciFFbc1gA)=W}*+ z;--4A`#Wl|Zhm^QJ)pFA-w1C%MKFtfyP@oF-mzE?dzL zsP3DKa1e=CYtmhA!miMGw65TI_3G{?C%7BfIN@HC{+#XPd6FHDA@*xpQ(;0;g`S!DF;L$L zXoB}ce)7b~MF|uHe8tI}>)Ouzgg(08%bbe`DhW^We(?N1j@ehnSxI=1_XGFneu}r) z{|f`0JjnqnfntEKR+)3cdzqhjUn!nql7mdY6yU4s?cngzb=OHg>e4g#y=YPs`mx_ z>QDWZ{-XLn0lxs?EBIIcuhVU3cIE(I{U%S|c#E%&GC!$~?zz7>M|eLJzlvOx{DXn7 z1Yay)W^DeWA!i9*uR`c zxa1zEtwh1)UiDFjEqw1+E+BrSFw_JGs23&(hreJ#C8655K)+D(ckUW-L?BPF!^?bx_g0p7A&gaS!a3WbfI4BnrFR-~>IqhU;x%&iE5>m3 zExqy%-eJ4M85ol>I@NX*{fQ^{fJF-nsRSEji`MJGmp8xSrjhKMY@!uP{5srXSh&sh zcPp7vBuAywpn7Q-2{*xxDRrZuRqc`Fk-kSlfxET->VC@LGD51$UhwV$KDm|cH%y68 za)~8meA49N_`)RwnS{&O7@eHF-6BKbHV#vSN@=eYES5Aqv4oJ{ zyjcsHnvjFaXP|_AM&^_<7}SXMO_0zVm#IF8=AW1!|7DP-+O@-+8I@PPLuI7RrDn#q zVeh9`sMJRb>2$%@kEROeF7xCyuU!Q)(%tdw5+POqN@c+>Z6JATgMLV$R%noJ{W-DR z_jPu&kKls+HCrzIQ;%5P6=7ikufuAYC;ozkLZrT!f8Vk1kQ6&C}BqjQCHr?AN)4O7!wPWCSggkS#Ro@P zqzc=*;{~15vRUvxY^C(Z*pcJ!rR3&9b&1${YFsi1m4MR1GM7dT10hL8uCg38^#?`1 zBGKLEEW|ujt|0ImmMiiNlE5?#b$xyHJBXp(wEorq4>v%_zZFtP=un(@0WO!+Am8Vm zRe{spCqyJY6|yL0zjUTVi@NNUvV1~ZXcIbwO~PS8A?zVf(tWZ%GJo44*6WE=?6nis ziPiGr=t}WLrm`eR=+J&^6Zukp`;Qakzhi&2U{vau@U(n3T45pB-P5k}^$|snTyABa zUhnE7M#-#pL9UcbrQEjMO}U42Ww|DnY?y)@@G9KIHh~efY*+8$i-z?!$GzHr@@FE&(mRxI)q$3vC*RrTc~%IJn;WX$C?8(->w>Z|^pcO8BH zyZ&R_?rLAKl~f*kU}4-pWNiO~D5H}|gbJK78{8vhNsczhCdXli3>=b!EehoM$WS-R>jHBt8orIJpRyVg0$M(0yb>f9}q;>6w7 zOQ_D%M{bk!p!n+;%M{Ce%PNb^(npk%h5grzF7QddE}zim+vKA@=N|F{t}{K6q^v1) zK3zrE(dX!XdWim=s;JYuo7{e0pN}Nc8r^ycE-R5OtP^KiJ8>)M+@@gv>EqMu)Z{t! zel>;G&X(vAMl@V9=E_Pcri*M}>AcN}`7MUTYEi!OMFTEL(z&1&CC0uei4wtI4p3f( zqEaY>6`37Gp`=QGNkzp#Vat@$hel|71~|=YVo1qq40;rUj&Ya zbt6^zedN~OygU*5=#kz7aT4zn%615H(MD{2WaEw;DHm?u|blqmxKpg-gf z`(+Aw*pQp6L2`-7il!2e5kr1$WGd09ixu&y#H-I?Y$GNzUu+dzhabMi7C>h-YN_Cy zgL1NQVfiy#i5(XoW1n))J-NXkTZc@t%Gp)hZhT~7?e3Lb9e4D9xprF2?{t}#+QS7i z?+LoR(Yr78joCQjy0!;q%SNreYwnmycWxN4^P1#!kJm?X3RUuIg?7WdF^z+A@@hPq zU*0%o)~ZLDRki}9+l|t7;yJdIESGk-;1*r#EiH)5FI^t!(st>(TwU(#!(F-ErB66E zyB-Vo>bAR{&fODvN%NBSJ-tl{8luosmpWq8+g)LO*w9FBB{%4AGCTo>!B9mSp^=Qt zn?imOnOr&-=8`#dR_@%$?9yw*QU^F4gs>>&@%3*4gfr-L1OQbhr5d z+oQQpMxHF~lkHR;*M6ZpZa5w}UXriWt0PrVNd}e3>s6q0MPwXAJKud2@*?J0^f@(Z zVniO5xQk<0Br_!u%B6BDmA92|DnDENv&7R)vxp5d~_6A$p)-0RdqF`P|KgimLTVH?T zx3A3GlB^v4!M5k;O;|<-ER~kboY_@Apki#(ZCB2_K6g0Xa^t26H$K<1VDyG7)?7NX zbKSmW?F%Pudw1T-F>|h7GNx>HtpD?2o7=8?VEM!mm2+`lzbth`cpP=Y4iTc0O0S8$ zFMluaend7~woJZKwOoC*ZmE8mR5Pw821YBF!31V1PNff-41%kZk&A=F+L4u(d;t|<~}k*GDE(U-<2;& z`EB`|@(<_B^2u(Lq92P%n&&iBitt8$H)u?6^49T4_XT*gn&AnmebjxCrRiFT;nCOFy8o^tfF zD%LwUp}8Z0QYV?UM#+liW7&G^CM&f*L&D&IKcSV+ZR%5ex5-%4QF4I+X}Qpt^fMC@ zU16-Imia<5ZB-W6oUzoVL$%D>yB)3SK4BYJu)NydKGcmaS3kMRT^oEmj%*UVoQg@W0}AhL~}Sf@9{j$ZgFtILerKgv*JG zigaEyVBtTpxrr+?_+S3q_!hQF>9I>c{`jwEIQxrpAWz~6F&n(pv5m4_Rx%T>hk9}# z+mj7BUgF87SJ)}w&j4ggLJXq8K{g@1+i+TbZ;DZBNk&C zC!RR*x35Sn{SjBLe(^;S-GI1Ta;pqRqh8c_)ZQk)!fLdLE|bgc&UWM|{Orfc;d18n z#+%EyA1&s7PkuVtmzz#@dD6*tp4?;Qo^+3BDbpLZ*m$LJlyR6i(lf^2Vw@-edzOe@vekxe<7#oWd97!S_W|Pr;(ewEJiCm$#6P%pdG;A!6JN=B&GVu0p!lWn zn0UcK;Z$D6YmAU$z?WQU|viAKR{v#C^SRdyfwQBoVR4&7{! 
zOno%oJH|siee^5I;FAnZhLZ-t@R;w4Zm!)<7iy2gB_cO7n@Yq7@l$D2~eU z;Ig{aQ0$1VMvE!x0PzG7_x>xc7FWKQP$uvn)4JC4Kf7Ac+CuzFPP%9e<7r?ZDJ5yk zm@A`}3QwH*=j6cPsr}=}J4=U>zXZv_%GR-Gj$c-pchzTKk(b{c6Y<8B;jqyW|FvxL zPxr35Odbx)iv5LCh@OVde8l$O0D$Z>`DpM!6b8|iQhX9j@~nk5p0%a-xgLsab#09t zcYP80Jf<52%OlH5?=QKpbaUu;r3YOHBM0*|vZ_A%d9QI+MHSPE>_8dwrBAK)vQo)k zh~V^;l}N!n1b0qZeW*UX*7ZJlH}pa2(Xdh$B4K@rC|DJ4SC+>XvgKLh#U%})QDqZJ zvvX49ZfX)itQt=yh1#k*s=BH+RjFKYS4k5HqS6)e_yc-oayKnZaEA6U!E%rg|%g~P@XN$_{ zb*udW(d)MPgQX!5WQ9;3^@fB%{-DBAp*WCNUaq#~=h^KxTAar;WqmHmWolnl&b?aZ zb@f))l(A@U-7xM;S(rY0N;c7W;@LENye#S+R5*ZThz;dtNw!Hw0pYMrkg-IK-c$xM z;*$|P2)`wT3l$a%9bc%!V-})bj~GB*Hi~q-(Z{034V6{Tkhj1O?ZknM8@=>Y6wg;D zP92peMIfC?f+(g@`-9l*Hx5r4Y^ul!B*3C@;lB z2%4XX3pTQ}6-rt=$$|w7U_s|OV+7b?B5igkg8uT-l7SWc^oRDRpV_3C9=5T)Ep3Y^ zO=%m1?|Mq1=eA+(U5AdI=_(x`w&z4HDW#)+JMHcbE6!XKo>FOds8m*8f63L4+kTqweq`yGJI1m9 zdY8bc)F*Nya&?GMsaEZYkr<5$G4I{R`#g^rA2IJVK5f>jJUDu?Qn<#t)OMTDZF@+# z+qG5LBdB$PK}K^%2rYspdHy#A!JE)tG-OwjA?)2R+&&w7_A3e0g6itdm>=SBs ztkZAQQ+=NhlVVwFx)lf^CF0hnOvG!dHc^vHLIqSOd=6rCcpcQi)x$9|JbfD9*P^Wp z_$|h_3p&ww>}2~Jo-Iy&R(;~DQ%E_?o_v*y*JpJrba(l)dHk4*l%m)gP=}a^l__ zUt8v#*?)3R|5LlVcarKq{CZu1*`4Lm%CG1z-M@2f|2v2J`oCV^`FPfj$A8>?dLJ3L zXN1k-jKh`Z!6WG5TiUV_x3b!&5O7Vi@OEXtRNOR`pr_ge0=zUqE8=N(byFqyM* zJc80nR=d`CXr4;pb%Q^k^t$!_pxy8E<{1on>ddp*K$V>sVBpty`!NWlz=(qFLXy>cZ&Zpasrhrbw)u`Z!3KvUMYzVYxWWd(q z8IEWhE{J~1g>rt$Ri0g9*)EUKDu%OiJ;v+_#AQXuF?lBtx5YW(?DVs98?Knv&eDq) zhEHa;ig4cCio2`I65+_5R-}-$2;ty9%I7UkP-#j^KVR)c_p)hZi zuQ1%*|LoiSN1uIXYBm{0?8HgxbC|9d;C`?jXS|eDNYzsLtn90^ABaEU*c#sxKU}Vw z;OtOzC|9Xgsk;iuv_Eo6VRN(?ibQr`rf=g`Q5Z#q+MMG)mYtoWQg2nM6kDs=DH@cba*R^Iwmy@Z zcw0+~w-!ddMKP>8&$ZQu7R@2I;2zsl-hqcIp&UdmDv?V?L7*-8o)MamT6u)$!ye0=}AV&0(7 z7S4_jt;t*b)0gph)Ms~v#>Hh;W5ilok~c$6&m0RDFY3>mmL1INubC9F`(i^^_HPZ_ zMQNJQd7URO-2dJcO;#h5s2?TD{(@BzDcF`5>mwd%V0e0kTBg<{W5T`B-O-n$?+b57 zkIRm0evhg3lFZ?WIb4!C8;bvB(VfCImq076lb^{@ zdVgy@Cp>;A<7cvHMtWz?`Y1wVINUvniHQt_SWJUK6P~J&*h;Mz>`Q0mhxz1@@5YbO zhr=Y&Q2(7?<1360II}xGF4v*gcu^^Yf9Qj*h8c5FXkRwY?=K%SD%?L|mfvZ1gu?@T z%Y}LAaR0kgTJo5@M&NGqJKSx`NUJnXBO6vsoe@_a6&<3J`YNP~wu+^y4o8P`X~BBO zdS}v+bZU!YOSG%Cf}^6?)l|_@ajWb}+2IPAPPkdSw?Y`9Lb*8p889=c1gqf|HG{zALM5m4k63WmIZnnb-G9h-R%pJb1MO|mCoGBugko2k(}#!MM^t}>_0 zGP8_H!;A!aWIHF&KKi|+)g+p7i80rkOVJQIDT%C^T#@DHjx3+voskt1BCfMol#7Z> z?Fc_EGAW5?cd86amieMeQ5DXMO5cKp#X9eTKoHjwp(V zHf$)$9H5zH&ac|}78d1el$joEBSR_rn2DX<$xO_gDEC|0DbH#`JA`j*xNzBNFMTC1 zJC0%>x9r*eGpjrA{%2R?tu@}7%cgvma@Nk)B_=aq$q6vc(HBRdf1ibESf1u3kF7^i+yzZg? z0QI^x{-DL{cKd_4aTNK3bNoydfMYZz^jmqh%Y0wSVM@ULu}az`n8-I~Q;= z;9uTmPHFh0so<yi3`JlLZNN!HL+(9tHey%!xDX-k=Z#PY^PfxW`DrxbrXM3?RA^{L9+>oTIFz3 zrZr9#(-NnQRdoi`9jY$XVU>_lk+`Z!)us}rsP?K3s05Xa6;p8yRQ094-_Gh{pkK=2 z20**7!{6mU>=)wxCV!h>*y}&wr%Wi9q9iy?CdJP zCzVfW_#!@K;nR2WFqmz6KCcuuohueuV5 zS=LaD(^Wyo(;2jju9eo)`=nm|cEj${-F1J~zgMC+Yl&c>3MwzT1y+}AfURVc;jI#t z7Oixka-CQ0(HB6N#MIU5G3s04<+AtT>oTKS>(s_cIW3iHrKW~Q$s=^Lw3F`CBx~P* zkKh1#hrTO(315>%tRi;jV8zcE+#(*c`X{fh? 
zHBJq25F!qn)9p|=RQb8?$lwTNs($7r4(v47$uG8IXA*9yQlVG66>LZF2V!1#oU(^%ZkU#fauF=vl5~>3Nl&}SG1SY~75`sdkD-|!4c z@MEy^>@oPP8Gr`B0`_GWTk#_9F4bZ>cqSp9f6@`0q1G7A6}zrQ$F6YL4Gz1&K|gbS zgS*N3o109e1<3hjU{d>U7s50a8w z1d;f_N2I_+I*VRSyM6chp78DR=}4fD+%A}31Kp+}0gcx!`hz~N z58@z!0H0@@M4BC>f(n=2JlaQJlw!7jcQ{k4HC$CRJXOP0MYF-*-g@3pOxql%PVuiq zajhI_Wrs4h7c9WNfY^nfT|-!NGur{T=PP7)`xpK6S?PqZl^?fe&YS2HbtN}Ud+^%X zl)C!XVY1!$mW4+d_;>Bq$QhbfG5+|=p$BYhQ0F~X~)aM!VjRmr} z(maF=iM2T9li7|dW6K=(ko#l%93ME2kuM#3y#o*Ciuka&P~oVE4|fQ*c*Kz#7ZeV8 z+-?`5kdLv!P-U-llsn7g)g@y}X2Wt=;#lTf6z_($j+^55!9DRO;L-S|l4Qy2_E#Ny zOFpu{?>JC$!v2Nh3+LgIZ{bJ#ck%EDGSWUQHi@*@C&cF3mpWf|ycB=e@oxO6<7nKF zzA^K8-7bHin6n7(=_-FPeTU}f48lxGAX(sW0&zMVj6R0OV_9*BJsxw!NDRkrcR8JQ zs%C$qD;|&Jsp7xDeasmv4)}cjP5z{x?Hh;v3jYSFgp?4%2+=NG_$zVn?3smrgF6^b1AFBCCvhk^Lsz;MxD5VTiJqj$HXjMHA#b5 z>2R1T9iq7sRE|n}U+TaPd!;>|Rhj55In6MVQSc4lvUV$gQx8`)2O2Lq?J^Yb?b_{EC2A)nuTR+mfp#K-d!SpEKZ zfb0#R<;pEpeq0E!gM)_Ku+F^p-<(~w$mO_h&)unD0){_hwq12v)+bJ^PKz1vh*!x) ztP~UMXwK+4Pi=OZ^GP*Sn#aH>^Awn5o)2@)%gqmvC&(VM!@Q6DNX&nygq;#v0MCJ) zb?mVz#J?3i4u6L2m@wjtrCn}YcUCDwy8PCnVoz6P3a|X*tpH>Qnv#RpbT3P++Kb%vXry2?^?EhB6&Yer~+^GXMY?B7u zV}4xugysoRb~Rb1Tus&}Wpyfj9thSvh1!u|f6<15APVea2x~j{4IZ_VI zF(uev!J+A|-$rIHDziz9ny=zDTsUefM ztYP<%>|2s3x33Gy(p^xDulnn%@>ua=$0GNl>=k((#dl>ZmpPse?auqq{bBY8A%!y{ z7U$(whASh3^Ww#mB6A`g#a+ePmw~vl^RpYX-*bNGmOq|HUJJc%{~+{!{AmsQ83v%+RgJXg;7*tvd@@zIMRVh_waEY$COLBF%Z`i?s=lj_>SOwx>F$~7$&jug_eBPB%_IyN zB_Jr8nE@gJVn#VafHfncT&sx(x4Q`;#{^^)Z$^U&{ zbx(r3`};Ua)qC~oRdpTT`zl?EVDgigXNpJEdH5AZDMWO3DrW>IjA31d~IjE=0_Fp1IF7NuSh9yf-5;ROA~x5Aw?hHBOnROL*SKbCd|+MQ^ZdVk?b^rUt)_07WBq=!{+MRzA}O+T1? z7=4|7B)PxveBo#1KNQXuY%=9V0b0&rENky6_LQ$oUR@H~csdk9o@me!i&I)ANMYsL z9!q$^(vi^Vyp~MHY2?O=b@CguN^@=5hlw%V2bar9Tt7F=F%#TAj^?P~H$v4(^f}$p zkqL)Gw8L)4Y9uccxB7tLR$sKbM#c8W=+#&#qGN}pPNa8^be`;FI;%XvY@R#^o?tc~ z_j(9sd&r=NV7BMWwGSO5>d^j9;y0_8|lV0&2syf3sz5>c&?C{<8<1h0zme1%FR%W6jjQ7y>&^rz97;ZGv` zRU=`2SPwdV&|S}fp5!JB6L(J^mg;56fmtLnI3pIfa=BOx5tRSGkpvF(Ce+a+3YaFC zubTbhY^|c&B2p-|vPOsl5#G`7e|BQe*PcYm@SR)VzrYm|zWu`9TY4_1Z=?~L-S$Be z(Rlp&9h0fqn{FSl()XgrKC^SL3+xNyXMbffV>;1IZ_xen7xKt~9JGa@9HtDFHRo2N z)wJO3nM8~Albzk20VZhLtZepg4r~sxW}DegwN3SyuCrWcyUxDNF%lVxj+92qyZPHK zyKKAcw>oy^9y2{wkz`xNR<+f_m2fp&12<&Bq(;IEw^|^2qMd@HAS_^irRo42~1_nDWtz25Yw03Fd#x6T!v1DDAU^Zb< zd*)@!J!A5iYj^T~?*7sP<;O}>nQyhdn49W3-Q&K9?+#L1>EM3!GNRE=)bwKNw%UX3 zpha%Cs|y40tz@9*I{Gih@%e zC3LCeeBMdF=0vJ;;!wuC#qbx;zK=0)%-iHA)dJ#A;C-byL|>~@ z$LG3w(z9Rx{L0t=<(lo?zCcXG5>al~zRPxAJioGH$CV$y_u>a%6U>cB3H|#$pBuh) zW5<>G$ak*0d+j~{QS(Pj0QvK=Y;=IgMt{_MHlmI6#_-0-$I!>)8igZX(COm>P|pOXB&aAYIr!KlJ*B$@l%sT?y&*gB>H4Ytm|srP89l%Y zgw6?sqzQ!132~n{nu`E^w8IfpM2c*V?2DKp&(Imld-fgO0#wjT=${v#{b_Z}km2k* zGXw-Lf(fjp1}8XZaa8dG&YhBao6aM6Gn8(r;ia|f4I=7*$rwrB1OGW_eTEMqWw?mW zqeH|H!zEm%uQ*Z`SM(!mpT`n;Y35rXo_}p~rn;OQa-(H*S>3Z)>OB6R*?XGJs zMf2gQTF$<1ehNR;BD&=mRmN&ZTd7(m5m-sm>TA8BYKHA$SF+n3CM}_*I}#n~MTte} zhtpg(T~E{f~wsh}Z@i_|EfEIMa{W(nU(VP5kA(=@@qCI@2v9lc)Z&v`DM{^2j*D65Mg z9HqK8=?|~mbJcA>UWGZ1Ijw06qN6jv_~Ea<^5AC%|4hp}F4DAGa%|?Q)ju3te%;}p z(b`TGzR@aUt?XX`yJ-24=wPF?;TbyQMG?t1$^81LJxY5yI~F9sPt%ZQPM(}XB?$Dh z%2Et@c}x9qFCx-}VubKjYDNfOzC>F67Bx}*FZsRb=@|2j@0jvz;6UuJocVG8Hv-=@ zA7PJi<|oXLvX665cpmkdAL1q)6Y_)JiJ19n&lcZxrtRW*%)HUN!Pg(Vg1wqEZ{h~| zP2y$tL62FF^;2t^4dykhS&daq-JZqNGP_x0vs{MHcrsoymLOy0*l_G*%-l3=hbVhY z6}FQB8%qObmIVN=d3}&v5_MoPi=sJJc+ZD?zb$)p?}YcX*W`V(?9n~_ zo&%oK9<%Bh_KbMOJtog2{q_+x_Qe=P{$b)V- zlt4g)(ij*aINOJi!3fed^S>_DYT4PeHfu^t{$=y&w#&MUXb|b8o<(M>d8KBHwO{ex zr%a#Q=#D1LnkE#J9Up!FH;nVTLbztZL;)yK@a(U+9T)=ZOvHe_BLeEqrrb^pePH0A z({xBE2Tc~4w(u0V3w`wl0mL~_^b4$wv-0E%1ZB3)$#%t6x8fxTfXEBgAjHq%HiTRK 
zP2A}eYNh4EplLvORA5tVj?Y^%){H9?$OhXo>Gn>x9;lX=unV~rmZib9>;N~w4~hfU zfxtj{ZTr>i7Va9$Re`I5AFFILZDY4_+r%3zH(75A+z{Lmx`MxYTqrGx`Wq5wnjfuFkQi2!CxgXD}Ebfe&-L>g zEsTkyL@Z{7yrs5KHrmeBEiHFWn3Cu91j|CmWN{i#HYoF)1zGrdS_ZJkaNkP+|AiOk zqRE4LUI>MFK@dY&?utZsip7ZF3b=!=Oeq`8$X0xobR-o_*W0^;^~tj%PX{fcI(c@h z?k@A3YPDG6K|DJc2!$eoC=x?lFoXv}r7+LO!BSf;wX+-tdqU-QyxH!O)0qrb@+sOP z!dFm)d0%E9ZpZKJpk8Y?0wPI(W2#WDmfOeMC)%0S?VH<&+egU8>+Pr8`Sv&Y-wKyl zf`P3taT z-Y)wN1M|*x_#Nlr;{T3}=dU@*-pk{EoYXr=48q1ArvG57z&Hm8>ds`mwnhY2Y8kJh zifn06O;^p(Ao9eU=-9+agFO(GbfrOUCXSkOgH1e5)VLkB3nT8_?CqJ^AIzRe&VIaL zbuXHS-c@Sdd1U!jM#Wm3%kOt(X(`!VEg%!3^I>o50*t|_YU0-So@Fk7|4XJTKjlkl znpTb{J~hM9yT&$kq+B+cXYo+BvU4U%zkPGrm*MRMs~uRM-jA`m!Q`79#cAS{AjauK zY)@B41LT%c&KOlRrTpD=iuUs?Pc`r#2`>jWBfUR|;L>&{viXHn54wT+WXvo_p?s;` zQ7=jEy40uV^ggB!0Ao)yQQbz};Jh(DlDo0+VElo^Bj^$7@z~?>#}khf9xpwccvgEh z^-TAX#!J!*!57q*>Qj9`m4B-K)pEKoB$p&rj;qOBrdTRnAeH5^Ixp6lF6Wj|SWWL! z`^tSM`%Etu&~=5IOSk2AmrM(CgVwuV0R%dNpVzgBUHeYQzl*#wd7XS zQZeSFOk{=P_3mznn+o{-ENkYkYM@4rLRiIAQ5ju|hS3N*fTqyv=rj`1B>gwtu~1#3 zZc!PvgNpBq)A33AJ4f`sFSZct%n(+EFbg|BAflO71d0&BBzqI3{vq@^f9=43gJ5vS zQuIOmWH4d?DxuK@14l|I8!ydgu&x6V_SLa1xviz)KH@7ySoa}V=D0F_bvNAxUeknR z11tHPy31NmD3*E=FNvkzdaCS&@u?%;dOYI=&iML4Zyjpp9fFJL+kp0n}P4J?BP5awcKAeo6YzxG9KC#qg0gSGU;SSE0)WPkzT$S4R9MH1DXEv zwd}RrwV7+%M#|%8oV}I1H8b8eUf$dGHS{%lU-@|Wr{UMj>TT>UZdZmu934c4LKsb{ z!6=m}1}Q@|j40t?B$-ltKCI@tG5B*l59kq3XYf`=N%=|{uFPk+bV@NtB?MpJh=3~T z^G=@q8wpAAw){}g(H%q+jO)BW6EzZ#9VMFNH`Fvd1=&_p)8(|D?oW@T$I}yOE5E=mJr>yl>su+lbgu7ra^!NV8)c?SHu&Qzr z{X7uZa!K#(u~6!g{LB<+sIzxpSX%B*(F-G`)fb>35_`j)otW1Y*MDecX7(E`brto| z-CH^mqNe5Z$;)O}ps!w54CVb`tzC9@n%T|lr#h$$m}SlECbdB#k{UX^8BdTaYP`h) zHe49hC~L*QS(#o}@xm0m{Y#>ZR^a7%;FVR#+)AAzEmt584^_a26f0E3l+Bl`R$ag+ zwd&z8tetq5b@J>_^axC{T1`6@q>xdC%upmP!u94&RB5^~jb())xlW?nPLyWgb^auG z0+sLvApuiUujX>crJtODFr}cr)e^e1Ld$D9k*r4R$G%I(x$mb+?f zSoZcvjq!#_2(2`)WEZK6;wyXf#_ljL+BubqFGDNDWtL^N6*xaCp`3hC-EoKYVXR&w(WvHf(2z6`5QnwOsK~GJ>vJ(vg)UcINNwdvr zvi2(Ae#~0xtCh{lR)r}kI~7{_6h;%c<8rUwOXJ%ffwbj94L`R@X0h%xS&CByR2bH% zip^@RR`G*>AHM;1UFBJ*2Ty76i}pIDMYVBlLNn>wX^kG&kS4(-?OA#O#ZewC21e_i zN%Rpt5-io*Io)1Yxqfb(VA1QR7>55mk>aE zz=Ab9OT8-tt%zF@w*qd3IgfDbZxG+7#&WIP&(1Exq|JJK?zxOahzs(NcyowA_ZS9p zU^^sZmtOKu$<7Nl-xSII;GGR?8d{1jrL@w4y*FMoFC>dT_~2IW$d&Cq=)U~wh3mUl z-uiK;|1%$5(7y17^~v2=#^d>(Vn?;Gej*#aFn8PR_imZz=4`#)UtIWkG}PmvEw#g%^8MVf~T+Mrlc1!(qgf+DcxZfLNWLh z_&7KQj*XbjDUE`>8FrmQ1N6e>{N)gANBi`rF~F0!>9N5<)T-@3V9Ri|rnQRG={@IWVbQ$X9{i^tfAy{> z`yXEKP?b=d9k~jXkJmSS=}TK`wG934u|NL%nJl{fVd1 zx6pUF@A}{3PlbM`oLJo`GO;w6d+1{J1&0GBab49CV} zG!>I#DhZt(iA|gr33WPVi(MIh&4C=>^=bka2FcZ3heq#~>-Z)uu^&dQXti~Zm9~~7 z5;HeUjZhQR0cwhRof2Sxrk=Pqa7%#h51_pP6qrO-T|Uhsij`QSERvaB5MOYN{+!`H zfgG}-vC)~)p;My-ta7==^zqEvxi(S)s3iNs82S%m-786s1}i1`Z94gpnQ|4%6YN1)s*^B{a-y0)mCF&<(9>ayamshCQH2X2~3KbL={n zEfon>JCQSp8-()bfLy#fG<2c?d7zlUVca-ImCjB*y<|zX1SoQ0u2>x|-E6wqe5Yx= zw7)b};`GvZiKa^4HcxJyc^$tt_eG9d$|1GXB`y)yi}#xzZQEDkrb?%Cw5n2S>=_L0 z7R&_~^{T7Y%hW5yYt$Rnz0_Xy3GNv8Vw)w!yVBM^IpSLA38%e%p-6aP6dz_Wh-J)(Yv(AV!H#|AM9m5AyCw|N-(4`i{EM&uoixo1Do8%e#l7S>&0$W* z*>0s70q}&yc(S}xSWGVwR!7&;YlRQfWAtvqXGAxD zl0y`ClA}kVg=YuBB?mZ`+hKB3Wv>S=)g{;E&7Pf}y`I-RCXXlhACPH&_9SK?;BPo+ zI2iP$avh%Wr9lL5g!>`y%>?TXFWTze>D}XHyl33*aYzT5;OR2IhkuP{Bwpw7o$v?v z*LjwI!tOCqy8*74ye^mRP&a{r&)=$c=CmEz;YtMjEbR*-3!0J(RsmMcjDpxQ3ZeSb zSk5PfL&gB4a$`>Xim}z^?d*Shu{9=kq2L#*@Yf6Oa99{ z*W5zS`{8xhU4Dam_pcE8`P+#5!4o|L$!`7ASRPtUQOu3zyQv(_H?dnQ5PntAf$bEC zULDLK$<89)o<%%~@RRYg{+%uZg4$(*`bMZ9`;3wArHg!0i})zYk#$U>;6a&%Dk9TU z(p2Nbv@~rnr6~}nk4rDYzvCo7r-{hNC5PbzP8&7`4$U$6;}A?PnD1=7%{e;6+Yd$fd*bL&gqvh|J(`H@jXWQrBhIAeL%Dve 
z{Fd9YvXkXGQNjQ|i7t714_0_5>30vJw%jC=bXz8iWQS9_$KgOp0M@66hpVLR=`q?y z!)TN3q}9ElSargV0K_-jN9({ zJ2TeXn8BKDsTnybYiYMPrFl}CHxr`L?qmr4UCa~~lFl>lfZI7yt6&wdSrQ2N5D}QL z=I5z+&?6yj{+gLrvzZJ12M-P$9=&>?ryB89mPezhq8@sOSvm8_cs!p>W)^;!-ng`P z_rG1gu+SZ;#XjzGwSVN53ztHjkPBuPGyez6PKGK-o9JzA z+t$!j8(U;Axl1)Qx>j%6+I4+uWE0fZy2W>^vZr?E{96|7S#kU7FZjNoJg|Dwbj*Cn zcS!kO^?NI(Hl5t``li#Hf&tZ2k!tSF=qB@{{PNC5kn%E}vE@O^zd(jEE;dLr5?n5~ zz>jN4*1$d_V?Lz8>u_5e(CQZ1(%7r**PhoHZ4!O?@IY=n2JY`)>Nc1w?~Uz`Js)FY z&0|Q54~gNqx-zjGEr&{d%XK`oJP)X|yx)!7lZe+{TY0pT#~mkxJJi?*7N7-_OuKIN zFBeOG)bAhn)Bfk^A5$zwu2obo-X*e}|6+7;KJQrdZ%i5Uy9i#_sZ~r_k4j~U_3^owOTZTNYJR)7pU&Z zpw*d?%v9!N#+0$c%uLJri95g7Wgr-t>(x!=P5P#N_?eqG!J#3GwYtf^=ZlNcVq(Qv z+^%|&!#m=A858@-vwzZ^B#Oogf`OM@qj!>iPIo=fK#le?)6dZT41z##nt?~@4_8Tx z&&t5hgP07Rqi~PR)f+cGgKofjo%qyla72*k4Xjm+&5+J?ZtRql8zo`=qdB8Y$*6RS zqzYq-)MN@XZvcyENKjx4vh>F!n2#r8E^+AP*lRHw^NF!D)8Jf(5$!b%kBqf!wM|p) zxl&IU<8EBBp=VLD77F_mWKL-v?UnXwJHz&+R;P+uTWYW)`U|T<)FrvKDq5&44xx4F4I#9CL%1i1rw8XzE8CZ=XvOkcr%o?WAyUfJ8$uVA zE)G#^vKOn=BHx0LQH`w?9oAfZ@Jm!}q)ZziKS>4~C9F*sixP&>nk0j*jJ*X=B+aug zio3frxWnM??(XiexVyW%+oFpv?(Xioi#sffyFR{i?>XQ9#k+CiO-xK=W>r>K{<1Qo zr@A})^iI{8RiYa6RU9o{SkV$astn2eiDs;~3~GDA{(@Apu>HokqzT{W18b|cX4BJT zn_+N!W*v8Bip#m+T;Stsfa2MzngH*49#T|S@MZSW?@tRRmSx;;z3Js7%jfj4<&QLC z1GTJo*@}(E!*)?cjY{MXIQ`W3w;jS!H&(hQu%aw|9v1(hyvoizT1%6uV7r6l`N_-m zx%mp}Utt4>^AA>gEZ(av_`O}b1NHci_FY%+ikeVO&dwh*rRqOdL$d_T2Zv63zp4gE zUs~UcIJP21(nTpG0Y#Lh2UWJc z1(m5uDHLefyEn1xQ;DCA1`aDf>{u*40A@$p44H6Q?<7Olpo|FT?bbA8;l|~}pza8k zKa@GYXR%sceM2j?6|+NIM!DgMiw1m^;i~!dlGPI<5zN@BwPJbfbBB}^#Yo>8;M)G9 z$Ft=)I8@h33_L%XaQtX?#`kGN{14M|LFW4rlsc@iZI3Ie&eQOu%<^($;bio@*#d?8 zPfJE%ljPOZ-5zGd^{~kBX_@5_PTEi`KyHF_4W{k4$xt#XneYR1#sExa2#OA)P&Bqv zb`aK@Yw`OD%oLWbOw8W?%ui(Y`M>vA9o^2EMmcatDBEXiD{Bf<;rqJGNXU(m6v_1S zzjh&PbKQ)suQ2776cMCU_6Ol;4|*ZhXw9GB-QyB& zZZ8klF7_`qCzS3=(#XUOb7OC8b$Y|ZV}zOwNL=+xfUo_A5$MaqefkcvhyK=2CW(k4 zLpd`DDmpr}`f(OD0z0TZF9!K3hK5KbnT$T}H@+y{n0>Cez*C*v2ju0cEctuE{7Yor z3LVe-zDQ7M7-mn0sMVZ|MIxrDS21n~OXpo<6{07NM|`*@R~Cy3f_y0mnpjUMM8bpZ z+tg^aM$UG++MS+0iGT4-{#j`73np82EH(Osz+sb#1`;Kbf82L)npZ@wK<&#Wk}{JS zcy@=QX!hbVUEd`je1YMH6H+ZN?=c*gY1>T8bk6F^3h$-r$3xFLBz&}RPz;0!F0XTr zAUv=4?_WpLBNj{dZU|#RTW%+!hGSL!lFksKkts!T8VJq$Q@8T81@I_)f2nN9BA2Yq zIB-ErHSmcNyp7{%UUMMu3X6{s(6+M}C6rUzQocbkrzgIW7H)|%CBB-(OknG(^}u+7 zP*IE2gD013r6#g|lZomHOeYb909$R$T=`Ls7w+1gGWRmWNPF0-gUAGZ`-d!eS;8^m z`CgS&cH;dgo8E|$lbzjdCU3r^fypE@!l>-QnUx3>aF$t2RI|uJNAmW0JAgd|I~jUU zG8!@yaR6QH^G7kpWYZAYHl2f)Q}Sn03s0*AaZy++*`l(y-c!@N_>uUi?wxMD^)azo z&#o}=ZQPUY4iuM)QtH0zpgGE(?+P^0(@1r)a|#|tN| zVGc2$Kdl~M4QTMW|34kn%8bzpj^ zo~cE4qe;hR*6v!mHezZ!41mx#d^9ab2Xu_*L`qX>KqR2rhKz~Up?G{q)YWNKdlc-; zgwlX=@&t{K=5|7%!~5kO2sNp?x0463pqJKOSuRA)xK{kcI@Oa(qmGeH zPX4x3L#h*JGGidoginH}vR2G!CbNj7oQzQ)Q{c6^!WThD+Md|r#v1PJ&iuCyhx2tXyI<^U7XDzmwI7%_u_@TWA}88 ztN@_xS!{}O8*3gCQ^Gvv1ob*PcjC1yS5{b(NoPpQgY`u5s zB@EMPwCxm6rj9To(QhQUf)G$FK|}VcK47&!vS7g$a%j;N>W1Kn3k}=PZB?QB-D!nT zf(!?-?MH$0%Dfi|n?zzRI=Mom51B9v!7(YCb*cI?T61Nuhil$`nvj4-Z}Y~)sLDxW zhHuy9-&R2aCaU*aOYD;1to7}em@12y7<8hT!CELjI|cGA!Pk!yMXU*pDqguNTK(L| z+x5>b8jXgSaq^osU(CCt^^}lJI2p=3EW>k?qk(SO;28*|Y(1A?4e=OnTJqbm>mqy4n(~BS zEuZj@TGS1QRuhL#`X)9u0rn{a+{SH-l@Zp8WdQqP3b&Tqe{{DADfL3l4DJDO-k2Vj!O^rGuclAf-i+`&%^bud?`KQhH{W{Mb>1E7%FCdE?H&?>AS(F9|-~ z6w81@U871F&d1&aV~j3+gMbRtc@>oHB?_C>Q{ay#q#A=(x6K*1sgC>eWOwmN-L|c+ z^{dH@z+yy1V=p@kxDOo!#K4PQkDTvFzpk8UB!6K?#d2CYFZo=bUavFcX6bg?^Z#gK zazcyrs2qAPJ&J@&Av0~S6=+zNYFyPj5S3ehR~<66h#lwqEKl>HBa{cRO+I`w;mZLs!__>d-OQsMKJKd~a#5$zROdHQ zL+8VZlgbbEyQj)1b+%?19r2&R0J+xIj&Ghuj>%h*UMmXbXm;&!W(l+6v@vyM3_VQP@G=GAZe)5c(m 
zDm(b}zB!?$#9DE7Z~Y<7ErvVYa8p}X^{Vw}X5Q)v3<@siU!etJRjKmcT588QloRP9|cT zi9wBMlMg|;aoRfqiYinY4aytG&ZJ+nyj`#WSn)NP&=l?b#1e~pMiFFBCgymy#)Ikr zVm51t9j|`>%-~Z4S~(=76!frhngMMS{Bg8??%!yMjp30JJeM4-Ut6{4yOH#mMJeV0 za*<9Xg_{Q?d<>q51y`p*og#Q?$%YX4gE*W5mGN>xNm9QvVvM-iVq!EDlyP8EE*XAC zTGvpEOm)WJ5=F-En_RtQ^$-OaR^l&phh06F*Y@i??k z?-)I^pv=Bl+ZK&?NqGt_j=vSBcI-V+PlmPktKl|e%~9FyMj||?3HvqR><>bJ&Ef=< zLMDn~-S960uXQmTPpwIhOA1v6wbTv|GFYn37KK9~Z+TD-E9 z4MBUJBuv4c=sX?pQ1WKk z<3Fykq>B+|tRMimBe5>EPM{BiT_6Mm5Seh98xVUN8c|Y?XsFwAVn~6t;~4cih}fLQ z;vjmN*vbr4zev3{LWu5(u6qJ|s0ffh^^6X;qpd3H#yb05@H=$Z{yupn#+(tdqdVDM z#uZ`nZ^O@dV=(ONYLcaVmARn*{5kU7N`%UI(*choE|XFYTCVz(@CijswDn?`b|zO7 zyNIX1I$hTZ&}cCn^bx};U>-;BL;Fp~eS9S77!aSvvP-U0sj!cYn5&_;;FEogbZ01Mta5@g^38*sEfeL zi^G>)IuWN>;`-9JdX%1X2EfQyt*XdzPmN+d!nkzWc(1|wfd2> zMI7){8}PhjD7e1*bTiE~{o%arF|)yskdzXTr$56NV$2}FeCmn5(r_!)T>{WQHx;-r=bTeN~DE7!K6F~~ADLHY!X-XDYA=NK?q3X53= zgIR_Wt{`Vzf50IRa{(Pp`{V1fpfQ6Vfa4Tr+3DeZ5G0b2w2vGXh&JrS|8xek(E$_Z zS^0^CBH0jJDzC-OcNYih?Jfpgr%r`EiPqxCyqeq{>P9PNR69y zR+IFvzigZNlD9W;UUWA(H^grKYyk|to9+Aobf3UStWV8;jeJfe+-d_=_S0LXC`%UY z^{`>u&%aLCShWkVw1m2O@k;R4+!@Al;uYxqX>$a6DTehPO_IZ~Nkmwq!|$YIJv9rA zk$iBK-vl8D<3~{-{@{XoAFVkehnk|9hc98WkunDn1iw#lS_+z;E5-D#QV$qZI&m0O zl~q?XXzcT-sWt4Xi=U_|cb1la=%c#tK@I@MtT1OcsB&Ql%5=fxhJEtI*KJYcq`<7; zc-Y`d($BU1j!TO@V)^R3SQmGzerBq_>sx0?%7|egszbhxuG(e=T$jEGwWpe$nf|o> zoMUhOFncM?Ba$?JsF|cirM|Yb-X8GvtgBx0m=UOFpJ#QPPaE~;^G+gF4c818h&JX9 zL2BJg`UOMYs9eA*FBaK^CN3ZL$M>E5SF%76Lpuo^P_!#ixkx}f>N_fhH*8iUxy1EL z|3R4&4Z_UB@@dU!o;79OE(TqQ@EODj$)XngvqJ;8R&ptFhZ>q=2Zn%qRh{a0NMEq9 zVNIF7b?G~Vwvk>`$N1nas8C!U@-h-UyUBdZh~)T`%&Vv6O!Na!X8|%Iv&0w zL#K9DF8I-4^k@|y4`Mtu52d81VX0QuE*K8!bz@r! zIVQ4VC5mbBG^y0GRVe636)ULJ8+mg5%or!O5{E_=;^Nt5IpXAzxpT7Umb@%BnWw^- zy>4AwOqg^V>K$gZuR$B9CTvY^+aOWYox+V&&YSRXpDKVTT4zn|Q7}KVZD8A~5|e`-X0?fC`Fv^hz7`+C)d)~A?6hvtz3(oc$dy`#D+|+c5+aE;S6i}a!T?+f0@K3ct z_tbL`{nB4kZ4%zZOG2%-m740s_%sMcW;SRgxCwt>H~IkG1-&M@zm1xuVsM^GA7`5V zS!-Lnk=RWV(D`0J^f8e$g->FW?vS7~JCBMXPH~f!w;_DizDEvsGQ&Z!7r=-ED+VqF zSN=5Kukh(&u!fs)Xf|NM%)L;;EIH>+Mt>f`nJt#e93WK8seFC9ceg+u}b8glU(9mVRh7C4yHD zqM*7%kxtKy%*`2fhqizAH^O*~Q)$g6BKstmY$YO@3pMc*O0P=NML3a z)*~+^^>12?Xewjcs?%~>ElE$B6;~dZwU?t;wc4g5D_?<O~_BtuDTbRS6sh*o`YyL3XbN8IU$mD9-FiepV;5G`5 z${d)!@xC7OCx$WSq{0EGd;7SKE;GD+_9N+n-7f~NC`%*NKr4e85k5tWKPIhwTq6NK zOTXj~Y3$T;w3H+0F&QZ#AAB)FblBHvr9FD+v2x^auI4?@aEmm)le1CB`E+zlA?FLnSD4_=5VayjV14mErF~ z=ORB9Jn~)U-SwUxKZMU;fauui+x@oPw0&(1z86nO*1-8Gc@2#NLFeH}Y{GYVw9I0j zEm7SJh5grW{Wkj;nP|i%jQ2@&0&FX*D>}=v9zy`7Y!E|)GKKr}!lTd$I~dIuT2Hk$ zrFK=n+J@`y;36Gll^%^>Odto&@SzBg^#?)#{FWP>@q_fPcrCtR9LTtB?9)44erSp> zS$&m=$$%cVMJZdyMkuXAwwCu(8QDOQMYj9zGQJXtZ0|mrP4S^mkSv_U zs5mYApqz~b9LHk>r|HS@^k5@Rww0n^m>jvSx?GX|{0MMhOe&?CE+~ajVd3Fyn{OC_ z*AyVh-H!PL%|WaK!MFxZRgbub2HFjvv)o*jiCRJoQJpDhEiLrOFE3p`+6pO?=WJ}l(8_C;r|lu%mEO8#hJ*^NsXx+1QSya{Z~U{h(@cx8Ar-rtpW+5I+j{J{FM|!vJa=3 zlJSWVNK5T@CyvS4r+7+?#&!|7!ANABp-LSiNslmYl2!}#CiP7Yh?;g#ChhbXRzel1 zJa;oE37H+(>dPu`8<9O;W8TM2b+Za?%CaE!@cZxXZ0?@~^~3`f zEQs(fFZHBdu-)eIp45S{3Z^*frw+}uLojMW7UQe%d7jGGBCNgs(O{a7cf6NuGMQwl zGui^bFqL6$f>9E7_v!6Tid;IiEb&oW&C@B#aC+4jly~{4oR2^7cYRAW6{W~s#IX<9 z_W~QnXP>0|H+2Zw<|6pmTek_NV4Rqb)DBCEUY3Otl(2owsM9ymwPu*mv7%#A#Xro0 zs-6Rmh*5elJb`VTA6)l1pLJL2pPJwake_ja^aQ90)ROPB5c!A-!vU3DbUjsF*wp8k zO1VQ$2%6G4Hk`ktR;)7UnSZ(dvOrQ*@)|{DxKkMkUOh<{~l%t`b!SD-Sgmvh|M_$&MywZ!dkMy)a5h zkdVrbQ^K)UV>!`(tO>7>{jtQHmSg{*Z{D=);X9@fv#nH?z4&W$dXRlXJRO)j)t)ag5Fu}vSu())it#I5f>-)Ts2yOO|8dsX)vg#k-gcl-pBKd2q8oKo~1Nj=Usy6etNj^hgX&;(fm%qnZDZ|v}98T4v2k2~!3}$P#)2%`} zJLjB+12vkA;}??WB&!9bt(xB*~>?_UbCXt*Z?rq`AcgkoqIf7I^^C#OHVwC6fkWbU#Lr!`MkwE%v^oz$IV8HdM?{UhIp1$Bg)F@8ek*k9#8sJ 
zdWrwu79YPcy&4XxUFMs?7JFQkpUowP8hW#m>iS?wt3}@!A+(;ESi8WTRyaLLpnD%D z){*J&74%!pm^#U*RUqKa=7u}C8*e2bPoM8cu2%`hjOvOiP$2BV!m&*Q4-H+os6h6X?FYqCvF75Mm<9;bBXTXi_g)8AOhnZzS z>ihJ0LG!v{Xr~V|KeM>V{M!`qeDC?${d!?XjG{~%KWl))g&@Abk^x2GO>8Tp32V@`s}-zoGKdUfCJaY1)R7#olD8#a^b$#3)lpU zInDr*)IVF8l900>VK)W*@{m{i7^P*rZ_=P9zyG!~nOSQ#pKLC8#XNJQV?i`dy*>vf zxBhH-GXb> zOfKEs*Yo4L3;n;YBxLphjygJT8)fPjApK0YQj zFGq6#lZ1o4tC+cqsk61CtAjJ#fAI>&c3&;R|Mj6$v$ivLQ84#Vaj-MCS5lQ%`|4wB zZ0Q2vfMXJNF*Ub$1#odQGcq%?vT(sM{WH`*A_f*_4n|fW7dtl`ld7w^od$q|nGwj! z&dmO$CTi>`Wo~V0nTiTiffd6YAA|ei+09^)l4lYJk z_ODI=238;kBL^FhgB8F61Ofrfz%QQbtBp>o8aCJ|#7^M6qJzagONX5#u^@QF%`{nPg!ELpw`lNM8RkdzjaH+BRtNt=BE zVeRVmUt(1+moF5g?JXPtU(lci2vY7?Qst;fX=&}L17{6%dFOHcRz{~<*Wo2aq0y(+9 z7{-c!{ z_;qw{E;ax=I|m~(I}0lt;NRANseh*d{Ev2y|Iy~+;{G}wkeTB@TL0DlKaKtGy#8(d zcf5af|E0LuSQ)w4I63~0$Iu6Ge4)bj-(B_IwYFW&oxdX1*4Wit z%-qz$%p8tM&fMP8)#}Tg+yVlC{~ha|x%$bI_8~wN@y8E1rr+Wa?f`j;(9}6Gllcgl zSI{9M71?2}5$&r-c2@GFZ?M%qeznA=RO;_4y6B!ayU@!*J9+iL2D$XQ!2E+7>*b_s z)GZFe5^3LDuzQ-$zU_8?E3#*tc^5(E zxJpf_XK;39VOz)r8M(g`;29*ZP1^EoDmQ=2~su$}hy@1{&Gm;I9nywG7l8)a|YRaqvF@{r}EotpCpHfBBvnz{1VJ z!S)CcJ%gVqteBJ;drS+b(0(fG>{7dwF8-vmWX@^o*9GI_B%Ad%hQ2? z&wEPf-6i|a^#^X(rGT$%>p8D?03XC5)vn+2T2Y}nnTFykYM&vzQIGjv?3qp>BjL8V zVc&~1`tfYqrCaYKV&D-Hh}&stbBbkGOPw#GkT1%mr9y3O6(sM;)%%k|BBi3C=P9HI zE-}~%85sEni}}LW-z2>&<_8CAflFeR(lYObt*4ADzXRRWOLjPdzwE<3C>+XNL(cDgLUqWRrx0}3cL6ky(QvmDo3fbGUCm} zn;XxLXmK6!Z{W53{^n-uY-evonrk!Cu50>1&dZ+h#$)X~)C`To&_klxEv^jT(# zpP-m7WHY3@8HJ^7AA5z-l*A%v0I_gQniz#01bF*e;u4Swsc^U74nW|HkRFQh$_ACF#qlfp zUwO7P%i8nHH;Lv5|3us*YXh;dL>Oeo*FJHz1aQqQNhcD+TyN}H&I+L>k4X&UN(nC& zCir8M*VjalyUfn}F+MJ~TQwp*ToNs{;tqF-+z;&(5}G(Mbu)wA+>%!QJXsx)DTqkx z4Siz&}%FNN45K>Z|qjZ*TZ|f z#>m<=K4{(h;wPe<@ByNqbrR(mQ7qcr;rvqJll91?FJC)n_=r2dZFb;IsA57zd?)>6 zq`W{&HrLN}$Tm2Ig09fyD>SG^nsg5%8l?O+vkew)`cg0sb+Ws4Yhy0(m#3 zAf@2Mq^lOq01cW3d_I6FEN9JjFy@NJDcRnP3s<&tPWz6?80jK4Yokg*q_++1qObLm z60{)-?-XfQ;9B`w)6L}7fC7qrFQOBu@Wji!D`l=C`607K@oG&%sCZfQ8mc=KG2F_w z%ylL^x4#XNFrYXj@{*WzV8RkcPdYg!f1q%~vo76J0@;*mAb||48oEBvxF^&l5#zSE zTxp}eTG4CEWD`*MKK)RTYI~@I6ZKFB;S)f&5kmGGfp-VI>!DAeh!hGfBttHF9LE{b zAZ8rVjDKB-h>~G#LZKnSSKJ>Um~2R6@2w`U&z4oTb6Dzfa(IX3&AA!0s_??_E?x&q zBO-J{a{zldK!3i>VvK2^2v-oT;1p55!)IA#uUeySr^H|BZQ!P5xAdkC|7+F1^U`zV zdh_pGPuF|PL)in?q3Yc;QWFiQC$JJP%14R)Vh|qa?+xgckA>S217{5x<6h2c{b@J_($ad){M4SrU(9rv;eixKvT0Pp zJH!{LU$>r_L*Q$s&?&& zSb?!pjy#VHR9PoEGj6@YdGLq!6TzV+(ugBfb%fM|T^+DBhox6bi*iUB2PD?-pg$IQ z=#rbojAt5#8I*$g{i4*{)SW}dfDx@9icfbT`8AAlhhlTp0sbBuiN%Jw`XqibycLM$ zMq?K?UWpTI0Bt&@`CGD|>rl8&H4A|?Z8}IQ|K=D_eI>vA2BsUA^px6jd$ z+2hGpjg5u{pjg&BdpqMyUOJs$I?6s{(YRF`eQ%%Mf`((njP1ltCr*a~E>*~)-a{#k z;6^h{tk+{Kk>35XW+eDb6|Pc*1q<3dyK9%*@o_gp{ z8@3bp?lAV3YB{SV{6uyDc7vArDqQotBoJFcg=-+#K_XjnQu;UxR~82qwrq}h8=31Xj($}T=#^k2 z(p%EYnC2vf9l6cMNJgFFB)<)j))cq)CKn(K zy|{b83rc&0)e%GLPX9Qe$x2MF;pyubV3Eq!OJ~J}#?kWi;5jhURb*2}xNZe)(I75X zDzVRmqr~cDZXCEmankwpYtwV4rhBD`J~KLaxg~Wn6X8iP*t$cQOO9q&Pf3gu$k z48<+lkD%|;$HIKZq13>khTJrK5t_`=zH0K-ZSYi4!2+(QmK_HDic(Fmf!>#C5JD3j zq7!mLv?+SIN!$i)q%Ql#YC3APs>^Defsa%U#`>Mg--Ng0=#N?fGnSJ*qd44N7 zX@BBiFT8SWBR$o<4Sw~JTN!&-#3{@?I7Wi&49$}P&wLox*j?2EAxhv)*`+vtsdBzZ z{RWK`8BSVVbU>?CQQ_i1v}VHi0b^D{0PEDE-0=Ip-zU-|VTYwHO5LF68!Zjt>fHBB zrHC>t`>Qvpa4h6f^KoZmn30`{yIsYqg_Q{9b~=n98agvDtAO32F*|5s%}}P%p=wq- z7Zje3@Ni)RFC|fRQMW1;!G1c~t?=nPuNPlYJ16xeN1Zf=)pn7PTBgzVY>}7UqVK@E zdD-*?TZH7p;?Xp_bdZC?Qgiv#xq-@{!?+49%@%Dam>ZP|8Eulzq;sW}+VQ$wm+k(Tc>a z^Hv{s0W#O1ZYi~s1q`V>Tdal9p|X#BqpBUd7IL#uEHhY^mi4&eST1;LvIUJGKTDv2 z)POVWHEoG^Nt~2!k~?VyD9YDUA#Cv03=0MtWaMxclMQ%mn~&~JXiSayg7PnkhV3{Zhjxd zObkBHCGB>dywz#-)@dbdI6AEc?ea0O=IZ*c-_x(=H2j9E4#tGG9*$onMSB4oLJOAM 
zcM|~%O)UQ&p9%pf>ryG`^H)O{f>bG}kNk?-4$PV*s}cTAtuUewjOc}sAE&kuI>&Ts z-3FfV`>WBHb>u-8gMUBz0n%Fs-2%NXXeYV^)c}+!r@dWCerlrJkks zlU7LW#CbwX3JU--ccw=LTr2{_Q40yE6fdYqf5?WkUPzpY7m(|1960YUa=NovgX~d9 z_G5q?nxXYtjk>40^1ZIYF*n6!^F{esDn=j&Ff{T#MEM&1TdADOi!zU#lc2L5!56OB zp^4NFX4oxV@Ma~bQtDF8Qr`}?#4q&AC3V7@kZ3|fHNvS$#$9!H(43xx6?LHS^Gejz;w9J$x4r-Ggzcz#jGKNqmQKH!d|GaY^~u~T?`?+d&SoR!Ly z&GZ=b#(ej2sT?lG$UN1^AKQE zyY26JOAb``2`Mi;DwkCE0xAVEXXXpMYceT0Gqbze%(L0>Yej=^U7#m^Ce|!7RxH1K zSzbbGB6$-r{ISJIxGCt*rA)kJTqOLTdTe^nwIM}MID8%PD~<8bh0&CSf6v9X?8>(6 z!keJpCXJ9alMsriBrZ|s1rr*MGh@}`s|b9p_o_rBk_s;mBogIJ%+o!It$O<;1!PMP zSA2tW(M_{#0=d);6(Ky*YS)xCauz;m>7wrbZ^Ku8CM(&xQPzu!Wd#8&LA64X4kdIX zsF?M%$vmnv=syTeUZOEQI&aEnDy{>|Py%g;+rBjXx>AhT2EHiweLh;rYIFX&P4!I7DkqxZC=G&v#bWf_#ZDHYNy zo@`Ng8I%S#^D@71YLu&1zcqiWDKFKT8)>EW2?2c|^z6zG*!_ux8=ad$E&3qN>EbOW zy9)+03`@~ub|<8Q_ALj70{viaett`%Xw*vlkWfk_9gUiX5a$e?+IpF~Sfm@Af@C@} zW)cs!y`Df;)Rp^Uq13!a%vfAj|6H#Q7lxUsyMOrRu9P!1BV?0Da68`t*XV+`{De&j zv%hcL?6Qw4+@20sAIqD(o-cJ8hoZiV+?AD=%a&3wad22XXZ(?#j_Ef&0zeQW!Q^lt zu;}I{S%W2gGDxmRlDjGMvOO1z(}EJZfrEy=m6HB=@K~8K&7AtjVo<{5vnvlH6&mHL zR)&t=QfBsqxh>0*Qx;E-Z{?Ib@x+s_-iQwJ@AfHo8~zkn#2r$5&S>)Xb{MDNoS3$# zl$cn4ngs^ZhP}`+xUFO%g^9L7ma=YT+(m;_SXYi^1Ox~vm(QCh%=0<*Lxx)6N;rMa zfM|X-LE&_Ks&q*O(~nA6^dEIy9h7PpH8gPYcUa?b?{o^P`u&x!J~4QOMQu&h^`u&= zOG|=ly=%JqYFm7sV)mCvb~Rk{8W)j^UA7QD+Ke_fj+cvSj~2@z+PdBU^$iIL@zU+H z^A*!B#)3*Wjh(%<_%>JEt5=L!gQ_Cal| z&rFmYb9Mxl8pOO7EUeXop-4+rafK{RBAddBEnRz1(u@bMCIeN$_LD669Rw13 z85GAY<8mzCHl{Zfcd$!!oz!uXnk!}707hkD%h;1h0SYmYEQ10w3|y>zS($?pw80d@ zE^i7!a-DE`_*wbnF$N_@hJz|^Boe!e1J4|ZHbk0@8ZZc4ip7D(8uE-3l@ofytVT(& z3V{&g-D7qPMD(KKO7sXqpPyz%)r*wg(eUq|%P2&MiX6vT`v9W>ka?p9gsVwO+zG=K z5noeDfis!#l#sfE3YtKTf|SrmlAtf3HgdU#Aq?502%RHoNAjZtgxa1&XM%5{xD<9e zLwGx%L-M2eh$5K~kEwsa`f)#mq3b}i5LJJq9E*Z5kQf}2a-gR z?fM?_;tV^n=>=-RQA`NqfQzDzI(NZ@lce`M9_ms@phoVaAP)=!(VpRO;Y7B};qNDVNM=8Z~!H$ru^bJR5h=ko>${le*(xUztpqR)2B335$#_qe8 zOXPy0*ZES)iRBL0V^Pu=_Su#T=Ypr8{^6PlqnS7cBA1cQL|8{=koiNw@30Jo0l}}C zfauq7@i9hNdud8BZ8#Qm&^xs8_ev3M5J#jRm`60{o%7F;tB!hb8Fa5ZI$D8k2w0?y z5xs!gK)O`LLQpIc187Z?@%zYv0mM3-Yl${ECklFDy|6ZH``}*E+VCn=hj5!zen`BRi`55Ml~676VS9ptsVC)72Ar-m2)mV__7d%sKMWuL~7N1?nI z`quuF0AIorsIBN5ihgyuPtuEHPY?sq zHv|L8H-aspH!ycvhrSGg-e6AD-tgAc)m?l~{A=BKXk+X>kE_}nJc8gy6l9k-qAkfc zPywMgD&I)Y(EWs~`Wsq;Fh!Jr;LS)uGW!TaBji2iwU{S5-{>d&(fv9@FUViJAVpvW zB>A_epO=0wkDrGE?>P#ucRxS+egxd}7JM8MKix6;A1?}ChAIj=4*Zxs-C2D63{u>l z^?Ui)e!lbD_;JZQ{Nr+8z>oOx^i|>dwBTdr$K|1bAJgNh&nLX!3-l*RLC>FDMZx36 z&p*B|0Y14O+kX~6sTIBiv(I;nuXh1&0XvH{pToc|ArM5N2JZE9Q2=1Tp<(VpS1F7s zahT&_{kT}ry@#yqC~J5k8uhq)u)zS$Uu{E#{?XFXA^v>v35 zS^^w$CI>u2S%y6CuF#$!`ZRO8n2SkKVxa=3Wt;h|(c;=u4B7}!SvjL@*MM`Ld7Pu= z6y770v*4zYKNTgC2iy+~CT{UxmFM;{moauNkTHv|GW9}YntI8~jucTmAdA`+dwUI3b|Q4s0YZ<_z82Hyy!(do{Ej2DuGzB`}v3xTW`j}dUgJVW0i zXWRMtxKe4UbK}#3gR(@dEfsN6q&-YEmmHbJaKvq2$}2C1dd9F*b!M#Hym|foEx^0> zv89hMy?aZXGWDVBlHdEh`;N&f(EHHim-@hU_h);2s#So@6z{PoyV?H!g(Ahw8~WqT%jssvI|OWXZF$o$OZ!rA4b-Cw)}OF435>Ugh)1Xcr(Oi|b9D>M}>$cztlz;X%0ik?uW#^{2DoqB_xJ>SUGQ{?u8HV_Fg$Acr zIyz^+=4MRZ=59FVFZ1XA@PIuwFyvB{gFAMonhn9Fq|wxqyq5`^mnYA3zup)Cl#W{* z&C+$~AG>(Zf^(9&+o9g!){$5%-zw>$^#e3dQnRXRwI&YZ?=lTfx1?mCl^E^G5?IbP z{nnWY*tayDr7Cs-%t)JIHxnsa^vCEkj_W!p;nd+gPb!lT!B*fQz=hr_CQ4Yr3=Z|a z+K)NE0`gCf1j$TOD=_EyeW}q6fV7B2=zEMt$sO*!6 zB;C6xsqwt>Ts~{zGV-L66{T&K&Hj5mdvK%&5iiq!`dY5nqUCihGk!Ytbj{Wbj-$%d zbh(=G{D73-450#na=qDB%>6>(??&q19(BgOJ;XKzww*o1op+BZ3KFUYC_V2Lkn|ST zg23D)_W!52JB^2O?H>SsN{7NjC5$CavNQ<8jG3_}OHCL>LbC5;pBejBsbm>4lAUDB z63V`X82ipxGE|g(sSHOH|6Avr>gf4DpXc3ky_(POUasGLE%WO8yKnY*uVyo*xwH71 zFKZ@ed~1hWxoMlN7;s*1&n4EB-|kMe+qj^uoNhLKy)Y6CGITH_Iu_KqMXOC;kW7L9 
diff --git a/README.md b/README.md
index 45e4a6a1a..a584e8ac9 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,5 @@
-
-[![GHActions](https://github.com/CICE-Consortium/CICE/workflows/GHActions/badge.svg)](https://github.com/CICE-Consortium/CICE/actions)
-[![Documentation Status](https://readthedocs.org/projects/cice-consortium-cice/badge/?version=main)](http://cice-consortium-cice.readthedocs.io/en/main/?badge=main)
+[![Build Status](https://travis-ci.org/CICE-Consortium/CICE.svg?branch=master)](https://travis-ci.org/CICE-Consortium/CICE)
+[![Documentation Status](https://readthedocs.org/projects/cice-consortium-cice/badge/?version=master)](http://cice-consortium-cice.readthedocs.io/en/master/?badge=master)
 [![lcov](https://img.shields.io/endpoint?url=https://apcraig.github.io/coverage.json)](https://apcraig.github.io)
diff --git a/cice.setup b/cice.setup
index 4c7a222ff..3efe94827 100755
--- a/cice.setup
+++ b/cice.setup
@@ -1,7 +1,5 @@
 #!/bin/csh -f
-#set pd0 = `date -u "+%s%N"`
-
 set ICE_SANDBOX = `pwd`
 set ICE_VERSION = unknown
 if (-e cicecore/version.txt) then
@@ -392,18 +390,6 @@ if ((${dosuite} == 1 || ${dotest} == 1) && ${testid} == ${spval}) then
 exit -1
 endif
-# This creates a new sandbox and modifies the source code for "improved" lcov analysis
-# Turn this if block off if you don't want coverage to do that
-if ($coverage == 1) then
- set sandbox_lcov = ${ICE_SANDBOX}/../cice_lcov_${sdate}-${stime}
- cp -p -r ${ICE_SANDBOX} ${sandbox_lcov}
- echo "shifting to sandbox = ${sandbox_lcov}"
- set ICE_SANDBOX = ${sandbox_lcov}
- set ICE_SCRIPTS = "${ICE_SANDBOX}/configuration/scripts"
- cd ${ICE_SANDBOX}
- ${ICE_SCRIPTS}/tests/lcov_modify_source.sh
-endif
-
 #---------------------------------------------------------------------
 # Setup tsfile and test suite support stuff
@@ -455,22 +441,7 @@ if ( ${dosuite} == 0 ) then
 set sets = ""
 else
- # generate unique set of suites in tarrays in order they are set
- set tarrays0 = `echo ${testsuite} | sed 's/,/ /g' | fmt -1 `
- #echo "${0}: tarrays0 = ${tarrays0}"
- set tarrays = $tarrays0[1]
- foreach t1 ( ${tarrays0} )
- set found = 0
- foreach t2 ( ${tarrays} )
- if ( ${t1} == ${t2} ) then
- set found = 1
- endif
- end
- if ( ${found} == 0 ) then
- set tarrays = ( ${tarrays} ${t1} )
- endif
- end
- #echo "${0}: tarrays = ${tarrays}"
+ set tarrays = `echo ${testsuite} | sed 's/,/ /g' | fmt -1 | sort -u`
 set testsuitecnt = 0
 foreach tarray ( ${tarrays} )
 @ testsuitecnt = ${testsuitecnt} + 1
@@ -497,14 +468,12 @@ else
 exit -1
 endif
 cp -f ${ICE_SCRIPTS}/tests/report_results.csh ${tsdir}
- cp -f ${ICE_SCRIPTS}/tests/create_fails.csh ${tsdir}
 cp -f ${ICE_SCRIPTS}/tests/poll_queue.csh ${tsdir}
 cat >! ${tsdir}/suite.submit << EOF0
 #!/bin/csh -f
 set nonomatch && rm -f ciceexe.* && unset nonomatch
-rm -f suite.jobs
 set dobuild = true
 set doreuse = true
@@ -653,7 +622,7 @@ EOF
 set bfbcomp_tmp = `echo $line | cut -d' ' -f5`
 # Append sets from .ts file to the $sets variable
- set sets = "$sets_tmp,$sets_base"
+ set sets = "$sets_base,$sets_tmp"
 # Create a new bfbcomp_base variable to store bfbcomp passed to cice.setup
 # Use bfbcomp_base or bfbcomp_tmp
@@ -663,6 +632,11 @@ EOF
 set bfbcomp = "$bfbcomp_tmp"
 endif
+ set fbfbcomp = ${spval}
+ if ($bfbcomp != ${spval}) then
+ set fbfbcomp = ${machcomp}_${bfbcomp}
+ endif
+
 #------------------------------------------------------------
 # Parse pesx with strict checking, limit pes for machine
@@ -765,18 +739,11 @@ EOF
 endif
 set testname_noid = ${spval}
- # create case for test cases
-
- set fbfbcomp = ${spval}
- if ($bfbcomp != ${spval}) then
- set fbfbcomp = ${machcomp}_${bfbcomp}
- endif
-
 if (${docase} == 0) then
 set soptions = ""
 # Create sorted array and remove duplicates and "none"
- set setsarray = `echo ${sets_tmp} | sed 's/,/ /g' | fmt -1 | sort -u`
+ set setsarray = `echo ${sets} | sed 's/,/ /g' | fmt -1 | sort -u`
 if ("${setsarray}" != "") then
 foreach field (${setsarray})
 if (${field} != "none") then
@@ -784,27 +751,11 @@ EOF
 endif
 end
 endif
- # Add options from command line, sort and remove duplicates
- set soptions_base = ""
- set setsarray_base = `echo ${sets_base} | sed 's/,/ /g' | fmt -1 | sort -u`
- if ("${setsarray_base}" != "") then
- foreach field (${setsarray_base})
- set soptions = ${soptions}"_"${field}
- set soptions_base = ${soptions_base}"_"${field}
- end
- endif
 # soptions starts with _
 set testname_noid = "${machcomp}_${test}_${grid}_${pesx}${soptions}"
 set testname_base = "${machcomp}_${test}_${grid}_${pesx}${soptions}.${testid}"
 set testname = "${tsdir}/${testname_base}"
 set case = ${testname}
-
- if (${dosuite} == 1) then
- # Add -s flags in cice.setup to bfbcomp name
- if ($bfbcomp != ${spval}) then
- set fbfbcomp = ${machcomp}_${bfbcomp}${soptions_base}
- endif
- endif
 endif
 if (-d ${case}) then
@@ -833,8 +784,8 @@ EOF
 # set default test output as failure
 if (${docase} == 0) then
 echo "#---" >! test_output
- echo "PEND ${testname_noid} build" >> test_output
- echo "PEND ${testname_noid} run" >> test_output
+ echo "FAIL ${testname_noid} build" >> test_output
+ echo "FAIL ${testname_noid} run" >> test_output
 endif
 # from basic script dir to case
@@ -928,9 +879,6 @@ EOF
 echo "ICE_PES = ${task}x${thrd}"
 echo "ICE_GRID = ${grid} (${ICE_DECOMP_NXGLOB}x${ICE_DECOMP_NYGLOB}) blocksize=${ICE_DECOMP_BLCKX}x${ICE_DECOMP_BLCKY}x${ICE_DECOMP_MXBLCKS}"
 echo "ICE_DECOMP = ${ICE_DECOMP_DECOMP} ${ICE_DECOMP_DSHAPE}"
- if ($fbfbcomp != ${spval}) then
- echo "ICE_BFBCOMP = ${fbfbcomp}"
- endif
 #------------------------------------------------------------
 # Copy in and update cice.settings and ice_in files
@@ -943,21 +891,9 @@ EOF
 if (-e ${fimods}) rm ${fimods}
 if (-e ${fsmods}) rm ${fsmods}
- # Use an existing ice_in file from the suite if it exists
- # to reduce time spent in parse_namelist
- set skip_parse_namelist = spval
- if (${dosuite} == 1) then
- set iceinfn = ../ice_in_save_${grid}${soptions}
- if (-e ${iceinfn}) then
- echo "use ${iceinfn}"
- cp ${iceinfn} ice_in
- set skip_parse_namelist = true
- endif
- endif
-
- # Set decomp info in namelist
 cat >! ${fimods} << EOF1
 # cice.setup settings
+
 nprocs = ${task}
 nx_global = ${ICE_DECOMP_NXGLOB}
 ny_global = ${ICE_DECOMP_NYGLOB}
@@ -986,6 +922,7 @@ EOF1
 cat >! ${fsmods} << EOF1
${fsmods} << EOF1 # cice.setup settings + setenv ICE_SANDBOX ${ICE_SANDBOX} setenv ICE_SCRIPTS ${ICE_SCRIPTS} setenv ICE_CASENAME ${casename} @@ -1054,59 +991,42 @@ EOF1 foreach name (${grid} $setsx) set found = 0 - if (-e ${ICE_SCRIPTS}/options/set_nml.${name}) then cat >> ${fimods} << EOF2 # set_nml.${name} + EOF2 - if ("${skip_parse_namelist}" == "true") then - # need to make sure the decomp info from the set_nml is picked up. each case - # has a slightly different decomp that is independent of the ice_in_save file. - # compute that then overwrite by set_nml as needed. - grep -i "distribution_type" ${ICE_SCRIPTS}/options/set_nml.${name} >> ${fimods} - grep -i "processor_shape" ${ICE_SCRIPTS}/options/set_nml.${name} >> ${fimods} - cat >> ${fimods} << EOF2 -# using saved ice_in -EOF2 - else - cat ${ICE_SCRIPTS}/options/set_nml.${name} >> ${fimods} - cat >> ${fimods} << EOF2 + cat ${ICE_SCRIPTS}/options/set_nml.${name} >> ${fimods} + cat >> ${fimods} << EOF2 + EOF2 - endif echo "adding namelist mods set_nml.${name}" echo "`date` ${0} adding namelist modes set_nml.${name}" >> ${casedir}/README.case set found = 1 endif - if (-e ${ICE_SCRIPTS}/options/set_env.${name}) then cat >> ${fsmods} << EOF2 # set_env.${name} + EOF2 cat ${ICE_SCRIPTS}/options/set_env.${name} >> ${fsmods} cat >> ${fsmods} << EOF2 + EOF2 echo "adding env mods set_env.${name}" echo "`date` ${0} adding namelist modes set_env.${name}" >> ${casedir}/README.case set found = 1 endif - if (${found} == 0) then echo "${0}: ERROR, ${ICE_SCRIPTS}/options/set_[nml,env].${name} not found" exit -1 endif end -#set pd1 = `date -u "+%s%N"` -#@ pdd = ( $pd1 - $pd0 ) / 1000000 -#echo "tcxp b4 parse $pdd" ${casescr}/parse_settings.sh cice.settings ${fsmods} - if ($status != 0) then - echo "${0}: ERROR, parse_settings.sh aborted" - exit -1 - endif ${casescr}/parse_namelist.sh ice_in ${fimods} if ($status != 0) then echo "${0}: ERROR, parse_namelist.sh aborted" @@ -1115,20 +1035,6 @@ EOF2 source ./cice.settings source ./env.${machcomp} -nomodules || exit 2 ${casescr}/parse_namelist_from_env.sh ice_in - if ($status != 0) then - echo "${0}: ERROR, parse_namelist_from_env.sh aborted" - exit -1 - endif -#set pd1 = `date -u "+%s%N"` -#@ pdd = ( $pd1 - $pd0 ) / 1000000 -#echo "tcxp after parse $pdd" - - # Save ice_in in the suite to reduce time spent in parse_namelist - if (${dosuite} == 1) then - if !(-e ${iceinfn}) then - cp ice_in ${iceinfn} - endif - endif #------------------------------------------------------------ # Generate run script @@ -1186,26 +1092,19 @@ echo "-------test--------------" echo "${testname_base}" cd ${testname_base} source ./cice.settings -set bldstat = 0 if (\${dobuild} == true) then if (\${doreuse} == true) then - set ciceexe = "../ciceexe.\${ICE_TARGET}.\${ICE_ENVNAME}.\${ICE_COMMDIR}.\${ICE_BLDDEBUG}.\${ICE_THREADED}.\${ICE_IOTYPE}.\${ICE_SNICARHC}" + set ciceexe = "../ciceexe.\${ICE_ENVNAME}.\${ICE_COMMDIR}.\${ICE_BLDDEBUG}.\${ICE_THREADED}.\${ICE_IOTYPE}" ./cice.build --exe \${ciceexe} - set bldstat = \${status} if !(-e \${ciceexe}) cp -p \${ICE_RUNDIR}/cice \${ciceexe} else ./cice.build - set bldstat = \${status} endif endif -if (\$bldstat == 0) then - if (\${dosubmit} == true) then - set jobid = \`./cice.submit\` - echo "\$jobid" - echo "\$jobid \${ICE_TESTNAME} " >> ../suite.jobs - else if (\${dorun} == true) then - ./cice.test - endif +if (\${dosubmit} == true) then + ./cice.submit | tee -a ../suite.jobs +else if (\${dorun} == true) then + ./cice.test endif cd .. 
EOF @@ -1218,10 +1117,6 @@ EOF echo "" endif -#set pd1 = `date -u "+%s%N"` -#@ pdd = ( $pd1 - $pd0 ) / 1000000 -#echo "tcxp case done $pdd" - # This is the foreach end for the testsuite end # This is the foreach end for the envnames @@ -1236,7 +1131,6 @@ if ( ${dosuite} == 1 ) then cat >> ${tsdir}/suite.submit << EOF0 set nonomatch && rm -f ciceexe.* && unset nonomatch -set nonomatch && rm -f ice_in_save* && unset nonomatch EOF0 @@ -1279,10 +1173,6 @@ endif #--------------------------------------------- -#set pd1 = `date -u "+%s%N"` -#@ pdd = ( $pd1 - $pd0 ) / 1000000 -#echo "tcxp done $pdd" - echo " " echo "${0} done" echo " " diff --git a/cicecore/cicedyn/analysis/ice_diagnostics.F90 b/cicecore/cicedyn/analysis/ice_diagnostics.F90 index 3a6ceb83d..40da6cb64 100644 --- a/cicecore/cicedyn/analysis/ice_diagnostics.F90 +++ b/cicecore/cicedyn/analysis/ice_diagnostics.F90 @@ -11,11 +11,9 @@ module ice_diagnostics use ice_kinds_mod - use ice_blocks, only: nx_block, ny_block use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c1 use ice_calendar, only: istep1 - use ice_domain_size, only: nslyr use ice_fileunits, only: nu_diag use ice_fileunits, only: flush_fileunit use ice_exit, only: abort_ice @@ -27,8 +25,9 @@ module ice_diagnostics implicit none private - public :: runtime_diags, init_mass_diags, init_diags, debug_ice, & - print_state, diagnostic_abort + public :: runtime_diags, init_mass_diags, init_diags, & + print_state, print_points_state, diagnostic_abort + ! diagnostic output file character (len=char_len), public :: diag_file @@ -36,13 +35,9 @@ module ice_diagnostics ! point print data logical (kind=log_kind), public :: & - debug_model , & ! if true, debug model at high level print_points , & ! if true, print point data print_global ! if true, print global data - integer (kind=int_kind), public :: & - debug_model_step = 0 ! begin printing at istep1=debug_model_step - integer (kind=int_kind), parameter, public :: & npnt = 2 ! total number of points to be printed @@ -53,7 +48,7 @@ module ice_diagnostics real (kind=dbl_kind), parameter :: & umax_stab = 1.0_dbl_kind , & ! ice speed threshold for instability (m/s) aice_extmin = 0.15_dbl_kind ! min aice value for ice extent calc - + real (kind=dbl_kind), dimension(npnt), public :: & latpnt , & ! latitude of diagnostic points lonpnt ! longitude of diagnostic points @@ -75,20 +70,12 @@ module ice_diagnostics integer (kind=int_kind), dimension(npnt), public :: & piloc, pjloc, pbloc, pmloc ! location of diagnostic points - integer (kind=int_kind), public :: & - debug_model_i = -1, & ! location of debug_model point, local i index - debug_model_j = -1, & ! location of debug_model point, local j index - debug_model_iblk = -1, & ! location of debug_model point, local block number - debug_model_task = -1 ! location of debug_model point, local task number - ! for hemispheric water and heat budgets real (kind=dbl_kind) :: & totmn , & ! total ice/snow water mass (nh) totms , & ! total ice/snow water mass (sh) totmin , & ! total ice water mass (nh) totmis , & ! total ice water mass (sh) - totsn , & ! total salt mass (nh) - totss , & ! total salt mass (sh) toten , & ! total ice/snow energy (J) totes ! total ice/snow energy (J) @@ -100,6 +87,16 @@ module ice_diagnostics totaeron , & ! total aerosol mass totaeros ! total aerosol mass + ! printing info for routine print_state + ! iblkp, ip, jp, mtask identify the grid cell to print +! 
character (char_len) :: plabel + integer (kind=int_kind), parameter, public :: & + check_step = 999999999, & ! begin printing at istep1=check_step + iblkp = 1, & ! block number + ip = 72, & ! i index + jp = 11, & ! j index + mtask = 0 ! my_task + !======================================================================= contains @@ -115,6 +112,7 @@ module ice_diagnostics subroutine runtime_diags (dt) use ice_arrays_column, only: floe_rad_c + use ice_blocks, only: nx_block, ny_block use ice_broadcast, only: broadcast_scalar use ice_constants, only: c1, c1000, c2, p001, p5, & field_loc_center, m2_to_km2 @@ -123,13 +121,13 @@ subroutine runtime_diags (dt) use ice_flux, only: alvdr, alidr, alvdf, alidf, evap, fsnow, frazil, & fswabs, fswthru, flw, flwout, fsens, fsurf, flat, frzmlt_init, frain, fpond, & fhocn_ai, fsalt_ai, fresh_ai, frazil_diag, & - update_ocn_f, cpl_frazil, Tair, Qa, fsw, fcondtop, meltt, meltb, meltl, snoice, & + update_ocn_f, Tair, Qa, fsw, fcondtop, meltt, meltb, meltl, snoice, & dsnow, congel, sst, sss, Tf, fhocn, & swvdr, swvdf, swidr, swidf, & alvdr_init, alvdf_init, alidr_init, alidf_init use ice_flux_bgc, only: faero_atm, faero_ocn, fiso_atm, fiso_ocn use ice_global_reductions, only: global_sum, global_sum_prod, global_maxval - use ice_grid, only: lmask_n, lmask_s, tarean, tareas, grid_ice, grid_average_X2Y + use ice_grid, only: lmask_n, lmask_s, tarean, tareas use ice_state ! everything ! tcraig, this is likely to cause circular dependency because ice_prescribed_mod is high level routine #ifdef CESMCOUPLED @@ -145,29 +143,23 @@ subroutine runtime_diags (dt) i, j, k, n, iblk, nc, & ktherm, & nt_tsfc, nt_aero, nt_fbri, nt_apnd, nt_hpnd, nt_fsd, & - nt_isosno, nt_isoice, nt_rsnw, nt_rhos, nt_smice, nt_smliq + nt_isosno, nt_isoice logical (kind=log_kind) :: & - tr_pond_topo, tr_brine, tr_iso, tr_aero, calc_Tsfc, tr_fsd, & - tr_snow, snwgrain + tr_pond_topo, tr_brine, tr_iso, tr_aero, calc_Tsfc, tr_fsd real (kind=dbl_kind) :: & rhow, rhos, rhoi, puny, awtvdr, awtidr, awtvdf, awtidf, & rhofresh, lfresh, lvap, ice_ref_salinity, Tffresh - character (len=char_len) :: & - snwredist, saltflux_option - ! hemispheric state quantities real (kind=dbl_kind) :: & umaxn, hmaxn, shmaxn, arean, snwmxn, extentn, shmaxnt, & umaxs, hmaxs, shmaxs, areas, snwmxs, extents, shmaxst, & etotn, mtotn, micen, msnwn, pmaxn, ketotn, & etots, mtots, mices, msnws, pmaxs, ketots, & - stotn, & - stots, & urmsn, albtotn, arean_alb, mpndn, ptotn, spondn, & - urmss, albtots, areas_alb, mpnds, ptots, sponds + urmss, albtots, areas_alb, mpnds, ptots, sponds ! hemispheric flux quantities real (kind=dbl_kind) :: & @@ -195,42 +187,28 @@ subroutine runtime_diags (dt) ! fields at diagnostic points real (kind=dbl_kind), dimension(npnt) :: & - paice, pTair, pQa, pfsnow, pfrain, pfsw, pflw, & + paice, pTair, pQa, pfsnow, pfrain, pfsw, pflw, & pTsfc, pevap, pfswabs, pflwout, pflat, pfsens, & pfsurf, pfcondtop, psst, psss, pTf, hiavg, hsavg, hbravg, & pfhocn, psalt, fsdavg, & - pmeltt, pmeltb, pmeltl, psnoice, pdsnow, pfrazil, pcongel, & - prsnwavg, prhosavg, psmicetot, psmliqtot, psmtot + pmeltt, pmeltb, pmeltl, psnoice, pdsnow, pfrazil, pcongel real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - uvelT, vvelT, & ! u,v on T points - work1, work2 ! temporary - - real (kind=dbl_kind), parameter :: & - maxval_spval = -0.9_dbl_kind*HUGE(0.0_dbl_kind) ! spval to detect - ! undefined values returned from global_maxval. if global_maxval - ! is applied to a region that does not exist (for instance - ! 
southern hemisphere in box cases), global_maxval - ! returns -HUGE which we want to avoid writing. The - ! return value is checked against maxval_spval before writing. + work1, work2 character(len=*), parameter :: subname = '(runtime_diags)' call icepack_query_parameters(ktherm_out=ktherm, calc_Tsfc_out=calc_Tsfc) call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_pond_topo_out=tr_pond_topo, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso, & - tr_snow_out=tr_snow) + tr_pond_topo_out=tr_pond_topo, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_fbri_out=nt_fbri, nt_Tsfc_out=nt_Tsfc, & nt_aero_out=nt_aero, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & - nt_fsd_out=nt_fsd,nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice, & - nt_rsnw_out=nt_rsnw, nt_rhos_out=nt_rhos, & - nt_smice_out=nt_smice, nt_smliq_out=nt_smliq) + nt_fsd_out=nt_fsd,nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_query_parameters(Tffresh_out=Tffresh, rhos_out=rhos, & rhow_out=rhow, rhoi_out=rhoi, puny_out=puny, & awtvdr_out=awtvdr, awtidr_out=awtidr, awtvdf_out=awtvdf, awtidf_out=awtidf, & rhofresh_out=rhofresh, lfresh_out=lfresh, lvap_out=lvap, & - ice_ref_salinity_out=ice_ref_salinity,snwredist_out=snwredist, & - snwgrain_out=snwgrain, saltflux_option_out=saltflux_option) + ice_ref_salinity_out=ice_ref_salinity) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -241,8 +219,6 @@ subroutine runtime_diags (dt) ! hemispheric quantities ! total ice area - arean = c0 - areas = c0 arean = global_sum(aice, distrb_info, field_loc_center, tarean) areas = global_sum(aice, distrb_info, field_loc_center, tareas) arean = arean * m2_to_km2 @@ -259,22 +235,18 @@ subroutine runtime_diags (dt) enddo enddo !$OMP END PARALLEL DO - extentn = c0 - extents = c0 - extentn = global_sum(work1, distrb_info, field_loc_center, tarean) - extents = global_sum(work1, distrb_info, field_loc_center, tareas) + extentn = global_sum(work1, distrb_info, field_loc_center, & + tarean) + extents = global_sum(work1, distrb_info, field_loc_center, & + tareas) extentn = extentn * m2_to_km2 extents = extents * m2_to_km2 ! total ice volume - shmaxn = c0 - shmaxs = c0 shmaxn = global_sum(vice, distrb_info, field_loc_center, tarean) shmaxs = global_sum(vice, distrb_info, field_loc_center, tareas) ! total snow volume - snwmxn = c0 - snwmxs = c0 snwmxn = global_sum(vsno, distrb_info, field_loc_center, tarean) snwmxs = global_sum(vsno, distrb_info, field_loc_center, tareas) @@ -284,47 +256,35 @@ subroutine runtime_diags (dt) if (tr_pond_topo) then !$OMP PARALLEL DO PRIVATE(iblk,i,j,n) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - do n = 1, ncat - work1(i,j,iblk) = work1(i,j,iblk) & - + aicen(i,j,n,iblk) & - * trcrn(i,j,nt_apnd,n,iblk) & - * trcrn(i,j,nt_hpnd,n,iblk) - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + do n = 1, ncat + work1(i,j,iblk) = work1(i,j,iblk) & + + aicen(i,j,n,iblk) & + * trcrn(i,j,nt_apnd,n,iblk) & + * trcrn(i,j,nt_hpnd,n,iblk) enddo enddo + enddo + enddo !$OMP END PARALLEL DO ptotn = global_sum(work1, distrb_info, field_loc_center, tarean) ptots = global_sum(work1, distrb_info, field_loc_center, tareas) endif - ! total ice-snow kinetic energy, on T points. 
- if (grid_ice == 'B') then - call grid_average_X2Y('A',uvel ,'U',uvelT,'T') - call grid_average_X2Y('A',vvel ,'U',vvelT,'T') - elseif (grid_ice == 'C') then - call grid_average_X2Y('A',uvelE,'E',uvelT,'T') - call grid_average_X2Y('A',vvelN,'N',vvelT,'T') - elseif (grid_ice == 'CD') then - call grid_average_X2Y('A',uvelE,'E',uvelN,'N',uvelT,'T') - call grid_average_X2Y('A',vvelE,'E',vvelN,'N',vvelT,'T') - endif - + ! total ice-snow kinetic energy !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - work1(i,j,iblk) = p5 * (rhos*vsno(i,j,iblk) + rhoi*vice(i,j,iblk)) & - * (uvelT(i,j,iblk)**2 + vvelT(i,j,iblk)**2) + work1(i,j,iblk) = p5 & + * (rhos*vsno(i,j,iblk) + rhoi*vice(i,j,iblk)) & + * (uvel(i,j,iblk)**2 + vvel(i,j,iblk)**2) enddo enddo enddo !$OMP END PARALLEL DO - ketotn = c0 - ketots = c0 ketotn = global_sum(work1, distrb_info, field_loc_center, tarean) ketots = global_sum(work1, distrb_info, field_loc_center, tareas) @@ -363,8 +323,8 @@ subroutine runtime_diags (dt) enddo enddo !$OMP END PARALLEL DO - - arean_alb = global_sum(aice, distrb_info, field_loc_center, work2) + + arean_alb = global_sum(aice, distrb_info, field_loc_center, work2) albtotn = global_sum_prod(aice, work1, distrb_info, & field_loc_center, work2) @@ -389,7 +349,7 @@ subroutine runtime_diags (dt) enddo !$OMP END PARALLEL DO - areas_alb = global_sum(aice, distrb_info, field_loc_center, work2) + areas_alb = global_sum(aice, distrb_info, field_loc_center, work2) albtots = global_sum_prod(aice, work1, distrb_info, & field_loc_center, work2) @@ -401,57 +361,23 @@ subroutine runtime_diags (dt) endif ! maximum ice volume (= mean thickness including open water) - hmaxn = c0 - hmaxs = c0 hmaxn = global_maxval(vice, distrb_info, lmask_n) hmaxs = global_maxval(vice, distrb_info, lmask_s) - if (hmaxn < maxval_spval) hmaxn = c0 - if (hmaxs < maxval_spval) hmaxs = c0 ! maximum ice speed - if (grid_ice == 'CD') then - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = max(sqrt(uvelE(i,j,iblk)**2 + vvelE(i,j,iblk)**2), & - sqrt(uvelN(i,j,iblk)**2 + vvelN(i,j,iblk)**2)) - enddo - enddo - enddo - !$OMP END PARALLEL DO - elseif (grid_ice == 'C') then - ! map uvelE to N and vvelN to E then compute max on E and N - call grid_average_X2Y('A',uvelE,'E',work1,'N') ! work1 =~ uvelN - call grid_average_X2Y('A',vvelN,'N',work2,'E') ! work2 =~ vvelE - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = max(sqrt(uvelE(i,j,iblk)**2 + work2(i,j,iblk)**2), & - sqrt(work1(i,j,iblk)**2 + vvelN(i,j,iblk)**2)) - enddo - enddo + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = sqrt(uvel(i,j,iblk)**2 & + + vvel(i,j,iblk)**2) enddo - !$OMP END PARALLEL DO - else - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = sqrt(uvel(i,j,iblk)**2 + vvel(i,j,iblk)**2) - enddo - enddo enddo - !$OMP END PARALLEL DO - endif + enddo + !$OMP END PARALLEL DO - umaxn = c0 - umaxs = c0 umaxn = global_maxval(work1, distrb_info, lmask_n) umaxs = global_maxval(work1, distrb_info, lmask_s) - if (umaxn < maxval_spval) umaxn = c0 - if (umaxs < maxval_spval) umaxs = c0 ! Write warning message if ice speed is too big ! 
(Ice speeds of ~1 m/s or more usually indicate instability) @@ -460,31 +386,31 @@ subroutine runtime_diags (dt) if (umaxn > umax_stab) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (abs(work1(i,j,iblk) - umaxn) < puny) then - write(nu_diag,*) ' ' - write(nu_diag,*) 'Warning, large ice speed' - write(nu_diag,*) 'my_task, iblk, i, j, umaxn:', & - my_task, iblk, i, j, umaxn - endif - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + if (abs(work1(i,j,iblk) - umaxn) < puny) then + write(nu_diag,*) ' ' + write(nu_diag,*) 'Warning, large ice speed' + write(nu_diag,*) 'my_task, iblk, i, j, umaxn:', & + my_task, iblk, i, j, umaxn + endif + enddo + enddo enddo !$OMP END PARALLEL DO elseif (umaxs > umax_stab) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (abs(work1(i,j,iblk) - umaxs) < puny) then - write(nu_diag,*) ' ' - write(nu_diag,*) 'Warning, large ice speed' - write(nu_diag,*) 'my_task, iblk, i, j, umaxs:', & - my_task, iblk, i, j, umaxs - endif - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + if (abs(work1(i,j,iblk) - umaxs) < puny) then + write(nu_diag,*) ' ' + write(nu_diag,*) 'Warning, large ice speed' + write(nu_diag,*) 'my_task, iblk, i, j, umaxs:', & + my_task, iblk, i, j, umaxs + endif + enddo + enddo enddo !$OMP END PARALLEL DO endif ! umax @@ -492,37 +418,22 @@ subroutine runtime_diags (dt) ! maximum ice strength - pmaxn = c0 - pmaxs = c0 pmaxn = global_maxval(strength, distrb_info, lmask_n) pmaxs = global_maxval(strength, distrb_info, lmask_s) - if (pmaxn < maxval_spval) pmaxn = c0 - if (pmaxs < maxval_spval) pmaxs = c0 pmaxn = pmaxn / c1000 ! convert to kN/m - pmaxs = pmaxs / c1000 + pmaxs = pmaxs / c1000 if (print_global) then ! total ice/snow internal energy call total_energy (work1) - - etotn = c0 - etots = c0 + etotn = global_sum(work1, distrb_info, & field_loc_center, tarean) etots = global_sum(work1, distrb_info, & field_loc_center, tareas) - ! total salt volume - call total_salt (work2) - - stotn = global_sum(work2, distrb_info, & - field_loc_center, tarean) - stots = global_sum(work2, distrb_info, & - field_loc_center, tareas) - - !----------------------------------------------------------------- ! various fluxes !----------------------------------------------------------------- @@ -532,8 +443,6 @@ subroutine runtime_diags (dt) ! evaporation - evpn = c0 - evps = c0 evpn = global_sum_prod(evap, aice, distrb_info, & field_loc_center, tarean) evps = global_sum_prod(evap, aice, distrb_info, & @@ -552,8 +461,6 @@ subroutine runtime_diags (dt) endif ! salt flux - sfsaltn = c0 - sfsalts = c0 sfsaltn = global_sum(fsalt_ai, distrb_info, & field_loc_center, tarean) sfsalts = global_sum(fsalt_ai, distrb_info, & @@ -562,8 +469,6 @@ subroutine runtime_diags (dt) sfsalts = sfsalts*dt ! fresh water flux - sfreshn = c0 - sfreshs = c0 sfreshn = global_sum(fresh_ai, distrb_info, & field_loc_center, tarean) sfreshs = global_sum(fresh_ai, distrb_info, & @@ -585,8 +490,6 @@ subroutine runtime_diags (dt) ! ocean heat ! Note: fswthru not included because it does not heat ice - fhocnn = c0 - fhocns = c0 fhocnn = global_sum(fhocn_ai, distrb_info, & field_loc_center, tarean) fhocns = global_sum(fhocn_ai, distrb_info, & @@ -620,14 +523,14 @@ subroutine runtime_diags (dt) enddo !$OMP END PARALLEL DO - else ! fsurf is computed by atmosphere model + else ! 
fsurf is computed by atmosphere model !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block work1(i,j,iblk) = & - (fsurf(i,j,iblk) - flat(i,j,iblk)) & + (fsurf(i,j,iblk) - flat(i,j,iblk)) & * aice(i,j,iblk) enddo enddo @@ -636,13 +539,11 @@ subroutine runtime_diags (dt) endif ! calc_Tsfc - fhatmn = c0 - fhatms = c0 fhatmn = global_sum(work1, distrb_info, & field_loc_center, tarean) fhatms = global_sum(work1, distrb_info, & field_loc_center, tareas) - + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block @@ -654,8 +555,6 @@ subroutine runtime_diags (dt) enddo !$OMP END PARALLEL DO - fswnetn = c0 - fswnets = c0 fswnetn = global_sum(work1, distrb_info, & field_loc_center, tarean) fswnets = global_sum(work1, distrb_info, & @@ -674,8 +573,6 @@ subroutine runtime_diags (dt) enddo !$OMP END PARALLEL DO - fswdnn = c0 - fswdns = c0 fswdnn = global_sum(work1, distrb_info, & field_loc_center, tarean) fswdns = global_sum(work1, distrb_info, & @@ -691,17 +588,12 @@ subroutine runtime_diags (dt) enddo enddo !$OMP END PARALLEL DO - - fhfrzn = c0 - fhfrzs = c0 fhfrzn = global_sum(work1, distrb_info, & field_loc_center, tarean) fhfrzs = global_sum(work1, distrb_info, & field_loc_center, tareas) ! rain - rnn = c0 - rns = c0 rnn = global_sum_prod(frain, aice_init, distrb_info, & field_loc_center, tarean) rns = global_sum_prod(frain, aice_init, distrb_info, & @@ -710,8 +602,6 @@ subroutine runtime_diags (dt) rns = rns*dt ! snow - snn = c0 - sns = c0 snn = global_sum_prod(fsnow, aice_init, distrb_info, & field_loc_center, tarean) sns = global_sum_prod(fsnow, aice_init, distrb_info, & @@ -722,11 +612,8 @@ subroutine runtime_diags (dt) ! frazil ice growth !! should not be multiplied by aice ! m/step->kg/m^2/s work1(:,:,:) = frazil(:,:,:)*rhoi/dt - if (.not. update_ocn_f .and. ktherm == 2 .and. cpl_frazil == 'fresh_ice_correction') then + if (ktherm == 2 .and. .not.update_ocn_f) & work1(:,:,:) = (frazil(:,:,:)-frazil_diag(:,:,:))*rhoi/dt - endif - frzn = c0 - frzs = c0 frzn = global_sum(work1, distrb_info, & field_loc_center, tarean) frzs = global_sum(work1, distrb_info, & @@ -745,7 +632,7 @@ subroutine runtime_diags (dt) ! total ice, snow and pond mass mtotn = micen + msnwn + mpndn mtots = mices + msnws + mpnds - + ! mass change since beginning of time step delmin = mtotn - totmn delmis = mtots - totms @@ -764,14 +651,14 @@ subroutine runtime_diags (dt) fluxs = c0 if( arean > c0) then ! water associated with frazil ice included in fresh - fluxn = rnn + snn + evpn - sfreshn + fluxn = rnn + snn + evpn - sfreshn if (.not. update_ocn_f) then fluxn = fluxn + frzn endif endif if( areas > c0) then ! water associated with frazil ice included in fresh - fluxs = rns + sns + evps - sfreshs + fluxs = rns + sns + evps - sfreshs if (.not. update_ocn_f) then fluxs = fluxs + frzs endif @@ -797,22 +684,12 @@ subroutine runtime_diags (dt) swerrs = (fswnets - fswdns) / (fswnets - c1) ! salt mass - if (saltflux_option == 'prognostic') then - ! compute the total salt mass - msltn = stotn*rhoi*p001 - mslts = stots*rhoi*p001 - - ! change in salt mass - delmsltn = rhoi*(stotn-totsn)*p001 - delmslts = rhoi*(stots-totss)*p001 - else - msltn = micen*ice_ref_salinity*p001 - mslts = mices*ice_ref_salinity*p001 + msltn = micen*ice_ref_salinity*p001 + mslts = mices*ice_ref_salinity*p001 - ! change in salt mass - delmsltn = delmxn*ice_ref_salinity*p001 - delmslts = delmxs*ice_ref_salinity*p001 - endif + ! 
change in salt mass + delmsltn = delmxn*ice_ref_salinity*p001 + delmslts = delmxs*ice_ref_salinity*p001 ! salt error serrn = (sfsaltn + delmsltn) / (msltn + c1) @@ -820,16 +697,6 @@ subroutine runtime_diags (dt) ! isotopes if (tr_iso) then - fisoan = c0 - fisoas = c0 - fisoon = c0 - fisoos = c0 - isototn = c0 - isotots = c0 - isomx1n = c0 - isomx1s = c0 - isorn = c0 - isors = c0 do n = 1, n_iso fisoan(n) = global_sum_prod(fiso_atm(:,:,n,:), aice_init, & distrb_info, field_loc_center, tarean) @@ -844,7 +711,7 @@ subroutine runtime_diags (dt) fisoon(n) = fisoon(n)*dt fisoos(n) = fisoos(n)*dt - !$OMP PARALLEL DO PRIVATE(iblk,i,j,k) + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block @@ -862,8 +729,6 @@ subroutine runtime_diags (dt) isotots(n) = global_sum(work1, distrb_info, field_loc_center, tareas) isomx1n(n) = global_maxval(work1, distrb_info, lmask_n) isomx1s(n) = global_maxval(work1, distrb_info, lmask_s) - if (isomx1n(n) < maxval_spval) isomx1n(n) = c0 - if (isomx1s(n) < maxval_spval) isomx1s(n) = c0 isorn(n) = (totison(n)-isototn(n)+fisoan(n)-fisoon(n))/(isototn(n)+c1) isors(n) = (totisos(n)-isotots(n)+fisoas(n)-fisoos(n))/(isotots(n)+c1) enddo ! n_iso @@ -871,16 +736,6 @@ subroutine runtime_diags (dt) ! aerosols if (tr_aero) then - faeran = c0 - faeras = c0 - faeron = c0 - faeros = c0 - aerototn = c0 - aerotots = c0 - aeromx1n = c0 - aeromx1s = c0 - aerrn = c0 - aerrs = c0 do n = 1, n_aero faeran(n) = global_sum_prod(faero_atm(:,:,n,:), aice_init, & distrb_info, field_loc_center, tarean) @@ -912,8 +767,6 @@ subroutine runtime_diags (dt) aerotots(n) = global_sum(work1, distrb_info, field_loc_center, tareas) aeromx1n(n) = global_maxval(work1, distrb_info, lmask_n) aeromx1s(n) = global_maxval(work1, distrb_info, lmask_s) - if (aeromx1n(n) < maxval_spval) aeromx1n(n) = c0 - if (aeromx1s(n) < maxval_spval) aeromx1s(n) = c0 aerrn(n) = (totaeron(n)-aerototn(n)+faeran(n)-faeron(n)) & / (aerototn(n) + c1) @@ -947,7 +800,7 @@ subroutine runtime_diags (dt) pfsw(n) = fsw(i,j,iblk) ! shortwave radiation pflw(n) = flw(i,j,iblk) ! longwave radiation paice(n) = aice(i,j,iblk) ! ice area - + fsdavg(n) = c0 ! avg floe effective radius hiavg(n) = c0 ! avg snow/ice thickness hsavg(n) = c0 @@ -973,27 +826,6 @@ subroutine runtime_diags (dt) enddo endif endif - if (tr_snow) then ! snow tracer quantities - prsnwavg (n) = c0 ! avg snow grain radius - prhosavg (n) = c0 ! avg snow density - psmicetot(n) = c0 ! total mass of ice in snow (kg/m2) - psmliqtot(n) = c0 ! total mass of liquid in snow (kg/m2) - psmtot (n) = c0 ! total mass of snow volume (kg/m2) - if (vsno(i,j,iblk) > c0) then - do k = 1, nslyr - prsnwavg (n) = prsnwavg (n) + trcr(i,j,nt_rsnw +k-1,iblk) ! snow grain radius - prhosavg (n) = prhosavg (n) + trcr(i,j,nt_rhos +k-1,iblk) ! compacted snow density - psmicetot(n) = psmicetot(n) + trcr(i,j,nt_smice+k-1,iblk) * vsno(i,j,iblk) - psmliqtot(n) = psmliqtot(n) + trcr(i,j,nt_smliq+k-1,iblk) * vsno(i,j,iblk) - end do - endif - psmtot (n) = rhos * vsno(i,j,iblk) ! mass of ice in standard density snow - prsnwavg (n) = prsnwavg (n) / real(nslyr,kind=dbl_kind) ! snow grain radius - prhosavg (n) = prhosavg (n) / real(nslyr,kind=dbl_kind) ! compacted snow density - psmicetot(n) = psmicetot(n) / real(nslyr,kind=dbl_kind) ! mass of ice in snow - psmliqtot(n) = psmliqtot(n) / real(nslyr,kind=dbl_kind) ! mass of liquid in snow - end if - psalt(n) = c0 if (vice(i,j,iblk) /= c0) psalt(n) = work2(i,j,iblk)/vice(i,j,iblk) pTsfc(n) = trcr(i,j,nt_Tsfc,iblk) ! 
ice/snow sfc temperature pevap(n) = evap(i,j,iblk)*dt/rhoi ! sublimation/condensation @@ -1012,7 +844,7 @@ subroutine runtime_diags (dt) pcongel(n) = congel(i,j,iblk) ! congelation ice pdhi(n) = vice(i,j,iblk) - pdhi(n) ! ice thickness change pdhs(n) = vsno(i,j,iblk) - pdhs(n) ! snow thickness change - pde(n) =-(work1(i,j,iblk)- pde(n))/dt ! ice/snow energy change + pde(n) =-(work1(i,j,iblk)- pde(n))/dt ! ice/snow energy change psst(n) = sst(i,j,iblk) ! sea surface temperature psss(n) = sss(i,j,iblk) ! sea surface salinity pTf(n) = Tf(i,j,iblk) ! freezing temperature @@ -1045,11 +877,6 @@ subroutine runtime_diags (dt) call broadcast_scalar(pmeltl (n), pmloc(n)) call broadcast_scalar(psnoice (n), pmloc(n)) call broadcast_scalar(pdsnow (n), pmloc(n)) - call broadcast_scalar(psmtot (n), pmloc(n)) - call broadcast_scalar(prsnwavg (n), pmloc(n)) - call broadcast_scalar(prhosavg (n), pmloc(n)) - call broadcast_scalar(psmicetot(n), pmloc(n)) - call broadcast_scalar(psmliqtot(n), pmloc(n)) call broadcast_scalar(pfrazil (n), pmloc(n)) call broadcast_scalar(pcongel (n), pmloc(n)) call broadcast_scalar(pdhi (n), pmloc(n)) @@ -1059,7 +886,7 @@ subroutine runtime_diags (dt) call broadcast_scalar(psss (n), pmloc(n)) call broadcast_scalar(pTf (n), pmloc(n)) call broadcast_scalar(pfhocn (n), pmloc(n)) - + enddo ! npnt endif ! print_points @@ -1107,7 +934,7 @@ subroutine runtime_diags (dt) write(nu_diag,901) 'arwt snw mass (kg) = ',msnwn,msnws if (tr_pond_topo) & write(nu_diag,901) 'arwt pnd mass (kg) = ',mpndn,mpnds - + write(nu_diag,901) 'arwt tot mass (kg) = ',mtotn,mtots write(nu_diag,901) 'arwt tot mass chng(kg) = ',delmin,delmis write(nu_diag,901) 'arwt water flux = ',fluxn,fluxs @@ -1233,26 +1060,6 @@ subroutine runtime_diags (dt) write(nu_diag,900) 'effective dhi (m) = ',pdhi(1),pdhi(2) write(nu_diag,900) 'effective dhs (m) = ',pdhs(1),pdhs(2) write(nu_diag,900) 'intnl enrgy chng(W/m^2)= ',pde (1),pde (2) - - if (tr_snow) then - if (trim(snwredist) /= 'none') then - write(nu_diag,900) 'avg snow density(kg/m3)= ',prhosavg(1) & - ,prhosavg(2) - endif - if (snwgrain) then - write(nu_diag,900) 'avg snow grain radius = ',prsnwavg(1) & - ,prsnwavg(2) - write(nu_diag,900) 'mass ice in snow(kg/m2)= ',psmicetot(1) & - ,psmicetot(2) - write(nu_diag,900) 'mass liq in snow(kg/m2)= ',psmliqtot(1) & - ,psmliqtot(2) - write(nu_diag,900) 'mass std snow (kg/m2)= ',psmtot(1) & - ,psmtot(2) - write(nu_diag,900) 'max ice+liq (kg/m2)= ',rhow * hsavg(1) & - ,rhow * hsavg(2) - endif - endif - write(nu_diag,*) '----------ocn----------' write(nu_diag,900) 'sst (C) = ',psst(1),psst(2) write(nu_diag,900) 'sss (ppt) = ',psss(1),psss(2) @@ -1263,6 +1070,9 @@ subroutine runtime_diags (dt) endif ! print_points endif ! 
my_task = master_task + 799 format (27x,a24) + 800 format (a25,2x,f24.17) + 801 format (a25,2x,1pe24.17) 899 format (27x,a24,2x,a24) 900 format (a25,2x,f24.17,2x,f24.17) 901 format (a25,2x,1pe24.17,2x,1pe24.17) @@ -1279,6 +1089,7 @@ end subroutine runtime_diags subroutine init_mass_diags + use ice_blocks, only: nx_block, ny_block use ice_constants, only: field_loc_center use ice_domain, only: distrb_info, nblocks use ice_domain_size, only: n_iso, n_aero, ncat, max_blocks @@ -1297,7 +1108,7 @@ subroutine init_mass_diags rhoi, rhos, rhofresh real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1, work2 + work1 character(len=*), parameter :: subname = '(init_mass_diags)' @@ -1332,12 +1143,6 @@ subroutine init_mass_diags toten = global_sum(work1, distrb_info, field_loc_center, tarean) totes = global_sum(work1, distrb_info, field_loc_center, tareas) - ! north/south salt - call total_salt (work2) - totsn = global_sum(work2, distrb_info, field_loc_center, tarean) - totss = global_sum(work2, distrb_info, field_loc_center, tareas) - - if (print_points) then do n = 1, npnt if (my_task == pmloc(n)) then @@ -1354,7 +1159,7 @@ subroutine init_mass_diags if (tr_iso) then do n=1,n_iso - !$OMP PARALLEL DO PRIVATE(iblk,i,j,k) + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block @@ -1377,14 +1182,14 @@ subroutine init_mass_diags do n=1,n_aero !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = trcr(i,j,nt_aero +4*(n-1),iblk)*vsno(i,j,iblk) & - + trcr(i,j,nt_aero+1+4*(n-1),iblk)*vsno(i,j,iblk) & - + trcr(i,j,nt_aero+2+4*(n-1),iblk)*vice(i,j,iblk) & - + trcr(i,j,nt_aero+3+4*(n-1),iblk)*vice(i,j,iblk) - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = trcr(i,j,nt_aero +4*(n-1),iblk)*vsno(i,j,iblk) & + + trcr(i,j,nt_aero+1+4*(n-1),iblk)*vsno(i,j,iblk) & + + trcr(i,j,nt_aero+2+4*(n-1),iblk)*vice(i,j,iblk) & + + trcr(i,j,nt_aero+3+4*(n-1),iblk)*vice(i,j,iblk) + enddo + enddo enddo !$OMP END PARALLEL DO totaeron(n)= global_sum(work1, distrb_info, field_loc_center, tarean) @@ -1397,18 +1202,18 @@ subroutine init_mass_diags totps = c0 !$OMP PARALLEL DO PRIVATE(iblk,i,j,n) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - do n = 1, ncat - work1(i,j,iblk) = work1(i,j,iblk) & - + aicen(i,j,n,iblk) & - * trcrn(i,j,nt_apnd,n,iblk) & - * trcrn(i,j,nt_hpnd,n,iblk) - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + do n = 1, ncat + work1(i,j,iblk) = work1(i,j,iblk) & + + aicen(i,j,n,iblk) & + * trcrn(i,j,nt_apnd,n,iblk) & + * trcrn(i,j,nt_hpnd,n,iblk) enddo enddo + enddo + enddo !$OMP END PARALLEL DO totpn = global_sum(work1, distrb_info, field_loc_center, tarean) totps = global_sum(work1, distrb_info, field_loc_center, tareas) @@ -1428,6 +1233,7 @@ end subroutine init_mass_diags subroutine total_energy (work) + use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_domain_size, only: ncat, nilyr, nslyr, max_blocks use ice_grid, only: tmask @@ -1456,6 +1262,7 @@ subroutine total_energy (work) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) +! 
MHRI: CHECK THIS OMP !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,k,ij,icells,indxi,indxj) do iblk = 1, nblocks @@ -1514,6 +1321,7 @@ end subroutine total_energy subroutine total_salt (work) + use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_domain_size, only: ncat, nilyr, max_blocks use ice_grid, only: tmask @@ -1542,6 +1350,7 @@ subroutine total_salt (work) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) +! MHRI: CHECK THIS OMP !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,k,ij,icells,indxi,indxj) do iblk = 1, nblocks @@ -1630,14 +1439,14 @@ subroutine init_diags write(nu_diag,*) ' Find indices of diagnostic points ' endif - piloc(:) = -1 - pjloc(:) = -1 - pbloc(:) = -1 + piloc(:) = 0 + pjloc(:) = 0 + pbloc(:) = 0 pmloc(:) = -999 plat(:) = -999._dbl_kind plon(:) = -999._dbl_kind - ! find minimum distance to diagnostic points on this processor + ! find minimum distance to diagnostic points on this processor do n = 1, npnt if (lonpnt(n) > c180) lonpnt(n) = lonpnt(n) - c360 @@ -1649,10 +1458,9 @@ subroutine init_diags if (abs(latpnt(n)) < c360 .and. abs(lonpnt(n)) < c360) then ! MDT, 09/2017: Comment out OpenMP directives since loop is not thread-safe - ! This is computing closest point, Could add a CRITICAL but it's just initialization - !!$XXXOMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,latdis,londis,totdis) + !!$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,latdis,londis,totdis) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1675,11 +1483,11 @@ subroutine init_diags enddo ! i enddo ! j enddo ! iblk - !!$XXXOMP END PARALLEL DO + !!$OMP END PARALLEL DO endif - ! find global minimum distance to diagnostic points + ! find global minimum distance to diagnostic points mindis_g = global_minval(mindis, distrb_info) ! save indices of minimum-distance grid cell @@ -1717,64 +1525,35 @@ end subroutine init_diags !======================================================================= -! This routine is useful for debugging -! author Elizabeth C. Hunke, LANL - - subroutine debug_ice(iblk, plabeld) - - character (char_len), intent(in) :: plabeld - integer (kind=int_kind), intent(in) :: iblk - - ! local - character(len=*), parameter :: subname='(debug_ice)' - - if (istep1 >= debug_model_step) then - - ! set debug point to 1st global point if not set as local values - if (debug_model_i < 0 .and. debug_model_j < 0 .and. & - debug_model_iblk < 0 .and. debug_model_task < 0) then - debug_model_i = piloc(1) - debug_model_j = pjloc(1) - debug_model_task = pmloc(1) - debug_model_iblk = pbloc(1) - endif - - ! if debug point is messed up, abort - if (debug_model_i < 0 .or. debug_model_j < 0 .or. & - debug_model_iblk < 0 .or. debug_model_task < 0) then - call abort_ice (subname//'ERROR: debug_model_[i,j,iblk,mytask] not set correctly') - endif - - ! write out debug info - if (debug_model_iblk == iblk .and. debug_model_task == my_task) then - call print_state(plabeld,debug_model_i,debug_model_j,debug_model_iblk) - endif - - endif - - end subroutine debug_ice - -!======================================================================= - ! This routine is useful for debugging. +! Calls to it should be inserted in the form (after thermo, for example) +! do iblk = 1, nblocks +! do j=jlo,jhi +! do i=ilo,ihi +! plabel = 'post thermo' +! if (istep1 >= check_step .and. iblk==iblkp .and i==ip & +! .and. j==jp .and. 
my_task == mtask) & +! call print_state(plabel,i,j,iblk) +! enddo +! enddo +! enddo +! +! 'use ice_diagnostics' may need to be inserted also ! author: Elizabeth C. Hunke, LANL subroutine print_state(plabel,i,j,iblk) - use ice_grid, only: grid_ice use ice_blocks, only: block, get_block use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr, nfsd - use ice_grid, only: TLAT, TLON - use ice_state, only: aice, aice0, aicen, vicen, vsnon, uvel, vvel, & - uvelE, vvelE, uvelN, vvelN, trcrn + use ice_state, only: aice0, aicen, vicen, vsnon, uvel, vvel, trcrn use ice_flux, only: uatm, vatm, potT, Tair, Qa, flw, frain, fsnow, & fsens, flat, evap, flwout, swvdr, swvdf, swidr, swidf, rhoa, & - frzmlt, sst, sss, Tf, Tref, Qref, Uref, uocn, vocn, strtltxU, strtltyU + frzmlt, sst, sss, Tf, Tref, Qref, Uref, uocn, vocn, strtltx, strtlty character (len=20), intent(in) :: plabel - integer (kind=int_kind), intent(in) :: & + integer (kind=int_kind), intent(in) :: & i, j , & ! horizontal indices iblk ! block index @@ -1782,25 +1561,23 @@ subroutine print_state(plabel,i,j,iblk) real (kind=dbl_kind) :: & eidebug, esdebug, & - qi, qs, Tsnow, si, & + qi, qs, Tsnow, & rad_to_deg, puny, rhoi, lfresh, rhos, cp_ice integer (kind=int_kind) :: n, k, nt_Tsfc, nt_qice, nt_qsno, nt_fsd, & - nt_isosno, nt_isoice, nt_sice, nt_smice, nt_smliq + nt_isosno, nt_isoice - logical (kind=log_kind) :: tr_fsd, tr_iso, tr_snow + logical (kind=log_kind) :: tr_fsd, tr_iso type (block) :: & this_block ! block information for current block character(len=*), parameter :: subname = '(print_state)' - call icepack_query_tracer_flags(tr_fsd_out=tr_fsd, tr_iso_out=tr_iso, & - tr_snow_out=tr_snow) + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & - nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, nt_fsd_out=nt_fsd, & - nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice, & - nt_smice_out=nt_smice, nt_smliq_out=nt_smliq) + nt_qsno_out=nt_qsno, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_query_parameters( & rad_to_deg_out=rad_to_deg, puny_out=puny, rhoi_out=rhoi, lfresh_out=lfresh, & rhos_out=rhos, cp_ice_out=cp_ice) @@ -1808,20 +1585,15 @@ subroutine print_state(plabel,i,j,iblk) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) - write(nu_diag,*) subname,' ',trim(plabel) - write(nu_diag,*) subname,' istep1, my_task, i, j, iblk:', & + write(nu_diag,*) plabel + write(nu_diag,*) 'istep1, my_task, i, j, iblk:', & istep1, my_task, i, j, iblk - write(nu_diag,*) subname,' Global block:', this_block%block_id - write(nu_diag,*) subname,' Global i and j:', & + write(nu_diag,*) 'Global i and j:', & this_block%i_glob(i), & - this_block%j_glob(j) - write (nu_diag,*) subname,' Lat, Lon (degrees):', & - TLAT(i,j,iblk)*rad_to_deg, & - TLON(i,j,iblk)*rad_to_deg + this_block%j_glob(j) write(nu_diag,*) ' ' - write(nu_diag,*) 'aice ', aice(i,j,iblk) write(nu_diag,*) 'aice0', aice0(i,j,iblk) do n = 1, ncat write(nu_diag,*) ' ' @@ -1835,23 +1607,21 @@ subroutine print_state(plabel,i,j,iblk) endif write(nu_diag,*) 'Tsfcn',trcrn(i,j,nt_Tsfc,n,iblk) if (tr_fsd) write(nu_diag,*) 'afsdn',trcrn(i,j,nt_fsd,n,iblk) ! fsd cat 1 -! layer 1 diagnostics -! if (tr_iso) write(nu_diag,*) 'isosno',trcrn(i,j,nt_isosno,n,iblk) ! isotopes in snow -! 
if (tr_iso) write(nu_diag,*) 'isoice',trcrn(i,j,nt_isoice,n,iblk) ! isotopes in ice -! if (tr_snow) write(nu_diag,*) 'smice', trcrn(i,j,nt_smice, n,iblk) ! ice mass in snow -! if (tr_snow) write(nu_diag,*) 'smliq', trcrn(i,j,nt_smliq, n,iblk) ! liquid mass in snow +! if (tr_iso) write(nu_diag,*) 'isosno',trcrn(i,j,nt_isosno,n,iblk) ! isotopes in snow +! if (tr_iso) write(nu_diag,*) 'isoice',trcrn(i,j,nt_isoice,n,iblk) ! isotopes in ice write(nu_diag,*) ' ' ! dynamics (transport and/or ridging) causes the floe size distribution to become non-normal ! if (tr_fsd) then ! if (abs(sum(trcrn(i,j,nt_fsd:nt_fsd+nfsd-1,n,iblk))-c1) > puny) & -! write(nu_diag,*) 'afsdn not normal', & +! print*,'afsdn not normal', & ! sum(trcrn(i,j,nt_fsd:nt_fsd+nfsd-1,n,iblk)), & ! trcrn(i,j,nt_fsd:nt_fsd+nfsd-1,n,iblk) ! endif enddo ! n + eidebug = c0 do n = 1,ncat do k = 1,nilyr @@ -1884,25 +1654,151 @@ subroutine print_state(plabel,i,j,iblk) write(nu_diag,*) 'qsnow(i,j)',esdebug write(nu_diag,*) ' ' + write(nu_diag,*) 'uvel(i,j)',uvel(i,j,iblk) + write(nu_diag,*) 'vvel(i,j)',vvel(i,j,iblk) + + write(nu_diag,*) ' ' + write(nu_diag,*) 'atm states and fluxes' + write(nu_diag,*) ' uatm = ',uatm (i,j,iblk) + write(nu_diag,*) ' vatm = ',vatm (i,j,iblk) + write(nu_diag,*) ' potT = ',potT (i,j,iblk) + write(nu_diag,*) ' Tair = ',Tair (i,j,iblk) + write(nu_diag,*) ' Qa = ',Qa (i,j,iblk) + write(nu_diag,*) ' rhoa = ',rhoa (i,j,iblk) + write(nu_diag,*) ' swvdr = ',swvdr(i,j,iblk) + write(nu_diag,*) ' swvdf = ',swvdf(i,j,iblk) + write(nu_diag,*) ' swidr = ',swidr(i,j,iblk) + write(nu_diag,*) ' swidf = ',swidf(i,j,iblk) + write(nu_diag,*) ' flw = ',flw (i,j,iblk) + write(nu_diag,*) ' frain = ',frain(i,j,iblk) + write(nu_diag,*) ' fsnow = ',fsnow(i,j,iblk) + write(nu_diag,*) ' ' + write(nu_diag,*) 'ocn states and fluxes' + write(nu_diag,*) ' frzmlt = ',frzmlt (i,j,iblk) + write(nu_diag,*) ' sst = ',sst (i,j,iblk) + write(nu_diag,*) ' sss = ',sss (i,j,iblk) + write(nu_diag,*) ' Tf = ',Tf (i,j,iblk) + write(nu_diag,*) ' uocn = ',uocn (i,j,iblk) + write(nu_diag,*) ' vocn = ',vocn (i,j,iblk) + write(nu_diag,*) ' strtltx = ',strtltx(i,j,iblk) + write(nu_diag,*) ' strtlty = ',strtlty(i,j,iblk) + write(nu_diag,*) ' ' + write(nu_diag,*) 'srf states and fluxes' + write(nu_diag,*) ' Tref = ',Tref (i,j,iblk) + write(nu_diag,*) ' Qref = ',Qref (i,j,iblk) + write(nu_diag,*) ' Uref = ',Uref (i,j,iblk) + write(nu_diag,*) ' fsens = ',fsens (i,j,iblk) + write(nu_diag,*) ' flat = ',flat (i,j,iblk) + write(nu_diag,*) ' evap = ',evap (i,j,iblk) + write(nu_diag,*) ' flwout = ',flwout(i,j,iblk) + write(nu_diag,*) ' ' + + end subroutine print_state + +!======================================================================= + +! This routine is useful for debugging. +! Calls can be inserted anywhere and it will print info on print_points points +! call print_points_state(plabel) +! +! 'use ice_diagnostics' may need to be inserted also + + subroutine print_points_state(plabel,ilabel) + + use ice_blocks, only: block, get_block + use ice_domain, only: blocks_ice + use ice_domain_size, only: ncat, nilyr, nslyr + use ice_state, only: aice0, aicen, vicen, vsnon, uvel, vvel, trcrn + use ice_flux, only: uatm, vatm, potT, Tair, Qa, flw, frain, fsnow, & + fsens, flat, evap, flwout, swvdr, swvdf, swidr, swidf, rhoa, & + frzmlt, sst, sss, Tf, Tref, Qref, Uref, uocn, vocn, strtltx, strtlty + + character (len=*), intent(in),optional :: plabel + integer , intent(in),optional :: ilabel + + ! 
local variables + + real (kind=dbl_kind) :: & + eidebug, esdebug, & + qi, qs, & + puny + + integer (kind=int_kind) :: m, n, k, i, j, iblk, nt_Tsfc, nt_qice, nt_qsno + character(len=256) :: llabel + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(print_points_state)' + ! ---------------------- + + call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & + nt_qsno_out=nt_qsno) + call icepack_query_parameters(puny_out=puny) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + do m = 1, npnt + if (my_task == pmloc(m)) then + i = piloc(m) + j = pjloc(m) + iblk = pbloc(m) + this_block = get_block(blocks_ice(iblk),iblk) + + if (present(ilabel)) then + write(llabel,'(i6,a1,i3,a1)') ilabel,':',m,':' + else + write(llabel,'(i3,a1)') m,':' + endif + if (present(plabel)) then + write(llabel,'(a)') 'pps:'//trim(plabel)//':'//trim(llabel) + else + write(llabel,'(a)') 'pps:'//trim(llabel) + endif + + write(nu_diag,*) trim(llabel),'istep1, my_task, i, j, iblk=', & + istep1, my_task, i, j, iblk + write(nu_diag,*) trim(llabel),'Global i and j=', & + this_block%i_glob(i), & + this_block%j_glob(j) + write(nu_diag,*) trim(llabel),'aice0=', aice0(i,j,iblk) + + do n = 1, ncat + write(nu_diag,*) trim(llabel),'aicen=', n,aicen(i,j,n,iblk) + write(nu_diag,*) trim(llabel),'vicen=', n,vicen(i,j,n,iblk) + write(nu_diag,*) trim(llabel),'vsnon=', n,vsnon(i,j,n,iblk) + if (aicen(i,j,n,iblk) > puny) then + write(nu_diag,*) trim(llabel),'hin=', n,vicen(i,j,n,iblk)/aicen(i,j,n,iblk) + write(nu_diag,*) trim(llabel),'hsn=', n,vsnon(i,j,n,iblk)/aicen(i,j,n,iblk) + endif + write(nu_diag,*) trim(llabel),'Tsfcn=',n,trcrn(i,j,nt_Tsfc,n,iblk) + enddo + + eidebug = c0 do n = 1,ncat do k = 1,nilyr - si = trcrn(i,j,nt_sice+k-1,n,iblk) - write(nu_diag,*) 'sice, cat ',n,' layer ',k, si + qi = trcrn(i,j,nt_qice+k-1,n,iblk) + write(nu_diag,*) trim(llabel),'qice= ',n,k, qi + eidebug = eidebug + qi enddo enddo - write(nu_diag,*) ' ' + write(nu_diag,*) trim(llabel),'qice=',eidebug - write(nu_diag,*) 'uvel(i,j)',uvel(i,j,iblk) - write(nu_diag,*) 'vvel(i,j)',vvel(i,j,iblk) - if (grid_ice == 'C') then - write(nu_diag,*) 'uvelE(i,j)',uvelE(i,j,iblk) - write(nu_diag,*) 'uvelN(i,j)',uvelN(i,j,iblk) - elseif (grid_ice == 'CD') then - write(nu_diag,*) 'uvelE(i,j)',uvelE(i,j,iblk) - write(nu_diag,*) 'vvelE(i,j)',vvelE(i,j,iblk) - write(nu_diag,*) 'uvelN(i,j)',uvelN(i,j,iblk) - write(nu_diag,*) 'vvelN(i,j)',vvelN(i,j,iblk) - endif + esdebug = c0 + do n = 1,ncat + if (vsnon(i,j,n,iblk) > puny) then + do k = 1,nslyr + qs = trcrn(i,j,nt_qsno+k-1,n,iblk) + write(nu_diag,*) trim(llabel),'qsnow=',n,k, qs + esdebug = esdebug + qs + enddo + endif + enddo + write(nu_diag,*) trim(llabel),'qsnow=',esdebug + + write(nu_diag,*) trim(llabel),'uvel=',uvel(i,j,iblk) + write(nu_diag,*) trim(llabel),'vvel=',vvel(i,j,iblk) write(nu_diag,*) ' ' write(nu_diag,*) 'atm states and fluxes' @@ -1921,14 +1817,14 @@ subroutine print_state(plabel,i,j,iblk) write(nu_diag,*) ' fsnow = ',fsnow(i,j,iblk) write(nu_diag,*) ' ' write(nu_diag,*) 'ocn states and fluxes' - write(nu_diag,*) ' frzmlt = ',frzmlt (i,j,iblk) - write(nu_diag,*) ' sst = ',sst (i,j,iblk) - write(nu_diag,*) ' sss = ',sss (i,j,iblk) - write(nu_diag,*) ' Tf = ',Tf (i,j,iblk) - write(nu_diag,*) ' uocn = ',uocn (i,j,iblk) - write(nu_diag,*) ' vocn = ',vocn (i,j,iblk) - write(nu_diag,*) ' strtltxU= ',strtltxU(i,j,iblk) - 
write(nu_diag,*) ' strtltyU= ',strtltyU(i,j,iblk) + write(nu_diag,*) ' frzmlt = ',frzmlt (i,j,iblk) + write(nu_diag,*) ' sst = ',sst (i,j,iblk) + write(nu_diag,*) ' sss = ',sss (i,j,iblk) + write(nu_diag,*) ' Tf = ',Tf (i,j,iblk) + write(nu_diag,*) ' uocn = ',uocn (i,j,iblk) + write(nu_diag,*) ' vocn = ',vocn (i,j,iblk) + write(nu_diag,*) ' strtltx = ',strtltx(i,j,iblk) + write(nu_diag,*) ' strtlty = ',strtlty(i,j,iblk) write(nu_diag,*) ' ' write(nu_diag,*) 'srf states and fluxes' write(nu_diag,*) ' Tref = ',Tref (i,j,iblk) @@ -1939,26 +1835,30 @@ subroutine print_state(plabel,i,j,iblk) write(nu_diag,*) ' evap = ',evap (i,j,iblk) write(nu_diag,*) ' flwout = ',flwout(i,j,iblk) write(nu_diag,*) ' ' - call flush_fileunit(nu_diag) - end subroutine print_state + endif ! my_task + enddo ! ncnt + + end subroutine print_points_state !======================================================================= ! prints error information prior to aborting - subroutine diagnostic_abort(istop, jstop, iblk, stop_label) + subroutine diagnostic_abort(istop, jstop, iblk, istep1, stop_label) use ice_blocks, only: block, get_block + use ice_communicate, only: my_task use ice_domain, only: blocks_ice use ice_grid, only: TLAT, TLON use ice_state, only: aice integer (kind=int_kind), intent(in) :: & istop, jstop, & ! indices of grid cell where model aborts - iblk ! block index + iblk , & ! block index + istep1 ! time step number - character (len=*), intent(in) :: stop_label + character (char_len), intent(in) :: stop_label ! local variables @@ -1974,17 +1874,20 @@ subroutine diagnostic_abort(istop, jstop, iblk, stop_label) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - this_block = get_block(blocks_ice(iblk),iblk) - - call flush_fileunit(nu_diag) - if (istop > 0 .and. jstop > 0) then - call print_state(trim(stop_label),istop,jstop,iblk) - else - write (nu_diag,*) subname,' istep1, my_task, iblk =', & - istep1, my_task, iblk - write (nu_diag,*) subname,' Global block:', this_block%block_id - endif - call flush_fileunit(nu_diag) + this_block = get_block(blocks_ice(iblk),iblk) + + write (nu_diag,*) 'istep1, my_task, iblk =', & + istep1, my_task, iblk + write (nu_diag,*) 'Global block:', this_block%block_id + if (istop > 0 .and. jstop > 0) & + write (nu_diag,*) 'Global i and j:', & + this_block%i_glob(istop), & + this_block%j_glob(jstop) + write (nu_diag,*) 'Lat, Lon:', & + TLAT(istop,jstop,iblk)*rad_to_deg, & + TLON(istop,jstop,iblk)*rad_to_deg + write (nu_diag,*) 'aice:', & + aice(istop,jstop,iblk) call abort_ice (subname//'ERROR: '//trim(stop_label)) end subroutine diagnostic_abort diff --git a/cicecore/cicedyn/analysis/ice_history_bgc.F90 b/cicecore/cicedyn/analysis/ice_history_bgc.F90 index 7c87c1f70..1ae572b30 100644 --- a/cicecore/cicedyn/analysis/ice_history_bgc.F90 +++ b/cicecore/cicedyn/analysis/ice_history_bgc.F90 @@ -21,71 +21,74 @@ module ice_history_bgc icepack_query_tracer_indices, icepack_query_parameters, & icepack_query_parameters use ice_domain_size, only: max_nstrm, n_iso, n_aero, & - n_algae, n_dic, n_doc, n_don, n_zaero, n_fed, n_fep + n_algae, n_dic, n_doc, n_don, n_zaero, n_fed, n_fep implicit none private public :: init_hist_bgc_2D, init_hist_bgc_3Dc, & init_hist_bgc_3Db, init_hist_bgc_3Da,& accum_hist_bgc, init_history_bgc - + !--------------------------------------------------------------- ! flags: write to output file if true or histfreq value !--------------------------------------------------------------- ! 
specified in input_templates !-------------------------------------------------------------- - character (len=max_nstrm), public :: & + character (len=max_nstrm), public :: & f_fiso_atm = 'x', f_fiso_ocn = 'x', & f_iso = 'x', & f_faero_atm = 'x', f_faero_ocn = 'x', & f_aero = 'x', & - f_fbio = 'x', f_fbio_ai = 'x', & - f_zaero = 'x', f_bgc_S = 'x', & + f_fzsal = 'm', f_fzsal_ai = 'm', & + f_fzsal_g = 'm', f_fzsal_g_ai = 'm', & + f_zsal = 'x', & + f_fbio = 'x', f_fbio_ai = 'x', & + f_zaero = 'x', f_bgc_S = 'x', & f_bgc_N = 'x', f_bgc_C = 'x', & f_bgc_DOC = 'x', f_bgc_DIC = 'x', & f_bgc_chl = 'x', f_bgc_Nit = 'x', & f_bgc_Am = 'x', f_bgc_Sil = 'x', & f_bgc_DMSPp = 'x', f_bgc_DMSPd = 'x', & - f_bgc_DMS = 'x', f_bgc_DON = 'x', & + f_bgc_DMS = 'x', f_bgc_DON = 'x', & f_bgc_Fe = 'x', f_bgc_hum = 'x', & f_bgc_PON = 'x', f_bgc_ml = 'x', & - f_upNO = 'x', f_upNH = 'x', & - f_bTin = 'x', f_bphi = 'x', & - f_iDi = 'x', f_iki = 'x', & + f_upNO = 'x', f_upNH = 'x', & + f_bTin = 'x', f_bphi = 'x', & + f_iDi = 'x', f_iki = 'x', & f_fbri = 'x', f_hbri = 'x', & - f_zfswin = 'x', f_grownet = 'x', & - f_bionet = 'x', f_biosnow = 'x', & + f_zfswin = 'x', f_grownet = 'x', & + f_bionet = 'x', f_biosnow = 'x', & f_PPnet = 'x', f_algalpeak = 'x', & f_zbgc_frac = 'x', & !------------------------------------------------ ! specified by combinations of above values !------------------------------------------------- f_bgc_Fed = 'x', f_bgc_Fep = 'x', & - f_DONnet = 'x', & - f_DICnet = 'x', f_DOCnet = 'x', & - f_chlnet = 'x', f_Nitnet = 'x', & - f_Amnet = 'x', f_Cnet = 'x', & - f_Nnet = 'x', f_DMSPpnet = 'x', & - f_DMSPdnet = 'x', f_DMSnet = 'x', & - f_Fednet = 'x', f_Fepnet = 'x', & + f_DONnet = 'x', & + f_DICnet = 'x', f_DOCnet = 'x', & + f_chlnet = 'x', f_Nitnet = 'x', & + f_Amnet = 'x', f_Cnet = 'x', & + f_Nnet = 'x', f_DMSPpnet = 'x', & + f_DMSPdnet = 'x', f_DMSnet = 'x', & + f_Fednet = 'x', f_Fepnet = 'x', & f_Silnet = 'x', f_PONnet = 'x', & - f_zaeronet = 'x', f_humnet = 'x', & - f_chlsnow = 'x', f_Nitsnow = 'x', & - f_Amsnow = 'x', f_Csnow = 'x', & - f_Nsnow = 'x', f_DMSPpsnow = 'x', & - f_DMSPdsnow = 'x', f_DMSsnow = 'x', & - f_Fedsnow = 'x', f_Fepsnow = 'x', & - f_Silsnow = 'x', f_PONsnow = 'x', & + f_zaeronet = 'x', f_humnet = 'x', & + f_chlsnow = 'x', f_Nitsnow = 'x', & + f_Amsnow = 'x', f_Csnow = 'x', & + f_Nsnow = 'x', f_DMSPpsnow = 'x', & + f_DMSPdsnow = 'x', f_DMSsnow = 'x', & + f_Fedsnow = 'x', f_Fepsnow = 'x', & + f_Silsnow = 'x', f_PONsnow = 'x', & f_humsnow = 'x', & - f_DICsnow = 'x', f_DOCsnow = 'x', & + f_DICsnow = 'x', f_DOCsnow = 'x', & f_DONsnow = 'x', f_zaerosnow = 'x', & - f_chlfrac = 'x', f_Nitfrac = 'x', & - f_Amfrac = 'x', & - f_Nfrac = 'x', f_DMSPpfrac = 'x', & - f_DMSPdfrac = 'x', f_DMSfrac = 'x', & - f_Silfrac = 'x', f_PONfrac = 'x', & + f_chlfrac = 'x', f_Nitfrac = 'x', & + f_Amfrac = 'x', & + f_Nfrac = 'x', f_DMSPpfrac = 'x', & + f_DMSPdfrac = 'x', f_DMSfrac = 'x', & + f_Silfrac = 'x', f_PONfrac = 'x', & f_humfrac = 'x', & - f_DICfrac = 'x', f_DOCfrac = 'x', & + f_DICfrac = 'x', f_DOCfrac = 'x', & f_DONfrac = 'x', f_zaerofrac = 'x', & f_Fedfrac = 'x', f_Fepfrac = 'x', & f_fNit = 'x', f_fNit_ai = 'x', & @@ -96,13 +99,13 @@ module ice_history_bgc f_fDON = 'x', f_fDON_ai = 'x', & f_fFed = 'x', f_fFed_ai = 'x', & f_fFep = 'x', f_fFep_ai = 'x', & - f_fSil = 'x', f_fSil_ai = 'x', & - f_fPON = 'x', f_fPON_ai = 'x', & - f_fhum = 'x', f_fhum_ai = 'x', & - f_fDMSPp = 'x', f_fDMSPp_ai = 'x', & - f_fDMSPd = 'x', f_fDMSPd_ai = 'x', & - f_fDMS = 'x', f_fDMS_ai = 'x', & - f_fzaero = 'x', f_fzaero_ai 
= 'x', & + f_fSil = 'x', f_fSil_ai = 'x', & + f_fPON = 'x', f_fPON_ai = 'x', & + f_fhum = 'x', f_fhum_ai = 'x', & + f_fDMSPp = 'x', f_fDMSPp_ai = 'x', & + f_fDMSPd = 'x', f_fDMSPd_ai = 'x', & + f_fDMS = 'x', f_fDMS_ai = 'x', & + f_fzaero = 'x', f_fzaero_ai = 'x', & f_bgc_Sil_ml = 'x', & f_bgc_Nit_ml = 'x', f_bgc_Am_ml = 'x', & f_bgc_DMSP_ml = 'x', f_bgc_DMS_ml = 'x', & @@ -137,12 +140,12 @@ module ice_history_bgc f_bgc_DMS , f_bgc_DON , & f_bgc_Fe , f_bgc_hum , & f_bgc_PON , f_bgc_ml , & - f_upNO , f_upNH , & + f_upNO , f_upNH , & f_bTin , f_bphi , & - f_iDi , f_iki , & + f_iDi , f_iki , & f_fbri , f_hbri , & - f_zfswin , f_grownet , & - f_bionet , f_biosnow , & + f_zfswin , f_grownet , & + f_bionet , f_biosnow , & f_PPnet , f_algalpeak , & f_zbgc_frac @@ -150,6 +153,11 @@ module ice_history_bgc ! field indices !--------------------------------------------------------------- + integer (kind=int_kind), dimension(max_nstrm), public :: & + n_fzsal , n_fzsal_ai , & + n_fzsal_g , n_fzsal_g_ai , & + n_zsal + integer(kind=int_kind), dimension(icepack_max_iso,max_nstrm) :: & n_fiso_atm , & n_fiso_ocn , & @@ -208,6 +216,7 @@ module ice_history_bgc n_bgc_Fed_cat1, n_bgc_Fep_cat1 integer(kind=int_kind), dimension(max_nstrm) :: & + n_bgc_S , & n_fNit , n_fNit_ai , & n_fAm , n_fAm_ai , & n_fSil , n_fSil_ai , & @@ -224,25 +233,25 @@ module ice_history_bgc n_bgc_hum_ml , & n_bgc_Nit_ml , n_bgc_Am_ml , & n_bgc_DMSP_ml , n_bgc_DMS_ml , & - n_upNO , n_upNH , & + n_upNO , n_upNH , & n_bTin , n_bphi , & n_iDi , n_iki , & n_bgc_PON , & n_fbri , n_hbri , & - n_zfswin , n_Nitnet , & - n_Amnet , n_Silnet , & + n_zfswin , n_Nitnet , & + n_Amnet , n_Silnet , & n_humnet , & - n_DMSPpnet , n_DMSPdnet , & - n_DMSnet , n_PONnet , & + n_DMSPpnet , n_DMSPdnet , & + n_DMSnet , n_PONnet , & n_Nitsnow , n_Amsnow , & n_Silsnow , n_humsnow , & - n_DMSPpsnow , n_DMSPdsnow , & - n_DMSsnow , n_PONsnow , & + n_DMSPpsnow , n_DMSPdsnow , & + n_DMSsnow , n_PONsnow , & n_Nitfrac , n_Amfrac , & n_Silfrac , & n_humfrac , & - n_DMSPpfrac , n_DMSPdfrac , & - n_DMSfrac , n_PONfrac , & + n_DMSPpfrac , n_DMSPdfrac , & + n_DMSfrac , n_PONfrac , & n_grownet , n_PPnet , & n_bgc_Nit_cat1, n_bgc_Am_cat1 , & n_bgc_Sil_cat1, n_bgc_DMSPd_cat1,& @@ -258,11 +267,10 @@ module ice_history_bgc subroutine init_hist_bgc_2D use ice_broadcast, only: broadcast_scalar - use ice_calendar, only: nstreams, histfreq + use ice_calendar, only: nstreams use ice_communicate, only: my_task, master_task use ice_history_shared, only: tstr2D, tcstr, define_hist_field, & f_fsalt, f_fsalt_ai, f_sice - use ice_fileunits, only: goto_nml integer (kind=int_kind) :: n, ns integer (kind=int_kind) :: nml_error ! namelist i/o error flag @@ -273,15 +281,11 @@ subroutine init_hist_bgc_2D tr_bgc_DMS, tr_bgc_PON, & tr_bgc_N, tr_bgc_C, tr_bgc_chl, & tr_bgc_DON, tr_bgc_Fe, tr_bgc_hum, & - skl_bgc, z_tracers - - character(len=char_len) :: nml_name ! for namelist check - character(len=char_len_long) :: tmpstr2 ! 
for namelist check - - character(len=*), parameter :: subname = '(init_hist_bgc_2D)' + skl_bgc, solve_zsal, z_tracers + character(len=*), parameter :: subname = '(init_hist_bgc_2D)' call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers) + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers) call icepack_query_tracer_flags( & tr_iso_out =tr_iso, tr_zaero_out =tr_zaero, & tr_aero_out =tr_aero, tr_brine_out =tr_brine, & @@ -290,7 +294,7 @@ subroutine init_hist_bgc_2D tr_bgc_PON_out=tr_bgc_PON, & tr_bgc_N_out =tr_bgc_N, tr_bgc_C_out =tr_bgc_C, & tr_bgc_chl_out=tr_bgc_chl, tr_bgc_DON_out=tr_bgc_DON, & - tr_bgc_Fe_out =tr_bgc_Fe, tr_bgc_hum_out=tr_bgc_hum ) + tr_bgc_Fe_out =tr_bgc_Fe, tr_bgc_hum_out=tr_bgc_hum ) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -299,42 +303,25 @@ subroutine init_hist_bgc_2D ! read namelist !----------------------------------------------------------------- + call get_fileunit(nu_nml) if (my_task == master_task) then - nml_name = 'icefields_bgc_nml' - write(nu_diag,*) subname,' Reading ', trim(nml_name) - - ! check if can open file - call get_fileunit(nu_nml) - open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) + open (nu_nml, file=nml_filename, status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: '//trim(nml_name)//' open file '// & - trim(nml_filename), & - file=__FILE__, line=__LINE__) + nml_error = -1 + else + nml_error = 1 endif - - ! seek to namelist in file - call goto_nml(nu_nml,trim(nml_name),nml_error) - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & - file=__FILE__, line=__LINE__) - endif - - ! read namelist - nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=icefields_bgc_nml,iostat=nml_error) - ! check if error - if (nml_error /= 0) then - ! backspace and re-read erroneous line - backspace(nu_nml) - read(nu_nml,fmt='(A)') tmpstr2 - call abort_ice(subname//'ERROR: ' // trim(nml_name) // ' reading ' // & - trim(tmpstr2), file=__FILE__, line=__LINE__) - endif end do + if (nml_error == 0) close(nu_nml) + endif + call release_fileunit(nu_nml) - close(nu_nml) - call release_fileunit(nu_nml) + call broadcast_scalar(nml_error, master_task) + if (nml_error /= 0) then + close (nu_nml) + call abort_ice(subname//'ERROR: reading icefields_bgc_nml') endif if (.not. tr_iso) then @@ -346,14 +333,14 @@ subroutine init_hist_bgc_2D if (.not. tr_aero) then f_faero_atm = 'x' f_faero_ocn = 'x' - f_aero = 'x' + f_aero = 'x' endif - + if (.not. tr_brine) then f_fbri = 'x' f_hbri = 'x' endif - + f_zaeronet = f_bionet f_zaerosnow = f_biosnow f_zaerofrac = f_zbgc_frac @@ -362,7 +349,7 @@ subroutine init_hist_bgc_2D if (.not. tr_zaero) then f_zaero = 'x' - f_fzaero = 'x' + f_fzaero = 'x' f_fzaero_ai = 'x' f_zaeronet = 'x' f_zaerosnow = 'x' @@ -406,7 +393,7 @@ subroutine init_hist_bgc_2D f_DMSPdnet = f_bionet f_DMSnet = f_bionet f_PONnet = f_bionet - + f_Nitsnow = f_biosnow f_Amsnow = f_biosnow f_Nsnow = f_biosnow @@ -476,7 +463,7 @@ subroutine init_hist_bgc_2D f_fDMSPd_ai = f_fbio_ai f_fDMS_ai = f_fbio_ai - if (.not. tr_bgc_N) then + if (.not. tr_bgc_N) then f_bgc_N = 'x' f_bgc_N_ml = 'x' f_fN = 'x' @@ -488,8 +475,8 @@ subroutine init_hist_bgc_2D endif f_peakval = f_algalpeak - if (.not. tr_bgc_Nit) then - f_upNO = 'x' + if (.not. 
tr_bgc_Nit) then + f_upNO = 'x' f_bgc_Nit = 'x' f_bgc_Nit_ml= 'x' f_fNit = 'x' @@ -521,8 +508,8 @@ subroutine init_hist_bgc_2D f_chlsnow = 'x' f_chlfrac = 'x' endif - if (.not. tr_bgc_Am) then - f_upNH = 'x' + if (.not. tr_bgc_Am) then + f_upNH = 'x' f_bgc_Am = 'x' f_bgc_Am_ml = 'x' f_fAm = 'x' @@ -570,8 +557,8 @@ subroutine init_hist_bgc_2D f_DMSfrac = 'x' f_DMSPpfrac = 'x' f_DMSPdfrac = 'x' - endif - if (.not. tr_bgc_DON) then + endif + if (.not. tr_bgc_DON) then f_bgc_DON = 'x' f_bgc_DON_ml = 'x' f_DONsnow = 'x' @@ -579,8 +566,8 @@ subroutine init_hist_bgc_2D f_DONnet = 'x' f_fDON = 'x' f_fDON_ai = 'x' - endif - if (.not. tr_bgc_Fe ) then + endif + if (.not. tr_bgc_Fe ) then f_bgc_Fe = 'x' f_bgc_Fed = 'x' f_bgc_Fed_ml = 'x' @@ -597,7 +584,7 @@ subroutine init_hist_bgc_2D f_fFep = 'x' f_fFep_ai = 'x' endif - if (.not. tr_bgc_PON .or. skl_bgc) then + if (.not. tr_bgc_PON .or. skl_bgc) then f_bgc_PON = 'x' f_PONsnow = 'x' f_PONfrac = 'x' @@ -605,21 +592,40 @@ subroutine init_hist_bgc_2D f_fPON = 'x' f_fPON_ai = 'x' endif - - f_bgc_Nit_cat1 = f_bgc_Nit - f_bgc_Am_cat1 = f_bgc_Am + + f_bgc_Nit_cat1 = f_bgc_Nit + f_bgc_Am_cat1 = f_bgc_Am f_bgc_N_cat1 = f_bgc_N f_bgc_DOC_cat1 = f_bgc_DOC f_bgc_DIC_cat1 = f_bgc_DIC f_bgc_DON_cat1 = f_bgc_DON - f_bgc_Fed_cat1 = f_bgc_Fe - f_bgc_Fep_cat1 = f_bgc_Fe - f_bgc_Sil_cat1 = f_bgc_Sil - f_bgc_hum_cat1 = f_bgc_hum + f_bgc_Fed_cat1 = f_bgc_Fe + f_bgc_Fep_cat1 = f_bgc_Fe + f_bgc_Sil_cat1 = f_bgc_Sil + f_bgc_hum_cat1 = f_bgc_hum f_bgc_DMSPd_cat1 = f_bgc_DMSPd - f_bgc_DMS_cat1 = f_bgc_DMS + f_bgc_DMS_cat1 = f_bgc_DMS f_bgc_PON_cat1 = f_bgc_PON + if (solve_zsal) then + f_fzsal = f_fsalt + f_fzsal_g = f_fsalt + f_fzsal_ai = f_fsalt_ai + f_fzsal_g_ai = f_fsalt_ai + f_zsal = f_sice + f_fsalt = 'x' + f_fsalt_ai = 'x' + f_sice = 'x' + else + f_fzsal = 'x' + f_fzsal_g = 'x' + f_fzsal_ai = 'x' + f_fzsal_g_ai = 'x' + f_zsal = 'x' + f_bgc_S = 'x' + f_iki = 'x' + endif + call broadcast_scalar (f_fiso_atm, master_task) call broadcast_scalar (f_fiso_ocn, master_task) call broadcast_scalar (f_iso, master_task) @@ -628,6 +634,11 @@ subroutine init_hist_bgc_2D call broadcast_scalar (f_aero, master_task) call broadcast_scalar (f_fbri, master_task) call broadcast_scalar (f_hbri, master_task) + call broadcast_scalar (f_fzsal, master_task) + call broadcast_scalar (f_fzsal_ai, master_task) + call broadcast_scalar (f_fzsal_g, master_task) + call broadcast_scalar (f_fzsal_g_ai, master_task) + call broadcast_scalar (f_zsal, master_task) call broadcast_scalar (f_fNit, master_task) call broadcast_scalar (f_fNit_ai, master_task) call broadcast_scalar (f_fDOC, master_task) @@ -697,79 +708,79 @@ subroutine init_hist_bgc_2D call broadcast_scalar (f_bgc_Sil_ml, master_task) call broadcast_scalar (f_bgc_hum_ml, master_task) call broadcast_scalar (f_bgc_DMSP_ml, master_task) - call broadcast_scalar (f_bgc_DMS_ml, master_task) - call broadcast_scalar (f_bgc_DON_ml, master_task) - call broadcast_scalar (f_bgc_Fed_ml, master_task) - call broadcast_scalar (f_bgc_Fep_ml, master_task) - call broadcast_scalar (f_upNO, master_task) - call broadcast_scalar (f_upNH, master_task) + call broadcast_scalar (f_bgc_DMS_ml, master_task) + call broadcast_scalar (f_bgc_DON_ml, master_task) + call broadcast_scalar (f_bgc_Fed_ml, master_task) + call broadcast_scalar (f_bgc_Fep_ml, master_task) + call broadcast_scalar (f_upNO, master_task) + call broadcast_scalar (f_upNH, master_task) call broadcast_scalar (f_bTin, master_task) call broadcast_scalar (f_bphi, master_task) - call broadcast_scalar (f_iDi, 
master_task) - call broadcast_scalar (f_iki, master_task) - call broadcast_scalar (f_zfswin, master_task) - call broadcast_scalar (f_PPnet, master_task) - call broadcast_scalar (f_algalpeak, master_task) - call broadcast_scalar (f_zbgc_frac, master_task) - call broadcast_scalar (f_peakval, master_task) - call broadcast_scalar (f_grownet, master_task) - call broadcast_scalar (f_chlnet, master_task) - call broadcast_scalar (f_Nitnet, master_task) - call broadcast_scalar (f_Nnet, master_task) - call broadcast_scalar (f_Cnet, master_task) - call broadcast_scalar (f_DOCnet, master_task) - call broadcast_scalar (f_DICnet, master_task) - call broadcast_scalar (f_Amnet, master_task) - call broadcast_scalar (f_Silnet, master_task) - call broadcast_scalar (f_humnet, master_task) - call broadcast_scalar (f_DMSPpnet, master_task) - call broadcast_scalar (f_DMSPdnet, master_task) - call broadcast_scalar (f_DMSnet, master_task) - call broadcast_scalar (f_PONnet, master_task) - call broadcast_scalar (f_DONnet, master_task) - call broadcast_scalar (f_Fednet, master_task) - call broadcast_scalar (f_Fepnet, master_task) - call broadcast_scalar (f_zaeronet, master_task) - call broadcast_scalar (f_chlsnow, master_task) - call broadcast_scalar (f_Nitsnow, master_task) - call broadcast_scalar (f_Nsnow, master_task) - call broadcast_scalar (f_Csnow, master_task) - call broadcast_scalar (f_DOCsnow, master_task) - call broadcast_scalar (f_DICsnow, master_task) - call broadcast_scalar (f_Amsnow, master_task) - call broadcast_scalar (f_Silsnow, master_task) - call broadcast_scalar (f_humsnow, master_task) - call broadcast_scalar (f_DMSPpsnow, master_task) - call broadcast_scalar (f_DMSPdsnow, master_task) - call broadcast_scalar (f_DMSsnow, master_task) - call broadcast_scalar (f_PONsnow, master_task) - call broadcast_scalar (f_DONsnow, master_task) - call broadcast_scalar (f_Fedsnow, master_task) - call broadcast_scalar (f_Fepsnow, master_task) - call broadcast_scalar (f_zaerosnow, master_task) - call broadcast_scalar (f_chlfrac, master_task) - call broadcast_scalar (f_Nitfrac, master_task) - call broadcast_scalar (f_Nfrac, master_task) - call broadcast_scalar (f_DOCfrac, master_task) - call broadcast_scalar (f_DICfrac, master_task) - call broadcast_scalar (f_Amfrac, master_task) - call broadcast_scalar (f_Silfrac, master_task) - call broadcast_scalar (f_humfrac, master_task) - call broadcast_scalar (f_DMSPpfrac, master_task) - call broadcast_scalar (f_DMSPdfrac, master_task) - call broadcast_scalar (f_DMSfrac, master_task) - call broadcast_scalar (f_PONfrac, master_task) - call broadcast_scalar (f_DONfrac, master_task) - call broadcast_scalar (f_Fedfrac, master_task) - call broadcast_scalar (f_Fepfrac, master_task) - call broadcast_scalar (f_zaerofrac, master_task) + call broadcast_scalar (f_iDi, master_task) + call broadcast_scalar (f_iki, master_task) + call broadcast_scalar (f_bgc_S, master_task) + call broadcast_scalar (f_zfswin, master_task) + call broadcast_scalar (f_PPnet, master_task) + call broadcast_scalar (f_algalpeak, master_task) + call broadcast_scalar (f_zbgc_frac, master_task) + call broadcast_scalar (f_peakval, master_task) + call broadcast_scalar (f_grownet, master_task) + call broadcast_scalar (f_chlnet, master_task) + call broadcast_scalar (f_Nitnet, master_task) + call broadcast_scalar (f_Nnet, master_task) + call broadcast_scalar (f_Cnet, master_task) + call broadcast_scalar (f_DOCnet, master_task) + call broadcast_scalar (f_DICnet, master_task) + call broadcast_scalar (f_Amnet, master_task) 
+ call broadcast_scalar (f_Silnet, master_task) + call broadcast_scalar (f_humnet, master_task) + call broadcast_scalar (f_DMSPpnet, master_task) + call broadcast_scalar (f_DMSPdnet, master_task) + call broadcast_scalar (f_DMSnet, master_task) + call broadcast_scalar (f_PONnet, master_task) + call broadcast_scalar (f_DONnet, master_task) + call broadcast_scalar (f_Fednet, master_task) + call broadcast_scalar (f_Fepnet, master_task) + call broadcast_scalar (f_zaeronet, master_task) + call broadcast_scalar (f_chlsnow, master_task) + call broadcast_scalar (f_Nitsnow, master_task) + call broadcast_scalar (f_Nsnow, master_task) + call broadcast_scalar (f_Csnow, master_task) + call broadcast_scalar (f_DOCsnow, master_task) + call broadcast_scalar (f_DICsnow, master_task) + call broadcast_scalar (f_Amsnow, master_task) + call broadcast_scalar (f_Silsnow, master_task) + call broadcast_scalar (f_humsnow, master_task) + call broadcast_scalar (f_DMSPpsnow, master_task) + call broadcast_scalar (f_DMSPdsnow, master_task) + call broadcast_scalar (f_DMSsnow, master_task) + call broadcast_scalar (f_PONsnow, master_task) + call broadcast_scalar (f_DONsnow, master_task) + call broadcast_scalar (f_Fedsnow, master_task) + call broadcast_scalar (f_Fepsnow, master_task) + call broadcast_scalar (f_zaerosnow, master_task) + call broadcast_scalar (f_chlfrac, master_task) + call broadcast_scalar (f_Nitfrac, master_task) + call broadcast_scalar (f_Nfrac, master_task) + call broadcast_scalar (f_DOCfrac, master_task) + call broadcast_scalar (f_DICfrac, master_task) + call broadcast_scalar (f_Amfrac, master_task) + call broadcast_scalar (f_Silfrac, master_task) + call broadcast_scalar (f_humfrac, master_task) + call broadcast_scalar (f_DMSPpfrac, master_task) + call broadcast_scalar (f_DMSPdfrac, master_task) + call broadcast_scalar (f_DMSfrac, master_task) + call broadcast_scalar (f_PONfrac, master_task) + call broadcast_scalar (f_DONfrac, master_task) + call broadcast_scalar (f_Fedfrac, master_task) + call broadcast_scalar (f_Fepfrac, master_task) + call broadcast_scalar (f_zaerofrac, master_task) ! 2D variables - if (tr_iso .or. tr_aero .or. tr_brine .or. skl_bgc) then + if (tr_iso .or. tr_aero .or. tr_brine .or. solve_zsal .or. skl_bgc) then - do ns = 1, nstreams - if (histfreq(ns) /= 'x') then + do ns = 1, nstreams if (f_iso(1:1) /= 'x') then do n=1,n_iso @@ -805,6 +816,33 @@ subroutine init_hist_bgc_2D enddo endif + ! zsalinity + + call define_hist_field(n_fzsal,"fzsal","kg/m^2/s",tstr2D, tcstr, & + "prognostic salt flux ice to ocn (cpl)", & + "if positive, ocean gains salt", c1, c0, & + ns, f_fzsal) + + call define_hist_field(n_fzsal_ai,"fzsal_ai","kg/m^2/s",tstr2D, tcstr, & + "prognostic salt flux ice to ocean", & + "weighted by ice area", c1, c0, & + ns, f_fzsal_ai) + + call define_hist_field(n_fzsal_g,"fzsal_g","kg/m^2/s",tstr2D, tcstr, & + "Gravity drainage salt flux ice to ocn (cpl)", & + "if positive, ocean gains salt", c1, c0, & + ns, f_fzsal_g) + + call define_hist_field(n_fzsal_g_ai,"fzsal_g_ai","kg/m^2/s",tstr2D, tcstr, & + "Gravity drainage salt flux ice to ocean", & + "weighted by ice area", c1, c0, & + ns, f_fzsal_g_ai) + + call define_hist_field(n_zsal,"zsal_tot","g/m^2",tstr2D, tcstr, & + "Total Salt content", & + "In ice volume*fbri", c1, c0, & + ns, f_zsal) + ! 
Aerosols if (f_aero(1:1) /= 'x') then do n=1,n_aero @@ -929,8 +967,8 @@ subroutine init_hist_bgc_2D "skeletal layer: bottom 3 cm", c1, c0, & ns, f_bgc_Fep ) enddo - endif !f_bgc_Fe - + endif !f_bgc_Fe + if (f_bgc_Nit(1:1) /= 'x') & call define_hist_field(n_bgc_Nit,"Nit","mmol/m^2",tstr2D, tcstr, & "Bulk skeletal nutrient (nitrate)", & @@ -971,7 +1009,7 @@ subroutine init_hist_bgc_2D "Bulk dissolved skl trace gas (DMS)", & "skeletal layer: bottom 3 cm", c1, c0, & ns, f_bgc_DMS) - + endif !skl_bgc ! vertical and skeletal layer biogeochemistry @@ -1007,7 +1045,7 @@ subroutine init_hist_bgc_2D enddo endif if (f_bgc_Fed_ml (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'ml_dFe', trim(nchar) call define_hist_field(n_bgc_Fed_ml (n,:),vname_in,"nM",tstr2D, tcstr, & @@ -1017,7 +1055,7 @@ subroutine init_hist_bgc_2D enddo endif if (f_bgc_Fep_ml (1:1) /= 'x') then - do n = 1, n_fep + do n = 1, n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'ml_pFe', trim(nchar) call define_hist_field(n_bgc_Fep_ml (n,:),vname_in,"nM",tstr2D, tcstr, & @@ -1055,7 +1093,7 @@ subroutine init_hist_bgc_2D call define_hist_field(n_bgc_hum_ml,"ml_hum","mmol/m^3",tstr2D, tcstr, & "mixed layer humic material (carbon)", & "upper ocean", c1, c0, & - ns, f_bgc_hum_ml) + ns, f_bgc_hum_ml) if (f_bgc_DMSP_ml(1:1) /= 'x') & call define_hist_field(n_bgc_DMSP_ml,"ml_DMSP","mmol/m^3",tstr2D, tcstr, & "mixed layer precursor (DMSP)", & @@ -1066,30 +1104,30 @@ subroutine init_hist_bgc_2D "mixed layer trace gas (DMS)", & "upper ocean", c1, c0, & ns, f_bgc_DMS_ml) - + if (f_fNit(1:1) /= 'x') & call define_hist_field(n_fNit,"fNit","mmol/m^2/s",tstr2D, tcstr, & "nitrate flux ice to ocn (cpl)", & "if positive, ocean gains nitrate", c1, c0, & ns, f_fNit) - + if (f_fNit_ai(1:1) /= 'x') & call define_hist_field(n_fNit_ai,"fNit_ai","mmol/m^2/s",tstr2D, tcstr, & "nitrate flux ice to ocean", & "weighted by ice area", c1, c0, & ns, f_fNit_ai) - + if (f_fAm(1:1) /= 'x') & call define_hist_field(n_fAm,"fAm","mmol/m^2/s",tstr2D, tcstr, & "ammonium flux ice to ocn (cpl)", & "if positive, ocean gains ammonium", c1, c0, & ns, f_fAm) - + if (f_fAm_ai(1:1) /= 'x') & call define_hist_field(n_fAm_ai,"fAm_ai","mmol/m^2/s",tstr2D, tcstr, & "ammonium flux ice to ocean", & "weighted by ice area", c1, c0, & - ns, f_fAm_ai) + ns, f_fAm_ai) if (f_fN(1:1) /= 'x') then do n = 1, n_algae write(nchar,'(i3.3)') n @@ -1129,7 +1167,7 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fDOC_ai) enddo - endif + endif if (f_fDIC(1:1) /= 'x') then do n = 1, n_dic write(nchar,'(i3.3)') n @@ -1149,7 +1187,7 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fDIC_ai) enddo - endif + endif if (f_fDON(1:1) /= 'x') then do n = 1, n_don write(nchar,'(i3.3)') n @@ -1169,7 +1207,7 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fDON_ai) enddo - endif + endif if (f_fFed(1:1) /= 'x') then do n = 1, n_fed write(nchar,'(i3.3)') n @@ -1179,9 +1217,9 @@ subroutine init_hist_bgc_2D "positive to ocean", c1, c0, & ns, f_fFed ) enddo - endif + endif if (f_fFed_ai (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'fdFe_ai', trim(nchar) call define_hist_field(n_fFed_ai (n,:),vname_in,"umol/m^2/s",tstr2D, tcstr, & @@ -1189,7 +1227,7 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fFed_ai ) enddo - endif + endif if (f_fFep(1:1) /= 'x') then do n = 1, n_fep write(nchar,'(i3.3)') n @@ -1201,7 +1239,7 @@ 
subroutine init_hist_bgc_2D enddo endif if (f_fFep_ai (1:1) /= 'x') then - do n = 1, n_fep + do n = 1, n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'fpFe_ai', trim(nchar) call define_hist_field(n_fFep_ai (n,:),vname_in,"umol/m^2/s",tstr2D, tcstr, & @@ -1209,25 +1247,25 @@ subroutine init_hist_bgc_2D "weighted by ice area", c1, c0, & ns, f_fFep_ai ) enddo - endif + endif if (f_fSil(1:1) /= 'x') & call define_hist_field(n_fSil,"fSil","mmol/m^2/s",tstr2D, tcstr, & "silicate flux ice to ocn (cpl)", & "positive into ocean", c1, c0, & ns, f_fSil) - + if (f_fSil_ai(1:1) /= 'x') & call define_hist_field(n_fSil_ai,"fSil_ai","mmol/m^2/s",tstr2D, tcstr, & "silicate flux ice to ocean", & "weighted by ice area", c1, c0, & ns, f_fSil_ai) - + if (f_fhum(1:1) /= 'x') & call define_hist_field(n_fhum,"fhum","mmol/m^2/s",tstr2D, tcstr, & "humic matter (carbon) flux ice to ocn (cpl)", & "positive into ocean", c1, c0, & ns, f_fhum) - + if (f_fhum_ai(1:1) /= 'x') & call define_hist_field(n_fhum_ai,"fhum_ai","mmol/m^2/s",tstr2D, tcstr, & "humic matter (carbon) flux ice to ocean", & @@ -1294,19 +1332,19 @@ subroutine init_hist_bgc_2D "weighted by brine or skl volume ", c1, c0, & ns, f_grownet) - if (f_upNO(1:1) /= 'x') & + if (f_upNO(1:1) /= 'x') & call define_hist_field(n_upNO,"upNO","mmol/m^2/d",tstr2D, tcstr, & "Tot algal Nit uptake rate", & "weighted by ice area", c1, c0, & ns, f_upNO) - if (f_upNH(1:1) /= 'x') & + if (f_upNH(1:1) /= 'x') & call define_hist_field(n_upNH,"upNH","mmol/m^2/d",tstr2D, tcstr, & "Tot algal Am uptake rate", & "weighted by ice area", c1, c0,& ns, f_upNH) - ! vertical biogeochemistry + ! vertical biogeochemistry if (z_tracers) then if (f_fzaero(1:1) /= 'x') then @@ -1421,7 +1459,7 @@ subroutine init_hist_bgc_2D enddo endif !f_DONnet if (f_Fednet (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'dFe_net', trim(nchar) call define_hist_field(n_Fednet (n,:),vname_in,"umol/m^2",tstr2D, tcstr, & @@ -1429,9 +1467,9 @@ subroutine init_hist_bgc_2D "weighted by ice area ", c1, c0, & ns, f_Fednet ) enddo - endif !f_Fednet + endif !f_Fednet if (f_Fepnet (1:1) /= 'x') then - do n = 1, n_fep + do n = 1, n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'pFe_net', trim(nchar) call define_hist_field(n_Fepnet (n,:),vname_in,"umol/m^2",tstr2D, tcstr, & @@ -1439,7 +1477,7 @@ subroutine init_hist_bgc_2D "weighted by ice area ", c1, c0, & ns, f_Fepnet ) enddo - endif !f_Fepnet + endif !f_Fepnet if (f_Nitnet(1:1) /= 'x') & call define_hist_field(n_Nitnet,"Nit_net","mmol/m^2",tstr2D, tcstr, & "Net Nitrate", & @@ -1459,7 +1497,7 @@ subroutine init_hist_bgc_2D call define_hist_field(n_humnet,"hum_net","mmol/m^2",tstr2D, tcstr, & "Net humic material (carbon)", & "weighted by ice area", c1, c0, & - ns, f_humnet) + ns, f_humnet) if (f_DMSPpnet(1:1) /= 'x') & call define_hist_field(n_DMSPpnet,"DMSPp_net","mmol/m^2",tstr2D, tcstr, & "Net DMSPp", & @@ -1482,7 +1520,7 @@ subroutine init_hist_bgc_2D ns, f_PONnet) if (f_zaerosnow(1:1) /= 'x') then - do n = 1, n_zaero + do n = 1, n_zaero write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'zaero_snow', trim(nchar) call define_hist_field(n_zaerosnow(n,:),vname_in,"kg/m^2",tstr2D, tcstr, & @@ -1552,7 +1590,7 @@ subroutine init_hist_bgc_2D enddo endif !f_DONsnow if (f_Fedsnow (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'dFe_snow', trim(nchar) call define_hist_field(n_Fedsnow (n,:),vname_in,"umol/m^2",tstr2D, tcstr, & @@ -1560,9 +1598,9 @@ subroutine 
init_hist_bgc_2D "weighted by ice area ", c1, c0, & ns, f_Fedsnow ) enddo - endif !f_Fedsnow + endif !f_Fedsnow if (f_Fepsnow (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'pFe_snow', trim(nchar) call define_hist_field(n_Fepsnow (n,:),vname_in,"umol/m^2",tstr2D, tcstr, & @@ -1570,7 +1608,7 @@ subroutine init_hist_bgc_2D "weighted by ice area ", c1, c0, & ns, f_Fepsnow ) enddo - endif !f_Fepsnow + endif !f_Fepsnow if (f_Nitsnow(1:1) /= 'x') & call define_hist_field(n_Nitsnow,"Nit_snow","mmol/m^2",tstr2D, tcstr, & "Snow Nitrate", & @@ -1590,7 +1628,7 @@ subroutine init_hist_bgc_2D call define_hist_field(n_humsnow,"hum_snow","mmol/m^2",tstr2D, tcstr, & "Snow humic material (carbon)", & "weighted by ice area", c1, c0, & - ns, f_humsnow) + ns, f_humsnow) if (f_DMSPpsnow(1:1) /= 'x') & call define_hist_field(n_DMSPpsnow,"DMSPp_snow","mmol/m^2",tstr2D, tcstr, & "Snow DMSPp", & @@ -1613,7 +1651,7 @@ subroutine init_hist_bgc_2D ns, f_PONsnow) if (f_zaerofrac(1:1) /= 'x') then - do n = 1, n_zaero + do n = 1, n_zaero write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'zaero_frac', trim(nchar) call define_hist_field(n_zaerofrac(n,:),vname_in,"1",tstr2D, tcstr, & @@ -1673,7 +1711,7 @@ subroutine init_hist_bgc_2D enddo endif !f_DONfrac if (f_Fedfrac (1:1) /= 'x') then - do n = 1, n_fed + do n = 1, n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'dFe_frac', trim(nchar) call define_hist_field(n_Fedfrac (n,:),vname_in,"1",tstr2D, tcstr, & @@ -1681,9 +1719,9 @@ subroutine init_hist_bgc_2D "averaged over depth ", c1, c0, & ns, f_Fedfrac ) enddo - endif !f_Fedfrac + endif !f_Fedfrac if (f_Fepfrac (1:1) /= 'x') then - do n = 1, n_fep + do n = 1, n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'pFe_frac', trim(nchar) call define_hist_field(n_Fepfrac (n,:),vname_in,"1",tstr2D, tcstr, & @@ -1691,7 +1729,7 @@ subroutine init_hist_bgc_2D "averaged over depth ", c1, c0, & ns, f_Fepfrac ) enddo - endif !f_Fepfrac + endif !f_Fepfrac if (f_Nitfrac(1:1) /= 'x') & call define_hist_field(n_Nitfrac,"Nit_frac","1",tstr2D, tcstr, & "Mobile frac Nitrate", & @@ -1711,7 +1749,7 @@ subroutine init_hist_bgc_2D call define_hist_field(n_humfrac,"hum_frac","1",tstr2D, tcstr, & "Mobile frac humic material", & "averaged over depth", c1, c0, & - ns, f_humfrac) + ns, f_humfrac) if (f_DMSPpfrac(1:1) /= 'x') & call define_hist_field(n_DMSPpfrac,"DMSPp_frac","1",tstr2D, tcstr, & "Mobile frac DMSPp", & @@ -1742,18 +1780,17 @@ subroutine init_hist_bgc_2D "distance from ice bottom to brine surface", c1, c0, & ns, f_hbri) - endif ! histfreq(ns) /= 'x' - enddo ! nstreams - - endif ! tr_aero, etc + enddo ! nstreams + endif ! tr_aero, etc + end subroutine init_hist_bgc_2D !======================================================================= subroutine init_hist_bgc_3Dc - use ice_calendar, only: nstreams, histfreq + use ice_calendar, only: nstreams use ice_history_shared, only: tstr3Dc, tcstr, define_hist_field integer (kind=int_kind) :: ns @@ -1765,19 +1802,18 @@ subroutine init_hist_bgc_3Dc if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (tr_brine) then + if (tr_brine) then ! 
3D (category) variables must be looped separately do ns = 1, nstreams - if (histfreq(ns) /= 'x') then - if (f_fbri(1:1) /= 'x') & - call define_hist_field(n_fbri,"fbrine","1",tstr3Dc, tcstr, & + if (f_fbri(1:1) /= 'x') & + call define_hist_field(n_fbri,"fbrine","1",tstr3Dc, tcstr, & "brine tracer fraction of ice volume, cat", & - "none", c1, c0, ns, f_fbri) - endif ! histfreq /= 'x' + "none", c1, c0, & + ns, f_fbri) enddo ! ns - endif ! tr_brine + endif end subroutine init_hist_bgc_3Dc @@ -1785,27 +1821,27 @@ end subroutine init_hist_bgc_3Dc subroutine init_hist_bgc_3Db - use ice_calendar, only: nstreams,histfreq + use ice_calendar, only: nstreams use ice_history_shared, only: tstr3Db, tcstr, define_hist_field integer (kind=int_kind) :: ns real (kind=dbl_kind) :: secday - logical (kind=log_kind) :: z_tracers + logical (kind=log_kind) :: solve_zsal, z_tracers character(len=*), parameter :: subname = '(init_hist_bgc_3Db)' - + ! biology vertical grid call icepack_query_parameters(secday_out=secday) - call icepack_query_parameters(z_tracers_out=z_tracers) + call icepack_query_parameters( & + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (z_tracers) then + if (z_tracers .or. solve_zsal) then do ns = 1, nstreams - if (histfreq(ns) /= 'x') then - + if (f_bTin(1:1) /= 'x') & call define_hist_field(n_bTin,"bTizn","C",tstr3Db, tcstr, & "ice internal temperatures on bio grid", & @@ -1816,26 +1852,30 @@ subroutine init_hist_bgc_3Db call define_hist_field(n_bphi,"bphizn","%",tstr3Db, tcstr, & "porosity", "brine volume fraction", c100, c0, & ns, f_bphi) - - if (f_iDi(1:1) /= 'x') & + + if (f_iDi(1:1) /= 'x') & call define_hist_field(n_iDi,"iDin","m^2/d",tstr3Db, tcstr, & "interface diffusivity", "on bio interface grid", secday, c0, & ns, f_iDi) - - if (f_iki(1:1) /= 'x') & + + if (f_iki(1:1) /= 'x') & call define_hist_field(n_iki,"ikin","mm^2",tstr3Db, tcstr, & "permeability", "on bio interface grid", 1.0e6_dbl_kind, c0, & ns, f_iki) - + + if (f_bgc_S(1:1) /= 'x') & + call define_hist_field(n_bgc_S,"bgc_S","ppt",tstr3Db, tcstr, & + "bulk salinity", "on bio grid", c1, c0, & + ns, f_bgc_S) + if (f_zfswin(1:1) /= 'x') & call define_hist_field(n_zfswin,"zfswin","W/m^2",tstr3Db, tcstr, & "internal ice PAR", "on bio interface grid", c1, c0, & ns, f_zfswin) - - endif ! histfreq(ns) /= 'x' + enddo ! ns - endif ! z_tracers + endif ! z_tracers or solve_zsal end subroutine init_hist_bgc_3Db @@ -1847,16 +1887,16 @@ subroutine accum_hist_bgc (iblk) use ice_arrays_column, only: ocean_bio, & grow_net, PP_net, upNO, upNH, ice_bio_net, snow_bio_net, & - hbri, bTiz, bphi, zfswin, iDi, iki, & + hbri, bTiz, bphi, zfswin, iDi, iki, zsal_tot, fzsal, fzsal_g, & R_C2N, R_chl2N use ice_blocks, only: block, get_block, nx_block, ny_block use ice_domain, only: blocks_ice use ice_domain_size, only: nblyr use ice_flux, only: sss use ice_flux_bgc, only: fiso_atm, fiso_ocn, faero_atm, faero_ocn, & - flux_bio, flux_bio_ai - use ice_history_shared, only: n2D, a2D, a3Dc, & - n3Dzcum, n3Dbcum, a3Db, a3Da, & + flux_bio, flux_bio_ai, fzsal_ai, fzsal_g_ai + use ice_history_shared, only: n2D, a2D, a3Dc, & + n3Dzcum, n3Dbcum, a3Db, a3Da, & ncat_hist, accum_hist_field, nzblyr, nzalyr use ice_state, only: trcrn, trcr, aicen, aice, vicen @@ -1866,33 +1906,33 @@ subroutine accum_hist_bgc (iblk) ! local variables integer (kind=int_kind) :: & - i, j, n, k, & ! loop indices + i, j, n, k, & ! 
loop indices ilo,ihi,jlo,jhi ! beginning and end of physical domain - real (kind=dbl_kind), dimension (nx_block,ny_block,nblyr+4) :: & + real (kind=dbl_kind), dimension (nx_block,ny_block,nblyr+4) :: & workz, workz2 - real (kind=dbl_kind) :: & + real (kind=dbl_kind) :: & maxv, rhos, rhoi, rhow, puny, sk_l - real (kind=dbl_kind), dimension (nblyr+1) :: & + real (kind=dbl_kind), dimension (nblyr+1) :: & workv - real (kind=dbl_kind), dimension (nx_block,ny_block) :: & + real (kind=dbl_kind), dimension (nx_block,ny_block) :: & workni, worknj - integer (kind=int_kind), dimension (1) :: & + integer (kind=int_kind), dimension (1) :: & worki - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & workii logical (kind=log_kind) :: & - skl_bgc, z_tracers, tr_iso, tr_aero, tr_brine + skl_bgc, z_tracers, tr_iso, tr_aero, tr_brine, solve_zsal integer(kind=int_kind) :: & nt_isosno, nt_isoice, nt_aero, nt_fbri, & nt_bgc_Nit, nt_bgc_Am, nt_bgc_Sil, nt_bgc_DMSPp, & - nt_bgc_DMSPd, nt_bgc_DMS, nt_bgc_PON, & + nt_bgc_DMSPd, nt_bgc_DMS, nt_bgc_PON, nt_bgc_S, & nt_zbgc_frac, nlt_chl_sw, & nlt_bgc_Nit, nlt_bgc_Am, nlt_bgc_Sil, & nlt_bgc_DMS, nlt_bgc_PON, & @@ -1901,9 +1941,9 @@ subroutine accum_hist_bgc (iblk) integer (kind=int_kind), dimension(icepack_max_aero) :: & nlt_zaero_sw ! points to aerosol in trcrn_sw - + integer (kind=int_kind), dimension(icepack_max_algae) :: & - nt_bgc_N, nlt_bgc_N, & ! algae + nt_bgc_N, nlt_bgc_N, & ! algae nt_bgc_C, nlt_bgc_C, & ! nt_bgc_chl, nlt_bgc_chl ! @@ -1933,7 +1973,7 @@ subroutine accum_hist_bgc (iblk) call icepack_query_tracer_flags(tr_iso_out=tr_iso, & tr_aero_out=tr_aero, tr_brine_out=tr_brine) call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers) + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers) call icepack_query_tracer_indices( & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice, & nt_aero_out=nt_aero, & @@ -1945,7 +1985,7 @@ subroutine accum_hist_bgc (iblk) nt_bgc_Sil_out=nt_bgc_Sil, nt_bgc_DMSPp_out=nt_bgc_DMSPp, & nt_bgc_DMSPd_out=nt_bgc_DMSPd, nt_bgc_DMS_out=nt_bgc_DMS, & nt_bgc_PON_out=nt_bgc_PON, & - nt_bgc_Fed_out=nt_bgc_Fed, & + nt_bgc_S_out=nt_bgc_S, nt_bgc_Fed_out=nt_bgc_Fed, & nt_bgc_Fep_out=nt_bgc_Fep, nt_zbgc_frac_out=nt_zbgc_frac, & nlt_zaero_sw_out=nlt_zaero_sw, nlt_chl_sw_out=nlt_chl_sw, & nlt_bgc_Nit_out=nlt_bgc_Nit, nlt_bgc_Am_out=nlt_bgc_Am, & @@ -1961,8 +2001,8 @@ subroutine accum_hist_bgc (iblk) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - - this_block = get_block(blocks_ice(iblk),iblk) + + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1972,10 +2012,21 @@ subroutine accum_hist_bgc (iblk) ! increment field !--------------------------------------------------------------- - ! 2d bgc fields - if (allocated(a2D)) then + if (tr_iso .or. tr_aero .or. tr_brine .or. solve_zsal .or. skl_bgc) then + ! 2d bgc fields - if (tr_iso .or. tr_aero .or. tr_brine .or. skl_bgc) then + + ! zsalinity + if (f_fzsal (1:1) /= 'x') & + call accum_hist_field(n_fzsal, iblk, fzsal(:,:,iblk), a2D) + if (f_fzsal_ai(1:1)/= 'x') & + call accum_hist_field(n_fzsal_ai, iblk, fzsal_ai(:,:,iblk), a2D) + if (f_fzsal_g (1:1) /= 'x') & + call accum_hist_field(n_fzsal_g, iblk, fzsal_g(:,:,iblk), a2D) + if (f_fzsal_g_ai(1:1)/= 'x') & + call accum_hist_field(n_fzsal_g_ai,iblk, fzsal_g_ai(:,:,iblk), a2D) + if (f_zsal (1:1) /= 'x') & + call accum_hist_field(n_zsal, iblk, zsal_tot(:,:,iblk), a2D) ! 
isotopes if (f_fiso_atm(1:1) /= 'x') then @@ -2025,7 +2076,7 @@ subroutine accum_hist_bgc (iblk) enddo endif - if (skl_bgc) then + if (skl_bgc) then ! skeletal layer bgc @@ -2060,13 +2111,13 @@ subroutine accum_hist_bgc (iblk) enddo endif if (f_bgc_Fed (1:1)/= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_bgc_Fed (n,:), iblk, & sk_l*trcr(:,:,nt_bgc_Fed (n), iblk), a2D) enddo endif if (f_bgc_Fep (1:1)/= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_bgc_Fep (n,:), iblk, & sk_l*trcr(:,:,nt_bgc_Fep (n), iblk), a2D) enddo @@ -2079,32 +2130,32 @@ subroutine accum_hist_bgc (iblk) endif if (f_bgc_Nit(1:1)/= 'x') & call accum_hist_field(n_bgc_Nit, iblk, & - sk_l*trcr(:,:,nt_bgc_Nit, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_Nit, iblk), a2D) if (f_bgc_Am(1:1)/= 'x') & call accum_hist_field(n_bgc_Am, iblk, & - sk_l*trcr(:,:,nt_bgc_Am, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_Am, iblk), a2D) if (f_bgc_Sil(1:1)/= 'x') & call accum_hist_field(n_bgc_Sil, iblk, & - sk_l*trcr(:,:,nt_bgc_Sil, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_Sil, iblk), a2D) if (f_bgc_hum(1:1)/= 'x') & call accum_hist_field(n_bgc_hum, iblk, & - sk_l*trcr(:,:,nt_bgc_hum, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_hum, iblk), a2D) if (f_bgc_PON(1:1)/= 'x') & call accum_hist_field(n_bgc_PON, iblk, & - sk_l*trcr(:,:,nt_bgc_PON, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_PON, iblk), a2D) if (f_bgc_DMSPp(1:1)/= 'x') & call accum_hist_field(n_bgc_DMSPp,iblk, & - sk_l*trcr(:,:,nt_bgc_DMSPp,iblk), a2D) + sk_l*trcr(:,:,nt_bgc_DMSPp,iblk), a2D) if (f_bgc_DMSPd(1:1)/= 'x') & call accum_hist_field(n_bgc_DMSPd,iblk, & - sk_l*trcr(:,:,nt_bgc_DMSPd,iblk), a2D) + sk_l*trcr(:,:,nt_bgc_DMSPd,iblk), a2D) if (f_bgc_DMS(1:1)/= 'x') & call accum_hist_field(n_bgc_DMS, iblk, & - sk_l*trcr(:,:,nt_bgc_DMS, iblk), a2D) + sk_l*trcr(:,:,nt_bgc_DMS, iblk), a2D) - endif !skl_bgc + endif !skl_bgc - ! skeletal layer and vertical bgc + ! 
skeletal layer and vertical bgc if (f_bgc_DOC_ml(1:1)/= 'x') then do n=1,n_doc @@ -2125,13 +2176,13 @@ subroutine accum_hist_bgc (iblk) enddo endif if (f_bgc_Fed_ml (1:1)/= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_bgc_Fed_ml (n,:), iblk, & ocean_bio(:,:,nlt_bgc_Fed (n), iblk), a2D) enddo endif if (f_bgc_Fep_ml (1:1)/= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_bgc_Fep_ml (n,:), iblk, & ocean_bio(:,:,nlt_bgc_Fep (n), iblk), a2D) enddo @@ -2144,22 +2195,22 @@ subroutine accum_hist_bgc (iblk) endif if (f_bgc_Nit_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_Nit_ml, iblk, & - ocean_bio(:,:,nlt_bgc_Nit, iblk), a2D) + ocean_bio(:,:,nlt_bgc_Nit, iblk), a2D) if (f_bgc_Am_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_Am_ml, iblk, & - ocean_bio(:,:,nlt_bgc_Am, iblk), a2D) + ocean_bio(:,:,nlt_bgc_Am, iblk), a2D) if (f_bgc_Sil_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_Sil_ml, iblk, & - ocean_bio(:,:,nlt_bgc_Sil, iblk), a2D) + ocean_bio(:,:,nlt_bgc_Sil, iblk), a2D) if (f_bgc_hum_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_hum_ml, iblk, & - ocean_bio(:,:,nlt_bgc_hum, iblk), a2D) + ocean_bio(:,:,nlt_bgc_hum, iblk), a2D) if (f_bgc_DMSP_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_DMSP_ml, iblk, & - ocean_bio(:,:,nlt_bgc_DMSPd, iblk), a2D) + ocean_bio(:,:,nlt_bgc_DMSPd, iblk), a2D) if (f_bgc_DMS_ml(1:1)/= 'x') & call accum_hist_field(n_bgc_DMS_ml, iblk, & - ocean_bio(:,:,nlt_bgc_DMS, iblk), a2D) + ocean_bio(:,:,nlt_bgc_DMS, iblk), a2D) if (f_fNit (1:1) /= 'x') & call accum_hist_field(n_fNit, iblk, & @@ -2223,25 +2274,25 @@ subroutine accum_hist_bgc (iblk) enddo endif if (f_fFed (1:1)/= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_fFed (n,:), iblk, & flux_bio(:,:,nlt_bgc_Fed (n),iblk), a2D) enddo endif if (f_fFed_ai (1:1)/= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_fFed_ai (n,:), iblk, & flux_bio_ai(:,:,nlt_bgc_Fed (n),iblk), a2D) enddo endif if (f_fFep (1:1)/= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_fFep (n,:), iblk, & flux_bio(:,:,nlt_bgc_Fep (n),iblk), a2D) enddo endif if (f_fFep_ai (1:1)/= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_fFep_ai (n,:), iblk, & flux_bio_ai(:,:,nlt_bgc_Fep (n),iblk), a2D) enddo @@ -2287,7 +2338,7 @@ subroutine accum_hist_bgc (iblk) PP_net(:,:,iblk), a2D) if (f_grownet (1:1) /= 'x') & call accum_hist_field(n_grownet, iblk, & - grow_net(:,:,iblk), a2D) + grow_net(:,:,iblk), a2D) if (f_upNO (1:1) /= 'x') & call accum_hist_field(n_upNO, iblk, & upNO(:,:,iblk), a2D) @@ -2295,9 +2346,9 @@ subroutine accum_hist_bgc (iblk) call accum_hist_field(n_upNH, iblk, & upNH(:,:,iblk), a2D) - ! vertical biogeochemistry + ! vertical biogeochemistry - if (z_tracers) then + if (z_tracers) then if (f_fzaero(1:1)/= 'x') then do n=1,n_zaero @@ -2336,7 +2387,7 @@ subroutine accum_hist_bgc (iblk) enddo ! n endif !f_algalpeak - ! + ! ! ice_bio_net ! 
if (f_zaeronet (1:1) /= 'x') then @@ -2364,35 +2415,35 @@ subroutine accum_hist_bgc (iblk) enddo endif !f_Cnet if (f_DOCnet (1:1) /= 'x') then - do n=1,n_doc + do n=1,n_doc call accum_hist_field(n_DOCnet(n,:), iblk, & ice_bio_net(:,:,nlt_bgc_DOC(n), iblk), a2D) enddo endif !f_DOCnet if (f_DICnet (1:1) /= 'x') then - do n=1,n_dic + do n=1,n_dic call accum_hist_field(n_DICnet(n,:), iblk, & ice_bio_net(:,:,nlt_bgc_DIC(n), iblk), a2D) enddo endif !f_DICnet if (f_DONnet (1:1) /= 'x') then - do n=1,n_don + do n=1,n_don call accum_hist_field(n_DONnet(n,:), iblk, & ice_bio_net(:,:,nlt_bgc_DON(n), iblk), a2D) enddo endif !f_DONnet if (f_Fednet (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_Fednet (n,:), iblk, & ice_bio_net(:,:,nlt_bgc_Fed (n), iblk), a2D) enddo - endif !f_Fednet + endif !f_Fednet if (f_Fepnet (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_Fepnet (n,:), iblk, & ice_bio_net(:,:,nlt_bgc_Fep (n), iblk), a2D) enddo - endif !f_Fepnet + endif !f_Fepnet if (f_Nitnet (1:1) /= 'x') & call accum_hist_field(n_Nitnet, iblk, & @@ -2420,7 +2471,7 @@ subroutine accum_hist_bgc (iblk) ice_bio_net(:,:,nlt_bgc_PON, iblk), a2D) ! ! snow_bio_net - ! + ! if (f_zaerosnow (1:1) /= 'x') then do n=1,n_zaero call accum_hist_field(n_zaerosnow(n,:), iblk, & @@ -2446,35 +2497,35 @@ subroutine accum_hist_bgc (iblk) enddo endif !f_Csnow if (f_DOCsnow (1:1) /= 'x') then - do n=1,n_doc + do n=1,n_doc call accum_hist_field(n_DOCsnow(n,:), iblk, & snow_bio_net(:,:,nlt_bgc_DOC(n), iblk), a2D) enddo endif !f_DOCsnow if (f_DICsnow (1:1) /= 'x') then - do n=1,n_dic + do n=1,n_dic call accum_hist_field(n_DICsnow(n,:), iblk, & snow_bio_net(:,:,nlt_bgc_DIC(n), iblk), a2D) enddo endif !f_DICsnow if (f_DONsnow (1:1) /= 'x') then - do n=1,n_don + do n=1,n_don call accum_hist_field(n_DONsnow(n,:), iblk, & snow_bio_net(:,:,nlt_bgc_DON(n), iblk), a2D) enddo endif !f_DONsnow if (f_Fedsnow (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_Fedsnow (n,:), iblk, & snow_bio_net(:,:,nlt_bgc_Fed (n), iblk), a2D) enddo - endif !f_Fedsnow + endif !f_Fedsnow if (f_Fepsnow (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_Fepsnow (n,:), iblk, & snow_bio_net(:,:,nlt_bgc_Fep (n), iblk), a2D) enddo - endif !f_Fepsnow + endif !f_Fepsnow if (f_Nitsnow (1:1) /= 'x') & call accum_hist_field(n_Nitsnow, iblk, & @@ -2502,7 +2553,7 @@ subroutine accum_hist_bgc (iblk) snow_bio_net(:,:,nlt_bgc_PON, iblk), a2D) ! ! mobile frac - ! + ! 
if (f_zaerofrac (1:1) /= 'x') then do n=1,n_zaero call accum_hist_field(n_zaerofrac(n,:), iblk, & @@ -2522,35 +2573,35 @@ subroutine accum_hist_bgc (iblk) enddo endif !f_Nfrac if (f_DOCfrac (1:1) /= 'x') then - do n=1,n_doc + do n=1,n_doc call accum_hist_field(n_DOCfrac(n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_DOC(n), iblk), a2D) enddo endif !f_DOCfrac if (f_DICfrac (1:1) /= 'x') then - do n=1,n_dic + do n=1,n_dic call accum_hist_field(n_DICfrac(n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_DIC(n), iblk), a2D) enddo endif !f_DICfrac if (f_DONfrac (1:1) /= 'x') then - do n=1,n_don + do n=1,n_don call accum_hist_field(n_DONfrac(n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_DON(n), iblk), a2D) enddo endif !f_DONfrac if (f_Fedfrac (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed call accum_hist_field(n_Fedfrac (n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_Fed (n), iblk), a2D) enddo - endif !f_Fedfrac + endif !f_Fedfrac if (f_Fepfrac (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep call accum_hist_field(n_Fepfrac (n,:), iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_Fep (n), iblk), a2D) enddo - endif !f_Fepfrac + endif !f_Fepfrac if (f_Nitfrac (1:1) /= 'x') & call accum_hist_field(n_Nitfrac, iblk, & @@ -2563,7 +2614,7 @@ subroutine accum_hist_bgc (iblk) trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_Sil, iblk), a2D) if (f_humfrac (1:1) /= 'x') & call accum_hist_field(n_humfrac, iblk, & - trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_hum, iblk), a2D) + trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_hum, iblk), a2D) if (f_DMSPpfrac (1:1) /= 'x') & call accum_hist_field(n_DMSPpfrac, iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_DMSPp, iblk), a2D) @@ -2577,30 +2628,27 @@ subroutine accum_hist_bgc (iblk) call accum_hist_field(n_PONfrac, iblk, & trcr(:,:,nt_zbgc_frac - 1 + nlt_bgc_PON, iblk), a2D) - endif ! z_tracers + endif ! z_tracers ! brine if (f_hbri (1:1) /= 'x') & call accum_hist_field(n_hbri, iblk, & hbri(:,:,iblk), a2D) - endif ! 2d bgc tracers, tr_aero, tr_brine, skl_bgc - endif ! allocated(a2D) + endif ! 2d bgc tracers, tr_aero, tr_brine, solve_zsal, skl_bgc + ! 3D category fields - if (allocated(a3Dc)) then - if (tr_brine) then + if (tr_brine) then ! 3Dc bgc category fields if (f_fbri (1:1) /= 'x') & call accum_hist_field(n_fbri-n2D, iblk, ncat_hist, & trcrn(:,:,nt_fbri,1:ncat_hist,iblk), a3Dc) - endif - endif ! allocated(a3Dc) + endif - if (allocated(a3Db)) then - if (z_tracers) then + if (z_tracers .or. solve_zsal) then ! 3Db category fields if (f_bTin (1:1) /= 'x') then @@ -2635,6 +2683,21 @@ subroutine accum_hist_bgc (iblk) workz(:,:,1:nzblyr), a3Db) endif + if (f_bgc_S (1:1) /= 'x') then + workz(:,:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > c0) then + workz(i,j,1) = trcr(i,j,nt_bgc_S,iblk) + workz(i,j,2:nblyr+1) = trcr(i,j,nt_bgc_S:nt_bgc_S+nblyr-1,iblk) + workz(i,j,nblyr+2) = sss(i,j,iblk) + endif + enddo ! i + enddo ! j + call accum_hist_field(n_bgc_S-n3Dzcum, iblk, nzblyr, & + workz(:,:,1:nzblyr), a3Db) + endif + if (f_zfswin (1:1) /= 'x') then workz(:,:,:) = c0 do n = 1, ncat_hist @@ -2661,11 +2724,11 @@ subroutine accum_hist_bgc (iblk) do i = ilo, ihi if (aicen(i,j,n,iblk) > c0) then workz(i,j,k) = workz(i,j,k) + iDi(i,j,k,n,iblk)*vicen(i,j,n,iblk)**2/aicen(i,j,n,iblk) - workz(i,j,nzblyr) = workz(i,j,nzblyr-1) + workz(i,j,nzblyr) = workz(i,j,nzblyr-1) endif enddo ! i enddo ! j - enddo ! k + enddo ! k enddo ! n call accum_hist_field(n_iDi-n3Dzcum, iblk, nzblyr, & workz(:,:,1:nzblyr), a3Db) @@ -2685,17 +2748,15 @@ subroutine accum_hist_bgc (iblk) endif enddo ! i enddo ! 
j - enddo ! k + enddo ! k enddo ! n call accum_hist_field(n_iki-n3Dzcum, iblk, nzblyr, & workz(:,:,1:nzblyr), a3Db) endif - endif ! 3Db fields - endif ! allocated(a3Db) + endif ! 3Db fields - if (allocated(a3Da)) then - if (z_tracers) then + if (z_tracers) then ! 3Da category fields if (f_zaero (1:1) /= 'x') then @@ -2703,7 +2764,7 @@ subroutine accum_hist_bgc (iblk) workz(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_zaero(k)+nblyr+1:nt_zaero(k)+nblyr+2,iblk)/rhos workz(i,j,3:nblyr+3) = & !ice @@ -2711,7 +2772,7 @@ subroutine accum_hist_bgc (iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_zaero(k),iblk)/rhow !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_zaeros(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) enddo !k @@ -2722,14 +2783,14 @@ subroutine accum_hist_bgc (iblk) workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_N(k)+nblyr+1:nt_bgc_N(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_N(k):nt_bgc_N(k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_N(k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_N(k)+nblyr+1:nt_bgc_N(k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2737,7 +2798,7 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_N(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_N(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_N_cat1(k,:)-n3Dbcum, iblk, nzalyr, & @@ -2749,7 +2810,7 @@ subroutine accum_hist_bgc (iblk) workz(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow R_C2N(k)*trcr(i,j,nt_bgc_N(k)+nblyr+1:nt_bgc_N(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice @@ -2757,25 +2818,25 @@ subroutine accum_hist_bgc (iblk) workz(i,j,nblyr+4) = R_C2N(k)*ocean_bio(i,j,nlt_bgc_N(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_C(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) enddo !k endif !f_bgc_C if (f_bgc_DOC (1:1) /= 'x') then - do k = 1,n_doc + do k = 1,n_doc workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DOC(k)+nblyr+1:nt_bgc_DOC(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_DOC(k):nt_bgc_DOC(k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DOC(k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DOC(k)+nblyr+1:nt_bgc_DOC(k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2783,7 +2844,7 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DOC(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! 
j call accum_hist_field(n_bgc_DOC(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DOC_cat1(k,:)-n3Dbcum, iblk, nzalyr, & @@ -2791,19 +2852,19 @@ subroutine accum_hist_bgc (iblk) enddo !k endif !f_bgc_DOC if (f_bgc_DIC (1:1) /= 'x') then - do k = 1,n_dic + do k = 1,n_dic workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DIC(k)+nblyr+1:nt_bgc_DIC(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_DIC(k):nt_bgc_DIC(k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DIC(k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DIC(k)+nblyr+1:nt_bgc_DIC(k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2811,7 +2872,7 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DIC(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DIC(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DIC_cat1(k,:)-n3Dbcum, iblk, nzalyr, & @@ -2819,19 +2880,19 @@ subroutine accum_hist_bgc (iblk) enddo !k endif !f_bgc_DIC if (f_bgc_DON (1:1) /= 'x') then - do k = 1,n_don + do k = 1,n_don workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DON(k)+nblyr+1:nt_bgc_DON(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_DON(k):nt_bgc_DON(k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DON(k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DON(k)+nblyr+1:nt_bgc_DON(k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2839,7 +2900,7 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DON(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DON(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DON_cat1(k,:)-n3Dbcum, iblk, nzalyr, & @@ -2847,19 +2908,19 @@ subroutine accum_hist_bgc (iblk) enddo !k endif !f_bgc_DON if (f_bgc_Fed (1:1) /= 'x') then - do k = 1,n_fed + do k = 1,n_fed workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Fed (k)+nblyr+1:nt_bgc_Fed (k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_Fed (k):nt_bgc_Fed (k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Fed (k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Fed (k)+nblyr+1:nt_bgc_Fed (k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2867,27 +2928,27 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Fed (k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! 
j call accum_hist_field(n_bgc_Fed (k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Fed_cat1 (k,:)-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) enddo !k - endif !f_bgc_Fed + endif !f_bgc_Fed if (f_bgc_Fep (1:1) /= 'x') then - do k = 1,n_fep + do k = 1,n_fep workz(:,:,:) = c0 workz2(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Fep (k)+nblyr+1:nt_bgc_Fep (k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice trcr(i,j,nt_bgc_Fep (k):nt_bgc_Fep (k)+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Fep (k),iblk) !ocean endif - if (aicen(i,j,1,iblk) > puny) then + if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Fep (k)+nblyr+1:nt_bgc_Fep (k)+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice @@ -2895,19 +2956,19 @@ subroutine accum_hist_bgc (iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Fep (k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_Fep (k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Fep_cat1 (k,:)-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) enddo !k - endif !f_bgc_Fep + endif !f_bgc_Fep if (f_bgc_chl (1:1) /= 'x') then do k = 1,n_algae workz(:,:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then + if (aice(i,j,iblk) > puny) then workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_chl(k)+nblyr+1:nt_bgc_chl(k)+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice @@ -2915,12 +2976,12 @@ subroutine accum_hist_bgc (iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_chl(k),iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_chl(k,:)-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) enddo !k endif !f_bgc_chl - + if (f_bgc_Nit (1:1) /= 'x') then workz(:,:,:) = c0 workz2(:,:,:) = c0 @@ -2930,18 +2991,18 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Nit+nblyr+2:nt_bgc_Nit+nblyr+3,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_Nit:nt_bgc_Nit+nblyr,iblk) + trcr(i,j,nt_bgc_Nit:nt_bgc_Nit+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Nit,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Nit+nblyr+2:nt_bgc_Nit+nblyr+3,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_Nit:nt_bgc_Nit+nblyr,1,iblk) + trcrn(i,j,nt_bgc_Nit:nt_bgc_Nit+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Nit,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_Nit-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Nit_cat1-n3Dbcum, iblk, nzalyr, & @@ -2957,18 +3018,18 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Am+nblyr+1:nt_bgc_Am+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_Am:nt_bgc_Am+nblyr,iblk) + trcr(i,j,nt_bgc_Am:nt_bgc_Am+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Am,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Am+nblyr+1:nt_bgc_Am+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_Am:nt_bgc_Am+nblyr,1,iblk) + trcrn(i,j,nt_bgc_Am:nt_bgc_Am+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Am,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! 
j call accum_hist_field(n_bgc_Am-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Am_cat1-n3Dbcum, iblk, nzalyr, & @@ -2984,24 +3045,24 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_Sil+nblyr+1:nt_bgc_Sil+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_Sil:nt_bgc_Sil+nblyr,iblk) + trcr(i,j,nt_bgc_Sil:nt_bgc_Sil+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Sil,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_Sil+nblyr+1:nt_bgc_Sil+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_Sil:nt_bgc_Sil+nblyr,1,iblk) + trcrn(i,j,nt_bgc_Sil:nt_bgc_Sil+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_Sil,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_Sil-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_Sil_cat1-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) endif - + if (f_bgc_hum (1:1) /= 'x') then workz(:,:,:) = c0 @@ -3012,24 +3073,24 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_hum+nblyr+1:nt_bgc_hum+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_hum:nt_bgc_hum+nblyr,iblk) + trcr(i,j,nt_bgc_hum:nt_bgc_hum+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_hum,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_hum+nblyr+1:nt_bgc_hum+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_hum:nt_bgc_hum+nblyr,1,iblk) + trcrn(i,j,nt_bgc_hum:nt_bgc_hum+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_hum,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_hum-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_hum_cat1-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) endif - + if (f_bgc_DMSPd (1:1) /= 'x') then workz(:,:,:) = c0 workz2(:,:,:) = c0 @@ -3039,23 +3100,23 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DMSPd+nblyr+1:nt_bgc_DMSPd+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_DMSPd:nt_bgc_DMSPd+nblyr,iblk) + trcr(i,j,nt_bgc_DMSPd:nt_bgc_DMSPd+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMSPd,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DMSPd+nblyr+1:nt_bgc_DMSPd+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_DMSPd:nt_bgc_DMSPd+nblyr,1,iblk) + trcrn(i,j,nt_bgc_DMSPd:nt_bgc_DMSPd+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMSPd,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DMSPd-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DMSPd_cat1-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) - endif + endif if (f_bgc_DMSPp (1:1) /= 'x') then workz(:,:,:) = c0 @@ -3065,11 +3126,11 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DMSPp+nblyr+1:nt_bgc_DMSPp+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_DMSPp:nt_bgc_DMSPp+nblyr,iblk) + trcr(i,j,nt_bgc_DMSPp:nt_bgc_DMSPp+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMSPp,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! 
j call accum_hist_field(n_bgc_DMSPp-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) endif @@ -3083,18 +3144,18 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_DMS+nblyr+1:nt_bgc_DMS+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_DMS:nt_bgc_DMS+nblyr,iblk) + trcr(i,j,nt_bgc_DMS:nt_bgc_DMS+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMS,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_DMS+nblyr+1:nt_bgc_DMS+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_DMS:nt_bgc_DMS+nblyr,1,iblk) + trcrn(i,j,nt_bgc_DMS:nt_bgc_DMS+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_DMS,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_DMS-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_DMS_cat1-n3Dbcum, iblk, nzalyr, & @@ -3110,26 +3171,25 @@ subroutine accum_hist_bgc (iblk) workz(i,j,1:2) = & !snow trcr(i,j,nt_bgc_PON+nblyr+1:nt_bgc_PON+nblyr+2,iblk) workz(i,j,3:nblyr+3) = & !ice - trcr(i,j,nt_bgc_PON:nt_bgc_PON+nblyr,iblk) + trcr(i,j,nt_bgc_PON:nt_bgc_PON+nblyr,iblk) workz(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_PON,iblk) !ocean endif if (aicen(i,j,1,iblk) > puny) then workz2(i,j,1:2) = & !snow trcrn(i,j,nt_bgc_PON+nblyr+1:nt_bgc_PON+nblyr+2,1,iblk) workz2(i,j,3:nblyr+3) = & !ice - trcrn(i,j,nt_bgc_PON:nt_bgc_PON+nblyr,1,iblk) + trcrn(i,j,nt_bgc_PON:nt_bgc_PON+nblyr,1,iblk) workz2(i,j,nblyr+4) = ocean_bio(i,j,nlt_bgc_PON,iblk) !ocean endif enddo ! i - enddo ! j + enddo ! j call accum_hist_field(n_bgc_PON-n3Dbcum, iblk, nzalyr, & workz(:,:,1:nzalyr), a3Da) call accum_hist_field(n_bgc_PON_cat1-n3Dbcum, iblk, nzalyr, & workz2(:,:,1:nzalyr), a3Da) endif - endif ! z_tracers, 3Da tracers - endif ! allocated(a3Da) + endif ! z_tracers, 3Da tracers end subroutine accum_hist_bgc @@ -3137,7 +3197,7 @@ end subroutine accum_hist_bgc subroutine init_hist_bgc_3Da - use ice_calendar, only: nstreams, histfreq + use ice_calendar, only: nstreams use ice_history_shared, only: tstr3Da, tcstr, define_hist_field integer (kind=int_kind) :: ns, n @@ -3145,19 +3205,18 @@ subroutine init_hist_bgc_3Da character (len=3) :: nchar character (len=16):: vname_in ! variable name character(len=*), parameter :: subname = '(init_hist_bgc_3Da)' - + call icepack_query_parameters(z_tracers_out=z_tracers) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - ! snow+bio grid - - if (z_tracers) then + ! snow+bio grid - do ns = 1, nstreams - if (histfreq(ns) /= 'x') then + if (z_tracers) then + do ns = 1, nstreams + !---------------------------------------------------------------------------- ! snow+bio grid ==> ! 
1:2 snow (surface layer +interior), 3:nblyr+2 ice (bio grid), nblyr+3 ocean @@ -3172,12 +3231,12 @@ subroutine init_hist_bgc_3Da ns, f_zaero) enddo endif - - if (f_bgc_Nit(1:1) /= 'x') & + + if (f_bgc_Nit(1:1) /= 'x') & call define_hist_field(n_bgc_Nit,"bgc_Nit","mmol/m^3",tstr3Da, tcstr, & "bulk nitrate ", "snow+bio grid", c1, c0, & ns, f_bgc_Nit) - + if (f_bgc_Am(1:1) /= 'x') & call define_hist_field(n_bgc_Am,"bgc_Am","mmol/m^3",tstr3Da, tcstr, & "bulk ammonia/um ", "snow+bio grid", c1, c0, & @@ -3238,7 +3297,7 @@ subroutine init_hist_bgc_3Da enddo endif if (f_bgc_Fed (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'bgc_Fed', trim(nchar) call define_hist_field(n_bgc_Fed (n,:),vname_in,"umol/m^3",tstr3Da, tcstr, & @@ -3247,7 +3306,7 @@ subroutine init_hist_bgc_3Da enddo endif if (f_bgc_Fep (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'bgc_Fep', trim(nchar) call define_hist_field(n_bgc_Fep (n,:),vname_in,"umol/m^3",tstr3Da, tcstr, & @@ -3255,32 +3314,32 @@ subroutine init_hist_bgc_3Da ns, f_bgc_Fep ) enddo endif - + if (f_bgc_Sil(1:1) /= 'x') & call define_hist_field(n_bgc_Sil,"bgc_Sil","mmol/m^3",tstr3Da, tcstr, & "bulk silicate ", "snow+bio grid", c1, c0, & ns, f_bgc_Sil) - + if (f_bgc_hum(1:1) /= 'x') & call define_hist_field(n_bgc_hum,"bgc_hum","mmol/m^3",tstr3Da, tcstr, & "bulk humic (carbon) material ", "snow+bio grid", c1, c0, & ns, f_bgc_hum) - + if (f_bgc_DMSPp(1:1) /= 'x') & call define_hist_field(n_bgc_DMSPp,"bgc_DMSPp","mmol/m^3",tstr3Da, tcstr, & "bulk algal DMSP ", "snow+bio grid", c1, c0,& ns, f_bgc_DMSPp) - + if (f_bgc_DMSPd(1:1) /= 'x') & call define_hist_field(n_bgc_DMSPd,"bgc_DMSPd","mmol/m^3",tstr3Da, tcstr, & "bulk dissolved DMSP ", "snow+bio grid", c1, c0, & ns, f_bgc_DMSPd) - + if (f_bgc_DMS(1:1) /= 'x') & call define_hist_field(n_bgc_DMS,"bgc_DMS","mmol/m^3",tstr3Da, tcstr, & "bulk DMS gas ", "snow+bio grid", c1, c0, & ns, f_bgc_DMS) - + if (f_bgc_PON(1:1) /= 'x') & call define_hist_field(n_bgc_PON,"bgc_PON","mmol/m^3",tstr3Da, tcstr, & "other bulk nitrogen pool ", "snow+bio grid", c1, c0, & @@ -3290,11 +3349,11 @@ subroutine init_hist_bgc_3Da ! 
Category 1 BGC !---------------------------------------------- - if (f_bgc_Nit_cat1(1:1) /= 'x') & + if (f_bgc_Nit_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_Nit_cat1,"bgc_Nit_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk nitrate in cat 1 ", "snow+bio grid", c1, c0, & ns, f_bgc_Nit_cat1) - + if (f_bgc_Am_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_Am_cat1,"bgc_Am_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk ammonia/um in cat 1", "snow+bio grid", c1, c0, & @@ -3337,7 +3396,7 @@ subroutine init_hist_bgc_3Da enddo endif if (f_bgc_Fed_cat1 (1:1) /= 'x') then - do n=1,n_fed + do n=1,n_fed write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'bgc_Fed_cat1', trim(nchar) call define_hist_field(n_bgc_Fed_cat1 (n,:),vname_in,"umol/m^3",tstr3Da, tcstr, & @@ -3346,7 +3405,7 @@ subroutine init_hist_bgc_3Da enddo endif if (f_bgc_Fep_cat1 (1:1) /= 'x') then - do n=1,n_fep + do n=1,n_fep write(nchar,'(i3.3)') n write(vname_in,'(a,a)') 'bgc_Fep_cat1', trim(nchar) call define_hist_field(n_bgc_Fep_cat1 (n,:),vname_in,"umol/m^3",tstr3Da, tcstr, & @@ -3354,33 +3413,32 @@ subroutine init_hist_bgc_3Da ns, f_bgc_Fep_cat1 ) enddo endif - + if (f_bgc_Sil_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_Sil_cat1,"bgc_Sil_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk silicate in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_Sil_cat1) - + if (f_bgc_hum_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_hum,"bgc_hum_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk humic (carbon) material in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_hum_cat1) - + if (f_bgc_DMSPd_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_DMSPd_cat1,"bgc_DMSPd_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk dissolved DMSP in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_DMSPd_cat1) - + if (f_bgc_DMS_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_DMS_cat1,"bgc_DMS_cat1","mmol/m^3",tstr3Da, tcstr, & "bulk DMS gas in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_DMS_cat1) - + if (f_bgc_PON_cat1(1:1) /= 'x') & call define_hist_field(n_bgc_PON_cat1,"bgc_PON_cat1","mmol/m^3",tstr3Da, tcstr, & "other bulk nitrogen pool in cat 1", "snow+bio grid", c1, c0, & ns, f_bgc_PON_cat1) - - endif ! histfreq(ns) /= 'x' + enddo !ns endif ! z_tracers @@ -3398,7 +3456,7 @@ subroutine init_history_bgc use ice_arrays_column, only: PP_net, grow_net, hbri, & ice_bio_net, snow_bio_net, fbio_snoice, fbio_atmice, & - zfswin + fzsal, fzsal_g, zfswin use ice_flux_bgc, only: flux_bio, flux_bio_ai, fnit, fsil, & famm, fdmsp, fdms, fhum, fdust, falgalN, fdoc, fdic, & fdon, ffep, ffed @@ -3414,6 +3472,8 @@ subroutine init_history_bgc snow_bio_net(:,:,:,:) = c0 fbio_snoice (:,:,:,:) = c0 fbio_atmice (:,:,:,:) = c0 + fzsal (:,:,:) = c0 + fzsal_g (:,:,:) = c0 zfswin (:,:,:,:,:) = c0 fnit (:,:,:) = c0 fsil (:,:,:) = c0 diff --git a/cicecore/cicedyn/analysis/ice_history_shared.F90 b/cicecore/cicedyn/analysis/ice_history_shared.F90 index d6fa78542..ce177ad1e 100644 --- a/cicecore/cicedyn/analysis/ice_history_shared.F90 +++ b/cicecore/cicedyn/analysis/ice_history_shared.F90 @@ -2,18 +2,17 @@ ! ! Output files: netCDF or binary data, Fortran unformatted dumps ! -! The following variables are currently hard-wired as snapshots +! The following variables are currently hard-wired as snapshots ! (instantaneous rather than time-averages): -! divu, shear, vort, sig1, sig2, sigP, trsig, mlt_onset, -! frz_onset, hisnap, aisnap +! divu, shear, sig1, sig2, sigP, trsig, mlt_onset, frz_onset, hisnap, aisnap ! ! Options for histfreq: '1','h','d','m','y','x', where x means that -! 
output stream will not be used (recommended for efficiency). -! histfreq_n can be any nonnegative integer, where 0 means that the +! output stream will not be used (recommended for efficiency). +! histfreq_n can be any nonnegative integer, where 0 means that the ! corresponding histfreq frequency will not be used. ! The flags (f_) can be set to '1','h','d','m','y' or 'x', where ! n means the field will not be written. To output the same field at -! more than one frequency, for instance monthy and daily, set +! more than one frequency, for instance monthy and daily, set ! f_ = 'md'. ! ! authors Tony Craig and Bruce Briegleb, NCAR @@ -35,13 +34,13 @@ module ice_history_shared private public :: define_hist_field, accum_hist_field, icefields_nml, construct_filename - + integer (kind=int_kind), public :: history_precision logical (kind=log_kind), public :: & - hist_avg(max_nstrm) ! if true, write averaged data instead of snapshots + hist_avg ! if true, write averaged data instead of snapshots - character (len=char_len_long), public :: & + character (len=char_len), public :: & history_file , & ! output file for history incond_file ! output file for snapshot initial conditions @@ -49,26 +48,18 @@ module ice_history_shared history_dir , & ! directory name for history file incond_dir ! directory for snapshot initial conditions - character (len=char_len), public :: & - version_name + character (len=char_len_long), public :: & + pointer_file ! input pointer file for restarts character (len=char_len), public :: & - history_format , & ! history format, cdf1, cdf2, cdf5, etc - history_rearranger ! history file rearranger, box or subset for pio + version_name character (len=char_len), public :: & - hist_suffix(max_nstrm) ! appended to history_file in filename - - integer (kind=int_kind), public :: & - history_iotasks , & ! iotasks, root, stride defines io pes for pio - history_root , & ! iotasks, root, stride defines io pes for pio - history_stride , & ! iotasks, root, stride defines io pes for pio - history_deflate , & ! compression level for hdf5/netcdf4 - history_chunksize(2) ! chunksize for hdf5/netcdf4 + history_format !--------------------------------------------------------------- ! Instructions for adding a field: (search for 'example') - ! Here or in ice_history_[process].F90: + ! Here: ! (1) Add to frequency flags (f_) ! (2) Add to namelist (here and also in ice_in) ! (3) Add to index list @@ -90,8 +81,6 @@ module ice_history_shared real (kind=dbl_kind) :: conb ! additive conversion factor character (len=1) :: vhistfreq ! frequency of history output integer (kind=int_kind) :: vhistfreq_n ! number of vhistfreq intervals - logical (kind=log_kind) :: avg_ice_present ! only average where ice is present - logical (kind=log_kind) :: mask_ice_free_points ! mask ice-free points end type integer (kind=int_kind), parameter, public :: & @@ -131,10 +120,9 @@ module ice_history_shared avail_hist_fields(max_avail_hist_fields) integer (kind=int_kind), parameter, public :: & - ncoord = 8 , & ! number of coordinate variables: TLON, TLAT, ULON, ULAT, NLON, NLAT, ELON, ELAT - nvar_grd = 21 , & ! number of grid fields that can be written + nvar = 12 , & ! number of grid fields that can be written ! excluding grid vertices - nvar_grdz = 6 ! number of category/vertical grid fields written + nvarz = 6 ! number of category/vertical grid fields written integer (kind=int_kind), public :: & ncat_hist , & ! 
number of thickness categories written <= ncat @@ -144,8 +132,6 @@ module ice_history_shared time_end(max_nstrm), & time_bounds(2) - character (len=char_len), public :: hist_time_axis - real (kind=dbl_kind), allocatable, public :: & a2D (:,:,:,:) , & ! field accumulations/averages, 2D a3Dz(:,:,:,:,:) , & ! field accumulations/averages, 3D vertical @@ -156,7 +142,7 @@ module ice_history_shared a4Di(:,:,:,:,:,:), & ! field accumulations/averages, 4D categories,vertical, ice a4Ds(:,:,:,:,:,:), & ! field accumulations/averages, 4D categories,vertical, snow a4Df(:,:,:,:,:,:) ! field accumulations/averages, 4D floe size, thickness categories - + real (kind=dbl_kind), allocatable, public :: & Tinz4d (:,:,:,:) , & ! array for Tin Tsnz4d (:,:,:,:) , & ! array for Tsn @@ -166,49 +152,51 @@ module ice_history_shared avgct(max_nstrm) ! average sample counter logical (kind=log_kind), public :: & - icoord(ncoord) , & ! true if coord field is written to output file - igrd (nvar_grd), & ! true if grid field is written to output file - igrdz(nvar_grdz) ! true if category/vertical grid field is written + igrd (nvar), & ! true if grid field is written to output file + igrdz(nvarz) ! true if category/vertical grid field is written character (len=25), public, parameter :: & - ! T grids - tcstr = 'area: tarea' , & ! vcellmeas for T cell quantities - tstr2D = 'TLON TLAT time' , & ! vcoord for T cell, 2D - tstr3Dc = 'TLON TLAT NCAT time', & ! vcoord for T cell, 3D, ncat - tstr3Da = 'TLON TLAT VGRDa time', & ! vcoord for T cell, 3D, ice-snow-bio - tstr3Db = 'TLON TLAT VGRDb time', & ! vcoord for T cell, 3D, ice-bio - tstr3Df = 'TLON TLAT NFSD time', & ! vcoord for T cell, 3D, fsd + tcstr = 'area: tarea' , & ! vcellmeas for T cell quantities + ucstr = 'area: uarea' , & ! vcellmeas for U cell quantities + tstr2D = 'TLON TLAT time' , & ! vcoord for T cell quantities, 2D + ustr2D = 'ULON ULAT time' , & ! vcoord for U cell quantities, 2D + tstr3Dz = 'TLON TLAT VGRDi time',& ! vcoord for T cell quantities, 3D + ustr3Dz = 'ULON ULAT VGRDi time',& ! vcoord for U cell quantities, 3D + tstr3Dc = 'TLON TLAT NCAT time',& ! vcoord for T cell quantities, 3D + ustr3Dc = 'ULON ULAT NCAT time',& ! vcoord for U cell quantities, 3D + tstr3Db = 'TLON TLAT VGRDb time',& ! vcoord for T cell quantities, 3D + ustr3Db = 'ULON ULAT VGRDb time',& ! vcoord for U cell quantities, 3D + tstr3Da = 'TLON TLAT VGRDa time',& ! vcoord for T cell quantities, 3D + ustr3Da = 'ULON ULAT VGRDa time',& ! vcoord for U cell quantities, 3D + tstr3Df = 'TLON TLAT NFSD time',& ! vcoord for T cell quantities, 3D + ustr3Df = 'ULON ULAT NFSD time',& ! vcoord for U cell quantities, 3D + +!ferret tstr4Di = 'TLON TLAT VGRDi NCAT', & ! vcoord for T cell, 4D, ice + ustr4Di = 'ULON ULAT VGRDi NCAT', & ! vcoord for U cell, 4D, ice tstr4Ds = 'TLON TLAT VGRDs NCAT', & ! vcoord for T cell, 4D, snow + ustr4Ds = 'ULON ULAT VGRDs NCAT', & ! vcoord for U cell, 4D, snow tstr4Df = 'TLON TLAT NFSD NCAT', & ! vcoord for T cell, 4D, fsd - ! U grids - ucstr = 'area: uarea' , & ! vcellmeas for U cell quantities - ustr2D = 'ULON ULAT time' , & ! vcoord for U cell, 2D - ! N grids - ncstr = 'area: narea' , & ! vcellmeas for N cell quantities - nstr2D = 'NLON NLAT time' , & ! vcoord for N cell, 2D - ! E grids - ecstr = 'area: earea' , & ! vcellmeas for E cell quantities - estr2D = 'ELON ELAT time' ! vcoord for E cell, 2D + ustr4Df = 'ULON ULAT NFSD NCAT' ! vcoord for U cell, 4D, fsd +!ferret +! tstr4Di = 'TLON TLAT VGRDi NCAT time', & ! ferret can not handle time +! 
ustr4Di = 'ULON ULAT VGRDi NCAT time', & ! index on 4D variables. +! tstr4Ds = 'TLON TLAT VGRDs NCAT time', & ! Use 'ferret' lines instead +! ustr4Ds = 'ULON ULAT VGRDs NCAT time', & ! (below also) +! tstr4Db = 'TLON TLAT VGRDb NCAT time', & +! ustr4Db = 'ULON ULAT VGRDb NCAT time', & +! tstr4Df = 'TLON TLAT NFSD NCAT time', & +! ustr4Df = 'ULON ULAT NFSD NCAT time', & !--------------------------------------------------------------- ! flags: write to output file if true or histfreq value !--------------------------------------------------------------- logical (kind=log_kind), public :: & - f_tlon = .true., f_tlat = .true., & - f_ulon = .true., f_ulat = .true., & - f_nlon = .true., f_nlat = .true., & - f_elon = .true., f_elat = .true., & - f_tmask = .true., f_umask = .true., & - f_nmask = .true., f_emask = .true., & - f_blkmask = .true., & + f_tmask = .true., f_blkmask = .true., & f_tarea = .true., f_uarea = .true., & - f_narea = .true., f_earea = .true., & f_dxt = .true., f_dyt = .true., & f_dxu = .true., f_dyu = .true., & - f_dxn = .true., f_dyn = .true., & - f_dxe = .true., f_dye = .true., & f_HTN = .true., f_HTE = .true., & f_ANGLE = .true., f_ANGLET = .true., & f_bounds = .true., f_NCAT = .true., & @@ -222,11 +210,6 @@ module ice_history_shared f_snowfrac = 'x', f_snowfracn = 'x', & f_Tsfc = 'm', f_aice = 'm', & f_uvel = 'm', f_vvel = 'm', & - f_icespd = 'm', f_icedir = 'm', & - f_uvelE = 'x', f_vvelE = 'x', & - f_icespdE = 'x', f_icedirE = 'x', & - f_uvelN = 'x', f_vvelN = 'x', & - f_icespdN = 'x', f_icedirN = 'x', & f_uatm = 'm', f_vatm = 'm', & f_atmspd = 'm', f_atmdir = 'm', & f_fswup = 'm', & @@ -267,19 +250,7 @@ module ice_history_shared f_strocnx = 'm', f_strocny = 'm', & f_strintx = 'm', f_strinty = 'm', & f_taubx = 'm', f_tauby = 'm', & - f_strairxN = 'x', f_strairyN = 'x', & - f_strtltxN = 'x', f_strtltyN = 'x', & - f_strcorxN = 'x', f_strcoryN = 'x', & - f_strocnxN = 'x', f_strocnyN = 'x', & - f_strintxN = 'x', f_strintyN = 'x', & - f_taubxN = 'x', f_taubyN = 'x', & - f_strairxE = 'x', f_strairyE = 'x', & - f_strtltxE = 'x', f_strtltyE = 'x', & - f_strcorxE = 'x', f_strcoryE = 'x', & - f_strocnxE = 'x', f_strocnyE = 'x', & - f_strintxE = 'x', f_strintyE = 'x', & - f_taubxE = 'x', f_taubyE = 'x', & - f_strength = 'm', f_vort = 'm', & + f_strength = 'm', & f_divu = 'm', f_shear = 'm', & f_sig1 = 'm', f_sig2 = 'm', & f_sigP = 'm', & @@ -314,7 +285,6 @@ module ice_history_shared f_sidmasslat = 'x', & f_sndmasssnf = 'x', & f_sndmassmelt = 'x', & - f_sndmassdyn = 'x', & f_siflswdtop = 'x', & f_siflswutop = 'x', & f_siflswdbot = 'x', & @@ -355,10 +325,10 @@ module ice_history_shared f_keffn_top = 'x', & f_Tinz = 'x', f_Sinz = 'x', & f_Tsnz = 'x', & - f_a11 = 'x', f_a12 = 'x', & - f_e11 = 'x', f_e12 = 'x', & + f_a11 = 'x', f_a12 = 'x', & + f_e11 = 'x', f_e12 = 'x', & f_e22 = 'x', & - f_s11 = 'x', f_s12 = 'x', & + f_s11 = 'x', f_s12 = 'x', & f_s22 = 'x', & f_yieldstress11 = 'x', & f_yieldstress12 = 'x', & @@ -369,19 +339,10 @@ module ice_history_shared !--------------------------------------------------------------- namelist / icefields_nml / & - f_tlon , f_tlat , & - f_ulon , f_ulat , & - f_nlon , f_nlat , & - f_elon , f_elat , & - f_tmask , f_umask , & - f_nmask , f_emask , & - f_blkmask , & + f_tmask , f_blkmask , & f_tarea , f_uarea , & - f_narea , f_earea , & f_dxt , f_dyt , & f_dxu , f_dyu , & - f_dxn , f_dyn , & - f_dxe , f_dye , & f_HTN , f_HTE , & f_ANGLE , f_ANGLET , & f_bounds , f_NCAT , & @@ -393,17 +354,11 @@ module ice_history_shared f_snowfrac, f_snowfracn, & f_Tsfc, 
f_aice , & f_uvel, f_vvel , & - f_icespd, f_icedir , & -! For now, C and CD grid quantities are controlled by the generic (originally B-grid) namelist flag -! f_uvelE, f_vvelE , & -! f_icespdE, f_icedirE , & -! f_uvelN, f_vvelN , & -! f_icespdN, f_icedirN , & f_uatm, f_vatm , & f_atmspd, f_atmdir , & f_fswup, & f_fswdn, f_flwdn , & - f_snow, f_snow_ai , & + f_snow, f_snow_ai , & f_rain, f_rain_ai , & f_sst, f_sss , & f_uocn, f_vocn , & @@ -428,8 +383,8 @@ module ice_history_shared f_snoice, f_dsnow , & f_meltt, f_melts , & f_meltb, f_meltl , & - f_fresh, f_fresh_ai , & - f_fsalt, f_fsalt_ai , & + f_fresh, f_fresh_ai , & + f_fsalt, f_fsalt_ai , & f_fbot, & f_fhocn, f_fhocn_ai , & f_fswthru, f_fswthru_ai,& @@ -439,19 +394,7 @@ module ice_history_shared f_strocnx, f_strocny , & f_strintx, f_strinty , & f_taubx, f_tauby , & -! f_strairxN, f_strairyN , & -! f_strtltxN, f_strtltyN , & -! f_strcorxN, f_strcoryN , & -! f_strocnxN, f_strocnyN , & -! f_strintxN, f_strintyN , & -! f_taubxN, f_taubyN , & -! f_strairxE, f_strairyE , & -! f_strtltxE, f_strtltyE , & -! f_strcorxE, f_strcoryE , & -! f_strocnxE, f_strocnyE , & -! f_strintxE, f_strintyE , & -! f_taubxE, f_taubyE , & - f_strength, f_vort , & + f_strength, & f_divu, f_shear , & f_sig1, f_sig2 , & f_sigP, & @@ -486,7 +429,6 @@ module ice_history_shared f_sidmasslat, & f_sndmasssnf, & f_sndmassmelt, & - f_sndmassdyn, & f_siflswdtop, & f_siflswutop, & f_siflswdbot, & @@ -541,36 +483,18 @@ module ice_history_shared !--------------------------------------------------------------- integer (kind=int_kind), parameter, public :: & - n_tlon = 1, & - n_tlat = 2, & - n_ulon = 3, & - n_ulat = 4, & - n_nlon = 5, & - n_nlat = 6, & - n_elon = 7, & - n_elat = 8, & - n_tmask = 1, & - n_umask = 2, & - n_nmask = 3, & - n_emask = 4, & - n_blkmask = 5, & - n_tarea = 6, & - n_uarea = 7, & - n_narea = 8, & - n_earea = 9, & - n_dxt = 10, & - n_dyt = 11, & - n_dxu = 12, & - n_dyu = 13, & - n_dxn = 14, & - n_dyn = 15, & - n_dxe = 16, & - n_dye = 17, & - n_HTN = 18, & - n_HTE = 19, & - n_ANGLE = 20, & - n_ANGLET = 21, & + n_blkmask = 2, & + n_tarea = 3, & + n_uarea = 4, & + n_dxt = 5, & + n_dyt = 6, & + n_dxu = 7, & + n_dyu = 8, & + n_HTN = 9, & + n_HTE = 10, & + n_ANGLE = 11, & + n_ANGLET = 12, & n_NCAT = 1, & n_VGRDi = 2, & @@ -582,11 +506,7 @@ module ice_history_shared n_lont_bnds = 1, & n_latt_bnds = 2, & n_lonu_bnds = 3, & - n_latu_bnds = 4, & - n_lonn_bnds = 5, & - n_latn_bnds = 6, & - n_lone_bnds = 7, & - n_late_bnds = 8 + n_latu_bnds = 4 integer (kind=int_kind), dimension(max_nstrm), public :: & ! 
n_example , & @@ -594,11 +514,6 @@ module ice_history_shared n_snowfrac , n_snowfracn , & n_Tsfc , n_aice , & n_uvel , n_vvel , & - n_icespd , n_icedir , & - n_uvelE , n_vvelE , & - n_icespdE , n_icedirE , & - n_uvelN , n_vvelN , & - n_icespdN , n_icedirN , & n_uatm , n_vatm , & n_atmspd , n_atmdir , & n_sice , & @@ -641,19 +556,7 @@ module ice_history_shared n_strocnx , n_strocny , & n_strintx , n_strinty , & n_taubx , n_tauby , & - n_strairxN , n_strairyN , & - n_strtltxN , n_strtltyN , & - n_strcorxN , n_strcoryN , & - n_strocnxN , n_strocnyN , & - n_strintxN , n_strintyN , & - n_taubxN , n_taubyN , & - n_strairxE , n_strairyE , & - n_strtltxE , n_strtltyE , & - n_strcorxE , n_strcoryE , & - n_strocnxE , n_strocnyE , & - n_strintxE , n_strintyE , & - n_taubxE , n_taubyE , & - n_strength , n_vort , & + n_strength , & n_divu , n_shear , & n_sig1 , n_sig2 , & n_sigP , & @@ -686,7 +589,6 @@ module ice_history_shared n_sidmasslat, & n_sndmasssnf, & n_sndmassmelt, & - n_sndmassdyn, & n_siflswdtop, & n_siflswutop, & n_siflswdbot, & @@ -718,7 +620,7 @@ module ice_history_shared n_trsig , n_icepresent , & n_iage , n_FY , & n_fsurf_ai , & - n_fcondtop_ai, n_fmeltt_ai , & + n_fcondtop_ai, n_fmeltt_ai , & n_aicen , n_vicen , & n_fsurfn_ai , & n_fcondtopn_ai, & @@ -751,105 +653,84 @@ module ice_history_shared subroutine construct_filename(ncfile,suffix,ns) - use ice_calendar, only: msec, myear, mmonth, daymo, & + use ice_calendar, only: sec, nyr, month, daymo, & mday, write_ic, histfreq, histfreq_n, & - new_year, new_month, new_day, & + year_init, new_year, new_month, new_day, & dt use ice_restart_shared, only: lenstr - character (len=*), intent(inout) :: ncfile - character (len=*), intent(in) :: suffix + character (char_len_long), intent(inout) :: ncfile + character (len=2), intent(in) :: suffix integer (kind=int_kind), intent(in) :: ns integer (kind=int_kind) :: iyear, imonth, iday, isec - integer (kind=int_kind) :: n - character (len=char_len) :: cstream - character (len=char_len_long), save :: ncfile_last(max_nstrm) = 'UnDefineD' + character (len=1) :: cstream character(len=*), parameter :: subname = '(construct_filename)' - iyear = myear - imonth = mmonth + iyear = nyr + year_init - 1 ! set year_init=1 in ice_in to get iyear=nyr + imonth = month iday = mday - isec = int(msec - dt,int_kind) - cstream = '' - if (hist_suffix(ns) /= 'x') cstream = hist_suffix(ns) + isec = sec - dt + if (write_ic) isec = sec ! construct filename if (write_ic) then - isec = msec write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & incond_file(1:lenstr(incond_file)),'.',iyear,'-', & - imonth,'-',iday,'-',isec,'.',trim(suffix) + imonth,'-',iday,'-',isec,'.',suffix else - if (hist_avg(ns)) then - if (histfreq(ns) == '1' .or. histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then - ! do nothing - elseif (new_year) then - iyear = iyear - 1 - imonth = 12 - iday = daymo(imonth) - elseif (new_month) then - imonth = mmonth - 1 - iday = daymo(imonth) - elseif (new_day) then - iday = iday - 1 - endif - endif - - if (hist_avg(ns)) then ! write averaged data - if (histfreq(ns) == '1' .and. histfreq_n(ns) == 1) then ! timestep - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & - iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) - elseif (histfreq(ns) == '1' .and. histfreq_n(ns) > 1) then ! 
timestep - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) - elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! hourly - write(ncfile,'(a,a,i2.2,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_', & - histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) - elseif (histfreq(ns) == 'd'.or.histfreq(ns) == 'D') then ! daily - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'-',imonth,'-',iday,'.',trim(suffix) - elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! monthly - write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'-',imonth,'.',trim(suffix) - elseif (histfreq(ns) == 'y'.or.histfreq(ns) == 'Y') then ! yearly - write(ncfile,'(a,a,i4.4,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'.', & - iyear,'.',trim(suffix) - endif - - else ! instantaneous - write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & - history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & - iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) - endif + if (hist_avg .and. histfreq(ns) /= '1') then + if (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then + ! do nothing + elseif (new_year) then + iyear = iyear - 1 + imonth = 12 + iday = daymo(imonth) + elseif (new_month) then + imonth = month - 1 + iday = daymo(imonth) + elseif (new_day) then + iday = iday - 1 + endif + endif - endif + cstream = '' +!echmod ! this was implemented for CESM but it breaks post-processing software +!echmod ! of other groups (including RASM which uses CESMCOUPLED) +!echmod if (ns > 1) write(cstream,'(i1.1)') ns-1 - ! Check whether the filename is already in use. - ! Same filename in multiple history streams leads to files being overwritten (not good). - ! The current filename convention means we just have to check latest filename, - ! not all filenames ever generated because of use of current model date/time in filename. - - ! write(nu_diag,'(2a,i2,1x,a)') subname, 'debug ncfile= ',ns,trim(ncfile) - do n = 1,max_nstrm - ! write(nu_diag,'(2a,i2,1x,a)') subname, 'debug nfile_last= ',n,trim(ncfile_last(n)) - if (ncfile == ncfile_last(n)) then - write(nu_diag,*) subname,' history stream = ',ns - write(nu_diag,*) subname,' history filename = ',trim(ncfile) - write(nu_diag,*) subname,' filename in use for stream ',n - write(nu_diag,*) subname,' filename for stream ',trim(ncfile_last(n)) - write(nu_diag,*) subname,' Use namelist hist_suffix so history filenames are unique' - call abort_ice(subname//' ERROR: history filename already used for another history stream '//trim(ncfile)) - endif - enddo - ncfile_last(ns) = ncfile + if (histfreq(ns) == '1') then ! instantaneous, write every dt + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'_inst.', & + iyear,'-',imonth,'-',iday,'-',sec,'.',suffix + + elseif (hist_avg) then ! write averaged data + + if (histfreq(ns) == 'd'.or.histfreq(ns) == 'D') then ! daily + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream), & + '.',iyear,'-',imonth,'-',iday,'.',suffix + elseif (histfreq(ns) == 'h'.or.histfreq(ns) == 'H') then ! 
hourly + write(ncfile,'(a,a,i2.2,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'_', & + histfreq_n(ns),'h.',iyear,'-',imonth,'-',iday,'-',sec,'.',suffix + elseif (histfreq(ns) == 'm'.or.histfreq(ns) == 'M') then ! monthly + write(ncfile,'(a,a,i4.4,a,i2.2,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'-',imonth,'.',suffix + elseif (histfreq(ns) == 'y'.or.histfreq(ns) == 'Y') then ! yearly + write(ncfile,'(a,a,i4.4,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream),'.', & + iyear,'.',suffix + endif + + else ! instantaneous with histfreq > dt + write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file)),'_inst.', & + iyear,'-',imonth,'-',iday,'-',sec,'.',suffix + endif + endif end subroutine construct_filename @@ -862,7 +743,7 @@ end subroutine construct_filename subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & vdesc, vcomment, cona, conb, & - ns, vhistfreq, avg_ice_present, mask_ice_free_points) + ns, vhistfreq) use ice_calendar, only: histfreq, histfreq_n @@ -884,34 +765,20 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & character (len=*), intent(in) :: & vhistfreq ! history frequency - + integer (kind=int_kind), intent(in) :: & ns ! history file stream index - logical (kind=log_kind), optional, intent(in) :: & - avg_ice_present , & ! compute average only when ice is present - mask_ice_free_points ! mask ice-free points - integer (kind=int_kind) :: & ns1 , & ! variable stream loop index lenf ! length of namelist string character (len=40) :: stmp - logical (kind=log_kind) :: & - l_avg_ice_present , & ! compute average only when ice is present - l_mask_ice_free_points ! mask ice-free points - character(len=*), parameter :: subname = '(define_hist_field)' - l_avg_ice_present = .false. - l_mask_ice_free_points = .false. - - if(present(avg_ice_present)) l_avg_ice_present = avg_ice_present - if(present(mask_ice_free_points)) l_mask_ice_free_points = mask_ice_free_points - if (histfreq(ns) == 'x') then - call abort_ice(subname//' ERROR: define_hist_fields has histfreq x') + call abort_ice(subname//'ERROR: define_hist_fields has histfreq x') endif if (ns == 1) id(:) = 0 @@ -920,10 +787,6 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & do ns1 = 1, lenf if (vhistfreq(ns1:ns1) == histfreq(ns)) then - if (ns1 > 1 .and. 
index(vhistfreq(1:ns1-1),'x') /= 0) then - call abort_ice(subname//' ERROR: history frequency variable f_' // vname // ' can''t contain ''x'' along with active frequencies') - endif - num_avail_hist_fields_tot = num_avail_hist_fields_tot + 1 if (vcoord(11:14) == 'time') then @@ -951,7 +814,7 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & write(nu_diag,*) subname,' num_avail_hist_fields_tot = ',num_avail_hist_fields_tot write(nu_diag,*) subname,' max_avail_hist_fields = ',max_avail_hist_fields endif - call abort_ice(subname//' ERROR: Need in computation of max_avail_hist_fields') + call abort_ice(subname//'ERROR: Need in computation of max_avail_hist_fields') endif if (num_avail_hist_fields_tot /= & @@ -967,7 +830,7 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & if (my_task == master_task) then write(nu_diag,*) subname,' num_avail_hist_fields_tot = ',num_avail_hist_fields_tot endif - call abort_ice(subname//' ERROR: in num_avail_hist_fields') + call abort_ice(subname//'ERROR: in num_avail_hist_fields') endif id(ns) = num_avail_hist_fields_tot @@ -986,8 +849,6 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & avail_hist_fields(id(ns))%conb = conb avail_hist_fields(id(ns))%vhistfreq = vhistfreq(ns1:ns1) avail_hist_fields(id(ns))%vhistfreq_n = histfreq_n(ns) - avail_hist_fields(id(ns))%avg_ice_present = l_avg_ice_present - avail_hist_fields(id(ns))%mask_ice_free_points = l_mask_ice_free_points endif enddo @@ -1011,7 +872,7 @@ subroutine accum_hist_field_2D(id, iblk, field_accum, field) integer (int_kind), dimension(:), intent(in) :: & ! max_nstrm id ! location in avail_fields array for use in ! later routines - + integer (kind=int_kind), intent(in) :: iblk real (kind=dbl_kind), intent(in) :: & @@ -1071,7 +932,7 @@ subroutine accum_hist_field_3D(id, iblk, ndim, field_accum, field) integer (int_kind), dimension(:), intent(in) :: & ! max_nstrm id ! location in avail_fields array for use in ! later routines - + integer (kind=int_kind), intent(in) :: iblk integer (kind=int_kind), intent(in) :: & @@ -1136,7 +997,7 @@ subroutine accum_hist_field_4D(id, iblk, ndim3, ndim4, field_accum, field) integer (int_kind), dimension(:), intent(in) :: & ! max_nstrm id ! location in avail_fields array for use in ! later routines - + integer (kind=int_kind), intent(in) :: iblk integer (kind=int_kind), intent(in) :: & diff --git a/cicecore/cicedyn/dynamics/ice_transport_driver.F90 b/cicecore/cicedyn/dynamics/ice_transport_driver.F90 index fca964593..ed3327672 100644 --- a/cicecore/cicedyn/dynamics/ice_transport_driver.F90 +++ b/cicecore/cicedyn/dynamics/ice_transport_driver.F90 @@ -1,14 +1,16 @@ !======================================================================= ! -! Drivers for remapping and upwind ice transport +!deprecate upwind Drivers for remapping and upwind ice transport +! Drivers for incremental remapping ice transport ! -! authors: Elizabeth C. Hunke and William H. Lipscomb, LANL +! authors: Elizabeth C. Hunke and William H. Lipscomb, LANL ! ! 2004: Revised by William Lipscomb from ice_transport_mpdata. ! Stripped out mpdata, retained upwind, and added block structure. ! 2006: Incorporated remap transport driver and renamed from -! ice_transport_upwind. +! ice_transport_upwind. ! 2011: ECH moved edgearea arrays into ice_transport_remap.F90 +! 
2020: deprecated upwind transport module ice_transport_driver @@ -17,9 +19,7 @@ module ice_transport_driver use ice_constants, only: c0, c1, p5, & field_loc_center, & field_type_scalar, field_type_vector, & - field_loc_NEcorner, & field_loc_Nface, field_loc_Eface - use ice_diagnostics, only: diagnostic_abort use ice_fileunits, only: nu_diag use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -30,37 +30,38 @@ module ice_transport_driver implicit none private - public :: init_transport, transport_remap, transport_upwind + public :: init_transport, transport_remap!deprecate upwind:, transport_upwind character (len=char_len), public :: & advection ! type of advection scheme used - ! 'upwind' => 1st order donor cell scheme +!deprecate upwind ! 'upwind' => 1st order donor cell scheme ! 'remap' => remapping scheme + ! 'none' => advection off (ktransport = -1 also turns it off) + + logical, parameter :: & ! if true, prescribe area flux across each edge + l_fixed_area = .false. ! NOTE: For remapping, hice and hsno are considered tracers. ! ntrace is not equal to ntrcr! integer (kind=int_kind) :: & ntrace ! number of tracers in use - + integer (kind=int_kind), dimension(:), allocatable, public :: & - tracer_type , & ! = 1, 2, or 3 (depends on 0, 1 or 2 other tracers) + tracer_type ,&! = 1, 2, or 3 (depends on 0, 1 or 2 other tracers) depend ! tracer dependencies (see below) logical (kind=log_kind), dimension (:), allocatable, public :: & has_dependents ! true if a tracer has dependent tracers - logical (kind=log_kind), public :: & - conserv_check ! if true, check conservation - integer (kind=int_kind), parameter :: & - integral_order = 3 ! polynomial order of quadrature integrals - ! linear=1, quadratic=2, cubic=3 + integral_order = 3 ! polynomial order of quadrature integrals + ! linear=1, quadratic=2, cubic=3 logical (kind=log_kind), parameter :: & - l_dp_midpt = .true. ! if true, find departure points using - ! corrected midpoint velocity - + l_dp_midpt = .true. ! if true, find departure points using + ! corrected midpoint velocity + !======================================================================= contains @@ -68,8 +69,9 @@ module ice_transport_driver !======================================================================= ! ! This subroutine is a wrapper for init_remap, which initializes the -! remapping transport scheme. If the model is run with upwind -! transport, no initializations are necessary. +! remapping transport scheme. +!deprecate upwind If the model is run with upwind +!deprecate upwind! transport, no initializations are necessary. ! ! authors William H. Lipscomb, LANL @@ -78,161 +80,137 @@ subroutine init_transport use ice_state, only: trcr_depend use ice_timers, only: ice_timer_start, ice_timer_stop, timer_advect use ice_transport_remap, only: init_remap - use ice_grid, only: grid_ice integer (kind=int_kind) :: & k, nt, nt1 ! tracer indices - integer (kind=int_kind) :: & - ntrcr , nt_Tsfc , nt_qice , nt_qsno , & - nt_sice , nt_fbri , nt_iage , nt_FY , & - nt_alvl , nt_vlvl , & - nt_apnd , nt_hpnd , nt_ipnd , nt_fsd , & - nt_smice , nt_smliq , nt_rhos , nt_rsnw , & - nt_isosno, nt_isoice, nt_bgc_Nit + integer (kind=int_kind) :: ntrcr, nt_Tsfc, nt_qice, nt_qsno, & + nt_sice, nt_fbri, nt_iage, nt_FY, nt_alvl, nt_vlvl, & + nt_apnd, nt_hpnd, nt_ipnd, nt_fsd, nt_isosno, nt_isoice, nt_bgc_Nit, nt_bgc_S character(len=*), parameter :: subname = '(init_transport)' - call ice_timer_start(timer_advect) ! 
advection + call ice_timer_start(timer_advect) ! advection call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & - nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, nt_fbri_out=nt_fbri, & - nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_fsd_out=nt_fsd, & - nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & - nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & - nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, nt_rhos_out=nt_rhos, & - nt_rsnw_out=nt_rsnw, nt_bgc_Nit_out=nt_bgc_Nit, & - nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, nt_fbri_out=nt_fbri, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_alvl_out=nt_alvl, nt_fsd_out=nt_fsd, & + nt_vlvl_out=nt_vlvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & + nt_ipnd_out=nt_ipnd, nt_bgc_Nit_out=nt_bgc_Nit, nt_bgc_S_out=nt_bgc_S, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) ntrace = 2 + ntrcr ! hice,hsno,trcr - if (allocated(tracer_type)) deallocate(tracer_type) - if (allocated(depend)) deallocate(depend) + if (allocated(tracer_type)) deallocate(tracer_type) + if (allocated(depend)) deallocate(depend) if (allocated(has_dependents)) deallocate(has_dependents) allocate (tracer_type (ntrace), & depend (ntrace), & has_dependents(ntrace)) - ! define tracer dependency arrays - ! see comments in remapping routine - - depend(1:2) = 0 ! hice, hsno - tracer_type(1:2) = 1 ! no dependency - - k = 2 - - do nt = 1, ntrcr - depend(k+nt) = trcr_depend(nt) ! 0 for ice area tracers - ! 1 for ice volume tracers - ! 2 for snow volume tracers - tracer_type(k+nt) = 2 ! depends on 1 other tracer - if (trcr_depend(nt) == 0) then - tracer_type(k+nt) = 1 ! depends on no other tracers - elseif (trcr_depend(nt) > 2) then - if (trcr_depend(trcr_depend(nt)-2) > 0) then - tracer_type(k+nt) = 3 ! depends on 2 other tracers - endif - endif - enddo - - has_dependents = .false. - do nt = 1, ntrace - if (depend(nt) > 0) then - nt1 = depend(nt) - has_dependents(nt1) = .true. - if (nt1 > nt) then - write(nu_diag,*) & - 'Tracer nt2 =',nt,' depends on tracer nt1 =',nt1 - call abort_ice(subname// & - 'ERROR: remap transport: Must have nt2 > nt1') - endif - endif - enddo ! ntrace - - ! 
diagnostic output - if (my_task == master_task) then - write (nu_diag, *) 'tracer index depend type has_dependents' - nt = 1 - write(nu_diag,1000) 'hi ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - nt = 2 - write(nu_diag,1000) 'hs ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - k=2 - do nt = k+1, k+ntrcr - if (nt-k==nt_Tsfc) & - write(nu_diag,1000) 'nt_Tsfc ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_qice) & - write(nu_diag,1000) 'nt_qice ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_qsno) & - write(nu_diag,1000) 'nt_qsno ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_sice) & - write(nu_diag,1000) 'nt_sice ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_fbri) & - write(nu_diag,1000) 'nt_fbri ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_iage) & - write(nu_diag,1000) 'nt_iage ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_FY) & - write(nu_diag,1000) 'nt_FY ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_alvl) & - write(nu_diag,1000) 'nt_alvl ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_vlvl) & - write(nu_diag,1000) 'nt_vlvl ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_apnd) & - write(nu_diag,1000) 'nt_apnd ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_hpnd) & - write(nu_diag,1000) 'nt_hpnd ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_ipnd) & - write(nu_diag,1000) 'nt_ipnd ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_smice) & - write(nu_diag,1000) 'nt_smice ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_smliq) & - write(nu_diag,1000) 'nt_smliq ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_rhos) & - write(nu_diag,1000) 'nt_rhos ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_rsnw) & - write(nu_diag,1000) 'nt_rsnw ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_fsd) & - write(nu_diag,1000) 'nt_fsd ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_isosno) & - write(nu_diag,1000) 'nt_isosno ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_isoice) & - write(nu_diag,1000) 'nt_isoice ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - if (nt-k==nt_bgc_Nit) & - write(nu_diag,1000) 'nt_bgc_Nit ',nt,depend(nt),tracer_type(nt),& - has_dependents(nt) - enddo - write(nu_diag,*) ' ' - endif ! master_task - 1000 format (1x,a,2x,i6,2x,i6,2x,i4,4x,l4) - - if (trim(advection)=='remap') call init_remap ! grid quantities - - call ice_timer_stop(timer_advect) ! advection + ! define tracer dependency arrays + ! see comments in remapping routine + + depend(1:2) = 0 ! hice, hsno + tracer_type(1:2) = 1 ! no dependency + + k = 2 + + do nt = 1, ntrcr + depend(k+nt) = trcr_depend(nt) ! 0 for ice area tracers + ! 1 for ice volume tracers + ! 2 for snow volume tracers + tracer_type(k+nt) = 2 ! depends on 1 other tracer + if (trcr_depend(nt) == 0) then + tracer_type(k+nt) = 1 ! depends on no other tracers + elseif (trcr_depend(nt) > 2) then + if (trcr_depend(trcr_depend(nt)-2) > 0) then + tracer_type(k+nt) = 3 ! depends on 2 other tracers + endif + endif + enddo + + has_dependents = .false. + do nt = 1, ntrace + if (depend(nt) > 0) then + nt1 = depend(nt) + has_dependents(nt1) = .true. 
+ if (nt1 > nt) then + write(nu_diag,*) & + 'Tracer nt2 =',nt,' depends on tracer nt1 =',nt1 + call abort_ice(subname// & + 'ERROR: remap transport: Must have nt2 > nt1') + endif + endif + enddo ! ntrace + + ! diagnostic output + if (my_task == master_task) then + write (nu_diag, *) 'tracer index depend type has_dependents' + nt = 1 + write(nu_diag,*) ' hi ',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + nt = 2 + write(nu_diag,*) ' hs ',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + k=2 + do nt = k+1, k+ntrcr + if (nt-k==nt_Tsfc) & + write(nu_diag,*) 'nt_Tsfc',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_qice) & + write(nu_diag,*) 'nt_qice',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_qsno) & + write(nu_diag,*) 'nt_qsno',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_sice) & + write(nu_diag,*) 'nt_sice',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_fbri) & + write(nu_diag,*) 'nt_fbri',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_iage) & + write(nu_diag,*) 'nt_iage',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_FY) & + write(nu_diag,*) 'nt_FY ', nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_alvl) & + write(nu_diag,*) 'nt_alvl',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_vlvl) & + write(nu_diag,*) 'nt_vlvl',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_apnd) & + write(nu_diag,*) 'nt_apnd',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_hpnd) & + write(nu_diag,*) 'nt_hpnd',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_ipnd) & + write(nu_diag,*) 'nt_ipnd',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_fsd) & + write(nu_diag,*) 'nt_fsd ',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_bgc_Nit) & + write(nu_diag,*) 'nt_bgc_Nit',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + if (nt-k==nt_bgc_S) & + write(nu_diag,*) 'nt_bgc_S',nt,depend(nt),tracer_type(nt),& + has_dependents(nt) + enddo + endif ! master_task + + if (trim(advection)=='remap') call init_remap ! grid quantities + + call ice_timer_stop(timer_advect) ! advection end subroutine init_transport @@ -245,7 +223,7 @@ end subroutine init_transport ! ! This scheme preserves monotonicity of ice area and tracers. That is, ! it does not produce new extrema. It is second-order accurate in space, -! except where gradients are limited to preserve monotonicity. +! except where gradients are limited to preserve monotonicity. ! ! authors William H. Lipscomb, LANL @@ -258,8 +236,8 @@ subroutine transport_remap (dt) use ice_domain_size, only: ncat, max_blocks use ice_blocks, only: nx_block, ny_block, block, get_block, nghost use ice_state, only: aice0, aicen, vicen, vsnon, trcrn, & - uvel, vvel, bound_state, uvelE, vvelN - use ice_grid, only: tarea, grid_ice + uvel, vvel, bound_state + use ice_grid, only: tarea use ice_calendar, only: istep1 use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_advect, timer_bound @@ -270,105 +248,105 @@ subroutine transport_remap (dt) ! local variables - integer (kind=int_kind) :: & - iblk , & ! block index - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - n , & ! ice category index - nt, nt1, nt2 ! tracer indices - - real (kind=dbl_kind), dimension (nx_block,ny_block,0:ncat,max_blocks) :: & - aim , & ! mean ice category areas in each grid cell + integer (kind=int_kind) :: & + iblk ,&! 
block index + ilo,ihi,jlo,jhi,&! beginning and end of physical domain + n ,&! ice category index + nt, nt1, nt2 ! tracer indices + + real (kind=dbl_kind), & + dimension (nx_block,ny_block,0:ncat,max_blocks) :: & + aim ,&! mean ice category areas in each grid cell aimask ! = 1. if ice is present, = 0. otherwise - real (kind=dbl_kind), dimension (nx_block,ny_block,ntrace,ncat,max_blocks) :: & - trm , & ! mean tracer values in each grid cell + real (kind=dbl_kind), & + dimension (nx_block,ny_block,ntrace,ncat,max_blocks) :: & + trm ,&! mean tracer values in each grid cell trmask ! = 1. if tracer is present, = 0. otherwise - logical (kind=log_kind) :: & - ckflag ! if true, abort the model + logical (kind=log_kind) :: & + l_stop ! if true, abort the model - integer (kind=int_kind) :: & - istop, jstop ! indices of grid cell where model aborts + integer (kind=int_kind) :: & + istop, jstop ! indices of grid cell where model aborts - integer (kind=int_kind), dimension(0:ncat,max_blocks) :: & + integer (kind=int_kind), dimension(0:ncat,max_blocks) :: & icellsnc ! number of cells with ice - integer (kind=int_kind), dimension(nx_block*ny_block,0:ncat,max_blocks) :: & - indxinc, indxjnc ! compressed i/j indices + integer (kind=int_kind), & + dimension(nx_block*ny_block,0:ncat,max_blocks) :: & + indxinc, indxjnc ! compressed i/j indices integer (kind=int_kind) :: & - ntrcr ! number of tracers + ntrcr type (block) :: & - this_block ! block information for current block - + this_block ! block information for current block + ! variables related to optional bug checks - logical (kind=log_kind), parameter :: & + logical (kind=log_kind), parameter :: & + l_conservation_check = .false. ,&! if true, check conservation l_monotonicity_check = .false. ! if true, check monotonicity - real (kind=dbl_kind), dimension(0:ncat) :: & - asum_init , & ! initial global ice area + real (kind=dbl_kind), dimension(0:ncat) :: & + asum_init ,&! initial global ice area asum_final ! final global ice area - real (kind=dbl_kind), dimension(ntrace,ncat) :: & - atsum_init , & ! initial global ice area*tracer + real (kind=dbl_kind), dimension(ntrace,ncat) :: & + atsum_init ,&! initial global ice area*tracer atsum_final ! final global ice area*tracer - real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable :: & - tmin , & ! local min tracer - tmax ! local max tracer + real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable :: & + tmin ,&! local min tracer + tmax ! local max tracer - integer (kind=int_kind) :: & - alloc_error + integer (kind=int_kind) :: alloc_error real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & work1 - character(len=char_len_long) :: & - fieldid - character(len=*), parameter :: subname = '(transport_remap)' - call ice_timer_start(timer_advect) ! advection + call ice_timer_start(timer_advect) ! advection call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - !------------------------------------------------------------------- - ! Prepare for remapping. - ! Initialize, update ghost cells, fill tracer arrays. - !------------------------------------------------------------------- +!---!------------------------------------------------------------------- +!---! Prepare for remapping. +!---! Initialize, update ghost cells, fill tracer arrays. +!---!------------------------------------------------------------------- - ckflag = .false. + l_stop = .false. 
istop = 0 jstop = 0 - !------------------------------------------------------------------- - ! Compute open water area in each grid cell. - ! Note: An aggregate_area call is needed only if the open - ! water area has changed since the previous call. - ! Here we assume that aice0 is up to date. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Compute open water area in each grid cell. + ! Note: An aggregate_area call is needed only if the open + ! water area has changed since the previous call. + ! Here we assume that aice0 is up to date. + !------------------------------------------------------------------- -! !$OMP PARALLEL DO PRIVATE(i,j,iblk) SCHEDULE(runtime) +! !$OMP PARALLEL DO PRIVATE(i,j,iblk) ! do iblk = 1, nblocks ! do j = 1, ny_block ! do i = 1, nx_block ! call aggregate_area (ncat, ! aicen(i,j,:,iblk), & ! aice (i,j, iblk), & -! aice0(i,j, iblk)) +! aice0(i,j, iblk)) ! enddo ! enddo ! enddo ! !$OMP END PARALLEL DO - !------------------------------------------------------------------- - ! Ghost cell updates for state variables. - ! Commented out because ghost cells are updated after cleanup_itd. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Ghost cell updates for state variables. + ! Commented out because ghost cells are updated after cleanup_itd. + !------------------------------------------------------------------- ! call ice_timer_start(timer_bound) ! call ice_HaloUpdate (aice0, halo_info, & @@ -380,11 +358,11 @@ subroutine transport_remap (dt) ! call ice_timer_stop(timer_bound) - !------------------------------------------------------------------- - ! Ghost cell updates for ice velocity. - ! Commented out because ghost cell velocities are computed - ! in ice_dyn_evp. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Ghost cell updates for ice velocity. + ! Commented out because ghost cell velocities are computed + ! in ice_dyn_evp. + !------------------------------------------------------------------- ! call ice_timer_start(timer_bound) ! call ice_HaloUpdate (uvel, halo_info, & @@ -394,32 +372,33 @@ subroutine transport_remap (dt) ! call ice_timer_stop(timer_bound) - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) +! MHRI: CHECK THIS OMP ... maybe ok: Were trcrn(:,:,1:ntrcr,:,iblk) in my testcode + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks - !------------------------------------------------------------------- - ! Fill arrays with fields to be remapped. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Fill arrays with fields to be remapped. + !------------------------------------------------------------------- - call state_to_tracers(nx_block, ny_block, & - ntrcr, ntrace, & - aice0(:,:, iblk), aicen(:,:,:, iblk), & - trcrn(:,:,:,:,iblk), & - vicen(:,:,:, iblk), vsnon(:,:,:, iblk), & - aim (:,:,:, iblk), trm (:,:,:,:,iblk)) + call state_to_tracers(nx_block, ny_block, & + ntrcr, ntrace, & + aice0(:,:, iblk), aicen(:,:,:,iblk), & + trcrn(:,:,:,:,iblk), & + vicen(:,:,:,iblk), vsnon(:,:, :,iblk), & + aim (:,:,:,iblk), trm (:,:,:,:,iblk)) enddo !$OMP END PARALLEL DO - !------------------------------------------------------------------- - ! Optional conservation and monotonicity checks. 
- !------------------------------------------------------------------- +!---!------------------------------------------------------------------- +!---! Optional conservation and monotonicity checks. +!---!------------------------------------------------------------------- - if (conserv_check) then + if (l_conservation_check) then - !------------------------------------------------------------------- - ! Compute initial values of globally conserved quantities. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Compute initial values of globally conserved quantities. + !------------------------------------------------------------------- do n = 0, ncat asum_init(n) = global_sum(aim(:,:,n,:), distrb_info, & @@ -453,8 +432,8 @@ subroutine transport_remap (dt) enddo ! nt enddo ! n - endif ! conserv_check - + endif ! l_conservation_check + if (l_monotonicity_check) then allocate(tmin(nx_block,ny_block,ntrace,ncat,max_blocks), & @@ -467,35 +446,35 @@ subroutine transport_remap (dt) tmin(:,:,:,:,:) = c0 tmax(:,:,:,:,:) = c0 - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,n) SCHEDULE(runtime) + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,n) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - !------------------------------------------------------------------- - ! Compute masks. - ! Masks are used to prevent tracer values in cells without ice - ! from being used in the monotonicity check. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Compute masks. + ! Masks are used to prevent tracer values in cells without ice + ! from being used in the monotonicity check. + !------------------------------------------------------------------- call make_masks (nx_block, ny_block, & ilo, ihi, jlo, jhi, & nghost, ntrace, & has_dependents, & - icellsnc (:,iblk), & - indxinc(:,:,iblk), indxjnc(:,:, iblk), & - aim(:,:,:, iblk), aimask(:,:,:, iblk), & + icellsnc(:,iblk), & + indxinc(:,:,iblk), indxjnc(:,:,iblk), & + aim(:,:,:,iblk), aimask(:,:,:,iblk), & trm(:,:,:,:,iblk), trmask(:,:,:,:,iblk)) - !------------------------------------------------------------------- - ! Compute local max and min of tracer fields. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Compute local max and min of tracer fields. 
+ !------------------------------------------------------------------- do n = 1, ncat - call local_max_min & + call local_max_min & (nx_block, ny_block, & ilo, ihi, jlo, jhi, & trm (:,:,:,n,iblk), & @@ -512,18 +491,18 @@ subroutine transport_remap (dt) field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,n) SCHEDULE(runtime) + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,n) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do n = 1, ncat - call quasilocal_max_min (nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - tmin(:,:,:,n,iblk), & + call quasilocal_max_min (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + tmin(:,:,:,n,iblk), & tmax(:,:,:,n,iblk)) enddo enddo @@ -531,47 +510,39 @@ subroutine transport_remap (dt) endif ! l_monotonicity_check - !------------------------------------------------------------------- - ! Main remapping routine: Step ice area and tracers forward in time. - !------------------------------------------------------------------- - - if (grid_ice == 'CD' .or. grid_ice == 'C') then - call horizontal_remap (dt, ntrace, & - uvel (:,:,:), vvel (:,:,:), & - aim (:,:,:,:), trm(:,:,:,:,:), & - tracer_type, depend, & - has_dependents, integral_order, & - l_dp_midpt, & - uvelE (:,:,:), vvelN (:,:,:)) - else - call horizontal_remap (dt, ntrace, & - uvel (:,:,:), vvel (:,:,:), & - aim (:,:,:,:), trm(:,:,:,:,:), & - tracer_type, depend, & - has_dependents, integral_order, & - l_dp_midpt) - endif - - !------------------------------------------------------------------- - ! Given new fields, recompute state variables. - !------------------------------------------------------------------- - - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + !------------------------------------------------------------------- + ! Main remapping routine: Step ice area and tracers forward in time. + !------------------------------------------------------------------- + + call horizontal_remap (dt, ntrace, & + uvel (:,:,:), vvel (:,:,:), & + aim (:,:,:,:), trm (:,:,:,:,:), & + l_fixed_area, & + tracer_type, depend, & + has_dependents, integral_order, & + l_dp_midpt) + + !------------------------------------------------------------------- + ! Given new fields, recompute state variables. + !------------------------------------------------------------------- + +! MHRI: CHECK THIS OMP ... maybe ok: Were trcrn(:,:,1:ntrcr,:,iblk) in my testcode + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks - call tracers_to_state (nx_block, ny_block, & - ntrcr, ntrace, & - aim (:,:,:, iblk), trm (:,:,:,:,iblk), & - aice0(:,:, iblk), aicen(:,:,:, iblk), & - trcrn(:,:,:,:,iblk), & - vicen(:,:,:, iblk), vsnon(:,:, :,iblk)) + call tracers_to_state (nx_block, ny_block, & + ntrcr, ntrace, & + aim (:,:,:,iblk), trm (:,:,:,:,iblk), & + aice0(:,:, iblk), aicen(:,:,:,iblk), & + trcrn(:,:,:,:,iblk), & + vicen(:,:,:,iblk), vsnon(:,:, :,iblk)) enddo ! iblk !$OMP END PARALLEL DO - !------------------------------------------------------------------- - ! Ghost cell updates for state variables. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Ghost cell updates for state variables. 
+ !------------------------------------------------------------------- call ice_timer_start(timer_bound) @@ -581,16 +552,16 @@ subroutine transport_remap (dt) call ice_timer_stop(timer_bound) - !------------------------------------------------------------------- - ! Optional conservation and monotonicity checks - !------------------------------------------------------------------- +!---!------------------------------------------------------------------- +!---! Optional conservation and monotonicity checks +!---!------------------------------------------------------------------- - !------------------------------------------------------------------- - ! Compute final values of globally conserved quantities. - ! Check global conservation of area and area*tracers. (Optional) - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Compute final values of globally conserved quantities. + ! Check global conservation of area and area*tracers. (Optional) + !------------------------------------------------------------------- - if (conserv_check) then + if (l_conservation_check) then do n = 0, ncat asum_final(n) = global_sum(aim(:,:,n,:), distrb_info, & @@ -625,27 +596,25 @@ subroutine transport_remap (dt) enddo ! n if (my_task == master_task) then - fieldid = subname//':000' - call global_conservation (ckflag, fieldid, & + call global_conservation (l_stop, & asum_init(0), asum_final(0)) - if (ckflag) then - write (nu_diag,*) 'istep1, my_task =', & - istep1, my_task + if (l_stop) then + write (nu_diag,*) 'istep1, my_task, iblk =', & + istep1, my_task, iblk write (nu_diag,*) 'transport: conservation error, cat 0' call abort_ice(subname//'ERROR: conservation error1') endif - do n = 1, ncat - write(fieldid,'(a,i3.3)') subname,n + do n = 1, ncat call global_conservation & - (ckflag, fieldid, & + (l_stop, & asum_init(n), asum_final(n), & atsum_init(:,n), atsum_final(:,n)) - if (ckflag) then - write (nu_diag,*) 'istep1, my_task, cat =', & - istep1, my_task, n + if (l_stop) then + write (nu_diag,*) 'istep1, my_task, iblk, cat =', & + istep1, my_task, iblk, n write (nu_diag,*) 'transport: conservation error, cat ',n call abort_ice(subname//'ERROR: conservation error2') endif @@ -653,37 +622,38 @@ subroutine transport_remap (dt) endif ! my_task = master_task - endif ! conserv_check + endif ! l_conservation_check - !------------------------------------------------------------------- - ! Check tracer monotonicity. (Optional) - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Check tracer monotonicity. (Optional) + !------------------------------------------------------------------- if (l_monotonicity_check) then - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,n,ckflag,istop,jstop) SCHEDULE(runtime) + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block,n,l_stop,istop,jstop) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - ckflag = .false. + l_stop = .false. 
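!-----------------------------------------------------------------------
! Editor's sketch (not CICE code): the pass/fail criterion applied by
! global_conservation above is relative, i.e. the run aborts only when
! abs(asum_final - asum_init)/asum_init exceeds puny.  The value of puny
! below is an arbitrary stand-in for the Icepack parameter, and the
! sample sums are invented.
!-----------------------------------------------------------------------
      program conservation_check_sketch
        implicit none
        integer, parameter :: dk = selected_real_kind(13)
        real (kind=dk), parameter :: puny = 1.0e-11_dk  ! stand-in tolerance
        real (kind=dk) :: asum_init, asum_final
        logical :: flag
        asum_init  = 3.14159_dk
        asum_final = asum_init*(1.0_dk + 1.0e-13_dk)    ! roundoff-level change
        flag = .false.
        if (asum_init > puny) then
           if (abs(asum_final - asum_init)/asum_init > puny) flag = .true.
        endif
        write (*,*) 'conservation error flagged? ', flag
      end program conservation_check_sketch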
istop = 0 jstop = 0 do n = 1, ncat - call check_monotonicity (nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - tmin(:,:,:,n,iblk), tmax(:,:,:,n,iblk), & - aim (:,:, n,iblk), trm (:,:,:,n,iblk), & - ckflag, & - istop, jstop) - - if (ckflag) then + call check_monotonicity & + (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + tmin(:,:,:,n,iblk), tmax(:,:,:,n,iblk), & + aim (:,:, n,iblk), trm (:,:,:,n,iblk), & + l_stop, & + istop, jstop) + + if (l_stop) then write (nu_diag,*) 'istep1, my_task, iblk, cat =', & istep1, my_task, iblk, n - call diagnostic_abort(istop,jstop,iblk,' monotonicity error') + call abort_ice(subname//'ERROR: monotonicity error') endif enddo ! n @@ -695,16 +665,17 @@ subroutine transport_remap (dt) endif ! l_monotonicity_check - call ice_timer_stop(timer_advect) ! advection - + call ice_timer_stop(timer_advect) ! advection + end subroutine transport_remap !======================================================================= -! +!deprecate upwind! ! Computes the transport equations for one timestep using upwind. Sets ! several fields into a work array and passes it to upwind routine. - subroutine transport_upwind (dt) +!deprecate upwind + subroutine transport_upwind_deprecated (dt) use ice_boundary, only: ice_HaloUpdate use ice_blocks, only: nx_block, ny_block, block, get_block, nx_block, ny_block @@ -712,37 +683,37 @@ subroutine transport_upwind (dt) use ice_domain_size, only: ncat, max_blocks use ice_state, only: aice0, aicen, vicen, vsnon, trcrn, & uvel, vvel, trcr_depend, bound_state, trcr_base, & - n_trcr_strata, nt_strata, uvelE, vvelN - use ice_flux, only: Tf - use ice_grid, only: HTE, HTN, tarea, tmask, grid_ice + n_trcr_strata, nt_strata + use ice_grid, only: HTE, HTN, tarea use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_bound, timer_advect - real (kind=dbl_kind), intent(in) :: & + real (kind=dbl_kind), intent(in) :: & dt ! time step ! local variables - integer (kind=int_kind) :: & - ntrcr , & ! + integer (kind=int_kind) :: & + ntrcr, & ! narr ! max number of state variable arrays - integer (kind=int_kind) :: & - i, j, iblk , & ! horizontal indices + integer (kind=int_kind) :: & + i, j, iblk ,&! horizontal indices ilo,ihi,jlo,jhi ! beginning and end of physical domain - real (kind=dbl_kind), dimension (nx_block,ny_block,nblocks) :: & + real (kind=dbl_kind), dimension (nx_block,ny_block,nblocks) :: & uee, vnn ! cell edge velocities - real (kind=dbl_kind), dimension (:,:,:,:), allocatable :: & + real (kind=dbl_kind), & + dimension (:,:,:,:), allocatable :: & works ! work array type (block) :: & - this_block ! block information for current block + this_block ! block information for current block character(len=*), parameter :: subname = '(transport_upwind)' - call ice_timer_start(timer_advect) ! advection + call ice_timer_start(timer_advect) ! advection call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_warnings_flush(nu_diag) @@ -753,105 +724,94 @@ subroutine transport_upwind (dt) allocate (works(nx_block,ny_block,narr,max_blocks)) - !------------------------------------------------------------------- - ! Get ghost cell values of state variables. - ! (Assume velocities are already known for ghost cells, also.) - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Get ghost cell values of state variables. + ! (Assume velocities are already known for ghost cells, also.) + !------------------------------------------------------------------- ! 
call bound_state (aicen, & ! vicen, vsnon, & ! ntrcr, trcrn) -! call ice_timer_start(timer_bound) -! call ice_HaloUpdate (uvel, halo_info, & -! field_loc_NEcorner, field_type_vector) -! call ice_HaloUpdate (vvel, halo_info, & -! field_loc_NEcorner, field_type_vector) -! call ice_timer_stop(timer_bound) - - !------------------------------------------------------------------- - ! Average corner velocities to edges. - !------------------------------------------------------------------- - if (grid_ice == 'CD' .or. grid_ice == 'C') then - uee(:,:,:)=uvelE(:,:,:) - vnn(:,:,:)=vvelN(:,:,:) - else - !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) SCHEDULE(runtime) - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - - do j = jlo, jhi - do i = ilo, ihi - uee(i,j,iblk) = p5*(uvel(i,j,iblk) + uvel(i ,j-1,iblk)) - vnn(i,j,iblk) = p5*(vvel(i,j,iblk) + vvel(i-1,j ,iblk)) - enddo - enddo - enddo - !$OMP END PARALLEL DO - - call ice_timer_start(timer_bound) - call ice_HaloUpdate (uee, halo_info, & - field_loc_Eface, field_type_vector) - call ice_HaloUpdate (vnn, halo_info, & - field_loc_Nface, field_type_vector) - call ice_timer_stop(timer_bound) - endif - - !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) SCHEDULE(runtime) + !------------------------------------------------------------------- + ! Average corner velocities to edges. + !------------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - !----------------------------------------------------------------- - ! fill work arrays with fields to be advected - !----------------------------------------------------------------- - - call state_to_work (nx_block, ny_block, & - ntrcr, & - narr, trcr_depend, & - aicen (:,:, :,iblk), trcrn (:,:,:,:,iblk), & - vicen (:,:, :,iblk), vsnon (:,:, :,iblk), & - aice0 (:,:, iblk), works (:,:, :,iblk)) - - !----------------------------------------------------------------- - ! advect - !----------------------------------------------------------------- - - call upwind_field (nx_block, ny_block, & - ilo, ihi, jlo, jhi, & - dt, & - narr, works(:,:,:,iblk), & - uee (:,:,iblk), vnn (:,:,iblk), & - HTE (:,:,iblk), HTN (:,:,iblk), & - tarea(:,:,iblk)) - - !----------------------------------------------------------------- - ! convert work arrays back to state variables - !----------------------------------------------------------------- - - call work_to_state (nx_block, ny_block, & - ntrcr, narr, & - trcr_depend(:), trcr_base(:,:), & - n_trcr_strata(:), nt_strata(:,:), & - tmask(:,:, iblk), Tf (:,:,iblk), & - aicen(:,:, :,iblk), trcrn (:,:,:,:,iblk), & - vicen(:,:, :,iblk), vsnon (:,:, :,iblk), & - aice0(:,:, iblk), works (:,:, :,iblk)) - - enddo ! 
iblk + do j = jlo, jhi + do i = ilo, ihi + uee(i,j,iblk) = p5*(uvel(i,j,iblk) + uvel(i,j-1,iblk)) + vnn(i,j,iblk) = p5*(vvel(i,j,iblk) + vvel(i-1,j,iblk)) + enddo + enddo + enddo !$OMP END PARALLEL DO + call ice_timer_start(timer_bound) + call ice_HaloUpdate (uee, halo_info, & + field_loc_Eface, field_type_vector) + call ice_HaloUpdate (vnn, halo_info, & + field_loc_Nface, field_type_vector) + call ice_timer_stop(timer_bound) + +!deprecate upwind !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) +!deprecate upwind do iblk = 1, nblocks +!deprecate upwind this_block = get_block(blocks_ice(iblk),iblk) +!deprecate upwind ilo = this_block%ilo +!deprecate upwind ihi = this_block%ihi +!deprecate upwind jlo = this_block%jlo +!deprecate upwind jhi = this_block%jhi + + !----------------------------------------------------------------- + ! fill work arrays with fields to be advected + !----------------------------------------------------------------- + +!deprecate upwind +!deprecate upwind call state_to_work (nx_block, ny_block, & +!deprecate upwind ntrcr, & +!deprecate upwind narr, trcr_depend, & +!deprecate upwind aicen (:,:, :,iblk), trcrn (:,:,:,:,iblk), & +!deprecate upwind vicen (:,:, :,iblk), vsnon (:,:, :,iblk), & +!deprecate upwind aice0 (:,:, iblk), works (:,:, :,iblk)) + + !----------------------------------------------------------------- + ! advect + !----------------------------------------------------------------- + +!deprecate upwind call upwind_field (nx_block, ny_block, & +!deprecate upwind ilo, ihi, jlo, jhi, & +!deprecate upwind dt, & +!deprecate upwind narr, works(:,:,:,iblk), & +!deprecate upwind uee(:,:,iblk), vnn (:,:,iblk), & +!deprecate upwind HTE(:,:,iblk), HTN (:,:,iblk), & +!deprecate upwind tarea(:,:,iblk)) + + !----------------------------------------------------------------- + ! convert work arrays back to state variables + !----------------------------------------------------------------- + +!deprecate upwind call work_to_state (nx_block, ny_block, & +!deprecate upwind ntrcr, narr, & +!deprecate upwind trcr_depend(:), trcr_base(:,:), & +!deprecate upwind n_trcr_strata(:), nt_strata(:,:), & +!deprecate upwind aicen(:,:, :,iblk), trcrn (:,:,:,:,iblk), & +!deprecate upwind vicen(:,:, :,iblk), vsnon (:,:, :,iblk), & +!deprecate upwind aice0(:,:, iblk), works (:,:, :,iblk)) + +!deprecate upwind enddo ! iblk +!deprecate upwind !$OMP END PARALLEL DO + deallocate (works) - !------------------------------------------------------------------- - ! Ghost cell updates for state variables. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Ghost cell updates for state variables. + !------------------------------------------------------------------- call ice_timer_start(timer_bound) @@ -861,9 +821,10 @@ subroutine transport_upwind (dt) call ice_timer_stop(timer_bound) - call ice_timer_stop(timer_advect) ! advection + call ice_timer_stop(timer_advect) ! advection - end subroutine transport_upwind + end subroutine transport_upwind_deprecated +!deprecate upwind !======================================================================= ! The next few subroutines (through check_monotonicity) are called @@ -871,7 +832,7 @@ end subroutine transport_upwind !======================================================================= ! ! Fill ice area and tracer arrays. -! Assume that the advected tracers are hicen, hsnon, trcrn, +! Assume that the advected tracers are hicen, hsnon, trcrn, ! 
qicen(1:nilyr), and qsnon(1:nslyr). ! This subroutine must be modified if a different set of tracers ! is to be transported. The rule for ordering tracers @@ -890,47 +851,47 @@ subroutine state_to_tracers (nx_block, ny_block, & use ice_domain_size, only: ncat, nslyr integer (kind=int_kind), intent(in) :: & - nx_block, ny_block, & ! block dimensions - ntrcr , & ! number of tracers in use - ntrace ! number of tracers in use incl. hi, hs + nx_block, ny_block, & ! block dimensions + ntrcr , & ! number of tracers in use + ntrace ! number of tracers in use incl. hi, hs real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & - aice0 ! fractional open water area + aice0 ! fractional open water area real (kind=dbl_kind), dimension (nx_block,ny_block,ncat), intent(in) :: & - aicen , & ! fractional ice area - vicen , & ! volume per unit area of ice (m) - vsnon ! volume per unit area of snow (m) + aicen ,&! fractional ice area + vicen ,&! volume per unit area of ice (m) + vsnon ! volume per unit area of snow (m) real (kind=dbl_kind), dimension (nx_block,ny_block,ntrcr,ncat), intent(in) :: & - trcrn ! ice area tracers + trcrn ! ice area tracers real (kind=dbl_kind), dimension (nx_block,ny_block,0:ncat), intent(out) :: & - aim ! mean ice area in each grid cell + aim ! mean ice area in each grid cell real (kind=dbl_kind), dimension (nx_block,ny_block,ntrace,ncat), intent(out) :: & - trm ! mean tracer values in each grid cell + trm ! mean tracer values in each grid cell ! local variables integer (kind=int_kind) :: & - nt_qsno , & ! - i, j, n , & ! standard indices - it, kt , & ! tracer indices - ij ! combined i/j index + nt_qsno ,&! + i, j, n ,&! standard indices + it, kt ,&! tracer indices + ij ! combined i/j index real (kind=dbl_kind) :: & - puny , & ! - rhos , & ! snow density (km/m^3) - Lfresh , & ! latent heat of melting fresh ice (J/kg) - w1 ! work variable + puny ,&! + rhos ,&! + Lfresh ,&! + w1 ! work variable integer (kind=int_kind), dimension(nx_block*ny_block,0:ncat) :: & - indxi , & ! compressed i/j indices - indxj + indxi ,&! compressed i/j indices + indxj integer (kind=int_kind), dimension(0:ncat) :: & - icells ! number of cells with ice + icells ! number of cells with ice character(len=*), parameter :: subname = '(state_to_tracers)' @@ -947,9 +908,9 @@ subroutine state_to_tracers (nx_block, ny_block, & trm(:,:,:,n) = c0 - !------------------------------------------------------------------- - ! Find grid cells where ice is present and fill area array. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Find grid cells where ice is present and fill area array. + !------------------------------------------------------------------- icells(n) = 0 do j = 1, ny_block @@ -963,13 +924,13 @@ subroutine state_to_tracers (nx_block, ny_block, & endif ! aim > puny enddo enddo - - !------------------------------------------------------------------- - ! Fill tracer array - ! Note: If aice > 0, then hice > 0, but we can have hsno = 0. - ! Alse note: We transport qice*nilyr rather than qice, so as to - ! avoid extra operations here and in tracers_to_state. - !------------------------------------------------------------------- + + !------------------------------------------------------------------- + ! Fill tracer array + ! Note: If aice > 0, then hice > 0, but we can have hsno = 0. + ! Alse note: We transport qice*nilyr rather than qice, so as to + ! avoid extra operations here and in tracers_to_state. 
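!-----------------------------------------------------------------------
! Editor's sketch (not CICE code): to make the packing convention above
! concrete, thickness tracers are built by dividing the volume state
! variables by the category area wherever aicen exceeds puny, and the
! volumes are recovered after transport by multiplying back.  The round
! trip is shown for one category in isolation; the numbers and the puny
! value are placeholders, and this program is not state_to_tracers or
! tracers_to_state themselves.
!-----------------------------------------------------------------------
      program tracer_packing_sketch
        implicit none
        integer, parameter :: dk = selected_real_kind(13)
        real (kind=dk), parameter :: puny = 1.0e-11_dk  ! stand-in threshold
        real (kind=dk) :: aicen, vicen, vsnon, hice, hsno, vice2, vsno2
        aicen = 0.7_dk        ! toy ice area fraction in one category
        vicen = 1.4_dk        ! ice volume per unit grid-cell area (m)
        vsnon = 0.07_dk       ! snow volume per unit grid-cell area (m)
        hice  = 0.0_dk
        hsno  = 0.0_dk
        if (aicen > puny) then          ! pack thicknesses as area tracers
           hice = vicen/aicen
           hsno = vsnon/aicen
        endif
        vice2 = aicen*hice              ! unpack after transport
        vsno2 = aicen*hsno
        write (*,*) 'hice, hsno           =', hice, hsno
        write (*,*) 'recovered vice, vsno =', vice2, vsno2
      end program tracer_packing_sketch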
+ !------------------------------------------------------------------- do ij = 1, icells(n) i = indxi(ij,n) @@ -996,7 +957,7 @@ subroutine state_to_tracers (nx_block, ny_block, & endif enddo enddo ! ncat - + end subroutine state_to_tracers !======================================================================= @@ -1015,42 +976,42 @@ subroutine tracers_to_state (nx_block, ny_block, & use ice_domain_size, only: ncat, nslyr integer (kind=int_kind), intent(in) :: & - nx_block, ny_block, & ! block dimensions - ntrcr , & ! number of tracers in use - ntrace ! number of tracers in use incl. hi, hs + nx_block, ny_block, & ! block dimensions + ntrcr , & ! number of tracers in use + ntrace ! number of tracers in use incl. hi, hs real (kind=dbl_kind), dimension (nx_block,ny_block,0:ncat), intent(in) :: & - aim ! fractional ice area + aim ! fractional ice area real (kind=dbl_kind), dimension (nx_block,ny_block,ntrace,ncat), intent(in) :: & - trm ! mean tracer values in each grid cell + trm ! mean tracer values in each grid cell real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & - aice0 ! fractional ice area + aice0 ! fractional ice area real (kind=dbl_kind), dimension (nx_block,ny_block,ncat), intent(inout) :: & - aicen , & ! fractional ice area - vicen , & ! volume per unit area of ice (m) - vsnon ! volume per unit area of snow (m) + aicen ,&! fractional ice area + vicen ,&! volume per unit area of ice (m) + vsnon ! volume per unit area of snow (m) real (kind=dbl_kind), dimension (nx_block,ny_block,ntrcr,ncat), intent(inout) :: & - trcrn ! tracers + trcrn ! tracers ! local variables integer (kind=int_kind) :: & - nt_qsno , & ! - i, j, n , & ! standard indices - it, kt , & ! tracer indices - icells , & ! number of cells with ice - ij + nt_qsno ,&! + i, j, n ,&! standard indices + it, kt ,&! tracer indices + icells ,&! number of cells with ice + ij real (kind=dbl_kind) :: & - rhos , & ! - Lfresh ! + rhos, & + Lfresh integer (kind=int_kind), dimension (nx_block*ny_block) :: & - indxi, indxj ! compressed indices + indxi, indxj ! compressed indices character(len=*), parameter :: subname = '(tracers_to_state)' @@ -1064,20 +1025,20 @@ subroutine tracers_to_state (nx_block, ny_block, & do n = 1, ncat - icells = 0 - do j = 1, ny_block - do i = 1, nx_block - if (aim(i,j,n) > c0) then - icells = icells + 1 - indxi(icells) = i - indxj(icells) = j - endif - enddo - enddo + icells = 0 + do j = 1, ny_block + do i = 1, nx_block + if (aim(i,j,n) > c0) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif + enddo + enddo - !------------------------------------------------------------------- - ! Compute state variables. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Compute state variables. + !------------------------------------------------------------------- do ij = 1, icells i = indxi(ij) @@ -1095,7 +1056,7 @@ subroutine tracers_to_state (nx_block, ny_block, & j = indxj(ij) trcrn(i,j,it,n) = trm(i,j,kt+it,n) - rhos*Lfresh ! snow enthalpy enddo - else + else do ij = 1, icells i = indxi(ij) j = indxj(ij) @@ -1114,32 +1075,29 @@ end subroutine tracers_to_state ! ! author William H. Lipscomb, LANL - subroutine global_conservation (ckflag, fieldid, & + subroutine global_conservation (l_stop, & asum_init, asum_final, & atsum_init, atsum_final) - character(len=*), intent(in) :: & - fieldid ! field information string - real (kind=dbl_kind), intent(in) :: & - asum_init , & ! 
initial global ice area + asum_init ,&! initial global ice area asum_final ! final global ice area real (kind=dbl_kind), dimension(ntrace), intent(in), optional :: & - atsum_init, & ! initial global ice area*tracer + atsum_init ,&! initial global ice area*tracer atsum_final ! final global ice area*tracer logical (kind=log_kind), intent(inout) :: & - ckflag ! if true, abort on return + l_stop ! if true, abort on return ! local variables integer (kind=int_kind) :: & - nt ! tracer index + nt ! tracer index real (kind=dbl_kind) :: & - puny , & ! - diff ! difference between initial and final values + puny ,&! + diff ! difference between initial and final values character(len=*), parameter :: subname = '(global_conservation)' @@ -1151,32 +1109,35 @@ subroutine global_conservation (ckflag, fieldid, & if (asum_init > puny) then diff = asum_final - asum_init if (abs(diff/asum_init) > puny) then - ckflag = .true. + l_stop = .true. write (nu_diag,*) - write (nu_diag,*) subname,'Ice area conserv error ', trim(fieldid) - write (nu_diag,*) subname,' Initial global area =', asum_init - write (nu_diag,*) subname,' Final global area =', asum_final - write (nu_diag,*) subname,' Fractional error =', abs(diff)/asum_init - write (nu_diag,*) subname,' asum_final-asum_init =', diff + write (nu_diag,*) 'Ice area conserv error' + write (nu_diag,*) 'Initial global area =', asum_init + write (nu_diag,*) 'Final global area =', asum_final + write (nu_diag,*) 'Fractional error =', abs(diff)/asum_init + write (nu_diag,*) 'asum_final-asum_init =', diff endif endif if (present(atsum_init)) then - do nt = 1, ntrace - if (abs(atsum_init(nt)) > puny) then - diff = atsum_final(nt) - atsum_init(nt) - if (abs(diff/atsum_init(nt)) > puny) then - ckflag = .true. - write (nu_diag,*) - write (nu_diag,*) subname,'Ice area*tracer conserv error ', trim(fieldid),nt - write (nu_diag,*) subname,' Tracer index =', nt - write (nu_diag,*) subname,' Initial global area*tracer =', atsum_init(nt) - write (nu_diag,*) subname,' Final global area*tracer =', atsum_final(nt) - write (nu_diag,*) subname,' Fractional error =', abs(diff)/atsum_init(nt) - write (nu_diag,*) subname,' atsum_final-atsum_init =', diff - endif + do nt = 1, ntrace + if (abs(atsum_init(nt)) > puny) then + diff = atsum_final(nt) - atsum_init(nt) + if (abs(diff/atsum_init(nt)) > puny) then + l_stop = .true. + write (nu_diag,*) + write (nu_diag,*) 'area*tracer conserv error' + write (nu_diag,*) 'tracer index =', nt + write (nu_diag,*) 'Initial global area*tracer =', & + atsum_init(nt) + write (nu_diag,*) 'Final global area*tracer =', & + atsum_final(nt) + write (nu_diag,*) 'Fractional error =', & + abs(diff)/atsum_init(nt) + write (nu_diag,*) 'atsum_final-atsum_init =', diff endif - enddo + endif + enddo endif ! present(atsum_init) end subroutine global_conservation @@ -1186,7 +1147,7 @@ end subroutine global_conservation ! At each grid point, compute the local max and min of a scalar ! field phi: i.e., the max and min values in the nine-cell region ! consisting of the home cell and its eight neighbors. -! +! ! To extend to the neighbors of the neighbors (25 cells in all), ! follow this call with a call to quasilocal_max_min. ! @@ -1199,33 +1160,33 @@ subroutine local_max_min (nx_block, ny_block, & aimask, trmask) integer (kind=int_kind), intent(in) :: & - nx_block, ny_block, & ! block dimensions - ilo,ihi,jlo,jhi ! beginning and end of physical domain + nx_block, ny_block,&! block dimensions + ilo,ihi,jlo,jhi ! 
beginning and end of physical domain real (kind=dbl_kind), intent(in), dimension(nx_block,ny_block) :: & - aimask ! ice area mask + aimask ! ice area mask real (kind=dbl_kind), intent(in), dimension (nx_block,ny_block,ntrace) :: & - trm , & ! tracer fields - trmask ! tracer mask + trm ,&! tracer fields + trmask ! tracer mask real (kind=dbl_kind), intent(out), dimension (nx_block,ny_block,ntrace) :: & - tmin , & ! local min tracer - tmax ! local max tracer + tmin ,&! local min tracer + tmax ! local max tracer ! local variables integer (kind=int_kind) :: & - i, j , & ! horizontal indices - nt, nt1 ! tracer indices + i, j ,&! horizontal indices + nt, nt1 ! tracer indices real (kind=dbl_kind), dimension(nx_block,ny_block) :: & - phimask ! aimask or trmask, as appropriate + phimask ! aimask or trmask, as appropriate real (kind=dbl_kind) :: & - phi_nw, phi_n, phi_ne , & ! field values in 8 neighbor cells - phi_w , phi_e , & - phi_sw, phi_s, phi_se + phi_nw, phi_n, phi_ne ,&! field values in 8 neighbor cells + phi_w, phi_e ,& + phi_sw, phi_s, phi_se character(len=*), parameter :: subname = '(local_max_min)' @@ -1250,46 +1211,46 @@ subroutine local_max_min (nx_block, ny_block, & endif - !----------------------------------------------------------------------- - ! Store values of trm in the 8 neighbor cells. - ! If aimask = 1, use the true value; otherwise use the home cell value - ! so that non-physical values of phi do not contribute to the gradient. - !----------------------------------------------------------------------- +!----------------------------------------------------------------------- +! Store values of trm in the 8 neighbor cells. +! If aimask = 1, use the true value; otherwise use the home cell value +! so that non-physical values of phi do not contribute to the gradient. +!----------------------------------------------------------------------- do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi + + phi_nw = phimask(i-1,j+1) * trm(i-1,j+1,nt) & + + (c1-phimask(i-1,j+1))* trm(i, j, nt) + phi_n = phimask(i, j+1) * trm(i, j+1,nt) & + + (c1-phimask(i, j+1))* trm(i, j, nt) + phi_ne = phimask(i+1,j+1) * trm(i+1,j+1,nt) & + + (c1-phimask(i+1,j+1))* trm(i, j, nt) + phi_w = phimask(i-1,j) * trm(i-1,j, nt) & + + (c1-phimask(i-1,j)) * trm(i, j, nt) + phi_e = phimask(i+1,j) * trm(i+1,j, nt) & + + (c1-phimask(i+1,j)) * trm(i, j, nt) + phi_sw = phimask(i-1,j-1) * trm(i-1,j-1,nt) & + + (c1-phimask(i-1,j-1))* trm(i, j, nt) + phi_s = phimask(i, j-1) * trm(i, j-1,nt) & + + (c1-phimask(i, j-1))* trm(i, j, nt) + phi_se = phimask(i+1,j-1) * trm(i+1,j-1,nt) & + + (c1-phimask(i+1,j-1))* trm(i, j, nt) + +!----------------------------------------------------------------------- +! Compute the minimum and maximum among the nine local cells. +!----------------------------------------------------------------------- + + tmax(i,j,nt) = max (phi_nw, phi_n, phi_ne, phi_w, & + trm(i,j,nt), phi_e, phi_sw, phi_s, phi_se) + + tmin(i,j,nt) = min (phi_nw, phi_n, phi_ne, phi_w, & + trm(i,j,nt), phi_e, phi_sw, phi_s, phi_se) + + enddo ! i + enddo ! 
j - phi_nw = phimask(i-1,j+1) * trm(i-1,j+1,nt) & - + (c1-phimask(i-1,j+1))* trm(i, j, nt) - phi_n = phimask(i, j+1) * trm(i, j+1,nt) & - + (c1-phimask(i, j+1))* trm(i, j, nt) - phi_ne = phimask(i+1,j+1) * trm(i+1,j+1,nt) & - + (c1-phimask(i+1,j+1))* trm(i, j, nt) - phi_w = phimask(i-1,j) * trm(i-1,j, nt) & - + (c1-phimask(i-1,j)) * trm(i, j, nt) - phi_e = phimask(i+1,j) * trm(i+1,j, nt) & - + (c1-phimask(i+1,j)) * trm(i, j, nt) - phi_sw = phimask(i-1,j-1) * trm(i-1,j-1,nt) & - + (c1-phimask(i-1,j-1))* trm(i, j, nt) - phi_s = phimask(i, j-1) * trm(i, j-1,nt) & - + (c1-phimask(i, j-1))* trm(i, j, nt) - phi_se = phimask(i+1,j-1) * trm(i+1,j-1,nt) & - + (c1-phimask(i+1,j-1))* trm(i, j, nt) - - !----------------------------------------------------------------------- - ! Compute the minimum and maximum among the nine local cells. - !----------------------------------------------------------------------- - - tmax(i,j,nt) = max (phi_nw, phi_n, phi_ne, phi_w, & - trm(i,j,nt), phi_e, phi_sw, phi_s, phi_se) - - tmin(i,j,nt) = min (phi_nw, phi_n, phi_ne, phi_w, & - trm(i,j,nt), phi_e, phi_sw, phi_s, phi_se) - - enddo ! i - enddo ! j - - enddo ! nt + enddo ! nt end subroutine local_max_min @@ -1306,18 +1267,18 @@ subroutine quasilocal_max_min (nx_block, ny_block, & tmin, tmax) integer (kind=int_kind), intent(in) :: & - nx_block, ny_block, & ! block dimensions - ilo,ihi,jlo,jhi ! beginning and end of physical domain + nx_block, ny_block,&! block dimensions + ilo,ihi,jlo,jhi ! beginning and end of physical domain real (kind=dbl_kind), intent(inout), dimension (nx_block,ny_block,ntrace) :: & - tmin , & ! local min tracer - tmax ! local max tracer + tmin ,&! local min tracer + tmax ! local max tracer ! local variables integer (kind=int_kind) :: & - i, j , & ! horizontal indices - nt ! tracer index + i, j ,&! horizontal indices + nt ! tracer index character(len=*), parameter :: subname = '(quasilocal_max_min)' @@ -1354,41 +1315,41 @@ subroutine check_monotonicity (nx_block, ny_block, & ilo, ihi, jlo, jhi, & tmin, tmax, & aim, trm, & - ckflag, & + l_stop, & istop, jstop) integer (kind=int_kind), intent(in) :: & - nx_block, ny_block, & ! block dimensions - ilo,ihi,jlo,jhi ! beginning and end of physical domain + nx_block, ny_block,&! block dimensions + ilo,ihi,jlo,jhi ! beginning and end of physical domain real (kind=dbl_kind), intent(in), dimension (nx_block,ny_block) :: & - aim ! new ice area + aim ! new ice area real (kind=dbl_kind), intent(in), dimension (nx_block,ny_block,ntrace) :: & - trm ! new tracers + trm ! new tracers real (kind=dbl_kind), intent(in), dimension (nx_block,ny_block,ntrace) :: & - tmin , & ! local min tracer - tmax ! local max tracer + tmin ,&! local min tracer + tmax ! local max tracer logical (kind=log_kind), intent(inout) :: & - ckflag ! if true, abort on return + l_stop ! if true, abort on return integer (kind=int_kind), intent(inout) :: & - istop, jstop ! indices of grid cell where model aborts + istop, jstop ! indices of grid cell where model aborts ! local variables integer (kind=int_kind) :: & - i, j , & ! horizontal indices - nt, nt1, nt2 ! tracer indices + i, j ,&! horizontal indices + nt, nt1, nt2 ! tracer indices real (kind=dbl_kind) :: & - puny , & ! - w1, w2 ! work variables + puny ,&! + w1, w2 ! work variables logical (kind=log_kind), dimension (nx_block, ny_block) :: & - l_check ! if true, check monotonicity + l_check ! 
if true, check monotonicity character(len=*), parameter :: subname = '(check_monotonicity)' @@ -1399,15 +1360,15 @@ subroutine check_monotonicity (nx_block, ny_block, & do nt = 1, ntrace - !------------------------------------------------------------------- - ! Load logical array to identify tracers that need checking. - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Load logical array to identify tracers that need checking. + !------------------------------------------------------------------- if (tracer_type(nt)==1) then ! does not depend on another tracer do j = jlo, jhi do i = ilo, ihi - if (aim(i,j) > puny) then + if (aim(i,j) > puny) then l_check(i,j) = .true. else l_check(i,j) = .false. @@ -1444,9 +1405,9 @@ subroutine check_monotonicity (nx_block, ny_block, & enddo endif - !------------------------------------------------------------------- - ! Make sure new values lie between tmin and tmax - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Make sure new values lie between tmin and tmax + !------------------------------------------------------------------- do j = jlo, jhi do i = ilo, ihi @@ -1456,7 +1417,7 @@ subroutine check_monotonicity (nx_block, ny_block, & w1 = max(c1, abs(tmin(i,j,nt))) w2 = max(c1, abs(tmax(i,j,nt))) if (trm(i,j,nt) < tmin(i,j,nt)-w1*puny) then - ckflag = .true. + l_stop = .true. istop = i jstop = j write (nu_diag,*) ' ' @@ -1466,7 +1427,7 @@ subroutine check_monotonicity (nx_block, ny_block, & write (nu_diag,*) 'tmin =' , tmin(i,j,nt) write (nu_diag,*) 'ice area =' , aim(i,j) elseif (trm(i,j,nt) > tmax(i,j,nt)+w2*puny) then - ckflag = .true. + l_stop = .true. istop = i jstop = j write (nu_diag,*) ' ' @@ -1486,12 +1447,12 @@ subroutine check_monotonicity (nx_block, ny_block, & end subroutine check_monotonicity !======================================================================= -! The remaining subroutines are called by transport_upwind. +!deprecate upwind! The remaining subroutines are called by transport_upwind. !======================================================================= ! ! Fill work array with state variables in preparation for upwind transport - - subroutine state_to_work (nx_block, ny_block, & +!deprecate upwind + subroutine state_to_work_deprecated (nx_block, ny_block, & ntrcr, & narr, trcr_depend, & aicen, trcrn, & @@ -1503,24 +1464,24 @@ subroutine state_to_work (nx_block, ny_block, & integer (kind=int_kind), intent(in) :: & nx_block, ny_block, & ! block dimensions ntrcr , & ! number of tracers in use - narr ! number of 2D state variable arrays in works array + narr ! number of 2D state variable arrays in works array integer (kind=int_kind), dimension (ntrcr), intent(in) :: & trcr_depend ! = 0 for aicen tracers, 1 for vicen, 2 for vsnon real (kind=dbl_kind), dimension (nx_block,ny_block,ncat), intent(in) :: & - aicen , & ! concentration of ice - vicen , & ! volume per unit area of ice (m) - vsnon ! volume per unit area of snow (m) + aicen ,&! concentration of ice + vicen ,&! volume per unit area of ice (m) + vsnon ! volume per unit area of snow (m) real (kind=dbl_kind), dimension (nx_block,ny_block,ntrcr,ncat), intent(in) :: & - trcrn ! ice tracers + trcrn ! ice tracers real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & - aice0 ! concentration of open water + aice0 ! 
concentration of open water real (kind=dbl_kind), dimension(nx_block,ny_block,narr), intent (out) :: & - works ! work array + works ! work array ! local variables @@ -1528,16 +1489,16 @@ subroutine state_to_work (nx_block, ny_block, & nt_alvl, nt_apnd, nt_fbri logical (kind=log_kind) :: & - tr_pond_lvl, tr_pond_topo + tr_pond_cesm, tr_pond_lvl, tr_pond_topo integer (kind=int_kind) :: & - i, j, n, it, & ! counting indices - narrays ! counter for number of state variable arrays + i, j, n, it ,&! counting indices + narrays ! counter for number of state variable arrays character(len=*), parameter :: subname = '(state_to_work)' - call icepack_query_tracer_flags(tr_pond_lvl_out=tr_pond_lvl, & - tr_pond_topo_out=tr_pond_topo) + call icepack_query_tracer_flags(tr_pond_cesm_out=tr_pond_cesm, & + tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_apnd_out=nt_apnd, & nt_fbri_out=nt_fbri) call icepack_warnings_flush(nu_diag) @@ -1591,36 +1552,36 @@ subroutine state_to_work (nx_block, ny_block, & elseif (trcr_depend(it) == 2+nt_alvl) then do j = 1, ny_block do i = 1, nx_block - works(i,j,narrays+it) = aicen(i,j ,n) & + works(i,j,narrays+it) = aicen(i,j,n) & * trcrn(i,j,nt_alvl,n) & - * trcrn(i,j,it ,n) + * trcrn(i,j,it,n) enddo enddo elseif (trcr_depend(it) == 2+nt_apnd .and. & - tr_pond_topo) then + tr_pond_cesm .or. tr_pond_topo) then do j = 1, ny_block do i = 1, nx_block - works(i,j,narrays+it) = aicen(i,j ,n) & + works(i,j,narrays+it) = aicen(i,j,n) & * trcrn(i,j,nt_apnd,n) & - * trcrn(i,j,it ,n) + * trcrn(i,j,it,n) enddo enddo elseif (trcr_depend(it) == 2+nt_apnd .and. & tr_pond_lvl) then do j = 1, ny_block do i = 1, nx_block - works(i,j,narrays+it) = aicen(i,j ,n) & + works(i,j,narrays+it) = aicen(i,j,n) & * trcrn(i,j,nt_alvl,n) & * trcrn(i,j,nt_apnd,n) & - * trcrn(i,j,it ,n) + * trcrn(i,j,it,n) enddo enddo elseif (trcr_depend(it) == 2+nt_fbri) then do j = 1, ny_block do i = 1, nx_block - works(i,j,narrays+it) = vicen(i,j ,n) & + works(i,j,narrays+it) = vicen(i,j,n) & * trcrn(i,j,nt_fbri,n) & - * trcrn(i,j,it ,n) + * trcrn(i,j,it,n) enddo enddo endif @@ -1632,29 +1593,28 @@ subroutine state_to_work (nx_block, ny_block, & if (narr /= narrays) write(nu_diag,*) & "Wrong number of arrays in transport bound call" - end subroutine state_to_work + end subroutine state_to_work_deprecated !======================================================================= ! ! Convert work array back to state variables - - subroutine work_to_state (nx_block, ny_block, & - ntrcr, narr, & - trcr_depend, & - trcr_base, & - n_trcr_strata, & - nt_strata, & - tmask, Tf, & - aicen, trcrn, & - vicen, vsnon, & - aice0, works) +!deprecate upwind + subroutine work_to_state_deprecated (nx_block, ny_block, & + ntrcr, narr, & + trcr_depend, & + trcr_base, & + n_trcr_strata, & + nt_strata, & + aicen, trcrn, & + vicen, vsnon, & + aice0, works) use ice_domain_size, only: ncat - integer (kind=int_kind), intent (in) :: & + integer (kind=int_kind), intent (in) :: & nx_block, ny_block, & ! block dimensions ntrcr , & ! number of tracers in use - narr ! number of 2D state variable arrays in works array + narr ! number of 2D state variable arrays in works array integer (kind=int_kind), dimension (ntrcr), intent(in) :: & trcr_depend, & ! = 0 for aicen tracers, 1 for vicen, 2 for vsnon @@ -1667,45 +1627,35 @@ subroutine work_to_state (nx_block, ny_block, & integer (kind=int_kind), dimension (ntrcr,2), intent(in) :: & nt_strata ! 
indices of underlying tracer layers - logical (kind=log_kind), intent (in) :: & - tmask (nx_block,ny_block) - - real (kind=dbl_kind), intent (in) :: & - Tf (nx_block,ny_block), & + real (kind=dbl_kind), intent (in) :: & works (nx_block,ny_block,narr) real (kind=dbl_kind), dimension (nx_block,ny_block,ncat), intent(out) :: & - aicen , & ! concentration of ice - vicen , & ! volume per unit area of ice (m) - vsnon ! volume per unit area of snow (m) + aicen ,&! concentration of ice + vicen ,&! volume per unit area of ice (m) + vsnon ! volume per unit area of snow (m) real (kind=dbl_kind), dimension (nx_block,ny_block,ntrcr,ncat),intent(out) :: & - trcrn ! ice tracers + trcrn ! ice tracers real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & - aice0 ! concentration of open water + aice0 ! concentration of open water ! local variables - integer (kind=int_kind) :: & - i, j, ij, n, & ! counting indices - narrays , & ! counter for number of state variable arrays - nt_Tsfc , & ! Tsfc tracer number - icells ! number of ocean/ice cells + integer (kind=int_kind) :: & + i, j, ij, n ,&! counting indices + narrays ,&! counter for number of state variable arrays + icells ! number of ocean/ice cells - integer (kind=int_kind), dimension (nx_block*ny_block) :: & + integer (kind=int_kind), dimension (nx_block*ny_block) :: & indxi, indxj - real (kind=dbl_kind), dimension (nx_block*ny_block,narr) :: & - work + real (kind=dbl_kind), dimension (nx_block*ny_block,narr) :: & + work character(len=*), parameter :: subname = '(work_to_state)' - call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - ! for call to compute_tracers icells = 0 do j = 1, ny_block @@ -1739,25 +1689,16 @@ subroutine work_to_state (nx_block, ny_block, & i = indxi(ij) j = indxj(ij) - call icepack_compute_tracers(ntrcr = ntrcr, & - trcr_depend = trcr_depend(:), & - atrcrn = work (ij,narrays+1:narrays+ntrcr), & - aicen = aicen(i,j,n), & - vicen = vicen(i,j,n), & - vsnon = vsnon(i,j,n), & + call icepack_compute_tracers(ntrcr=ntrcr, trcr_depend=trcr_depend(:), & + atrcrn = work (ij,narrays+1:narrays+ntrcr), & + aicen = aicen(i,j,n), & + vicen = vicen(i,j,n), & + vsnon = vsnon(i,j,n), & trcr_base = trcr_base(:,:), & n_trcr_strata = n_trcr_strata(:), & nt_strata = nt_strata(:,:), & - trcrn = trcrn(i,j,:,n), & - Tf = Tf(i,j)) - - ! tcraig, don't let land points get non-zero Tsfc - if (.not.tmask(i,j)) then - trcrn(i,j,nt_Tsfc,n) = c0 - endif - + trcrn = trcrn(i,j,:,n)) enddo - narrays = narrays + ntrcr enddo ! ncat @@ -1766,13 +1707,13 @@ subroutine work_to_state (nx_block, ny_block, & if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - end subroutine work_to_state + end subroutine work_to_state_deprecated !======================================================================= ! ! upwind transport algorithm - - subroutine upwind_field (nx_block, ny_block, & +!deprecate upwind + subroutine upwind_field_deprecated (nx_block, ny_block, & ilo, ihi, jlo, jhi, & dt, & narrays, phi, & @@ -1781,73 +1722,74 @@ subroutine upwind_field (nx_block, ny_block, & tarea) integer (kind=int_kind), intent (in) :: & - nx_block, ny_block, & ! block dimensions - ilo,ihi,jlo,jhi , & ! beginning and end of physical domain - narrays ! number of 2D arrays to be transported + nx_block, ny_block ,&! block dimensions + ilo,ihi,jlo,jhi ,&! 
beginning and end of physical domain + narrays ! number of 2D arrays to be transported real (kind=dbl_kind), intent(in) :: & - dt ! time step + dt ! time step real (kind=dbl_kind), dimension(nx_block,ny_block,narrays), intent(inout) :: & - phi ! scalar field + phi ! scalar field real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in) :: & - uee, vnn ! cell edge velocities + uee, vnn ! cell edge velocities real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in) :: & - HTE , & ! length of east cell edge - HTN , & ! length of north cell edge - tarea ! grid cell area + HTE ,&! length of east cell edge + HTN ,&! length of north cell edge + tarea ! grid cell area ! local variables integer (kind=int_kind) :: & - i, j, n ! standard indices + i, j, n ! standard indices real (kind=dbl_kind), dimension (nx_block,ny_block) :: & worka, workb character(len=*), parameter :: subname = '(upwind_field)' - !------------------------------------------------------------------- - ! upwind transport - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! upwind transport + !------------------------------------------------------------------- do n = 1, narrays - do j = 1, jhi - do i = 1, ihi - worka(i,j)= & - upwind(phi(i,j,n),phi(i+1,j ,n),uee(i,j),HTE(i,j),dt) - workb(i,j)= & - upwind(phi(i,j,n),phi(i ,j+1,n),vnn(i,j),HTN(i,j),dt) - enddo - enddo - - do j = jlo, jhi - do i = ilo, ihi - phi(i,j,n) = phi(i,j,n) - ( worka(i,j)-worka(i-1,j ) & - + workb(i,j)-workb(i ,j-1) ) & - / tarea(i,j) - enddo - enddo +!deprecate upwind do j = 1, jhi +!deprecate upwind do i = 1, ihi +!deprecate upwind worka(i,j)= & +!deprecate upwind upwind(phi(i,j,n),phi(i+1,j,n),uee(i,j),HTE(i,j),dt) +!deprecate upwind workb(i,j)= & +!deprecate upwind upwind(phi(i,j,n),phi(i,j+1,n),vnn(i,j),HTN(i,j),dt) +!deprecate upwind enddo +!deprecate upwind enddo + +!deprecate upwind do j = jlo, jhi +!deprecate upwind do i = ilo, ihi +!deprecate upwind phi(i,j,n) = phi(i,j,n) - ( worka(i,j)-worka(i-1,j) & +!deprecate upwind + workb(i,j)-workb(i,j-1) ) & +!deprecate upwind / tarea(i,j) +!deprecate upwind enddo +!deprecate upwind enddo enddo ! narrays - end subroutine upwind_field + end subroutine upwind_field_deprecated !======================================================================= -! -! Define upwind function -! - real(kind=dbl_kind) function upwind(y1,y2,a,h,dt) + !------------------------------------------------------------------- + ! Define upwind function + !------------------------------------------------------------------- + +!deprecate upwind real(kind=dbl_kind) function upwind(y1,y2,a,h,dt) - real(kind=dbl_kind), intent(in) :: y1,y2,a,h,dt +!deprecate upwind real(kind=dbl_kind), intent(in) :: y1,y2,a,h,dt - upwind = p5*dt*h*((a+abs(a))*y1+(a-abs(a))*y2) +!deprecate upwind upwind = p5*dt*h*((a+abs(a))*y1+(a-abs(a))*y2) - end function upwind +!deprecate upwind end function upwind !======================================================================= diff --git a/cicecore/cicedyn/general/ice_flux.F90 b/cicecore/cicedyn/general/ice_flux.F90 index 4d19bb8b2..0e9ec8a6a 100644 --- a/cicecore/cicedyn/general/ice_flux.F90 +++ b/cicecore/cicedyn/general/ice_flux.F90 @@ -33,41 +33,29 @@ module ice_flux !----------------------------------------------------------------- ! Dynamics component - ! All variables are assumed to be on the atm or ocn thermodynamic - ! grid except as noted - ! - ! 
scale_fluxes divides several of these by aice "in place", so - ! the state of some of these variables is not well defined. In the - ! future, we need to refactor and add "_iavg" versions of the - ! fields to clearly differentiate fields that have been divided - ! by aice and others that are not. The challenge is that we need - ! to go thru each field carefully to see which version is used. - ! For instance, in diagnostics, there are places where these - ! fields are multiplied by aice to compute things properly. - ! strocn[x,y]T_iavg is the first field defined using _iavg. !----------------------------------------------------------------- real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - ! in from atmos (if .not.calc_strair) - strax , & ! wind stress components (N/m^2), on grid_atm_dynu - stray , & ! on grid_atm_dynv + ! in from atmos (if .not.calc_strair) + strax , & ! wind stress components (N/m^2) + stray , & ! ! in from ocean - uocn , & ! ocean current, x-direction (m/s), on grid_ocn_dynu - vocn , & ! ocean current, y-direction (m/s), on grid_ocn_dynv - ss_tltx , & ! sea surface slope, x-direction (m/m), on grid_ocn_dynu - ss_tlty , & ! sea surface slope, y-direction, on grid_ocn_dynv - hwater , & ! water depth for seabed stress calc (landfast ice) + uocn , & ! ocean current, x-direction (m/s) + vocn , & ! ocean current, y-direction (m/s) + ss_tltx , & ! sea surface slope, x-direction (m/m) + ss_tlty , & ! sea surface slope, y-direction + hwater , & ! water depth for basal stress calc (landfast ice) ! out to atmosphere - strairxT, & ! stress on ice by air, x-direction at T points, computed in icepack - strairyT, & ! stress on ice by air, y-direction at T points, computed in icepack + strairxT, & ! stress on ice by air, x-direction + strairyT, & ! stress on ice by air, y-direction ! out to ocean T-cell (kg/m s^2) ! Note, CICE_IN_NEMO uses strocnx and strocny for coupling - strocnxT_iavg, & ! ice-ocean stress, x-direction at T points, per ice fraction (scaled flux) - strocnyT_iavg ! ice-ocean stress, y-direction at T points, per ice fraction (scaled flux) + strocnxT, & ! ice-ocean stress, x-direction + strocnyT ! ice-ocean stress, y-direction ! diagnostic @@ -75,46 +63,25 @@ module ice_flux sig1 , & ! normalized principal stress component sig2 , & ! normalized principal stress component sigP , & ! internal ice pressure (N/m) - taubxU , & ! seabed stress (x) (N/m^2) - taubyU , & ! seabed stress (y) (N/m^2) - strairxU, & ! stress on ice by air, x-direction at U points - strairyU, & ! stress on ice by air, y-direction at U points - strocnxU, & ! ice-ocean stress, x-direction at U points, computed in dyn_finish - strocnyU, & ! ice-ocean stress, y-direction at U points, computed in dyn_finish - strtltxU, & ! stress due to sea surface slope, x-direction - strtltyU, & ! stress due to sea surface slope, y-direction - strintxU, & ! divergence of internal ice stress, x (N/m^2) - strintyU, & ! divergence of internal ice stress, y (N/m^2) - taubxN , & ! seabed stress (x) at N points (N/m^2) - taubyN , & ! seabed stress (y) at N points (N/m^2) - strairxN, & ! stress on ice by air, x-direction at N points - strairyN, & ! stress on ice by air, y-direction at N points - strocnxN, & ! ice-ocean stress, x-direction at N points, computed in dyn_finish - strocnyN, & ! ice-ocean stress, y-direction at N points, computed in dyn_finish - strtltxN, & ! stress due to sea surface slope, x-direction at N points - strtltyN, & ! 
stress due to sea surface slope, y-direction at N points - strintxN, & ! divergence of internal ice stress, x at N points (N/m^2) - strintyN, & ! divergence of internal ice stress, y at N points (N/m^2) - taubxE , & ! seabed stress (x) at E points (N/m^2) - taubyE , & ! seabed stress (y) at E points (N/m^2) - strairxE, & ! stress on ice by air, x-direction at E points - strairyE, & ! stress on ice by air, y-direction at E points - strocnxE, & ! ice-ocean stress, x-direction at E points, computed in dyn_finish - strocnyE, & ! ice-ocean stress, y-direction at E points, computed in dyn_finish - strtltxE, & ! stress due to sea surface slope, x-direction at E points - strtltyE, & ! stress due to sea surface slope, y-direction at E points - strintxE, & ! divergence of internal ice stress, x at E points (N/m^2) - strintyE, & ! divergence of internal ice stress, y at E points (N/m^2) + taubx , & ! basal stress (x) (N/m^2) + tauby , & ! basal stress (y) (N/m^2) + strairx , & ! stress on ice by air, x-direction + strairy , & ! stress on ice by air, y-direction + strocnx , & ! ice-ocean stress, x-direction + strocny , & ! ice-ocean stress, y-direction + strtltx , & ! stress due to sea surface slope, x-direction + strtlty , & ! stress due to sea surface slope, y-direction + strintx , & ! divergence of internal ice stress, x (N/m^2) + strinty , & ! divergence of internal ice stress, y (N/m^2) daidtd , & ! ice area tendency due to transport (1/s) dvidtd , & ! ice volume tendency due to transport (m/s) - dvsdtd , & ! snow volume tendency due to transport (m/s) dagedtd , & ! ice age tendency due to transport (s/s) dardg1dt, & ! rate of area loss by ridging ice (1/s) dardg2dt, & ! rate of area gain by new ridges (1/s) dvirdgdt, & ! rate of ice volume ridged (m/s) opening ! rate of opening due to divergence/shear (1/s) - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & ! ridging diagnostics in categories dardg1ndt, & ! rate of area loss by ridging ice (1/s) @@ -125,7 +92,7 @@ module ice_flux ardgn, & ! fractional area of ridged ice vrdgn, & ! volume of ridged ice araftn, & ! rafting ice area - vraftn, & ! rafting ice volume + vraftn, & ! rafting ice volume aredistn, & ! redistribution function: fraction of new ridge area vredistn ! redistribution function: fraction of new ridge volume @@ -135,20 +102,17 @@ module ice_flux ! ice stress tensor in each corner of T cell (kg/s^2) stressp_1, stressp_2, stressp_3, stressp_4 , & ! sigma11+sigma22 stressm_1, stressm_2, stressm_3, stressm_4 , & ! sigma11-sigma22 - stress12_1,stress12_2,stress12_3,stress12_4, & ! sigma12 - ! ice stress tensor at U and T locations (grid_ice = 'C|CD') (kg/s^2) - stresspT, stressmT, stress12T, & ! sigma11+sigma22, sigma11-sigma22, sigma12 - stresspU, stressmU, stress12U ! " + stress12_1,stress12_2,stress12_3,stress12_4 ! sigma12 + + logical (kind=log_kind), & + dimension (:,:,:), allocatable, public :: & + iceumask ! ice extent mask (U-cell) ! internal real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - fmU , & ! Coriolis param. * mass in U-cell (kg/s) - TbU , & ! factor for seabed stress (N/m^2) - fmE , & ! Coriolis param. * mass in E-cell (kg/s) - TbE , & ! factor for seabed stress (N/m^2) - fmN , & ! Coriolis param. * mass in N-cell (kg/s) - TbN ! factor for seabed stress (N/m^2) + fm , & ! Coriolis param. * mass in U-cell (kg/s) + Tbu ! coefficient for basal stress (N/m^2) !----------------------------------------------------------------- ! 
Thermodynamic component @@ -157,11 +121,10 @@ module ice_flux ! in from atmosphere (if calc_Tsfc) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - zlvl , & ! atm level height (momentum) (m) - zlvs , & ! atm level height (scalar quantities) (m) - uatm , & ! wind velocity components (m/s), on grid_atm_dynu - vatm , & ! on grid_atm_dynv - wind , & ! wind speed (m/s) , on grid_atm_dynu + zlvl , & ! atm level height (m) + uatm , & ! wind velocity components (m/s) + vatm , & + wind , & ! wind speed (m/s) potT , & ! air potential temperature (K) Tair , & ! air temperature (K) Qa , & ! specific humidity (kg/kg) @@ -177,7 +140,7 @@ module ice_flux ! NOTE: when in CICE_IN_NEMO mode, these are gridbox mean fields, ! not per ice area. When in standalone mode, these are per ice area. - real (kind=dbl_kind), & + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & fsurfn_f , & ! net flux to top surface, excluding fcondtop fcondtopn_f, & ! downward cond flux at top surface (W m-2) @@ -200,7 +163,7 @@ module ice_flux Tf , & ! freezing temperature (C) qdp , & ! deep ocean heat flux (W/m^2), negative upward hmix , & ! mixed layer depth (m) - daice_da ! data assimilation concentration increment rate + daice_da ! data assimilation concentration increment rate ! (concentration s-1)(only used in hadgem drivers) ! out to atmosphere (if calc_Tsfc) @@ -242,18 +205,18 @@ module ice_flux alvdf_init, & ! visible, diffuse (fraction) alidf_init ! near-ir, diffuse (fraction) - real (kind=dbl_kind), dimension(:,:,:,:), allocatable, public :: & + real (kind=dbl_kind), & + dimension(:,:,:,:), allocatable, public :: & albcnt ! counter for zenith angle - ! out to ocean - ! (Note CICE_IN_NEMO does not use these for coupling. + ! out to ocean + ! (Note CICE_IN_NEMO does not use these for coupling. ! It uses fresh_ai,fsalt_ai,fhocn_ai and fswthru_ai) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & fpond , & ! fresh water flux to ponds (kg/m^2/s) fresh , & ! fresh water flux to ocean (kg/m^2/s) fsalt , & ! salt flux to ocean (kg/m^2/s) fhocn , & ! net heat flux to ocean (W/m^2) - fsloss , & ! rate of snow loss to leads (kg/m^2/s) fswthru , & ! shortwave penetrating to ocean (W/m^2) fswthru_vdr , & ! vis dir shortwave penetrating to ocean (W/m^2) fswthru_vdf , & ! vis dif shortwave penetrating to ocean (W/m^2) @@ -270,9 +233,6 @@ module ice_flux l_mpond_fresh ! if true, include freshwater feedback from meltponds ! when running in ice-ocean or coupled configuration - character (char_len), public :: & - cpl_frazil ! type of coupling for frazil ice, 'fresh_ice_correction','internal','external' - real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & meltsn , & ! snow melt in category n (m) melttn , & ! top melt in category n (m) @@ -281,7 +241,7 @@ module ice_flux snoicen ! snow-ice formation in category n (m) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - keffn_top ! effective thermal conductivity of the top ice layer + keffn_top ! effective thermal conductivity of the top ice layer ! on categories (W/m^2/K) ! quantities passed from ocean mixed layer to atmosphere @@ -320,13 +280,12 @@ module ice_flux dsnow, & ! change in snow thickness (m/step-->cm/day) daidtt, & ! ice area tendency thermo. (s^-1) dvidtt, & ! ice volume tendency thermo. (m/s) - dvsdtt, & ! snow volume tendency thermo. (m/s) dagedtt,& ! ice age tendency thermo. (s/s) mlt_onset, &! day of year that sfc melting begins frz_onset, &! 
day of year that freezing begins (congel or frazil) frazil_diag ! frazil ice growth diagnostic (m/step-->cm/day) - - real (kind=dbl_kind), & + + real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & fsurfn, & ! category fsurf fcondtopn,& ! category fcondtop @@ -334,17 +293,13 @@ module ice_flux fsensn, & ! category sensible heat flux flatn ! category latent heat flux - real (kind=dbl_kind), & - dimension (:,:,:,:), allocatable, public :: & - snwcnt ! counter for presence of snow - ! As above but these remain grid box mean values i.e. they are not ! divided by aice at end of ice_dynamics. These are used in ! CICE_IN_NEMO for coupling and also for generating - ! ice diagnostics and history files as these are more accurate. + ! ice diagnostics and history files as these are more accurate. ! (The others suffer from problem of incorrect values at grid boxes ! that change from an ice free state to an icy state.) - + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & fresh_ai, & ! fresh water flux to ocean (kg/m^2/s) fsalt_ai, & ! salt flux to ocean (kg/m^2/s) @@ -358,7 +313,7 @@ module ice_flux real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & fswthrun_ai ! per-category fswthru * ai (W/m^2) - + logical (kind=log_kind), public :: send_i2x_per_cat = .false. !----------------------------------------------------------------- @@ -366,18 +321,15 @@ module ice_flux !----------------------------------------------------------------- real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - uatmT , & ! uatm on T grid (m/s) - vatmT , & ! vatm on T grid (m/s) rside , & ! fraction of ice that melts laterally fside , & ! lateral heat flux (W/m^2) - wlat , & ! lateral heat rate (m/s) fsw , & ! incoming shortwave radiation (W/m^2) - coszen , & ! cosine solar zenith angle, < 0 for sun below horizon + coszen , & ! cosine solar zenith angle, < 0 for sun below horizon rdg_conv, & ! convergence term for ridging (1/s) rdg_shear ! shear term for ridging (1/s) - + real (kind=dbl_kind), dimension(:,:,:,:), allocatable, public :: & - salinz ,& ! initial salinity profile (ppt) + salinz ,& ! initial salinity profile (ppt) Tmltz ! initial melting temperature (^oC) !======================================================================= @@ -386,42 +338,39 @@ module ice_flux !======================================================================= ! -! Allocate space for all variables +! Allocate space for all variables ! subroutine alloc_flux - use ice_grid, only : grid_ice - integer (int_kind) :: ierr allocate( & strax (nx_block,ny_block,max_blocks), & ! wind stress components (N/m^2) - stray (nx_block,ny_block,max_blocks), & ! + stray (nx_block,ny_block,max_blocks), & ! uocn (nx_block,ny_block,max_blocks), & ! ocean current, x-direction (m/s) vocn (nx_block,ny_block,max_blocks), & ! ocean current, y-direction (m/s) ss_tltx (nx_block,ny_block,max_blocks), & ! sea surface slope, x-direction (m/m) ss_tlty (nx_block,ny_block,max_blocks), & ! sea surface slope, y-direction - hwater (nx_block,ny_block,max_blocks), & ! water depth for seabed stress calc (landfast ice) + hwater (nx_block,ny_block,max_blocks), & ! water depth for basal stress calc (landfast ice) strairxT (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction strairyT (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction - strocnxT_iavg(nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction, per ice area - strocnyT_iavg(nx_block,ny_block,max_blocks), & ! 
ice-ocean stress, y-direction, per ice area + strocnxT (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction + strocnyT (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction sig1 (nx_block,ny_block,max_blocks), & ! normalized principal stress component sig2 (nx_block,ny_block,max_blocks), & ! normalized principal stress component sigP (nx_block,ny_block,max_blocks), & ! internal ice pressure (N/m) - taubxU (nx_block,ny_block,max_blocks), & ! seabed stress (x) (N/m^2) - taubyU (nx_block,ny_block,max_blocks), & ! seabed stress (y) (N/m^2) - strairxU (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction - strairyU (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction - strocnxU (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction - strocnyU (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction - strtltxU (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction - strtltyU (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction - strintxU (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x (N/m^2) - strintyU (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y (N/m^2) + taubx (nx_block,ny_block,max_blocks), & ! basal stress (x) (N/m^2) + tauby (nx_block,ny_block,max_blocks), & ! basal stress (y) (N/m^2) + strairx (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction + strairy (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction + strocnx (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction + strocny (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction + strtltx (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction + strtlty (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction + strintx (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x (N/m^2) + strinty (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y (N/m^2) daidtd (nx_block,ny_block,max_blocks), & ! ice area tendency due to transport (1/s) dvidtd (nx_block,ny_block,max_blocks), & ! ice volume tendency due to transport (m/s) - dvsdtd (nx_block,ny_block,max_blocks), & ! snow volume tendency due to transport (m/s) dagedtd (nx_block,ny_block,max_blocks), & ! ice age tendency due to transport (s/s) dardg1dt (nx_block,ny_block,max_blocks), & ! rate of area loss by ridging ice (1/s) dardg2dt (nx_block,ny_block,max_blocks), & ! rate of area gain by new ridges (1/s) @@ -439,10 +388,10 @@ subroutine alloc_flux stress12_2 (nx_block,ny_block,max_blocks), & ! sigma12 stress12_3 (nx_block,ny_block,max_blocks), & ! sigma12 stress12_4 (nx_block,ny_block,max_blocks), & ! sigma12 - fmU (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in U-cell (kg/s) - TbU (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) - zlvl (nx_block,ny_block,max_blocks), & ! atm level height (momentum) (m) - zlvs (nx_block,ny_block,max_blocks), & ! atm level height (scalar quantities) (m) + iceumask (nx_block,ny_block,max_blocks), & ! ice extent mask (U-cell) + fm (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in U-cell (kg/s) + Tbu (nx_block,ny_block,max_blocks), & ! coefficient for basal stress (landfast ice) + zlvl (nx_block,ny_block,max_blocks), & ! atm level height (m) uatm (nx_block,ny_block,max_blocks), & ! 
wind velocity components (m/s) vatm (nx_block,ny_block,max_blocks), & wind (nx_block,ny_block,max_blocks), & ! wind speed (m/s) @@ -464,8 +413,7 @@ subroutine alloc_flux Tf (nx_block,ny_block,max_blocks), & ! freezing temperature (C) qdp (nx_block,ny_block,max_blocks), & ! deep ocean heat flux (W/m^2), negative upward hmix (nx_block,ny_block,max_blocks), & ! mixed layer depth (m) - daice_da (nx_block,ny_block,max_blocks), & ! data assimilation concentration increment rate (concentration s-1) - ! (only used in hadgem drivers) + daice_da (nx_block,ny_block,max_blocks), & ! data assimilation concentration increment rate (concentration s-1)(only used in hadgem drivers) fsens (nx_block,ny_block,max_blocks), & ! sensible heat flux (W/m^2) flat (nx_block,ny_block,max_blocks), & ! latent heat flux (W/m^2) fswabs (nx_block,ny_block,max_blocks), & ! shortwave flux absorbed in ice and ocean (W/m^2) @@ -498,7 +446,6 @@ subroutine alloc_flux fresh (nx_block,ny_block,max_blocks), & ! fresh water flux to ocean (kg/m^2/s) fsalt (nx_block,ny_block,max_blocks), & ! salt flux to ocean (kg/m^2/s) fhocn (nx_block,ny_block,max_blocks), & ! net heat flux to ocean (W/m^2) - fsloss (nx_block,ny_block,max_blocks), & ! rate of snow loss to leads (kg/m^2/s) fswthru (nx_block,ny_block,max_blocks), & ! shortwave penetrating to ocean (W/m^2) fswthru_vdr (nx_block,ny_block,max_blocks), & ! vis dir shortwave penetrating to ocean (W/m^2) fswthru_vdf (nx_block,ny_block,max_blocks), & ! vis dif shortwave penetrating to ocean (W/m^2) @@ -533,7 +480,6 @@ subroutine alloc_flux dsnow (nx_block,ny_block,max_blocks), & ! change in snow thickness (m/step-->cm/day) daidtt (nx_block,ny_block,max_blocks), & ! ice area tendency thermo. (s^-1) dvidtt (nx_block,ny_block,max_blocks), & ! ice volume tendency thermo. (m/s) - dvsdtt (nx_block,ny_block,max_blocks), & ! snow volume tendency thermo. (m/s) dagedtt (nx_block,ny_block,max_blocks), & ! ice age tendency thermo. (s/s) mlt_onset (nx_block,ny_block,max_blocks), & ! day of year that sfc melting begins frz_onset (nx_block,ny_block,max_blocks), & ! day of year that freezing begins (congel or frazil) @@ -544,13 +490,10 @@ subroutine alloc_flux fswthru_ai (nx_block,ny_block,max_blocks), & ! shortwave penetrating to ocean (W/m^2) fresh_da (nx_block,ny_block,max_blocks), & ! fresh water flux to ocean due to data assim (kg/m^2/s) fsalt_da (nx_block,ny_block,max_blocks), & ! salt flux to ocean due to data assimilation(kg/m^2/s) - uatmT (nx_block,ny_block,max_blocks), & ! uatm on T grid - vatmT (nx_block,ny_block,max_blocks), & ! vatm on T grid rside (nx_block,ny_block,max_blocks), & ! fraction of ice that melts laterally - fside (nx_block,ny_block,max_blocks), & ! lateral melt flux (W/m^2) - wlat (nx_block,ny_block,max_blocks), & ! lateral melt rate (m/s) + fside (nx_block,ny_block,max_blocks), & ! lateral melt rate (W/m^2) fsw (nx_block,ny_block,max_blocks), & ! incoming shortwave radiation (W/m^2) - coszen (nx_block,ny_block,max_blocks), & ! cosine solar zenith angle, < 0 for sun below horizon + coszen (nx_block,ny_block,max_blocks), & ! cosine solar zenith angle, < 0 for sun below horizon rdg_conv (nx_block,ny_block,max_blocks), & ! convergence term for ridging (1/s) rdg_shear (nx_block,ny_block,max_blocks), & ! shear term for ridging (1/s) dardg1ndt (nx_block,ny_block,ncat,max_blocks), & ! rate of area loss by ridging ice (1/s) @@ -561,7 +504,7 @@ subroutine alloc_flux ardgn (nx_block,ny_block,ncat,max_blocks), & ! fractional area of ridged ice vrdgn (nx_block,ny_block,ncat,max_blocks), & ! 
volume of ridged ice araftn (nx_block,ny_block,ncat,max_blocks), & ! rafting ice area - vraftn (nx_block,ny_block,ncat,max_blocks), & ! rafting ice volume + vraftn (nx_block,ny_block,ncat,max_blocks), & ! rafting ice volume aredistn (nx_block,ny_block,ncat,max_blocks), & ! redistribution function: fraction of new ridge area vredistn (nx_block,ny_block,ncat,max_blocks), & ! redistribution function: fraction of new ridge volume fsurfn_f (nx_block,ny_block,ncat,max_blocks), & ! net flux to top surface, excluding fcondtop @@ -580,47 +523,11 @@ subroutine alloc_flux fsensn (nx_block,ny_block,ncat,max_blocks), & ! category sensible heat flux flatn (nx_block,ny_block,ncat,max_blocks), & ! category latent heat flux albcnt (nx_block,ny_block,max_blocks,max_nstrm), & ! counter for zenith angle - snwcnt (nx_block,ny_block,max_blocks,max_nstrm), & ! counter for snow - salinz (nx_block,ny_block,nilyr+1,max_blocks), & ! initial salinity profile (ppt) + salinz (nx_block,ny_block,nilyr+1,max_blocks), & ! initial salinity profile (ppt) Tmltz (nx_block,ny_block,nilyr+1,max_blocks), & ! initial melting temperature (^oC) stat=ierr) if (ierr/=0) call abort_ice('(alloc_flux): Out of memory') - if (grid_ice == "CD" .or. grid_ice == "C") & - allocate( & - taubxN (nx_block,ny_block,max_blocks), & ! seabed stress (x) at N points (N/m^2) - taubyN (nx_block,ny_block,max_blocks), & ! seabed stress (y) at N points (N/m^2) - strairxN (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction at N points - strairyN (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction at N points - strocnxN (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction at N points - strocnyN (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction at N points - strtltxN (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction at N points - strtltyN (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction at N points - strintxN (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x at N points (N/m^2) - strintyN (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y at N points (N/m^2) - fmN (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in N-cell (kg/s) - TbN (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) - taubxE (nx_block,ny_block,max_blocks), & ! seabed stress (x) at E points (N/m^2) - taubyE (nx_block,ny_block,max_blocks), & ! seabed stress (y) at E points (N/m^2) - strairxE (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction at E points - strairyE (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction at E points - strocnxE (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction at E points - strocnyE (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction at E points - strtltxE (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction at E points - strtltyE (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction at E points - strintxE (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x at E points (N/m^2) - strintyE (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y at E points (N/m^2) - fmE (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in E-cell (kg/s) - TbE (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) - stresspT (nx_block,ny_block,max_blocks), & ! 
sigma11+sigma22 - stressmT (nx_block,ny_block,max_blocks), & ! sigma11-sigma22 - stress12T (nx_block,ny_block,max_blocks), & ! sigma12 - stresspU (nx_block,ny_block,max_blocks), & ! sigma11+sigma22 - stressmU (nx_block,ny_block,max_blocks), & ! sigma11-sigma22 - stress12U (nx_block,ny_block,max_blocks), & ! sigma12 - stat=ierr) - if (ierr/=0) call abort_ice('(alloc_flux): Out of memory') - end subroutine alloc_flux !======================================================================= @@ -640,8 +547,7 @@ subroutine init_coupler_flux integer (kind=int_kind) :: n - integer (kind=int_kind), parameter :: max_d = 6 - real (kind=dbl_kind) :: fcondtopn_d(max_d), fsurfn_d(max_d) + real (kind=dbl_kind) :: fcondtopn_d(6), fsurfn_d(6) real (kind=dbl_kind) :: stefan_boltzmann, Tffresh real (kind=dbl_kind) :: vonkar, zref, iceruf @@ -663,8 +569,7 @@ subroutine init_coupler_flux !----------------------------------------------------------------- ! fluxes received from atmosphere !----------------------------------------------------------------- - zlvl (:,:,:) = c10 ! atm level height (momentum) (m) - zlvs (:,:,:) = c10 ! atm level height (scalar quantities) (m) + zlvl (:,:,:) = c10 ! atm level height (m) rhoa (:,:,:) = 1.3_dbl_kind ! air density (kg/m^3) uatm (:,:,:) = c5 ! wind velocity (m/s) vatm (:,:,:) = c5 @@ -684,7 +589,7 @@ subroutine init_coupler_flux flw (:,:,:) = c180 ! incoming longwave rad (W/m^2) frain (:,:,:) = c0 ! rainfall rate (kg/m2/s) do n = 1, ncat ! conductive heat flux (W/m^2) - fcondtopn_f(:,:,n,:) = fcondtopn_d(min(n,max_d)) + fcondtopn_f(:,:,n,:) = fcondtopn_d(n) enddo fsurfn_f = fcondtopn_f ! surface heat flux (W/m^2) flatn_f (:,:,:,:) = c0 ! latent heat flux (kg/m2/s) @@ -701,7 +606,7 @@ subroutine init_coupler_flux flw (:,:,:) = 280.0_dbl_kind ! incoming longwave rad (W/m^2) frain (:,:,:) = c0 ! rainfall rate (kg/m2/s) do n = 1, ncat ! surface heat flux (W/m^2) - fsurfn_f(:,:,n,:) = fsurfn_d(min(n,max_d)) + fsurfn_f(:,:,n,:) = fsurfn_d(n) enddo fcondtopn_f(:,:,:,:) = 0.0_dbl_kind ! conductive heat flux (W/m^2) flatn_f (:,:,:,:) = -2.0_dbl_kind ! latent heat flux (W/m^2) @@ -718,12 +623,12 @@ subroutine init_coupler_flux flw (:,:,:) = 230.0_dbl_kind ! incoming longwave rad (W/m^2) frain (:,:,:) = c0 ! rainfall rate (kg/m2/s) do n = 1, ncat ! surface heat flux (W/m^2) - fsurfn_f(:,:,n,:) = fsurfn_d(min(n,max_d)) + fsurfn_f(:,:,n,:) = fsurfn_d(n) enddo fcondtopn_f(:,:,:,:) = c0 ! conductive heat flux (W/m^2) flatn_f (:,:,:,:) = -1.0_dbl_kind ! latent heat flux (W/m^2) fsensn_f (:,:,:,:) = c0 ! sensible heat flux (W/m^2) - endif ! + endif ! fiso_atm (:,:,:,:) = c0 ! isotope deposition rate (kg/m2/s) faero_atm (:,:,:,:) = c0 ! aerosol deposition rate (kg/m2/s) @@ -733,13 +638,13 @@ subroutine init_coupler_flux ! fluxes received from ocean !----------------------------------------------------------------- - ss_tltx (:,:,:) = c0 ! sea surface tilt (m/m) - ss_tlty (:,:,:) = c0 - uocn (:,:,:) = c0 ! surface ocean currents (m/s) - vocn (:,:,:) = c0 - frzmlt (:,:,:) = c0 ! freezing/melting potential (W/m^2) - frzmlt_init(:,:,:) = c0 ! freezing/melting potential (W/m^2) - sss (:,:,:) = 34.0_dbl_kind ! sea surface salinity (ppt) + ss_tltx(:,:,:)= c0 ! sea surface tilt (m/m) + ss_tlty(:,:,:)= c0 + uocn (:,:,:) = c0 ! surface ocean currents (m/s) + vocn (:,:,:) = c0 + frzmlt(:,:,:) = c0 ! freezing/melting potential (W/m^2) + frzmlt_init(:,:,:) = c0 ! freezing/melting potential (W/m^2) + sss (:,:,:) = 34.0_dbl_kind ! 
sea surface salinity (ppt) do iblk = 1, size(Tf,3) do j = 1, size(Tf,2) @@ -749,7 +654,9 @@ subroutine init_coupler_flux enddo enddo +#ifndef CICE_IN_NEMO sst (:,:,:) = Tf(:,:,:) ! sea surface temp (C) +#endif qdp (:,:,:) = c0 ! deep ocean heat flux (W/m^2) hmix (:,:,:) = c20 ! ocean mixed layer depth (m) hwater(:,:,:) = bathymetry(:,:,:) ! ocean water depth (m) @@ -766,7 +673,7 @@ subroutine init_coupler_flux flat (:,:,:) = c0 fswabs (:,:,:) = c0 fswint_ai(:,:,:) = c0 - flwout (:,:,:) = -stefan_boltzmann*Tffresh**4 + flwout (:,:,:) = -stefan_boltzmann*Tffresh**4 ! in case atm model diagnoses Tsfc from flwout evap (:,:,:) = c0 evaps (:,:,:) = c0 @@ -783,8 +690,8 @@ subroutine init_coupler_flux ! fluxes sent to ocean !----------------------------------------------------------------- - strocnxT_iavg (:,:,:) = c0 ! ice-ocean stress, x-direction (T-cell) - strocnyT_iavg (:,:,:) = c0 ! ice-ocean stress, y-direction (T-cell) + strocnxT(:,:,:) = c0 ! ice-ocean stress, x-direction (T-cell) + strocnyT(:,:,:) = c0 ! ice-ocean stress, y-direction (T-cell) fresh (:,:,:) = c0 fsalt (:,:,:) = c0 fpond (:,:,:) = c0 @@ -810,9 +717,11 @@ subroutine init_coupler_flux fdon (:,:,:,:)= c0 ffep (:,:,:,:)= c0 ffed (:,:,:,:)= c0 - - allocate(fswthrun_ai(nx_block,ny_block,ncat,max_blocks)) - fswthrun_ai(:,:,:,:) = c0 + + if (send_i2x_per_cat) then + allocate(fswthrun_ai(nx_block,ny_block,ncat,max_blocks)) + fswthrun_ai(:,:,:,:) = c0 + endif !----------------------------------------------------------------- ! derived or computed fields @@ -820,7 +729,7 @@ subroutine init_coupler_flux coszen (:,:,:) = c0 ! Cosine of the zenith angle fsw (:,:,:) = c0 ! shortwave radiation (W/m^2) - scale_factor(:,:,:) = c1 ! shortwave scaling factor + scale_factor(:,:,:) = c1 ! shortwave scaling factor wind (:,:,:) = sqrt(uatm(:,:,:)**2 & + vatm(:,:,:)**2) ! wind speed, (m/s) Cdn_atm(:,:,:) = (vonkar/log(zref/iceruf)) & @@ -922,7 +831,7 @@ end subroutine init_flux_ocn subroutine init_history_therm - use ice_state, only: aice, vice, vsno, trcr + use ice_state, only: aice, vice, trcr use ice_arrays_column, only: & hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & Cdn_atm_skin, Cdn_atm_floe, Cdn_atm_pond, Cdn_atm_rdg, & @@ -969,7 +878,6 @@ subroutine init_history_therm meltl (:,:,:) = c0 daidtt (:,:,:) = aice(:,:,:) ! temporary initial area dvidtt (:,:,:) = vice(:,:,:) ! temporary initial volume - dvsdtt (:,:,:) = vsno(:,:,:) ! temporary initial volume if (tr_iage) then dagedtt(:,:,:) = trcr(:,:,nt_iage,:) ! temporary initial age else @@ -991,8 +899,8 @@ subroutine init_history_therm snowfrac (:,:,:) = c0 frazil_diag (:,:,:) = c0 - ! drag coefficients are computed prior to the atmo_boundary call, - ! during the thermodynamics section + ! drag coefficients are computed prior to the atmo_boundary call, + ! during the thermodynamics section Cdn_ocn(:,:,:) = dragio Cdn_atm(:,:,:) = (vonkar/log(zref/iceruf)) & * (vonkar/log(zref/iceruf)) ! 
atmo drag for RASM @@ -1027,8 +935,7 @@ end subroutine init_history_therm subroutine init_history_dyn - use ice_state, only: aice, vice, vsno, trcr, strength, divu, shear, vort - use ice_grid, only: grid_ice + use ice_state, only: aice, vice, trcr, strength logical (kind=log_kind) :: & tr_iage @@ -1046,30 +953,26 @@ subroutine init_history_dyn sig1 (:,:,:) = c0 sig2 (:,:,:) = c0 - divu (:,:,:) = c0 - shear (:,:,:) = c0 - vort (:,:,:) = c0 - taubxU (:,:,:) = c0 - taubyU (:,:,:) = c0 + taubx (:,:,:) = c0 + tauby (:,:,:) = c0 strength (:,:,:) = c0 - strocnxU(:,:,:) = c0 - strocnyU(:,:,:) = c0 - strairxU(:,:,:) = c0 - strairyU(:,:,:) = c0 - strtltxU(:,:,:) = c0 - strtltyU(:,:,:) = c0 - strintxU(:,:,:) = c0 - strintyU(:,:,:) = c0 + strocnx (:,:,:) = c0 + strocny (:,:,:) = c0 + strairx (:,:,:) = c0 + strairy (:,:,:) = c0 + strtltx (:,:,:) = c0 + strtlty (:,:,:) = c0 + strintx (:,:,:) = c0 + strinty (:,:,:) = c0 dardg1dt(:,:,:) = c0 dardg2dt(:,:,:) = c0 dvirdgdt(:,:,:) = c0 opening (:,:,:) = c0 daidtd (:,:,:) = aice(:,:,:) ! temporary initial area dvidtd (:,:,:) = vice(:,:,:) ! temporary initial volume - dvsdtd (:,:,:) = vsno(:,:,:) ! temporary initial volume if (tr_iage) & dagedtd (:,:,:) = trcr(:,:,nt_iage,:) ! temporary initial age - fmU (:,:,:) = c0 + fm (:,:,:) = c0 ardgn (:,:,:,:) = c0 vrdgn (:,:,:,:) = c0 krdgn (:,:,:,:) = c1 @@ -1084,32 +987,6 @@ subroutine init_history_dyn aredistn (:,:,:,:) = c0 vredistn (:,:,:,:) = c0 - if (grid_ice == "CD" .or. grid_ice == "C") then - taubxE (:,:,:) = c0 - taubyE (:,:,:) = c0 - strocnxE (:,:,:) = c0 - strocnyE (:,:,:) = c0 - strairxE (:,:,:) = c0 - strairyE (:,:,:) = c0 - strtltxE (:,:,:) = c0 - strtltyE (:,:,:) = c0 - strintxE (:,:,:) = c0 - strintyE (:,:,:) = c0 - fmE (:,:,:) = c0 - TbE (:,:,:) = c0 - taubxN (:,:,:) = c0 - taubyN (:,:,:) = c0 - strocnxN (:,:,:) = c0 - strocnyN (:,:,:) = c0 - strairxN (:,:,:) = c0 - strairyN (:,:,:) = c0 - strtltxN (:,:,:) = c0 - strtltyN (:,:,:) = c0 - strintxN (:,:,:) = c0 - strintyN (:,:,:) = c0 - fmN (:,:,:) = c0 - TbN (:,:,:) = c0 - end if end subroutine init_history_dyn !======================================================================= @@ -1136,6 +1013,7 @@ subroutine scale_fluxes (nx_block, ny_block, & faero_ocn, & alvdr, alidr, & alvdf, alidf, & + fzsal, fzsal_g, & flux_bio, & fsurf, fcondtop, & Uref, wind, & @@ -1199,6 +1077,11 @@ subroutine scale_fluxes (nx_block, ny_block, & fsurf , & ! surface heat flux (W/m**2) fcondtop ! top surface conductive flux (W/m**2) + ! zsalinity fluxes + real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout) :: & + fzsal , & ! salt flux to ocean with prognositic salinity (kg/m2/s) + fzsal_g ! Gravity drainage salt flux to ocean (kg/m2/s) + ! isotopes real (kind=dbl_kind), dimension(nx_block,ny_block,icepack_max_iso), & optional, intent(inout) :: & @@ -1224,6 +1107,9 @@ subroutine scale_fluxes (nx_block, ny_block, & if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) +!DIR$ CONCURRENT !Cray +!cdir nodep !NEC +!ocl novrec !Fujitsu do j = 1, ny_block do i = 1, nx_block if (tmask(i,j) .and. 
aice(i,j) > c0) then @@ -1251,6 +1137,8 @@ subroutine scale_fluxes (nx_block, ny_block, & alidr (i,j) = alidr (i,j) * ar alvdf (i,j) = alvdf (i,j) * ar alidf (i,j) = alidf (i,j) * ar + fzsal (i,j) = fzsal (i,j) * ar + fzsal_g (i,j) = fzsal_g (i,j) * ar flux_bio (i,j,:) = flux_bio (i,j,:) * ar faero_ocn(i,j,:) = faero_ocn(i,j,:) * ar if (present(Qref_iso )) Qref_iso (i,j,:) = Qref_iso (i,j,:) * ar @@ -1279,8 +1167,10 @@ subroutine scale_fluxes (nx_block, ny_block, & fswthru_idf (i,j) = c0 alvdr (i,j) = c0 ! zero out albedo where ice is absent alidr (i,j) = c0 - alvdf (i,j) = c0 + alvdf (i,j) = c0 alidf (i,j) = c0 + fzsal (i,j) = c0 + fzsal_g (i,j) = c0 flux_bio (i,j,:) = c0 faero_ocn(i,j,:) = c0 if (present(Qref_iso )) Qref_iso (i,j,:) = c0 @@ -1291,8 +1181,11 @@ subroutine scale_fluxes (nx_block, ny_block, & enddo ! j ! Scale fluxes for history output - if (present(fsurf) .and. present(fcondtop) ) then - + if (present(fsurf) .and. present(fcondtop) ) then + +!DIR$ CONCURRENT !Cray +!cdir nodep !NEC +!ocl novrec !Fujitsu do j = 1, ny_block do i = 1, nx_block if (tmask(i,j) .and. aice(i,j) > c0) then @@ -1305,9 +1198,9 @@ subroutine scale_fluxes (nx_block, ny_block, & endif ! tmask and aice > 0 enddo ! i enddo ! j - + endif ! present(fsurf & fcondtop) - + end subroutine scale_fluxes !======================================================================= diff --git a/cicecore/cicedyn/general/ice_flux_bgc.F90 b/cicecore/cicedyn/general/ice_flux_bgc.F90 index 9c07971ff..56e644431 100644 --- a/cicecore/cicedyn/general/ice_flux_bgc.F90 +++ b/cicecore/cicedyn/general/ice_flux_bgc.F90 @@ -26,13 +26,13 @@ module ice_flux_bgc real (kind=dbl_kind), & ! coupling variable for both tr_aero and tr_zaero dimension (:,:,:,:), allocatable, public :: & fiso_atm, & ! isotope deposition rate (kg/m^2 s) - faero_atm ! aerosol deposition rate (kg/m^2 s) + faero_atm ! aerosol deposition rate (kg/m^2 s) real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & flux_bio_atm ! all bio fluxes to ice from atmosphere - ! out to ocean + ! out to ocean real (kind=dbl_kind), & dimension (:,:,:,:), allocatable, public :: & @@ -44,6 +44,10 @@ module ice_flux_bgc flux_bio , & ! all bio fluxes to ocean flux_bio_ai ! all bio fluxes to ocean, averaged over grid cell + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & + fzsal_ai, & ! salt flux to ocean from zsalinity (kg/m^2/s) + fzsal_g_ai ! gravity drainage salt flux to ocean (kg/m^2/s) + ! internal logical (kind=log_kind), public :: & @@ -54,7 +58,7 @@ module ice_flux_bgc dsnown ! change in snow thickness in category n (m) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - nit , & ! ocean nitrate (mmol/m^3) + nit , & ! ocean nitrate (mmol/m^3) amm , & ! ammonia/um (mmol/m^3) sil , & ! silicate (mmol/m^3) dmsp , & ! dmsp (mmol/m^3) @@ -81,15 +85,15 @@ module ice_flux_bgc fdon ! ice-ocean don flux (mmol/m^2/s) (proteins and amino acids) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - dic , & ! ocean dic (mmol/m^3) - fdic ! ice-ocean dic flux (mmol/m^2/s) + dic , & ! ocean dic (mmol/m^3) + fdic ! ice-ocean dic flux (mmol/m^2/s) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - fed, fep , & ! ocean dissolved and particulate fe (nM) - ffed, ffep ! ice-ocean dissolved and particulate fe flux (umol/m^2/s) + fed, fep , & ! ocean dissolved and particulate fe (nM) + ffed, ffep ! 
ice-ocean dissolved and particulate fe flux (umol/m^2/s) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - zaeros ! ocean aerosols (mmol/m^3) + zaeros ! ocean aerosols (mmol/m^3) ! isotopes real (kind=dbl_kind), & ! coupling variable for tr_iso @@ -110,14 +114,16 @@ module ice_flux_bgc !======================================================================= ! -! Allocate space for all variables +! Allocate space for all variables ! subroutine alloc_flux_bgc integer (int_kind) :: ierr allocate( & - nit (nx_block,ny_block,max_blocks), & ! ocean nitrate (mmol/m^3) + fzsal_ai (nx_block,ny_block,max_blocks), & ! salt flux to ocean from zsalinity (kg/m^2/s) + fzsal_g_ai (nx_block,ny_block,max_blocks), & ! gravity drainage salt flux to ocean (kg/m^2/s) + nit (nx_block,ny_block,max_blocks), & ! ocean nitrate (mmol/m^3) amm (nx_block,ny_block,max_blocks), & ! ammonia/um (mmol/m^3) sil (nx_block,ny_block,max_blocks), & ! silicate (mmol/m^3) dmsp (nx_block,ny_block,max_blocks), & ! dmsp (mmol/m^3) @@ -132,32 +138,32 @@ subroutine alloc_flux_bgc fdust (nx_block,ny_block,max_blocks), & ! ice-ocean dust flux (kg/m^2/s), positive to ocean hin_old (nx_block,ny_block,ncat,max_blocks), & ! old ice thickness dsnown (nx_block,ny_block,ncat,max_blocks), & ! change in snow thickness in category n (m) - HDO_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of HDO (kg/kg) - H2_16O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_16O (kg/kg) - H2_18O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_18O (kg/kg) - Qa_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope specific humidity (kg/kg) - Qref_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! 2m atm reference isotope spec humidity (kg/kg) - fiso_atm (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope deposition rate (kg/m^2 s) - fiso_evap (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope evaporation rate (kg/m^2 s) - fiso_ocn (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope flux to ocean (kg/m^2/s) - faero_atm (nx_block,ny_block,icepack_max_aero,max_blocks), & ! aerosol deposition rate (kg/m^2 s) + HDO_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of HDO (kg/kg) + H2_16O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_16O (kg/kg) + H2_18O_ocn (nx_block,ny_block,max_blocks), & ! seawater concentration of H2_18O (kg/kg) + Qa_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope specific humidity (kg/kg) + Qref_iso (nx_block,ny_block,icepack_max_iso,max_blocks), & ! 2m atm reference isotope spec humidity (kg/kg) + fiso_atm (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope deposition rate (kg/m^2 s) + fiso_evap (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope evaporation rate (kg/m^2 s) + fiso_ocn (nx_block,ny_block,icepack_max_iso,max_blocks), & ! isotope flux to ocean (kg/m^2/s) + faero_atm (nx_block,ny_block,icepack_max_aero,max_blocks), & ! aerosol deposition rate (kg/m^2 s) faero_ocn (nx_block,ny_block,icepack_max_aero,max_blocks), & ! aerosol flux to ocean (kg/m^2/s) - zaeros (nx_block,ny_block,icepack_max_aero,max_blocks), & ! ocean aerosols (mmol/m^3) + zaeros (nx_block,ny_block,icepack_max_aero,max_blocks), & ! ocean aerosols (mmol/m^3) flux_bio_atm(nx_block,ny_block,icepack_max_nbtrcr,max_blocks), & ! all bio fluxes to ice from atmosphere flux_bio (nx_block,ny_block,icepack_max_nbtrcr,max_blocks), & ! 
all bio fluxes to ocean flux_bio_ai (nx_block,ny_block,icepack_max_nbtrcr,max_blocks), & ! all bio fluxes to ocean, averaged over grid cell algalN (nx_block,ny_block,icepack_max_algae,max_blocks), & ! ocean algal nitrogen (mmol/m^3) (diatoms, pico, phaeo) - falgalN (nx_block,ny_block,icepack_max_algae,max_blocks), & ! ice-ocn algalN flux (mmol/m^2/s) (diatoms, pico, phaeo) + falgalN (nx_block,ny_block,icepack_max_algae,max_blocks), & ! ice-ocean algal nitrogen flux (mmol/m^2/s) (diatoms, pico, phaeo) doc (nx_block,ny_block,icepack_max_doc,max_blocks), & ! ocean doc (mmol/m^3) (saccharids, lipids, tbd ) fdoc (nx_block,ny_block,icepack_max_doc,max_blocks), & ! ice-ocean doc flux (mmol/m^2/s) (saccharids, lipids, tbd) don (nx_block,ny_block,icepack_max_don,max_blocks), & ! ocean don (mmol/m^3) (proteins and amino acids) fdon (nx_block,ny_block,icepack_max_don,max_blocks), & ! ice-ocean don flux (mmol/m^2/s) (proteins and amino acids) - dic (nx_block,ny_block,icepack_max_dic,max_blocks), & ! ocean dic (mmol/m^3) - fdic (nx_block,ny_block,icepack_max_dic,max_blocks), & ! ice-ocean dic flux (mmol/m^2/s) - fed (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ocean dissolved fe (nM) - fep (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ocean particulate fe (nM) - ffed (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ice-ocean dissolved fe flux (umol/m^2/s) - ffep (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ice-ocean particulate fe flux (umol/m^2/s) + dic (nx_block,ny_block,icepack_max_dic,max_blocks), & ! ocean dic (mmol/m^3) + fdic (nx_block,ny_block,icepack_max_dic,max_blocks), & ! ice-ocean dic flux (mmol/m^2/s) + fed (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ocean dissolved fe (nM) + fep (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ocean particulate fe (nM) + ffed (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ice-ocean dissolved fe flux (umol/m^2/s) + ffep (nx_block,ny_block,icepack_max_fe, max_blocks), & ! ice-ocean particulate fe flux (umol/m^2/s) stat=ierr) if (ierr/=0) call abort_ice('(alloc_flux_bgc): Out of memory') @@ -208,10 +214,10 @@ subroutine bgcflux_ice_to_ocn(nx_block, & ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & i,j , & ! horizontal indices k ! tracer index - + logical (kind=log_kind) :: & skl_bgc, solve_zbgc, & tr_bgc_Nit, tr_bgc_N, & @@ -220,14 +226,14 @@ subroutine bgcflux_ice_to_ocn(nx_block, & integer (kind=int_kind) :: & nlt_bgc_Nit, nlt_bgc_Am, & - nlt_bgc_Sil, nlt_bgc_DMSPd, nlt_bgc_DMS, nlt_bgc_hum + nlt_bgc_Sil, nlt_bgc_DMSPd, nlt_bgc_DMS, nlt_bgc_hum integer (kind=int_kind), dimension(icepack_max_algae) :: & nlt_bgc_N, nlt_bgc_C ! algae integer (kind=int_kind), dimension(icepack_max_doc) :: & nlt_bgc_DOC ! disolved organic carbon integer (kind=int_kind), dimension(icepack_max_don) :: & - nlt_bgc_DON ! + nlt_bgc_DON ! integer (kind=int_kind), dimension(icepack_max_dic) :: & nlt_bgc_DIC ! 
disolved inorganic carbon integer (kind=int_kind), dimension(icepack_max_fe) :: & diff --git a/cicecore/cicedyn/general/ice_forcing.F90 b/cicecore/cicedyn/general/ice_forcing.F90 index b977f54aa..4c88037ed 100755 --- a/cicecore/cicedyn/general/ice_forcing.F90 +++ b/cicecore/cicedyn/general/ice_forcing.F90 @@ -19,21 +19,19 @@ module ice_forcing use ice_kinds_mod - use ice_boundary, only: ice_HaloUpdate use ice_blocks, only: nx_block, ny_block - use ice_domain, only: halo_info - use ice_domain_size, only: ncat, max_blocks, nx_global, ny_global, nfreq + use ice_domain_size, only: ncat, max_blocks, nx_global, ny_global use ice_communicate, only: my_task, master_task - use ice_calendar, only: istep, istep1, & - msec, mday, mmonth, myear, yday, daycal, & - daymo, days_per_year, compute_days_between + use ice_calendar, only: istep, istep1, time, time_forc, & + sec, mday, month, nyr, yday, daycal, dayyr, & + daymo, days_per_year, hc_jday use ice_fileunits, only: nu_diag, nu_forcing use ice_exit, only: abort_ice - use ice_read_write, only: ice_open, ice_read, ice_check_nc, & + use ice_read_write, only: ice_open, ice_read, & ice_get_ncvarsize, ice_read_vec_nc, & ice_open_nc, ice_read_nc, ice_close_nc use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite, & - timer_bound, timer_forcing + timer_bound use ice_arrays_column, only: oceanmixed_ice, restore_bgc use ice_constants, only: c0, c1, c2, c3, c4, c5, c8, c10, c12, c15, c20, & c180, c360, c365, c1000, c3600 @@ -43,7 +41,7 @@ module ice_forcing field_type_vector, field_loc_NEcorner use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_sea_freezing_temperature - use icepack_intfc, only: icepack_init_wave, icepack_init_parameters + use icepack_intfc, only: icepack_init_wave use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_parameters implicit none @@ -52,17 +50,16 @@ module ice_forcing get_forcing_atmo, get_forcing_ocn, get_wave_spec, & read_clim_data, read_clim_data_nc, & interpolate_data, interp_coeff_monthly, & - read_data_nc_point, interp_coeff, & - init_snowtable + read_data_nc_point, interp_coeff integer (kind=int_kind), public :: & - ycycle , & ! number of years in forcing cycle, set by namelist - fyear_init , & ! first year of data in forcing cycle, set by namelist - fyear , & ! current year in forcing cycle, varying during the run - fyear_final ! last year in cycle, computed at init + ycycle , & ! number of years in forcing cycle + fyear_init , & ! first year of data in forcing cycle + fyear , & ! current year in forcing cycle + fyear_final ! last year in cycle character (char_len_long) :: & ! input data file names - uwind_file, & ! this is also used a generic file containing all fields for JRA55 + uwind_file, & vwind_file, & wind_file, & strax_file, & @@ -76,14 +73,15 @@ module ice_forcing sst_file, & sss_file, & sublim_file, & - snow_file + snow_file character (char_len_long), dimension(:), allocatable, public :: & ! input data file names topmelt_file, & botmelt_file real (kind=dbl_kind), public :: & - c1intp, c2intp ! interpolation coefficients + c1intp, c2intp , & ! interpolation coefficients + ftime ! forcing time (for restart) integer (kind=int_kind) :: & oldrecnum = 0 , & ! 
old record number (save between steps) @@ -106,7 +104,7 @@ module ice_forcing rhoa_data, & flw_data, & sst_data, & - sss_data, & + sss_data, & uocn_data, & vocn_data, & sublim_data, & @@ -116,35 +114,29 @@ module ice_forcing topmelt_data, & botmelt_data - real (kind=dbl_kind), dimension(:,:,:,:,:), allocatable :: & - wave_spectrum_data ! field values at 2 temporal data points - - character(char_len), public :: & - atm_data_format , & ! 'bin'=binary or 'nc'=netcdf - ocn_data_format , & ! 'bin'=binary or 'nc'=netcdf - atm_data_type , & ! 'default', 'monthly', 'ncar', 'box2001' - ! 'hadgem', 'oned', 'calm', 'uniform' - ! 'JRA55' or 'JRA55do' - atm_data_version , & ! date of atm_forcing file creation - bgc_data_type , & ! 'default', 'clim' - ocn_data_type , & ! 'default', 'clim', 'ncar', 'oned', 'calm', 'box2001' - ! 'hadgem_sst' or 'hadgem_sst_uvocn', 'uniform' - ice_data_type , & ! 'latsst', 'box2001', 'boxslotcyl', etc - ice_data_conc , & ! 'p5','p8','p9','c1','parabolic', 'box2001', etc - ice_data_dist , & ! 'box2001','gauss', 'uniform', etc - precip_units ! 'mm_per_month', 'mm_per_sec', 'mks','m_per_sec' + character(char_len), public :: & + atm_data_format, & ! 'bin'=binary or 'nc'=netcdf + ocn_data_format, & ! 'bin'=binary or 'nc'=netcdf + atm_data_type, & ! 'default', 'monthly', 'ncar', + ! 'LYq' or 'hadgem' or 'oned' or + ! 'JRA55_gx1' or 'JRA55_gx3' or 'JRA55_tx1' + bgc_data_type, & ! 'default', 'clim' + ocn_data_type, & ! 'default', 'clim', 'ncar', 'oned', + ! 'hadgem_sst' or 'hadgem_sst_uvocn' + ice_data_type, & ! 'default', 'box2001', 'boxslotcyl' + precip_units ! 'mm_per_month', 'mm_per_sec', 'mks','m_per_sec' logical (kind=log_kind), public :: & rotate_wind ! rotate wind/stress to computational grid from true north directed - - character(char_len_long), public :: & + + character(char_len_long), public :: & atm_data_dir , & ! top directory for atmospheric data ocn_data_dir , & ! top directory for ocean data wave_spec_dir, & ! dir name for wave spectrum wave_spec_file,& ! file name for wave spectrum oceanmixed_file ! file name for ocean forcing data - integer (kind=int_kind), parameter :: & + integer (kind=int_kind), parameter :: & nfld = 8 ! number of fields to search for in forcing file ! as in the dummy atm (latm) @@ -163,11 +155,11 @@ module ice_forcing integer (kind=int_kind), public :: & trestore ! restoring time scale (days) - real (kind=dbl_kind), public :: & + real (kind=dbl_kind), public :: & trest ! restoring time scale (sec) logical (kind=log_kind), public :: & - debug_forcing ! prints forcing debugging output if true + dbug ! prints debugging output if true real (dbl_kind), dimension(:), allocatable, public :: & jday_atm ! jday time vector from atm forcing files @@ -175,38 +167,16 @@ module ice_forcing integer (kind=int_kind), public :: & Njday_atm ! Number of atm forcing timesteps - character (len=char_len_long), public :: & - snw_filename ! filename for snow lookup table - - character (char_len), public :: & - snw_rhos_fname , & ! snow table 1d rhos field name - snw_Tgrd_fname , & ! snow table 1d Tgrd field name - snw_T_fname , & ! snow table 1d T field name - snw_tau_fname , & ! snow table 3d tau field name - snw_kappa_fname, & ! snow table 3d kappa field name - snw_drdt0_fname ! snow table 3d drdt0 field name - - ! PRIVATE: - - real (dbl_kind), parameter :: & - mixed_layer_depth_default = c20 ! default mixed layer depth in m - - logical (kind=log_kind), parameter :: & - local_debug = .false. ! 
local debug flag - !======================================================================= contains !======================================================================= ! -! Allocate space for all variables +! Allocate space for all variables ! subroutine alloc_forcing integer (int_kind) :: ierr - character(len=*), parameter :: subname = '(alloc_forcing)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' allocate ( & cldf(nx_block,ny_block, max_blocks), & ! cloud fraction @@ -233,7 +203,6 @@ subroutine alloc_forcing ocn_frc_m(nx_block,ny_block, max_blocks,nfld,12), & ! ocn data for 12 months topmelt_file(ncat), & botmelt_file(ncat), & - wave_spectrum_data(nx_block,ny_block,nfreq,2,max_blocks), & stat=ierr) if (ierr/=0) call abort_ice('(alloc_forcing): Out of Memory') @@ -252,20 +221,14 @@ subroutine init_forcing_atmo use ice_calendar, only: use_leap_years - integer (kind=int_kind) :: modadj ! adjustment for mod function character(len=*), parameter :: subname = '(init_forcing_atmo)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + ! Allocate forcing arrays + call alloc_forcing() - modadj = abs((min(0,myear-fyear_init)/ycycle+1)*ycycle) - fyear = fyear_init + mod(myear-fyear_init+modadj,ycycle) + fyear = fyear_init + mod(nyr-1,ycycle) ! current year fyear_final = fyear_init + ycycle - 1 ! last year in forcing cycle - if (local_debug .and. my_task == master_task) then - write(nu_diag,*) subname,'fdbg fyear = ',fyear,fyear_init,fyear_final - write(nu_diag,*) subname,'fdbg atm_data_type = ',trim(atm_data_type) - endif - if (trim(atm_data_type) /= 'default' .and. & my_task == master_task) then write (nu_diag,*) ' Initial forcing data year = ',fyear_init @@ -282,52 +245,43 @@ subroutine init_forcing_atmo file=__FILE__, line=__LINE__) endif - if (use_leap_years .and. (index(trim(atm_data_type),'JRA55') == 0 .and. & - trim(atm_data_type) /= 'hycom' .and. & - trim(atm_data_type) /= 'box2001')) then + if (use_leap_years .and. (trim(atm_data_type) /= 'JRA55_gx1' .and. & + trim(atm_data_type) /= 'JRA55_gx3' .and. & + trim(atm_data_type) /= 'JRA55_tx1' .and. & + trim(atm_data_type) /= 'hycom' .and. & + trim(atm_data_type) /= 'box2001')) then write(nu_diag,*) 'use_leap_years option is currently only supported for' - write(nu_diag,*) 'JRA55, JRA55do, default , and box2001 atmospheric data' + write(nu_diag,*) 'JRA55, default , and box2001 atmospheric data' call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) endif !------------------------------------------------------------------- - ! Get filenames for input forcing data + ! Get filenames for input forcing data !------------------------------------------------------------------- ! 
default forcing values from init_flux_atm if (trim(atm_data_type) == 'ncar') then call NCAR_files(fyear) - elseif (index(trim(atm_data_type),'JRA55') > 0) then - call JRA55_files(fyear) + elseif (trim(atm_data_type) == 'LYq') then + call LY_files(fyear) + elseif (trim(atm_data_type) == 'JRA55_gx1') then + call JRA55_gx1_files(fyear) + elseif (trim(atm_data_type) == 'JRA55_gx3') then + call JRA55_gx3_files(fyear) + elseif (trim(atm_data_type) == 'JRA55_tx1') then + call JRA55_tx1_files(fyear) elseif (trim(atm_data_type) == 'hadgem') then call hadgem_files(fyear) elseif (trim(atm_data_type) == 'monthly') then call monthly_files(fyear) elseif (trim(atm_data_type) == 'oned') then call oned_files - elseif (trim(atm_data_type) == 'ISPOL') then + elseif (trim(atm_data_type) == 'ISPOL') then call ISPOL_files elseif (trim(atm_data_type) == 'box2001') then - call box2001_data_atm - elseif (trim(atm_data_type) == 'uniform_northeast') then - call uniform_data_atm('NE') - elseif (trim(atm_data_type) == 'uniform_north') then - call uniform_data_atm('N') - elseif (trim(atm_data_type) == 'uniform_east') then - call uniform_data_atm('E') - elseif (trim(atm_data_type) == 'uniform_south') then - call uniform_data_atm('S') - elseif (trim(atm_data_type) == 'uniform_west') then - call uniform_data_atm('W') - elseif (trim(atm_data_type) == 'calm') then - call uniform_data_atm('N',c0) ! direction does not matter when c0 + call box2001_data elseif (trim(atm_data_type) == 'hycom') then call hycom_atm_files - elseif (trim(atm_data_type) == 'default') then - ! don't need to do anything more - else - call abort_ice (error_message=subname//' ERROR atm_data_type unknown = '// & - trim(atm_data_type), file=__FILE__, line=__LINE__) endif end subroutine init_forcing_atmo @@ -336,13 +290,13 @@ end subroutine init_forcing_atmo subroutine init_forcing_ocn(dt) -! Set sea surface salinity and freezing temperature to annual mean value +! Set sea surface salinity and freezing temperature to annual mean value ! using a 12-month climatology. ! Read sst data for current month, and adjust sst based on freezing ! temperature. No interpolation in time. -! Note: SST is subsequently prognosed if CICE is run -! with a mixed layer ocean (oceanmixed_ice = T), and can be +! Note: SST is subsequently prognosed if CICE is run +! with a mixed layer ocean (oceanmixed_ice = T), and can be ! restored to data (restore_ocn = T). use ice_blocks, only: nx_block, ny_block @@ -358,14 +312,14 @@ subroutine init_forcing_ocn(dt) integer (kind=int_kind) :: & i, j, iblk , & ! horizontal indices k , & ! month index - fid , & ! file id for netCDF file + fid , & ! file id for netCDF file nbits logical (kind=log_kind) :: diag real (kind=dbl_kind) :: secday - character (char_len) :: & + character (char_len) :: & fieldname ! field name in netcdf file real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & @@ -373,19 +327,15 @@ subroutine init_forcing_ocn(dt) character(len=*), parameter :: subname = '(init_forcing_ocn)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call alloc_forcing() - - sst_data(:,:,:,:) = c0 - sss_data(:,:,:,:) = c0 - uocn_data(:,:,:,:) = c0 - vocn_data(:,:,:,:) = c0 +! sst_data(:,:,:,:) = c0 +! sss_data(:,:,:,:) = c0 +! uocn_data(:,:,:,:) = c0 +! vocn_data(:,:,:,:) = c0 nbits = 64 ! 
double precision data @@ -418,7 +368,7 @@ subroutine init_forcing_ocn(dt) sss(:,:,:) = c0 do k = 1,12 ! loop over 12 months - call ice_read (nu_forcing, k, work1, 'rda8', debug_forcing, & + call ice_read (nu_forcing, k, work1, 'rda8', dbug, & field_loc_center, field_type_scalar) !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks @@ -465,7 +415,7 @@ subroutine init_forcing_ocn(dt) if (my_task == master_task) & call ice_open (nu_forcing, sst_file, nbits) - call ice_read (nu_forcing, mmonth, sst, 'rda8', debug_forcing, & + call ice_read (nu_forcing, month, sst, 'rda8', dbug, & field_loc_center, field_type_scalar) if (my_task == master_task) close(nu_forcing) @@ -481,10 +431,13 @@ subroutine init_forcing_ocn(dt) enddo !$OMP END PARALLEL DO - elseif (trim(ocn_data_type) == 'hadgem_sst' .or. & + endif ! init_sst_data + + + if (trim(ocn_data_type) == 'hadgem_sst' .or. & trim(ocn_data_type) == 'hadgem_sst_uvocn') then - diag = .true. ! write diagnostic information + diag = .true. ! write diagnostic information sst_file = trim (ocn_data_dir)//'/MONTHLY/sst.1997.nc' @@ -496,11 +449,11 @@ subroutine init_forcing_ocn(dt) call ice_open_nc(sst_file,fid) endif - + fieldname='sst' - call ice_read_nc(fid,mmonth,fieldname,sst,diag) + call ice_read_nc(fid,month,fieldname,sst,diag) - if (my_task == master_task) call ice_close_nc(fid) + if (my_task == master_task) call ice_close_nc(fid) ! Make sure sst is not less than freezing temperature Tf !$OMP PARALLEL DO PRIVATE(iblk,i,j) @@ -513,30 +466,15 @@ subroutine init_forcing_ocn(dt) enddo !$OMP END PARALLEL DO - elseif (trim(ocn_data_type) == 'ncar') then - call ocn_data_ncar_init -! call ocn_data_ncar_init_3D + endif ! ocn_data_type - elseif (trim(ocn_data_type) == 'hycom') then - call ocn_data_hycom_init + if (trim(ocn_data_type) == 'ncar') then +! call ocn_data_ncar_init + call ocn_data_ncar_init_3D + endif - elseif (trim(ocn_data_type) == 'box2001') then - call box2001_data_ocn - - ! uniform forcing options - elseif (trim(ocn_data_type) == 'uniform_northeast') then - call uniform_data_ocn('NE',p1) - elseif (trim(ocn_data_type) == 'uniform_east') then - call uniform_data_ocn('E',p1) - elseif (trim(ocn_data_type) == 'uniform_north') then - call uniform_data_ocn('N',p1) - elseif (trim(ocn_data_type) == 'calm') then - call uniform_data_ocn('N',c0) ! directon does not matter for c0 - elseif (trim(ocn_data_type) == 'default') then - ! don't need to do anything more - else - call abort_ice (error_message=subname//' ERROR ocn_data_type unknown = '// & - trim(ocn_data_type), file=__FILE__, line=__LINE__) + if (trim(ocn_data_type) == 'hycom') then + call ocn_data_hycom_init endif end subroutine init_forcing_ocn @@ -561,8 +499,6 @@ subroutine ocn_freezing_temperature character(len=*), parameter :: subname = '(ocn_freezing_temperature)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block @@ -586,7 +522,8 @@ subroutine get_forcing_atmo ! Get atmospheric forcing data and interpolate as necessary use ice_blocks, only: block, get_block - use ice_domain, only: nblocks, blocks_ice + use ice_boundary, only: ice_HaloUpdate + use ice_domain, only: nblocks, blocks_ice, halo_info use ice_flux, only: Tair, fsw, flw, frain, fsnow, Qa, rhoa, & uatm, vatm, strax, stray, zlvl, wind, swvdr, swvdf, swidr, swidf, & potT, sst @@ -596,27 +533,17 @@ subroutine get_forcing_atmo integer (kind=int_kind) :: & iblk, & ! block index ilo,ihi,jlo,jhi, & ! 
beginning and end of physical domain - modadj, & ! adjustment to make mod a postive number - fyear_old, & ! fyear setting on last timestep nt_Tsfc type (block) :: & this_block ! block information for current block character(len=*), parameter :: subname = '(get_forcing_atmo)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - call ice_timer_start(timer_forcing) - - fyear_old = fyear - modadj = abs((min(0,myear-fyear_init)/ycycle+1)*ycycle) - fyear = fyear_init + mod(myear-fyear_init+modadj,ycycle) - if (trim(atm_data_type) /= 'default' .and. & - (istep <= 1 .or. fyear /= fyear_old)) then - if (my_task == master_task) then - write (nu_diag,*) ' Set current forcing data year = ',fyear - endif + + fyear = fyear_init + mod(nyr-1,ycycle) ! current year + if (trim(atm_data_type) /= 'default' .and. istep <= 1 & + .and. my_task == master_task) then + write (nu_diag,*) ' Current forcing data year = ',fyear endif call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc) @@ -624,19 +551,23 @@ subroutine get_forcing_atmo if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - !------------------------------------------------------------------- - ! Read and interpolate atmospheric data - !------------------------------------------------------------------- + ftime = time ! forcing time + time_forc = ftime ! for restarting - if (local_debug .and. my_task == master_task) then - write(nu_diag,*) subname,'fdbg fyear = ',fyear - write(nu_diag,*) subname,'fdbg atm_data_type = ',trim(atm_data_type) - endif + !------------------------------------------------------------------- + ! Read and interpolate atmospheric data + !------------------------------------------------------------------- if (trim(atm_data_type) == 'ncar') then call ncar_data - elseif (index(trim(atm_data_type),'JRA55') > 0) then - call JRA55_data + elseif (trim(atm_data_type) == 'LYq') then + call LY_data + elseif (trim(atm_data_type) == 'JRA55_gx1') then + call JRA55_data(fyear) + elseif (trim(atm_data_type) == 'JRA55_gx3') then + call JRA55_data(fyear) + elseif (trim(atm_data_type) == 'JRA55_tx1') then + call JRA55_data(fyear) elseif (trim(atm_data_type) == 'hadgem') then call hadgem_data elseif (trim(atm_data_type) == 'monthly') then @@ -644,36 +575,21 @@ subroutine get_forcing_atmo elseif (trim(atm_data_type) == 'oned') then call oned_data elseif (trim(atm_data_type) == 'box2001') then - call box2001_data_atm - elseif (trim(atm_data_type) == 'uniform_northeast') then - call uniform_data_atm('NE') - elseif (trim(atm_data_type) == 'uniform_north') then - call uniform_data_atm('N') - elseif (trim(atm_data_type) == 'uniform_east') then - call uniform_data_atm('E') - elseif (trim(atm_data_type) == 'uniform_south') then - call uniform_data_atm('S') - elseif (trim(atm_data_type) == 'uniform_west') then - call uniform_data_atm('W') - elseif (trim(atm_data_type) == 'calm') then - call uniform_data_atm('N',c0) ! direction does not matter when c0 + call box2001_data elseif (trim(atm_data_type) == 'hycom') then call hycom_atm_data - !elseif (trim(atm_data_type) == 'uniform_northeast') then - !elseif (trim(atm_data_type) == 'uniform_east') then - !elseif (trim(atm_data_type) == 'uniform_north') then else ! default values set in init_flux return endif - !------------------------------------------------------------------- - ! 
Convert forcing data to fields needed by ice model - !------------------------------------------------------------------- + !------------------------------------------------------------------- + ! Convert forcing data to fields needed by ice model + !------------------------------------------------------------------- !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -683,7 +599,7 @@ subroutine get_forcing_atmo ilo, ihi, jlo, jhi, & hm (:,:,iblk), & Tair (:,:,iblk), & - fsw (:,:,iblk), & + fsw (:,:,iblk), & cldf (:,:,iblk), & flw (:,:,iblk), & frain (:,:,iblk), & @@ -720,8 +636,6 @@ subroutine get_forcing_atmo field_loc_center, field_type_scalar) call ice_timer_stop(timer_bound) - call ice_timer_stop(timer_forcing) - end subroutine get_forcing_atmo !======================================================================= @@ -737,44 +651,21 @@ subroutine get_forcing_ocn (dt) character(len=*), parameter :: subname = '(get_forcing_ocn)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - call ice_timer_start(timer_forcing) - - if (local_debug .and. my_task == master_task) then - write(nu_diag,*) subname,'fdbg fyear = ',fyear - write(nu_diag,*) subname,'fdbg ocn_data_type = ',trim(ocn_data_type) - endif - if (trim(ocn_data_type) == 'clim') then call ocn_data_clim(dt) elseif (trim(ocn_data_type) == 'ncar' .or. & trim(ocn_data_type) == 'ISPOL') then - call ocn_data_ncar(dt) + call ocn_data_ncar(dt) elseif (trim(ocn_data_type) == 'hadgem_sst' .or. & trim(ocn_data_type) == 'hadgem_sst_uvocn') then - call ocn_data_hadgem(dt) + call ocn_data_hadgem(dt) elseif (trim(ocn_data_type) == 'oned') then call ocn_data_oned elseif (trim(ocn_data_type) == 'hycom') then ! call ocn_data_hycom(dt) !MHRI: NOT IMPLEMENTED YET - elseif (trim(ocn_data_type) == 'box2001') then - call box2001_data_ocn - ! uniform forcing options - elseif (trim(ocn_data_type) == 'uniform_northeast') then -! tcraig, not time varying - call uniform_data_ocn('NE',p1) - elseif (trim(ocn_data_type) == 'uniform_east') then - call uniform_data_ocn('E',p1) - elseif (trim(ocn_data_type) == 'uniform_north') then - call uniform_data_ocn('N',p1) - elseif (trim(ocn_data_type) == 'calm') then - call uniform_data_ocn('N',c0) ! directon does not matter for c0 endif - call ice_timer_stop(timer_forcing) - end subroutine get_forcing_ocn !======================================================================= @@ -799,7 +690,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & ! data is missing, and we assume periodicity when monthly data ! is missing. - use ice_diagnostics, only: debug_model_step + use ice_diagnostics, only: check_step logical (kind=log_kind), intent(in) :: flag @@ -831,15 +722,13 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call ice_timer_start(timer_readwrite) ! reading/writing nbits = 64 ! double precision data - if (istep1 > debug_model_step) debug_forcing = .true. !! debugging + if (istep1 > check_step) dbug = .true. !! debugging - if (my_task==master_task .and. (debug_forcing)) then + if (my_task==master_task .and. 
(dbug)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -877,7 +766,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = 1 nrec = recd + n2 call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', debug_forcing, field_loc, field_type) + 'rda8', dbug, field_loc, field_type) if (ixx==1 .and. my_task == master_task) close(nu_forcing) endif ! ixm ne -99 @@ -889,7 +778,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + ixx call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', debug_forcing, field_loc, field_type) + 'rda8', dbug, field_loc, field_type) if (ixp /= -99) then ! currently in latter half of data interval @@ -914,7 +803,7 @@ subroutine read_data (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + n4 call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', debug_forcing, field_loc, field_type) + 'rda8', dbug, field_loc, field_type) endif ! ixp /= -99 if (my_task == master_task) close(nu_forcing) @@ -949,7 +838,7 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & ! ! Adapted by Alison McLaren, Met Office from read_data - use ice_diagnostics, only: debug_model_step + use ice_diagnostics, only: check_step logical (kind=log_kind), intent(in) :: flag @@ -967,14 +856,16 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & fieldname ! field name in netCDF file integer (kind=int_kind), intent(in) :: & - field_loc, & ! location of field on staggered grid - field_type ! type of field (scalar, vector, angle) + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) real (kind=dbl_kind), dimension(nx_block,ny_block,2,max_blocks), intent(out) :: & field_data ! 2 values needed for interpolation ! local variables + character(len=*), parameter :: subname = '(read_data_nc)' + integer (kind=int_kind) :: & nrec , & ! record number to read n2, n4 , & ! like ixm and ixp, but @@ -982,15 +873,11 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & arg , & ! value of time argument in field_data fid ! file id for netCDF routines - character(len=*), parameter :: subname = '(read_data_nc)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > debug_model_step) debug_forcing = .true. !! debugging + if (istep1 > check_step) dbug = .true. !! debugging - if (my_task==master_task .and. (debug_forcing)) then + if (my_task==master_task .and. (dbug)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -1028,8 +915,8 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & arg = 1 nrec = recd + n2 - call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & + call ice_read_nc & + (fid, nrec, fieldname, field_data(:,:,arg,:), dbug, & field_loc, field_type) if (ixx==1) call ice_close_nc(fid) @@ -1042,8 +929,8 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + ixx - call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & + call ice_read_nc & + (fid, nrec, fieldname, field_data(:,:,arg,:), dbug, & field_loc, field_type) if (ixp /= -99) then @@ -1068,8 +955,8 @@ subroutine read_data_nc (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + n4 - call ice_read_nc & - (fid, nrec, fieldname, field_data(:,:,arg,:), debug_forcing, & + call ice_read_nc & + (fid, nrec, fieldname, field_data(:,:,arg,:), dbug, & field_loc, field_type) endif ! 
ixp /= -99 @@ -1095,7 +982,7 @@ subroutine read_data_nc_hycom (flag, recd, & ! ! Adapted by Mads Hvid Ribergaard, DMI from read_data_nc - use ice_diagnostics, only: debug_model_step + use ice_diagnostics, only: check_step use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite logical (kind=log_kind), intent(in) :: flag @@ -1120,15 +1007,11 @@ subroutine read_data_nc_hycom (flag, recd, & integer (kind=int_kind) :: & fid ! file id for netCDF routines - character(len=*), parameter :: subname = '(read_data_nc_hycom)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > debug_model_step) debug_forcing = .true. !! debugging + if (istep1 > check_step) dbug = .true. !! debugging - if (my_task==master_task .and. (debug_forcing)) then + if (my_task==master_task .and. (dbug)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -1139,11 +1022,11 @@ subroutine read_data_nc_hycom (flag, recd, & ! read data !----------------------------------------------------------------- call ice_read_nc & - (fid, recd , fieldname, field_data(:,:,1,:), debug_forcing, & + (fid, recd , fieldname, field_data(:,:,1,:), dbug, & field_loc, field_type) call ice_read_nc & - (fid, recd+1, fieldname, field_data(:,:,2,:), debug_forcing, & + (fid, recd+1, fieldname, field_data(:,:,2,:), dbug, & field_loc, field_type) call ice_close_nc(fid) @@ -1165,7 +1048,7 @@ subroutine read_clim_data (readflag, recd, ixm, ixx, ixp, & ! no need to get data from other years or to extrapolate data beyond ! the forcing time period. - use ice_diagnostics, only: debug_model_step + use ice_diagnostics, only: check_step logical (kind=log_kind),intent(in) :: readflag @@ -1192,15 +1075,13 @@ subroutine read_clim_data (readflag, recd, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_clim_data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call ice_timer_start(timer_readwrite) ! reading/writing nbits = 64 ! double precision data - if (istep1 > debug_model_step) debug_forcing = .true. !! debugging + if (istep1 > check_step) dbug = .true. !! debugging - if (my_task==master_task .and. (debug_forcing)) & + if (my_task==master_task .and. (dbug)) & write(nu_diag,*) ' ', trim(data_file) if (readflag) then @@ -1216,19 +1097,19 @@ subroutine read_clim_data (readflag, recd, ixm, ixx, ixp, & arg = 1 nrec = recd + ixm call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', debug_forcing, field_loc, field_type) + 'rda8', dbug, field_loc, field_type) endif arg = arg + 1 nrec = recd + ixx call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', debug_forcing, field_loc, field_type) + 'rda8', dbug, field_loc, field_type) if (ixp /= -99) then arg = arg + 1 nrec = recd + ixp call ice_read (nu_forcing, nrec, field_data(:,:,arg,:), & - 'rda8', debug_forcing, field_loc, field_type) + 'rda8', dbug, field_loc, field_type) endif if (my_task == master_task) close (nu_forcing) @@ -1249,7 +1130,7 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & ! no need to get data from other years or to extrapolate data beyond ! the forcing time period. - use ice_diagnostics, only: debug_model_step + use ice_diagnostics, only: check_step logical (kind=log_kind),intent(in) :: readflag @@ -1279,13 +1160,11 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & character(len=*), parameter :: subname = '(read_clim_data_nc)' - if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call ice_timer_start(timer_readwrite) ! reading/writing - if (istep1 > debug_model_step) debug_forcing = .true. !! debugging + if (istep1 > check_step) dbug = .true. !! debugging - if (my_task==master_task .and. (debug_forcing)) & + if (my_task==master_task .and. (dbug)) & write(nu_diag,*) ' ', trim(data_file) if (readflag) then @@ -1300,23 +1179,23 @@ subroutine read_clim_data_nc (readflag, recd, ixm, ixx, ixp, & if (ixm /= -99) then arg = 1 nrec = recd + ixm - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - debug_forcing, field_loc, field_type) + dbug, field_loc, field_type) endif arg = arg + 1 nrec = recd + ixx - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - debug_forcing, field_loc, field_type) + dbug, field_loc, field_type) if (ixp /= -99) then arg = arg + 1 nrec = recd + ixp - call ice_read_nc & + call ice_read_nc & (fid, nrec, fieldname, field_data(:,:,arg,:), & - debug_forcing, field_loc, field_type) + dbug, field_loc, field_type) endif if (my_task == master_task) call ice_close_nc (fid) @@ -1339,16 +1218,14 @@ subroutine interp_coeff_monthly (recslot) real (kind=dbl_kind) :: & secday , & ! seconds in day - tt , & ! days elapsed in current year - t1, t2 ! days elapsed at month midpoint + tt , & ! seconds elapsed in current year + t1, t2 ! seconds elapsed at month midpoint real (kind=dbl_kind) :: & daymid(0:13) ! month mid-points character(len=*), parameter :: subname = '(interp_coeff_monthly)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -1357,27 +1234,21 @@ subroutine interp_coeff_monthly (recslot) daymid(1:13) = 14._dbl_kind ! time frame ends 0 sec into day 15 daymid(0) = 14._dbl_kind - daymo(12) ! Dec 15, 0 sec - ! compute days since Jan 1, 00h, yday is the day counter for the year - tt = real(yday-1,kind=dbl_kind) + real(msec,kind=dbl_kind)/secday + ! make time cyclic + tt = mod(ftime/secday,dayyr) ! Find neighboring times if (recslot==2) then ! first half of month - t2 = daycal(mmonth) + daymid(mmonth) ! midpoint, current month - if (mmonth == 1) then + t2 = daycal(month) + daymid(month) ! midpoint, current month + if (month == 1) then t1 = daymid(0) ! Dec 15 (0 sec) else - t1 = daycal(mmonth-1) + daymid(mmonth-1) ! midpoint, previous month + t1 = daycal(month-1) + daymid(month-1) ! midpoint, previous month endif else ! second half of month - t1 = daycal(mmonth) + daymid(mmonth) ! midpoint, current month - t2 = daycal(mmonth+1) + daymid(mmonth+1)! day 15 of next month (0 sec) - endif - - if (tt < t1 .or. tt > t2) then - write(nu_diag,*) subname,' ERROR in tt',tt,t1,t2 - call abort_ice (error_message=subname//' ERROR in tt', & - file=__FILE__, line=__LINE__) + t1 = daycal(month) + daymid(month) ! midpoint, current month + t2 = daycal(month+1) + daymid(month+1)! day 15 of next month (0 sec) endif ! Compute coefficients @@ -1407,7 +1278,8 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) ! local variables real (kind=dbl_kind) :: & - secday ! seconds in a day + secday, & ! seconds in a day + secyr ! seconds in a year real (kind=dbl_kind) :: & tt , & ! 
seconds elapsed in current year @@ -1416,15 +1288,13 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) character(len=*), parameter :: subname = '(interp_coeff)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - ! compute seconds since Jan 1, 00h, yday is the day counter for the year - tt = real(yday-1,kind=dbl_kind)*secday + real(msec,kind=dbl_kind) + secyr = dayyr * secday ! seconds in a year + tt = mod(ftime,secyr) ! Find neighboring times rcnum = real(recnum,kind=dbl_kind) @@ -1438,7 +1308,7 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) else ! recslot = 1 if (dataloc==1) then ! data located at middle of interval t1 = (rcnum-p5)*secint - else + else t1 = rcnum*secint ! data located at end of interval endif t2 = t1 + secint ! + 1 interval @@ -1448,12 +1318,6 @@ subroutine interp_coeff (recnum, recslot, secint, dataloc) c1intp = abs((t2 - tt) / (t2 - t1)) c2intp = c1 - c1intp - if (local_debug .and. my_task == master_task) then - write(nu_diag,*) subname,'fdbg yday,sec = ',yday,msec - write(nu_diag,*) subname,'fdbg tt = ',tt - write(nu_diag,*) subname,'fdbg c12intp = ',c1intp,c2intp - endif - end subroutine interp_coeff !======================================================================= @@ -1467,9 +1331,6 @@ subroutine interp_coeff2 (tt, t1, t2) real (kind=dbl_kind), intent(in) :: & tt , & ! current decimal daynumber t1, t2 ! first+last decimal daynumber - character(len=*), parameter :: subname = '(interp_coeff2)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' ! Compute coefficients c1intp = abs((t2 - tt) / (t2 - t1)) @@ -1499,8 +1360,6 @@ subroutine interpolate_data (field_data, field) character(len=*), parameter :: subname = '(interpolate data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block @@ -1514,44 +1373,6 @@ subroutine interpolate_data (field_data, field) end subroutine interpolate_data -!======================================================================= - - subroutine interpolate_wavespec_data (field_data, field) - -! Linear interpolation - -! author: Elizabeth C. Hunke, LANL - - use ice_domain, only: nblocks - - real (kind=dbl_kind), dimension(nx_block,ny_block,nfreq,2,max_blocks), intent(in) :: & - field_data ! 2 values used for interpolation - - real (kind=dbl_kind), dimension(nx_block,ny_block,nfreq,max_blocks), intent(out) :: & - field ! interpolated field - - ! local variables - - integer (kind=int_kind) :: i,j, iblk, freq - - character(len=*), parameter :: subname = '(interpolate data)' - - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - do freq = 1, nfreq - field(i,j,freq,iblk) = c1intp * field_data(i,j,freq,1,iblk) & - + c2intp * field_data(i,j,freq,2,iblk) - enddo - enddo - enddo - enddo - !$OMP END PARALLEL DO - - end subroutine interpolate_wavespec_data - - !======================================================================= subroutine file_year (data_file, yr) @@ -1570,13 +1391,19 @@ subroutine file_year (data_file, yr) character(len=*), parameter :: subname = '(file_year)' - if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' - if (trim(atm_data_type) == 'hadgem') then ! netcdf i = index(data_file,'.nc') - 5 tmpname = data_file write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' - elseif (index(trim(atm_data_type),'JRA55') > 0) then ! netcdf + elseif (trim(atm_data_type) == 'JRA55_gx1') then ! netcdf + i = index(data_file,'.nc') - 5 + tmpname = data_file + write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' + elseif (trim(atm_data_type) == 'JRA55_gx3') then ! netcdf + i = index(data_file,'.nc') - 5 + tmpname = data_file + write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' + elseif (trim(atm_data_type) == 'JRA55_tx1') then ! netcdf i = index(data_file,'.nc') - 5 tmpname = data_file write(data_file,'(a,i4.4,a)') tmpname(1:i), yr, '.nc' @@ -1593,7 +1420,7 @@ end subroutine file_year subroutine prepare_forcing (nx_block, ny_block, & ilo, ihi, jlo, jhi, & hm, & - Tair, fsw, & + Tair, fsw, & cldf, flw, & frain, fsnow, & Qa, rhoa, & @@ -1616,7 +1443,7 @@ subroutine prepare_forcing (nx_block, ny_block, & sst , & ! sea surface temperature aice , & ! ice area fraction hm ! land mask - + real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout) :: & fsw , & ! incoming shortwave radiation (W/m^2) cldf , & ! cloud fraction @@ -1650,8 +1477,6 @@ subroutine prepare_forcing (nx_block, ny_block, & character(len=*), parameter :: subname = '(prepare_forcing)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(Tffresh_out=Tffresh, puny_out=puny) call icepack_query_parameters(secday_out=secday) call icepack_query_parameters(calc_strair_out=calc_strair) @@ -1673,7 +1498,7 @@ subroutine prepare_forcing (nx_block, ny_block, & rhoa (i,j) = max(rhoa(i,j),c0) Qa (i,j) = max(Qa(i,j),c0) -! if (rhoa(i,j) .lt. puny) rhoa(i,j) = 1.3_dbl_kind +! if (rhoa(i,j) .lt. puny) rhoa(i,j) = 1.3_dbl_kind ! if (Tair(i,j) .lt. puny) Tair(i,j) = Tffresh ! if (Qa(i,j) .lt. puny) Qa(i,j) = 0.0035_dbl_kind enddo ! i @@ -1701,12 +1526,28 @@ subroutine prepare_forcing (nx_block, ny_block, & enddo enddo - elseif (trim(atm_data_type) == 'oned') then ! rectangular grid + elseif (trim(atm_data_type) == 'LYq') then - ! precip is in kg/m^2/s + ! precip is in mm/s zlvl0 = c10 + do j = jlo, jhi + do i = ilo, ihi + ! longwave based on Rosati and Miyakoda, JPO 18, p. 1607 (1988) + call longwave_rosati_miyakoda(cldf(i,j), Tsfc(i,j), & + aice(i,j), sst(i,j), & + Qa(i,j), Tair(i,j), & + hm(i,j), flw(i,j)) + enddo + enddo + + elseif (trim(atm_data_type) == 'oned') then ! rectangular grid + + ! precip is in kg/m^2/s + + zlvl0 = c10 + do j = jlo, jhi do i = ilo, ihi @@ -1734,11 +1575,11 @@ subroutine prepare_forcing (nx_block, ny_block, & ! convert precipitation units to kg/m^2 s if (trim(precip_units) == 'mm_per_month') then - precip_factor = c12/(secday*real(days_per_year,kind=dbl_kind)) + precip_factor = c12/(secday*days_per_year) elseif (trim(precip_units) == 'mm_per_day') then precip_factor = c1/secday elseif (trim(precip_units) == 'mm_per_sec' .or. & - trim(precip_units) == 'mks') then + trim(precip_units) == 'mks') then precip_factor = c1 ! mm/sec = kg/m^2 s elseif (trim(precip_units) == 'm_per_sec') then precip_factor = c1000 @@ -1755,20 +1596,20 @@ subroutine prepare_forcing (nx_block, ny_block, & swvdf(i,j) = fsw(i,j)*frcvdf ! visible diffuse swidr(i,j) = fsw(i,j)*frcidr ! near IR direct swidf(i,j) = fsw(i,j)*frcidf ! near IR diffuse - + ! 
convert precipitation units to kg/m^2 s fsnow(i,j) = fsnow(i,j) * precip_factor enddo ! i enddo ! j ! determine whether precip is rain or snow - ! HadGEM forcing provides separate snowfall and rainfall rather + ! HadGEM forcing provides separate snowfall and rainfall rather ! than total precipitation if (trim(atm_data_type) /= 'hadgem') then do j = jlo, jhi do i = ilo, ihi - frain(i,j) = c0 + frain(i,j) = c0 if (Tair(i,j) >= Tffresh) then frain(i,j) = fsnow(i,j) fsnow(i,j) = c0 @@ -1791,8 +1632,8 @@ subroutine prepare_forcing (nx_block, ny_block, & ! then interpolate to the U-cell centers (otherwise we ! interpolate across the pole). ! Use ANGLET which is on the T grid ! - ! Atmo variables are needed in T cell centers in subroutine - ! atmo_boundary_layer, and are interpolated to the U grid later as + ! Atmo variables are needed in T cell centers in subroutine + ! atmo_boundary_layer, and are interpolated to the U grid later as ! necessary. !----------------------------------------------------------------- workx = uatm(i,j) ! wind velocity, m/s @@ -1840,12 +1681,12 @@ subroutine longwave_parkinson_washington(Tair, cldf, flw) ! (for now) ! Parkinson, C. L. and W. M. Washington (1979), ! Large-scale numerical-model of sea ice, - ! JGR, 84, 311-337, doi:10.1029/JC084iC01p00311 + ! JGR, 84, 311-337, doi:10.1029/JC084iC01p00311 real(kind=dbl_kind), intent(in) :: & Tair , & ! air temperature (K) cldf ! cloud fraction - + real(kind=dbl_kind), intent(out) :: & flw ! incoming longwave radiation (W/m^2) @@ -1854,19 +1695,17 @@ subroutine longwave_parkinson_washington(Tair, cldf, flw) character(len=*), parameter :: subname = '(longwave_parkinson_washington)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(Tffresh_out=Tffresh, & stefan_boltzmann_out=stefan_boltzmann) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - + flw = stefan_boltzmann*Tair**4 & * (c1 - 0.261_dbl_kind & * exp(-7.77e-4_dbl_kind*(Tffresh - Tair)**2)) & * (c1 + 0.275_dbl_kind*cldf) - + end subroutine longwave_parkinson_washington !======================================================================= @@ -1876,11 +1715,11 @@ subroutine longwave_rosati_miyakoda(cldf, Tsfc, & Qa, Tair, & hm, flw) - ! based on - ! Rosati, A. and K. Miyakoda (1988), - ! A general-circulation model for upper ocean simulation, - ! J. Physical Oceanography, 18, 1601-1626, - ! doi:10.1175/1520-0485(1988)018<1601:AGCMFU>2.0.CO;2 + ! based on + ! Rosati, A. and K. Miyakoda (1988), + ! A general-circulation model for upper ocean simulation, + ! J. Physical Oceanography, 18, 1601-1626, + ! doi:10.1175/1520-0485(1988)018<1601:AGCMFU>2.0.CO;2 real(kind=dbl_kind), intent(in) :: & cldf , & ! cloud fraction @@ -1899,15 +1738,13 @@ subroutine longwave_rosati_miyakoda(cldf, Tsfc, & sstk , & ! ice/ocean surface temperature (K) rtea , & ! square root of the vapour pressure ptem , & ! potential air temperature (K) - qlwm + qlwm real(kind=dbl_kind) :: & Tffresh, stefan_boltzmann, emissivity character(len=*), parameter :: subname = '(longwave_rosati_miyakoda)' - if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(Tffresh_out=Tffresh, & stefan_boltzmann_out=stefan_boltzmann, & emissivity_out=emissivity) @@ -1926,7 +1763,7 @@ subroutine longwave_rosati_miyakoda(cldf, Tsfc, & + c4*(sstk-ptem) ) flw = emissivity*stefan_boltzmann * ( sstk**4 - qlwm ) flw = flw * hm ! land mask - + end subroutine longwave_rosati_miyakoda !======================================================================= @@ -1935,75 +1772,60 @@ end subroutine longwave_rosati_miyakoda subroutine ncar_files (yr) - ! Construct filenames based on the LANL naming conventions for NCAR data. - ! Edit for other directory structures or filenames. - ! Note: The year number in these filenames does not matter, because - ! subroutine file_year will insert the correct year. - ! Note: atm_data_dir may have NCAR_bulk or not - ! - ! atm_data_type should be 'ncar' - ! atm_dat_dir should be ${CICE_DATA_root}/forcing/$grid/[NCAR_bulk,''] - ! atm_data_dir should be set to ${CICE_DATA_root}/forcing/$grid/[JRA55,JRA55do,''] - ! NCAR_bulk at the end of the atm_data_dir is optional to provide backwards - ! compatibility and if not included, will be appended automaticaly. - ! The grid is typically gx1, gx3, tx1, or similar. +! Construct filenames based on the LANL naming conventions for NCAR data. +! Edit for other directory structures or filenames. +! Note: The year number in these filenames does not matter, because +! subroutine file_year will insert the correct year. integer (kind=int_kind), intent(in) :: & yr ! current forcing year - character (char_len_long) :: & - atm_data_dir_extra ! atm_dat_dir extra if needed - - integer (kind=int_kind) :: & - strind ! string index - character(len=*), parameter :: subname = '(ncar_files)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - ! 
decide whether NCAR_bulk is part of atm_data_dir and set atm_data_dir_extra - atm_data_dir_extra = '/NCAR_bulk' - strind = index(trim(atm_data_dir),'NCAR_bulk') - if (strind > 0) then - atm_data_dir_extra = '' - endif - - fsw_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/MONTHLY/swdn.1996.dat' + fsw_file = & + trim(atm_data_dir)//'/MONTHLY/swdn.1996.dat' call file_year(fsw_file,yr) - flw_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/MONTHLY/cldf.1996.dat' + flw_file = & + trim(atm_data_dir)//'/MONTHLY/cldf.1996.dat' call file_year(flw_file,yr) - rain_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/MONTHLY/prec.1996.dat' + rain_file = & + trim(atm_data_dir)//'/MONTHLY/prec.1996.dat' call file_year(rain_file,yr) - uwind_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/u_10.1996.dat' + uwind_file = & + trim(atm_data_dir)//'/4XDAILY/u_10.1996.dat' call file_year(uwind_file,yr) - vwind_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/v_10.1996.dat' + vwind_file = & + trim(atm_data_dir)//'/4XDAILY/v_10.1996.dat' call file_year(vwind_file,yr) - tair_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/t_10.1996.dat' + tair_file = & + trim(atm_data_dir)//'/4XDAILY/t_10.1996.dat' call file_year(tair_file,yr) - humid_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/q_10.1996.dat' + humid_file = & + trim(atm_data_dir)//'/4XDAILY/q_10.1996.dat' call file_year(humid_file,yr) - rhoa_file = trim(atm_data_dir)//trim(atm_data_dir_extra)//'/4XDAILY/dn10.1996.dat' + rhoa_file = & + trim(atm_data_dir)//'/4XDAILY/dn10.1996.dat' call file_year(rhoa_file,yr) if (my_task == master_task) then write (nu_diag,*) ' ' write (nu_diag,*) 'Forcing data year =', fyear write (nu_diag,*) 'Atmospheric data files:' - write (nu_diag,'(3a)') trim(fsw_file) - write (nu_diag,'(3a)') trim(flw_file) - write (nu_diag,'(3a)') trim(rain_file) - write (nu_diag,'(3a)') trim(uwind_file) - write (nu_diag,'(3a)') trim(vwind_file) - write (nu_diag,'(3a)') trim(tair_file) - write (nu_diag,'(3a)') trim(humid_file) - write (nu_diag,'(3a)') trim(rhoa_file) + write (nu_diag,*) trim(fsw_file) + write (nu_diag,*) trim(flw_file) + write (nu_diag,*) trim(rain_file) + write (nu_diag,*) trim(uwind_file) + write (nu_diag,*) trim(vwind_file) + write (nu_diag,*) trim(tair_file) + write (nu_diag,*) trim(humid_file) + write (nu_diag,*) trim(rhoa_file) endif ! master_task end subroutine ncar_files @@ -2031,8 +1853,6 @@ subroutine ncar_data character(len=*), parameter :: subname = '(ncar_data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -2046,12 +1866,12 @@ subroutine ncar_data !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle +! midmonth = fix(p5 * real(daymo(month))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -2068,29 +1888,29 @@ subroutine ncar_data ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. + if (istep==1 .or. 
(mday==midmonth .and. sec==0)) readm = .true. if (trim(atm_data_format) == 'bin') then - call read_data (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data (readm, 0, fyear, ixm, month, ixp, & maxrec, fsw_file, fsw_data, & field_loc_center, field_type_scalar) - call read_data (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data (readm, 0, fyear, ixm, month, ixp, & maxrec, flw_file, cldf_data, & field_loc_center, field_type_scalar) - call read_data (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data (readm, 0, fyear, ixm, month, ixp, & maxrec, rain_file, fsnow_data, & field_loc_center, field_type_scalar) else call abort_ice (error_message=subname//'nonbinary atm_data_format unavailable', & file=__FILE__, line=__LINE__) -! The routine exists, for example: -! call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & +! The routine exists, for example: +! call read_data_nc (readm, 0, fyear, ixm, month, ixp, & ! maxrec, fsw_file, 'fsw', fsw_data, & ! field_loc_center, field_type_scalar) -! call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & +! call read_data_nc (readm, 0, fyear, ixm, month, ixp, & ! maxrec, flw_file, 'cldf',cldf_data, & ! field_loc_center, field_type_scalar) -! call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & +! call read_data_nc (readm, 0, fyear, ixm, month, ixp, & ! maxrec, rain_file,'prec',fsnow_data, & ! field_loc_center, field_type_scalar) endif @@ -2113,7 +1933,7 @@ subroutine ncar_data maxrec = 1460 ! 365*4 ! current record number - recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec6hr) + recnum = 4*int(yday) - 3 + int(real(sec,kind=dbl_kind)/sec6hr) ! Compute record numbers for surrounding data @@ -2166,164 +1986,146 @@ subroutine ncar_data end subroutine ncar_data +!======================================================================= +! Large and Yeager forcing (AOMIP style) !======================================================================= - subroutine JRA55_files(yr) - - ! find the JRA55 files: - ! This subroutine finds the JRA55 atm forcing files based on settings - ! in atm_data_type and atm_data_dir. Because the filenames are not - ! entirely consistent, we need a flexible method. - ! - ! atm_data_type could be JRA55 or JRA55do with/without _grid appended - ! atm_data_dir could contain JRA55 or JRA55do or not - ! actual files could have grid in name in two location or not at all - ! - ! The files will generally be of the format - ! $atm_data_type/[JRA55,JRA55do,'']/8XDAILY/[JRA55,JRA55do][_$grid,'']_03hr_forcing[_$grid,'']_$year.nc - ! The options defined by cnt try several versions of paths/filenames - ! As a user, - ! atm_data_type should be set to JRA55, JRA55do, JRA55_xxx, or JRA55do_xxx - ! where xxx can be any set of characters. The _xxx if included will be ignored. - ! Historically, these were set to JRA55_gx1 and so forth but the _gx1 is no longer needed - ! but this is still allowed for backwards compatibility. atm_data_type_prefix - ! is atm_data_type with _ and everything after _ removed. - ! atm_data_dir should be set to ${CICE_DATA_root}/forcing/$grid/[JRA55,JRA55do,''] - ! The [JRA55,JRA55do] at the end of the atm_data_dir is optional to provide backwards - ! compatibility and if not included, will be appended automaticaly using - ! the atm_data_type_prefix value. The grid is typically gx1, gx3, tx1, or similar. - ! In general, we recommend using the following format - ! atm_data_type = [JRA55,JRA55do] - ! atm_data_dir = ${CICE_DATA_root}/forcing/$grid - - integer (kind=int_kind), intent(in) :: & - yr ! 
current forcing year - - ! local variables - character(len=16) :: & - grd ! gx3, gx1, tx1 - - character(len=64) :: & - atm_data_type_prefix ! atm_data_type prefix - - integer (kind=int_kind) :: & - cnt , & ! search for files - strind ! string index - - logical :: & - exists ! file existance - - character(len=*), parameter :: subname = '(JRA55_files)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + subroutine LY_files (yr) - ! this could be JRA55[do] or JRA55[do]_grid, drop the _grid if set - atm_data_type_prefix = trim(atm_data_type) - strind = index(trim(atm_data_type),'_') - if (strind > 0) then - atm_data_type_prefix = atm_data_type(1:strind-1) - endif +! Construct filenames based on the LANL naming conventions for CORE +! (Large and Yeager) data. +! Edit for other directory structures or filenames. +! Note: The year number in these filenames does not matter, because +! subroutine file_year will insert the correct year. - ! check for grid version using fortran INDEX intrinsic - if (index(trim(atm_data_dir),'gx1') > 0) then - grd = 'gx1' - else if (index(trim(atm_data_dir),'gx3') > 0) then - grd = 'gx3' - else if (index(trim(atm_data_dir),'tx1') > 0) then - grd = 'tx1' - else - call abort_ice(error_message=subname//' unknown grid type') - endif +! author: Elizabeth C. Hunke, LANL - ! cnt represents the possible file format options and steps thru them until one is found - exists = .false. - cnt = 1 - do while (.not.exists .and. cnt <= 6) + integer (kind=int_kind), intent(in) :: & + yr ! current forcing year - if (cnt == 1) uwind_file = trim(atm_data_dir)//'/'//trim(atm_data_type_prefix)// & - '/8XDAILY/'//trim(atm_data_type_prefix)//'_'//trim(grd)// & - '_03hr_forcing'//trim(atm_data_version)//'_2005.nc' + character(len=*), parameter :: subname = '(LY_files)' - if (cnt == 2) uwind_file = trim(atm_data_dir)//'/'//trim(atm_data_type_prefix)// & - '/8XDAILY/'//trim(atm_data_type_prefix)//'_03hr_forcing_'//trim(grd)// & - trim(atm_data_version)//'_2005.nc' + flw_file = & + trim(atm_data_dir)//'/MONTHLY/cldf.omip.dat' - if (cnt == 3) uwind_file = trim(atm_data_dir)//'/'//trim(atm_data_type_prefix)// & - '/8XDAILY/'//trim(atm_data_type_prefix)// & - '_03hr_forcing'//trim(atm_data_version)//'_2005.nc' + rain_file = & + trim(atm_data_dir)//'/MONTHLY/prec.nmyr.dat' - if (cnt == 4) uwind_file = trim(atm_data_dir)// & - '/8XDAILY/'//trim(atm_data_type_prefix)//'_'//trim(grd)// & - '_03hr_forcing'//trim(atm_data_version)//'_2005.nc' + uwind_file = & + trim(atm_data_dir)//'/4XDAILY/u_10.1996.dat' + call file_year(uwind_file,yr) - if (cnt == 5) uwind_file = trim(atm_data_dir)// & - '/8XDAILY/'//trim(atm_data_type_prefix)//'_03hr_forcing_'//trim(grd)// & - trim(atm_data_version)//'_2005.nc' + vwind_file = & + trim(atm_data_dir)//'/4XDAILY/v_10.1996.dat' + call file_year(vwind_file,yr) - if (cnt == 6) uwind_file = trim(atm_data_dir)// & - '/8XDAILY/'//trim(atm_data_type_prefix)// & - '_03hr_forcing'//trim(atm_data_version)//'_2005.nc' + tair_file = & + trim(atm_data_dir)//'/4XDAILY/t_10.1996.dat' + call file_year(tair_file,yr) + humid_file = & + trim(atm_data_dir)//'/4XDAILY/q_10.1996.dat' + call file_year(humid_file,yr) - call file_year(uwind_file,yr) - INQUIRE(FILE=uwind_file,EXIST=exists) + if (my_task == master_task) then + write (nu_diag,*) ' ' + write (nu_diag,*) 'Forcing data year = ', fyear + write (nu_diag,*) 'Atmospheric data files:' + write (nu_diag,*) trim(flw_file) + write (nu_diag,*) trim(rain_file) + write (nu_diag,*) trim(uwind_file) + write 
(nu_diag,*) trim(vwind_file) + write (nu_diag,*) trim(tair_file) + write (nu_diag,*) trim(humid_file) + endif ! master_task - if (debug_forcing .and. (my_task == master_task)) then - write(nu_diag,*) subname,cnt,exists,trim(uwind_file) - endif + end subroutine LY_files + subroutine JRA55_gx1_files(yr) +! + integer (kind=int_kind), intent(in) :: & + yr ! current forcing year - cnt = cnt + 1 - enddo + character(len=*), parameter :: subname = '(JRA55_gx1_files)' - if (.not.exists) then - call abort_ice(error_message=subname//' could not find forcing file') + uwind_file = & + trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_2005.nc' + call file_year(uwind_file,yr) + if (my_task == master_task) then + write (nu_diag,*) ' ' + write (nu_diag,*) 'Atmospheric data files:' + write (nu_diag,*) trim(uwind_file) endif + end subroutine JRA55_gx1_files + subroutine JRA55_tx1_files(yr) +! + integer (kind=int_kind), intent(in) :: & + yr ! current forcing year + character(len=*), parameter :: subname = '(JRA55_tx1_files)' + + uwind_file = & + trim(atm_data_dir)//'/8XDAILY/JRA55_03hr_forcing_tx1_2005.nc' + call file_year(uwind_file,yr) if (my_task == master_task) then - write (nu_diag,'(2a)') ' ' - write (nu_diag,'(2a)') subname,'Atmospheric data files:' - write (nu_diag,'(2a)') subname,trim(uwind_file) + write (nu_diag,*) ' ' + write (nu_diag,*) 'Atmospheric data files:' + write (nu_diag,*) trim(uwind_file) endif + end subroutine JRA55_tx1_files + subroutine JRA55_gx3_files(yr) +! + integer (kind=int_kind), intent(in) :: & + yr ! current forcing year - end subroutine JRA55_files + character(len=*), parameter :: subname = '(JRA55_gx3_files)' + uwind_file = & + trim(atm_data_dir)//'/8XDAILY/JRA55_gx3_03hr_forcing_2005.nc' + call file_year(uwind_file,yr) + if (my_task == master_task) then + write (nu_diag,*) ' ' + write (nu_diag,*) 'Atmospheric data files:' + write (nu_diag,*) trim(uwind_file) + endif + end subroutine JRA55_gx3_files !======================================================================= +! +! read Large and Yeager atmospheric data +! note: also uses AOMIP protocol, in part - subroutine JRA55_data + subroutine LY_data use ice_blocks, only: block, get_block use ice_global_reductions, only: global_minval, global_maxval - use ice_domain, only: nblocks, distrb_info - use ice_flux, only: fsnow, Tair, uatm, vatm, Qa, fsw, flw - use ice_grid, only: hm, tmask, umask + use ice_domain, only: nblocks, distrb_info, blocks_ice + use ice_flux, only: fsnow, Tair, uatm, vatm, Qa, fsw + use ice_grid, only: hm, tlon, tlat, tmask, umask use ice_state, only: aice - use ice_calendar, only: days_per_year - integer (kind=int_kind) :: & - ncid , & ! netcdf file id - i, j, n1 , & - lfyear , & ! local year value + integer (kind=int_kind) :: & + i, j , & + ixm,ixx,ixp , & ! record numbers for neighboring months recnum , & ! record number maxrec , & ! maximum record number - iblk ! block index - - integer (kind=int_kind), save :: & - frec_info(2,2) = -99 ! remember prior values to reduce reading - ! first dim is yr, recnum - ! second dim is data1 data2 + recslot , & ! spline slot for current record + midmonth , & ! middle day of month + dataloc , & ! = 1 for data located in middle of time interval + ! = 2 for date located at end of time interval + iblk , & ! block index + ilo,ihi,jlo,jhi ! beginning and end of physical domain real (kind=dbl_kind) :: & - sec3hr , & ! number of seconds in 3 hours + sec6hr , & ! number of seconds in 6 hours secday , & ! number of seconds in day - eps, tt , & ! 
for interpolation coefficients Tffresh , & vmin, vmax - character(len=64) :: fieldname !netcdf field name - character (char_len_long) :: uwind_file_old - character(len=*), parameter :: subname = '(JRA55_data)' + logical (kind=log_kind) :: readm, read6 + + type (block) :: & + this_block ! block information for current block - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + character(len=*), parameter :: subname = '(LY_data)' call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) @@ -2331,161 +2133,351 @@ subroutine JRA55_data if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - sec3hr = secday/c8 ! seconds in 3 hours - maxrec = days_per_year * 8 + !------------------------------------------------------------------- + ! monthly data + ! + ! Assume that monthly data values are located in the middle of the + ! month. + !------------------------------------------------------------------- - if (local_debug .and. my_task == master_task) then - write(nu_diag,*) subname,'fdbg dpy, maxrec = ',days_per_year,maxrec - endif + midmonth = 15 ! data is given on 15th of every month +! midmonth = fix(p5 * real(daymo(month))) ! exact middle - !------------------------------------------------------------------- - ! 3-hourly data - ! states are instantaneous, 1st record is 00z Jan 1 - ! fluxes are 3 hour averages, 1st record is 00z-03z Jan 1 - ! interpolate states, do not interpolate fluxes - !------------------------------------------------------------------- - ! File is NETCDF with winds in NORTH and EAST direction - ! file variable names are: - ! glbrad (shortwave W/m^2), 3 hr average - ! dlwsfc (longwave W/m^2), 3 hr average - ! wndewd (eastward wind m/s), instantaneous - ! wndnwd (northward wind m/s), instantaneous - ! airtmp (air temperature K), instantaneous - ! spchmd (specific humidity kg/kg), instantaneous - ! ttlpcp (precipitation kg/m s-1), 3 hr average - !------------------------------------------------------------------- + ! Compute record numbers for surrounding months + maxrec = 12 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 + if (mday >= midmonth) ixm = -99 ! other two points will be used + if (mday < midmonth) ixp = -99 + + ! Determine whether interpolation will use values 1:2 or 2:3 + ! recslot = 2 means we use values 1:2, with the current value (2) + ! in the second slot + ! recslot = 1 means we use values 2:3, with the current value (2) + ! in the first slot + recslot = 1 ! latter half of month + if (mday < midmonth) recslot = 2 ! first half of month + + ! Find interpolation coefficients + call interp_coeff_monthly (recslot) + + ! Read 2 monthly values + readm = .false. + if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. + + call read_clim_data (readm, 0, ixm, month, ixp, & + flw_file, cldf_data, field_loc_center, field_type_scalar) + call read_clim_data (readm, 0, ixm, month, ixp, & + rain_file, fsnow_data, field_loc_center, field_type_scalar) + + call interpolate_data (cldf_data, cldf) + call interpolate_data (fsnow_data, fsnow) ! units mm/s = kg/m^2/s + + !------------------------------------------------------------------- + ! 6-hourly data + ! + ! Assume that the 6-hourly value is located at the end of the + ! 6-hour period. This is the convention for NCEP reanalysis data. + ! E.g. record 1 gives conditions at 6 am GMT on 1 January. 
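!--- Editorial worked example (not part of the patch): with the record-number
!--- formula used just below, at 07:30 GMT on 1 January (yday = 1, sec = 27000)
!---    recnum = 4*int(yday) - 3 + int(sec/sec6hr) = 1 + 1 = 2
!--- so the bracketing values are record ixm = 1 (valid 06Z) and record
!--- ixx = 2 (valid 12Z).  Assuming ftime counts seconds from the start of the
!--- forcing year, interp_coeff then gives
!---    c1intp = (43200 - 27000)/21600 = 0.75,   c2intp = 0.25
!--- i.e. the interpolated field is weighted 3:1 toward the 06Z record.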
+ !------------------------------------------------------------------- + + dataloc = 2 ! data located at end of interval + sec6hr = secday/c4 ! seconds in 6 hours + maxrec = 1460 ! 365*4 + + ! current record number + recnum = 4*int(yday) - 3 + int(real(sec,kind=dbl_kind)/sec6hr) + + ! Compute record numbers for surrounding data (2 on each side) + + ixm = mod(recnum+maxrec-2,maxrec) + 1 + ixx = mod(recnum-1, maxrec) + 1 +! ixp = mod(recnum, maxrec) + 1 + + ! Compute interpolation coefficients + ! If data is located at the end of the time interval, then the + ! data value for the current record goes in slot 2 + + recslot = 2 + ixp = -99 + call interp_coeff (recnum, recslot, sec6hr, dataloc) + + ! Read + read6 = .false. + if (istep==1 .or. oldrecnum .ne. recnum) read6 = .true. - uwind_file_old = uwind_file - if (uwind_file /= uwind_file_old .and. my_task == master_task) then - write(nu_diag,'(2a)') subname,' reading forcing file = ',trim(uwind_file) + if (trim(atm_data_format) == 'bin') then + call read_data (read6, 0, fyear, ixm, ixx, ixp, maxrec, & + tair_file, Tair_data, & + field_loc_center, field_type_scalar) + call read_data (read6, 0, fyear, ixm, ixx, ixp, maxrec, & + uwind_file, uatm_data, & + field_loc_center, field_type_vector) + call read_data (read6, 0, fyear, ixm, ixx, ixp, maxrec, & + vwind_file, vatm_data, & + field_loc_center, field_type_vector) + call read_data (read6, 0, fyear, ixm, ixx, ixp, maxrec, & + humid_file, Qa_data, & + field_loc_center, field_type_scalar) + else + call abort_ice (error_message=subname//'nonbinary atm_data_format unavailable', & + file=__FILE__, line=__LINE__) endif - call ice_open_nc(uwind_file,ncid) + ! Interpolate + call interpolate_data (Tair_data, Tair) + call interpolate_data (uatm_data, uatm) + call interpolate_data (vatm_data, vatm) + call interpolate_data (Qa_data, Qa) - do n1 = 1, 2 + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + ! limit summer Tair values where ice is present + do j = 1, ny_block + do i = 1, nx_block + if (aice(i,j,iblk) > p1) Tair(i,j,iblk) = min(Tair(i,j,iblk), Tffresh+p1) + enddo + enddo - lfyear = fyear - call file_year(uwind_file,lfyear) - if (n1 == 1) then - recnum = 8*int(yday) - 7 + int(real(msec,kind=dbl_kind)/sec3hr) - if (my_task == master_task .and. (recnum <= 2 .or. recnum >= maxrec-1)) then - write(nu_diag,'(3a)') subname,' reading forcing file 1st ts = ',trim(uwind_file) - endif - elseif (n1 == 2) then - recnum = 8*int(yday) - 7 + int(real(msec,kind=dbl_kind)/sec3hr) + 1 - if (recnum > maxrec) then - lfyear = fyear + 1 ! next year - if (lfyear > fyear_final) lfyear = fyear_init - recnum = 1 - call file_year(uwind_file,lfyear) - if (my_task == master_task) then - write(nu_diag,'(3a)') subname,' reading forcing file 2nd ts = ',trim(uwind_file) - endif - call ice_close_nc(ncid) - call ice_open_nc(uwind_file,ncid) - endif - endif + call Qa_fixLY(nx_block, ny_block, & + Tair (:,:,iblk), & + Qa (:,:,iblk)) - if (local_debug .and. my_task == master_task) then - write(nu_diag,*) subname,'fdbg read recnum = ',recnum,n1 - endif + do j = 1, ny_block + do i = 1, nx_block + Qa (i,j,iblk) = Qa (i,j,iblk) * hm(i,j,iblk) + Tair(i,j,iblk) = Tair(i,j,iblk) * hm(i,j,iblk) + uatm(i,j,iblk) = uatm(i,j,iblk) * hm(i,j,iblk) + vatm(i,j,iblk) = vatm(i,j,iblk) * hm(i,j,iblk) + enddo + enddo - ! to reduce reading, check whether it's the same data as last read + ! 
AOMIP + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi - if (lfyear /= frec_info(1,n1) .or. recnum /= frec_info(2,n1)) then + call compute_shortwave(nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + TLON (:,:,iblk), & + TLAT (:,:,iblk), & + hm (:,:,iblk), & + Qa (:,:,iblk), & + cldf (:,:,iblk), & + fsw (:,:,iblk)) - ! check whether we can copy values from 2 to 1, should be faster than reading - ! can only do this from 2 to 1 or 1 to 2 without setting up a temporary - ! it's more likely that the values from data2 when time advances are needed in data1 - ! compare n1=1 year/record with data from last timestep at n1=2 + enddo ! iblk + !$OMP END PARALLEL DO - if (n1 == 1 .and. lfyear == frec_info(1,2) .and. recnum == frec_info(2,2)) then - Tair_data(:,:,1,:) = Tair_data(:,:,2,:) - uatm_data(:,:,1,:) = uatm_data(:,:,2,:) - vatm_data(:,:,1,:) = vatm_data(:,:,2,:) - Qa_data(:,:,1,:) = Qa_data(:,:,2,:) - fsw_data(:,:,1,:) = fsw_data(:,:,2,:) - flw_data(:,:,1,:) = flw_data(:,:,2,:) - fsnow_data(:,:,1,:) = fsnow_data(:,:,2,:) - else + ! Save record number + oldrecnum = recnum + + if (dbug) then + if (my_task == master_task) write (nu_diag,*) 'LY_bulk_data' + vmin = global_minval(fsw,distrb_info,tmask) + + vmax = global_maxval(fsw,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'fsw',vmin,vmax + vmin = global_minval(cldf,distrb_info,tmask) + vmax = global_maxval(cldf,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'cldf',vmin,vmax + vmin =global_minval(fsnow,distrb_info,tmask) + vmax =global_maxval(fsnow,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'fsnow',vmin,vmax + vmin = global_minval(Tair,distrb_info,tmask) + vmax = global_maxval(Tair,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'Tair',vmin,vmax + vmin = global_minval(uatm,distrb_info,umask) + vmax = global_maxval(uatm,distrb_info,umask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'uatm',vmin,vmax + vmin = global_minval(vatm,distrb_info,umask) + vmax = global_maxval(vatm,distrb_info,umask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'vatm',vmin,vmax + vmin = global_minval(Qa,distrb_info,tmask) + vmax = global_maxval(Qa,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'Qa',vmin,vmax + + endif ! dbug + + end subroutine LY_data + +!======================================================================= + + subroutine JRA55_data (yr) - fieldname = 'airtmp' - call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,n1,:),local_debug, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + use ice_blocks, only: block, get_block + use ice_global_reductions, only: global_minval, global_maxval + use ice_domain, only: nblocks, distrb_info, blocks_ice + use ice_flux, only: fsnow, Tair, uatm, vatm, Qa, fsw, flw + use ice_grid, only: hm, tlon, tlat, tmask, umask + use ice_state, only: aice + use ice_calendar, only: days_per_year, use_leap_years + + integer (kind=int_kind) :: & + ncid , & ! netcdf file id + i, j , & + ixm,ixx,ixp , & ! record numbers for neighboring months + recnum , & ! record number + maxrec , & ! maximum record number + recslot , & ! spline slot for current record + midmonth , & ! middle day of month + dataloc , & ! = 1 for data located in middle of time interval + ! = 2 for date located at end of time interval + iblk , & ! block index + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + yr ! 
current forcing year + + real (kind=dbl_kind) :: & + sec3hr , & ! number of seconds in 3 hours + secday , & ! number of seconds in day + Tffresh , & + vmin, vmax + + logical (kind=log_kind) :: readm, read6,debug_n_d + + type (block) :: & + this_block ! block information for current block - fieldname = 'wndewd' - call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,n1,:),local_debug, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + character(len=64) :: fieldname !netcdf field name + character(len=*), parameter :: subname = '(JRA55_data)' - fieldname = 'wndnwd' - call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,n1,:),local_debug, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + debug_n_d = .false. !usually false - fieldname = 'spchmd' - call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,n1,:),local_debug, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + call icepack_query_parameters(Tffresh_out=Tffresh) + call icepack_query_parameters(secday_out=secday) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) - fieldname = 'glbrad' - call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,n1,:),local_debug, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + !------------------------------------------------------------------- + ! 3-hourly data + ! + ! Assume that the 3-hourly value is located at the end of the + ! 3-hour period. This is the convention for NCEP reanalysis data. + ! E.g. record 1 gives conditions at 3 am GMT on 1 January. + !------------------------------------------------------------------- - fieldname = 'dlwsfc' - call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,n1,:),local_debug, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + dataloc = 2 ! data located at end of interval + sec3hr = secday/c8 ! seconds in 3 hours + !maxrec = 2920 ! 365*8; for leap years = 366*8 - fieldname = 'ttlpcp' - call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,n1,:),local_debug, & - field_loc=field_loc_center, & - field_type=field_type_scalar) - endif ! copy data from n1=2 from last timestep to n1=1 - endif ! input data is same as last timestep + if (use_leap_years) days_per_year = 366 !overrides setting of 365 in ice_calendar + maxrec = days_per_year*8 - frec_info(1,n1) = lfyear - frec_info(2,n1) = recnum + if(days_per_year == 365 .and. (mod(yr, 4) == 0)) then + call abort_ice('days_per_year should be set to 366 for leap years') + end if - enddo ! n1 + ! current record number + recnum = 8*int(yday) - 7 + int(real(sec,kind=dbl_kind)/sec3hr) - call ice_close_nc(ncid) + ! Compute record numbers for surrounding data (2 on each side) - ! reset uwind_file to original year - call file_year(uwind_file,fyear) + ixm = mod(recnum+maxrec-2,maxrec) + 1 + ixx = mod(recnum-1, maxrec) + 1 ! Compute interpolation coefficients - eps = 1.0e-6 - tt = real(mod(msec,nint(sec3hr)),kind=dbl_kind) - c2intp = tt / sec3hr - if (c2intp < c0 .and. c2intp > c0-eps) c2intp = c0 - if (c2intp > c1 .and. c2intp < c1+eps) c2intp = c1 - c1intp = 1.0_dbl_kind - c2intp - if (c2intp < c0 .or. c2intp > c1) then - write(nu_diag,*) subname,' ERROR: c2intp = ',c2intp - call abort_ice (error_message=subname//' ERROR: c2intp out of range', & - file=__FILE__, line=__LINE__) - endif - if (local_debug .and. my_task == master_task) then - write(nu_diag,*) subname,'fdbg c12intp = ',c1intp,c2intp - endif + ! 
If data is located at the end of the time interval, then the + ! data value for the current record goes in slot 2 + + recslot = 2 + ixp = -99 + call interp_coeff (recnum, recslot, sec3hr, dataloc) + + ! Read + read6 = .false. + if (istep==1 .or. oldrecnum .ne. recnum) read6 = .true. + !------------------------------------------------------------------- + ! File is NETCDF with winds in NORTH and EAST direction + ! file variable names are: + ! glbrad (shortwave W/m^2) + ! dlwsfc (longwave W/m^2) + ! wndewd (eastward wind m/s) + ! wndnwd (northward wind m/s) + ! airtmp (air temperature K) + ! spchmd (specific humidity kg/kg) + ! ttlpcp (precipitation kg/m s-1) + !------------------------------------------------------------------- + call ice_open_nc(uwind_file,ncid) + + fieldname = 'airtmp' + call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,Tair_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'wndewd' + call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,uatm_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'wndnwd' + call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,vatm_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'spchmd' + call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,Qa_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'glbrad' + call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,fsw_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'dlwsfc' + call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,flw_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + fieldname = 'ttlpcp' + call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,1,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_read_nc(ncid,recnum,fieldname,fsnow_data(:,:,2,:),debug_n_d, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + call ice_close_nc(ncid) + ! Interpolate call interpolate_data (Tair_data, Tair) call interpolate_data (uatm_data, uatm) call interpolate_data (vatm_data, vatm) call interpolate_data (Qa_data, Qa) - ! use 3 hr average for heat flux and precip fields, no interpolation -! call interpolate_data (fsw_data, fsw) -! call interpolate_data (flw_data, flw) -! 
call interpolate_data (fsnow_data, fsnow) - fsw(:,:,:) = fsw_data(:,:,1,:) - flw(:,:,:) = flw_data(:,:,1,:) - fsnow(:,:,:) = fsnow_data(:,:,1,:) + call interpolate_data (fsw_data, fsw) + call interpolate_data (flw_data, flw) + call interpolate_data (fsnow_data, fsnow) - !$OMP PARALLEL DO PRIVATE(iblk,i,j) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks ! limit summer Tair values where ice is present do j = 1, ny_block @@ -2509,30 +2501,45 @@ subroutine JRA55_data enddo ! iblk !$OMP END PARALLEL DO - if (debug_forcing .or. local_debug) then - if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg JRA55_bulk_data' - vmin = global_minval(fsw,distrb_info,tmask) - vmax = global_maxval(fsw,distrb_info,tmask) - if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg fsw',vmin,vmax - vmin = global_minval(flw,distrb_info,tmask) - vmax = global_maxval(flw,distrb_info,tmask) - if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg flw',vmin,vmax - vmin =global_minval(fsnow,distrb_info,tmask) - vmax =global_maxval(fsnow,distrb_info,tmask) - if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg fsnow',vmin,vmax - vmin = global_minval(Tair,distrb_info,tmask) - vmax = global_maxval(Tair,distrb_info,tmask) - if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg Tair',vmin,vmax - vmin = global_minval(uatm,distrb_info,umask) - vmax = global_maxval(uatm,distrb_info,umask) - if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg uatm',vmin,vmax - vmin = global_minval(vatm,distrb_info,umask) - vmax = global_maxval(vatm,distrb_info,umask) - if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg vatm',vmin,vmax - vmin = global_minval(Qa,distrb_info,tmask) - vmax = global_maxval(Qa,distrb_info,tmask) - if (my_task.eq.master_task) write (nu_diag,*) subname,'fdbg Qa',vmin,vmax - endif ! debug_forcing + ! Save record number + oldrecnum = recnum + + if (dbug) then + if (my_task == master_task) write (nu_diag,*) 'JRA55_bulk_data' + vmin = global_minval(fsw,distrb_info,tmask) + + vmax = global_maxval(fsw,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'fsw',vmin,vmax + vmin = global_minval(flw,distrb_info,tmask) + vmax = global_maxval(flw,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'flw',vmin,vmax + vmin =global_minval(fsnow,distrb_info,tmask) + vmax =global_maxval(fsnow,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'fsnow',vmin,vmax + vmin = global_minval(Tair,distrb_info,tmask) + vmax = global_maxval(Tair,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'Tair',vmin,vmax + vmin = global_minval(uatm,distrb_info,umask) + vmax = global_maxval(uatm,distrb_info,umask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'uatm',vmin,vmax + vmin = global_minval(vatm,distrb_info,umask) + vmax = global_maxval(vatm,distrb_info,umask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'vatm',vmin,vmax + vmin = global_minval(Qa,distrb_info,tmask) + vmax = global_maxval(Qa,distrb_info,tmask) + if (my_task.eq.master_task) & + write (nu_diag,*) 'Qa',vmin,vmax + if (my_task.eq.master_task) & + write (nu_diag,*) 'maxrec',maxrec + write (nu_diag,*) 'days_per_year', days_per_year + + endif ! dbug end subroutine JRA55_data @@ -2572,15 +2579,13 @@ subroutine compute_shortwave(nx_block, ny_block, & secday , & pi , & lontmp , & - deg2rad + deg2rad integer (kind=int_kind) :: & i, j character(len=*), parameter :: subname = '(compute_shortwave)' - if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(secday_out=secday, pi_out=pi) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -2589,7 +2594,7 @@ subroutine compute_shortwave(nx_block, ny_block, & do j=jlo,jhi do i=ilo,ihi deg2rad = pi/c180 -! solar_time = mod(real(msec,kind=dbl_kind),secday)/c3600 & +! solar_time = mod(real(sec,kind=dbl_kind),secday)/c3600 & ! + c12*sin(p5*TLON(i,j)) ! Convert longitude to range of -180 to 180 for LST calculation @@ -2598,7 +2603,7 @@ subroutine compute_shortwave(nx_block, ny_block, & if (lontmp .gt. c180) lontmp = lontmp - c360 if (lontmp .lt. -c180) lontmp = lontmp + c360 - solar_time = mod(real(msec,kind=dbl_kind),secday)/c3600 & + solar_time = mod(real(sec,kind=dbl_kind),secday)/c3600 & + lontmp/c15 if (solar_time .ge. 24._dbl_kind) solar_time = solar_time - 24._dbl_kind hour_angle = (c12 - solar_time)*pi/c12 @@ -2613,7 +2618,7 @@ subroutine compute_shortwave(nx_block, ny_block, & sw0 = max(sw0,c0) ! total downward shortwave for cice - Fsw(i,j) = sw0*(c1-p6*cldf(i,j)**3) + Fsw(i,j) = sw0*(c1-p6*cldf(i,j)**3) Fsw(i,j) = Fsw(i,j)*hm(i,j) enddo enddo @@ -2643,8 +2648,6 @@ subroutine Qa_fixLY(nx_block, ny_block, Tair, Qa) character(len=*), parameter :: subname = '(Qa_fixLY)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(Tffresh_out=Tffresh, puny_out=puny) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -2655,7 +2658,7 @@ subroutine Qa_fixLY(nx_block, ny_block, Tair, Qa) /(c1 + 0.00412_dbl_kind*worka) & ! 2+ converts ea mb -> Pa + 0.00422_dbl_kind*worka ! for ice ! vapor pressure - worka = (c10**worka) ! saturated + worka = (c10**worka) ! saturated worka = max(worka,puny) ! puny over land to prevent division by zero ! specific humidity worka = 0.622_dbl_kind*worka/(1.e5_dbl_kind-0.378_dbl_kind*worka) @@ -2687,8 +2690,6 @@ subroutine hadgem_files (yr) character(len=*), parameter :: subname = '(hadgem_files)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(calc_strair_out=calc_strair, & calc_Tsfc_out=calc_Tsfc) call icepack_warnings_flush(nu_diag) @@ -2771,13 +2772,13 @@ subroutine hadgem_files (yr) endif ! calc_strair ! -------------------------------------------------------------- - ! Atmosphere properties. Even if these fields are not + ! Atmosphere properties. Even if these fields are not ! being used to force the ice (i.e. calc_Tsfc=.false.), they ! are still needed to generate forcing for mixed layer model or ! to calculate wind stress ! -------------------------------------------------------------- - if (calc_Tsfc .or. oceanmixed_ice .or. calc_strair) then + if (calc_Tsfc .or. oceanmixed_ice .or. calc_strair) then fsw_file = & trim(atm_data_dir)//'/MONTHLY/SW_incoming.1996.nc' @@ -2822,14 +2823,14 @@ subroutine hadgem_files (yr) trim(atm_data_dir)//'/MONTHLY/topmeltn',n,'.1996.nc' call file_year(topmelt_file(n),yr) - ! 'botmelt' = fcondtop. + ! 'botmelt' = fcondtop. write(botmelt_file(n), '(a,i1,a)') & trim(atm_data_dir)//'/MONTHLY/botmeltn',n,'.1996.nc' call file_year(botmelt_file(n),yr) enddo - ! 'sublim' = - flat / Lsub. + ! 'sublim' = - flat / Lsub. 
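!--- Editorial note (not part of the patch): each monthly HadGEM file name below
!--- is built with a placeholder year and then passed through subroutine
!--- file_year, which finds the '.nc' suffix and overwrites the four digits in
!--- front of it with the requested year.  Illustrative call, assuming yr = 2003:
!---    sublim_file = trim(atm_data_dir)//'/MONTHLY/sublim.1996.nc'
!---    call file_year(sublim_file, 2003)   ! -> .../MONTHLY/sublim.2003.nc
!--- This is why the year embedded in the template names does not matter.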
sublim_file = & trim(atm_data_dir)//'/MONTHLY/sublim.1996.nc' call file_year(sublim_file,yr) @@ -2875,7 +2876,7 @@ subroutine hadgem_data botmelt, & sublim - character (char_len) :: & + character (char_len) :: & fieldname ! field name in netcdf file real (kind=dbl_kind) :: & @@ -2887,8 +2888,6 @@ subroutine hadgem_data character(len=*), parameter :: subname = '(hadgem_data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(Lsub_out=Lsub) call icepack_query_parameters(calc_strair_out=calc_strair, & calc_Tsfc_out=calc_Tsfc) @@ -2904,12 +2903,12 @@ subroutine hadgem_data !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle +! midmonth = fix(p5 * real(daymo(month))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -2926,18 +2925,18 @@ subroutine hadgem_data ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. ! ----------------------------------------------------------- ! Rainfall and snowfall ! ----------------------------------------------------------- fieldname='rainfall' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, rain_file, fieldname, frain_data, & field_loc_center, field_type_scalar) fieldname='snowfall' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, snow_file, fieldname, fsnow_data, & field_loc_center, field_type_scalar) @@ -2952,11 +2951,11 @@ subroutine hadgem_data ! -------------------------------------------------------- fieldname='u_10' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, uwind_file, fieldname, uatm_data, & field_loc_center, field_type_vector) fieldname='v_10' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, vwind_file, fieldname, vatm_data, & field_loc_center, field_type_vector) @@ -2971,11 +2970,11 @@ subroutine hadgem_data ! -------------------------------------------------------- fieldname='taux' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, strax_file, fieldname, strax_data, & field_loc_center, field_type_vector) fieldname='tauy' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, stray_file, fieldname, stray_data, & field_loc_center, field_type_vector) @@ -2990,7 +2989,7 @@ subroutine hadgem_data ! -------------------------------------------------- fieldname='wind_10' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, wind_file, fieldname, wind_data, & field_loc_center, field_type_scalar) @@ -3002,34 +3001,34 @@ subroutine hadgem_data endif ! calc_strair ! ----------------------------------------------------------- - ! SW incoming, LW incoming, air temperature, density and - ! 
humidity at 10m. + ! SW incoming, LW incoming, air temperature, density and + ! humidity at 10m. ! - ! Even if these fields are not being used to force the ice - ! (i.e. calc_Tsfc=.false.), they are still needed to generate + ! Even if these fields are not being used to force the ice + ! (i.e. calc_Tsfc=.false.), they are still needed to generate ! forcing for mixed layer model or to calculate wind stress ! ----------------------------------------------------------- - if (calc_Tsfc .or. oceanmixed_ice .or. calc_strair) then + if (calc_Tsfc .or. oceanmixed_ice .or. calc_strair) then fieldname='SW_incoming' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, fsw_file, fieldname, fsw_data, & field_loc_center, field_type_scalar) fieldname='LW_incoming' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, flw_file, fieldname, flw_data, & field_loc_center, field_type_scalar) fieldname='t_10' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, tair_file, fieldname, Tair_data, & field_loc_center, field_type_scalar) fieldname='rho_10' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, rhoa_file, fieldname, rhoa_data, & field_loc_center, field_type_scalar) fieldname='q_10' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, humid_file, fieldname, Qa_data, & field_loc_center, field_type_scalar) @@ -3050,7 +3049,7 @@ subroutine hadgem_data ! ------------------------------------------------------ fieldname='sublim' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, sublim_file, fieldname, sublim_data, & field_loc_center, field_type_scalar) @@ -3059,12 +3058,12 @@ subroutine hadgem_data do n = 1, ncat write(fieldname, '(a,i1)') 'topmeltn',n - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, topmelt_file(n), fieldname, topmelt_data(:,:,:,:,n), & field_loc_center, field_type_scalar) write(fieldname, '(a,i1)') 'botmeltn',n - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, botmelt_file(n), fieldname, botmelt_data(:,:,:,:,n), & field_loc_center, field_type_scalar) @@ -3077,7 +3076,7 @@ subroutine hadgem_data ! botmelt = fcondtop (as zero layer) ! ! Convert UM sublimation data into CICE LH flux - ! (sublim = - flatn / Lsub) and have same value for all + ! (sublim = - flatn / Lsub) and have same value for all ! categories !-------------------------------------------------------- @@ -3086,7 +3085,7 @@ subroutine hadgem_data do j = 1, ny_block do i = 1, nx_block fcondtopn_f(i,j,n,iblk) = botmelt(i,j,iblk) - fsurfn_f(i,j,n,iblk) = topmelt(i,j,iblk) & + fsurfn_f(i,j,n,iblk) = topmelt(i,j,iblk) & + botmelt(i,j,iblk) flatn_f(i,j,n,iblk) = - sublim(i,j,iblk)*Lsub enddo @@ -3096,12 +3095,12 @@ subroutine hadgem_data enddo ! ncat - endif ! .not. calc_Tsfc + endif ! .not. calc_Tsfc end subroutine hadgem_data !======================================================================= -! monthly forcing +! 
monthly forcing !======================================================================= subroutine monthly_files (yr) @@ -3118,8 +3117,6 @@ subroutine monthly_files (yr) character(len=*), parameter :: subname = '(monthly_files)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - flw_file = & trim(atm_data_dir)//'/MONTHLY/cldf.omip.dat' @@ -3149,7 +3146,7 @@ subroutine monthly_files (yr) if (my_task == master_task) then write (nu_diag,*) ' ' - write (nu_diag,*) 'Forcing data year = ', fyear + write (nu_diag,*) 'Forcing data year = ', fyear write (nu_diag,*) 'Atmospheric data files:' write (nu_diag,*) trim(flw_file) write (nu_diag,*) trim(rain_file) @@ -3172,7 +3169,7 @@ subroutine monthly_data use ice_flux, only: fsnow, Tair, Qa, wind, strax, stray, fsw use ice_grid, only: hm, tlon, tlat, tmask, umask - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & i, j , & ixm,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number @@ -3188,25 +3185,23 @@ subroutine monthly_data type (block) :: & this_block ! block information for current block - + character(len=*), parameter :: subname = '(monthly_data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle +! midmonth = fix(p5 * real(daymo(month))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -3221,29 +3216,29 @@ subroutine monthly_data ! Find interpolation coefficients call interp_coeff_monthly (recslot) - ! Read 2 monthly values + ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. 
- call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & flw_file, cldf_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & rain_file, fsnow_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & tair_file, Tair_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & humid_file, Qa_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & wind_file, wind_data, & field_loc_center, field_type_scalar) - call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & strax_file, strax_data, & field_loc_center, field_type_vector) - call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & stray_file, stray_data, & field_loc_center, field_type_vector) @@ -3272,7 +3267,7 @@ subroutine monthly_data enddo ! AOMIP - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -3290,42 +3285,42 @@ subroutine monthly_data enddo ! iblk !$OMP END PARALLEL DO - if (debug_forcing) then + if (dbug) then if (my_task == master_task) write (nu_diag,*) 'LY_bulk_data' vmin = global_minval(fsw,distrb_info,tmask) vmax = global_maxval(fsw,distrb_info,tmask) if (my_task.eq.master_task) & - write (nu_diag,*) 'fsw',vmin,vmax + write (nu_diag,*) 'fsw',vmin,vmax vmin = global_minval(cldf,distrb_info,tmask) vmax = global_maxval(cldf,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'cldf',vmin,vmax vmin =global_minval(fsnow,distrb_info,tmask) vmax =global_maxval(fsnow,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'fsnow',vmin,vmax vmin = global_minval(Tair,distrb_info,tmask) vmax = global_maxval(Tair,distrb_info,tmask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'Tair',vmin,vmax vmin = global_minval(wind,distrb_info,umask) vmax = global_maxval(wind,distrb_info,umask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'wind',vmin,vmax vmin = global_minval(strax,distrb_info,umask) vmax = global_maxval(strax,distrb_info,umask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'strax',vmin,vmax vmin = global_minval(stray,distrb_info,umask) vmax = global_maxval(stray,distrb_info,umask) - if (my_task.eq.master_task) & + if (my_task.eq.master_task) & write (nu_diag,*) 'stray',vmin,vmax vmin = global_minval(Qa,distrb_info,tmask) vmax = global_maxval(Qa,distrb_info,tmask) if (my_task.eq.master_task) & write (nu_diag,*) 'Qa',vmin,vmax - endif ! debug_forcing + endif ! dbug end subroutine monthly_data @@ -3339,7 +3334,7 @@ subroutine oned_data ! local parameters - character (char_len_long) :: & + character (char_len_long) :: & met_file, & ! netcdf filename fieldname ! field name in netcdf file @@ -3360,79 +3355,77 @@ subroutine oned_data Psat , & ! saturation vapour pressure (hPa) ws ! saturation mixing ratio - real (kind=dbl_kind), parameter :: & ! coefficients for Hyland-Wexler Qa - ps1 = 0.58002206e4_dbl_kind, & ! 
(K) + real (kind=dbl_kind), parameter :: & ! coefficients for Hyland-Wexler Qa + ps1 = 0.58002206e4_dbl_kind, & ! (K) ps2 = 1.3914993_dbl_kind, & ! - ps3 = 0.48640239e-1_dbl_kind, & ! (K^-1) + ps3 = 0.48640239e-1_dbl_kind, & ! (K^-1) ps4 = 0.41764768e-4_dbl_kind, & ! (K^-2) ps5 = 0.14452093e-7_dbl_kind, & ! (K^-3) ps6 = 6.5459673_dbl_kind, & ! - ws1 = 621.97_dbl_kind, & ! for saturation mixing ratio - Pair = 1020._dbl_kind ! Sea level pressure (hPa) - + ws1 = 621.97_dbl_kind, & ! for saturation mixing ratio + Pair = 1020._dbl_kind ! Sea level pressure (hPa) + character(len=*), parameter :: subname = '(oned_data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - diag = .false. ! write diagnostic information - + diag = .false. ! write diagnostic information + if (trim(atm_data_format) == 'nc') then ! read nc file - ! hourly data beginning Jan 1, 1989, 01:00 + ! hourly data beginning Jan 1, 1989, 01:00 ! HARDWIRED for dt = 1 hour! met_file = uwind_file call ice_open_nc(met_file,fid) - fieldname='Uatm' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='Uatm' + call ice_read_nc(fid,istep1,fieldname,work,diag) uatm(:,:,:) = work - fieldname='Vatm' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='Vatm' + call ice_read_nc(fid,istep1,fieldname,work,diag) vatm(:,:,:) = work - fieldname='Tair' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='Tair' + call ice_read_nc(fid,istep1,fieldname,work,diag) Temp = work - Tair(:,:,:) = Temp + Tair(:,:,:) = Temp call ice_close_nc(fid) - ! hourly solar data beginning Jan 1, 1989, 01:00 + ! hourly solar data beginning Jan 1, 1989, 01:00 met_file = fsw_file call ice_open_nc(met_file,fid) - fieldname='fsw' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='fsw' + call ice_read_nc(fid,istep1,fieldname,work,diag) fsw(:,:,:) = work call ice_close_nc(fid) - ! hourly interpolated monthly data beginning Jan 1, 1989, 01:00 + ! hourly interpolated monthly data beginning Jan 1, 1989, 01:00 met_file = humid_file call ice_open_nc(met_file,fid) - fieldname='rh' - call ice_read_nc(fid,istep1,fieldname,work,diag) + fieldname='rh' + call ice_read_nc(fid,istep1,fieldname,work,diag) rh = work - - fieldname='fsnow' - call ice_read_nc(fid,istep1,fieldname,work,diag) + + fieldname='fsnow' + call ice_read_nc(fid,istep1,fieldname,work,diag) fsnow(:,:,:) = work call ice_close_nc(fid) !------------------------------------------------------------------- ! Find specific humidity using Hyland-Wexler formulation - ! Hyland, R.W. and A. Wexler, Formulations for the Thermodynamic - ! Properties of the saturated phases of H20 from 173.15K to 473.15K, + ! Hyland, R.W. and A. Wexler, Formulations for the Thermodynamic + ! Properties of the saturated phases of H20 from 173.15K to 473.15K, ! ASHRAE Trans, 89(2A), 500-519, 1983 !------------------------------------------------------------------- - - Psat = exp(-ps1/Temp + ps2 - ps3*Temp + ps4*Temp**2 - ps5 * Temp**3 & + + Psat = exp(-ps1/Temp + ps2 - ps3*Temp + ps4*Temp**2 - ps5 * Temp**3 & + ps6 * log(Temp))*p01 ! saturation vapour pressure ws = ws1 * Psat/(Pair - Psat) ! saturation mixing ratio - Qa(:,:,:) = rh * ws * p01/(c1 + rh * ws * p01) * p001 + Qa(:,:,:) = rh * ws * p01/(c1 + rh * ws * p01) * p001 ! specific humidity (kg/kg) endif ! atm_data_format @@ -3440,7 +3433,7 @@ subroutine oned_data rhoa (:,:,:) = 1.3_dbl_kind ! air density (kg/m^3) cldf (:,:,:) = p25 ! cloud fraction frain(:,:,:) = c0 ! 
this is available in hourlymet_rh file - + end subroutine oned_data !======================================================================= @@ -3449,8 +3442,6 @@ subroutine oned_files character(len=*), parameter :: subname = '(oned_files)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - fsw_file = & trim(atm_data_dir)//'/hourlysolar_brw1989_5yr.nc' @@ -3516,8 +3507,6 @@ subroutine ocn_data_clim (dt) character(len=*), parameter :: subname = '(ocn_data_clim)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - if (my_task == master_task .and. istep == 1) then if (trim(ocn_data_type)=='clim') then write (nu_diag,*) ' ' @@ -3541,12 +3530,12 @@ subroutine ocn_data_clim (dt) if (trim(ocn_data_type)=='clim') then midmonth = 15 ! data is given on 15th of every month -!!! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle +!!! midmonth = fix(p5 * real(daymo(month))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -3562,14 +3551,14 @@ subroutine ocn_data_clim (dt) call interp_coeff_monthly (recslot) readm = .false. - if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. !------------------------------------------------------------------- ! Read two monthly SSS values and interpolate. ! Note: SSS is restored instantaneously to data. !------------------------------------------------------------------- - call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & sss_file, sss_data, & field_loc_center, field_type_scalar) call interpolate_data (sss_data, sss) @@ -3593,7 +3582,7 @@ subroutine ocn_data_clim (dt) !------------------------------------------------------------------- if (trim(ocn_data_type)=='clim') then - call read_clim_data (readm, 0, ixm, mmonth, ixp, & + call read_clim_data (readm, 0, ixm, month, ixp, & sst_file, sst_data, & field_loc_center, field_type_scalar) call interpolate_data (sst_data, sstdat) @@ -3621,19 +3610,19 @@ end subroutine ocn_data_clim subroutine ocn_data_ncar_init ! Reads NCAR pop ocean forcing data set 'pop_frc_gx1v3_010815.nc' -! +! ! List of ocean forcing fields: Note that order is important! ! (order is determined by field list in vname). -! -! For ocean mixed layer-----------------------------units -! -! 1 sst------temperature---------------------------(C) -! 2 sss------salinity------------------------------(ppt) -! 3 hbl------depth---------------------------------(m) -! 4 u--------surface u current---------------------(m/s) -! 5 v--------surface v current---------------------(m/s) -! 6 dhdx-----surface tilt x direction--------------(m/m) -! 7 dhdy-----surface tilt y direction--------------(m/m) +! +! For ocean mixed layer-----------------------------units +! +! 1 sst------temperature---------------------------(C) +! 2 sss------salinity------------------------------(ppt) +! 3 hbl------depth---------------------------------(m) +! 4 u--------surface u current---------------------(m/s) +! 5 v--------surface v current---------------------(m/s) +! 6 dhdx-----surface tilt x direction--------------(m/m) +! 7 dhdy-----surface tilt y direction--------------(m/m) ! 
8 qdp------ocean sub-mixed layer heat flux-------(W/m2) ! ! Fields 4, 5, 6, 7 are on the U-grid; 1, 2, 3, and 8 are @@ -3648,7 +3637,7 @@ subroutine ocn_data_ncar_init use netcdf #endif - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & n , & ! field index m , & ! month index nrec, & ! record number for direct access @@ -3660,10 +3649,12 @@ subroutine ocn_data_ncar_init 'T', 'S', 'hblt', 'U', 'V', & 'dhdx', 'dhdy', 'qdp' / + integer (kind=int_kind) :: & + fid , & ! file id + dimid ! dimension id + integer (kind=int_kind) :: & status , & ! status flag - fid , & ! file id - dimid , & ! dimension id nlat , & ! number of longitudes of data nlon ! number of latitudes of data @@ -3672,8 +3663,6 @@ subroutine ocn_data_ncar_init character(len=*), parameter :: subname = '(ocn_data_ncar_init)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - if (my_task == master_task) then write (nu_diag,*) 'WARNING: evp_prep calculates surface tilt' @@ -3682,7 +3671,7 @@ subroutine ocn_data_ncar_init write (nu_diag,*) 'WARNING: Alter ice_dyn_evp.F90 if desired.' if (restore_ocn) write (nu_diag,*) & - 'SST restoring timescale = ',trestore,' days' + 'SST restoring timescale = ',trestore,' days' sst_file = trim(ocn_data_dir)//'/'//trim(oceanmixed_file) ! not just sst @@ -3701,15 +3690,11 @@ subroutine ocn_data_ncar_init ! status = nf90_inq_dimid(fid,'nlon',dimid) status = nf90_inq_dimid(fid,'ni',dimid) - call ice_check_nc(status, subname//' ERROR: inq dimid ni', file=__FILE__, line=__LINE__) status = nf90_inquire_dimension(fid,dimid,len=nlon) - call ice_check_nc(status, subname//' ERROR: inq dim ni', file=__FILE__, line=__LINE__) - + ! status = nf90_inq_dimid(fid,'nlat',dimid) status = nf90_inq_dimid(fid,'nj',dimid) - call ice_check_nc(status, subname//' ERROR: inq dimid nj', file=__FILE__, line=__LINE__) status = nf90_inquire_dimension(fid,dimid,len=nlat) - call ice_check_nc(status, subname//' ERROR: inq dim nj', file=__FILE__, line=__LINE__) if( nlon .ne. nx_global ) then call abort_ice (error_message=subname//'ice: ocn frc file nlon ne nx_global', & @@ -3725,16 +3710,15 @@ subroutine ocn_data_ncar_init ! Read in ocean forcing data for all 12 months do n=1,nfld do m=1,12 - + ! Note: netCDF does single to double conversion if necessary -! if (n >= 4 .and. n <= 7) then -! call ice_read_nc(fid, m, vname(n), work1, debug_forcing, & -! field_loc_NEcorner, field_type_vector) -! else - call ice_read_nc(fid, m, vname(n), work1, debug_forcing, & + if (n >= 4 .and. n <= 7) then + call ice_read_nc(fid, m, vname(n), work1, dbug, & + field_loc_NEcorner, field_type_vector) + else + call ice_read_nc(fid, m, vname(n), work1, dbug, & field_loc_center, field_type_scalar) -! endif - + endif ocn_frc_m(:,:,:,n,m) = work1(:,:,:) enddo ! month loop @@ -3756,10 +3740,10 @@ subroutine ocn_data_ncar_init do m=1,12 nrec = nrec + 1 if (n >= 4 .and. n <= 7) then - call ice_read (nu_forcing, nrec, work1, 'rda8', debug_forcing, & + call ice_read (nu_forcing, nrec, work1, 'rda8', dbug, & field_loc_NEcorner, field_type_vector) else - call ice_read (nu_forcing, nrec, work1, 'rda8', debug_forcing, & + call ice_read (nu_forcing, nrec, work1, 'rda8', dbug, & field_loc_center, field_type_scalar) endif ocn_frc_m(:,:,:,n,m) = work1(:,:,:) @@ -3770,8 +3754,8 @@ subroutine ocn_data_ncar_init endif !echmod - currents cause Fram outflow to be too large -! ocn_frc_m(:,:,:,4,:) = c0 -! 
ocn_frc_m(:,:,:,5,:) = c0 + ocn_frc_m(:,:,:,4,:) = c0 + ocn_frc_m(:,:,:,5,:) = c0 !echmod end subroutine ocn_data_ncar_init @@ -3781,19 +3765,19 @@ end subroutine ocn_data_ncar_init subroutine ocn_data_ncar_init_3D ! Reads NCAR pop ocean forcing data set 'oceanmixed_ice_depth.nc' -! +! ! List of ocean forcing fields: Note that order is important! ! (order is determined by field list in vname). -! -! For ocean mixed layer-----------------------------units -! -! 1 sst------temperature---------------------------(C) -! 2 sss------salinity------------------------------(ppt) -! 3 hbl------depth---------------------------------(m) -! 4 u--------surface u current---------------------(m/s) -! 5 v--------surface v current---------------------(m/s) -! 6 dhdx-----surface tilt x direction--------------(m/m) -! 7 dhdy-----surface tilt y direction--------------(m/m) +! +! For ocean mixed layer-----------------------------units +! +! 1 sst------temperature---------------------------(C) +! 2 sss------salinity------------------------------(ppt) +! 3 hbl------depth---------------------------------(m) +! 4 u--------surface u current---------------------(m/s) +! 5 v--------surface v current---------------------(m/s) +! 6 dhdx-----surface tilt x direction--------------(m/m) +! 7 dhdy-----surface tilt y direction--------------(m/m) ! 8 qdp------ocean sub-mixed layer heat flux-------(W/m2) ! ! All fields are on the T-grid. @@ -3803,14 +3787,14 @@ subroutine ocn_data_ncar_init_3D use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: max_blocks - use ice_grid, only: grid_average_X2Y, ANGLET + use ice_grid, only: to_ugrid, ANGLET use ice_read_write, only: ice_read_nc_uv #ifdef USE_NETCDF use netcdf #endif #ifdef USE_NETCDF - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & n , & ! field index m , & ! month index nzlev ! z level of currents @@ -3822,8 +3806,8 @@ subroutine ocn_data_ncar_init_3D 'dhdx', 'dhdy', 'qdp' / integer (kind=int_kind) :: & - fid , & ! file id - dimid ! dimension id + fid , & ! file id + dimid ! dimension id integer (kind=int_kind) :: & status , & ! status flag @@ -3836,8 +3820,6 @@ subroutine ocn_data_ncar_init_3D character(len=*), parameter :: subname = '(ocn_data_ncar_init_3D)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - if (my_task == master_task) then write (nu_diag,*) 'WARNING: evp_prep calculates surface tilt' @@ -3846,7 +3828,7 @@ subroutine ocn_data_ncar_init_3D write (nu_diag,*) 'WARNING: Alter ice_dyn_evp.F if desired.' if (restore_ocn) write (nu_diag,*) & - 'SST restoring timescale = ',trestore,' days' + 'SST restoring timescale = ',trestore,' days' sst_file = trim(ocn_data_dir)//'/'//trim(oceanmixed_file) ! not just sst @@ -3866,15 +3848,11 @@ subroutine ocn_data_ncar_init_3D ! status = nf90_inq_dimid(fid,'nlon',dimid) status = nf90_inq_dimid(fid,'ni',dimid) - call ice_check_nc(status, subname//' ERROR: inq dimid ni', file=__FILE__, line=__LINE__) status = nf90_inquire_dimension(fid,dimid,len=nlon) - call ice_check_nc(status, subname//' ERROR: inq dim ni', file=__FILE__, line=__LINE__) - + ! status = nf90_inq_dimid(fid,'nlat',dimid) status = nf90_inq_dimid(fid,'nj',dimid) - call ice_check_nc(status, subname//' ERROR: inq dimid nj', file=__FILE__, line=__LINE__) status = nf90_inquire_dimension(fid,dimid,len=nlat) - call ice_check_nc(status, subname//' ERROR: inq dim nj', file=__FILE__, line=__LINE__) if( nlon .ne. 
nx_global ) then call abort_ice (error_message=subname//'ice: ocn frc file nlon ne nx_global', & @@ -3890,18 +3868,18 @@ subroutine ocn_data_ncar_init_3D ! Read in ocean forcing data for all 12 months do n=1,nfld do m=1,12 - + ! Note: netCDF does single to double conversion if necessary if (n == 4 .or. n == 5) then ! 3D currents nzlev = 1 ! surface currents - call ice_read_nc_uv(fid, m, nzlev, vname(n), work1, debug_forcing, & + call ice_read_nc_uv(fid, m, nzlev, vname(n), work1, dbug, & field_loc_center, field_type_scalar) else - call ice_read_nc(fid, m, vname(n), work1, debug_forcing, & + call ice_read_nc(fid, m, vname(n), work1, dbug, & field_loc_center, field_type_scalar) endif - ! the land mask used in ocean_mixed_depth.nc does not + ! the land mask used in ocean_mixed_depth.nc does not ! match our gx1v3 mask (hm) where (work1(:,:,:) < -900.) work1(:,:,:) = c0 @@ -3925,8 +3903,8 @@ subroutine ocn_data_ncar_init_3D work1(:,:,:) = ocn_frc_m(:,:,:,n ,m) work2(:,:,:) = ocn_frc_m(:,:,:,n+1,m) - call grid_average_X2Y('A',work1,'T',ocn_frc_m(:,:,:,n ,m),'U') - call grid_average_X2Y('A',work2,'T',ocn_frc_m(:,:,:,n+1,m),'U') + call to_ugrid(work1,ocn_frc_m(:,:,:,n ,m)) + call to_ugrid(work2,ocn_frc_m(:,:,:,n+1,m)) enddo ! month loop enddo ! field loop @@ -3964,7 +3942,7 @@ subroutine ocn_data_ncar(dt) real (kind=dbl_kind), intent(in) :: & dt ! time step - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & i, j, n, iblk , & ixm,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number @@ -3979,22 +3957,20 @@ subroutine ocn_data_ncar(dt) character(len=*), parameter :: subname = '(ocn_data_ncar)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- - + midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(mmonth),kind=dbl_kind)) ! exact middle +! midmonth = fix(p5 * real(daymo(month),kind=dbl_kind)) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -4009,23 +3985,24 @@ subroutine ocn_data_ncar(dt) ! Find interpolation coefficients call interp_coeff_monthly (recslot) - sst_data(:,:,:,:) = c0 do n = nfld, 1, -1 + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks ! use sst_data arrays as temporary work space until n=1 if (ixm /= -99) then ! first half of month sst_data(:,:,1,iblk) = ocn_frc_m(:,:,iblk,n,ixm) - sst_data(:,:,2,iblk) = ocn_frc_m(:,:,iblk,n,mmonth) + sst_data(:,:,2,iblk) = ocn_frc_m(:,:,iblk,n,month) else ! second half of month - sst_data(:,:,1,iblk) = ocn_frc_m(:,:,iblk,n,mmonth) + sst_data(:,:,1,iblk) = ocn_frc_m(:,:,iblk,n,month) sst_data(:,:,2,iblk) = ocn_frc_m(:,:,iblk,n,ixp) endif enddo + !$OMP END PARALLEL DO call interpolate_data (sst_data,work1) ! 
masking by hm is necessary due to NaNs in the data file - do j = 1, ny_block - do i = 1, nx_block + do j = 1, ny_block + do i = 1, nx_block if (n == 2) sss (i,j,:) = c0 if (n == 3) hmix (i,j,:) = c0 if (n == 4) uocn (i,j,:) = c0 @@ -4036,7 +4013,7 @@ subroutine ocn_data_ncar(dt) do iblk = 1, nblocks if (hm(i,j,iblk) == c1) then if (n == 2) sss (i,j,iblk) = work1(i,j,iblk) - if (n == 3) hmix (i,j,iblk) = max(mixed_layer_depth_default,work1(i,j,iblk)) + if (n == 3) hmix (i,j,iblk) = work1(i,j,iblk) if (n == 4) uocn (i,j,iblk) = work1(i,j,iblk) if (n == 5) vocn (i,j,iblk) = work1(i,j,iblk) if (n == 6) ss_tltx(i,j,iblk) = work1(i,j,iblk) @@ -4048,21 +4025,21 @@ subroutine ocn_data_ncar(dt) enddo enddo - do j = 1, ny_block - do i = 1, nx_block - sss (i,j,:) = max (sss(i,j,:), c0) - hmix(i,j,:) = max(hmix(i,j,:), c0) - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + sss (i,j,:) = max (sss(i,j,:), c0) + hmix(i,j,:) = max(hmix(i,j,:), c0) + enddo + enddo call ocn_freezing_temperature if (restore_ocn) then - do j = 1, ny_block - do i = 1, nx_block - sst(i,j,:) = sst(i,j,:) + (work1(i,j,:)-sst(i,j,:))*dt/trest - enddo - enddo + do j = 1, ny_block + do i = 1, nx_block + sst(i,j,:) = sst(i,j,:) + (work1(i,j,:)-sst(i,j,:))*dt/trest + enddo + enddo ! else sst is only updated in ice_ocean.F endif @@ -4071,20 +4048,20 @@ subroutine ocn_data_ncar(dt) call interpolate_data (sst_data,sst) !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block + do j = 1, ny_block + do i = 1, nx_block if (hm(i,j,iblk) == c1) then - sst(i,j,iblk) = max (sst(i,j,iblk), Tf(i,j,iblk)) + sst(i,j,iblk) = max (sst(i,j,iblk), Tf(i,j,iblk)) else sst(i,j,iblk) = c0 endif - enddo - enddo - enddo + enddo + enddo + enddo !$OMP END PARALLEL DO endif - if (debug_forcing) then + if (dbug) then if (my_task == master_task) & write (nu_diag,*) 'ocn_data_ncar' vmin = global_minval(Tf,distrb_info,tmask) @@ -4138,8 +4115,6 @@ subroutine ocn_data_oned character(len=*), parameter :: subname = '(ocn_data_oned)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - sss (:,:,:) = 34.0_dbl_kind ! sea surface salinity (ppt) call ocn_freezing_temperature @@ -4151,7 +4126,7 @@ subroutine ocn_data_oned ss_tlty(:,:,:) = c0 frzmlt (:,:,:) = c0 ! freezing/melting potential (W/m^2) qdp (:,:,:) = c0 ! deep ocean heat flux (W/m^2) - hmix (:,:,:) = mixed_layer_depth_default ! ocean mixed layer depth + hmix (:,:,:) = c20 ! ocean mixed layer depth end subroutine ocn_data_oned @@ -4161,15 +4136,14 @@ subroutine ocn_data_hadgem(dt) ! Reads in HadGEM ocean forcing data as required from netCDF files ! Current options (selected by ocn_data_type) -! hadgem_sst: read in sst only +! hadgem_sst: read in sst only ! hadgem_sst_uvocn: read in sst plus uocn and vocn ! authors: Ann Keen, Met Office use ice_domain, only: nblocks - use ice_domain_size, only: max_blocks use ice_flux, only: sst, uocn, vocn - use ice_grid, only: ANGLET + use ice_grid, only: t2ugrid_vector, ANGLET real (kind=dbl_kind), intent(in) :: & dt ! time step @@ -4188,16 +4162,14 @@ subroutine ocn_data_hadgem(dt) logical (kind=log_kind) :: readm - character (char_len) :: & + character (char_len) :: & fieldname ! field name in netcdf file - character (char_len_long) :: & + character (char_len_long) :: & filename ! name of netCDF file character(len=*), parameter :: subname = '(ocn_data_hadgem)' - if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' - !------------------------------------------------------------------- ! monthly data ! @@ -4206,12 +4178,12 @@ subroutine ocn_data_hadgem(dt) !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle +! midmonth = fix(p5 * real(daymo(month))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -4228,7 +4200,7 @@ subroutine ocn_data_hadgem(dt) ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. if (my_task == master_task .and. istep == 1) then write (nu_diag,*) ' ' @@ -4249,10 +4221,10 @@ subroutine ocn_data_hadgem(dt) ! ----------------------------------------------------------- sst_file = trim(ocn_data_dir)//'/MONTHLY/sst.1997.nc' fieldname='sst' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, sst_file, fieldname, sst_data, & field_loc_center, field_type_scalar) - + ! Interpolate to current time step call interpolate_data (sst_data, sstdat) @@ -4268,14 +4240,14 @@ subroutine ocn_data_hadgem(dt) enddo enddo !$OMP END PARALLEL DO - endif + endif ! ----------------------------------------------------------- ! Ocean currents ! -------------- - ! Values read in are on T grid and oriented geographically, hence + ! Values read in are on T grid and oriented geographically, hence ! vectors need to be rotated to model grid and then interpolated - ! to U grid. + ! to U grid. ! Also need to be converted from cm s-1 (UM) to m s-1 (CICE) ! ----------------------------------------------------------- @@ -4283,37 +4255,37 @@ subroutine ocn_data_hadgem(dt) filename = trim(ocn_data_dir)//'/MONTHLY/uocn.1997.nc' fieldname='uocn' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, filename, fieldname, uocn_data, & field_loc_center, field_type_vector) - + ! Interpolate to current time step call interpolate_data (uocn_data, uocn) filename = trim(ocn_data_dir)//'/MONTHLY/vocn.1997.nc' fieldname='vocn' - call read_data_nc (readm, 0, fyear, ixm, mmonth, ixp, & + call read_data_nc (readm, 0, fyear, ixm, month, ixp, & maxrec, filename, fieldname, vocn_data, & field_loc_center, field_type_vector) - + ! Interpolate to current time step call interpolate_data (vocn_data, vocn) - !----------------------------------------------------------------- - ! Rotate zonal/meridional vectors to local coordinates, + !----------------------------------------------------------------- + ! Rotate zonal/meridional vectors to local coordinates, ! 
and change units - !----------------------------------------------------------------- + !----------------------------------------------------------------- !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - workx = uocn(i,j,iblk) + workx = uocn(i,j,iblk) worky = vocn(i,j,iblk) - uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & - + worky*sin(ANGLET(i,j,iblk)) - vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & + uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & + + worky*sin(ANGLET(i,j,iblk)) + vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & - workx*sin(ANGLET(i,j,iblk)) uocn(i,j,iblk) = uocn(i,j,iblk) * cm_to_m @@ -4324,11 +4296,12 @@ subroutine ocn_data_hadgem(dt) enddo ! nblocks !$OMP END PARALLEL DO - !----------------------------------------------------------------- - ! Interpolate to U grid - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! Interpolate to U grid + !----------------------------------------------------------------- - ! tcraig, this is now computed in dynamics for consistency + call t2ugrid_vector(uocn) + call t2ugrid_vector(vocn) endif ! ocn_data_type = hadgem_sst_uvocn @@ -4351,10 +4324,6 @@ subroutine ocn_data_hycom_init character (char_len) :: & fieldname ! field name in netcdf file - character(len=*), parameter :: subname = '(ocn_data_hycom_init)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - if (trim(ocn_data_type) == 'hycom') then sss_file = trim(ocn_data_dir)//'ice.restart.surf.nc' @@ -4365,7 +4334,7 @@ subroutine ocn_data_hycom_init fieldname = 'sss' call ice_open_nc (sss_file, fid) - call ice_read_nc (fid, 1 , fieldname, sss, debug_forcing, & + call ice_read_nc (fid, 1 , fieldname, sss, dbug, & field_loc_center, field_type_scalar) call ice_close_nc(fid) @@ -4380,7 +4349,7 @@ subroutine ocn_data_hycom_init fieldname = 'sst' call ice_open_nc (sst_file, fid) - call ice_read_nc (fid, 1 , fieldname, sst, debug_forcing, & + call ice_read_nc (fid, 1 , fieldname, sst, dbug, & field_loc_center, field_type_scalar) call ice_close_nc(fid) @@ -4408,9 +4377,6 @@ subroutine hycom_atm_files fid ! File id character (char_len) :: & varname ! variable name in netcdf file - character(len=*), parameter :: subname = '(hycom_atm_files)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' fsw_file = trim(atm_data_dir)//'/forcing.shwflx.nc' flw_file = trim(atm_data_dir)//'/forcing.radflx.nc' @@ -4454,6 +4420,7 @@ subroutine hycom_atm_data use ice_flux, only: fsw, fsnow, Tair, uatm, vatm, Qa, flw use ice_domain, only: nblocks + use ice_calendar, only: year_init integer (kind=int_kind) :: & recnum ! record number @@ -4473,13 +4440,11 @@ subroutine hycom_atm_data character(len=*), parameter :: subname = '(hycom_atm_data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(secday_out=secday) - ! current time in HYCOM jday units (HYCOM ref year: 1900,12,31,000000) - hcdate = real(compute_days_between(1900,12,31,myear,mmonth,mday)) + msec/secday + ! current time in HYCOM jday units + hcdate = hc_jday(nyr+year_init-1,0,0)+ yday+sec/secday ! 
Init recnum try recnum=min(max(oldrecnum,1),Njday_atm-1) @@ -4502,7 +4467,7 @@ subroutine hycom_atm_data write (nu_diag,*) & 'ERROR: CICE: Atm forcing not available at hcdate =',hcdate write (nu_diag,*) & - 'ERROR: CICE: myear, yday ,msec = ',myear, yday, msec + 'ERROR: CICE: nyr, year_init, yday ,sec = ',nyr, year_init, yday, sec call abort_ice ('ERROR: CICE stopped') endif @@ -4553,7 +4518,7 @@ subroutine hycom_atm_data endif ! Interpolate - if (debug_forcing) then + if (dbug) then if (my_task == master_task) then write(nu_diag,*)'CICE: Atm. interpolate: = ',& hcdate,c1intp,c2intp @@ -4606,7 +4571,7 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & ! data is missing, and we assume periodicity when monthly data ! is missing. ! - use ice_diagnostics, only: debug_model_step + use ice_diagnostics, only: check_step logical (kind=log_kind), intent(in) :: flag @@ -4630,6 +4595,8 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & real (kind=dbl_kind), dimension(2), intent(inout) :: & field_data ! 2 values needed for interpolation + character(len=*), parameter :: subname = '(read_data_nc_point)' + integer (kind=int_kind) :: & nrec , & ! record number to read n2, n4 , & ! like ixm and ixp, but @@ -4637,17 +4604,13 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & arg , & ! value of time argument in field_data fid ! file id for netCDF routines - character(len=*), parameter :: subname = '(read_data_nc_point)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call ice_timer_start(timer_readwrite) ! reading/writing field_data = c0 ! to satisfy intent(out) attribute - if (istep1 > debug_model_step) debug_forcing = .true. !! debugging + if (istep1 > check_step) dbug = .true. !! debugging - if (my_task==master_task .and. (debug_forcing)) then + if (my_task==master_task .and. (dbug)) then write(nu_diag,*) ' ', trim(data_file) endif @@ -4687,14 +4650,14 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & ! write(nu_diag,*) 'ixm, ixx, ixp ', ixm, ixx, ixp ! write(nu_diag,*) 'maxrec ', maxrec ! write(nu_diag,*) 'fieldname ', fieldname - + call ice_open_nc (data_file, fid) arg = 1 nrec = recd + n2 - call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), debug_forcing, & + call ice_read_nc & + (fid, nrec, fieldname, field_data(arg), dbug, & field_loc, field_type) !if (ixx==1) call ice_close_nc(fid) @@ -4708,8 +4671,8 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + ixx - call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), debug_forcing, & + call ice_read_nc & + (fid, nrec, fieldname, field_data(arg), dbug, & field_loc, field_type) if (ixp /= -99) then @@ -4734,8 +4697,8 @@ subroutine read_data_nc_point (flag, recd, yr, ixm, ixx, ixp, & arg = arg + 1 nrec = recd + n4 - call ice_read_nc & - (fid, nrec, fieldname, field_data(arg), debug_forcing, & + call ice_read_nc & + (fid, nrec, fieldname, field_data(arg), dbug, & field_loc, field_type) endif ! ixp /= -99 @@ -4753,10 +4716,8 @@ subroutine ISPOL_files character(len=*), parameter :: subname = '(ISPOL_files)' - if (local_debug .and. 
my_task == master_task) write(nu_diag,*) subname,'fdbg start' - fsw_file = & - trim(atm_data_dir)//'/fsw_sfc_4Xdaily.nc' + trim(atm_data_dir)//'/fsw_sfc_4Xdaily.nc' flw_file = & trim(atm_data_dir)//'/flw_sfc_4Xdaily.nc' @@ -4768,10 +4729,10 @@ subroutine ISPOL_files trim(atm_data_dir)//'/uatm_10m_daily.nc' vwind_file = & - trim(atm_data_dir)//'/vatm_10m_daily.nc' + trim(atm_data_dir)//'/vatm_10m_daily.nc' tair_file = & - trim(atm_data_dir)//'/Tair_2m_daily.nc' + trim(atm_data_dir)//'/Tair_2m_daily.nc' humid_file = & trim(atm_data_dir)//'/Qa_2m_daily.nc' @@ -4794,7 +4755,7 @@ end subroutine ISPOL_files subroutine ISPOL_data -! Defines atmospheric data fields for Antarctic Weddell sea location +! Defines atmospheric data fields for Antarctic Weddell sea location ! authors: Nicole Jeffery, LANL ! @@ -4803,7 +4764,7 @@ subroutine ISPOL_data !local parameters - character (char_len_long) :: & + character (char_len_long) :: & met_file, & ! netcdf filename fieldname ! field name in netcdf file @@ -4812,19 +4773,19 @@ subroutine ISPOL_data Qa_data_p, fsnow_data_p, & fsw_data_p, flw_data_p, & uatm_data_p, vatm_data_p - - real (kind=dbl_kind), parameter :: & ! coefficients for Hyland-Wexler Qa - ps1 = 0.58002206e4_dbl_kind, & ! (K) + + real (kind=dbl_kind), parameter :: & ! coefficients for Hyland-Wexler Qa + ps1 = 0.58002206e4_dbl_kind, & ! (K) ps2 = 1.3914993_dbl_kind, & ! - ps3 = 0.48640239e-1_dbl_kind, & ! (K^-1) + ps3 = 0.48640239e-1_dbl_kind, & ! (K^-1) ps4 = 0.41764768e-4_dbl_kind, & ! (K^-2) ps5 = 0.14452093e-7_dbl_kind, & ! (K^-3) ps6 = 6.5459673_dbl_kind, & ! - ws1 = 621.97_dbl_kind, & ! for saturation mixing ratio - Pair = 1020._dbl_kind, & ! Sea level pressure (hPa) + ws1 = 621.97_dbl_kind, & ! for saturation mixing ratio + Pair = 1020._dbl_kind, & ! Sea level pressure (hPa) lapse_rate = 0.0065_dbl_kind ! (K/m) lapse rate over sea level - - ! for interpolation of hourly data + + ! for interpolation of hourly data integer (kind=int_kind) :: & ixm,ixx,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number @@ -4833,7 +4794,7 @@ subroutine ISPOL_data ! = 2 for date located at end of time interval real (kind=dbl_kind) :: & secday , & - Qa_pnt + Qa_pnt real (kind=dbl_kind) :: & sec1hr ! number of seconds in 1 hour @@ -4846,26 +4807,24 @@ subroutine ISPOL_data character(len=*), parameter :: subname = '(ISPOL_data)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call icepack_query_parameters(secday_out=secday) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - + if (trim(atm_data_format) == 'nc') then ! read nc file - + !------------------------------------------------------------------- ! data from NCEP_DOE Reanalysis 2 and Bareiss et al 2008 - ! daily data located at the end of the 24-hour period. + ! daily data located at the end of the 24-hour period. !------------------------------------------------------------------- dataloc = 2 ! data located at end of interval sec1hr = secday ! seconds in day - maxrec = 366 ! + maxrec = 366 ! ! current record number - recnum = int(yday) + recnum = int(yday) ! Compute record numbers for surrounding data (2 on each side) ixm = mod(recnum+maxrec-2,maxrec) + 1 @@ -4882,11 +4841,11 @@ subroutine ISPOL_data read1 = .false. if (istep==1 .or. oldrecnum .ne. recnum) read1 = .true. - + ! 
Daily 2m Air temperature 1991 - - met_file = tair_file - fieldname='Tair' + + met_file = tair_file + fieldname='Tair' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, Tair_data_p, & @@ -4896,7 +4855,7 @@ subroutine ISPOL_data + c2intp * Tair_data_p(2) & - lapse_rate*8.0_dbl_kind - met_file = humid_file + met_file = humid_file fieldname='Qa' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & @@ -4904,7 +4863,7 @@ subroutine ISPOL_data field_loc_center, field_type_scalar) Qa_pnt= c1intp * Qa_data_p(1) & - + c2intp * Qa_data_p(2) + + c2intp * Qa_data_p(2) Qa(:,:,:) = Qa_pnt met_file = uwind_file @@ -4915,19 +4874,19 @@ subroutine ISPOL_data field_loc_center, field_type_scalar) uatm(:,:,:) = c1intp * uatm_data_p(1) & - + c2intp * uatm_data_p(2) + + c2intp * uatm_data_p(2) met_file = vwind_file fieldname='vatm' - + call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, vatm_data_p, & field_loc_center, field_type_scalar) vatm(:,:,:) = c1intp * vatm_data_p(1) & + c2intp * vatm_data_p(2) - - met_file = rain_file + + met_file = rain_file fieldname='fsnow' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & @@ -4935,7 +4894,7 @@ subroutine ISPOL_data field_loc_center, field_type_scalar) fsnow(:,:,:) = (c1intp * fsnow_data_p(1) + & - c2intp * fsnow_data_p(2)) + c2intp * fsnow_data_p(2)) !----------------------------- !fsw and flw are every 6 hours @@ -4945,7 +4904,7 @@ subroutine ISPOL_data maxrec = 1460 ! 366*4 ! current record number - recnum4X = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec1hr) + recnum4X = 4*int(yday) - 3 + int(real(sec,kind=dbl_kind)/sec1hr) ! Compute record numbers for surrounding data (2 on each side) ixm = mod(recnum4X+maxrec-2,maxrec) + 1 @@ -4973,14 +4932,14 @@ subroutine ISPOL_data + c2intp * fsw_data_p(2) met_file = flw_file - fieldname='flw' + fieldname='flw' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, flw_data_p, & field_loc_center, field_type_scalar) flw(:,:,:) = c1intp * flw_data_p(1) & - + c2intp * flw_data_p(2) + + c2intp * flw_data_p(2) endif !nc !flw given cldf and Tair calculated in prepare_forcing @@ -4992,7 +4951,7 @@ subroutine ISPOL_data rhoa (:,:,:) = 1.3_dbl_kind ! air density (kg/m^3) cldf(:,:,:) = c1 !0.25_dbl_kind ! cloud fraction frain(:,:,:) = c0 ! this is available in hourlymet_rh file - + ! Save record number for next time step oldrecnum = recnum oldrecnum4X = recnum4X @@ -5001,20 +4960,20 @@ end subroutine ISPOL_data !======================================================================= - subroutine ocn_data_ispol_init + subroutine ocn_data_ispol_init ! Reads NCAR pop ocean forcing data set 'pop_frc_gx1v3_010815.nc' ! at the ISPOL location -67.4677N, 310.4375E ! -! For ocean mixed layer-----------------------------units -! -! 1 sst------temperature---------------------------(C) -! 2 sss------salinity------------------------------(ppt) -! 3 hbl------depth---------------------------------(m) -! 4 u--------surface u current---------------------(m/s) +! For ocean mixed layer-----------------------------units +! +! 1 sst------temperature---------------------------(C) +! 2 sss------salinity------------------------------(ppt) +! 3 hbl------depth---------------------------------(m) +! 4 u--------surface u current---------------------(m/s) ! 5 v--------surface v current---------------------(m/s) -! 6 dhdx-----surface tilt x direction--------------(m/m) -! 7 dhdy-----surface tilt y direction--------------(m/m) +! 
6 dhdx-----surface tilt x direction--------------(m/m) +! 7 dhdy-----surface tilt y direction--------------(m/m) ! 8 qdp------ocean sub-mixed layer heat flux-------(W/m2) ! ! Fields 4, 5, 6, 7 are on the U-grid; 1, 2, 3, and 8 are @@ -5025,7 +4984,7 @@ subroutine ocn_data_ispol_init use ice_gather_scatter use ice_read_write - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & n , & ! field index m ! month index @@ -5036,19 +4995,20 @@ subroutine ocn_data_ispol_init 'dhdx', 'dhdy', 'qdp' / real (kind=dbl_kind) :: & - work + work integer (kind=int_kind) :: & - fid ! file id + fid ! file id - character(len=*), parameter :: subname = '(ocn_data_ispol_init)' + integer (kind=int_kind) :: & + status ! status flag - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' + character(len=*), parameter :: subname = '(ocn_data_ispol_init)' if (my_task == master_task) then if (restore_ocn) write (nu_diag,*) & - 'SST restoring timescale = ',trestore,' days' + 'SST restoring timescale = ',trestore,' days' sst_file = trim(ocn_data_dir)//'/'//trim(oceanmixed_file) ! not just sst @@ -5067,14 +5027,14 @@ subroutine ocn_data_ispol_init ! Read in ocean forcing data for all 12 months do n=1,nfld - do m=1,12 + do m=1,12 ! Note: netCDF does single to double conversion if necessary if (n >= 4 .and. n <= 7) then - call ice_read_nc(fid, m, vname(n), work, debug_forcing, & + call ice_read_nc(fid, m, vname(n), work, dbug, & field_loc_NEcorner, field_type_vector) else - call ice_read_nc(fid, m, vname(n), work, debug_forcing, & - field_loc_center, field_type_scalar) + call ice_read_nc(fid, m, vname(n), work, dbug, & + field_loc_center, field_type_scalar) endif ocn_frc_m(:,:,:,n,m) = work enddo ! month loop @@ -5096,67 +5056,58 @@ end subroutine ocn_data_ispol_init !======================================================================= ! - subroutine box2001_data_atm + subroutine box2001_data -! wind fields as in Hunke, JCP 2001 -! these are defined at the u point +! wind and current fields as in Hunke, JCP 2001 ! authors: Elizabeth Hunke, LANL - use ice_domain, only: nblocks, blocks_ice - use ice_calendar, only: timesecs - use ice_blocks, only: block, get_block, nx_block, ny_block, nghost - use ice_flux, only: uatm, vatm, wind, rhoa, strax, stray - use ice_state, only: aice + use ice_domain, only: nblocks + use ice_blocks, only: nx_block, ny_block, nghost + use ice_flux, only: uocn, vocn, uatm, vatm, wind, rhoa, strax, stray + use ice_grid, only: uvm ! local parameters integer (kind=int_kind) :: & iblk, i,j ! loop indices - integer (kind=int_kind) :: & - iglob(nx_block), & ! global indices - jglob(ny_block) ! global indices - - type (block) :: & - this_block ! block information for current block - real (kind=dbl_kind) :: & - secday, pi , puny, period, pi2, tau - - character(len=*), parameter :: subname = '(box2001_data_atm)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - + secday, pi , puny, period, pi2, tau call icepack_query_parameters(pi_out=pi, pi2_out=pi2, puny_out=puny) call icepack_query_parameters(secday_out=secday) period = c4*secday do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block + do j = 1, ny_block + do i = 1, nx_block + + ! ocean current + ! 
constant in time, could be initialized in ice_flux.F90 + uocn(i,j,iblk) = p2*real(j-nghost, kind=dbl_kind) & + / real(nx_global,kind=dbl_kind) - p1 + vocn(i,j,iblk) = -p2*real(i-nghost, kind=dbl_kind) & + / real(ny_global,kind=dbl_kind) + p1 - this_block = get_block(blocks_ice(iblk),iblk) - iglob = this_block%i_glob - jglob = this_block%j_glob + uocn(i,j,iblk) = uocn(i,j,iblk) * uvm(i,j,iblk) + vocn(i,j,iblk) = vocn(i,j,iblk) * uvm(i,j,iblk) ! wind components - uatm(i,j,iblk) = c5 + (sin(pi2*timesecs/period)-c3) & - * sin(pi2*real(iglob(i), kind=dbl_kind) & + uatm(i,j,iblk) = c5 + (sin(pi2*time/period)-c3) & + * sin(pi2*real(i-nghost, kind=dbl_kind) & /real(nx_global,kind=dbl_kind)) & - * sin(pi *real(jglob(j), kind=dbl_kind) & + * sin(pi *real(j-nghost, kind=dbl_kind) & /real(ny_global,kind=dbl_kind)) - vatm(i,j,iblk) = c5 + (sin(pi2*timesecs/period)-c3) & - * sin(pi *real(iglob(i), kind=dbl_kind) & + vatm(i,j,iblk) = c5 + (sin(pi2*time/period)-c3) & + * sin(pi *real(i-nghost, kind=dbl_kind) & /real(nx_global,kind=dbl_kind)) & - * sin(pi2*real(jglob(j), kind=dbl_kind) & + * sin(pi2*real(j-nghost, kind=dbl_kind) & /real(ny_global,kind=dbl_kind)) ! wind stress wind(i,j,iblk) = sqrt(uatm(i,j,iblk)**2 + vatm(i,j,iblk)**2) tau = rhoa(i,j,iblk) * 0.0012_dbl_kind * wind(i,j,iblk) - - strax(i,j,iblk) = aice(i,j,iblk) * tau * uatm(i,j,iblk) - stray(i,j,iblk) = aice(i,j,iblk) * tau * vatm(i,j,iblk) + strax(i,j,iblk) = tau * uatm(i,j,iblk) + stray(i,j,iblk) = tau * vatm(i,j,iblk) ! initialization test ! Diagonal wind vectors 1 @@ -5182,191 +5133,27 @@ subroutine box2001_data_atm ! / real(ny_global,kind=dbl_kind) ! initialization test - enddo - enddo - enddo ! nblocks - - end subroutine box2001_data_atm - -!======================================================================= -! - subroutine box2001_data_ocn - -! current fields as in Hunke, JCP 2001 -! these are defined at the u point -! authors: Elizabeth Hunke, LANL - - use ice_domain, only: nblocks, blocks_ice - use ice_blocks, only: block, get_block, nx_block, ny_block, nghost - use ice_flux, only: uocn, vocn - use ice_grid, only: uvm - - ! local parameters - - integer (kind=int_kind) :: & - iblk, i,j ! loop indices - - integer (kind=int_kind) :: & - iglob(nx_block), & ! global indices - jglob(ny_block) ! global indices - - type (block) :: & - this_block ! block information for current block - - character(len=*), parameter :: subname = '(box2001_data_ocn)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - - this_block = get_block(blocks_ice(iblk),iblk) - iglob = this_block%i_glob - jglob = this_block%j_glob - - ! ocean current - ! constant in time, could be initialized in ice_flux.F90 - uocn(i,j,iblk) = p2*real(jglob(j), kind=dbl_kind) & - / real(ny_global,kind=dbl_kind) - p1 - vocn(i,j,iblk) = -p2*real(iglob(i), kind=dbl_kind) & - / real(nx_global,kind=dbl_kind) + p1 - - uocn(i,j,iblk) = uocn(i,j,iblk) * uvm(i,j,iblk) - vocn(i,j,iblk) = vocn(i,j,iblk) * uvm(i,j,iblk) - - enddo - enddo - enddo ! nblocks - - end subroutine box2001_data_ocn - -!======================================================================= -! - subroutine uniform_data_atm(dir,spd) -! 
uniform wind fields in some direction - - use ice_domain, only: nblocks - use ice_blocks, only: nx_block, ny_block, nghost - use ice_flux, only: uatm, vatm, wind, rhoa, strax, stray - use ice_state, only: aice - - character(len=*), intent(in) :: dir - real(kind=dbl_kind), intent(in), optional :: spd ! velocity - - ! local parameters - - integer (kind=int_kind) :: & - iblk, i,j ! loop indices - - real (kind=dbl_kind) :: & - tau, & - atm_val ! value to use for atm speed - - character(len=*), parameter :: subname = '(uniform_data_atm)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - ! check for optional spd - if (present(spd)) then - atm_val = spd - else - atm_val = c5 ! default - endif - - ! wind components - if (dir == 'NE') then - uatm = atm_val - vatm = atm_val - elseif (dir == 'N') then - uatm = c0 - vatm = atm_val - elseif (dir == 'E') then - uatm = atm_val - vatm = c0 - elseif (dir == 'S') then - uatm = c0 - vatm = -atm_val - elseif (dir == 'W') then - uatm = -atm_val - vatm = c0 - else - call abort_ice (subname//'ERROR: dir unknown, dir = '//trim(dir), & - file=__FILE__, line=__LINE__) - endif - - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - - ! wind stress - wind(i,j,iblk) = sqrt(uatm(i,j,iblk)**2 + vatm(i,j,iblk)**2) - tau = rhoa(i,j,iblk) * 0.0012_dbl_kind * wind(i,j,iblk) - strax(i,j,iblk) = aice(i,j,iblk) * tau * uatm(i,j,iblk) - stray(i,j,iblk) = aice(i,j,iblk) * tau * vatm(i,j,iblk) - - enddo - enddo + enddo + enddo enddo ! nblocks - end subroutine uniform_data_atm -!======================================================================= - -! - subroutine uniform_data_ocn(dir,spd) - -! uniform current fields in some direction - - use ice_flux, only: uocn, vocn - - character(len=*), intent(in) :: dir - - real(kind=dbl_kind), intent(in), optional :: spd ! velocity - - ! local parameters - - real(kind=dbl_kind) :: & - ocn_val ! value to use for ocean currents - - character(len=*), parameter :: subname = '(uniform_data_ocn)' - - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - - if (present(spd)) then - ocn_val = spd - else - ocn_val = p1 ! default - endif - - ! ocn components - if (dir == 'NE') then - uocn = ocn_val - vocn = ocn_val - elseif (dir == 'N') then - uocn = c0 - vocn = ocn_val - elseif (dir == 'E') then - uocn = ocn_val - vocn = c0 - else - call abort_ice (subname//'ERROR: dir unknown, dir = '//trim(dir), & - file=__FILE__, line=__LINE__) - endif + end subroutine box2001_data - end subroutine uniform_data_ocn !======================================================================= subroutine get_wave_spec - + use ice_read_write, only: ice_read_nc_xyf - use ice_arrays_column, only: wave_spectrum, & + use ice_arrays_column, only: wave_spectrum, wave_sig_ht, & dwavefreq, wavefreq use ice_constants, only: c0 + use ice_domain_size, only: nfreq use ice_timers, only: ice_timer_start, ice_timer_stop, timer_fsd ! local variables integer (kind=int_kind) :: & - fid ! file id for netCDF routines + fid, & ! file id for netCDF routines + k real(kind=dbl_kind), dimension(nfreq) :: & wave_spectrum_profile ! wave spectrum @@ -5375,8 +5162,6 @@ subroutine get_wave_spec logical (kind=log_kind) :: wave_spec character(len=*), parameter :: subname = '(get_wave_spec)' - if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' - call ice_timer_start(timer_fsd) call icepack_query_parameters(wave_spec_out=wave_spec, & @@ -5387,7 +5172,8 @@ subroutine get_wave_spec ! 
if no wave data is provided, wave_spectrum is zero everywhere wave_spectrum(:,:,:,:) = c0 - debug_forcing = .false. + wave_spec_dir = ocn_data_dir + dbug = .false. ! wave spectrum and frequencies if (wave_spec) then @@ -5404,7 +5190,10 @@ subroutine get_wave_spec file=__FILE__, line=__LINE__) else #ifdef USE_NETCDF - call wave_spec_data + call ice_open_nc(wave_spec_file,fid) + call ice_read_nc_xyf (fid, 1, 'efreq', wave_spectrum(:,:,:,:), dbug, & + field_loc_center, field_type_scalar) + call ice_close_nc(fid) #else write (nu_diag,*) "wave spectrum file not available, requires cpp USE_NETCDF" write (nu_diag,*) "wave spectrum file not available, using default profile" @@ -5419,339 +5208,9 @@ subroutine get_wave_spec end subroutine get_wave_spec -!======================================================================= -! -! Read in wave spectrum forcing as a function of time. 6 hourly -! LR started working from JRA55_data routine -! Changed fields, and changed 3 hourly to 6 hourly -! - subroutine wave_spec_data - - use ice_blocks, only: block, get_block - use ice_global_reductions, only: global_minval, global_maxval - use ice_domain, only: nblocks, distrb_info, blocks_ice - use ice_arrays_column, only: wave_spectrum, & - dwavefreq, wavefreq - use ice_read_write, only: ice_read_nc_xyf - use ice_grid, only: hm, tlon, tlat, tmask, umask - use ice_calendar, only: days_per_year, use_leap_years - - integer (kind=int_kind) :: & - ncid , & ! netcdf file id - i, j, freq , & - ixm,ixx,ixp , & ! record numbers for neighboring months - recnum , & ! record number - maxrec , & ! maximum record number - recslot , & ! spline slot for current record - midmonth , & ! middle day of month - dataloc , & ! = 1 for data located in middle of time interval - ! = 2 for date located at end of time interval - iblk , & ! block index - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - yr ! current forcing year - - real (kind=dbl_kind) :: & - sec6hr , & ! number of seconds in 3 hours - secday , & ! number of seconds in day - vmin, vmax - - logical (kind=log_kind) :: readm, read6,debug_n_d - - type (block) :: & - this_block ! block information for current block - - real(kind=dbl_kind), dimension(nfreq) :: & - wave_spectrum_profile ! wave spectrum - - character(len=64) :: fieldname !netcdf field name - character(char_len_long) :: spec_file - character(char_len) :: wave_spec_type - logical (kind=log_kind) :: wave_spec - character(len=*), parameter :: subname = '(wave_spec_data)' - - - - debug_n_d = .false. !usually false - - call icepack_query_parameters(secday_out=secday) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - call icepack_init_wave(nfreq, & - wave_spectrum_profile, & - wavefreq, dwavefreq) - - - !spec_file = trim(ocn_data_dir)//'/'//trim(wave_spec_file) - spec_file = trim(wave_spec_file) - wave_spectrum_data = c0 - wave_spectrum = c0 - yr = fyear ! current year - !------------------------------------------------------------------- - ! 6-hourly data - ! - ! Assume that the 6-hourly value is located at the end of the - ! 6-hour period. This is the convention for NCEP reanalysis data. - ! E.g. record 1 gives conditions at 6 am GMT on 1 January. - !------------------------------------------------------------------- - - dataloc = 2 ! data located at end of interval - sec6hr = secday/c4 ! seconds in 6 hours - !maxrec = 2920 ! 
365*8; for leap years = 366*8 - - if (use_leap_years) days_per_year = 366 !overrides setting of 365 in ice_calendar - maxrec = days_per_year*4 - - if(days_per_year == 365 .and. (mod(yr, 4) == 0)) then - call abort_ice('days_per_year should be set to 366 for leap years') - end if - - ! current record number - recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec6hr) - - ! Compute record numbers for surrounding data (2 on each side) - - ixm = mod(recnum+maxrec-2,maxrec) + 1 - ixx = mod(recnum-1, maxrec) + 1 - - ! Compute interpolation coefficients - ! If data is located at the end of the time interval, then the - ! data value for the current record goes in slot 2 - - recslot = 2 - ixp = -99 - call interp_coeff (recnum, recslot, sec6hr, dataloc) - - ! Read - read6 = .false. - if (istep==1 .or. oldrecnum .ne. recnum) read6 = .true. - !------------------------------------------------------------------- - ! File is NETCDF - ! file variable names are: - ! efreq (wave spectrum, energy as a function of wave frequency UNITS) - !------------------------------------------------------------------- - call ice_open_nc(spec_file,ncid) - - call ice_read_nc_xyf(ncid,recnum,'efreq',wave_spectrum_data(:,:,:,1,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) - call ice_read_nc_xyf(ncid,recnum,'efreq',wave_spectrum_data(:,:,:,2,:),debug_n_d, & - field_loc=field_loc_center, & - field_type=field_type_scalar) - call ice_close_nc(ncid) - - - ! Interpolate - call interpolate_wavespec_data (wave_spectrum_data, wave_spectrum) - - ! Save record number - oldrecnum = recnum - - if (local_debug) then - if (my_task == master_task) write (nu_diag,*) & - 'wave_spec_data ',spec_file - if (my_task.eq.master_task) & - write (nu_diag,*) 'maxrec',maxrec - write (nu_diag,*) 'days_per_year', days_per_year - - endif ! local debug - - end subroutine wave_spec_data - -!======================================================================= - -! initial snow aging lookup table -! -! Dry snow metamorphism table -! snicar_drdt_bst_fit_60_c070416.nc -! Flanner (file metadata units mislabelled) -! drdsdt0 (10^-6 m/hr) tau (10^-6 m) -! - subroutine init_snowtable - - use ice_broadcast, only: broadcast_array, broadcast_scalar - integer (kind=int_kind) :: & - idx_T_max , & ! Table dimensions - idx_rhos_max, & - idx_Tgrd_max - real (kind=dbl_kind), allocatable :: & - snowage_rhos (:), & - snowage_Tgrd (:), & - snowage_T (:), & - snowage_tau (:,:,:), & - snowage_kappa(:,:,:), & - snowage_drdt0(:,:,:) - - ! local variables - - logical (kind=log_kind) :: diag = .false. - - integer (kind=int_kind) :: & - fid ! file id for netCDF file - - character (char_len) :: & - snw_aging_table, & ! aging table setting - fieldname ! field name in netcdf file - - character(len=*), parameter :: subname = '(init_snowtable)' - - !----------------------------------------------------------------- - ! read table of snow aging parameters - !----------------------------------------------------------------- - - call icepack_query_parameters(snw_aging_table_out=snw_aging_table, & - isnw_rhos_out=idx_rhos_max, isnw_Tgrd_out=idx_Tgrd_max, isnw_T_out=idx_T_max) - - if (my_task == master_task) then - write (nu_diag,*) ' ' - write (nu_diag,*) 'Snow aging file:', trim(snw_filename) - endif - - if (snw_aging_table == 'snicar') then - ! 
just read the 3d data and pass it in - - call ice_open_nc(snw_filename,fid) - - allocate(snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max)) - allocate(snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max)) - allocate(snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max)) - - fieldname = trim(snw_tau_fname) - call ice_read_nc(fid,fieldname,snowage_tau, diag, & - idx_rhos_max,idx_Tgrd_max,idx_T_max) - fieldname = trim(snw_kappa_fname) - call ice_read_nc(fid,fieldname,snowage_kappa,diag, & - idx_rhos_max,idx_Tgrd_max,idx_T_max) - fieldname = trim(snw_drdt0_fname) - call ice_read_nc(fid,fieldname,snowage_drdt0,diag, & - idx_rhos_max,idx_Tgrd_max,idx_T_max) - - call ice_close_nc(fid) - - call broadcast_array(snowage_tau , master_task) - call broadcast_array(snowage_kappa, master_task) - call broadcast_array(snowage_drdt0, master_task) - - if (my_task == master_task) then - write(nu_diag,*) subname,' ' - write(nu_diag,*) subname,' Successfully read snow aging properties:' - write(nu_diag,*) subname,' snw_aging_table = ',trim(snw_aging_table) - write(nu_diag,*) subname,' idx_rhos_max = ',idx_rhos_max - write(nu_diag,*) subname,' idx_Tgrd_max = ',idx_Tgrd_max - write(nu_diag,*) subname,' idx_T_max = ',idx_T_max - write(nu_diag,*) subname,' Data at rhos, Tgrd, T at first index ' - write(nu_diag,*) subname,' snoage_tau (1,1,1) = ',snowage_tau (1,1,1) - write(nu_diag,*) subname,' snoage_kappa (1,1,1) = ',snowage_kappa(1,1,1) - write(nu_diag,*) subname,' snoage_drdt0 (1,1,1) = ',snowage_drdt0(1,1,1) - write(nu_diag,*) subname,' Data at rhos, Tgrd, T at max index' - write(nu_diag,*) subname,' snoage_tau (max,max,max) = ',snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max) - write(nu_diag,*) subname,' snoage_kappa (max,max,max) = ',snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max) - write(nu_diag,*) subname,' snoage_drdt0 (max,max,max) = ',snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max) - endif - - call icepack_init_parameters( & - snowage_tau_in = snowage_tau, & - snowage_kappa_in = snowage_kappa, & - snowage_drdt0_in = snowage_drdt0 ) - - deallocate(snowage_tau) - deallocate(snowage_kappa) - deallocate(snowage_drdt0) - - else - ! 
read everything and pass it in - - call ice_open_nc(snw_filename,fid) - - fieldname = trim(snw_rhos_fname) - call ice_get_ncvarsize(fid,fieldname,idx_rhos_max) - fieldname = trim(snw_Tgrd_fname) - call ice_get_ncvarsize(fid,fieldname,idx_Tgrd_max) - fieldname = trim(snw_T_fname) - call ice_get_ncvarsize(fid,fieldname,idx_T_max) - - call broadcast_scalar(idx_rhos_max, master_task) - call broadcast_scalar(idx_Tgrd_max, master_task) - call broadcast_scalar(idx_T_max , master_task) - - allocate(snowage_rhos (idx_rhos_max)) - allocate(snowage_Tgrd (idx_Tgrd_max)) - allocate(snowage_T (idx_T_max)) - allocate(snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max)) - allocate(snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max)) - allocate(snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max)) - - fieldname = trim(snw_rhos_fname) - call ice_read_nc(fid,fieldname,snowage_rhos, diag, & - idx_rhos_max) - fieldname = trim(snw_Tgrd_fname) - call ice_read_nc(fid,fieldname,snowage_Tgrd, diag, & - idx_Tgrd_max) - fieldname = trim(snw_T_fname) - call ice_read_nc(fid,fieldname,snowage_T, diag, & - idx_T_max) - - fieldname = trim(snw_tau_fname) - call ice_read_nc(fid,fieldname,snowage_tau, diag, & - idx_rhos_max,idx_Tgrd_max,idx_T_max) - fieldname = trim(snw_kappa_fname) - call ice_read_nc(fid,fieldname,snowage_kappa,diag, & - idx_rhos_max,idx_Tgrd_max,idx_T_max) - fieldname = trim(snw_drdt0_fname) - call ice_read_nc(fid,fieldname,snowage_drdt0,diag, & - idx_rhos_max,idx_Tgrd_max,idx_T_max) - - call ice_close_nc(fid) - - call broadcast_array(snowage_rhos , master_task) - call broadcast_array(snowage_Tgrd , master_task) - call broadcast_array(snowage_T , master_task) - call broadcast_array(snowage_tau , master_task) - call broadcast_array(snowage_kappa, master_task) - call broadcast_array(snowage_drdt0, master_task) - - if (my_task == master_task) then - write(nu_diag,*) subname,' ' - write(nu_diag,*) subname,' Successfully read snow aging properties:' - write(nu_diag,*) subname,' idx_rhos_max = ',idx_rhos_max - write(nu_diag,*) subname,' idx_Tgrd_max = ',idx_Tgrd_max - write(nu_diag,*) subname,' idx_T_max = ',idx_T_max - write(nu_diag,*) subname,' Data at rhos, Tgrd, T = ',snowage_rhos(1),snowage_Tgrd(1),snowage_T(1) - write(nu_diag,*) subname,' snoage_tau (1,1,1) = ',snowage_tau (1,1,1) - write(nu_diag,*) subname,' snoage_kappa (1,1,1) = ',snowage_kappa(1,1,1) - write(nu_diag,*) subname,' snoage_drdt0 (1,1,1) = ',snowage_drdt0(1,1,1) - write(nu_diag,*) subname,' Data at rhos, Tgrd, T = ', & - snowage_rhos(idx_rhos_max),snowage_Tgrd(idx_Tgrd_max),snowage_T(idx_T_max) - write(nu_diag,*) subname,' snoage_tau (max,max,max) = ',snowage_tau (idx_rhos_max, idx_Tgrd_max, idx_T_max) - write(nu_diag,*) subname,' snoage_kappa (max,max,max) = ',snowage_kappa(idx_rhos_max, idx_Tgrd_max, idx_T_max) - write(nu_diag,*) subname,' snoage_drdt0 (max,max,max) = ',snowage_drdt0(idx_rhos_max, idx_Tgrd_max, idx_T_max) - endif - - call icepack_init_parameters( & - isnw_t_in = idx_T_max, & - isnw_Tgrd_in = idx_Tgrd_max, & - isnw_rhos_in = idx_rhos_max, & - snowage_rhos_in = snowage_rhos, & - snowage_Tgrd_in = snowage_Tgrd, & - snowage_T_in = snowage_T, & - snowage_tau_in = snowage_tau, & - snowage_kappa_in = snowage_kappa, & - snowage_drdt0_in = snowage_drdt0 ) - - deallocate(snowage_rhos) - deallocate(snowage_Tgrd) - deallocate(snowage_T) - deallocate(snowage_tau) - deallocate(snowage_kappa) - deallocate(snowage_drdt0) - - endif - - end subroutine init_snowtable - 
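
Aside for reviewers: the record-number arithmetic used by the removed wave_spec_data routine above can be checked in isolation. The standalone sketch below is not part of the patch; it simply reproduces that indexing with the names taken from the routine (recnum, ixm, ixx, sec6hr, maxrec). With data stamped at the end of each 6-hour interval, the current record follows from yday and msec, and the previous record wraps around the end of the year.

      program check_6hourly_records
         implicit none
         integer, parameter :: dbl_kind = selected_real_kind(13)
         real(dbl_kind), parameter :: secday = 86400.0_dbl_kind
         real(dbl_kind) :: sec6hr, yday
         integer :: msec, maxrec, recnum, ixm, ixx

         sec6hr = secday/4.0_dbl_kind   ! seconds in 6 hours
         maxrec = 365*4                 ! records per (non-leap) year
         yday   = 1.0_dbl_kind          ! 1 January
         msec   = 0                     ! 00:00 GMT

         ! current record; record 1 holds conditions valid at 06:00 GMT on 1 Jan
         recnum = 4*int(yday) - 3 + int(real(msec,kind=dbl_kind)/sec6hr)

         ! neighbouring records, wrapping around the end of the year
         ixm = mod(recnum+maxrec-2, maxrec) + 1   ! previous record (= 1460 here)
         ixx = mod(recnum-1,        maxrec) + 1   ! current record  (= 1 here)

         write(*,*) 'recnum, ixm, ixx =', recnum, ixm, ixx
      end program check_6hourly_records
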
!======================================================================= end module ice_forcing !======================================================================= + diff --git a/cicecore/cicedyn/general/ice_forcing_bgc.F90 b/cicecore/cicedyn/general/ice_forcing_bgc.F90 index 69c3ea311..e5ef851fa 100644 --- a/cicecore/cicedyn/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedyn/general/ice_forcing_bgc.F90 @@ -14,7 +14,7 @@ module ice_forcing_bgc use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: max_blocks use ice_communicate, only: my_task, master_task - use ice_calendar, only: dt, istep, msec, mday, mmonth + use ice_calendar, only: dt, istep, sec, mday, month use ice_fileunits, only: nu_diag use ice_arrays_column, only: restore_bgc, & bgc_data_dir, fe_data_type @@ -23,7 +23,7 @@ module ice_forcing_bgc use ice_exit, only: abort_ice use ice_forcing, only: bgc_data_type use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_nspint_3bd, icepack_max_aero, & + use icepack_intfc, only: icepack_nspint, icepack_max_aero, & icepack_max_algae, icepack_max_doc, icepack_max_dic use icepack_intfc, only: icepack_query_tracer_flags, & icepack_query_parameters, icepack_query_parameters, & @@ -32,7 +32,8 @@ module ice_forcing_bgc implicit none private public :: get_forcing_bgc, get_atm_bgc, fzaero_data, alloc_forcing_bgc, & - init_bgc_data, faero_data, faero_default, fiso_default + init_bgc_data, faero_data, faero_default, faero_optics, & + fiso_default integer (kind=int_kind) :: & bgcrecnum = 0 ! old record number (save between steps) @@ -162,12 +163,12 @@ subroutine get_forcing_bgc !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -!!! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle +!!! midmonth = fix(p5 * real(daymo(month))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -183,7 +184,7 @@ subroutine get_forcing_bgc call interp_coeff_monthly (recslot) readm = .false. - if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. endif ! 'clim prep' @@ -191,17 +192,17 @@ subroutine get_forcing_bgc ! Read two monthly silicate values and interpolate. ! Restore toward interpolated value. !------------------------------------------------------------------- - + if (trim(bgc_data_type)=='clim' .AND. tr_bgc_Sil) then - ! call read_clim_data (readm, 0, ixm, mmonth, ixp, & + ! call read_clim_data (readm, 0, ixm, month, ixp, & ! sil_file, sil_data, & ! field_loc_center, field_type_scalar) fieldname = 'silicate' - call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & + call read_clim_data_nc (readm, 0, ixm, month, ixp, & sil_file, fieldname, sil_data, & field_loc_center, field_type_scalar) call interpolate_data (sil_data, sildat) - + if (istep == 1 .or. .NOT. restore_bgc) then !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) @@ -274,12 +275,12 @@ subroutine get_forcing_bgc ! Restore toward interpolated value. !------------------------------------------------------------------- - if (trim(bgc_data_type)=='clim' .AND. tr_bgc_Nit) then - ! 
call read_clim_data (readm, 0, ixm, mmonth, ixp, & + if (trim(bgc_data_type)=='clim' .AND. tr_bgc_Nit) then + ! call read_clim_data (readm, 0, ixm, month, ixp, & ! nit_file, nit_data, & ! field_loc_center, field_type_scalar) fieldname = 'nitrate' - call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & + call read_clim_data_nc (readm, 0, ixm, month, ixp, & nit_file, fieldname, nit_data, & field_loc_center, field_type_scalar) call interpolate_data (nit_data, nitdat) @@ -320,7 +321,7 @@ subroutine get_forcing_bgc do i = ilo, ihi nit(i,j,iblk) = nit(i,j,iblk) & - + (nitdat(i,j,iblk)-nit(i,j,iblk))*dt/trest + + (nitdat(i,j,iblk)-nit(i,j,iblk))*dt/trest ks = icepack_max_algae + 1 ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic @@ -331,7 +332,7 @@ subroutine get_forcing_bgc !$OMP END PARALLEL DO endif !restore_bgc -! elseif (trim(nit_data_type) == 'sss' .AND. tr_bgc_Nit) then +! elseif (trim(nit_data_type) == 'sss' .AND. tr_bgc_Nit) then ! !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) ! do iblk = 1, nblocks @@ -344,11 +345,11 @@ subroutine get_forcing_bgc ! do j = jlo, jhi ! do i = ilo, ihi -! nit(i,j,iblk) = sss(i,j,iblk) +! nit(i,j,iblk) = sss(i,j,iblk) ! ks = icepack_max_algae + 1 -! ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit +! ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ! ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic -! ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON +! ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON ! enddo ! enddo ! enddo @@ -366,12 +367,12 @@ subroutine get_forcing_bgc do j = jlo, jhi do i = ilo, ihi - + nit(i,j,iblk) = 12.0_dbl_kind ks = icepack_max_algae + 1 - ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit + ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic - ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON + ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON enddo enddo enddo @@ -380,15 +381,15 @@ subroutine get_forcing_bgc endif !tr_bgc_Nit !------------------------------------------------------------------- - ! Data from Papdimitrious et al., 2007, Limnol. Oceanogr. - ! and WOA at 68oS, 304.5oE : - ! daily data located at the end of the 24-hour period. + ! Data from Papdimitrious et al., 2007, Limnol. Oceanogr. + ! and WOA at 68oS, 304.5oE : + ! daily data located at the end of the 24-hour period. !------------------------------------------------------------------- if (trim(bgc_data_type) == 'ISPOL') then nit_file = trim(bgc_data_dir)//'nutrients_daily_ISPOL_WOA_field3.nc' - sil_file = trim(bgc_data_dir)//'nutrients_daily_ISPOL_WOA_field3.nc' + sil_file = trim(bgc_data_dir)//'nutrients_daily_ISPOL_WOA_field3.nc' if (my_task == master_task .and. istep == 1) then if (tr_bgc_Sil) then @@ -407,45 +408,45 @@ subroutine get_forcing_bgc dataloc = 2 ! data located at end of interval sec1hr = secday ! seconds in day - maxrec = 365 ! + maxrec = 365 ! ! current record number - recnum = int(yday) + recnum = int(yday) ! Compute record numbers for surrounding data (2 on each side) ixm = mod(recnum+maxrec-2,maxrec) + 1 ixx = mod(recnum-1, maxrec) + 1 - + recslot = 2 ixp = -99 call interp_coeff (recnum, recslot, sec1hr, dataloc) read1 = .false. if (istep==1 .or. bgcrecnum .ne. recnum) read1 = .true. 
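
Aside for reviewers: the two weights produced by interp_coeff are used a few lines further down as sil = c1intp*sil_data_p(1) + c2intp*sil_data_p(2). The sketch below is not part of the patch and does not reproduce interp_coeff itself; it assumes the usual two-point convention in which c2intp is the elapsed fraction of the daily interval and c1intp = 1 - c2intp, with hypothetical data values.

      program blend_daily_records
         implicit none
         integer, parameter :: dbl_kind = selected_real_kind(13)
         real(dbl_kind) :: c1intp, c2intp, frac, val1, val2, val

         frac   = 0.25_dbl_kind              ! e.g. 06:00 GMT within the daily interval
         c2intp = frac                       ! weight of the newer record (slot 2)
         c1intp = 1.0_dbl_kind - frac        ! weight of the older record (slot 1)
         val1   = 3.0_dbl_kind               ! older daily value (hypothetical)
         val2   = 5.0_dbl_kind               ! newer daily value (hypothetical)
         val    = c1intp*val1 + c2intp*val2  ! = 3.5 one quarter of the way through the day

         write(*,*) 'interpolated value =', val
      end program blend_daily_records
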
- - + + if (tr_bgc_Sil) then met_file = sil_file - fieldname= 'silicate' + fieldname= 'silicate' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, sil_data_p, & field_loc_center, field_type_scalar) - + sil(:,:,:) = c1intp * sil_data_p(1) & + c2intp * sil_data_p(2) endif if (tr_bgc_Nit) then met_file = nit_file - fieldname= 'nitrate' + fieldname= 'nitrate' call read_data_nc_point(read1, 0, fyear, ixm, ixx, ixp, & maxrec, met_file, fieldname, nit_data_p, & field_loc_center, field_type_scalar) - + nit(:,:,:) = c1intp * nit_data_p(1) & + c2intp * nit_data_p(2) endif - + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks @@ -457,13 +458,13 @@ subroutine get_forcing_bgc do j = jlo, jhi do i = ilo, ihi - + ks = 2*icepack_max_algae + icepack_max_doc + 3 + icepack_max_dic - ocean_bio_all(i,j,ks,iblk) = sil(i,j,iblk) !Sil + ocean_bio_all(i,j,ks,iblk) = sil(i,j,iblk) !Sil ks = icepack_max_algae + 1 ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !nit ks = 2*icepack_max_algae + icepack_max_doc + 7 + icepack_max_dic - ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON + ocean_bio_all(i,j,ks,iblk) = nit(i,j,iblk) !PON enddo enddo enddo @@ -479,11 +480,11 @@ end subroutine get_forcing_bgc ! ! author: Nicole Jeffery, LANL - subroutine get_atm_bgc + subroutine get_atm_bgc use ice_blocks, only: block, get_block use ice_domain, only: nblocks, blocks_ice - use ice_domain_size, only: n_zaero + use ice_domain_size, only: n_zaero use ice_flux_bgc, only: flux_bio_atm, faero_atm ! local variables @@ -491,7 +492,7 @@ subroutine get_atm_bgc integer (kind=int_kind) :: & i, j, nn , & ! horizontal indices ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - iblk ! block index + iblk ! block index logical (kind=log_kind) :: & tr_zaero @@ -519,15 +520,15 @@ subroutine get_atm_bgc !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,nn) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - - do nn = 1, n_zaero + + do nn = 1, n_zaero do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi flux_bio_atm(i,j,nlt_zaero(nn),iblk) = faero_atm(i,j,nn,iblk) enddo enddo @@ -568,10 +569,10 @@ subroutine faero_default faero_atm(:,:,1,:) = 1.e-12_dbl_kind ! kg/m^2 s faero_atm(:,:,2,:) = 1.e-13_dbl_kind - faero_atm(:,:,3,:) = 1.e-14_dbl_kind - faero_atm(:,:,4,:) = 1.e-14_dbl_kind - faero_atm(:,:,5,:) = 1.e-14_dbl_kind - faero_atm(:,:,6,:) = 1.e-14_dbl_kind + faero_atm(:,:,3,:) = 1.e-14_dbl_kind + faero_atm(:,:,4,:) = 1.e-14_dbl_kind + faero_atm(:,:,5,:) = 1.e-14_dbl_kind + faero_atm(:,:,6,:) = 1.e-14_dbl_kind end subroutine faero_default @@ -583,7 +584,7 @@ end subroutine faero_default subroutine faero_data - use ice_calendar, only: mmonth, mday, istep, msec + use ice_calendar, only: month, mday, istep, sec use ice_domain_size, only: max_blocks use ice_blocks, only: nx_block, ny_block use ice_flux_bgc, only: faero_atm @@ -597,11 +598,11 @@ subroutine faero_data aero2_data , & ! field values at 2 temporal data points aero3_data ! field values at 2 temporal data points - character (char_len_long) :: & + character (char_len_long) :: & aero_file, & ! netcdf filename fieldname ! field name in netcdf file - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ixm,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number recslot , & ! 
spline slot for current record @@ -617,19 +618,19 @@ subroutine faero_data !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle +! midmonth = fix(p5 * real(daymo(month))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = 99 ! other two points will be used if (mday < midmonth) ixp = 99 @@ -644,25 +645,25 @@ subroutine faero_data ! Find interpolation coefficients call interp_coeff_monthly (recslot) - ! Read 2 monthly values + ! Read 2 monthly values readm = .false. - if (istep==1 .or. (mday==midmonth .and. msec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. -! aero_file = trim(atm_data_dir)//'faero.nc' - aero_file = '/usr/projects/climate/eclare/DATA/gx1v3/faero.nc' +! aero_file = trim(atm_data_dir)//'faero.nc' + aero_file = '/usr/projects/climate/eclare/DATA/gx1v3/faero.nc' fieldname='faero_atm001' - call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & + call read_clim_data_nc (readm, 0, ixm, month, ixp, & aero_file, fieldname, aero1_data, & field_loc_center, field_type_scalar) fieldname='faero_atm002' - call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & + call read_clim_data_nc (readm, 0, ixm, month, ixp, & aero_file, fieldname, aero2_data, & field_loc_center, field_type_scalar) fieldname='faero_atm003' - call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & + call read_clim_data_nc (readm, 0, ixm, month, ixp, & aero_file, fieldname, aero3_data, & field_loc_center, field_type_scalar) @@ -694,11 +695,11 @@ subroutine fzaero_data save :: & aero_data ! field values at 2 temporal data points - character (char_len_long) :: & + character (char_len_long) :: & aero_file, & ! netcdf filename fieldname ! field name in netcdf file - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ixm,ixp , & ! record numbers for neighboring months maxrec , & ! maximum record number recslot , & ! spline slot for current record @@ -719,19 +720,19 @@ subroutine fzaero_data allocate( aero_data(nx_block,ny_block,2,max_blocks) ) !------------------------------------------------------------------- - ! monthly data + ! monthly data ! - ! Assume that monthly data values are located in the middle of the + ! Assume that monthly data values are located in the middle of the ! month. !------------------------------------------------------------------- midmonth = 15 ! data is given on 15th of every month -! midmonth = fix(p5 * real(daymo(mmonth))) ! exact middle +! midmonth = fix(p5 * real(daymo(month))) ! exact middle ! Compute record numbers for surrounding months maxrec = 12 - ixm = mod(mmonth+maxrec-2,maxrec) + 1 - ixp = mod(mmonth, maxrec) + 1 + ixm = mod(month+maxrec-2,maxrec) + 1 + ixp = mod(month, maxrec) + 1 if (mday >= midmonth) ixm = -99 ! other two points will be used if (mday < midmonth) ixp = -99 @@ -746,16 +747,16 @@ subroutine fzaero_data ! Find interpolation coefficients call interp_coeff_monthly (recslot) - ! Read 2 monthly values + ! Read 2 monthly values readm = .false. - if (istep==1 .or. 
(mday==midmonth .and. msec==0)) readm = .true. + if (istep==1 .or. (mday==midmonth .and. sec==0)) readm = .true. -! aero_file = trim(atm_data_dir)//'faero.nc' +! aero_file = trim(atm_data_dir)//'faero.nc' ! Cam5 monthly total black carbon deposition on the gx1 grid" - aero_file = '/usr/projects/climate/njeffery/DATA/CAM/Hailong_Wang/Cam5_bc_monthly_popgrid.nc' + aero_file = '/usr/projects/climate/njeffery/DATA/CAM/Hailong_Wang/Cam5_bc_monthly_popgrid.nc' fieldname='bcd' - call read_clim_data_nc (readm, 0, ixm, mmonth, ixp, & + call read_clim_data_nc (readm, 0, ixm, month, ixp, & aero_file, fieldname, aero_data, & field_loc_center, field_type_scalar) @@ -785,11 +786,11 @@ subroutine init_bgc_data (fed1,fep1) ! local parameters integer (kind=int_kind) :: & - fid ! file id for netCDF file + fid ! file id for netCDF file logical (kind=log_kind) :: diag - character (char_len_long) :: & + character (char_len_long) :: & iron_file, & ! netcdf filename fieldname ! field name in netcdf file @@ -801,7 +802,7 @@ subroutine init_bgc_data (fed1,fep1) !------------------------------------------------------------------- if (trim(fe_data_type) == 'clim') then - diag = .true. ! write diagnostic information + diag = .true. ! write diagnostic information iron_file = trim(bgc_data_dir)//'dFe_50m_annual_Tagliabue_gx1.nc' if (my_task == master_task) then @@ -813,12 +814,12 @@ subroutine init_bgc_data (fed1,fep1) fieldname='dFe' ! Currently only first fed value is read - call ice_read_nc(fid,1,fieldname,fed1,diag) - where ( fed1(:,:,:) > 1.e20) fed1(:,:,:) = p1 + call ice_read_nc(fid,1,fieldname,fed1,diag) + where ( fed1(:,:,:) > 1.e20) fed1(:,:,:) = p1 - if (my_task == master_task) call ice_close_nc(fid) + if (my_task == master_task) call ice_close_nc(fid) - diag = .true. ! write diagnostic information + diag = .true. ! write diagnostic information iron_file = trim(bgc_data_dir)//'pFe_bathy_gx1.nc' if (my_task == master_task) then @@ -830,15 +831,180 @@ subroutine init_bgc_data (fed1,fep1) fieldname='pFe' ! Currently only first fep value is read - call ice_read_nc(fid,1,fieldname,fep1,diag) - where ( fep1(:,:,:) > 1.e20) fep1(:,:,:) = p1 - - if (my_task == master_task) call ice_close_nc(fid) + call ice_read_nc(fid,1,fieldname,fep1,diag) + where ( fep1(:,:,:) > 1.e20) fep1(:,:,:) = p1 + if (my_task == master_task) call ice_close_nc(fid) + endif - + end subroutine init_bgc_data +!======================================================================= +! +! Aerosol optical properties for bulk and modal aerosol formulation +! X_bc_tab properties are from snicar_optics_5bnd_mam_c140303 (Mark Flanner 2009) +! ==> "Mie optical parameters for CLM snowpack treatment" Includes +! ice (effective radii from 30-1500um), black carbon, organic carbon and dust +! +! authors: Elizabeth Hunke, LANL + + subroutine faero_optics + + use ice_broadcast, only: broadcast_array + use ice_read_write, only: ice_open_nc, ice_close_nc + use ice_communicate, only: my_task, master_task + use ice_arrays_column, only: & + kaer_tab, & ! aerosol mass extinction cross section (m2/kg) + waer_tab, & ! aerosol single scatter albedo (fraction) + gaer_tab, & ! aerosol asymmetry parameter (cos(theta)) + kaer_bc_tab, & ! BC mass extinction cross section (m2/kg) + waer_bc_tab, & ! BC single scatter albedo (fraction) + gaer_bc_tab, & ! BC aerosol asymmetry parameter (cos(theta)) + bcenh ! BC absorption enhancement facto + +#ifdef USE_NETCDF + use netcdf +#endif + + ! 
local parameters + + logical (kind=log_kind) :: modal_aero + + integer (kind=int_kind) :: & + varid , & ! variable id + status , & ! status output from netcdf routines + n, k ! index + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + + integer (kind=int_kind) :: & + fid ! file id for netCDF file + + character (char_len_long) :: & + optics_file, & ! netcdf filename + fieldname ! field name in netcdf file + + character(len=*), parameter :: subname = '(faero_optics)' + + ! this data is used in bulk aerosol treatment in dEdd radiation + kaer_tab = reshape((/ & ! aerosol mass extinction cross section (m2/kg) + 11580.61872, 5535.41835, 2793.79690, & + 25798.96479, 11536.03871, 4688.24207, & + 196.49772, 204.14078, 214.42287, & + 2665.85867, 2256.71027, 820.36024, & + 840.78295, 1028.24656, 1163.03298, & + 387.51211, 414.68808, 450.29814/), & + (/icepack_nspint,icepack_max_aero/)) + waer_tab = reshape((/ & ! aerosol single scatter albedo (fraction) + 0.29003, 0.17349, 0.06613, & + 0.51731, 0.41609, 0.21324, & + 0.84467, 0.94216, 0.95666, & + 0.97764, 0.99402, 0.98552, & + 0.94146, 0.98527, 0.99093, & + 0.90034, 0.96543, 0.97678/), & + (/icepack_nspint,icepack_max_aero/)) + gaer_tab = reshape((/ & ! aerosol asymmetry parameter (cos(theta)) + 0.35445, 0.19838, 0.08857, & + 0.52581, 0.32384, 0.14970, & + 0.83162, 0.78306, 0.74375, & + 0.68861, 0.70836, 0.54171, & + 0.70239, 0.66115, 0.71983, & + 0.78734, 0.73580, 0.64411/), & + (/icepack_nspint,icepack_max_aero/)) + + ! this data is used in MODAL AEROSOL treatment in dEdd radiation + kaer_bc_tab = reshape((/ & ! aerosol mass extinction cross section (m2/kg) + 12955.44732, 5946.89461, 2772.33366, & + 12085.30664, 7438.83131, 3657.13084, & + 9753.99698, 7342.87139, 4187.79304, & + 7815.74879, 6659.65096, 4337.98863, & + 6381.28194, 5876.78408, 4254.65054, & + 5326.93163, 5156.74532, 4053.66581, & + 4538.09763, 4538.60875, 3804.10884, & + 3934.17604, 4020.20799, 3543.27199, & + 3461.20656, 3587.80962, 3289.98060, & + 3083.03396, 3226.27231, 3052.91441/), & + (/icepack_nspint,10/)) + + waer_bc_tab = reshape((/ & ! aerosol single scatter albedo (fraction) + 0.26107, 0.15861, 0.06535, & + 0.37559, 0.30318, 0.19483, & + 0.42224, 0.36913, 0.27875, & + 0.44777, 0.40503, 0.33026, & + 0.46444, 0.42744, 0.36426, & + 0.47667, 0.44285, 0.38827, & + 0.48635, 0.45428, 0.40617, & + 0.49440, 0.46328, 0.42008, & + 0.50131, 0.47070, 0.43128, & + 0.50736, 0.47704, 0.44056/), & + (/icepack_nspint,10/)) + + gaer_bc_tab = reshape((/ & ! 
aerosol asymmetry parameter (cos(theta)) + 0.28328, 0.19644, 0.10498, & + 0.44488, 0.32615, 0.19612, & + 0.54724, 0.41611, 0.26390, & + 0.61711, 0.48475, 0.31922, & + 0.66673, 0.53923, 0.36632, & + 0.70296, 0.58337, 0.40732, & + 0.73002, 0.61960, 0.44344, & + 0.75064, 0.64959, 0.47551, & + 0.76663, 0.67461, 0.50415, & + 0.77926, 0.69561, 0.52981/),& + (/icepack_nspint,10/)) + + bcenh(:,:,:) = c0 + + call icepack_query_parameters(modal_aero_out=modal_aero) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (modal_aero) then +#ifdef USE_NETCDF + optics_file = & + '/usr/projects/climate/njeffery/DATA/CAM/snicar/snicar_optics_5bnd_mam_c140303.nc' + + if (my_task == master_task) then + write (nu_diag,*) ' ' + write (nu_diag,*) 'Read optics for modal aerosol treament in' + write (nu_diag,*) trim(optics_file) + call ice_open_nc(optics_file,fid) + endif + + fieldname='bcint_enh_mam_cice' + if (my_task == master_task) then + + status = nf90_inq_varid(fid, trim(fieldname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot find variable '//trim(fieldname)) + endif + status = nf90_get_var( fid, varid, bcenh, & + start=(/1,1,1,1/), & + count=(/3,10,8,1/) ) + do n=1,10 + amin = minval(bcenh(:,n,:)) + amax = maxval(bcenh(:,n,:)) + asum = sum (bcenh(:,n,:)) + write(nu_diag,*) ' min, max, sum =', amin, amax, asum + enddo + call ice_close_nc(fid) + endif !master_task + do n=1,3 + do k=1,8 + call broadcast_array(bcenh(n,:,k), master_task) + enddo + enddo +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + endif ! modal_aero + + end subroutine faero_optics + !======================================================================= end module ice_forcing_bgc diff --git a/cicecore/cicedyn/general/ice_step_mod.F90 b/cicecore/cicedyn/general/ice_step_mod.F90 index 2726a6101..fde7a16cf 100644 --- a/cicecore/cicedyn/general/ice_step_mod.F90 +++ b/cicecore/cicedyn/general/ice_step_mod.F90 @@ -12,14 +12,7 @@ module ice_step_mod use ice_kinds_mod - use ice_blocks, only: block, get_block - use ice_blocks, only: nx_block, ny_block - use ice_constants, only: c0, c1, c1000, c4, p25 - use ice_constants, only: field_loc_center, field_loc_NEcorner, & - field_loc_Nface, field_loc_Eface, & - field_type_scalar, field_type_vector - use ice_domain, only: halo_info, nblocks, blocks_ice - use ice_domain_size, only: max_blocks + use ice_constants, only: c0, c1, c1000, c4 use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted @@ -43,12 +36,8 @@ module ice_step_mod private public :: step_therm1, step_therm2, step_dyn_horiz, step_dyn_ridge, & - step_snow, prep_radiation, step_radiation, ocean_mixed_layer, & - update_state, biogeochemistry, step_dyn_wave, step_prep - - real (kind=dbl_kind), dimension (:,:,:), allocatable :: & - uvelT_icep, & ! uvel for wind stress computation in icepack - vvelT_icep ! 
vvel for wind stress computation in icepack + prep_radiation, step_radiation, ocean_mixed_layer, & + update_state, biogeochemistry, save_init, step_dyn_wave !======================================================================= @@ -62,8 +51,6 @@ subroutine save_init use ice_state, only: aice, aicen, aice_init, aicen_init, & vicen, vicen_init, vsnon, vsnon_init - character(len=*), parameter :: subname = '(save_init)' - !----------------------------------------------------------------- ! Save the ice area passed to the coupler (so that history fields ! can be made consistent with coupler fields). @@ -77,54 +64,6 @@ subroutine save_init end subroutine save_init -!======================================================================= - - subroutine step_prep -! prep for step, called outside nblock loop - - use ice_flux, only: uatm, vatm, uatmT, vatmT - use ice_grid, only: grid_atm_dynu, grid_atm_dynv, grid_average_X2Y - use ice_state, only: uvel, vvel - - logical (kind=log_kind) :: & - highfreq ! highfreq flag - - logical (kind=log_kind), save :: & - first_call = .true. ! first call flag - - character(len=*), parameter :: subname = '(step_prep)' - - ! Save initial state - - call save_init - - ! Compute uatmT, vatmT - - call grid_average_X2Y('S',uatm,grid_atm_dynu,uatmT,'T') - call grid_average_X2Y('S',vatm,grid_atm_dynv,vatmT,'T') - - !----------------------------------------------------------------- - ! Compute uvelT_icep, vvelT_icep - !----------------------------------------------------------------- - - if (first_call) then - allocate(uvelT_icep(nx_block,ny_block,max_blocks)) - allocate(vvelT_icep(nx_block,ny_block,max_blocks)) - uvelT_icep = c0 - vvelT_icep = c0 - endif - - call icepack_query_parameters(highfreq_out=highfreq) - - if (highfreq) then - call grid_average_X2Y('A', uvel, 'U', uvelT_icep, 'T') - call grid_average_X2Y('A', vvel, 'U', vvelT_icep, 'T') - endif - - first_call = .false. - - end subroutine step_prep - !======================================================================= ! ! Scales radiation fields computed on the previous time step. @@ -133,6 +72,8 @@ end subroutine step_prep subroutine prep_radiation (iblk) + use ice_blocks, only: block, get_block + use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & alvdr_ai, alvdf_ai, alidr_ai, alidf_ai, & @@ -157,14 +98,14 @@ subroutine prep_radiation (iblk) character(len=*), parameter :: subname = '(prep_radiation)' - call ice_timer_start(timer_sw,iblk) ! shortwave + call ice_timer_start(timer_sw) ! shortwave alvdr_init(:,:,iblk) = c0 alvdf_init(:,:,iblk) = c0 alidr_init(:,:,iblk) = c0 alidf_init(:,:,iblk) = c0 - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -182,7 +123,8 @@ subroutine prep_radiation (iblk) alidr_init(i,j,iblk) = alidr_ai(i,j,iblk) alidf_init(i,j,iblk) = alidf_ai(i,j,iblk) - call icepack_prep_radiation (scale_factor=scale_factor(i,j,iblk), & + call icepack_prep_radiation (ncat=ncat, nilyr=nilyr, nslyr=nslyr, & + scale_factor=scale_factor(i,j,iblk), & aice = aice (i,j, iblk), aicen = aicen (i,j, :,iblk), & swvdr = swvdr (i,j, iblk), swvdf = swvdf (i,j, iblk), & swidr = swidr (i,j, iblk), swidf = swidf (i,j, iblk), & @@ -204,7 +146,7 @@ subroutine prep_radiation (iblk) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call ice_timer_stop(timer_sw,iblk) ! 
shortwave + call ice_timer_stop(timer_sw) ! shortwave end subroutine prep_radiation @@ -221,46 +163,43 @@ subroutine step_therm1 (dt, iblk) Cdn_ocn, Cdn_ocn_skin, Cdn_ocn_floe, Cdn_ocn_keel, Cdn_atm_ratio, & Cdn_atm, Cdn_atm_skin, Cdn_atm_floe, Cdn_atm_rdg, Cdn_atm_pond, & hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & - fswsfcn, fswintn, Sswabsn, Iswabsn, meltsliqn, meltsliq, & + fswsfcn, fswintn, Sswabsn, Iswabsn, & fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf + use ice_blocks, only: block, get_block, nx_block, ny_block use ice_calendar, only: yday + use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero - use ice_flux, only: frzmlt, sst, Tf, strocnxT_iavg, strocnyT_iavg, rside, fbot, Tbot, Tsnice, & - meltsn, melttn, meltbn, congeln, snoicen, uatmT, vatmT, fside, wlat, & - wind, rhoa, potT, Qa, zlvl, zlvs, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & - flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, fsloss, & + use ice_flux, only: frzmlt, sst, Tf, strocnxT, strocnyT, rside, fbot, Tbot, Tsnice, & + meltsn, melttn, meltbn, congeln, snoicen, uatm, vatm, fside, & + wind, rhoa, potT, Qa, zlvl, strax, stray, flatn, fsensn, fsurfn, fcondtopn, & + flw, fsnow, fpond, sss, mlt_onset, frz_onset, fcondbotn, fcondbot, & frain, Tair, strairxT, strairyT, fsurf, fcondtop, fsens, & flat, fswabs, flwout, evap, evaps, evapi, Tref, Qref, Uref, fresh, fsalt, fhocn, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & meltt, melts, meltb, congel, snoice, & flatn_f, fsensn_f, fsurfn_f, fcondtopn_f, & - send_i2x_per_cat, fswthrun_ai, dsnow + send_i2x_per_cat, fswthrun_ai use ice_flux_bgc, only: dsnown, faero_atm, faero_ocn, fiso_atm, fiso_ocn, & Qa_iso, Qref_iso, fiso_evap, HDO_ocn, H2_16O_ocn, H2_18O_ocn use ice_grid, only: lmask_n, lmask_s, tmask - use ice_state, only: aice, aicen, aicen_init, vicen_init, & - vice, vicen, vsno, vsnon, trcrn, vsnon_init -#ifdef CICE_IN_NEMO - use ice_state, only: aice_init -#endif + use ice_state, only: aice, aicen, aice_init, aicen_init, vicen_init, & + vice, vicen, vsno, vsnon, trcrn, uvel, vvel, vsnon_init #ifdef CESMCOUPLED use ice_prescribed_mod, only: prescribed_ice #else - logical (kind=log_kind) :: & - prescribed_ice ! if .true., use prescribed ice instead of computed + logical (kind=log_kind) :: & + prescribed_ice ! if .true., use prescribed ice instead of computed #endif + real (kind=dbl_kind), intent(in) :: & - dt ! time step (s) + dt ! time step integer (kind=int_kind), intent(in) :: & - iblk ! block index + iblk ! block index ! local variables -#ifdef CICE_IN_NEMO - real (kind=dbl_kind) :: & - raice ! reciprocal of ice concentration -#endif + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain i, j , & ! horizontal indices @@ -270,36 +209,32 @@ subroutine step_therm1 (dt, iblk) integer (kind=int_kind) :: & ntrcr, nt_apnd, nt_hpnd, nt_ipnd, nt_alvl, nt_vlvl, nt_Tsfc, & nt_iage, nt_FY, nt_qice, nt_sice, nt_aero, nt_qsno, & - nt_isosno, nt_isoice, nt_rsnw, nt_smice, nt_smliq + nt_isosno, nt_isoice logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, & - tr_pond_lvl, tr_pond_topo, calc_Tsfc, snwgrain + tr_iage, tr_FY, tr_iso, tr_aero, tr_pond, tr_pond_cesm, & + tr_pond_lvl, tr_pond_topo, calc_Tsfc real (kind=dbl_kind) :: & - puny ! a very small number + puny real (kind=dbl_kind), dimension(n_aero,2,ncat) :: & - aerosno, aeroice ! kg/m^2 + aerosno, aeroice ! 
kg/m^2 real (kind=dbl_kind), dimension(n_iso,ncat) :: & - isosno, isoice ! kg/m^2 - - real (kind=dbl_kind), dimension(nslyr,ncat) :: & - rsnwn, smicen, smliqn + isosno, isoice ! kg/m^2 type (block) :: & - this_block ! block information for current block + this_block ! block information for current block character(len=*), parameter :: subname = '(step_therm1)' call icepack_query_parameters(puny_out=puny) call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) - call icepack_query_parameters(snwgrain_out=snwgrain) call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_iso_out=tr_iso, & - tr_aero_out=tr_aero, tr_pond_out=tr_pond, & + tr_aero_out=tr_aero, tr_pond_out=tr_pond, tr_pond_cesm_out=tr_pond_cesm, & tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) call icepack_query_tracer_indices( & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & @@ -307,7 +242,6 @@ subroutine step_therm1 (dt, iblk) nt_iage_out=nt_iage, nt_FY_out=nt_FY, & nt_qice_out=nt_qice, nt_sice_out=nt_sice, & nt_aero_out=nt_aero, nt_qsno_out=nt_qsno, & - nt_rsnw_out=nt_rsnw, nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -317,9 +251,7 @@ subroutine step_therm1 (dt, iblk) prescribed_ice = .false. #endif - rsnwn (:,:) = c0 - smicen (:,:) = c0 - smliqn (:,:) = c0 + isosno (:,:) = c0 isoice (:,:) = c0 aerosno(:,:,:) = c0 aeroice(:,:,:) = c0 @@ -346,25 +278,15 @@ subroutine step_therm1 (dt, iblk) enddo ! j #endif - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = jlo, jhi do i = ilo, ihi - if (snwgrain) then - do n = 1, ncat - do k = 1, nslyr - rsnwn (k,n) = trcrn(i,j,nt_rsnw +k-1,n,iblk) - smicen(k,n) = trcrn(i,j,nt_smice+k-1,n,iblk) - smliqn(k,n) = trcrn(i,j,nt_smliq+k-1,n,iblk) - enddo - enddo - endif ! snwgrain - if (tr_iso) then ! 
trcrn(nt_iso*) has units kg/m^3 do n=1,ncat do k=1,n_iso @@ -400,31 +322,27 @@ subroutine step_therm1 (dt, iblk) vicen = vicen (i,j,:,iblk), & vsno = vsno (i,j, iblk), & vsnon = vsnon (i,j,:,iblk), & - uvel = uvelT_icep (i,j, iblk), & - vvel = vvelT_icep (i,j, iblk), & + uvel = uvel (i,j, iblk), & + vvel = vvel (i,j, iblk), & Tsfc = trcrn (i,j,nt_Tsfc,:,iblk), & - zqsn = trcrn (i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & - zqin = trcrn (i,j,nt_qice:nt_qice+nilyr-1,:,iblk), & - zSin = trcrn (i,j,nt_sice:nt_sice+nilyr-1,:,iblk), & - alvl = trcrn (i,j,nt_alvl,:,iblk), & - vlvl = trcrn (i,j,nt_vlvl,:,iblk), & - apnd = trcrn (i,j,nt_apnd,:,iblk), & - hpnd = trcrn (i,j,nt_hpnd,:,iblk), & - ipnd = trcrn (i,j,nt_ipnd,:,iblk), & + zqsn = trcrn (i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & + zqin = trcrn (i,j,nt_qice:nt_qice+nilyr-1,:,iblk), & + zSin = trcrn (i,j,nt_sice:nt_sice+nilyr-1,:,iblk), & + alvl = trcrn (i,j,nt_alvl,:,iblk), & + vlvl = trcrn (i,j,nt_vlvl,:,iblk), & + apnd = trcrn (i,j,nt_apnd,:,iblk), & + hpnd = trcrn (i,j,nt_hpnd,:,iblk), & + ipnd = trcrn (i,j,nt_ipnd,:,iblk), & iage = trcrn (i,j,nt_iage,:,iblk), & - FY = trcrn (i,j,nt_FY ,:,iblk), & - rsnwn = rsnwn (:,:), & - smicen = smicen (:,:), & - smliqn = smliqn (:,:), & + FY = trcrn (i,j,nt_FY ,:,iblk), & aerosno = aerosno (:,:,:), & aeroice = aeroice (:,:,:), & isosno = isosno (:,:), & isoice = isoice (:,:), & - uatm = uatmT (i,j, iblk), & - vatm = vatmT (i,j, iblk), & + uatm = uatm (i,j, iblk), & + vatm = vatm (i,j, iblk), & wind = wind (i,j, iblk), & zlvl = zlvl (i,j, iblk), & - zlvs = zlvs (i,j, iblk), & Qa = Qa (i,j, iblk), & Qa_iso = Qa_iso (i,j,:,iblk), & rhoa = rhoa (i,j, iblk), & @@ -459,19 +377,17 @@ subroutine step_therm1 (dt, iblk) sst = sst (i,j, iblk), & sss = sss (i,j, iblk), & Tf = Tf (i,j, iblk), & - strocnxT = strocnxT_iavg(i,j, iblk), & - strocnyT = strocnyT_iavg(i,j, iblk), & + strocnxT = strocnxT (i,j, iblk), & + strocnyT = strocnyT (i,j, iblk), & fbot = fbot (i,j, iblk), & Tbot = Tbot (i,j, iblk), & - Tsnice = Tsnice (i,j, iblk), & + Tsnice = Tsnice (i,j, iblk), & frzmlt = frzmlt (i,j, iblk), & rside = rside (i,j, iblk), & fside = fside (i,j, iblk), & - wlat = wlat (i,j, iblk), & fsnow = fsnow (i,j, iblk), & frain = frain (i,j, iblk), & fpond = fpond (i,j, iblk), & - fsloss = fsloss (i,j, iblk), & fsurf = fsurf (i,j, iblk), & fsurfn = fsurfn (i,j,:,iblk), & fcondtop = fcondtop (i,j, iblk), & @@ -501,10 +417,10 @@ subroutine step_therm1 (dt, iblk) fsalt = fsalt (i,j, iblk), & fhocn = fhocn (i,j, iblk), & fswthru = fswthru (i,j, iblk), & - fswthru_vdr = fswthru_vdr (i,j, iblk), & - fswthru_vdf = fswthru_vdf (i,j, iblk), & - fswthru_idr = fswthru_idr (i,j, iblk), & - fswthru_idf = fswthru_idf (i,j, iblk), & + fswthru_vdr = fswthru_vdr (i,j, iblk),& + fswthru_vdf = fswthru_vdf (i,j, iblk),& + fswthru_idr = fswthru_idr (i,j, iblk),& + fswthru_idf = fswthru_idf (i,j, iblk),& flatn_f = flatn_f (i,j,:,iblk), & fsensn_f = fsensn_f (i,j,:,iblk), & fsurfn_f = fsurfn_f (i,j,:,iblk), & @@ -529,10 +445,7 @@ subroutine step_therm1 (dt, iblk) congeln = congeln (i,j,:,iblk), & snoice = snoice (i,j, iblk), & snoicen = snoicen (i,j,:,iblk), & - dsnow = dsnow (i,j, iblk), & dsnown = dsnown (i,j,:,iblk), & - meltsliq = meltsliq (i,j, iblk), & - meltsliqn = meltsliqn (i,j,:,iblk), & lmask_n = lmask_n (i,j, iblk), & lmask_s = lmask_s (i,j, iblk), & mlt_onset = mlt_onset (i,j, iblk), & @@ -554,16 +467,6 @@ subroutine step_therm1 (dt, iblk) endif - if (snwgrain) then - do n = 1, ncat - do k = 1, nslyr - trcrn(i,j,nt_rsnw +k-1,n,iblk) = rsnwn 
(k,n) - trcrn(i,j,nt_smice+k-1,n,iblk) = smicen(k,n) - trcrn(i,j,nt_smliq+k-1,n,iblk) = smliqn(k,n) - enddo - enddo - endif ! snwgrain - if (tr_iso) then do n = 1, ncat if (vicen(i,j,n,iblk) > puny) & @@ -610,14 +513,16 @@ end subroutine step_therm1 subroutine step_therm2 (dt, iblk) - use ice_arrays_column, only: hin_max, ocean_bio, wave_sig_ht, & + use ice_arrays_column, only: hin_max, fzsal, ocean_bio, wave_sig_ht, & wave_spectrum, wavefreq, dwavefreq, & first_ice, bgrid, cgrid, igrid, floe_rad_c, floe_binwidth, & d_afsd_latg, d_afsd_newi, d_afsd_latm, d_afsd_weld + use ice_blocks, only: block, get_block use ice_calendar, only: yday - use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd + use ice_domain, only: blocks_ice + use ice_domain_size, only: ncat, nilyr, nslyr, n_aero, nblyr, nfsd use ice_flux, only: fresh, frain, fpond, frzmlt, frazil, frz_onset, & - fsalt, Tf, sss, salinz, fhocn, rside, fside, wlat, & + update_ocn_f, fsalt, Tf, sss, salinz, fhocn, rside, fside, & meltl, frazil_diag use ice_flux_bgc, only: flux_bio, faero_ocn, & fiso_ocn, HDO_ocn, H2_16O_ocn, H2_18O_ocn @@ -643,7 +548,7 @@ subroutine step_therm2 (dt, iblk) logical (kind=log_kind) :: & tr_fsd, & ! floe size distribution tracers - z_tracers ! vertical biogeochemistry + z_tracers type (block) :: & this_block ! block information for current block @@ -657,14 +562,14 @@ subroutine step_therm2 (dt, iblk) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - ! nltrcr is only used as a zbgc flag in icepack (number of zbgc tracers > 0) + ! tcraig, nltrcr used to be the number of zbgc tracers, but it's used as a zbgc flag in icepack if (z_tracers) then nltrcr = 1 else nltrcr = 0 endif - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -681,7 +586,7 @@ subroutine step_therm2 (dt, iblk) call icepack_step_therm2(dt=dt, ncat=ncat, & nltrcr=nltrcr, nilyr=nilyr, nslyr=nslyr, nblyr=nblyr, & - hin_max = hin_max (:), & + hin_max = hin_max (:), & aicen = aicen (i,j,:,iblk), & vicen = vicen (i,j,:,iblk), & vsnon = vsnon (i,j,:,iblk), & @@ -700,7 +605,6 @@ subroutine step_therm2 (dt, iblk) rside = rside (i,j, iblk), & meltl = meltl (i,j, iblk), & fside = fside (i,j, iblk), & - wlat = wlat (i,j, iblk), & frzmlt = frzmlt (i,j, iblk), & frazil = frazil (i,j, iblk), & frain = frain (i,j, iblk), & @@ -708,11 +612,13 @@ subroutine step_therm2 (dt, iblk) fresh = fresh (i,j, iblk), & fsalt = fsalt (i,j, iblk), & fhocn = fhocn (i,j, iblk), & + update_ocn_f = update_ocn_f, & bgrid = bgrid, & cgrid = cgrid, & igrid = igrid, & faero_ocn = faero_ocn (i,j,:,iblk), & first_ice = first_ice (i,j,:,iblk), & + fzsal = fzsal (i,j, iblk), & flux_bio = flux_bio (i,j,1:nbtrcr,iblk), & ocean_bio = ocean_bio (i,j,1:nbtrcr,iblk), & frazil_diag= frazil_diag(i,j,iblk), & @@ -750,30 +656,28 @@ end subroutine step_therm2 ! ! authors: Elizabeth Hunke, LANL - subroutine update_state (dt, daidt, dvidt, dvsdt, dagedt, offset) + subroutine update_state (dt, daidt, dvidt, dagedt, offset) + use ice_blocks, only: nx_block, ny_block + use ice_domain, only: nblocks use ice_domain_size, only: ncat ! 
use ice_grid, only: tmask use ice_state, only: aicen, trcrn, vicen, vsnon, & aice, trcr, vice, vsno, aice0, trcr_depend, & bound_state, trcr_base, nt_strata, n_trcr_strata - use ice_flux, only: Tf - use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound, timer_updstate + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_bound real (kind=dbl_kind), intent(in) :: & - dt ! time step + dt , & ! time step + offset ! d(age)/dt time offset = dt for thermo, 0 for dyn - real (kind=dbl_kind), dimension(:,:,:), intent(inout), optional :: & - daidt, & ! change in ice area per time step - dvidt, & ! change in ice volume per time step - dvsdt, & ! change in snow volume per time step - dagedt ! change in ice age per time step + real (kind=dbl_kind), dimension(:,:,:), intent(inout) :: & + daidt, & ! change in ice area per time step + dvidt, & ! change in ice volume per time step + dagedt ! change in ice age per time step - real (kind=dbl_kind), intent(in), optional :: & - offset ! d(age)/dt time offset = dt for thermo, 0 for dyn - - integer (kind=int_kind) :: & - iblk, & ! block index + integer (kind=int_kind) :: & + iblk, & ! block index i,j, & ! horizontal indices ntrcr, & ! nt_iage ! @@ -783,7 +687,6 @@ subroutine update_state (dt, daidt, dvidt, dvsdt, dagedt, offset) character(len=*), parameter :: subname='(update_state)' - call ice_timer_start(timer_updstate) call icepack_query_tracer_flags(tr_iage_out=tr_iage) call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_query_tracer_indices(nt_iage_out=nt_iage) @@ -801,15 +704,15 @@ subroutine update_state (dt, daidt, dvidt, dvsdt, dagedt, offset) ntrcr, trcrn) call ice_timer_stop(timer_bound) - !$OMP PARALLEL DO PRIVATE(iblk,i,j) SCHEDULE(runtime) + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block !----------------------------------------------------------------- - ! Aggregate the updated state variables (includes ghost cells). - !----------------------------------------------------------------- - + ! Aggregate the updated state variables (includes ghost cells). + !----------------------------------------------------------------- + ! if (tmask(i,j,iblk)) & call icepack_aggregate(ncat = ncat, & aicen = aicen(i,j,:,iblk), & @@ -825,29 +728,24 @@ subroutine update_state (dt, daidt, dvidt, dvsdt, dagedt, offset) trcr_depend = trcr_depend(:), & trcr_base = trcr_base(:,:), & n_trcr_strata = n_trcr_strata(:), & - nt_strata = nt_strata(:,:), & - Tf = Tf(i,j,iblk)) - - if (present(offset)) then - - !----------------------------------------------------------------- - ! Compute thermodynamic area and volume tendencies. - !----------------------------------------------------------------- - - if (present(daidt)) daidt(i,j,iblk) = (aice(i,j,iblk) - daidt(i,j,iblk)) / dt - if (present(dvidt)) dvidt(i,j,iblk) = (vice(i,j,iblk) - dvidt(i,j,iblk)) / dt - if (present(dvsdt)) dvsdt(i,j,iblk) = (vsno(i,j,iblk) - dvsdt(i,j,iblk)) / dt - if (present(dagedt) .and. tr_iage) then - if (offset > c0) then ! thermo - if (trcr(i,j,nt_iage,iblk) > c0) & - dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & - - dagedt(i,j,iblk) - offset) / dt - else ! dynamics - dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & - - dagedt(i,j,iblk)) / dt - endif - endif ! tr_iage - endif ! present(offset) + nt_strata = nt_strata(:,:)) + + !----------------------------------------------------------------- + ! Compute thermodynamic area and volume tendencies. 
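      ! (Illustrative aside, not part of the patch; numbers are hypothetical.)
      ! With dt = 3600 s, daidt holding the pre-update area 0.80 and the
      ! aggregated aice coming out at 0.83, the tendency line below gives
      !    daidt = (0.83 - 0.80)/3600 ~ 8.3e-6 1/s.
      ! For the age tracer, the thermodynamics call passes offset = dt, so
      ! plain aging (age_new = age_old + dt) yields dagedt = 0, while the
      ! dynamics call passes offset = 0 and keeps the full difference.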
+ !----------------------------------------------------------------- + + daidt(i,j,iblk) = (aice(i,j,iblk) - daidt(i,j,iblk)) / dt + dvidt(i,j,iblk) = (vice(i,j,iblk) - dvidt(i,j,iblk)) / dt + if (tr_iage) then + if (offset > c0) then ! thermo + if (trcr(i,j,nt_iage,iblk) > c0) & + dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & + - dagedt(i,j,iblk) - offset) / dt + else ! dynamics + dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & + - dagedt(i,j,iblk)) / dt + endif + endif enddo ! i enddo ! j @@ -857,7 +755,6 @@ subroutine update_state (dt, daidt, dvidt, dvsdt, dagedt, offset) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call ice_timer_stop(timer_updstate) end subroutine update_state @@ -870,8 +767,10 @@ end subroutine update_state subroutine step_dyn_wave (dt) - use ice_arrays_column, only: wave_spectrum, & + use ice_arrays_column, only: wave_spectrum, wave_sig_ht, & d_afsd_wave, floe_rad_l, floe_rad_c, wavefreq, dwavefreq + use ice_blocks, only: block, get_block + use ice_domain, only: blocks_ice, nblocks use ice_domain_size, only: ncat, nfsd, nfreq use ice_state, only: trcrn, aicen, aice, vice use ice_timers, only: ice_timer_start, ice_timer_stop, timer_column, & @@ -888,7 +787,9 @@ subroutine step_dyn_wave (dt) integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain iblk, & ! block index - i, j ! horizontal indices + i, j, & ! horizontal indices + ntrcr, & ! + nbtrcr ! character (len=char_len) :: wave_spec_type @@ -945,86 +846,38 @@ end subroutine step_dyn_wave subroutine step_dyn_horiz (dt) - use ice_boundary, only: ice_HaloUpdate use ice_dyn_evp, only: evp use ice_dyn_eap, only: eap - use ice_dyn_vp, only: implicit_solver - use ice_dyn_shared, only: kdyn - use ice_flux, only: strocnxU, strocnyU, strocnxT_iavg, strocnyT_iavg + use ice_dyn_shared, only: kdyn, ktransport use ice_flux, only: init_history_dyn - use ice_grid, only: grid_average_X2Y - use ice_state, only: aiU - use ice_transport_driver, only: advection, transport_upwind, transport_remap +!deprecate upwind use ice_transport_driver, only: advection, transport_upwind, transport_remap + use ice_transport_driver, only: advection, transport_remap real (kind=dbl_kind), intent(in) :: & dt ! dynamics time step - ! local variables - - type (block) :: & - this_block ! block information for current block - - integer (kind=int_kind) :: & - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - iblk, & ! block index - i, j ! horizontal indices - - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1, & ! temporary - work2 ! temporary - character(len=*), parameter :: subname = '(step_dyn_horiz)' call init_history_dyn ! initialize dynamic history variables !----------------------------------------------------------------- - ! Ice dynamics (momentum equation) + ! Elastic-viscous-plastic ice dynamics !----------------------------------------------------------------- if (kdyn == 1) call evp (dt) if (kdyn == 2) call eap (dt) - if (kdyn == 3) call implicit_solver (dt) - - !----------------------------------------------------------------- - ! Compute strocnxT_iavg, strocnyT_iavg for thermo and coupling - !----------------------------------------------------------------- - - ! strocn computed on U, N, E as needed. Map strocn U divided by aiU to T - ! 
conservation requires aiU be divided before averaging - work1 = c0 - work2 = c0 - !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk), iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - if (aiU(i,j,iblk) /= c0) then - work1(i,j,iblk) = strocnxU(i,j,iblk)/aiU(i,j,iblk) - work2(i,j,iblk) = strocnyU(i,j,iblk)/aiU(i,j,iblk) - endif - enddo - enddo - enddo - !$OMP END PARALLEL DO - call ice_HaloUpdate (work1, halo_info, & - field_loc_NEcorner, field_type_vector) - call ice_HaloUpdate (work2, halo_info, & - field_loc_NEcorner, field_type_vector) - call grid_average_X2Y('F', work1, 'U', strocnxT_iavg, 'T') ! shift - call grid_average_X2Y('F', work2, 'U', strocnyT_iavg, 'T') !----------------------------------------------------------------- ! Horizontal ice transport !----------------------------------------------------------------- - if (advection == 'upwind') then - call transport_upwind (dt) ! upwind - elseif (advection == 'remap') then +!deprecate upwind if (ktransport > 0) then + if (ktransport > 0 .and. advection == 'remap') then +!deprecate upwind if (advection == 'upwind') then +!deprecate upwind call transport_upwind (dt) ! upwind +!deprecate upwind else call transport_remap (dt) ! incremental remapping +!deprecate upwind endif endif end subroutine step_dyn_horiz @@ -1038,13 +891,15 @@ end subroutine step_dyn_horiz subroutine step_dyn_ridge (dt, ndtd, iblk) - use ice_arrays_column, only: hin_max, first_ice + use ice_arrays_column, only: hin_max, fzsal, first_ice + use ice_blocks, only: block, get_block + use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, nilyr, nslyr, n_aero, nblyr use ice_flux, only: & rdg_conv, rdg_shear, dardg1dt, dardg2dt, & dvirdgdt, opening, fpond, fresh, fhocn, & aparticn, krdgn, aredistn, vredistn, dardg1ndt, dardg2ndt, & - dvirdgndt, araftn, vraftn, fsalt, Tf + dvirdgndt, araftn, vraftn, fsalt use ice_flux_bgc, only: flux_bio, faero_ocn, fiso_ocn use ice_grid, only: tmask use ice_state, only: trcrn, vsnon, aicen, vicen, & @@ -1058,14 +913,14 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) integer (kind=int_kind), intent(in) :: & ndtd, & ! number of dynamics subcycles - iblk ! block index + iblk ! block index ! local variables type (block) :: & this_block ! block information for current block - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain i, j, & ! horizontal indices ntrcr, & ! @@ -1077,8 +932,8 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) ! Ridging !----------------------------------------------------------------- - call ice_timer_start(timer_column,iblk) - call ice_timer_start(timer_ridge,iblk) + call ice_timer_start(timer_column) + call ice_timer_start(timer_ridge) call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr) call icepack_warnings_flush(nu_diag) @@ -1135,8 +990,8 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) aice = aice (i,j, iblk), & fsalt = fsalt (i,j, iblk), & first_ice = first_ice(i,j,:,iblk), & - flux_bio = flux_bio (i,j,1:nbtrcr,iblk), & - Tf = Tf(i,j,iblk)) + fzsal = fzsal (i,j, iblk), & + flux_bio = flux_bio (i,j,1:nbtrcr,iblk)) endif ! 
tmask @@ -1147,119 +1002,11 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call ice_timer_stop(timer_ridge,iblk) - call ice_timer_stop(timer_column,iblk) + call ice_timer_stop(timer_ridge) + call ice_timer_stop(timer_column) end subroutine step_dyn_ridge -!======================================================================= -! -! Updates snow tracers -! -! authors: Elizabeth C. Hunke, LANL -! Nicole Jeffery, LANL - - subroutine step_snow (dt, iblk) - - use ice_calendar, only: nstreams - use ice_domain_size, only: ncat, nslyr, nilyr - use ice_flux, only: snwcnt, wind, fresh, fhocn, fsloss, fsnow - use ice_state, only: trcrn, vsno, vsnon, vicen, aicen, aice - use icepack_intfc, only: icepack_step_snow - - real (kind=dbl_kind), intent(in) :: & - dt ! time step - - integer (kind=int_kind), intent(in) :: & - iblk ! block index - - ! local variables - - integer (kind=int_kind) :: & - nt_smice, nt_smliq, nt_rsnw, & - nt_Tsfc, nt_qice, nt_sice, nt_qsno, & - nt_alvl, nt_vlvl, nt_rhos - - integer (kind=int_kind) :: & - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - i, j, & ! horizontal indices - ns ! history streams index - - real (kind=dbl_kind) :: & - puny - - real (kind=dbl_kind) :: & - fhs ! flag for presence of snow - - character(len=*), parameter :: subname = '(step_snow)' - - type (block) :: & - this_block ! block information for current block - - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - - !----------------------------------------------------------------- - ! query icepack values - !----------------------------------------------------------------- - - call icepack_query_parameters(puny_out=puny) - call icepack_query_tracer_indices( & - nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & - nt_rsnw_out=nt_rsnw, nt_Tsfc_out=nt_Tsfc, & - nt_qice_out=nt_qice, nt_sice_out=nt_sice, nt_qsno_out=nt_qsno, & - nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_rhos_out=nt_rhos) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !----------------------------------------------------------------- - ! Snow redistribution and metamorphosis - !----------------------------------------------------------------- - - do j = jlo, jhi - do i = ilo, ihi - - call icepack_step_snow (dt, nilyr, & - nslyr, ncat, & - wind (i,j, iblk), & - aice (i,j, iblk), & - aicen(i,j,:,iblk), & - vicen(i,j,:,iblk), & - vsnon(i,j,:,iblk), & - trcrn(i,j,nt_Tsfc,:,iblk), & - trcrn(i,j,nt_qice,:,iblk), & ! top layer only - trcrn(i,j,nt_sice,:,iblk), & ! top layer only - trcrn(i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk), & - trcrn(i,j,nt_alvl,:,iblk), & - trcrn(i,j,nt_vlvl,:,iblk), & - trcrn(i,j,nt_smice:nt_smice+nslyr-1,:,iblk), & - trcrn(i,j,nt_smliq:nt_smliq+nslyr-1,:,iblk), & - trcrn(i,j,nt_rsnw:nt_rsnw+nslyr-1,:,iblk), & - trcrn(i,j,nt_rhos:nt_rhos+nslyr-1,:,iblk), & - fresh (i,j,iblk), & - fhocn (i,j,iblk), & - fsloss (i,j,iblk), & - fsnow (i,j,iblk)) - enddo - enddo - - ! increment counter for history averaging - do j = jlo, jhi - do i = ilo, ihi - fhs = c0 - if (vsno(i,j,iblk) > puny) fhs = c1 - do ns = 1, nstreams - snwcnt(i,j,iblk,ns) = snwcnt(i,j,iblk,ns) + fhs - enddo - enddo - enddo - - end subroutine step_snow - !======================================================================= ! ! 
Computes radiation fields @@ -1275,8 +1022,11 @@ subroutine step_radiation (dt, iblk) fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & albicen, albsnon, albpndn, & alvdrn, alidrn, alvdfn, alidfn, apeffn, trcrn_sw, snowfracn, & - swgrid, igrid - use ice_calendar, only: calendar_type, days_per_year, nextsw_cday, yday, msec + kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, & + gaer_bc_tab, bcenh, swgrid, igrid + use ice_blocks, only: block, get_block + use ice_calendar, only: calendar_type, days_per_year, nextsw_cday, yday, sec + use ice_domain, only: blocks_ice use ice_domain_size, only: ncat, n_aero, nilyr, nslyr, n_zaero, n_algae, nblyr use ice_flux, only: swvdr, swvdf, swidr, swidf, coszen, fsnow use ice_grid, only: TLAT, TLON, tmask @@ -1302,7 +1052,7 @@ subroutine step_radiation (dt, iblk) this_block ! block information for current block integer (kind=int_kind) :: & - nt_Tsfc, nt_alvl, nt_rsnw, & + nt_Tsfc, nt_alvl, & nt_apnd, nt_hpnd, nt_ipnd, nt_aero, nlt_chl_sw, & ntrcr, nbtrcr, nbtrcr_sw, nt_fbri @@ -1313,14 +1063,13 @@ subroutine step_radiation (dt, iblk) nlt_zaero_sw, nt_zaero logical (kind=log_kind) :: & - tr_bgc_N, tr_zaero, tr_brine, dEdd_algae, modal_aero, snwgrain + tr_bgc_N, tr_zaero, tr_brine, dEdd_algae, modal_aero real (kind=dbl_kind), dimension(ncat) :: & - fbri ! brine height to ice thickness + fbri ! brine height to ice thickness real(kind= dbl_kind), dimension(:,:), allocatable :: & - ztrcr_sw, & ! zaerosols (kg/m^3) and chla (mg/m^3) - rsnow ! snow grain radius tracer (10^-6 m) + ztrcr_sw logical (kind=log_kind) :: & debug, & ! flag for printing debugging information @@ -1328,27 +1077,25 @@ subroutine step_radiation (dt, iblk) character(len=*), parameter :: subname = '(step_radiation)' - call ice_timer_start(timer_sw,iblk) ! shortwave + call ice_timer_start(timer_sw) ! shortwave call icepack_query_tracer_sizes(ntrcr_out=ntrcr, & nbtrcr_out=nbtrcr, nbtrcr_sw_out=nbtrcr_sw) call icepack_query_tracer_flags( & tr_brine_out=tr_brine, tr_bgc_N_out=tr_bgc_N, tr_zaero_out=tr_zaero) call icepack_query_tracer_indices( & - nt_Tsfc_out=nt_Tsfc, nt_alvl_out=nt_alvl, nt_rsnw_out=nt_rsnw, & + nt_Tsfc_out=nt_Tsfc, nt_alvl_out=nt_alvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, & nlt_chl_sw_out=nlt_chl_sw, nlt_zaero_sw_out=nlt_zaero_sw, & nt_fbri_out=nt_fbri, nt_zaero_out=nt_zaero, nt_bgc_N_out=nt_bgc_N) - call icepack_query_parameters(dEdd_algae_out=dEdd_algae, modal_aero_out=modal_aero, & - snwgrain_out=snwgrain) + call icepack_query_parameters(dEdd_algae_out=dEdd_algae, modal_aero_out=modal_aero) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) allocate(ztrcr_sw(nbtrcr_sw,ncat)) - allocate(rsnow(nslyr,ncat)) - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1368,21 +1115,17 @@ subroutine step_radiation (dt, iblk) write (nu_diag, *) 'my_task = ',my_task enddo ! 
ipoint endif - fbri (:) = c0 + fbri(:) = c0 ztrcr_sw(:,:) = c0 - rsnow (:,:) = c0 do n = 1, ncat - if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) - if (snwgrain) then - do k = 1, nslyr - rsnow(k,n) = trcrn(i,j,nt_rsnw+k-1,n,iblk) - enddo - endif + if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) enddo if (tmask(i,j,iblk)) then - call icepack_step_radiation (dt=dt, & + call icepack_step_radiation (dt=dt, ncat=ncat, & + nblyr=nblyr, nilyr=nilyr, nslyr=nslyr, & + dEdd_algae=dEdd_algae, & swgrid=swgrid(:), igrid=igrid(:), & fbri=fbri(:), & aicen=aicen(i,j, :,iblk), & @@ -1401,7 +1144,12 @@ subroutine step_radiation (dt, iblk) calendar_type=calendar_type, & days_per_year=days_per_year, & nextsw_cday=nextsw_cday, yday=yday, & - sec=msec, & + sec=sec, & + kaer_tab=kaer_tab, kaer_bc_tab=kaer_bc_tab(:,:), & + waer_tab=waer_tab, waer_bc_tab=waer_bc_tab(:,:), & + gaer_tab=gaer_tab, gaer_bc_tab=gaer_bc_tab(:,:), & + bcenh=bcenh(:,:,:), & + modal_aero=modal_aero, & swvdr =swvdr (i,j ,iblk), swvdf =swvdf (i,j ,iblk), & swidr =swidr (i,j ,iblk), swidf =swidf (i,j ,iblk), & coszen =coszen (i,j ,iblk), fsnow =fsnow (i,j ,iblk), & @@ -1419,9 +1167,10 @@ subroutine step_radiation (dt, iblk) albpndn =albpndn (i,j,: ,iblk), apeffn =apeffn (i,j,: ,iblk), & snowfracn=snowfracn(i,j,: ,iblk), & dhsn =dhsn (i,j,: ,iblk), ffracn =ffracn(i,j,:,iblk), & - rsnow =rsnow (:,:), l_print_point=l_print_point) - endif + l_print_point=l_print_point) + endif + if (dEdd_algae .and. (tr_zaero .or. tr_bgc_N)) then do n = 1, ncat do k = 1, nbtrcr_sw @@ -1438,9 +1187,8 @@ subroutine step_radiation (dt, iblk) file=__FILE__, line=__LINE__) deallocate(ztrcr_sw) - deallocate(rsnow) - call ice_timer_stop(timer_sw,iblk) ! shortwave + call ice_timer_stop(timer_sw) ! shortwave end subroutine step_radiation @@ -1457,7 +1205,8 @@ end subroutine step_radiation subroutine ocean_mixed_layer (dt, iblk) use ice_arrays_column, only: Cdn_atm, Cdn_atm_ratio - use ice_flux, only: sst, Tf, Qa, uatmT, vatmT, wind, potT, rhoa, zlvl, & + use ice_blocks, only: nx_block, ny_block + use ice_flux, only: sst, Tf, Qa, uatm, vatm, wind, potT, rhoa, zlvl, & frzmlt, fhocn, fswthru, flw, flwout_ocn, fsens_ocn, flat_ocn, evap_ocn, & alvdr_ocn, alidr_ocn, alvdf_ocn, alidf_ocn, swidf, swvdf, swidr, swvdr, & qdp, hmix, strairx_ocn, strairy_ocn, Tref_ocn, Qref_ocn @@ -1538,24 +1287,24 @@ subroutine ocean_mixed_layer (dt, iblk) j = indxj(ij) call icepack_atm_boundary(sfctype = 'ocn', & - Tsf = sst (i,j,iblk), & + Tsf = sst (i,j,iblk), & potT = potT (i,j,iblk), & - uatm = uatmT (i,j,iblk), & - vatm = vatmT (i,j,iblk), & - wind = wind (i,j,iblk), & - zlvl = zlvl (i,j,iblk), & - Qa = Qa (i,j,iblk), & + uatm = uatm (i,j,iblk), & + vatm = vatm (i,j,iblk), & + wind = wind (i,j,iblk), & + zlvl = zlvl (i,j,iblk), & + Qa = Qa (i,j,iblk), & rhoa = rhoa (i,j,iblk), & - strx = strairx_ocn(i,j,iblk), & - stry = strairy_ocn(i,j,iblk), & - Tref = Tref_ocn (i,j,iblk), & - Qref = Qref_ocn (i,j,iblk), & - delt = delt (i,j), & + strx = strairx_ocn(i,j,iblk), & + stry = strairy_ocn(i,j,iblk), & + Tref = Tref_ocn (i,j,iblk), & + Qref = Qref_ocn (i,j,iblk), & + delt = delt (i,j), & delq = delq (i,j), & lhcoef = lhcoef (i,j), & shcoef = shcoef (i,j), & - Cdn_atm = Cdn_atm (i,j,iblk), & - Cdn_atm_ratio_n = Cdn_atm_ratio(i,j,iblk)) + Cdn_atm = Cdn_atm (i,j,iblk), & + Cdn_atm_ratio_n = Cdn_atm_ratio(i,j,iblk)) enddo ! ij call icepack_warnings_flush(nu_diag) @@ -1576,6 +1325,9 @@ subroutine ocean_mixed_layer (dt, iblk) ! 
Compute ocean fluxes and update SST !----------------------------------------------------------------- +!DIR$ CONCURRENT !Cray +!cdir nodep !NEC +!ocl novrec !Fujitsu do ij = 1, icells i = indxi(ij) j = indxj(ij) @@ -1606,21 +1358,23 @@ end subroutine ocean_mixed_layer subroutine biogeochemistry (dt, iblk) use ice_arrays_column, only: upNO, upNH, iDi, iki, zfswin, & - darcy_V, grow_net, & + zsal_tot, darcy_V, grow_net, & PP_net, hbri,dhbr_bot, dhbr_top, Zoo,& fbio_snoice, fbio_atmice, ocean_bio, & first_ice, fswpenln, bphi, bTiz, ice_bio_net, & - snow_bio_net, fswthrun, & - ocean_bio_all, sice_rho, & + snow_bio_net, fswthrun, Rayleigh_criteria, & + ocean_bio_all, sice_rho, fzsal, fzsal_g, & bgrid, igrid, icgrid, cgrid + use ice_blocks, only: block, get_block + use ice_domain, only: blocks_ice use ice_domain_size, only: nblyr, nilyr, nslyr, n_algae, n_zaero, ncat, & n_doc, n_dic, n_don, n_fed, n_fep use ice_flux, only: meltbn, melttn, congeln, snoicen, & sst, sss, fsnow, meltsn - use ice_flux_bgc, only: hin_old, flux_bio, flux_bio_atm, faero_atm, & + use ice_flux_bgc, only: hin_old, flux_bio, flux_bio_atm, faero_atm, & nit, amm, sil, dmsp, dms, algalN, doc, don, dic, fed, fep, zaeros, hum use ice_state, only: aicen_init, vicen_init, aicen, vicen, vsnon, & - trcrn, vsnon_init, aice0 + trcrn, vsnon_init, aice0 use ice_timers, only: timer_bgc, ice_timer_start, ice_timer_stop real (kind=dbl_kind), intent(in) :: & @@ -1665,9 +1419,9 @@ subroutine biogeochemistry (dt, iblk) if (tr_brine .or. skl_bgc) then - call ice_timer_start(timer_bgc,iblk) ! biogeochemistry + call ice_timer_start(timer_bgc) ! biogeochemistry - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -1675,7 +1429,7 @@ subroutine biogeochemistry (dt, iblk) ! Define ocean concentrations for tracers used in simulation do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi call icepack_load_ocean_bio_array(max_nbtrcr = icepack_max_nbtrcr, & max_algae = icepack_max_algae, max_don = icepack_max_don, & @@ -1691,8 +1445,8 @@ subroutine biogeochemistry (dt, iblk) ocean_bio_all = ocean_bio_all(i,j,:,iblk)) do mm = 1,nbtrcr - ocean_bio(i,j,mm,iblk) = ocean_bio_all(i,j,bio_index_o(mm),iblk) - enddo ! mm + ocean_bio(i,j,mm,iblk) = ocean_bio_all(i,j,bio_index_o(mm),iblk) + enddo ! mm if (tr_zaero) then do mm = 1, n_zaero ! 
update aerosols flux_bio_atm(i,j,nlt_zaero(mm),iblk) = faero_atm(i,j,mm,iblk) @@ -1708,6 +1462,7 @@ subroutine biogeochemistry (dt, iblk) iDi = iDi (i,j,:,:, iblk), & iki = iki (i,j,:,:, iblk), & zfswin = zfswin (i,j,:,:, iblk), & + zsal_tot = zsal_tot (i,j, iblk), & darcy_V = darcy_V (i,j,:, iblk), & grow_net = grow_net (i,j, iblk), & PP_net = PP_net (i,j, iblk), & @@ -1726,11 +1481,13 @@ subroutine biogeochemistry (dt, iblk) snow_bio_net = snow_bio_net(i,j,1:nbtrcr, iblk), & fswthrun = fswthrun (i,j,:, iblk), & sice_rho = sice_rho (i,j,:, iblk), & + fzsal = fzsal (i,j, iblk), & + fzsal_g = fzsal_g (i,j, iblk), & meltbn = meltbn (i,j,:, iblk), & melttn = melttn (i,j,:, iblk), & congeln = congeln (i,j,:, iblk), & - snoicen = snoicen (i,j,:, iblk), & - sst = sst (i,j, iblk), & + snoicen = snoicen (i,j,:, iblk), & + sst = sst (i,j, iblk), & sss = sss (i,j, iblk), & fsnow = fsnow (i,j, iblk), & meltsn = meltsn (i,j,:, iblk), & @@ -1745,6 +1502,7 @@ subroutine biogeochemistry (dt, iblk) aice0 = aice0 (i,j, iblk), & trcrn = trcrn (i,j,:,:, iblk), & vsnon_init = vsnon_init (i,j,:, iblk), & + Rayleigh_criteria = Rayleigh_criteria(i,j,iblk), & skl_bgc = skl_bgc) enddo ! i @@ -1754,7 +1512,7 @@ subroutine biogeochemistry (dt, iblk) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call ice_timer_stop(timer_bgc,iblk) ! biogeochemistry + call ice_timer_stop(timer_bgc) ! biogeochemistry endif ! tr_brine .or. skl_bgc diff --git a/cicecore/cicedyn/infrastructure/ice_domain.F90 b/cicecore/cicedyn/infrastructure/ice_domain.F90 index 8b680f2d4..efe07c98a 100644 --- a/cicecore/cicedyn/infrastructure/ice_domain.F90 +++ b/cicecore/cicedyn/infrastructure/ice_domain.F90 @@ -18,15 +18,15 @@ module ice_domain use ice_kinds_mod use ice_constants, only: shlat, nhlat use ice_communicate, only: my_task, master_task, get_num_procs, & - add_mpi_barriers, ice_barrier + add_mpi_barriers use ice_broadcast, only: broadcast_scalar, broadcast_array use ice_blocks, only: block, get_block, create_blocks, nghost, & - nblocks_x, nblocks_y, nblocks_tot, nx_block, ny_block, debug_blocks + nblocks_x, nblocks_y, nblocks_tot, nx_block, ny_block use ice_distribution, only: distrb use ice_boundary, only: ice_halo use ice_exit, only: abort_ice use ice_fileunits, only: nu_nml, nml_filename, nu_diag, & - get_fileunit, release_fileunit, flush_fileunit + get_fileunit, release_fileunit use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters @@ -63,8 +63,6 @@ module ice_domain maskhalo_dyn , & ! if true, use masked halo updates for dynamics maskhalo_remap , & ! if true, use masked halo updates for transport maskhalo_bound , & ! if true, use masked halo updates for bound_state - halo_dynbundle , & ! if true, bundle halo update in dynamics - landblockelim , & ! if true, land block elimination is on orca_halogrid ! if true, input fields are haloed as defined by orca grid !----------------------------------------------------------------------- @@ -79,9 +77,8 @@ module ice_domain distribution_type, &! method to use for distributing blocks ! 'cartesian', 'roundrobin', 'sectrobin', 'sectcart' ! 'rake', 'spacecurve', etc - distribution_wght ! method for weighting work per block + distribution_wght ! method for weighting work per block ! 'block' = POP default configuration - ! 'blockall' = no land block elimination ! 'latitude' = no. ocean points * |lat| ! 
'file' = read distribution_wgth_file character (char_len_long) :: & @@ -104,7 +101,7 @@ subroutine init_domain_blocks use ice_distribution, only: processor_shape use ice_domain_size, only: ncat, nilyr, nslyr, max_blocks, & nx_global, ny_global, block_size_x, block_size_y - use ice_fileunits, only: goto_nml + !---------------------------------------------------------------------- ! ! local variables @@ -114,9 +111,6 @@ subroutine init_domain_blocks integer (int_kind) :: & nml_error ! namelist read error flag - character(len=char_len) :: nml_name ! text namelist name - character(len=char_len_long) :: tmpstr2 ! for namelist check - character(len=*), parameter :: subname = '(init_domain_blocks)' !---------------------------------------------------------------------- @@ -140,8 +134,7 @@ subroutine init_domain_blocks maskhalo_dyn, & maskhalo_remap, & maskhalo_bound, & - add_mpi_barriers, & - debug_blocks + add_mpi_barriers !---------------------------------------------------------------------- ! @@ -159,49 +152,31 @@ subroutine init_domain_blocks maskhalo_dyn = .false. ! if true, use masked halos for dynamics maskhalo_remap = .false. ! if true, use masked halos for transport maskhalo_bound = .false. ! if true, use masked halos for bound_state - halo_dynbundle = .true. ! if true, bundle halo updates in dynamics add_mpi_barriers = .false. ! if true, throttle communication - debug_blocks = .false. ! if true, print verbose block information - max_blocks = -1 ! max number of blocks per processor + max_blocks = -1 ! max number of blocks per processor block_size_x = -1 ! size of block in first horiz dimension block_size_y = -1 ! size of block in second horiz dimension nx_global = -1 ! NXGLOB, i-axis size ny_global = -1 ! NYGLOB, j-axis size - landblockelim = .true. ! on by default + call get_fileunit(nu_nml) if (my_task == master_task) then - nml_name = 'domain_nml' - write(nu_diag,*) subname,' Reading ', trim(nml_name) - - call get_fileunit(nu_nml) - open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) + open (nu_nml, file=nml_filename, status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//' ERROR: domain_nml open file '// & - trim(nml_filename), file=__FILE__, line=__LINE__) + nml_error = -1 + else + nml_error = 1 endif - - call goto_nml(nu_nml,trim(nml_name),nml_error) - if (nml_error /= 0) then - call abort_ice(subname//' ERROR: searching for '// trim(nml_name), & - file=__FILE__, line=__LINE__) - endif - - nml_error = 1 do while (nml_error > 0) read(nu_nml, nml=domain_nml,iostat=nml_error) - ! check if error - if (nml_error /= 0) then - ! 
backspace and re-read erroneous line - backspace(nu_nml) - read(nu_nml,fmt='(A)') tmpstr2 - call abort_ice(subname//' ERROR: ' // trim(nml_name) // ' reading ' // & - trim(tmpstr2), file=__FILE__, line=__LINE__) - endif end do + if (nml_error == 0) close(nu_nml) + endif + call release_fileunit(nu_nml) - close(nu_nml) - call release_fileunit(nu_nml) - + call broadcast_scalar(nml_error, master_task) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: error reading domain_nml') endif call broadcast_scalar(nprocs, master_task) @@ -215,11 +190,12 @@ subroutine init_domain_blocks call broadcast_scalar(maskhalo_remap, master_task) call broadcast_scalar(maskhalo_bound, master_task) call broadcast_scalar(add_mpi_barriers, master_task) - call broadcast_scalar(debug_blocks, master_task) if (my_task == master_task) then if (max_blocks < 1) then - max_blocks=( ((nx_global-1)/block_size_x + 1) * & - ((ny_global-1)/block_size_y + 1) - 1) / nprocs + 1 + max_blocks=int( & + ( (dble(nx_global-1)/dble(block_size_x + 1)) * & + (dble(ny_global-1)/dble(block_size_y + 1)) ) & + / dble(nprocs)) max_blocks=max(1,max_blocks) write(nu_diag,'(/,a52,i6,/)') & '(ice_domain): max_block < 1: max_block estimated to ',max_blocks @@ -241,7 +217,7 @@ subroutine init_domain_blocks !*** !*** domain size zero or negative !*** - call abort_ice(subname//' ERROR: Invalid domain: size < 1', file=__FILE__, line=__LINE__) ! no domain + call abort_ice(subname//'ERROR: Invalid domain: size < 1') ! no domain else if (nprocs /= get_num_procs()) then !*** !*** input nprocs does not match system (eg MPI) request @@ -249,14 +225,13 @@ subroutine init_domain_blocks #if (defined CESMCOUPLED) nprocs = get_num_procs() #else - write(nu_diag,*) subname,' ERROR: nprocs, get_num_procs = ',nprocs,get_num_procs() - call abort_ice(subname//' ERROR: Input nprocs not same as system request', file=__FILE__, line=__LINE__) + call abort_ice(subname//'ERROR: Input nprocs not same as system request') #endif else if (nghost < 1) then !*** !*** must have at least 1 layer of ghost cells !*** - call abort_ice(subname//' ERROR: Not enough ghost cells allocated', file=__FILE__, line=__LINE__) + call abort_ice(subname//'ERROR: Not enough ghost cells allocated') endif !---------------------------------------------------------------------- @@ -293,7 +268,6 @@ subroutine init_domain_blocks write(nu_diag,'(a,l6)') ' maskhalo_remap = ', maskhalo_remap write(nu_diag,'(a,l6)') ' maskhalo_bound = ', maskhalo_bound write(nu_diag,'(a,l6)') ' add_mpi_barriers = ', add_mpi_barriers - write(nu_diag,'(a,l6)') ' debug_blocks = ', debug_blocks write(nu_diag,'(a,2i6)') ' block_size_x,_y = ', block_size_x, block_size_y write(nu_diag,'(a,i6)') ' max_blocks = ', max_blocks write(nu_diag,'(a,i6,/)')' Number of ghost cells = ', nghost @@ -305,7 +279,7 @@ end subroutine init_domain_blocks !*********************************************************************** - subroutine init_domain_distribution(KMTG,ULATG,grid_ice) + subroutine init_domain_distribution(KMTG,ULATG) ! This routine calls appropriate setup routines to distribute blocks ! across processors and defines arrays with block ids for any local @@ -313,16 +287,13 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) ! initialized here through calls to the appropriate boundary routines. 
use ice_boundary, only: ice_HaloCreate - use ice_distribution, only: create_distribution, create_local_block_ids, ice_distributionGet + use ice_distribution, only: create_distribution, create_local_block_ids use ice_domain_size, only: max_blocks, nx_global, ny_global real (dbl_kind), dimension(nx_global,ny_global), intent(in) :: & KMTG ,&! global topography ULATG ! global latitude field (radians) - character(len=*), intent(in) :: & - grid_ice ! grid_ice, B, C, CD, etc - !---------------------------------------------------------------------- ! ! local variables @@ -340,9 +311,6 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) integer (int_kind) :: & i,j,n ,&! dummy loop indices ig,jg ,&! global indices - igm1,igp1,jgm1,jgp1,&! global indices - ninfo ,&! ice_distributionGet check - np, nlb, m ,&! debug blocks temporaries work_unit ,&! size of quantized work unit #ifdef USE_NETCDF fid ,&! file id @@ -358,7 +326,6 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) rad_to_deg ! radians to degrees integer (int_kind), dimension(:), allocatable :: & - blkinfo ,&! ice_distributionGet check nocn ,&! number of ocean points per block work_per_block ! number of work units per block @@ -374,7 +341,7 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) ! ! check that there are at least nghost+1 rows or columns of land cells ! for closed boundary conditions (otherwise grid lengths are zero in -! cells neighboring ocean points). +! cells neighboring ocean points). ! !---------------------------------------------------------------------- @@ -384,7 +351,6 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) file=__FILE__, line=__LINE__) if (trim(ns_boundary_type) == 'closed') then - call abort_ice(subname//' ERROR: ns_boundary_type = closed not supported', file=__FILE__, line=__LINE__) allocate(nocn(nblocks_tot)) nocn = 0 do n=1,nblocks_tot @@ -416,15 +382,13 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) enddo endif if (nocn(n) > 0) then - write(nu_diag,*) subname,'ns closed, Not enough land cells along ns edge' - call abort_ice(subname//' ERROR: Not enough land cells along ns edge for ns closed', & - file=__FILE__, line=__LINE__) + print*, 'ice: Not enough land cells along ns edge' + call abort_ice(subname//'ERROR: Not enough land cells along ns edge') endif enddo deallocate(nocn) endif if (trim(ew_boundary_type) == 'closed') then - call abort_ice(subname//' ERROR: ew_boundary_type = closed not supported', file=__FILE__, line=__LINE__) allocate(nocn(nblocks_tot)) nocn = 0 do n=1,nblocks_tot @@ -456,9 +420,8 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) enddo endif if (nocn(n) > 0) then - write(nu_diag,*) subname,'ew closed, Not enough land cells along ew edge' - call abort_ice(subname//' ERROR: Not enough land cells along ew edge for ew closed', & - file=__FILE__, line=__LINE__) + print*, 'ice: Not enough land cells along ew edge' + call abort_ice(subname//'ERROR: Not enough land cells along ew edge') endif enddo deallocate(nocn) @@ -472,13 +435,11 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) !---------------------------------------------------------------------- if (distribution_wght == 'latitude') then - flat = max(NINT(abs(ULATG*rad_to_deg), int_kind),1) ! linear function + flat = NINT(abs(ULATG*rad_to_deg), int_kind) ! linear function else flat = 1 endif - if (distribution_wght == 'blockall') landblockelim = .false. 
- allocate(nocn(nblocks_tot)) if (distribution_wght == 'file') then @@ -486,29 +447,16 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) if (my_task == master_task) then ! cannot use ice_read_write due to circular dependency #ifdef USE_NETCDF + write(nu_diag,*) 'read ',trim(distribution_wght_file),minval(wght),maxval(wght) status = nf90_open(distribution_wght_file, NF90_NOWRITE, fid) if (status /= nf90_noerr) then - call abort_ice(subname//' ERROR: Cannot open '//trim(distribution_wght_file), & - file=__FILE__, line=__LINE__) + call abort_ice (subname//'ERROR: Cannot open '//trim(distribution_wght_file)) endif status = nf90_inq_varid(fid, 'wght', varid) - if (status /= nf90_noerr) then - call abort_ice(subname//' ERROR: Cannot find wght '//trim(distribution_wght_file), & - file=__FILE__, line=__LINE__) - endif status = nf90_get_var(fid, varid, wght) - if (status /= nf90_noerr) then - call abort_ice(subname//' ERROR: Cannot get wght '//trim(distribution_wght_file), & - file=__FILE__, line=__LINE__) - endif status = nf90_close(fid) - if (status /= nf90_noerr) then - call abort_ice(subname//' ERROR: Cannot close '//trim(distribution_wght_file), & - file=__FILE__, line=__LINE__) - endif - write(nu_diag,*) 'read ',trim(distribution_wght_file),minval(wght),maxval(wght) #else - call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) #endif endif @@ -547,25 +495,10 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) if (this_block%i_glob(i) > 0) then ig = this_block%i_glob(i) jg = this_block%j_glob(j) - if (grid_ice == 'C' .or. grid_ice == 'CD') then - ! Have to be careful about block elimination with C/CD - ! Use a bigger stencil - igm1 = mod(ig-2+nx_global,nx_global)+1 - igp1 = mod(ig,nx_global)+1 - jgm1 = max(jg-1,1) - jgp1 = min(jg+1,ny_global) - if ((KMTG(ig ,jg ) > puny .or. & - KMTG(igm1,jg ) > puny .or. KMTG(igp1,jg ) > puny .or. & - KMTG(ig ,jgp1) > puny .or. KMTG(ig ,jgm1) > puny) .and. & - (ULATG(ig,jg) < shlat/rad_to_deg .or. & - ULATG(ig,jg) > nhlat/rad_to_deg) ) & - nocn(n) = nocn(n) + flat(ig,jg) - else - if (KMTG(ig,jg) > puny .and. & - (ULATG(ig,jg) < shlat/rad_to_deg .or. & - ULATG(ig,jg) > nhlat/rad_to_deg) ) & - nocn(n) = nocn(n) + flat(ig,jg) - endif + if (KMTG(ig,jg) > puny .and. & + (ULATG(ig,jg) < shlat/rad_to_deg .or. & + ULATG(ig,jg) > nhlat/rad_to_deg) ) & + nocn(n) = nocn(n) + flat(ig,jg) endif end do endif @@ -575,15 +508,15 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) !*** points, so where the block is not completely land, !*** reset nocn to be the full size of the block - ! use processor_shape = 'square-pop' and distribution_wght = 'block' + ! use processor_shape = 'square-pop' and distribution_wght = 'block' ! to make CICE and POP decompositions/distributions identical. #ifdef CICE_IN_NEMO ! Keep all blocks even the ones only containing land points if (distribution_wght == 'block') nocn(n) = nx_block*ny_block #else - if (distribution_wght == 'block' .and. nocn(n) > 0) nocn(n) = nx_block*ny_block - if (.not. landblockelim) nocn(n) = max(nocn(n),1) + if (distribution_wght == 'block' .and. & ! POP style + nocn(n) > 0) nocn(n) = nx_block*ny_block #endif end do endif ! 
distribution_wght = file @@ -595,11 +528,11 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) allocate(work_per_block(nblocks_tot)) where (nocn > 1) - work_per_block = nocn/work_unit + 2 + work_per_block = nocn/work_unit + 2 elsewhere (nocn == 1) - work_per_block = nocn/work_unit + 1 + work_per_block = nocn/work_unit + 1 elsewhere - work_per_block = 0 + work_per_block = 0 end where if (my_task == master_task) then write(nu_diag,*) 'ice_domain work_unit, max_work_unit = ',work_unit, max_work_unit @@ -628,85 +561,6 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) call create_local_block_ids(blocks_ice, distrb_info) - ! write out block distribution - ! internal check of icedistributionGet as part of verification process - if (debug_blocks) then - - call flush_fileunit(nu_diag) - call ice_barrier() - if (my_task == master_task) then - write(nu_diag,*) ' ' - write(nu_diag,'(2a)') subname, ' Blocks by Proc:' - endif - call ice_distributionGet(distrb_info, nprocs=np, numLocalBlocks=nlb) - do m = 1, np - if (m == my_task+1) then - do n=1,nlb - write(nu_diag,'(2a,3i8)') & - subname,' my_task, local block ID, global block ID: ', & - my_task, n, distrb_info%blockGlobalID(n) - enddo - call flush_fileunit(nu_diag) - endif - call ice_barrier() - enddo - - if (my_task == master_task) then - write(nu_diag,*) ' ' - write(nu_diag,'(2a)') subname, ' Blocks by Global Block ID:' - do m = 1, nblocks_tot - write(nu_diag,'(2a,3i8)') & - subname,' global block id, proc, local block ID: ', & - m, distrb_info%blockLocation(m), distrb_info%blockLocalID(m) - enddo - call flush_fileunit(nu_diag) - endif - call ice_barrier() - - call ice_distributionGet(distrb_info, nprocs=ninfo) - if (ninfo /= distrb_info%nprocs) & - call abort_ice(subname//' ice_distributionGet nprocs ERROR', file=__FILE__, line=__LINE__) - - call ice_distributionGet(distrb_info, communicator=ninfo) - if (ninfo /= distrb_info%communicator) & - call abort_ice(subname//' ice_distributionGet communicator ERROR', file=__FILE__, line=__LINE__) - - call ice_distributionGet(distrb_info, numLocalBlocks=ninfo) - if (ninfo /= distrb_info%numLocalBlocks) & - call abort_ice(subname//' ice_distributionGet numLocalBlocks ERROR', file=__FILE__, line=__LINE__) - - allocate(blkinfo(ninfo)) - - call ice_distributionGet(distrb_info, blockGlobalID = blkinfo) - do n = 1, ninfo - if (blkinfo(n) /= distrb_info%blockGlobalID(n)) & - call abort_ice(subname//' ice_distributionGet blockGlobalID ERROR', file=__FILE__, line=__LINE__) - enddo - - deallocate(blkinfo) - allocate(blkinfo(nblocks_tot)) - - call ice_distributionGet(distrb_info, blockLocation = blkinfo) - do n = 1, nblocks_tot - if (blkinfo(n) /= distrb_info%blockLocation(n)) & - call abort_ice(subname//' ice_distributionGet blockLocation ERROR', file=__FILE__, line=__LINE__) - enddo - - call ice_distributionGet(distrb_info, blockLocalID = blkinfo) - do n = 1, nblocks_tot - if (blkinfo(n) /= distrb_info%blockLocalID(n)) & - call abort_ice(subname//' ice_distributionGet blockLocalID ERROR', file=__FILE__, line=__LINE__) - enddo - - deallocate(blkinfo) - - if (my_task == master_task) then - write(nu_diag,*) ' ' - write(nu_diag,'(2a)') subname,' ice_distributionGet checks pass' - write(nu_diag,*) ' ' - endif - endif - if (associated(blocks_ice)) then nblocks = size(blocks_ice) else @@ -715,10 +569,10 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) nblocks_max = 0 tblocks_tmp = 0 do n=0,distrb_info%nprocs - 1 - nblocks_tmp = nblocks - call broadcast_scalar(nblocks_tmp, n) - nblocks_max = 
max(nblocks_max,nblocks_tmp) - tblocks_tmp = tblocks_tmp + nblocks_tmp + nblocks_tmp = nblocks + call broadcast_scalar(nblocks_tmp, n) + nblocks_max = max(nblocks_max,nblocks_tmp) + tblocks_tmp = tblocks_tmp + nblocks_tmp end do if (my_task == master_task) then @@ -727,16 +581,19 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) endif if (nblocks_max > max_blocks) then - write(outstring,*) ' ERROR: num blocks exceed max: increase max to', nblocks_max - call abort_ice(subname//trim(outstring), file=__FILE__, line=__LINE__) + write(outstring,*) & + 'ERROR: num blocks exceed max: increase max to', nblocks_max + call abort_ice(subname//trim(outstring), & + file=__FILE__, line=__LINE__) else if (nblocks_max < max_blocks) then - write(outstring,*) 'WARNING: ice no. blocks too large: decrease max to', nblocks_max - if (my_task == master_task) then - write(nu_diag,*) ' ********WARNING***********' - write(nu_diag,*) subname,trim(outstring) - write(nu_diag,*) ' **************************' - write(nu_diag,*) ' ' - endif + write(outstring,*) & + 'WARNING: ice no. blocks too large: decrease max to', nblocks_max + if (my_task == master_task) then + write(nu_diag,*) ' ********WARNING***********' + write(nu_diag,*) subname,trim(outstring) + write(nu_diag,*) ' **************************' + write(nu_diag,*) ' ' + endif endif !---------------------------------------------------------------------- diff --git a/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 b/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 index bde40dd14..7eb7c020d 100644 --- a/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 @@ -9,7 +9,7 @@ ! 2004-05: Block structure added by William Lipscomb ! Restart module separated from history module ! 2006 ECH: Accepted some CESM code into mainstream CICE -! Converted to free source form (F90) +! Converted to free source form (F90) ! 2008 ECH: Rearranged order in which internal stresses are written and read ! 2010 ECH: Changed eice, esno to qice, qsno ! 
2012 ECH: Added routines for reading/writing extended grid @@ -19,10 +19,8 @@ module ice_restart_driver use ice_kinds_mod use ice_arrays_column, only: oceanmixed_ice - use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c1, p5, & field_loc_center, field_loc_NEcorner, & - field_loc_Eface, field_loc_Nface, & field_type_scalar, field_type_vector use ice_restart_shared, only: restart_dir, pointer_file, & runid, use_restart_time, lenstr, restart_coszen @@ -55,18 +53,13 @@ subroutine dumpfile(filename_spec) use ice_blocks, only: nx_block, ny_block use ice_domain, only: nblocks use ice_domain_size, only: nilyr, nslyr, ncat, max_blocks - use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask, kdyn use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & + strocnxT, strocnyT, sst, frzmlt, iceumask, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & - stress12_1, stress12_2, stress12_3, stress12_4, & - stresspT, stressmT, stress12T, & - stresspU, stressmU, stress12U + stress12_1, stress12_2, stress12_3, stress12_4 use ice_flux, only: coszen - use ice_grid, only: grid_ice, tmask - use ice_state, only: aicen, vicen, vsnon, trcrn, uvel, vvel, & - uvelE, vvelE, uvelN, vvelN + use ice_state, only: aicen, vicen, vsnon, trcrn, uvel, vvel character(len=char_len_long), intent(in), optional :: filename_spec @@ -86,7 +79,7 @@ subroutine dumpfile(filename_spec) character(len=*), parameter :: subname = '(dumpfile)' call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & - nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) + nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -99,20 +92,6 @@ subroutine dumpfile(filename_spec) diag = .true. - !----------------------------------------------------------------- - ! Zero out tracers over land - !----------------------------------------------------------------- - - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (.not. tmask(i,j,iblk)) trcrn(i,j,:,:,iblk) = c0 - enddo - enddo - enddo - !$OMP END PARALLEL DO - !----------------------------------------------------------------- ! state variables ! Tsfc is the only tracer written to binary files. All other @@ -148,24 +127,10 @@ subroutine dumpfile(filename_spec) call write_restart_field(nu_dump,0,uvel,'ruf8','uvel',1,diag) call write_restart_field(nu_dump,0,vvel,'ruf8','vvel',1,diag) - if (grid_ice == 'CD') then - call write_restart_field(nu_dump,0,uvelE,'ruf8','uvelE',1,diag) - call write_restart_field(nu_dump,0,vvelE,'ruf8','vvelE',1,diag) - call write_restart_field(nu_dump,0,uvelN,'ruf8','uvelN',1,diag) - call write_restart_field(nu_dump,0,vvelN,'ruf8','vvelN',1,diag) - endif - - if (grid_ice == 'C') then - call write_restart_field(nu_dump,0,uvelE,'ruf8','uvelE',1,diag) - call write_restart_field(nu_dump,0,vvelN,'ruf8','vvelN',1,diag) - endif - !----------------------------------------------------------------- ! 
radiation fields !----------------------------------------------------------------- - if (restart_coszen) call write_restart_field(nu_dump,0,coszen,'ruf8','coszen',1,diag) - call write_restart_field(nu_dump,0,scale_factor,'ruf8','scale_factor',1,diag) call write_restart_field(nu_dump,0,swvdr,'ruf8','swvdr',1,diag) @@ -176,8 +141,8 @@ subroutine dumpfile(filename_spec) !----------------------------------------------------------------- ! ocean stress (for bottom heat flux in thermo) !----------------------------------------------------------------- - call write_restart_field(nu_dump,0,strocnxT_iavg,'ruf8','strocnxT',1,diag) - call write_restart_field(nu_dump,0,strocnyT_iavg,'ruf8','strocnyT',1,diag) + call write_restart_field(nu_dump,0,strocnxT,'ruf8','strocnxT',1,diag) + call write_restart_field(nu_dump,0,strocnyT,'ruf8','strocnyT',1,diag) !----------------------------------------------------------------- ! internal stress @@ -197,71 +162,21 @@ subroutine dumpfile(filename_spec) call write_restart_field(nu_dump,0,stress12_2,'ruf8','stress12_2',1,diag) call write_restart_field(nu_dump,0,stress12_4,'ruf8','stress12_4',1,diag) - if (grid_ice == 'CD') then - call write_restart_field(nu_dump,0,stresspT ,'ruf8','stresspT' ,1,diag) - call write_restart_field(nu_dump,0,stressmT ,'ruf8','stressmT' ,1,diag) - call write_restart_field(nu_dump,0,stress12T,'ruf8','stress12T',1,diag) - call write_restart_field(nu_dump,0,stresspU ,'ruf8','stresspU' ,1,diag) - call write_restart_field(nu_dump,0,stressmU ,'ruf8','stressmU' ,1,diag) - call write_restart_field(nu_dump,0,stress12U,'ruf8','stress12U',1,diag) - endif - - if (grid_ice == 'C') then - call write_restart_field(nu_dump,0,stresspT ,'ruf8','stresspT' ,1,diag) - call write_restart_field(nu_dump,0,stressmT ,'ruf8','stressmT' ,1,diag) - call write_restart_field(nu_dump,0,stress12U,'ruf8','stress12U',1,diag) - endif - !----------------------------------------------------------------- ! ice mask for dynamics !----------------------------------------------------------------- - if (kdyn > 0) then - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - if (iceUmask(i,j,iblk)) work1(i,j,iblk) = c1 - enddo - enddo + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + work1(i,j,iblk) = c0 + if (iceumask(i,j,iblk)) work1(i,j,iblk) = c1 enddo - !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) - - if (grid_ice == 'CD' .or. grid_ice == 'C') then - - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - if (iceNmask(i,j,iblk)) work1(i,j,iblk) = c1 - enddo - enddo - enddo - !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','icenmask',1,diag) - - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - work1(i,j,iblk) = c0 - if (iceEmask(i,j,iblk)) work1(i,j,iblk) = c1 - enddo - enddo - enddo - !$OMP END PARALLEL DO - call write_restart_field(nu_dump,0,work1,'ruf8','iceemask',1,diag) - endif - else - work1(:,:,:) = c0 - call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) - if (grid_ice == 'CD' .or. 
grid_ice == 'C') then - call write_restart_field(nu_dump,0,work1,'ruf8','icenmask',1,diag) - call write_restart_field(nu_dump,0,work1,'ruf8','iceemask',1,diag) - endif - endif + enddo + enddo + !$OMP END PARALLEL DO + call write_restart_field(nu_dump,0,work1,'ruf8','iceumask',1,diag) ! for mixed layer model if (oceanmixed_ice) then @@ -280,23 +195,20 @@ subroutine restartfile (ice_ic) use ice_boundary, only: ice_HaloUpdate_stress use ice_blocks, only: nghost, nx_block, ny_block - use ice_calendar, only: istep0, npt, calendar + use ice_calendar, only: istep0, npt + use ice_communicate, only: my_task, master_task use ice_domain, only: nblocks, halo_info use ice_domain_size, only: nilyr, nslyr, ncat, & max_blocks - use ice_dyn_shared, only: iceUmask, iceEmask, iceNmask,kdyn use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT_iavg, strocnyT_iavg, sst, frzmlt, & + strocnxT, strocnyT, sst, frzmlt, iceumask, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & - stress12_1, stress12_2, stress12_3, stress12_4, & - stresspT, stressmT, stress12T, & - stresspU, stressmU, stress12U - use ice_flux, only: coszen, Tf - use ice_grid, only: tmask, grid_type, grid_ice, grid_average_X2Y + stress12_1, stress12_2, stress12_3, stress12_4 + use ice_flux, only: coszen + use ice_grid, only: tmask, grid_type use ice_state, only: trcr_depend, aice, vice, vsno, trcr, & aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & - uvelE, vvelE, uvelN, vvelN, & trcr_base, nt_strata, n_trcr_strata character (*), optional :: ice_ic @@ -324,13 +236,12 @@ subroutine restartfile (ice_ic) file=__FILE__, line=__LINE__) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & - nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) + nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) call init_restart_read(ice_ic) - call calendar() diag = .true. @@ -386,30 +297,6 @@ subroutine restartfile (ice_ic) call read_restart_field(nu_restart,0,vvel,'ruf8', & 'vvel',1,diag,field_loc_NEcorner, field_type_vector) - if (grid_ice == 'CD') then - if (query_field(nu_restart,'uvelE')) & - call read_restart_field(nu_restart,0,uvelE,'ruf8', & - 'uvelE',1,diag,field_loc_Eface, field_type_vector) - if (query_field(nu_restart,'vvelE')) & - call read_restart_field(nu_restart,0,vvelE,'ruf8', & - 'vvelE',1,diag,field_loc_Eface, field_type_vector) - if (query_field(nu_restart,'uvelN')) & - call read_restart_field(nu_restart,0,uvelN,'ruf8', & - 'uvelN',1,diag,field_loc_Nface, field_type_vector) - if (query_field(nu_restart,'vvelN')) & - call read_restart_field(nu_restart,0,vvelN,'ruf8', & - 'vvelN',1,diag,field_loc_Nface, field_type_vector) - endif - - if (grid_ice == 'C') then - if (query_field(nu_restart,'uvelE')) & - call read_restart_field(nu_restart,0,uvelE,'ruf8', & - 'uvelE',1,diag,field_loc_Eface, field_type_vector) - if (query_field(nu_restart,'vvelN')) & - call read_restart_field(nu_restart,0,vvelN,'ruf8', & - 'vvelN',1,diag,field_loc_Nface, field_type_vector) - endif - !----------------------------------------------------------------- ! 
radiation fields !----------------------------------------------------------------- @@ -436,9 +323,9 @@ subroutine restartfile (ice_ic) if (my_task == master_task) & write(nu_diag,*) 'min/max ocean stress components' - call read_restart_field(nu_restart,0,strocnxT_iavg,'ruf8', & + call read_restart_field(nu_restart,0,strocnxT,'ruf8', & 'strocnxT',1,diag,field_loc_center, field_type_vector) - call read_restart_field(nu_restart,0,strocnyT_iavg,'ruf8', & + call read_restart_field(nu_restart,0,strocnyT,'ruf8', & 'strocnyT',1,diag,field_loc_center, field_type_vector) !----------------------------------------------------------------- @@ -448,7 +335,7 @@ subroutine restartfile (ice_ic) !----------------------------------------------------------------- if (my_task == master_task) write(nu_diag,*) & 'internal stress components' - + call read_restart_field(nu_restart,0,stressp_1,'ruf8', & 'stressp_1',1,diag,field_loc_center,field_type_scalar) ! stressp_1 call read_restart_field(nu_restart,0,stressp_3,'ruf8', & @@ -477,27 +364,6 @@ subroutine restartfile (ice_ic) call read_restart_field(nu_restart,0,stress12_4,'ruf8', & 'stress12_4',1,diag,field_loc_center,field_type_scalar) ! stress12_4 - if (grid_ice == 'CD' .or. grid_ice == 'C') then - if (query_field(nu_restart,'stresspT')) & - call read_restart_field(nu_restart,0,stresspT,'ruf8', & - 'stresspT' ,1,diag,field_loc_center,field_type_scalar) ! stresspT - if (query_field(nu_restart,'stressmT')) & - call read_restart_field(nu_restart,0,stressmT,'ruf8', & - 'stressmT' ,1,diag,field_loc_center,field_type_scalar) ! stressmT - if (query_field(nu_restart,'stress12T')) & - call read_restart_field(nu_restart,0,stress12T,'ruf8', & - 'stress12T',1,diag,field_loc_center,field_type_scalar) ! stress12T - if (query_field(nu_restart,'stresspU')) & - call read_restart_field(nu_restart,0,stresspU,'ruf8', & - 'stresspU' ,1,diag,field_loc_NEcorner,field_type_scalar) ! stresspU - if (query_field(nu_restart,'stressmU')) & - call read_restart_field(nu_restart,0,stressmU,'ruf8', & - 'stressmU' ,1,diag,field_loc_NEcorner,field_type_scalar) ! stressmU - if (query_field(nu_restart,'stress12U')) & - call read_restart_field(nu_restart,0,stress12U,'ruf8', & - 'stress12U',1,diag,field_loc_NEcorner,field_type_scalar) ! stress12U - endif - if (trim(grid_type) == 'tripole') then call ice_HaloUpdate_stress(stressp_1, stressp_3, halo_info, & field_loc_center, field_type_scalar) @@ -525,90 +391,23 @@ subroutine restartfile (ice_ic) field_loc_center, field_type_scalar) call ice_HaloUpdate_stress(stress12_4, stress12_2, halo_info, & field_loc_center, field_type_scalar) - ! TODO: CD-grid endif !----------------------------------------------------------------- ! ice mask for dynamics !----------------------------------------------------------------- - if (kdyn > 0) then + if (my_task == master_task) & + write(nu_diag,*) 'ice mask for dynamics' - if (my_task == master_task) & - write(nu_diag,*) 'ice mask for dynamics' - if (query_field(nu_restart,'iceumask')) then - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'iceumask',1,diag,field_loc_center, field_type_scalar) - - iceUmask(:,:,:) = .false. - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceUmask(i,j,iblk) = .true. - enddo - enddo - enddo - !$OMP END PARALLEL DO - endif - if (grid_ice == 'CD' .or. 
grid_ice == 'C') then - - if (query_field(nu_restart,'icenmask')) then - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'icenmask',1,diag,field_loc_center, field_type_scalar) - - iceNmask(:,:,:) = .false. - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceNmask(i,j,iblk) = .true. - enddo - enddo - enddo - !$OMP END PARALLEL DO - endif - - if (query_field(nu_restart,'iceemask')) then - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'iceemask',1,diag,field_loc_center, field_type_scalar) - - iceEmask(:,:,:) = .false. - !$OMP PARALLEL DO PRIVATE(iblk,i,j) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceEmask(i,j,iblk) = .true. - enddo - enddo - enddo - !$OMP END PARALLEL DO - endif - endif - else - if (my_task == master_task) & - write(nu_diag,*) 'ice mask for dynamics - not used, however mandatory to read in binary files' - if (query_field(nu_restart,'iceumask')) then - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'iceumask',1,diag,field_loc_center, field_type_scalar) - endif - if (grid_ice == 'CD' .or. grid_ice == 'C') then - if (query_field(nu_restart,'icenmask')) then - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'icenmask',1,diag,field_loc_center, field_type_scalar) - endif - if (query_field(nu_restart,'iceemask')) then - call read_restart_field(nu_restart,0,work1,'ruf8', & - 'iceemask',1,diag,field_loc_center, field_type_scalar) - endif - endif - endif + call read_restart_field(nu_restart,0,work1,'ruf8', & + 'iceumask',1,diag,field_loc_center, field_type_scalar) - ! set Tsfcn to c0 on land + iceumask(:,:,:) = .false. !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - if (.not. tmask(i,j,iblk)) trcrn(i,j,nt_Tsfc,:,iblk) = c0 + if (work1(i,j,iblk) > p5) iceumask(i,j,iblk) = .true. enddo enddo enddo @@ -663,7 +462,6 @@ subroutine restartfile (ice_ic) stress12_4(i,j,iblk) = c0 enddo enddo - ! TODO: CD-grid ? enddo !$OMP END PARALLEL DO @@ -700,8 +498,7 @@ subroutine restartfile (ice_ic) trcr_depend = trcr_depend, & trcr_base = trcr_base, & n_trcr_strata = n_trcr_strata, & - nt_strata = nt_strata, & - Tf = Tf(i,j,iblk)) + nt_strata = nt_strata) aice_init(i,j,iblk) = aice(i,j,iblk) enddo @@ -730,14 +527,13 @@ subroutine restartfile_v4 (ice_ic) use ice_broadcast, only: broadcast_scalar use ice_blocks, only: nghost, nx_block, ny_block - use ice_calendar, only: istep0, istep1, timesecs, calendar, npt, & - set_date_from_timesecs + use ice_calendar, only: istep0, istep1, time, time_forc, calendar, npt + use ice_communicate, only: my_task, master_task use ice_domain, only: nblocks, distrb_info use ice_domain_size, only: nilyr, nslyr, ncat, nx_global, ny_global, & max_blocks - use ice_dyn_shared, only: iceUmask use ice_flux, only: scale_factor, swvdr, swvdf, swidr, swidf, & - strocnxT_iavg, strocnyT_iavg, sst, frzmlt, Tf, & + strocnxT, strocnyT, sst, frzmlt, iceumask, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4 @@ -773,9 +569,6 @@ subroutine restartfile_v4 (ice_ic) real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1, work_g2 - real (kind=dbl_kind) :: & - time_forc ! 
historic, now local - character(len=*), parameter :: subname = '(restartfile_v4)' call icepack_query_tracer_sizes(ntrcr_out=ntrcr) @@ -784,7 +577,7 @@ subroutine restartfile_v4 (ice_ic) file=__FILE__, line=__LINE__) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & - nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) + nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -807,15 +600,14 @@ subroutine restartfile_v4 (ice_ic) if (use_restart_time) then if (my_task == master_task) then - read (nu_restart) istep0,timesecs,time_forc - write(nu_diag,*) 'Restart read at istep=',istep0,timesecs + read (nu_restart) istep0,time,time_forc + write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc endif call broadcast_scalar(istep0,master_task) istep1 = istep0 - call broadcast_scalar(timesecs,master_task) -! call broadcast_scalar(time_forc,master_task) - call set_date_from_timesecs(timesecs) - call calendar() + call broadcast_scalar(time,master_task) + call broadcast_scalar(time_forc,master_task) + call calendar(time) else @@ -902,9 +694,9 @@ subroutine restartfile_v4 (ice_ic) if (my_task == master_task) & write(nu_diag,*) 'min/max ocean stress components' - call ice_read(nu_restart,0,strocnxT_iavg,'ruf8',diag, & + call ice_read(nu_restart,0,strocnxT,'ruf8',diag, & field_loc_center, field_type_vector) - call ice_read(nu_restart,0,strocnyT_iavg,'ruf8',diag, & + call ice_read(nu_restart,0,strocnyT,'ruf8',diag, & field_loc_center, field_type_vector) !----------------------------------------------------------------- @@ -914,7 +706,7 @@ subroutine restartfile_v4 (ice_ic) !----------------------------------------------------------------- if (my_task == master_task) write(nu_diag,*) & 'internal stress components' - + allocate (work_g1(nx_global,ny_global), & work_g2(nx_global,ny_global)) @@ -971,12 +763,12 @@ subroutine restartfile_v4 (ice_ic) call ice_read(nu_restart,0,work1,'ruf8',diag, & field_loc_center, field_type_scalar) - iceUmask(:,:,:) = .false. + iceumask(:,:,:) = .false. !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1, ny_block do i = 1, nx_block - if (work1(i,j,iblk) > p5) iceUmask(i,j,iblk) = .true. + if (work1(i,j,iblk) > p5) iceumask(i,j,iblk) = .true. enddo enddo enddo @@ -1069,8 +861,7 @@ subroutine restartfile_v4 (ice_ic) trcr_depend = trcr_depend, & trcr_base = trcr_base, & n_trcr_strata = n_trcr_strata, & - nt_strata = nt_strata, & - Tf = Tf(i,j,iblk)) + nt_strata = nt_strata) aice_init(i,j,iblk) = aice(i,j,iblk) enddo @@ -1085,7 +876,7 @@ subroutine restartfile_v4 (ice_ic) ! creates new file filename = trim(restart_dir) // '/iced.converted' - call dumpfile(filename) + call dumpfile(filename) call final_restart ! 
stop diff --git a/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 index 606f0d46b..8ecfeb6f1 100644 --- a/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 @@ -11,16 +11,15 @@ module ice_restart use ice_kinds_mod use ice_restart_shared, only: & restart, restart_ext, restart_dir, restart_file, pointer_file, & - runid, runtype, use_restart_time, lenstr + runid, runtype, use_restart_time, restart_format, lenstr use ice_fileunits, only: nu_diag, nu_rst_pointer use ice_fileunits, only: nu_dump, nu_dump_eap, nu_dump_FY, nu_dump_age use ice_fileunits, only: nu_dump_lvl, nu_dump_pond, nu_dump_hbrine - use ice_fileunits, only: nu_dump_iso, nu_dump_snow - use ice_fileunits, only: nu_dump_bgc, nu_dump_aero, nu_dump_fsd - use ice_fileunits, only: nu_restart, nu_restart_eap, nu_restart_FY, nu_restart_age + use ice_fileunits, only: nu_dump_bgc, nu_dump_aero, nu_dump_fsd, nu_dump_iso + use ice_fileunits, only: nu_restart, nu_restart_eap, nu_restart_FY, nu_restart_age use ice_fileunits, only: nu_restart_lvl, nu_restart_pond, nu_restart_hbrine use ice_fileunits, only: nu_restart_bgc, nu_restart_aero, nu_restart_fsd - use ice_fileunits, only: nu_restart_iso, nu_restart_snow + use ice_fileunits, only: nu_restart_iso use ice_exit, only: abort_ice use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_sizes @@ -30,10 +29,7 @@ module ice_restart implicit none private public :: init_restart_write, init_restart_read, & - read_restart_field, write_restart_field, final_restart, & - query_field - - real(kind=dbl_kind) :: time_forc = -99. ! historic now local + read_restart_field, write_restart_field, final_restart !======================================================================= @@ -46,8 +42,7 @@ module ice_restart subroutine init_restart_read(ice_ic) - use ice_calendar, only: istep0, istep1, timesecs, npt, myear, & - set_date_from_timesecs + use ice_calendar, only: istep0, istep1, time, time_forc, npt, nyr use ice_communicate, only: my_task, master_task use ice_dyn_shared, only: kdyn use ice_read_write, only: ice_open, ice_open_ext @@ -57,8 +52,9 @@ subroutine init_restart_read(ice_ic) ! 
local variables logical (kind=log_kind) :: & - tr_fsd, tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & - tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow + solve_zsal, tr_fsd, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & + tr_pond_topo, tr_pond_lvl, tr_brine character(len=char_len_long) :: & filename, filename0 @@ -76,18 +72,19 @@ subroutine init_restart_read(ice_ic) character(len=*), parameter :: subname = '(init_restart_read)' + call icepack_query_parameters( & + solve_zsal_out=solve_zsal) call icepack_query_tracer_sizes( & nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & - tr_iso_out=tr_iso, tr_aero_out=tr_aero, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & - tr_snow_out=tr_snow, tr_brine_out=tr_brine) + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (present(ice_ic)) then + if (present(ice_ic)) then filename = trim(ice_ic) else if (my_task == master_task) then @@ -108,19 +105,18 @@ subroutine init_restart_read(ice_ic) call ice_open(nu_restart,trim(filename),0) endif if (use_restart_time) then - read (nu_restart) istep0,timesecs,time_forc,myear + read (nu_restart) istep0,time,time_forc,nyr else read (nu_restart) iignore,rignore,rignore ! use namelist values endif - write(nu_diag,*) 'Restart read at istep=',istep0,timesecs + write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc endif call broadcast_scalar(istep0,master_task) - call broadcast_scalar(timesecs,master_task) + call broadcast_scalar(time,master_task) call broadcast_scalar(time_forc,master_task) - call broadcast_scalar(myear,master_task) - call set_date_from_timesecs(timesecs) - + call broadcast_scalar(nyr,master_task) + istep1 = istep0 ! 
Supplemental restart files @@ -128,7 +124,7 @@ subroutine init_restart_read(ice_ic) if (kdyn == 2) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: eap restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: eap restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -148,7 +144,7 @@ subroutine init_restart_read(ice_ic) if (tr_fsd) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: fsd restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: fsd restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -168,7 +164,7 @@ subroutine init_restart_read(ice_ic) if (tr_iage) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: iage restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: iage restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -188,7 +184,7 @@ subroutine init_restart_read(ice_ic) if (tr_FY) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: FY restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: FY restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -208,7 +204,7 @@ subroutine init_restart_read(ice_ic) if (tr_lvl) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: lvl restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: lvl restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -225,15 +221,15 @@ subroutine init_restart_read(ice_ic) endif endif - if (tr_pond_lvl) then + if (tr_pond_cesm) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR:pond_lvl restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: pond_cesm restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & string1(1:lenstr(string1)), & - restart_file(1:lenstr(restart_file)),'.pond_lvl', & + restart_file(1:lenstr(restart_file)),'.pond_cesm', & string2(1:lenstr(string2)) if (restart_ext) then call ice_open_ext(nu_restart_pond,filename,0) @@ -245,15 +241,15 @@ subroutine init_restart_read(ice_ic) endif endif - if (tr_pond_topo) then + if (tr_pond_lvl) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: pond_topo restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR:pond_lvl restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & string1(1:lenstr(string1)), & - 
restart_file(1:lenstr(restart_file)),'.pond_topo', & + restart_file(1:lenstr(restart_file)),'.pond_lvl', & string2(1:lenstr(string2)) if (restart_ext) then call ice_open_ext(nu_restart_pond,filename,0) @@ -265,22 +261,22 @@ subroutine init_restart_read(ice_ic) endif endif - if (tr_snow) then + if (tr_pond_topo) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: snow restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: pond_topo restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & string1(1:lenstr(string1)), & - restart_file(1:lenstr(restart_file)),'.snow', & + restart_file(1:lenstr(restart_file)),'.pond_topo', & string2(1:lenstr(string2)) if (restart_ext) then - call ice_open_ext(nu_restart_snow,filename,0) + call ice_open_ext(nu_restart_pond,filename,0) else - call ice_open(nu_restart_snow,filename,0) + call ice_open(nu_restart_pond,filename,0) endif - read (nu_restart_snow) iignore,rignore,rignore + read (nu_restart_pond) iignore,rignore,rignore write(nu_diag,*) 'Reading ',filename(1:lenstr(filename)) endif endif @@ -288,7 +284,7 @@ subroutine init_restart_read(ice_ic) if (tr_brine) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: brine restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: brine restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -305,10 +301,10 @@ subroutine init_restart_read(ice_ic) endif endif - if (nbtrcr > 0) then + if (solve_zsal .or. nbtrcr > 0) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: bgc restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: bgc restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -328,7 +324,7 @@ subroutine init_restart_read(ice_ic) if (tr_iso) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: iso restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: iso restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -348,7 +344,7 @@ subroutine init_restart_read(ice_ic) if (tr_aero) then if (my_task == master_task) then n = index(filename0,trim(restart_file)) - if (n == 0) call abort_ice(subname//' ERROR: aero restart: filename discrepancy') + if (n == 0) call abort_ice(subname//'ERROR: aero restart: filename discrepancy') string1 = trim(filename0(1:n-1)) string2 = trim(filename0(n+lenstr(restart_file):lenstr(filename0))) write(filename,'(a,a,a,a)') & @@ -379,8 +375,8 @@ end subroutine init_restart_read subroutine init_restart_write(filename_spec) - use ice_calendar, only: msec, mmonth, mday, myear, istep1, & - timesecs + use ice_calendar, only: sec, month, mday, nyr, istep1, & + time, time_forc, year_init use ice_communicate, only: my_task, master_task use ice_dyn_shared, only: kdyn use ice_read_write, only: ice_open, ice_open_ext @@ -390,23 +386,26 @@ subroutine init_restart_write(filename_spec) ! 
local variables logical (kind=log_kind) :: & - tr_fsd, tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & - tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow + solve_zsal, tr_fsd, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & + tr_pond_topo, tr_pond_lvl, tr_brine integer (kind=int_kind) :: & - nbtrcr ! number of bgc tracers + nbtrcr, & ! number of bgc tracers + iyear, imonth, iday ! year, month, day character(len=char_len_long) :: filename character(len=*), parameter :: subname = '(init_restart_write)' + call icepack_query_parameters( & + solve_zsal_out=solve_zsal) call icepack_query_tracer_sizes( & nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & - tr_iso_out=tr_iso, tr_aero_out=tr_aero, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & - tr_snow_out=tr_snow, tr_brine_out=tr_brine) + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -415,12 +414,16 @@ subroutine init_restart_write(filename_spec) if (present(filename_spec)) then filename = trim(filename_spec) else + iyear = nyr + year_init - 1 + imonth = month + iday = mday + write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec end if - + ! write pointer (path/file) if (my_task == master_task) then open(nu_rst_pointer,file=pointer_file) @@ -431,7 +434,7 @@ subroutine init_restart_write(filename_spec) else call ice_open(nu_dump,filename,0) endif - write(nu_dump) istep1,timesecs,time_forc,myear + write(nu_dump) istep1,time,time_forc,nyr write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -442,7 +445,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.eap.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_eap,filename,0) @@ -451,7 +454,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_eap) istep1,timesecs,time_forc + write(nu_dump_eap) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -462,7 +465,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.fsd.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_fsd,filename,0) @@ -471,7 +474,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_fsd) istep1,timesecs,time_forc + write(nu_dump_fsd) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -482,7 +485,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.FY.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_FY,filename,0) @@ -491,7 +494,7 @@ subroutine 
init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_FY) istep1,timesecs,time_forc + write(nu_dump_FY) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -502,7 +505,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.iage.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_age,filename,0) @@ -511,7 +514,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_age) istep1,timesecs,time_forc + write(nu_dump_age) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -522,7 +525,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.lvl.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_lvl,filename,0) @@ -531,18 +534,18 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_lvl) istep1,timesecs,time_forc + write(nu_dump_lvl) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif endif - if (tr_pond_lvl) then + if (tr_pond_cesm) then write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & - restart_file(1:lenstr(restart_file)),'.pond_lvl.', & - myear,'-',mmonth,'-',mday,'-',msec + restart_file(1:lenstr(restart_file)),'.pond_cesm.', & + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_pond,filename,0) @@ -551,18 +554,18 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_pond) istep1,timesecs,time_forc + write(nu_dump_pond) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif endif - if (tr_pond_topo) then + if (tr_pond_lvl) then write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & - restart_file(1:lenstr(restart_file)),'.pond_topo.', & - myear,'-',mmonth,'-',mday,'-',msec + restart_file(1:lenstr(restart_file)),'.pond_lvl.', & + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_pond,filename,0) @@ -571,27 +574,27 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_pond) istep1,timesecs,time_forc + write(nu_dump_pond) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif endif - if (tr_snow) then + if (tr_pond_topo) then write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & - restart_file(1:lenstr(restart_file)),'.snow.', & - myear,'-',mmonth,'-',mday,'-',msec + restart_file(1:lenstr(restart_file)),'.pond_topo.', & + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then - call ice_open_ext(nu_dump_snow,filename,0) + call ice_open_ext(nu_dump_pond,filename,0) else - call ice_open(nu_dump_snow,filename,0) + call ice_open(nu_dump_pond,filename,0) endif if (my_task == master_task) then - write(nu_dump_snow) istep1,timesecs,time_forc + write(nu_dump_pond) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -602,7 +605,7 @@ subroutine init_restart_write(filename_spec) 
write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.brine.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_hbrine,filename,0) @@ -611,18 +614,18 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_hbrine) istep1,timesecs,time_forc + write(nu_dump_hbrine) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif endif - if (nbtrcr > 0) then + if (solve_zsal .or. nbtrcr > 0) then write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.bgc.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_bgc,filename,0) @@ -631,7 +634,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_bgc) istep1,timesecs,time_forc + write(nu_dump_bgc) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif endif @@ -641,7 +644,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.iso.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_iso,filename,0) @@ -650,7 +653,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_iso) istep1,timesecs,time_forc + write(nu_dump_iso) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -661,7 +664,7 @@ subroutine init_restart_write(filename_spec) write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.aero.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec if (restart_ext) then call ice_open_ext(nu_dump_aero,filename,0) @@ -670,7 +673,7 @@ subroutine init_restart_write(filename_spec) endif if (my_task == master_task) then - write(nu_dump_aero) istep1,timesecs,time_forc + write(nu_dump_aero) istep1,time,time_forc write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif @@ -743,7 +746,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & endif end subroutine read_restart_field - + !======================================================================= ! Writes a single restart field. @@ -800,25 +803,27 @@ end subroutine write_restart_field subroutine final_restart() - use ice_calendar, only: istep1, timesecs + use ice_calendar, only: istep1, time, time_forc use ice_communicate, only: my_task, master_task logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & - tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow + solve_zsal, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & + tr_pond_topo, tr_pond_lvl, tr_brine integer (kind=int_kind) :: & nbtrcr ! 
number of bgc tracers character(len=*), parameter :: subname = '(final_restart)' + call icepack_query_parameters( & + solve_zsal_out=solve_zsal) call icepack_query_tracer_sizes( & nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & - tr_iso_out=tr_iso, tr_aero_out=tr_aero, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & - tr_snow_out=tr_snow, tr_brine_out=tr_brine) + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -831,35 +836,18 @@ subroutine final_restart() if (tr_iage) close(nu_dump_age) if (tr_FY) close(nu_dump_FY) if (tr_lvl) close(nu_dump_lvl) + if (tr_pond_cesm) close(nu_dump_pond) if (tr_pond_lvl) close(nu_dump_pond) if (tr_pond_topo) close(nu_dump_pond) - if (tr_snow) close(nu_dump_snow) if (tr_brine) close(nu_dump_hbrine) - if (nbtrcr > 0) close(nu_dump_bgc) + if (solve_zsal .or. nbtrcr > 0) & + close(nu_dump_bgc) - write(nu_diag,*) 'Restart read/written ',istep1,timesecs + write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc endif end subroutine final_restart -!======================================================================= - -! Inquire field existance, doesn't work in binary files so set to true and return -! author T. Craig - - logical function query_field(nu,vname) - - integer (kind=int_kind), intent(in) :: nu ! unit number - character (len=*) , intent(in) :: vname ! variable name - - ! local variables - - character(len=*), parameter :: subname = '(query_field)' - - query_field = .true. - - end function query_field - !======================================================================= end module ice_restart diff --git a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 index e9be45481..1570890b8 100644 --- a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 @@ -10,16 +10,13 @@ module ice_restart use ice_broadcast - use ice_communicate, only: my_task, master_task use ice_kinds_mod #ifdef USE_NETCDF use netcdf #endif - use ice_read_write, only: ice_check_nc use ice_restart_shared, only: & restart_ext, restart_dir, restart_file, pointer_file, & - runid, use_restart_time, lenstr, restart_coszen, restart_format, & - restart_chunksize, restart_deflate + runid, use_restart_time, lcdf64, lenstr, restart_coszen use ice_fileunits, only: nu_diag, nu_rst_pointer use ice_exit, only: abort_ice use icepack_intfc, only: icepack_query_parameters @@ -30,12 +27,9 @@ module ice_restart implicit none private public :: init_restart_write, init_restart_read, & - read_restart_field, write_restart_field, final_restart, & - query_field + read_restart_field, write_restart_field, final_restart - integer (kind=int_kind) :: ncid , & - dimid_ni, & ! 
netCDF identifiers - dimid_nj + integer (kind=int_kind) :: ncid !======================================================================= @@ -48,8 +42,9 @@ module ice_restart subroutine init_restart_read(ice_ic) - use ice_calendar, only: msec, mmonth, mday, myear, & - istep0, istep1, npt + use ice_calendar, only: sec, month, mday, nyr, istep0, istep1, & + time, time_forc, npt + use ice_communicate, only: my_task, master_task character(len=char_len_long), intent(in), optional :: ice_ic @@ -63,7 +58,7 @@ subroutine init_restart_read(ice_ic) character(len=*), parameter :: subname = '(init_restart_read)' #ifdef USE_NETCDF - if (present(ice_ic)) then + if (present(ice_ic)) then filename = trim(ice_ic) else if (my_task == master_task) then @@ -80,44 +75,29 @@ subroutine init_restart_read(ice_ic) write(nu_diag,*) 'Using restart dump=', trim(filename) status = nf90_open(trim(filename), nf90_nowrite, ncid) - call ice_check_nc(status, subname//' ERROR: open '//trim(filename), file=__FILE__, line=__LINE__) - + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: reading restart ncfile '//trim(filename)) + if (use_restart_time) then - ! for backwards compatibility, check nyr, month, and sec as well - status = nf90_get_att(ncid, nf90_global, 'istep1', istep0) - call ice_check_nc(status, subname//" ERROR: reading restart step ",file=__FILE__,line=__LINE__) - - status = nf90_get_att(ncid, nf90_global, 'myear', myear) - if (status /= nf90_noerr) then - status = nf90_get_att(ncid, nf90_global, 'nyr', myear) - call ice_check_nc(status, subname//" ERROR: reading restart year ",file=__FILE__,line=__LINE__) - endif - - status = nf90_get_att(ncid, nf90_global, 'mmonth', mmonth) - if (status /= nf90_noerr) then - status = nf90_get_att(ncid, nf90_global, 'month', mmonth) - call ice_check_nc(status, subname//" ERROR: reading restart month ",file=__FILE__,line=__LINE__) - endif - + status = nf90_get_att(ncid, nf90_global, 'istep1', istep0) + status = nf90_get_att(ncid, nf90_global, 'time', time) + status = nf90_get_att(ncid, nf90_global, 'time_forc', time_forc) + status = nf90_get_att(ncid, nf90_global, 'nyr', nyr) + if (status == nf90_noerr) then + status = nf90_get_att(ncid, nf90_global, 'month', month) status = nf90_get_att(ncid, nf90_global, 'mday', mday) - call ice_check_nc(status, subname//" ERROR: reading restart day ",file=__FILE__,line=__LINE__) - - status = nf90_get_att(ncid, nf90_global, 'msec', msec) - if (status /= nf90_noerr) then - status = nf90_get_att(ncid, nf90_global, 'sec', msec) - call ice_check_nc(status, subname//" ERROR: reading restart sec ",file=__FILE__,line=__LINE__) - endif - + status = nf90_get_att(ncid, nf90_global, 'sec', sec) + endif endif ! use namelist values if use_restart_time = F + write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc endif call broadcast_scalar(istep0,master_task) - call broadcast_scalar(myear,master_task) - call broadcast_scalar(mmonth,master_task) - call broadcast_scalar(mday,master_task) - call broadcast_scalar(msec,master_task) - + call broadcast_scalar(time,master_task) + call broadcast_scalar(time_forc,master_task) + call broadcast_scalar(nyr,master_task) + istep1 = istep0 ! 
if runid is bering then need to correct npt for istep0 @@ -125,7 +105,7 @@ subroutine init_restart_read(ice_ic) npt = npt - istep0 endif #else - call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined for '//trim(ice_ic), & + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(ice_ic), & file=__FILE__, line=__LINE__) #endif @@ -139,22 +119,23 @@ end subroutine init_restart_read subroutine init_restart_write(filename_spec) use ice_blocks, only: nghost - use ice_calendar, only: msec, mmonth, mday, myear, istep1 + use ice_calendar, only: sec, month, mday, nyr, istep1, & + time, time_forc, year_init + use ice_communicate, only: my_task, master_task use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, nslyr, & n_iso, n_aero, nblyr, n_zaero, n_algae, n_doc, & n_dic, n_don, n_fed, n_fep, nfsd use ice_arrays_column, only: oceanmixed_ice use ice_dyn_shared, only: kdyn - use ice_grid, only: grid_ice character(len=char_len_long), intent(in), optional :: filename_spec ! local variables logical (kind=log_kind) :: & - skl_bgc, z_tracers, tr_fsd, & - tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, & - tr_pond_topo, tr_pond_lvl, tr_brine, tr_snow, & + solve_zsal, skl_bgc, z_tracers, tr_fsd, & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & + tr_pond_topo, tr_pond_lvl, tr_brine, & tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & tr_bgc_Sil, tr_bgc_DMS, & tr_bgc_chl, tr_bgc_Am, & @@ -165,6 +146,7 @@ subroutine init_restart_write(filename_spec) integer (kind=int_kind) :: & k, n, & ! index nx, ny, & ! global array size + iyear, & ! year nbtrcr ! number of bgc tracers character(len=char_len_long) :: filename @@ -172,7 +154,8 @@ subroutine init_restart_write(filename_spec) integer (kind=int_kind), allocatable :: dims(:) integer (kind=int_kind) :: & - + dimid_ni, & ! netCDF identifiers + dimid_nj, & ! dimid_ncat, & ! iflag, & ! netCDF creation flag status ! status variable from netCDF routine @@ -183,14 +166,13 @@ subroutine init_restart_write(filename_spec) #ifdef USE_NETCDF call icepack_query_parameters( & - skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) + solve_zsal_out=solve_zsal, skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) call icepack_query_tracer_sizes( & nbtrcr_out=nbtrcr) call icepack_query_tracer_flags( & tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, tr_fsd_out=tr_fsd, & - tr_iso_out=tr_iso, tr_aero_out=tr_aero, & - tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, & - tr_snow_out=tr_snow, tr_brine_out=tr_brine, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & tr_bgc_chl_out=tr_bgc_chl, tr_bgc_Am_out=tr_bgc_Am, & @@ -205,10 +187,12 @@ subroutine init_restart_write(filename_spec) if (present(filename_spec)) then filename = trim(filename_spec) else + iyear = nyr + year_init - 1 + write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & restart_dir(1:lenstr(restart_dir)), & restart_file(1:lenstr(restart_file)),'.', & - myear,'-',mmonth,'-',mday,'-',msec + iyear,'-',month,'-',mday,'-',sec end if ! 
write pointer (path/file) @@ -218,31 +202,19 @@ subroutine init_restart_write(filename_spec) write(nu_rst_pointer,'(a)') filename close(nu_rst_pointer) - if (restart_format == 'cdf1') then - iflag = nf90_clobber - elseif (restart_format == 'cdf2') then - iflag = ior(nf90_clobber,nf90_64bit_offset) - elseif (restart_format == 'cdf5') then - iflag = ior(nf90_clobber,nf90_64bit_data) - elseif (restart_format == 'hdf5') then - iflag = ior(nf90_clobber,nf90_netcdf4) - else - call abort_ice(subname//' ERROR: restart_format not allowed for '//trim(restart_format), & - file=__FILE__, line=__LINE__) - endif + iflag = 0 + if (lcdf64) iflag = nf90_64bit_offset status = nf90_create(trim(filename), iflag, ncid) - call ice_check_nc(status, subname//' ERROR: creating '//trim(filename), file=__FILE__, line=__LINE__) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: creating restart ncfile '//trim(filename)) status = nf90_put_att(ncid,nf90_global,'istep1',istep1) - call ice_check_nc(status, subname//' ERROR: writing att istep', file=__FILE__, line=__LINE__) - status = nf90_put_att(ncid,nf90_global,'myear',myear) - call ice_check_nc(status, subname//' ERROR: writing att year', file=__FILE__, line=__LINE__) - status = nf90_put_att(ncid,nf90_global,'mmonth',mmonth) - call ice_check_nc(status, subname//' ERROR: writing att month', file=__FILE__, line=__LINE__) + status = nf90_put_att(ncid,nf90_global,'time',time) + status = nf90_put_att(ncid,nf90_global,'time_forc',time_forc) + status = nf90_put_att(ncid,nf90_global,'nyr',nyr) + status = nf90_put_att(ncid,nf90_global,'month',month) status = nf90_put_att(ncid,nf90_global,'mday',mday) - call ice_check_nc(status, subname//' ERROR: writing att day', file=__FILE__, line=__LINE__) - status = nf90_put_att(ncid,nf90_global,'msec',msec) - call ice_check_nc(status, subname//' ERROR: writing att sec', file=__FILE__, line=__LINE__) + status = nf90_put_att(ncid,nf90_global,'sec',sec) nx = nx_global ny = ny_global @@ -251,16 +223,13 @@ subroutine init_restart_write(filename_spec) ny = ny_global + 2*nghost endif status = nf90_def_dim(ncid,'ni',nx,dimid_ni) - call ice_check_nc(status, subname//' ERROR: writing dim ni', file=__FILE__, line=__LINE__) status = nf90_def_dim(ncid,'nj',ny,dimid_nj) - call ice_check_nc(status, subname//' ERROR: writing dim nj', file=__FILE__, line=__LINE__) status = nf90_def_dim(ncid,'ncat',ncat,dimid_ncat) - call ice_check_nc(status, subname//' ERROR: writing dim ncat', file=__FILE__, line=__LINE__) - !----------------------------------------------------------------- - ! 2D restart fields - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! 
2D restart fields + !----------------------------------------------------------------- allocate(dims(2)) @@ -269,21 +238,7 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'uvel',dims) call define_rest_field(ncid,'vvel',dims) - - if (grid_ice == 'CD') then - call define_rest_field(ncid,'uvelE',dims) - call define_rest_field(ncid,'vvelE',dims) - call define_rest_field(ncid,'uvelN',dims) - call define_rest_field(ncid,'vvelN',dims) - endif - - if (grid_ice == 'C') then - call define_rest_field(ncid,'uvelE',dims) - call define_rest_field(ncid,'vvelN',dims) - endif - if (restart_coszen) call define_rest_field(ncid,'coszen',dims) - call define_rest_field(ncid,'scale_factor',dims) call define_rest_field(ncid,'swvdr',dims) call define_rest_field(ncid,'swvdf',dims) @@ -310,18 +265,6 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'iceumask',dims) - if (grid_ice == 'CD' .or. grid_ice == 'C') then - call define_rest_field(ncid,'stresspT' ,dims) - call define_rest_field(ncid,'stressmT' ,dims) - call define_rest_field(ncid,'stress12T',dims) - call define_rest_field(ncid,'stresspU' ,dims) - call define_rest_field(ncid,'stressmU' ,dims) - call define_rest_field(ncid,'stress12U',dims) - call define_rest_field(ncid,'icenmask',dims) - call define_rest_field(ncid,'iceemask',dims) - endif - - if (oceanmixed_ice) then call define_rest_field(ncid,'sst',dims) call define_rest_field(ncid,'frzmlt',dims) @@ -381,11 +324,11 @@ subroutine init_restart_write(filename_spec) enddo endif if (tr_bgc_Fe ) then - do k=1,n_fed + do k=1,n_fed write(nchar,'(i3.3)') k call define_rest_field(ncid,'fed'//trim(nchar),dims) enddo - do k=1,n_fep + do k=1,n_fep write(nchar,'(i3.3)') k call define_rest_field(ncid,'fep'//trim(nchar),dims) enddo @@ -398,11 +341,13 @@ subroutine init_restart_write(filename_spec) endif endif !nbtrcr + if (solve_zsal) call define_rest_field(ncid,'sss',dims) + deallocate(dims) - !----------------------------------------------------------------- - ! 3D restart fields (ncat) - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! 3D restart fields (ncat) + !----------------------------------------------------------------- allocate(dims(3)) @@ -428,6 +373,11 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'vlvl',dims) end if + if (tr_pond_cesm) then + call define_rest_field(ncid,'apnd',dims) + call define_rest_field(ncid,'hpnd',dims) + end if + if (tr_pond_topo) then call define_rest_field(ncid,'apnd',dims) call define_rest_field(ncid,'hpnd',dims) @@ -487,26 +437,28 @@ subroutine init_restart_write(filename_spec) if (tr_bgc_PON) & call define_rest_field(ncid,'bgc_PON' ,dims) if (tr_bgc_DON) then - do k = 1, n_don + do k = 1, n_don write(nchar,'(i3.3)') k call define_rest_field(ncid,'bgc_DON'//trim(nchar) ,dims) enddo endif if (tr_bgc_Fe ) then - do k = 1, n_fed + do k = 1, n_fed write(nchar,'(i3.3)') k call define_rest_field(ncid,'bgc_Fed'//trim(nchar) ,dims) enddo - do k = 1, n_fep + do k = 1, n_fep write(nchar,'(i3.3)') k call define_rest_field(ncid,'bgc_Fep'//trim(nchar) ,dims) enddo endif endif !skl_bgc + if (solve_zsal) & + call define_rest_field(ncid,'Rayleigh',dims) - !----------------------------------------------------------------- - ! 4D restart fields, written as layers of 3D - !----------------------------------------------------------------- + !----------------------------------------------------------------- + ! 
4D restart fields, written as layers of 3D + !----------------------------------------------------------------- do k=1,nilyr write(nchar,'(i3.3)') k @@ -519,16 +471,6 @@ subroutine init_restart_write(filename_spec) call define_rest_field(ncid,'qsno'//trim(nchar),dims) enddo - if (tr_snow) then - do k=1,nslyr - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'smice'//trim(nchar),dims) - call define_rest_field(ncid,'smliq'//trim(nchar),dims) - call define_rest_field(ncid, 'rhos'//trim(nchar),dims) - call define_rest_field(ncid, 'rsnw'//trim(nchar),dims) - enddo - endif - if (tr_fsd) then do k=1,nfsd write(nchar,'(i3.3)') k @@ -554,119 +496,126 @@ subroutine init_restart_write(filename_spec) enddo endif + if (solve_zsal) then + do k = 1, nblyr + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'zSalinity'//trim(nchar),dims) + enddo + endif + if (z_tracers) then if (tr_zaero) then - do n = 1, n_zaero - write(ncharb,'(i3.3)') n - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'zaero'//trim(ncharb)//trim(nchar),dims) - enddo !k - enddo !n + do n = 1, n_zaero + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'zaero'//trim(ncharb)//trim(nchar),dims) + enddo !k + enddo !n endif !tr_zaero if (tr_bgc_Nit) then - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_Nit'//trim(nchar),dims) - enddo + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_Nit'//trim(nchar),dims) + enddo endif if (tr_bgc_N) then - do n = 1, n_algae - write(ncharb,'(i3.3)') n - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_N'//trim(ncharb)//trim(nchar),dims) - enddo - enddo + do n = 1, n_algae + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_N'//trim(ncharb)//trim(nchar),dims) + enddo + enddo endif if (tr_bgc_C) then - ! do n = 1, n_algae - ! write(ncharb,'(i3.3)') n - ! do k = 1, nblyr+3 - ! write(nchar,'(i3.3)') k - ! call define_rest_field(ncid,'bgc_C'//trim(ncharb)//trim(nchar),dims) - ! enddo - ! enddo - do n = 1, n_doc - write(ncharb,'(i3.3)') n - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_DOC'//trim(ncharb)//trim(nchar),dims) - enddo - enddo - do n = 1, n_dic - write(ncharb,'(i3.3)') n - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_DIC'//trim(ncharb)//trim(nchar),dims) - enddo - enddo + ! do n = 1, n_algae + ! write(ncharb,'(i3.3)') n + ! do k = 1, nblyr+3 + ! write(nchar,'(i3.3)') k + ! call define_rest_field(ncid,'bgc_C'//trim(ncharb)//trim(nchar),dims) + ! enddo + ! 
enddo + do n = 1, n_doc + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_DOC'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + do n = 1, n_dic + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_DIC'//trim(ncharb)//trim(nchar),dims) + enddo + enddo endif if (tr_bgc_chl) then - do n = 1, n_algae - write(ncharb,'(i3.3)') n - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_chl'//trim(ncharb)//trim(nchar),dims) - enddo - enddo + do n = 1, n_algae + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_chl'//trim(ncharb)//trim(nchar),dims) + enddo + enddo endif if (tr_bgc_Am) then - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_Am'//trim(nchar),dims) - enddo + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_Am'//trim(nchar),dims) + enddo endif if (tr_bgc_Sil) then - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_Sil'//trim(nchar),dims) - enddo + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_Sil'//trim(nchar),dims) + enddo endif if (tr_bgc_hum) then - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_hum'//trim(nchar),dims) - enddo + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_hum'//trim(nchar),dims) + enddo endif if (tr_bgc_DMS) then - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_DMSPp'//trim(nchar),dims) - call define_rest_field(ncid,'bgc_DMSPd'//trim(nchar),dims) - call define_rest_field(ncid,'bgc_DMS'//trim(nchar),dims) - enddo + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_DMSPp'//trim(nchar),dims) + call define_rest_field(ncid,'bgc_DMSPd'//trim(nchar),dims) + call define_rest_field(ncid,'bgc_DMS'//trim(nchar),dims) + enddo endif if (tr_bgc_PON) then - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_PON'//trim(nchar),dims) - enddo + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_PON'//trim(nchar),dims) + enddo endif if (tr_bgc_DON) then - do n = 1, n_don - write(ncharb,'(i3.3)') n - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_DON'//trim(ncharb)//trim(nchar),dims) - enddo - enddo + do n = 1, n_don + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_DON'//trim(ncharb)//trim(nchar),dims) + enddo + enddo endif if (tr_bgc_Fe ) then - do n = 1, n_fed - write(ncharb,'(i3.3)') n - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_Fed'//trim(ncharb)//trim(nchar),dims) - enddo - enddo - do n = 1, n_fep - write(ncharb,'(i3.3)') n - do k = 1, nblyr+3 - write(nchar,'(i3.3)') k - call define_rest_field(ncid,'bgc_Fep'//trim(ncharb)//trim(nchar),dims) - enddo - enddo + do n = 1, n_fed + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_Fed'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + do n = 1, n_fep + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(ncid,'bgc_Fep'//trim(ncharb)//trim(nchar),dims) + enddo + enddo endif do k = 1, nbtrcr write(nchar,'(i3.3)') k @@ -676,13 +625,12 @@ subroutine init_restart_write(filename_spec) deallocate(dims) status = nf90_enddef(ncid) 
- call ice_check_nc(status, subname//' ERROR: enddef', file=__FILE__, line=__LINE__) write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) endif ! master_task #else - call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined for '//trim(filename_spec), & + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(filename_spec), & file=__FILE__, line=__LINE__) #endif @@ -701,79 +649,79 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & use ice_read_write, only: ice_read_nc integer (kind=int_kind), intent(in) :: & - nu , & ! unit number (not used for netcdf) - ndim3 , & ! third dimension - nrec ! record number (0 for sequential access) + nu , & ! unit number (not used for netcdf) + ndim3 , & ! third dimension + nrec ! record number (0 for sequential access) real (kind=dbl_kind), dimension(nx_block,ny_block,ndim3,max_blocks), intent(inout) :: & - work ! input array (real, 8-byte) + work ! input array (real, 8-byte) character (len=4), intent(in) :: & - atype ! format for output array - ! (real/integer, 4-byte/8-byte) + atype ! format for output array + ! (real/integer, 4-byte/8-byte) logical (kind=log_kind), intent(in) :: & - diag ! if true, write diagnostic output + diag ! if true, write diagnostic output character (len=*), intent(in) :: vname integer (kind=int_kind), optional, intent(in) :: & - field_loc, & ! location of field on staggered grid - field_type ! type of field (scalar, vector, angle) + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) ! local variables real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & - work2 ! input array (real, 8-byte) + work2 ! input array (real, 8-byte) character(len=*), parameter :: subname = '(read_restart_field)' #ifdef USE_NETCDF - if (present(field_loc)) then - if (ndim3 == ncat) then - if (restart_ext) then - call ice_read_nc(ncid,1,vname,work,diag, & - field_loc=field_loc,field_type=field_type,restart_ext=restart_ext) - else - call ice_read_nc(ncid,1,vname,work,diag,field_loc,field_type) - endif - elseif (ndim3 == 1) then - if (restart_ext) then - call ice_read_nc(ncid,1,vname,work2,diag, & - field_loc=field_loc,field_type=field_type,restart_ext=restart_ext) + if (present(field_loc)) then + if (ndim3 == ncat) then + if (restart_ext) then + call ice_read_nc(ncid,1,vname,work,diag, & + field_loc=field_loc,field_type=field_type,restart_ext=restart_ext) + else + call ice_read_nc(ncid,1,vname,work,diag,field_loc,field_type) + endif + elseif (ndim3 == 1) then + if (restart_ext) then + call ice_read_nc(ncid,1,vname,work2,diag, & + field_loc=field_loc,field_type=field_type,restart_ext=restart_ext) + else + call ice_read_nc(ncid,1,vname,work2,diag,field_loc,field_type) + endif + work(:,:,1,:) = work2(:,:,:) else - call ice_read_nc(ncid,1,vname,work2,diag,field_loc,field_type) + write(nu_diag,*) 'ndim3 not supported ',ndim3 endif - work(:,:,1,:) = work2(:,:,:) else - write(nu_diag,*) 'ndim3 not supported ',ndim3 - endif - else - if (ndim3 == ncat) then - if (restart_ext) then - call ice_read_nc(ncid, 1, vname, work, diag, restart_ext=restart_ext) - else - call ice_read_nc(ncid, 1, vname, work, diag) - endif - elseif (ndim3 == 1) then - if (restart_ext) then - call ice_read_nc(ncid, 1, vname, work2, diag, restart_ext=restart_ext) + if (ndim3 == ncat) then + if (restart_ext) then + call ice_read_nc(ncid, 1, vname, work, diag, restart_ext=restart_ext) + else + call ice_read_nc(ncid, 1, vname, work, diag) + endif + elseif (ndim3 == 1) then + if (restart_ext) 
then + call ice_read_nc(ncid, 1, vname, work2, diag, restart_ext=restart_ext) + else + call ice_read_nc(ncid, 1, vname, work2, diag) + endif + work(:,:,1,:) = work2(:,:,:) else - call ice_read_nc(ncid, 1, vname, work2, diag) + write(nu_diag,*) 'ndim3 not supported ',ndim3 endif - work(:,:,1,:) = work2(:,:,:) - else - write(nu_diag,*) 'ndim3 not supported ',ndim3 endif - endif #else - call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) #endif end subroutine read_restart_field - + !======================================================================= ! Writes a single restart field. @@ -786,59 +734,54 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) use ice_read_write, only: ice_write_nc integer (kind=int_kind), intent(in) :: & - nu , & ! unit number - ndim3 , & ! third dimension - nrec ! record number (0 for sequential access) + nu , & ! unit number + ndim3 , & ! third dimension + nrec ! record number (0 for sequential access) real (kind=dbl_kind), dimension(nx_block,ny_block,ndim3,max_blocks), intent(in) :: & - work ! input array (real, 8-byte) + work ! input array (real, 8-byte) character (len=4), intent(in) :: & - atype ! format for output array - ! (real/integer, 4-byte/8-byte) + atype ! format for output array + ! (real/integer, 4-byte/8-byte) logical (kind=log_kind), intent(in) :: & - diag ! if true, write diagnostic output + diag ! if true, write diagnostic output character (len=*), intent(in) :: vname ! local variables integer (kind=int_kind) :: & - varid , & ! variable id - status ! status variable from netCDF routine + varid, & ! variable id + status ! status variable from netCDF routine real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & - work2 ! input array (real, 8-byte) + work2 ! input array (real, 8-byte) character(len=*), parameter :: subname = '(write_restart_field)' #ifdef USE_NETCDF - varid = -99 - if (my_task == master_task) then - ! 
ncid is only valid on master status = nf90_inq_varid(ncid,trim(vname),varid) - call ice_check_nc(status, subname//' ERROR: inq varid '//trim(vname), file=__FILE__, line=__LINE__) - endif - if (ndim3 == ncat) then - if (restart_ext) then - call ice_write_nc(ncid, 1, varid, work, diag, restart_ext, varname=trim(vname)) - else - call ice_write_nc(ncid, 1, varid, work, diag, varname=trim(vname)) - endif - elseif (ndim3 == 1) then - work2(:,:,:) = work(:,:,1,:) - if (restart_ext) then - call ice_write_nc(ncid, 1, varid, work2, diag, restart_ext, varname=trim(vname)) + if (ndim3 == ncat) then + if (restart_ext) then + call ice_write_nc(ncid, 1, varid, work, diag, restart_ext, varname=trim(vname)) + else + call ice_write_nc(ncid, 1, varid, work, diag, varname=trim(vname)) + endif + elseif (ndim3 == 1) then + work2(:,:,:) = work(:,:,1,:) + if (restart_ext) then + call ice_write_nc(ncid, 1, varid, work2, diag, restart_ext, varname=trim(vname)) + else + call ice_write_nc(ncid, 1, varid, work2, diag, varname=trim(vname)) + endif else - call ice_write_nc(ncid, 1, varid, work2, diag, varname=trim(vname)) + write(nu_diag,*) 'ndim3 not supported',ndim3 endif - else - write(nu_diag,*) 'ndim3 not supported',ndim3 - endif #else - call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) #endif @@ -851,22 +794,21 @@ end subroutine write_restart_field subroutine final_restart() - use ice_calendar, only: istep1, myear, mmonth, mday, msec + use ice_calendar, only: istep1, time, time_forc + use ice_communicate, only: my_task, master_task integer (kind=int_kind) :: status character(len=*), parameter :: subname = '(final_restart)' #ifdef USE_NETCDF - if (my_task == master_task) then - ! ncid is only valid on master - status = nf90_close(ncid) - call ice_check_nc(status, subname//' ERROR: closing', file=__FILE__, line=__LINE__) - write(nu_diag,'(a,i8,4x,i4.4,a,i2.2,a,i2.2,a,i5.5)') & - 'Restart read/written ',istep1,myear,'-',mmonth,'-',mday,'-',msec - endif + status = nf90_close(ncid) + + if (my_task == master_task) & + write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc + #else - call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) #endif @@ -885,68 +827,20 @@ subroutine define_rest_field(ncid, vname, dims) integer (kind=int_kind) :: varid - integer (kind=int_kind) :: chunks(size(dims)), status, i + integer (kind=int_kind) :: & + status ! status variable from netCDF routine character(len=*), parameter :: subname = '(define_rest_field)' #ifdef USE_NETCDF - status = nf90_def_var(ncid,trim(vname),nf90_double,dims,varid) - call ice_check_nc(status, subname//' ERROR: def var '//trim(vname), file=__FILE__, line=__LINE__) - - if (restart_format=='hdf5' .and. size(dims)>1) then - if (dims(1)==dimid_ni .and. dims(2)==dimid_nj) then - chunks(1)=restart_chunksize(1) - chunks(2)=restart_chunksize(2) - do i = 3, size(dims) - chunks(i) = 0 - enddo - status = nf90_def_var_chunking(ncid, varid, NF90_CHUNKED, chunksizes=chunks) - call ice_check_nc(status, subname//' ERROR: chunking var '//trim(vname), file=__FILE__, line=__LINE__) - endif - endif - - if (restart_format=='hdf5' .and. 
restart_deflate/=0) then - status=nf90_def_var_deflate(ncid, varid, shuffle=0, deflate=1, deflate_level=restart_deflate) - call ice_check_nc(status, subname//' ERROR deflating var '//trim(vname), file=__FILE__, line=__LINE__) - endif - #else - call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & file=__FILE__, line=__LINE__) #endif - + end subroutine define_rest_field -!======================================================================= - -! Inquire field existance -! author T. Craig - - logical function query_field(nu,vname) - - integer (kind=int_kind), intent(in) :: nu ! unit number - character (len=*) , intent(in) :: vname ! variable name - - ! local variables - - integer (kind=int_kind) :: status, varid - character(len=*), parameter :: subname = '(query_field)' - - query_field = .false. -#ifdef USE_NETCDF - if (my_task == master_task) then - status = nf90_inq_varid(ncid,trim(vname),varid) - if (status == nf90_noerr) query_field = .true. - endif - call broadcast_scalar(query_field,master_task) -#else - call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & - file=__FILE__, line=__LINE__) -#endif - - end function query_field - !======================================================================= end module ice_restart diff --git a/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 new file mode 100644 index 000000000..c3dc83a24 --- /dev/null +++ b/cicecore/cicedynB/dynamics/ice_dyn_shared.F90 @@ -0,0 +1,998 @@ +!======================================================================= + +! Elastic-viscous-plastic sea ice dynamics model code shared with other +! approaches +! +! author: Elizabeth C. Hunke, LANL +! +! 2013: Split from ice_dyn_evp.F90 by Elizabeth Hunke + + module ice_dyn_shared + + use ice_kinds_mod + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, c1, c2, p01, p001 + use ice_constants, only: omega, spval_dbl, p5, c4 + use ice_blocks, only: nx_block, ny_block + use ice_domain_size, only: max_blocks + use ice_fileunits, only: nu_diag + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters + + implicit none + private + public :: init_evp, set_evp_parameters, stepu, principal_stress, & + dyn_prep1, dyn_prep2, dyn_finish, basal_stress_coeff, & + alloc_dyn_shared + + ! namelist parameters + + integer (kind=int_kind), public :: & + kdyn , & ! type of dynamics ( -1, 0 = off, 1 = evp, 2 = eap ) + kridge , & ! set to "-1" to turn off ridging + ktransport , & ! set to "-1" to turn off transport + ndte ! number of subcycles: ndte=dt/dte + + character (len=char_len), public :: & + coriolis , & ! 'constant', 'zero', or 'latitude' + ssh_stress ! 'geostrophic' or 'coupled' + + logical (kind=log_kind), public :: & + revised_evp ! if true, use revised evp procedure + + integer (kind=int_kind), public :: & + kevp_kernel ! 0 = 2D org version + ! 1 = 1D representation raw (not implemented) + ! 2 = 1D + calculate distances inline (implemented) + ! 3 = 1D + calculate distances inline + real*4 internal (not implemented yet) + ! other EVP parameters + + character (len=char_len), public :: & + yield_curve ! 'ellipse' ('teardrop' needs further testing) + ! + real (kind=dbl_kind), parameter, public :: & + eyc = 0.36_dbl_kind, & + ! coefficient for calculating the parameter E + cosw = c1 , & ! cos(ocean turning angle) ! 
turning angle = 0 + sinw = c0 , & ! sin(ocean turning angle) ! turning angle = 0 + a_min = p001, & ! minimum ice area + m_min = p01 ! minimum ice mass (kg/m^2) + + real (kind=dbl_kind), public :: & + revp , & ! 0 for classic EVP, 1 for revised EVP + e_ratio , & ! e = EVP ellipse aspect ratio + ecci , & ! 1/e^2 + dtei , & ! 1/dte, where dte is subcycling timestep (1/s) +! dte2T , & ! dte/2T + denom1 ! constants for stress equation + + real (kind=dbl_kind), public :: & ! Bouillon et al relaxation constants + arlx , & ! alpha for stressp + arlx1i , & ! (inverse of alpha) for stressp + brlx ! beta for momentum + + real (kind=dbl_kind), allocatable, public :: & + fcor_blk(:,:,:) ! Coriolis parameter (1/s) + + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & + uvel_init, & ! x-component of velocity (m/s), beginning of timestep + vvel_init ! y-component of velocity (m/s), beginning of timestep + + ! ice isotropic tensile strength parameter + real (kind=dbl_kind), public :: & + Ktens ! T=Ktens*P (tensile strength: see Konig and Holland, 2010) + + logical (kind=log_kind), public :: & + basalstress ! if true, basal stress for landfast on + + ! basal stress parameters + real (kind=dbl_kind), public :: & + k1, & ! 1st free parameter for landfast parameterization + k2, & ! second free parameter (N/m^3) for landfast parametrization + alphab, & ! alphab=Cb factor in Lemieux et al 2015 + threshold_hw ! max water depth for grounding + ! see keel data from Amundrud et al. 2004 (JGR) + + +!======================================================================= + + contains + +!======================================================================= +! +! Allocate space for all variables +! + subroutine alloc_dyn_shared + + integer (int_kind) :: ierr + + allocate( & + uvel_init (nx_block,ny_block,max_blocks), & ! x-component of velocity (m/s), beginning of timestep + vvel_init (nx_block,ny_block,max_blocks), & ! y-component of velocity (m/s), beginning of timestep + stat=ierr) + if (ierr/=0) call abort_ice('(alloc_dyn_shared): Out of memory') + + end subroutine alloc_dyn_shared + +!======================================================================= + +! Initialize parameters and variables needed for the evp dynamics +! author: Elizabeth C. Hunke, LANL + + subroutine init_evp (dt) + + use ice_blocks, only: nx_block, ny_block + use ice_domain, only: nblocks + use ice_domain_size, only: max_blocks + use ice_flux, only: rdg_conv, rdg_shear, iceumask, & + stressp_1, stressp_2, stressp_3, stressp_4, & + stressm_1, stressm_2, stressm_3, stressm_4, & + stress12_1, stress12_2, stress12_3, stress12_4 + use ice_state, only: uvel, vvel, divu, shear + use ice_grid, only: ULAT + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + ! local variables + + integer (kind=int_kind) :: & + i, j, & + iblk ! block index + + character(len=*), parameter :: subname = '(init_evp)' + + call set_evp_parameters (dt) + + if (my_task == master_task) then + write(nu_diag,*) 'dt = ',dt + write(nu_diag,*) 'dte = ',dt/real(ndte,kind=dbl_kind) + write(nu_diag,*) 'tdamp =', eyc*dt + endif + + allocate(fcor_blk(nx_block,ny_block,max_blocks)) + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + + ! velocity + uvel(i,j,iblk) = c0 ! m/s + vvel(i,j,iblk) = c0 ! m/s + + ! strain rates + divu (i,j,iblk) = c0 + shear(i,j,iblk) = c0 + rdg_conv (i,j,iblk) = c0 + rdg_shear(i,j,iblk) = c0 + + ! 
Coriolis parameter + if (trim(coriolis) == 'constant') then + fcor_blk(i,j,iblk) = 1.46e-4_dbl_kind ! Hibler 1979, N. Hem; 1/s + else if (trim(coriolis) == 'zero') then + fcor_blk(i,j,iblk) = 0.0 + else + fcor_blk(i,j,iblk) = c2*omega*sin(ULAT(i,j,iblk)) ! 1/s + endif + + ! stress tensor, kg/s^2 + stressp_1 (i,j,iblk) = c0 + stressp_2 (i,j,iblk) = c0 + stressp_3 (i,j,iblk) = c0 + stressp_4 (i,j,iblk) = c0 + stressm_1 (i,j,iblk) = c0 + stressm_2 (i,j,iblk) = c0 + stressm_3 (i,j,iblk) = c0 + stressm_4 (i,j,iblk) = c0 + stress12_1(i,j,iblk) = c0 + stress12_2(i,j,iblk) = c0 + stress12_3(i,j,iblk) = c0 + stress12_4(i,j,iblk) = c0 + + ! ice extent mask on velocity points + iceumask(i,j,iblk) = .false. + + enddo ! i + enddo ! j + enddo ! iblk + !$OMP END PARALLEL DO + + end subroutine init_evp + +!======================================================================= + +! Set parameters needed for the evp dynamics. +! Note: This subroutine is currently called only during initialization. +! If the dynamics time step can vary during runtime, it should +! be called whenever the time step changes. +! +! author: Elizabeth C. Hunke, LANL + + subroutine set_evp_parameters (dt) + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + ! local variables + + !real (kind=dbl_kind) :: & + !dte , & ! subcycling timestep for EVP dynamics, s + !ecc , & ! (ratio of major to minor ellipse axes)^2 + !tdamp2 ! 2*(wave damping time scale T) + + character(len=*), parameter :: subname = '(set_evp_parameters)' + + ! elastic time step + !dte = dt/real(ndte,kind=dbl_kind) ! s + !dtei = c1/dte ! 1/s + dtei = real(ndte,kind=dbl_kind)/dt + + ! major/minor axis length ratio, squared + !ecc = e_ratio**2 + !ecci = c1/ecc ! 1/ecc + ecci = c1/e_ratio**2 ! 1/ecc + + ! constants for stress equation + !tdamp2 = c2*eyc*dt ! s + !dte2T = dte/tdamp2 or c1/(c2*eyc*real(ndte,kind=dbl_kind)) ! ellipse (unitless) + + if (revised_evp) then ! Bouillon et al, Ocean Mod 2013 + revp = c1 + denom1 = c1 + arlx1i = c1/arlx + else ! Hunke, JCP 2013 with modified stress eq + revp = c0 + !arlx1i = dte2T + !arlx = c1/arlx1i + !brlx = dt*dtei + arlx = c2*eyc*real(ndte,kind=dbl_kind) + arlx1i = c1/arlx + brlx = real(ndte,kind=dbl_kind) + denom1 = c1/(c1+arlx1i) + endif + if (my_task == master_task) then + write (nu_diag,*) 'arlx, arlxi, brlx, denom1', & + arlx, arlx1i, brlx, denom1 + endif + + end subroutine set_evp_parameters + +!======================================================================= + +! Computes quantities needed in the stress tensor (sigma) +! and momentum (u) equations, but which do not change during +! the thermodynamics/transport time step: +! ice mass and ice extent masks +! +! author: Elizabeth C. Hunke, LANL + + subroutine dyn_prep1 (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + aice, vice, & + vsno, tmask, & + strairxT, strairyT, & + strairx, strairy, & + tmass, icetmask) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + aice , & ! concentration of ice + vice , & ! volume per unit area of ice (m) + vsno , & ! volume per unit area of snow (m) + strairxT, & ! stress on ice by air, x-direction + strairyT ! stress on ice by air, y-direction + + logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & + tmask ! land/boundary mask, thickness (T-cell) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & + strairx , & ! 
stress on ice by air, x-direction + strairy , & ! stress on ice by air, y-direction + tmass ! total mass of ice and snow (kg/m^2) + + integer (kind=int_kind), dimension (nx_block,ny_block), intent(out) :: & + icetmask ! ice extent mask (T-cell) + + ! local variables + + integer (kind=int_kind) :: & + i, j + + real (kind=dbl_kind) :: & + rhoi, rhos + + logical (kind=log_kind), dimension(nx_block,ny_block) :: & + tmphm ! temporary mask + + character(len=*), parameter :: subname = '(dyn_prep1)' + + call icepack_query_parameters(rhos_out=rhos, rhoi_out=rhoi) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + do j = 1, ny_block + do i = 1, nx_block + + !----------------------------------------------------------------- + ! total mass of ice and snow, centered in T-cell + ! NOTE: vice and vsno must be up to date in all grid cells, + ! including ghost cells + !----------------------------------------------------------------- + if (tmask(i,j)) then + tmass(i,j) = (rhoi*vice(i,j) + rhos*vsno(i,j)) ! kg/m^2 + else + tmass(i,j) = c0 + endif + + !----------------------------------------------------------------- + ! ice extent mask (T-cells) + !----------------------------------------------------------------- + tmphm(i,j) = tmask(i,j) .and. (aice (i,j) > a_min) & + .and. (tmass(i,j) > m_min) + + !----------------------------------------------------------------- + ! prep to convert to U grid + !----------------------------------------------------------------- + ! these quantities include the factor of aice needed for + ! correct treatment of free drift + strairx(i,j) = strairxT(i,j) + strairy(i,j) = strairyT(i,j) + + !----------------------------------------------------------------- + ! augmented mask (land + open ocean) + !----------------------------------------------------------------- + icetmask (i,j) = 0 + + enddo + enddo + + do j = jlo, jhi + do i = ilo, ihi + + ! extend ice extent mask (T-cells) to points around pack + if (tmphm(i-1,j+1) .or. tmphm(i,j+1) .or. tmphm(i+1,j+1) .or. & + tmphm(i-1,j) .or. tmphm(i,j) .or. tmphm(i+1,j) .or. & + tmphm(i-1,j-1) .or. tmphm(i,j-1) .or. tmphm(i+1,j-1) ) then + icetmask(i,j) = 1 + endif + + if (.not.tmask(i,j)) icetmask(i,j) = 0 + + enddo + enddo + + end subroutine dyn_prep1 + +!======================================================================= +! Computes quantities needed in the stress tensor (sigma) +! and momentum (u) equations, but which do not change during +! the thermodynamics/transport time step: +! --wind stress shift to U grid, +! --ice mass and ice extent masks, +! initializes ice velocity for new points to ocean sfc current +! +! author: Elizabeth C. Hunke, LANL + + subroutine dyn_prep2 (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + icellt, icellu, & + indxti, indxtj, & + indxui, indxuj, & + aiu, umass, & + umassdti, fcor, & + umask, & + uocn, vocn, & + strairx, strairy, & + ss_tltx, ss_tlty, & + icetmask, iceumask, & + fm, dt, & + strtltx, strtlty, & + strocnx, strocny, & + strintx, strinty, & + taubx, tauby, & + waterx, watery, & + forcex, forcey, & + stressp_1, stressp_2, & + stressp_3, stressp_4, & + stressm_1, stressm_2, & + stressm_3, stressm_4, & + stress12_1, stress12_2, & + stress12_3, stress12_4, & + uvel_init, vvel_init, & + uvel, vvel, & + Tbu) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + ilo,ihi,jlo,jhi ! 
beginning and end of physical domain + + integer (kind=int_kind), intent(out) :: & + icellt , & ! no. of cells where icetmask = 1 + icellu ! no. of cells where iceumask = 1 + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(out) :: & + indxti , & ! compressed index in i-direction + indxtj , & ! compressed index in j-direction + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & + umask ! land/boundary mask, thickness (U-cell) + + integer (kind=int_kind), dimension (nx_block,ny_block), intent(in) :: & + icetmask ! ice extent mask (T-cell) + + logical (kind=log_kind), dimension (nx_block,ny_block), intent(inout) :: & + iceumask ! ice extent mask (U-cell) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + aiu , & ! ice fraction on u-grid + umass , & ! total mass of ice and snow (u grid) + fcor , & ! Coriolis parameter (1/s) + strairx , & ! stress on ice by air, x-direction + strairy , & ! stress on ice by air, y-direction + uocn , & ! ocean current, x-direction (m/s) + vocn , & ! ocean current, y-direction (m/s) + ss_tltx , & ! sea surface slope, x-direction (m/m) + ss_tlty ! sea surface slope, y-direction + + real (kind=dbl_kind), intent(in) :: & + dt ! time step + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & + Tbu, & ! coefficient for basal stress (N/m^2) + uvel_init,& ! x-component of velocity (m/s), beginning of time step + vvel_init,& ! y-component of velocity (m/s), beginning of time step + umassdti, & ! mass of U-cell/dt (kg/m^2 s) + waterx , & ! for ocean stress calculation, x (m/s) + watery , & ! for ocean stress calculation, y (m/s) + forcex , & ! work array: combined atm stress and ocn tilt, x + forcey ! work array: combined atm stress and ocn tilt, y + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + fm , & ! Coriolis param. * mass in U-cell (kg/s) + stressp_1, stressp_2, stressp_3, stressp_4 , & ! sigma11+sigma22 + stressm_1, stressm_2, stressm_3, stressm_4 , & ! sigma11-sigma22 + stress12_1,stress12_2,stress12_3,stress12_4, & ! sigma12 + uvel , & ! x-component of velocity (m/s) + vvel , & ! y-component of velocity (m/s) + strtltx , & ! stress due to sea surface slope, x-direction + strtlty , & ! stress due to sea surface slope, y-direction + strocnx , & ! ice-ocean stress, x-direction + strocny , & ! ice-ocean stress, y-direction + strintx , & ! divergence of internal ice stress, x (N/m^2) + strinty , & ! divergence of internal ice stress, y (N/m^2) + taubx , & ! basal stress, x-direction (N/m^2) + tauby ! basal stress, y-direction (N/m^2) + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind) :: gravit + + logical (kind=log_kind), dimension(nx_block,ny_block) :: & + iceumask_old ! old-time iceumask + + character(len=*), parameter :: subname = '(dyn_prep2)' + + !----------------------------------------------------------------- + ! 
Initialize + !----------------------------------------------------------------- + + do j = 1, ny_block + do i = 1, nx_block + waterx (i,j) = c0 + watery (i,j) = c0 + forcex (i,j) = c0 + forcey (i,j) = c0 + umassdti (i,j) = c0 + Tbu (i,j) = c0 + taubx (i,j) = c0 + tauby (i,j) = c0 + + if (icetmask(i,j)==0) then + stressp_1 (i,j) = c0 + stressp_2 (i,j) = c0 + stressp_3 (i,j) = c0 + stressp_4 (i,j) = c0 + stressm_1 (i,j) = c0 + stressm_2 (i,j) = c0 + stressm_3 (i,j) = c0 + stressm_4 (i,j) = c0 + stress12_1(i,j) = c0 + stress12_2(i,j) = c0 + stress12_3(i,j) = c0 + stress12_4(i,j) = c0 + endif + enddo ! i + enddo ! j + + !----------------------------------------------------------------- + ! Identify cells where icetmask = 1 + ! Note: The icellt mask includes north and east ghost cells + ! where stresses are needed. + !----------------------------------------------------------------- + + icellt = 0 + do j = jlo, jhi+1 + do i = ilo, ihi+1 + if (icetmask(i,j) == 1) then + icellt = icellt + 1 + indxti(icellt) = i + indxtj(icellt) = j + endif + enddo + enddo + + !----------------------------------------------------------------- + ! Define iceumask + ! Identify cells where iceumask is true + ! Initialize velocity where needed + !----------------------------------------------------------------- + + icellu = 0 + do j = jlo, jhi + do i = ilo, ihi + + ! ice extent mask (U-cells) + iceumask_old(i,j) = iceumask(i,j) ! save + iceumask(i,j) = (umask(i,j)) .and. (aiu (i,j) > a_min) & + .and. (umass(i,j) > m_min) + + if (iceumask(i,j)) then + icellu = icellu + 1 + indxui(icellu) = i + indxuj(icellu) = j + + ! initialize velocity for new ice points to ocean sfc current + if (.not. iceumask_old(i,j)) then + uvel(i,j) = uocn(i,j) + vvel(i,j) = vocn(i,j) + endif + else + ! set velocity and stresses to zero for masked-out points + uvel(i,j) = c0 + vvel(i,j) = c0 + strintx(i,j) = c0 + strinty(i,j) = c0 + strocnx(i,j) = c0 + strocny(i,j) = c0 + endif + + uvel_init(i,j) = uvel(i,j) + vvel_init(i,j) = vvel(i,j) + enddo + enddo + + !----------------------------------------------------------------- + ! Define variables for momentum equation + !----------------------------------------------------------------- + + if (trim(ssh_stress) == 'coupled') then + call icepack_query_parameters(gravit_out=gravit) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif + + do ij = 1, icellu + i = indxui(ij) + j = indxuj(ij) + + umassdti(i,j) = umass(i,j)/dt ! kg/m^2 s + + fm(i,j) = fcor(i,j)*umass(i,j) ! Coriolis * mass + + ! for ocean stress + waterx(i,j) = uocn(i,j)*cosw - vocn(i,j)*sinw*sign(c1,fm(i,j)) + watery(i,j) = vocn(i,j)*cosw + uocn(i,j)*sinw*sign(c1,fm(i,j)) + + ! combine tilt with wind stress + if (trim(ssh_stress) == 'geostrophic') then + ! calculate tilt from geostrophic currents if needed + strtltx(i,j) = -fm(i,j)*vocn(i,j) + strtlty(i,j) = fm(i,j)*uocn(i,j) + elseif (trim(ssh_stress) == 'coupled') then + strtltx(i,j) = -gravit*umass(i,j)*ss_tltx(i,j) + strtlty(i,j) = -gravit*umass(i,j)*ss_tlty(i,j) + else + call abort_ice(subname//' ERROR: unknown ssh_stress='//trim(ssh_stress), & + file=__FILE__, line=__LINE__) + endif + + forcex(i,j) = strairx(i,j) + strtltx(i,j) + forcey(i,j) = strairy(i,j) + strtlty(i,j) + enddo + + end subroutine dyn_prep2 + +!======================================================================= + +! Calculation of the surface stresses +! 
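+! (On the ssh_stress options handled in dyn_prep2 above: the 'geostrophic'
+!  form strtltx = -fm*vocn, strtlty = fm*uocn is equivalent to the 'coupled'
+!  form -gravit*umass*ss_tltx(y) whenever the supplied ocean currents are in
+!  geostrophic balance with the sea surface slope; the two options differ
+!  only when the coupled ocean provides ageostrophic currents or an
+!  independent surface tilt.)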
Integration of the momentum equation to find velocity (u,v) +! +! author: Elizabeth C. Hunke, LANL + + subroutine stepu (nx_block, ny_block, & + icellu, Cw, & + indxui, indxuj, & + ksub, & + aiu, str, & + uocn, vocn, & + waterx, watery, & + forcex, forcey, & + umassdti, fm, & + uarear, & + strintx, strinty, & + taubx, tauby, & + uvel_init, vvel_init,& + uvel, vvel, & + Tbu) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellu, & ! total count when iceumask is true + ksub ! subcycling iteration + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + Tbu, & ! coefficient for basal stress (N/m^2) + uvel_init,& ! x-component of velocity (m/s), beginning of timestep + vvel_init,& ! y-component of velocity (m/s), beginning of timestep + aiu , & ! ice fraction on u-grid + waterx , & ! for ocean stress calculation, x (m/s) + watery , & ! for ocean stress calculation, y (m/s) + forcex , & ! work array: combined atm stress and ocn tilt, x + forcey , & ! work array: combined atm stress and ocn tilt, y + umassdti, & ! mass of U-cell/dt (kg/m^2 s) + uocn , & ! ocean current, x-direction (m/s) + vocn , & ! ocean current, y-direction (m/s) + fm , & ! Coriolis param. * mass in U-cell (kg/s) + uarear ! 1/uarea + + real (kind=dbl_kind), dimension(nx_block,ny_block,8), intent(in) :: & + str + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + uvel , & ! x-component of velocity (m/s) + vvel ! y-component of velocity (m/s) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + strintx , & ! divergence of internal ice stress, x (N/m^2) + strinty , & ! divergence of internal ice stress, y (N/m^2) + taubx , & ! basal stress, x-direction (N/m^2) + tauby ! basal stress, y-direction (N/m^2) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + Cw ! ocean-ice neutral drag coefficient + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind) :: & + uold, vold , & ! old-time uvel, vvel + vrel , & ! relative ice-ocean velocity + cca,ccb,ab2,cc1,cc2,& ! intermediate variables + taux, tauy , & ! part of ocean stress term + Cb , & ! complete basal stress coeff + rhow ! + + real (kind=dbl_kind) :: & + u0 = 5e-5_dbl_kind ! residual velocity for basal stress (m/s) + + character(len=*), parameter :: subname = '(stepu)' + + !----------------------------------------------------------------- + ! integrate the momentum equation + !----------------------------------------------------------------- + + call icepack_query_parameters(rhow_out=rhow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + do ij =1, icellu + i = indxui(ij) + j = indxuj(ij) + + uold = uvel(i,j) + vold = vvel(i,j) + + ! (magnitude of relative ocean current)*rhow*drag*aice + vrel = aiu(i,j)*rhow*Cw(i,j)*sqrt((uocn(i,j) - uold)**2 + & + (vocn(i,j) - vold)**2) ! m/s + ! ice/ocean stress + taux = vrel*waterx(i,j) ! NOTE this is not the entire + tauy = vrel*watery(i,j) ! ocn stress term + + Cb = Tbu(i,j) / (sqrt(uold**2 + vold**2) + u0) ! for basal stress + ! revp = 0 for classic evp, 1 for revised evp + cca = (brlx + revp)*umassdti(i,j) + vrel * cosw + Cb ! kg/m^2 s + + ccb = fm(i,j) + sign(c1,fm(i,j)) * vrel * sinw ! 
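+      ! The lines below solve the implicit 2x2 system
+      !      cca*u - ccb*v = cc1
+      !      ccb*u + cca*v = cc2
+      ! by Cramer's rule (determinant ab2 = cca**2 + ccb**2): cca collects
+      ! the (brlx + revp)*umassdti inertia term, the implicit part of the
+      ! ocean drag (vrel*cosw) and the basal drag Cb, while ccb collects the
+      ! Coriolis term and the turning-angle part of the drag; with the ocean
+      ! turning angle of zero set in this module (cosw = c1, sinw = c0),
+      ! ccb reduces to fm(i,j).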
kg/m^2 s + + ab2 = cca**2 + ccb**2 + + ! divergence of the internal stress tensor + strintx(i,j) = uarear(i,j)* & + (str(i,j,1) + str(i+1,j,2) + str(i,j+1,3) + str(i+1,j+1,4)) + strinty(i,j) = uarear(i,j)* & + (str(i,j,5) + str(i,j+1,6) + str(i+1,j,7) + str(i+1,j+1,8)) + + ! finally, the velocity components + cc1 = strintx(i,j) + forcex(i,j) + taux & + + umassdti(i,j)*(brlx*uold + revp*uvel_init(i,j)) + cc2 = strinty(i,j) + forcey(i,j) + tauy & + + umassdti(i,j)*(brlx*vold + revp*vvel_init(i,j)) + + uvel(i,j) = (cca*cc1 + ccb*cc2) / ab2 ! m/s + vvel(i,j) = (cca*cc2 - ccb*cc1) / ab2 + + ! calculate basal stress component for outputs + if (ksub == ndte) then ! on last subcycling iteration + if ( basalstress ) then + taubx(i,j) = -uvel(i,j)*Tbu(i,j) / (sqrt(uold**2 + vold**2) + u0) + tauby(i,j) = -vvel(i,j)*Tbu(i,j) / (sqrt(uold**2 + vold**2) + u0) + endif + endif + + enddo ! ij + + end subroutine stepu + +!======================================================================= + +! Calculation of the ice-ocean stress. +! ...the sign will be reversed later... +! +! author: Elizabeth C. Hunke, LANL + + subroutine dyn_finish (nx_block, ny_block, & + icellu, Cw, & + indxui, indxuj, & + uvel, vvel, & + uocn, vocn, & + aiu, fm, & + strintx, strinty, & + strairx, strairy, & + strocnx, strocny, & + strocnxT, strocnyT) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellu ! total count when iceumask is true + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + uvel , & ! x-component of velocity (m/s) + vvel , & ! y-component of velocity (m/s) + uocn , & ! ocean current, x-direction (m/s) + vocn , & ! ocean current, y-direction (m/s) + aiu , & ! ice fraction on u-grid + fm , & ! Coriolis param. * mass in U-cell (kg/s) + strintx , & ! divergence of internal ice stress, x (N/m^2) + strinty , & ! divergence of internal ice stress, y (N/m^2) + strairx , & ! stress on ice by air, x-direction + strairy ! stress on ice by air, y-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + strocnx , & ! ice-ocean stress, x-direction + strocny ! ice-ocean stress, y-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & + strocnxT, & ! ice-ocean stress, x-direction + strocnyT ! ice-ocean stress, y-direction + + ! local variables + + integer (kind=int_kind) :: & + i, j, ij + + real (kind=dbl_kind) :: vrel, rhow + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + Cw ! ocean-ice neutral drag coefficient + + character(len=*), parameter :: subname = '(dyn_finish)' + + call icepack_query_parameters(rhow_out=rhow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + do j = 1, ny_block + do i = 1, nx_block + strocnxT(i,j) = c0 + strocnyT(i,j) = c0 + enddo + enddo + + ! ocean-ice stress for coupling + do ij =1, icellu + i = indxui(ij) + j = indxuj(ij) + + vrel = rhow*Cw(i,j)*sqrt((uocn(i,j) - uvel(i,j))**2 + & + (vocn(i,j) - vvel(i,j))**2) ! m/s + +! strocnx(i,j) = strocnx(i,j) & +! - vrel*(uvel(i,j)*cosw - vvel(i,j)*sinw) * aiu(i,j) +! strocny(i,j) = strocny(i,j) & +! - vrel*(vvel(i,j)*cosw + uvel(i,j)*sinw) * aiu(i,j) + + ! 
update strocnx to most recent iterate and complete the term + vrel = vrel * aiu(i,j) + strocnx(i,j) = vrel*((uocn(i,j) - uvel(i,j))*cosw & + - (vocn(i,j) - vvel(i,j))*sinw*sign(c1,fm(i,j))) + strocny(i,j) = vrel*((vocn(i,j) - vvel(i,j))*cosw & + + (uocn(i,j) - uvel(i,j))*sinw*sign(c1,fm(i,j))) + + ! Hibler/Bryan stress + ! the sign is reversed later, therefore negative here +! strocnx(i,j) = -(strairx(i,j) + strintx(i,j)) +! strocny(i,j) = -(strairy(i,j) + strinty(i,j)) + + ! Prepare to convert to T grid + ! divide by aice for coupling + strocnxT(i,j) = strocnx(i,j) / aiu(i,j) + strocnyT(i,j) = strocny(i,j) / aiu(i,j) + enddo + + end subroutine dyn_finish + +!======================================================================= +! Computes basal stress Tbu coefficients (landfast ice) +! +! Lemieux, J. F., B. Tremblay, F. Dupont, M. Plante, G.C. Smith, D. Dumont (2015). +! A basal stress parameterization form modeling landfast ice, J. Geophys. Res. +! Oceans, 120, 3157-3173. +! +! Lemieux, J. F., F. Dupont, P. Blain, F. Roy, G.C. Smith, G.M. Flato (2016). +! Improving the simulation of landfast ice by combining tensile strength and a +! parameterization for grounded ridges, J. Geophys. Res. Oceans, 121. +! +! author: JF Lemieux, Philippe Blain (ECCC) +! +! note: Tbu is a part of the Cb as defined in Lemieux et al. 2015 and 2016. +! + subroutine basal_stress_coeff (nx_block, ny_block, & + icellu, & + indxui, indxuj, & + vice, aice, & + hwater, Tbu) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + icellu ! no. of cells where icetmask = 1 + + integer (kind=int_kind), dimension (nx_block*ny_block), intent(in) :: & + indxui , & ! compressed index in i-direction + indxuj ! compressed index in j-direction + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + aice , & ! concentration of ice at tracer location + vice , & ! volume per unit area of ice at tracer location + hwater ! water depth at tracer location + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(inout) :: & + Tbu ! coefficient for basal stress (N/m^2) + + real (kind=dbl_kind) :: & + au, & ! concentration of ice at u location + hu, & ! volume per unit area of ice at u location (mean thickness) + hwu, & ! water depth at u location + hcu ! critical thickness at u location + + integer (kind=int_kind) :: & + i, j, ij + + character(len=*), parameter :: subname = '(basal_stress_coeff)' + + do ij = 1, icellu + i = indxui(ij) + j = indxuj(ij) + + ! convert quantities to u-location + + hwu = min(hwater(i,j),hwater(i+1,j),hwater(i,j+1),hwater(i+1,j+1)) + + if (hwu < threshold_hw) then + + au = max(aice(i,j),aice(i+1,j),aice(i,j+1),aice(i+1,j+1)) + hu = max(vice(i,j),vice(i+1,j),vice(i,j+1),vice(i+1,j+1)) + + ! 1- calculate critical thickness + hcu = au * hwu / k1 + + ! 2- calculate basal stress factor + Tbu(i,j) = k2 * max(c0,(hu - hcu)) * exp(-alphab * (c1 - au)) + + endif + + enddo ! ij + + end subroutine basal_stress_coeff + +!======================================================================= + +! Computes principal stresses for comparison with the theoretical +! yield curve; northeast values +! +! author: Elizabeth C. Hunke, LANL + + subroutine principal_stress(nx_block, ny_block, & + stressp_1, stressm_1, & + stress12_1, strength, & + sig1, sig2, & + sigP) + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block ! block dimensions + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + stressp_1 , & ! 
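+! (Worked example for basal_stress_coeff above, using the default namelist
+!  values k1 = 8, k2 = 15 N/m^3, alphab = 20 and threshold_hw = 30 m set in
+!  input_data: for a U-point with water depth hwu = 20 m, ice concentration
+!  au = 0.9 and mean thickness hu = 3 m, the critical thickness is
+!  hcu = au*hwu/k1 = 2.25 m, and
+!  Tbu = k2*(hu-hcu)*exp(-alphab*(1-au)) = 15 * 0.75 * exp(-2) ~ 1.5 N/m^2.)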
sigma11 + sigma22 + stressm_1 , & ! sigma11 - sigma22 + stress12_1, & ! sigma12 + strength ! for normalization of sig1 and sig2 + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out):: & + sig1 , & ! normalized principal stress component + sig2 , & ! normalized principal stress component + sigP ! internal ice pressure (N/m) + + ! local variables + + integer (kind=int_kind) :: i, j + + real (kind=dbl_kind) :: puny + + character(len=*), parameter :: subname = '(principal_stress)' + + call icepack_query_parameters(puny_out=puny) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + do j = 1, ny_block + do i = 1, nx_block + if (strength(i,j) > puny) then + ! ice internal pressure + sigP(i,j) = -p5*stressp_1(i,j) + + ! normalized principal stresses + sig1(i,j) = (p5*(stressp_1(i,j) & + + sqrt(stressm_1(i,j)**2+c4*stress12_1(i,j)**2))) & + / strength(i,j) + sig2(i,j) = (p5*(stressp_1(i,j) & + - sqrt(stressm_1(i,j)**2+c4*stress12_1(i,j)**2))) & + / strength(i,j) + else + sig1(i,j) = spval_dbl + sig2(i,j) = spval_dbl + sigP(i,j) = spval_dbl + endif + enddo + enddo + + end subroutine principal_stress + +!======================================================================= + + end module ice_dyn_shared + +!======================================================================= diff --git a/cicecore/cicedynB/general/ice_init.F90 b/cicecore/cicedynB/general/ice_init.F90 new file mode 100644 index 000000000..f2eaae17d --- /dev/null +++ b/cicecore/cicedynB/general/ice_init.F90 @@ -0,0 +1,2431 @@ +!======================================================================= + +! parameter and variable initializations +! +! authors Elizabeth C. Hunke and William H. Lipscomb, LANL +! C. M. Bitz, UW +! +! 2004 WHL: Block structure added +! 2006 ECH: Added namelist variables, warnings. +! Replaced old default initial ice conditions with 3.14 version. +! Converted to free source form (F90). + + module ice_init + + use ice_kinds_mod + use ice_communicate, only: my_task, master_task, ice_barrier + use ice_constants, only: c0, c1, c2, c3, p2, p5 + use ice_exit, only: abort_ice + use ice_fileunits, only: nu_nml, nu_diag, nu_diag_set, nml_filename, diag_type, & + ice_stdout, get_fileunit, release_fileunit, bfbflag, flush_fileunit, & + ice_IOUnitsMinUnit, ice_IOUnitsMaxUnit +#ifdef CESMCOUPLED + use ice_fileunits, only: inst_suffix +#endif + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_init_trcr + use icepack_intfc, only: icepack_init_parameters + use icepack_intfc, only: icepack_init_tracer_flags + use icepack_intfc, only: icepack_init_tracer_sizes + use icepack_intfc, only: icepack_query_tracer_flags + use icepack_intfc, only: icepack_query_tracer_sizes + use icepack_intfc, only: icepack_query_tracer_indices + use icepack_intfc, only: icepack_query_parameters + + implicit none + private + + character(len=char_len_long),public :: & + ice_ic ! method of ice cover initialization + ! 'default' => latitude and sst dependent + ! 'none' => no ice + ! note: restart = .true. overwrites + + public :: input_data, init_state, set_state_var + +!======================================================================= + + contains + +!======================================================================= + +! Namelist variables, set to default values; may be altered +! at run time +! +! author Elizabeth C. 
Hunke, LANL + + subroutine input_data + + use ice_broadcast, only: broadcast_scalar, broadcast_array + use ice_diagnostics, only: diag_file, print_global, print_points, latpnt, lonpnt + use ice_domain, only: close_boundaries, ns_boundary_type, orca_halogrid + use ice_domain_size, only: ncat, nilyr, nslyr, nblyr, nfsd, nfreq, & + n_iso, n_aero, n_zaero, n_algae, & + n_doc, n_dic, n_don, n_fed, n_fep, & + max_nstrm + use ice_calendar, only: year_init, istep0, histfreq, histfreq_n, & + dumpfreq, dumpfreq_n, diagfreq, & + npt, dt, ndtd, days_per_year, use_leap_years, & + write_ic, dump_last + use ice_arrays_column, only: oceanmixed_ice + use ice_restart_column, only: restart_age, restart_FY, restart_lvl, & + restart_pond_cesm, restart_pond_lvl, restart_pond_topo, restart_aero, & + restart_fsd, restart_iso + use ice_restart_shared, only: & + restart, restart_ext, restart_coszen, restart_dir, restart_file, pointer_file, & + runid, runtype, use_restart_time, restart_format, lcdf64 + use ice_history_shared, only: hist_avg, history_dir, history_file, & + incond_dir, incond_file, version_name, & + history_precision, history_format + use ice_flux, only: update_ocn_f, l_mpond_fresh + use ice_flux, only: default_season + use ice_flux_bgc, only: cpl_bgc + use ice_forcing, only: & + ycycle, fyear_init, dbug, & + atm_data_type, atm_data_dir, precip_units, rotate_wind, & + atm_data_format, ocn_data_format, & + bgc_data_type, & + ocn_data_type, ocn_data_dir, wave_spec_file, & + oceanmixed_file, restore_ocn, trestore, & + ice_data_type + use ice_arrays_column, only: bgc_data_dir, fe_data_type + use ice_grid, only: grid_file, gridcpl_file, kmt_file, & + bathymetry_file, use_bathymetry, & + bathymetry_format, & + grid_type, grid_format, & + dxrect, dyrect + use ice_dyn_shared, only: ndte, kdyn, revised_evp, yield_curve, & + kevp_kernel, & + basalstress, k1, k2, alphab, threshold_hw, & + Ktens, e_ratio, coriolis, ssh_stress, & + kridge, ktransport, brlx, arlx + use ice_transport_driver, only: advection, conserv_check + use ice_restoring, only: restore_ice +#ifdef CESMCOUPLED + use shr_file_mod, only: shr_file_setIO +#endif + + ! local variables + + integer (kind=int_kind) :: & + nml_error, & ! namelist i/o error flag + n ! loop index + + logical :: exists + + real (kind=dbl_kind) :: ustar_min, albicev, albicei, albsnowv, albsnowi, & + ahmax, R_ice, R_pnd, R_snw, dT_mlt, rsnw_mlt, emissivity, & + mu_rdg, hs0, dpscale, rfracmin, rfracmax, pndaspect, hs1, hp1, & + a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, dSdt_slow_mode, & + phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar, & + sw_frac, sw_dtemp + + integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, & + kitd, kcatbound + + character (len=char_len) :: shortwave, albedo_type, conduct, fbot_xfer_type, & + tfrz_option, frzpnd, atmbndy, wave_spec_type + + logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, & + sw_redist + + logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond + logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd + logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo + integer (kind=int_kind) :: numin, numax ! unit number limits + + integer (kind=int_kind) :: rpcesm, rplvl, rptopo + real (kind=dbl_kind) :: Cf, ksno, puny + character (len=char_len) :: abort_list + character (len=64) :: tmpstr + character (len=128) :: tmpstr2 + + character(len=*), parameter :: subname='(input_data)' + + !----------------------------------------------------------------- + ! 
Namelist variables + !----------------------------------------------------------------- + + namelist /setup_nml/ & + days_per_year, use_leap_years, year_init, istep0, & + dt, npt, ndtd, numin, & + runtype, runid, bfbflag, numax, & + ice_ic, restart, restart_dir, restart_file, & + restart_ext, use_restart_time, restart_format, lcdf64, & + pointer_file, dumpfreq, dumpfreq_n, dump_last, & + diagfreq, diag_type, diag_file, history_format,& + print_global, print_points, latpnt, lonpnt, & + dbug, histfreq, histfreq_n, hist_avg, & + history_dir, history_file, history_precision, cpl_bgc, & + conserv_check, & + write_ic, incond_dir, incond_file, version_name + + namelist /grid_nml/ & + grid_format, grid_type, grid_file, kmt_file, & + bathymetry_file, use_bathymetry, nfsd, bathymetry_format, & + ncat, nilyr, nslyr, nblyr, & + kcatbound, gridcpl_file, dxrect, dyrect, & + close_boundaries, orca_halogrid + + namelist /tracer_nml/ & + tr_iage, restart_age, & + tr_FY, restart_FY, & + tr_lvl, restart_lvl, & + tr_pond_cesm, restart_pond_cesm, & + tr_pond_lvl, restart_pond_lvl, & + tr_pond_topo, restart_pond_topo, & + tr_iso, restart_iso, & + tr_aero, restart_aero, & + tr_fsd, restart_fsd, & + n_iso, n_aero, n_zaero, n_algae, & + n_doc, n_dic, n_don, n_fed, n_fep + + namelist /thermo_nml/ & + kitd, ktherm, conduct, ksno, & + a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, & + dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy, & + sw_redist, sw_frac, sw_dtemp + + namelist /dynamics_nml/ & + kdyn, ndte, revised_evp, yield_curve, & + kevp_kernel, & + brlx, arlx, ssh_stress, & + advection, coriolis, kridge, ktransport, & + kstrength, krdg_partic, krdg_redist, mu_rdg, & + e_ratio, Ktens, Cf, basalstress, & + k1, k2, alphab, threshold_hw, & + Pstar, Cstar + + namelist /shortwave_nml/ & + shortwave, albedo_type, & + albicev, albicei, albsnowv, albsnowi, & + ahmax, R_ice, R_pnd, R_snw, & + dT_mlt, rsnw_mlt, kalg + + namelist /ponds_nml/ & + hs0, dpscale, frzpnd, & + rfracmin, rfracmax, pndaspect, hs1, & + hp1 + + namelist /forcing_nml/ & + formdrag, atmbndy, calc_strair, calc_Tsfc, & + highfreq, natmiter, atmiter_conv, & + ustar_min, emissivity, & + fbot_xfer_type, update_ocn_f, l_mpond_fresh, tfrz_option, & + oceanmixed_ice, restore_ice, restore_ocn, trestore, & + precip_units, default_season, wave_spec_type,nfreq, & + atm_data_type, ocn_data_type, bgc_data_type, fe_data_type, & + ice_data_type, wave_spec_file, restart_coszen, & + fyear_init, ycycle, & + atm_data_dir, ocn_data_dir, bgc_data_dir, & + atm_data_format, ocn_data_format, rotate_wind, & + oceanmixed_file + + !----------------------------------------------------------------- + ! default values + !----------------------------------------------------------------- + + abort_list = "" + + call icepack_query_parameters(puny_out=puny) +! nu_diag not yet defined +! call icepack_warnings_flush(nu_diag) +! if (icepack_warnings_aborted()) call abort_ice(error_message=subname//'Icepack Abort0', & +! file=__FILE__, line=__LINE__) + + days_per_year = 365 ! number of days in a year + use_leap_years= .false.! if true, use leap years (Feb 29) + year_init = 0 ! initial year + istep0 = 0 ! no. of steps taken in previous integrations, + ! real (dumped) or imagined (to set calendar) +#ifndef CESMCOUPLED + dt = 3600.0_dbl_kind ! time step, s +#endif + numin = 11 ! min allowed unit number + numax = 99 ! max allowed unit number + npt = 99999 ! total number of time steps (dt) + diagfreq = 24 ! how often diag output is written + print_points = .false. ! 
if true, print point data + print_global = .true. ! if true, print global diagnostic data + bfbflag = 'off' ! off = optimized + diag_type = 'stdout' + diag_file = 'ice_diag.d' + histfreq(1) = '1' ! output frequency option for different streams + histfreq(2) = 'h' ! output frequency option for different streams + histfreq(3) = 'd' ! output frequency option for different streams + histfreq(4) = 'm' ! output frequency option for different streams + histfreq(5) = 'y' ! output frequency option for different streams + histfreq_n(:) = 1 ! output frequency + hist_avg = .true. ! if true, write time-averages (not snapshots) + history_format = 'default' ! history file format + history_dir = './' ! write to executable dir for default + history_file = 'iceh' ! history file name prefix + history_precision = 4 ! precision of history files + write_ic = .false. ! write out initial condition + cpl_bgc = .false. ! couple bgc thru driver + incond_dir = history_dir ! write to history dir for default + incond_file = 'iceh_ic'! file prefix + dumpfreq='y' ! restart frequency option + dumpfreq_n = 1 ! restart frequency + dump_last = .false. ! write restart on last time step + restart = .false. ! if true, read restart files for initialization + restart_dir = './' ! write to executable dir for default + restart_file = 'iced' ! restart file name prefix + restart_ext = .false. ! if true, read/write ghost cells + restart_coszen = .false. ! if true, read/write coszen + use_restart_time = .true. ! if true, use time info written in file + pointer_file = 'ice.restart_file' + restart_format = 'default' ! restart file format + lcdf64 = .false. ! 64 bit offset for netCDF + ice_ic = 'default' ! latitude and sst-dependent + grid_format = 'bin' ! file format ('bin'=binary or 'nc'=netcdf) + grid_type = 'rectangular' ! define rectangular grid internally + grid_file = 'unknown_grid_file' + gridcpl_file = 'unknown_gridcpl_file' + orca_halogrid = .false. ! orca haloed grid + bathymetry_file = 'unknown_bathymetry_file' + bathymetry_format = 'default' + use_bathymetry = .false. + kmt_file = 'unknown_kmt_file' + version_name = 'unknown_version_name' + ncat = 0 ! number of ice thickness categories + nfsd = 1 ! number of floe size categories (1 = default) + nilyr = 0 ! number of vertical ice layers + nslyr = 0 ! number of vertical snow layers + nblyr = 0 ! number of bio layers + + kitd = 1 ! type of itd conversions (0 = delta, 1 = linear) + kcatbound = 1 ! category boundary formula (0 = old, 1 = new, etc) + kdyn = 1 ! type of dynamics (-1, 0 = off, 1 = evp, 2 = eap) + ndtd = 1 ! dynamic time steps per thermodynamic time step + ndte = 120 ! subcycles per dynamics timestep: ndte=dt_dyn/dte + kevp_kernel = 0 ! EVP kernel (0 = 2D, >0: 1D. Only ver. 2 is implemented yet) + brlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared + arlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared + revised_evp = .false. ! if true, use revised procedure for evp dynamics + yield_curve = 'ellipse' + kstrength = 1 ! 1 = Rothrock 75 strength, 0 = Hibler 79 + Pstar = 2.75e4_dbl_kind ! constant in Hibler strength formula (kstrength = 0) + Cstar = 20._dbl_kind ! constant in Hibler strength formula (kstrength = 0) + krdg_partic = 1 ! 1 = new participation, 0 = Thorndike et al 75 + krdg_redist = 1 ! 1 = new redistribution, 0 = Hibler 80 + mu_rdg = 3 ! e-folding scale of ridged ice, krdg_partic=1 (m^0.5) + Cf = 17.0_dbl_kind ! ratio of ridging work to PE change in ridging + ksno = 0.3_dbl_kind ! 
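+    ! With these standalone defaults (dt = 3600 s, ndte = 120) the elastic
+    ! subcycling step used in ice_dyn_shared is dte = dt/ndte = 30 s, i.e.
+    ! dtei ~ 0.033 s^-1, and for classic EVP brlx = ndte = 120; the
+    ! brlx = arlx = 300 values above are used only when revised_evp = .true.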
snow thermal conductivity + dxrect = 0.0_dbl_kind ! user defined grid spacing in cm in x direction + dyrect = 0.0_dbl_kind ! user defined grid spacing in cm in y direction + close_boundaries = .false. ! true = set land on edges of grid + basalstress= .false. ! if true, basal stress for landfast is on + k1 = 8.0_dbl_kind ! 1st free parameter for landfast parameterization + k2 = 15.0_dbl_kind ! dah: second free parameter (N/m^3) for landfast parametrization + alphab = 20.0_dbl_kind ! alphab=Cb factor in Lemieux et al 2015 + threshold_hw = 30.0_dbl_kind ! max water depth for grounding + Ktens = 0.0_dbl_kind ! T=Ktens*P (tensile strength: see Konig and Holland, 2010) + e_ratio = 2.0_dbl_kind ! EVP ellipse aspect ratio + advection = 'remap' ! incremental remapping transport scheme + conserv_check = .false.! tracer conservation check + shortwave = 'ccsm3' ! 'ccsm3' or 'dEdd' (delta-Eddington) + albedo_type = 'ccsm3' ! 'ccsm3' or 'constant' + ktherm = 1 ! -1 = OFF, 0 = 0-layer, 1 = BL99, 2 = mushy thermo + conduct = 'bubbly' ! 'MU71' or 'bubbly' (Pringle et al 2007) + coriolis = 'latitude' ! latitude dependent, or 'constant' + ssh_stress = 'geostrophic' ! 'geostrophic' or 'coupled' + kridge = 1 ! -1 = off, 1 = on + ktransport = 1 ! -1 = off, 1 = on + calc_Tsfc = .true. ! calculate surface temperature + update_ocn_f = .false. ! include fresh water and salt fluxes for frazil + ustar_min = 0.005 ! minimum friction velocity for ocean heat flux (m/s) + emissivity = 0.95 ! emissivity of snow and ice + l_mpond_fresh = .false. ! logical switch for including meltpond freshwater + ! flux feedback to ocean model + fbot_xfer_type = 'constant' ! transfer coefficient type for ocn heat flux + R_ice = 0.00_dbl_kind ! tuning parameter for sea ice + R_pnd = 0.00_dbl_kind ! tuning parameter for ponded sea ice + R_snw = 1.50_dbl_kind ! tuning parameter for snow over sea ice + dT_mlt = 1.5_dbl_kind ! change in temp to give non-melt to melt change + ! in snow grain radius + rsnw_mlt = 1500._dbl_kind ! maximum melting snow grain radius + kalg = 0.60_dbl_kind ! algae absorption coefficient for 0.5 m thick layer + ! 0.5 m path of 75 mg Chl a / m2 + hp1 = 0.01_dbl_kind ! critical pond lid thickness for topo ponds + hs0 = 0.03_dbl_kind ! snow depth for transition to bare sea ice (m) + hs1 = 0.03_dbl_kind ! snow depth for transition to bare pond ice (m) + dpscale = c1 ! alter e-folding time scale for flushing + frzpnd = 'cesm' ! melt pond refreezing parameterization + rfracmin = 0.15_dbl_kind ! minimum retained fraction of meltwater + rfracmax = 0.85_dbl_kind ! maximum retained fraction of meltwater + pndaspect = 0.8_dbl_kind ! ratio of pond depth to area fraction + albicev = 0.78_dbl_kind ! visible ice albedo for h > ahmax + albicei = 0.36_dbl_kind ! near-ir ice albedo for h > ahmax + albsnowv = 0.98_dbl_kind ! cold snow albedo, visible + albsnowi = 0.70_dbl_kind ! cold snow albedo, near IR + ahmax = 0.3_dbl_kind ! thickness above which ice albedo is constant (m) + atmbndy = 'default' ! or 'constant' + default_season = 'winter' ! default forcing data, if data is not read in + fyear_init = 1900 ! first year of forcing cycle + ycycle = 1 ! number of years in forcing cycle + atm_data_format = 'bin' ! file format ('bin'=binary or 'nc'=netcdf) + atm_data_type = 'default' + atm_data_dir = ' ' + rotate_wind = .true. ! rotate wind/stress composants to computational grid orientation + calc_strair = .true. ! calculate wind stress + formdrag = .false. ! calculate form drag + highfreq = .false. ! 
calculate high frequency RASM coupling + natmiter = 5 ! number of iterations for atm boundary layer calcs + atmiter_conv = c0 ! ustar convergence criteria + precip_units = 'mks' ! 'mm_per_month' or + ! 'mm_per_sec' = 'mks' = kg/m^2 s + tfrz_option = 'mushy' ! freezing temp formulation + oceanmixed_ice = .false. ! if true, use internal ocean mixed layer + wave_spec_type = 'none' ! type of wave spectrum forcing + nfreq = 25 ! number of wave frequencies + wave_spec_file = ' ' ! wave forcing file name + ocn_data_format = 'bin' ! file format ('bin'=binary or 'nc'=netcdf) + bgc_data_type = 'default' + fe_data_type = 'default' + ice_data_type = 'default' ! used by some tests to initialize ice state (concentration, velocities) + bgc_data_dir = 'unknown_bgc_data_dir' + ocn_data_type = 'default' + ocn_data_dir = 'unknown_ocn_data_dir' + oceanmixed_file = 'unknown_oceanmixed_file' ! ocean forcing data + restore_ocn = .false. ! restore sst if true + trestore = 90 ! restoring timescale, days (0 instantaneous) + restore_ice = .false. ! restore ice state on grid edges if true + dbug = .false. ! true writes diagnostics for input forcing + + latpnt(1) = 90._dbl_kind ! latitude of diagnostic point 1 (deg) + lonpnt(1) = 0._dbl_kind ! longitude of point 1 (deg) + latpnt(2) = -65._dbl_kind ! latitude of diagnostic point 2 (deg) + lonpnt(2) = -45._dbl_kind ! longitude of point 2 (deg) + +#ifndef CESMCOUPLED + runid = 'unknown' ! run ID used in CESM and for machine 'bering' + runtype = 'initial' ! run type: 'initial', 'continue' +#endif + + ! extra tracers + tr_iage = .false. ! ice age + restart_age = .false. ! ice age restart + tr_FY = .false. ! ice age + restart_FY = .false. ! ice age restart + tr_lvl = .false. ! level ice + restart_lvl = .false. ! level ice restart + tr_pond_cesm = .false. ! CESM melt ponds + restart_pond_cesm = .false. ! melt ponds restart + tr_pond_lvl = .false. ! level-ice melt ponds + restart_pond_lvl = .false. ! melt ponds restart + tr_pond_topo = .false. ! explicit melt ponds (topographic) + restart_pond_topo = .false. ! melt ponds restart + tr_iso = .false. ! isotopes + restart_iso = .false. ! isotopes restart + tr_aero = .false. ! aerosols + restart_aero = .false. ! aerosols restart + tr_fsd = .false. ! floe size distribution + restart_fsd = .false. ! floe size distribution restart + + n_iso = 0 + n_aero = 0 + n_zaero = 0 + n_algae = 0 + n_doc = 0 + n_dic = 0 + n_don = 0 + n_fed = 0 + n_fep = 0 + + ! mushy layer gravity drainage physics + a_rapid_mode = 0.5e-3_dbl_kind ! channel radius for rapid drainage mode (m) + Rac_rapid_mode = 10.0_dbl_kind ! critical Rayleigh number + aspect_rapid_mode = 1.0_dbl_kind ! aspect ratio (larger is wider) + dSdt_slow_mode = -1.5e-7_dbl_kind ! slow mode drainage strength (m s-1 K-1) + phi_c_slow_mode = 0.05_dbl_kind ! critical liquid fraction porosity cutoff + phi_i_mushy = 0.85_dbl_kind ! liquid fraction of congelation ice + + ! shortwave redistribution in the thermodynamics + sw_redist = .false. + sw_frac = 0.9_dbl_kind + sw_dtemp = 0.02_dbl_kind + + !----------------------------------------------------------------- + ! 
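+! The defaults above are overridden by whatever appears in the namelist file
+! (nml_filename, normally ice_in); for example, a minimal dynamics_nml group
+! enabling the landfast-ice (basal stress) parameterization with its default
+! constants might look like:
+!
+!   &dynamics_nml
+!     kdyn         = 1
+!     basalstress  = .true.
+!     k1           = 8.0
+!     k2           = 15.0
+!     alphab       = 20.0
+!     threshold_hw = 30.0
+!   /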
read from input file + !----------------------------------------------------------------- + +#ifdef CESMCOUPLED + nml_filename = 'ice_in'//trim(inst_suffix) +#endif + + call get_fileunit(nu_nml) + + if (my_task == master_task) then + open (nu_nml, file=nml_filename, status='old',iostat=nml_error) + if (nml_error /= 0) then + nml_error = -1 + else + nml_error = 1 + endif + + do while (nml_error > 0) + print*,'Reading setup_nml' + read(nu_nml, nml=setup_nml,iostat=nml_error) + if (nml_error /= 0) exit + print*,'Reading grid_nml' + read(nu_nml, nml=grid_nml,iostat=nml_error) + if (nml_error /= 0) exit + print*,'Reading tracer_nml' + read(nu_nml, nml=tracer_nml,iostat=nml_error) + if (nml_error /= 0) exit + print*,'Reading thermo_nml' + read(nu_nml, nml=thermo_nml,iostat=nml_error) + if (nml_error /= 0) exit + print*,'Reading dynamics_nml' + read(nu_nml, nml=dynamics_nml,iostat=nml_error) + if (nml_error /= 0) exit + print*,'Reading shortwave_nml' + read(nu_nml, nml=shortwave_nml,iostat=nml_error) + if (nml_error /= 0) exit + print*,'Reading ponds_nml' + read(nu_nml, nml=ponds_nml,iostat=nml_error) + if (nml_error /= 0) exit + print*,'Reading forcing_nml' + read(nu_nml, nml=forcing_nml,iostat=nml_error) + if (nml_error /= 0) exit + end do + if (nml_error == 0) close(nu_nml) + endif + call broadcast_scalar(nml_error, master_task) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: reading namelist', & + file=__FILE__, line=__LINE__) + endif + call release_fileunit(nu_nml) + + !----------------------------------------------------------------- + ! set up diagnostics output and resolve conflicts + !----------------------------------------------------------------- + +#ifdef CESMCOUPLED + ! Note in CESMCOUPLED mode diag_file is not utilized and + ! runid and runtype are obtained from the driver, not from the namelist + + if (my_task == master_task) then + history_file = trim(runid) // ".cice" // trim(inst_suffix) //".h" + restart_file = trim(runid) // ".cice" // trim(inst_suffix) //".r" + incond_file = trim(runid) // ".cice" // trim(inst_suffix) //".i" + ! Note by tcraig - this if test is needed because the nuopc cap sets + ! nu_diag before this routine is called. This creates a conflict. + ! In addition, in the nuopc cap, shr_file_setIO will fail if the + ! needed namelist is missing (which it is in the CIME nuopc implementation) + if (.not. nu_diag_set) then + inquire(file='ice_modelio.nml'//trim(inst_suffix),exist=exists) + if (exists) then + call get_fileUnit(nu_diag) + call shr_file_setIO('ice_modelio.nml'//trim(inst_suffix),nu_diag) + end if + endif + else + ! each task gets unique ice log filename when if test is true, for debugging + if (1 == 0) then + call get_fileUnit(nu_diag) + write(tmpstr,'(a,i4.4)') "ice.log.task_",my_task + open(nu_diag,file=tmpstr) + endif + end if + if (trim(ice_ic) /= 'default' .and. trim(ice_ic) /= 'none') then + restart = .true. + end if +#else + if (trim(diag_type) == 'file') call get_fileunit(nu_diag) +#endif + + !----------------------------------------------------------------- + ! 
broadcast namelist settings + !----------------------------------------------------------------- + + call broadcast_scalar(numin, master_task) + call broadcast_scalar(numax, master_task) + call broadcast_scalar(days_per_year, master_task) + call broadcast_scalar(use_leap_years, master_task) + call broadcast_scalar(year_init, master_task) + call broadcast_scalar(istep0, master_task) + call broadcast_scalar(dt, master_task) + call broadcast_scalar(npt, master_task) + call broadcast_scalar(diagfreq, master_task) + call broadcast_scalar(print_points, master_task) + call broadcast_scalar(print_global, master_task) + call broadcast_scalar(bfbflag, master_task) + call broadcast_scalar(diag_type, master_task) + call broadcast_scalar(diag_file, master_task) + do n = 1, max_nstrm + call broadcast_scalar(histfreq(n), master_task) + enddo + call broadcast_array(histfreq_n, master_task) + call broadcast_scalar(hist_avg, master_task) + call broadcast_scalar(history_dir, master_task) + call broadcast_scalar(history_file, master_task) + call broadcast_scalar(history_precision, master_task) + call broadcast_scalar(history_format, master_task) + call broadcast_scalar(write_ic, master_task) + call broadcast_scalar(cpl_bgc, master_task) + call broadcast_scalar(incond_dir, master_task) + call broadcast_scalar(incond_file, master_task) + call broadcast_scalar(dumpfreq, master_task) + call broadcast_scalar(dumpfreq_n, master_task) + call broadcast_scalar(dump_last, master_task) + call broadcast_scalar(restart_file, master_task) + call broadcast_scalar(restart, master_task) + call broadcast_scalar(restart_dir, master_task) + call broadcast_scalar(restart_ext, master_task) + call broadcast_scalar(restart_coszen, master_task) + call broadcast_scalar(use_restart_time, master_task) + call broadcast_scalar(restart_format, master_task) + call broadcast_scalar(lcdf64, master_task) + call broadcast_scalar(pointer_file, master_task) + call broadcast_scalar(ice_ic, master_task) + call broadcast_scalar(grid_format, master_task) + call broadcast_scalar(dxrect, master_task) + call broadcast_scalar(dyrect, master_task) + call broadcast_scalar(close_boundaries, master_task) + call broadcast_scalar(grid_type, master_task) + call broadcast_scalar(grid_file, master_task) + call broadcast_scalar(gridcpl_file, master_task) + call broadcast_scalar(orca_halogrid, master_task) + call broadcast_scalar(bathymetry_file, master_task) + call broadcast_scalar(bathymetry_format, master_task) + call broadcast_scalar(use_bathymetry, master_task) + call broadcast_scalar(kmt_file, master_task) + call broadcast_scalar(kitd, master_task) + call broadcast_scalar(kcatbound, master_task) + call broadcast_scalar(kdyn, master_task) + call broadcast_scalar(ndtd, master_task) + call broadcast_scalar(ndte, master_task) + call broadcast_scalar(kevp_kernel, master_task) + call broadcast_scalar(brlx, master_task) + call broadcast_scalar(arlx, master_task) + call broadcast_scalar(revised_evp, master_task) + call broadcast_scalar(yield_curve, master_task) + call broadcast_scalar(kstrength, master_task) + call broadcast_scalar(Pstar, master_task) + call broadcast_scalar(Cstar, master_task) + call broadcast_scalar(krdg_partic, master_task) + call broadcast_scalar(krdg_redist, master_task) + call broadcast_scalar(mu_rdg, master_task) + call broadcast_scalar(Cf, master_task) + call broadcast_scalar(ksno, master_task) + call broadcast_scalar(basalstress, master_task) + call broadcast_scalar(k1, master_task) + call broadcast_scalar(k2, master_task) + call 
broadcast_scalar(alphab, master_task) + call broadcast_scalar(threshold_hw, master_task) + call broadcast_scalar(Ktens, master_task) + call broadcast_scalar(e_ratio, master_task) + call broadcast_scalar(advection, master_task) + call broadcast_scalar(conserv_check, master_task) + call broadcast_scalar(shortwave, master_task) + call broadcast_scalar(albedo_type, master_task) + call broadcast_scalar(ktherm, master_task) + call broadcast_scalar(coriolis, master_task) + call broadcast_scalar(ssh_stress, master_task) + call broadcast_scalar(kridge, master_task) + call broadcast_scalar(ktransport, master_task) + call broadcast_scalar(conduct, master_task) + call broadcast_scalar(R_ice, master_task) + call broadcast_scalar(R_pnd, master_task) + call broadcast_scalar(R_snw, master_task) + call broadcast_scalar(dT_mlt, master_task) + call broadcast_scalar(rsnw_mlt, master_task) + call broadcast_scalar(kalg, master_task) + call broadcast_scalar(hp1, master_task) + call broadcast_scalar(hs0, master_task) + call broadcast_scalar(hs1, master_task) + call broadcast_scalar(dpscale, master_task) + call broadcast_scalar(frzpnd, master_task) + call broadcast_scalar(rfracmin, master_task) + call broadcast_scalar(rfracmax, master_task) + call broadcast_scalar(pndaspect, master_task) + call broadcast_scalar(albicev, master_task) + call broadcast_scalar(albicei, master_task) + call broadcast_scalar(albsnowv, master_task) + call broadcast_scalar(albsnowi, master_task) + call broadcast_scalar(ahmax, master_task) + call broadcast_scalar(atmbndy, master_task) + call broadcast_scalar(fyear_init, master_task) + call broadcast_scalar(ycycle, master_task) + call broadcast_scalar(atm_data_format, master_task) + call broadcast_scalar(atm_data_type, master_task) + call broadcast_scalar(atm_data_dir, master_task) + call broadcast_scalar(rotate_wind, master_task) + call broadcast_scalar(calc_strair, master_task) + call broadcast_scalar(calc_Tsfc, master_task) + call broadcast_scalar(formdrag, master_task) + call broadcast_scalar(highfreq, master_task) + call broadcast_scalar(natmiter, master_task) + call broadcast_scalar(atmiter_conv, master_task) + call broadcast_scalar(update_ocn_f, master_task) + call broadcast_scalar(l_mpond_fresh, master_task) + call broadcast_scalar(ustar_min, master_task) + call broadcast_scalar(emissivity, master_task) + call broadcast_scalar(fbot_xfer_type, master_task) + call broadcast_scalar(precip_units, master_task) + call broadcast_scalar(oceanmixed_ice, master_task) + call broadcast_scalar(wave_spec_type, master_task) + call broadcast_scalar(wave_spec_file, master_task) + call broadcast_scalar(nfreq, master_task) + call broadcast_scalar(tfrz_option, master_task) + call broadcast_scalar(ocn_data_format, master_task) + call broadcast_scalar(bgc_data_type, master_task) + call broadcast_scalar(fe_data_type, master_task) + call broadcast_scalar(ice_data_type, master_task) + call broadcast_scalar(bgc_data_dir, master_task) + call broadcast_scalar(ocn_data_type, master_task) + call broadcast_scalar(ocn_data_dir, master_task) + call broadcast_scalar(oceanmixed_file, master_task) + call broadcast_scalar(restore_ocn, master_task) + call broadcast_scalar(trestore, master_task) + call broadcast_scalar(restore_ice, master_task) + call broadcast_scalar(dbug, master_task) + call broadcast_array (latpnt(1:2), master_task) + call broadcast_array (lonpnt(1:2), master_task) + call broadcast_scalar(runid, master_task) + call broadcast_scalar(runtype, master_task) + + if (dbug) & ! 
else only master_task writes to file + call broadcast_scalar(nu_diag, master_task) + + ! tracers + call broadcast_scalar(tr_iage, master_task) + call broadcast_scalar(restart_age, master_task) + call broadcast_scalar(tr_FY, master_task) + call broadcast_scalar(restart_FY, master_task) + call broadcast_scalar(tr_lvl, master_task) + call broadcast_scalar(restart_lvl, master_task) + call broadcast_scalar(tr_pond_cesm, master_task) + call broadcast_scalar(restart_pond_cesm, master_task) + call broadcast_scalar(tr_pond_lvl, master_task) + call broadcast_scalar(restart_pond_lvl, master_task) + call broadcast_scalar(tr_pond_topo, master_task) + call broadcast_scalar(restart_pond_topo, master_task) + call broadcast_scalar(tr_iso, master_task) + call broadcast_scalar(restart_iso, master_task) + call broadcast_scalar(tr_aero, master_task) + call broadcast_scalar(restart_aero, master_task) + call broadcast_scalar(tr_fsd, master_task) + call broadcast_scalar(restart_fsd, master_task) + call broadcast_scalar(ncat, master_task) + call broadcast_scalar(nfsd, master_task) + call broadcast_scalar(nilyr, master_task) + call broadcast_scalar(nslyr, master_task) + call broadcast_scalar(nblyr, master_task) + call broadcast_scalar(n_iso, master_task) + call broadcast_scalar(n_aero, master_task) + call broadcast_scalar(n_zaero, master_task) + call broadcast_scalar(n_algae, master_task) + call broadcast_scalar(n_doc, master_task) + call broadcast_scalar(n_dic, master_task) + call broadcast_scalar(n_don, master_task) + call broadcast_scalar(n_fed, master_task) + call broadcast_scalar(n_fep, master_task) + call broadcast_scalar(a_rapid_mode, master_task) + call broadcast_scalar(Rac_rapid_mode, master_task) + call broadcast_scalar(aspect_rapid_mode, master_task) + call broadcast_scalar(dSdt_slow_mode, master_task) + call broadcast_scalar(phi_c_slow_mode, master_task) + call broadcast_scalar(phi_i_mushy, master_task) + call broadcast_scalar(sw_redist, master_task) + call broadcast_scalar(sw_frac, master_task) + call broadcast_scalar(sw_dtemp, master_task) + +#ifdef CESMCOUPLED + pointer_file = trim(pointer_file) // trim(inst_suffix) +#endif + + !----------------------------------------------------------------- + ! verify inputs + !----------------------------------------------------------------- + + if (my_task == master_task) then + if (trim(diag_type) == 'file') then + write(ice_stdout,*) 'Diagnostic output will be in file ',diag_file + open (nu_diag, file=diag_file, status='unknown') + endif + write(nu_diag,*) '--------------------------------' + write(nu_diag,*) ' ',subname + write(nu_diag,*) ' CICE model diagnostic output ' + write(nu_diag,*) '--------------------------------' + write(nu_diag,*) ' ' + endif + + if (trim(runtype) == 'continue' .and. .not.restart) then + if (my_task == master_task) & + write(nu_diag,*) subname//' WARNING: runtype=continue, setting restart=.true.' + restart = .true. + endif + + if (trim(runtype) /= 'continue' .and. restart .and. & + (ice_ic == 'none' .or. ice_ic == 'default')) then + if (my_task == master_task) & + write(nu_diag,*) subname//' WARNING: runtype ne continue and ice_ic=none|default, setting restart=.false.' + restart = .false. + endif + + if (trim(runtype) /= 'continue' .and. (ice_ic == 'none' .or. ice_ic == 'default')) then + if (my_task == master_task) & + write(nu_diag,*) subname//' WARNING: ice_ic = none or default, setting restart flags to .false.' + restart = .false. + restart_iso = .false. + restart_aero = .false. + restart_fsd = .false. + restart_age = .false. 
+ restart_fy = .false. + restart_lvl = .false. + restart_pond_cesm = .false. + restart_pond_lvl = .false. + restart_pond_topo = .false. +! tcraig, OK to leave as true, needed for boxrestore case +! restart_ext = .false. + endif + + if (trim(runtype) == 'initial' .and. .not.(restart) .and. & + ice_ic /= 'none' .and. ice_ic /= 'default') then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: runtype, restart, ice_ic are inconsistent:' + write(nu_diag,*) subname//' ERROR: runtype=',trim(runtype), ' restart=',restart, ' ice_ic=',trim(ice_ic) + write(nu_diag,*) subname//' ERROR: Please review user guide' + endif + abort_list = trim(abort_list)//":1" + endif + +!deprecate upwind if (advection /= 'remap' .and. advection /= 'upwind' .and. advection /= 'none') then + if (advection /= 'remap' .and. advection /= 'none') then + if (trim(advection) == 'upwind') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: upwind advection has been deprecated' + endif + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: invalid advection=',trim(advection) + abort_list = trim(abort_list)//":3" + endif + + if (ncat == 1 .and. kitd == 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: kitd incompatability: ncat=1 and kitd=1' + write(nu_diag,*) subname//' ERROR: Remapping the ITD is not allowed for ncat=1.' + write(nu_diag,*) subname//' ERROR: Use kitd = 0 (delta function ITD) with kcatbound = 0' + write(nu_diag,*) subname//' ERROR: or for column configurations use kcatbound = -1' + endif + abort_list = trim(abort_list)//":4" + endif + + if (ncat /= 1 .and. kcatbound == -1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: ITD required for ncat > 1' + write(nu_diag,*) subname//' ERROR: ncat=',ncat,' kcatbound=',kcatbound + write(nu_diag,*) subname//' ERROR: Please review user guide' + endif + abort_list = trim(abort_list)//":5" + endif + + if (kdyn == 2 .and. revised_evp) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: revised_evp = T with EAP dynamics' + write(nu_diag,*) subname//' WARNING: revised_evp is ignored' + endif + revised_evp = .false. + endif + + if (kdyn > 2) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: kdyn out of range' + endif + abort_list = trim(abort_list)//":33" + endif + + rpcesm = 0 + rplvl = 0 + rptopo = 0 + if (tr_pond_cesm) rpcesm = 1 + if (tr_pond_lvl ) rplvl = 1 + if (tr_pond_topo) rptopo = 1 + + tr_pond = .false. ! explicit melt ponds + if (rpcesm + rplvl + rptopo > 0) tr_pond = .true. + + if (rpcesm + rplvl + rptopo > 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: Must use only one melt pond scheme' + endif + abort_list = trim(abort_list)//":6" + endif + + if (tr_pond_lvl .and. .not. tr_lvl) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: tr_pond_lvl=T but tr_lvl=F' + endif + abort_list = trim(abort_list)//":30" + endif + +! tcraig - this was originally implemented by resetting hs0=0. EH says it might be OK +! to not reset it but extra calculations are done and it might not be bfb. In our +! testing, we should explicitly set hs0 to 0. when setting tr_pond_lvl=T, and otherwise +! this will abort (safest option until additional testing is done) + if (tr_pond_lvl .and. 
abs(hs0) > puny) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: tr_pond_lvl=T and hs0 /= 0' + endif + abort_list = trim(abort_list)//":7" + endif + + if (trim(shortwave) /= 'dEdd' .and. tr_pond .and. calc_tsfc) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: tr_pond=T, calc_tsfc=T, invalid shortwave' + write(nu_diag,*) subname//' ERROR: Must use shortwave=dEdd' + endif + abort_list = trim(abort_list)//":8" + endif + + if (tr_iso .and. n_iso==0) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: isotopes activated but' + write(nu_diag,*) subname//' ERROR: not allocated in tracer array.' + write(nu_diag,*) subname//' ERROR: if tr_iso, n_iso must be > 0.' + endif + abort_list = trim(abort_list)//":31" + endif + + if (tr_aero .and. n_aero==0) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: aerosols activated but' + write(nu_diag,*) subname//' ERROR: not allocated in tracer array.' + write(nu_diag,*) subname//' ERROR: if tr_aero, n_aero must be > 0.' + endif + abort_list = trim(abort_list)//":9" + endif + + if (ncat < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: ncat < 1' + endif + abort_list = trim(abort_list)//":32" + endif + + if (nilyr < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: nilyr < 1' + endif + abort_list = trim(abort_list)//":33" + endif + + if (nslyr < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: nslyr < 1' + endif + abort_list = trim(abort_list)//":34" + endif + + if (nblyr < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: nblyr < 1' + write(nu_diag,*) subname//' ERROR: not allowed due to history implementation.' + endif + abort_list = trim(abort_list)//":35" + endif + + if (nfsd < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: nfsd < 1' + write(nu_diag,*) subname//' ERROR: not allowed due to history implementation.' + endif + abort_list = trim(abort_list)//":36" + endif + + if (trim(shortwave) /= 'dEdd' .and. tr_aero) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: tr_aero=T, invalid shortwave' + write(nu_diag,*) subname//' ERROR: Must use shortwave=dEdd' + endif + abort_list = trim(abort_list)//":10" + endif + + if ((rfracmin < -puny .or. rfracmin > c1+puny) .or. & + (rfracmax < -puny .or. rfracmax > c1+puny) .or. & + (rfracmin > rfracmax)) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: rfracmin, rfracmax must be between 0 and 1' + write(nu_diag,*) subname//' ERROR: and rfracmax >= rfracmin' + endif + abort_list = trim(abort_list)//":11" + endif + rfracmin = min(max(rfracmin,c0),c1) + rfracmax = min(max(rfracmax,c0),c1) + + if (trim(atm_data_type) == 'monthly' .and. calc_strair) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: atm_data_type=monthly and calc_strair=T' + abort_list = trim(abort_list)//":12" + endif + + if (ktherm == 2 .and. .not. calc_Tsfc) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: ktherm = 2 and calc_Tsfc=F' + abort_list = trim(abort_list)//":13" + endif + +! tcraig, is it really OK for users to run inconsistently? +! ech: yes, for testing sensitivities. It's not recommended for science runs + if (ktherm == 1 .and. 
trim(tfrz_option) /= 'linear_salt') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and tfrz_option = ',trim(tfrz_option) + write(nu_diag,*) subname//' WARNING: For consistency, set tfrz_option = linear_salt' + endif + endif + if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 2 and tfrz_option = ',trim(tfrz_option) + write(nu_diag,*) subname//' WARNING: For consistency, set tfrz_option = mushy' + endif + endif +!tcraig + if (ktherm == 1 .and. .not.sw_redist) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and sw_redist = ',sw_redist + write(nu_diag,*) subname//' WARNING: For consistency, set sw_redist = .true.' + endif + endif + + if (formdrag) then + if (trim(atmbndy) == 'constant') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and atmbndy=constant' + abort_list = trim(abort_list)//":14" + endif + + if (.not. calc_strair) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and calc_strair=F' + abort_list = trim(abort_list)//":15" + endif + + if (.not. tr_pond) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and tr_pond=F' + abort_list = trim(abort_list)//":16" + endif + + if (tr_pond_cesm) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and frzpnd=cesm' + abort_list = trim(abort_list)//":17" + endif + + if (.not. tr_lvl) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and tr_lvl=F' + abort_list = trim(abort_list)//":18" + endif + endif + + if (trim(fbot_xfer_type) == 'Cdn_ocn' .and. .not. formdrag) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=F and fbot_xfer_type=Cdn_ocn' + abort_list = trim(abort_list)//":19" + endif + + if(history_precision .ne. 4 .and. history_precision .ne. 8) then + write (nu_diag,*) subname//' ERROR: bad value for history_precision, allowed values: 4, 8' + abort_list = trim(abort_list)//":22" + endif + + if (.not.(trim(dumpfreq) == 'y' .or. trim(dumpfreq) == 'Y' .or. & + trim(dumpfreq) == 'm' .or. trim(dumpfreq) == 'M' .or. & + trim(dumpfreq) == 'd' .or. trim(dumpfreq) == 'D' .or. & + trim(dumpfreq) == 'h' .or. trim(dumpfreq) == 'H' .or. & + trim(dumpfreq) == '1' )) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: unrecognized dumpfreq=', trim(dumpfreq) + write(nu_diag,*) subname//' WARNING: No restarts files will be written' + write(nu_diag,*) subname//' WARNING: Allowed values : ''y'', ''m'', ''d'', ''h'', ''1''' + endif + endif + + ice_IOUnitsMinUnit = numin + ice_IOUnitsMaxUnit = numax + + call icepack_init_parameters(Cf_in=Cf) + call icepack_init_parameters(ksno_in=ksno) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname//'Icepack Abort1', & + file=__FILE__, line=__LINE__) + + wave_spec = .false. + if (tr_fsd .and. (trim(wave_spec_type) /= 'none')) wave_spec = .true. + + !----------------------------------------------------------------- + ! 
spew + !----------------------------------------------------------------- + + if (my_task == master_task) then + + write(nu_diag,*) ' Overview of model configuration with relevant parameters' + write(nu_diag,*) ' ========================================================' + write(nu_diag,*) ' For details, compare namelist output below with the' + write(nu_diag,*) ' Case Settings section in the model documentation.' + write(nu_diag,*) ' ' + write(nu_diag,*) ' Calendar' + write(nu_diag,*) '--------------------------------' + write(nu_diag,1022) ' days_per_year = ',days_per_year,' number of days in a model year' + if (use_leap_years) then + tmpstr2 = ' leap days are included' + else + tmpstr2 = ' leap days are not included' + endif + write(nu_diag,1012) ' use_leap_years = ',use_leap_years,trim(tmpstr2) + write(nu_diag,1002) ' dt = ', dt, ' model time step' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Grid, Discretization' + write(nu_diag,*) '--------------------------------' + if (trim(grid_type) == 'rectangular') & + write(nu_diag,*) 'grid_type = ', & + trim(grid_type),': internally defined, rectangular grid' + if (trim(grid_type) == 'regional') & + write(nu_diag,*) 'grid_type = ', & + trim(grid_type),': user-defined, regional grid' + if (trim(grid_type) == 'displaced_pole') & + write(nu_diag,*) 'grid_type = ', & + trim(grid_type),': user-defined grid with rotated north pole' + if (trim(grid_type) == 'tripole') then + write(nu_diag,*) 'grid_type = ', & + trim(grid_type),': user-defined grid with northern hemisphere zipper' + if (trim(ns_boundary_type) == 'tripole') then + tmpstr2 = ' on U points (nodes)' + elseif (trim(ns_boundary_type) == 'tripoleT') then + tmpstr2 = ' on T points (cell centers)' + endif + write(nu_diag,*) 'ns_boundary_type = ', trim(ns_boundary_type),trim(tmpstr2) + endif + if (trim(grid_type) /= 'rectangular') then + if (use_bathymetry) then + tmpstr2 = ' bathymetric input data is used' + else + tmpstr2 = ' bathymetric input data is not used' + endif + write(nu_diag,1012) ' use_bathymetry = ', use_bathymetry,trim(tmpstr2) + write(nu_diag,*) ' bathymetry_format= ', trim(bathymetry_format) + endif + write(nu_diag,1022) ' nilyr = ', nilyr, ' number of ice layers (equal thickness)' + write(nu_diag,1022) ' nslyr = ', nslyr, ' number of snow layers (equal thickness)' + write(nu_diag,1022) ' nblyr = ', nblyr, ' number of bio layers (equal thickness)' + if (trim(shortwave) == 'dEdd') & + write(nu_diag,*) 'dEdd interior and sfc scattering layers are used in both ice, snow (unequal)' + write(nu_diag,1022) ' ncat = ', ncat, ' number of ice categories' + if (kcatbound == 0) then + tmpstr2 = ' original ITD category bounds' + elseif (kcatbound == 1) then + tmpstr2 = ' round-number category bounds' + elseif (kcatbound == 2) then + tmpstr2 = ' WMO standard ITD categories' + elseif (kcatbound == -1) then + tmpstr2 = ' one thickness category' + endif + write(nu_diag,1022) ' kcatbound = ', kcatbound,trim(tmpstr2) + if (kitd==0) then + tmpstr2 = ' delta function ITD approx' + else + tmpstr2 = ' linear remapping ITD approx' + endif + write(nu_diag,1022) ' kitd = ', kitd,trim(tmpstr2) + + if (tr_fsd) then + tmpstr2 = ' floe size distribution is enabled' + ! 
write(nu_diag,1002) ' floediam = ', floediam, ' constant floe diameter' + else + tmpstr2 = ' floe size distribution is disabled' + endif + write(nu_diag,1012) ' tr_fsd = ', tr_fsd,trim(tmpstr2) + write(nu_diag,1022) ' nfsd = ', nfsd, ' number of floe size categories' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Horizontal Dynamics' + write(nu_diag,*) '--------------------------------' + if (kdyn == 1) then + tmpstr2 = ' elastic-viscous-plastic dynamics' + write(nu_diag,*) 'yield_curve = ', trim(yield_curve) + if (trim(yield_curve) == 'ellipse') & + write(nu_diag,1007) ' e_ratio = ', e_ratio, ' aspect ratio of ellipse' + elseif (kdyn == 2) then + tmpstr2 = ' elastic-anisotropic-plastic dynamics' + elseif (kdyn < 1) then + tmpstr2 = ' dynamics disabled' + endif + write(nu_diag,1022) ' kdyn = ', kdyn,trim(tmpstr2) + if (kdyn >= 1) then + if (revised_evp) then + tmpstr2 = ' revised EVP formulation used' + else + tmpstr2 = ' revised EVP formulation not used' + endif + write(nu_diag,1012) ' revised_evp = ', revised_evp,trim(tmpstr2) + write(nu_diag,1022) ' kevp_kernel = ', kevp_kernel,' EVP solver' + + write(nu_diag,1022) ' ndtd = ', ndtd, ' number of dynamics/advection/ridging steps per thermo timestep' + write(nu_diag,1022) ' ndte = ', ndte, ' number of EVP or EAP subcycles' + write(nu_diag,1007) ' arlx = ', arlx, ' stress equation factor alpha' + write(nu_diag,1007) ' brlx = ', brlx, ' stress equation factor beta' + + if (trim(coriolis) == 'latitude') then + tmpstr2 = ': latitude-dependent Coriolis parameter' + elseif (trim(coriolis) == 'constant') then + tmpstr2 = ' = 1.46e-4/s' + elseif (trim(coriolis) == 'zero') then + tmpstr2 = ' = 0.0' + endif + write(nu_diag,*) 'coriolis = ',trim(coriolis),trim(tmpstr2) + + if (trim(ssh_stress) == 'geostrophic') then + tmpstr2 = ': from ocean velocity' + elseif (trim(ssh_stress) == 'coupled') then + tmpstr2 = ': from coupled sea surface height gradients' + endif + write(nu_diag,*) 'ssh_stress = ',trim(ssh_stress),trim(tmpstr2) + + if (ktransport == 1) then + tmpstr2 = ' transport enabled' + if (trim(advection) == 'remap') then + tmpstr2 = ': linear remapping advection' +!deprecate upwind elseif (trim(advection) == 'upwind') then +!deprecate upwind tmpstr2 = ': donor cell (upwind) advection' + elseif (trim(advection) == 'none') then + tmpstr2 = ': advection off' + endif + write(nu_diag,*) 'advection = ', trim(advection),trim(tmpstr2) + else + tmpstr2 = ' transport disabled' + endif + write(nu_diag,1022) ' ktransport = ', ktransport,trim(tmpstr2) + + if (basalstress) then + tmpstr2 = ' use basal stress parameterization for landfast ice' + else + tmpstr2 = ' basal stress not used for landfast ice' + endif + write(nu_diag,1012) ' basalstress = ', basalstress,trim(tmpstr2) + if (basalstress) then + write(nu_diag,1007) ' k1 = ', k1, ' free parameter for landfast ice' + write(nu_diag,1007) ' k2 = ', k2, ' free parameter for landfast ice' + write(nu_diag,1007) ' alphab = ', alphab, ' factor for landfast ice' + write(nu_diag,1007) ' threshold_hw = ', threshold_hw, ' max water depth for grounding ice' + endif + write(nu_diag,1007) ' Ktens = ', Ktens, ' tensile strength factor' + endif !
kdyn enabled + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Mechanical Deformation (Ridging) and Ice Strength' + write(nu_diag,*) '--------------------------------------------------' + if (kridge == 1) then + tmpstr2 = ' ridging enabled' + else + tmpstr2 = ' ridging disabled' + endif + write(nu_diag,1012) ' tr_lvl = ', tr_lvl,' ridging related tracers' + write(nu_diag,1022) ' kridge = ', kridge,trim(tmpstr2) + if (kridge == 1) then + if (krdg_partic == 1) then + tmpstr2 = ' new participation function' + else + tmpstr2 = ' old participation function' + endif + write(nu_diag,1022) ' krdg_partic = ', krdg_partic,trim(tmpstr2) + if (krdg_partic == 1) & + write(nu_diag,1007) ' mu_rdg = ', mu_rdg,' e-folding scale of ridged ice' + if (krdg_redist == 1) then + tmpstr2 = ' new redistribution function' + else + tmpstr2 = ' old redistribution function' + endif + write(nu_diag,1022) ' krdg_redist = ', krdg_redist,trim(tmpstr2) + endif + + if (kstrength == 0) then + tmpstr2 = ' Hibler (1979)' + elseif (kstrength == 1) then + tmpstr2 = ' Rothrock (1975)' + endif + write(nu_diag,1022) ' kstrength = ', kstrength,trim(tmpstr2) + if (kstrength == 0) then + write(nu_diag,1009) ' Pstar = ', Pstar, ' P* strength factor' + write(nu_diag,1007) ' Cstar = ', Cstar, ' C* strength exponent factor' + elseif (kstrength == 1) then + write(nu_diag,1007) ' Cf = ', Cf, ' ratio of ridging work to PE change' + endif + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Thermodynamics' + write(nu_diag,*) '--------------------------------' + + if (ktherm == 1) then + tmpstr2 = ' Bitz and Lipscomb 1999 thermo' + elseif (ktherm == 2) then + tmpstr2 = ' mushy-layer thermo' + elseif (ktherm == 0) then + tmpstr2 = ' zero-layer thermo' + elseif (ktherm < 0) then + tmpstr2 = ' thermodynamics disabled' + endif + if (ktherm >= 0) then + write(nu_diag,1022) ' ktherm = ', ktherm,trim(tmpstr2) + write(nu_diag,1002) ' dt = ', dt, ' thermodynamic time step' + write(nu_diag,1007) ' ksno = ', ksno,' snow thermal conductivity' + if (ktherm == 1) & + write(nu_diag,*) 'conduct = ', trim(conduct),' ice thermal conductivity' + write(nu_diag,1012) ' sw_redist = ', sw_redist,' redistribute internal shortwave to surface' + write(nu_diag,1002) ' sw_frac = ', sw_frac,' fraction redistributed' + write(nu_diag,1002) ' sw_dtemp = ', sw_dtemp,' temperature difference from freezing to redistribute' + if (ktherm == 2) then + write(nu_diag,1002) ' a_rapid_mode = ', a_rapid_mode,' brine channel diameter' + write(nu_diag,1007) ' Rac_rapid_mode = ', Rac_rapid_mode,' critical Rayleigh number' + write(nu_diag,1007) ' aspect_rapid_mode= ', aspect_rapid_mode,' brine convection aspect ratio' + write(nu_diag,*) 'dSdt_slow_mode = ', dSdt_slow_mode,' drainage strength parameter' + write(nu_diag,1007) ' phi_c_slow_mode = ', phi_c_slow_mode,' critical liquid fraction' + write(nu_diag,1007) ' phi_i_mushy = ', phi_i_mushy,' solid fraction at lower boundary' + endif + endif + !write(nu_diag,1007) ' hfrazilmin = ', hfrazilmin,' minimum new frazil ice thickness' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Radiation' + write(nu_diag,*) '--------------------------------' + if (trim(shortwave) == 'dEdd') then + tmpstr2 = ': delta-Eddington multiple-scattering method' + elseif (trim(shortwave) == 'ccsm3') then + tmpstr2 = ': NCAR CCSM3 distribution method' + endif + write(nu_diag,*) ' shortwave = ', trim(shortwave),trim(tmpstr2) + if (trim(shortwave) == 'dEdd') then + write(nu_diag,1007) ' R_ice = ', R_ice,' tuning parameter for sea ice albedo' + write(nu_diag,1007) ' R_pnd = ', 
R_pnd,' tuning parameter for ponded sea ice albedo' + write(nu_diag,1007) ' R_snw = ', R_snw,' tuning parameter for snow broadband albedo' + write(nu_diag,1007) ' dT_mlt = ', dT_mlt,' change in temperature per change in snow grain radius' + write(nu_diag,1002) ' rsnw_mlt = ', rsnw_mlt,' maximum melting snow grain radius' + write(nu_diag,1007) ' kalg = ', kalg,' absorption coefficient for algae' + else + if (trim(albedo_type) == 'ccsm3') then + tmpstr2 = ': NCAR CCSM3 albedos' + elseif (trim(albedo_type) == 'constant') then + tmpstr2 = ': four constant albedos' + endif + write(nu_diag,*) 'albedo_type = ', trim(albedo_type),trim(tmpstr2) + if (trim(albedo_type) == 'ccsm3') then + write(nu_diag,1007) ' albicev = ', albicev,' visible ice albedo for thicker ice' + write(nu_diag,1007) ' albicei = ', albicei,' near infrared ice albedo for thicker ice' + write(nu_diag,1007) ' albsnowv = ', albsnowv,' visible, cold snow albedo' + write(nu_diag,1007) ' albsnowi = ', albsnowi,' near infrared, cold snow albedo' + write(nu_diag,1007) ' ahmax = ', ahmax,' albedo is constant above this thickness' + endif + endif + write(nu_diag,1007) ' emissivity = ', emissivity,' emissivity of snow and ice' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Atmospheric Forcing / Coupling' + write(nu_diag,*) '--------------------------------' + write(nu_diag,1012) ' calc_Tsfc = ', calc_Tsfc,' calculate surface temperature as part of thermo' + write(nu_diag,1012) ' calc_strair = ', calc_strair,' calculate wind stress and speed' + write(nu_diag,1012) ' rotate_wind = ', rotate_wind,' rotate wind/stress to computational grid' + write(nu_diag,1012) ' formdrag = ', formdrag,' use form drag parameterization' + if (trim(atmbndy) == 'default') then + tmpstr2 = ': stability-based boundary layer' + write(nu_diag,1012) ' highfreq = ', highfreq,' high-frequency atmospheric coupling' + write(nu_diag,1022) ' natmiter = ', natmiter,' number of atmo boundary layer iterations' + write(nu_diag,1006) ' atmiter_conv = ', atmiter_conv,' convergence criterion for ustar' + elseif (trim(atmbndy) == 'constant') then + tmpstr2 = ': boundary layer uses bulk transfer coefficients' + endif + write(nu_diag,*) 'atmbndy = ', trim(atmbndy),trim(tmpstr2) + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Oceanic Forcing / Coupling' + write(nu_diag,*) '--------------------------------' + if (oceanmixed_ice) then + tmpstr2 = ' ocean mixed layer calculation (SST) enabled' + else + tmpstr2 = ' ocean mixed layer calculation (SST) disabled' + endif + write(nu_diag,1012) ' oceanmixed_ice = ', oceanmixed_ice,trim(tmpstr2) + if (oceanmixed_ice) then + write(nu_diag,*) ' WARNING: ocean mixed layer ON' + write(nu_diag,*) ' WARNING: will impact ocean forcing interaction' + write(nu_diag,*) ' WARNING: coupled forcing will be modified by mixed layer routine' + endif + if (trim(tfrz_option) == 'minus1p8') then + tmpstr2 = ': constant ocean freezing temperature (-1.8C)' + elseif (trim(tfrz_option) == 'linear_salt') then + tmpstr2 = ': linear function of salinity (use with ktherm=1)' + elseif (trim(tfrz_option) == 'mushy') then + tmpstr2 = ': Assur (1958) as in mushy-layer thermo (ktherm=2)' + endif + write(nu_diag,*) 'tfrz_option = ', trim(tfrz_option),trim(tmpstr2) + if (update_ocn_f) then + tmpstr2 = ' frazil water/salt fluxes included in ocean fluxes' + else + tmpstr2 = ' frazil water/salt fluxes not included in ocean fluxes' + endif + write(nu_diag,1012) ' update_ocn_f = ', update_ocn_f,trim(tmpstr2) + if (l_mpond_fresh .and. 
tr_pond_topo) then + tmpstr2 = ' retain (topo) pond water until ponds drain' + else + tmpstr2 = ' pond water not retained on ice (virtual only)' + endif + write(nu_diag,1012) ' l_mpond_fresh = ', l_mpond_fresh,trim(tmpstr2) + if (trim(fbot_xfer_type) == 'constant') then + tmpstr2 = ': ocean heat transfer coefficient is constant' + elseif (trim(fbot_xfer_type) == 'Cdn_ocn') then + tmpstr2 = ': variable ocean heat transfer coefficient' ! only used with form_drag=T? + endif + write(nu_diag,*) 'fbot_xfer_type = ', trim(fbot_xfer_type),trim(tmpstr2) + write(nu_diag,1006) ' ustar_min = ', ustar_min,' minimum value of ocean friction velocity' + + if (tr_fsd) then + if (wave_spec) then + tmpstr2 = ' use wave spectrum for floe size distribution' + else + tmpstr2 = ' floe size distribution does not use wave spectrum' + endif + write(nu_diag,1012) ' wave_spec = ', wave_spec,trim(tmpstr2) + if (wave_spec) then + if (trim(wave_spec_type) == 'none') then + tmpstr2 = ': no wave data provided, no wave-ice interactions' + elseif (trim(wave_spec_type) == 'profile') then + tmpstr2 = ': use fixed dummy wave spectrum for testing' + elseif (trim(wave_spec_type) == 'constant') then + tmpstr2 = ': constant wave spectrum data file provided for testing' + elseif (trim(wave_spec_type) == 'random') then + tmpstr2 = ': wave data file provided, spectrum generated using random number' + endif + write(nu_diag,*) 'wave_spec_type = ', trim(wave_spec_type),trim(tmpstr2) + endif + write(nu_diag,1022) ' nfreq = ', nfreq,' number of wave spectral forcing frequencies' + endif + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Age related tracers' + write(nu_diag,*) '--------------------------------' + write(nu_diag,1012) ' tr_iage = ', tr_iage,' chronological ice age' + write(nu_diag,1012) ' tr_FY = ', tr_FY,' first-year ice area' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Melt ponds' + write(nu_diag,*) '--------------------------------' + if (tr_pond_cesm) then + write(nu_diag,1012) ' tr_pond_cesm = ', tr_pond_cesm,' CESM pond formulation' + write(nu_diag,1007) ' pndaspect = ', pndaspect + elseif (tr_pond_lvl) then + write(nu_diag,1012) ' tr_pond_lvl = ', tr_pond_lvl,' level-ice pond formulation' + write(nu_diag,1007) ' pndaspect = ', pndaspect + write(nu_diag,1006) ' dpscale = ', dpscale,' time scale for flushing in permeable ice' + if (trim(frzpnd) == 'hlid') then + tmpstr2 = ': Stefan refreezing with pond ice thickness' + elseif (trim(frzpnd) == 'cesm') then + tmpstr2 = ': CESM refreezing empirical formula' + endif + write(nu_diag,*) ' frzpnd = ', trim(frzpnd),trim(tmpstr2) + write(nu_diag,1007) ' hs1 = ', hs1,' snow depth of transition to pond ice' + elseif (tr_pond_topo) then + write(nu_diag,1012) ' tr_pond_topo = ', tr_pond_topo,' topo pond formulation' + write(nu_diag,1007) ' hp1 = ', hp1,' critical ice lid thickness for topo ponds' + elseif (trim(shortwave) == 'ccsm3') then + write(nu_diag,*) 'Pond effects on radiation are treated implicitly in the ccsm3 shortwave scheme' + else + write(nu_diag,*) ' Using default dEdd melt pond scheme for testing only' + endif + + if (trim(shortwave) == 'dEdd') then + write(nu_diag,1007) ' hs0 = ', hs0,' snow depth of transition to bare sea ice' + endif + + write(nu_diag,1007) ' rfracmin = ', rfracmin,' minimum fraction of melt water added to ponds' + write(nu_diag,1007) ' rfracmax = ', rfracmax,' maximum fraction of melt water added to ponds' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Primary state variables, tracers' + write(nu_diag,*) ' (excluding biogeochemistry)' + 
write(nu_diag,*) '---------------------------------' + write(nu_diag,*) 'Conserved properties (all tracers are conserved):' + write(nu_diag,*) 'ice concentration, volume and enthalpy' + write(nu_diag,*) 'snow volume and enthalpy' + if (ktherm == 2) write(nu_diag,*) 'ice salinity' + if (tr_fsd) write(nu_diag,1012) ' tr_fsd = ', tr_fsd,' floe size distribution' + if (tr_lvl) write(nu_diag,1012) ' tr_lvl = ', tr_lvl,' ridging related tracers' + if (tr_pond_lvl) write(nu_diag,1012) ' tr_pond_lvl = ', tr_pond_lvl,' level-ice pond formulation' + if (tr_pond_topo) write(nu_diag,1012) ' tr_pond_topo = ', tr_pond_topo,' topo pond formulation' + if (tr_pond_cesm) write(nu_diag,1012) ' tr_pond_cesm = ', tr_pond_cesm,' CESM pond formulation' + if (tr_iage) write(nu_diag,1012) ' tr_iage = ', tr_iage,' chronological ice age' + if (tr_FY) write(nu_diag,1012) ' tr_FY = ', tr_FY,' first-year ice area' + if (tr_iso) write(nu_diag,1012) ' tr_iso = ', tr_iso,' diagnostic isotope tracers' + if (tr_aero) write(nu_diag,1012) ' tr_aero = ', tr_aero,' CESM aerosol tracers' + write(nu_diag,*) 'Non-conserved properties:' + write(nu_diag,*) 'ice surface temperature' + write(nu_diag,*) 'ice velocity components and internal stress' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Other ice_in namelist parameters:' + write(nu_diag,*) ' ==================================== ' + write(nu_diag,*) ' ' + if (trim(runid) /= 'unknown') & + write(nu_diag,*) ' runid = ', & + trim(runid) + write(nu_diag,1030) ' runtype = ', & + trim(runtype) + write(nu_diag,1020) ' year_init = ', year_init + write(nu_diag,1020) ' istep0 = ', istep0 + write(nu_diag,1020) ' npt = ', npt + write(nu_diag,1020) ' diagfreq = ', diagfreq + write(nu_diag,1010) ' print_global = ', print_global + write(nu_diag,1010) ' print_points = ', print_points + write(nu_diag,1030) ' bfbflag = ', bfbflag + write(nu_diag,1020) ' numin = ', numin + write(nu_diag,1020) ' numax = ', numax + write(nu_diag,1050) ' histfreq = ', histfreq(:) + write(nu_diag,1040) ' histfreq_n = ', histfreq_n(:) + write(nu_diag,1010) ' hist_avg = ', hist_avg + if (.not. hist_avg) write(nu_diag,*) ' History data will be snapshots' + write(nu_diag,*) ' history_dir = ', & + trim(history_dir) + write(nu_diag,*) ' history_file = ', & + trim(history_file) + write(nu_diag,1020) ' history_precision = ', history_precision + write(nu_diag,*) ' history_format = ', & + trim(history_format) + if (write_ic) then + write(nu_diag,*) ' Initial condition will be written in ', & + trim(incond_dir) + endif + write(nu_diag,1030) ' dumpfreq = ', & + trim(dumpfreq) + write(nu_diag,1020) ' dumpfreq_n = ', dumpfreq_n + write(nu_diag,1010) ' dump_last = ', dump_last + write(nu_diag,1010) ' restart = ', restart + write(nu_diag,*) ' restart_dir = ', & + trim(restart_dir) + write(nu_diag,*) ' restart_ext = ', restart_ext + write(nu_diag,*) ' restart_coszen = ', restart_coszen + write(nu_diag,*) ' restart_format = ', & + trim(restart_format) + write(nu_diag,*) ' lcdf64 = ', & + lcdf64 + write(nu_diag,*) ' restart_file = ', & + trim(restart_file) + write(nu_diag,*) ' pointer_file = ', & + trim(pointer_file) + write(nu_diag,*) ' use_restart_time = ', use_restart_time + write(nu_diag,*) ' ice_ic = ', & + trim(ice_ic) + if (trim(grid_type) /= 'rectangular' .and.
& + trim(grid_type) /= 'column') then + write(nu_diag,*) ' grid_file = ', & + trim(grid_file) + write(nu_diag,*) ' gridcpl_file = ', & + trim(gridcpl_file) + write(nu_diag,*) ' bathymetry_file = ', & + trim(bathymetry_file) + write(nu_diag,*) ' kmt_file = ', & + trim(kmt_file) + endif + write(nu_diag,1010) ' close_boundaries = ', & + close_boundaries + write(nu_diag,1010) ' orca_halogrid = ', & + orca_halogrid + + write(nu_diag,1010) ' conserv_check = ', conserv_check + + write(nu_diag,1020) ' fyear_init = ', & + fyear_init + write(nu_diag,1020) ' ycycle = ', ycycle + write(nu_diag,*) ' atm_data_type = ', & + trim(atm_data_type) + if (trim(atm_data_type) /= 'default') then + write(nu_diag,*) ' atm_data_dir = ', & + trim(atm_data_dir) + write(nu_diag,*) ' precip_units = ', & + trim(precip_units) + elseif (trim(atm_data_type)=='default') then + write(nu_diag,*) ' default_season = ', trim(default_season) + endif + + if (wave_spec) then + write(nu_diag,*) ' wave_spec_file = ', trim(wave_spec_file) + endif + if (trim(bgc_data_type) == 'ncar' .or. & + trim(ocn_data_type) == 'ncar') then + write(nu_diag,*) ' oceanmixed_file = ', & + trim(oceanmixed_file) + endif + if (cpl_bgc) then + write(nu_diag,1000) ' BGC coupling is switched ON' + else + write(nu_diag,1000) ' BGC coupling is switched OFF' + endif + write(nu_diag,*) ' bgc_data_type = ', & + trim(bgc_data_type) + write(nu_diag,*) ' fe_data_type = ', & + trim(fe_data_type) + write(nu_diag,*) ' ice_data_type = ', & + trim(ice_data_type) + write(nu_diag,*) ' bgc_data_dir = ', & + trim(bgc_data_dir) + write(nu_diag,*) ' ocn_data_type = ', & + trim(ocn_data_type) + if (trim(bgc_data_type) /= 'default' .or. & + trim(ocn_data_type) /= 'default') then + write(nu_diag,*) ' ocn_data_dir = ', & + trim(ocn_data_dir) + write(nu_diag,1010) ' restore_ocn = ', & + restore_ocn + endif + write(nu_diag,1010) ' restore_ice = ', & + restore_ice + if (restore_ice .or. restore_ocn) & + write(nu_diag,1020) ' trestore = ', trestore + + write(nu_diag,*) ' ' + write(nu_diag,'(a30,2f8.2)') 'Diagnostic point 1: lat, lon =', & + latpnt(1), lonpnt(1) + write(nu_diag,'(a30,2f8.2)') 'Diagnostic point 2: lat, lon =', & + latpnt(2), lonpnt(2) + + ! tracer restarts + write(nu_diag,1010) ' restart_age = ', restart_age + write(nu_diag,1010) ' restart_FY = ', restart_FY + write(nu_diag,1010) ' restart_lvl = ', restart_lvl + write(nu_diag,1010) ' restart_pond_cesm = ', restart_pond_cesm + write(nu_diag,1010) ' restart_pond_lvl = ', restart_pond_lvl + write(nu_diag,1010) ' restart_pond_topo = ', restart_pond_topo + write(nu_diag,1010) ' restart_iso = ', restart_iso + write(nu_diag,1010) ' restart_aero = ', restart_aero + write(nu_diag,1010) ' restart_fsd = ', restart_fsd + + write(nu_diag,1020) ' n_iso = ', n_iso + write(nu_diag,1020) ' n_aero = ', n_aero + write(nu_diag,1020) ' n_zaero = ', n_zaero + write(nu_diag,1020) ' n_algae = ', n_algae + write(nu_diag,1020) ' n_doc = ', n_doc + write(nu_diag,1020) ' n_dic = ', n_dic + write(nu_diag,1020) ' n_don = ', n_don + write(nu_diag,1020) ' n_fed = ', n_fed + write(nu_diag,1020) ' n_fep = ', n_fep + + endif ! my_task = master_task + + if (grid_type /= 'displaced_pole' .and. & + grid_type /= 'tripole' .and. & + grid_type /= 'column' .and. & + grid_type /= 'rectangular' .and. & + grid_type /= 'cpom_grid' .and. & + grid_type /= 'regional' .and. & + grid_type /= 'latlon' ) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown grid_type=',trim(grid_type) + abort_list = trim(abort_list)//":20" + endif + + ! 
check for valid kevp_kernel + ! tcraig, kevp_kernel=2 is not validated, do not allow use + ! use "102" to test "2" for now + if (kevp_kernel /= 0) then + if (kevp_kernel == 102) then + kevp_kernel = 2 + else + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: kevp_kernel = ',kevp_kernel + if (kevp_kernel == 2) then + if (my_task == master_task) write(nu_diag,*) subname//' kevp_kernel=2 not validated, use kevp_kernel=102 for testing until it is validated' + endif + abort_list = trim(abort_list)//":21" + endif + endif + + if (abort_list /= "") then + call flush_fileunit(nu_diag) + endif + call ice_barrier() + if (abort_list /= "") then + write(nu_diag,*) subname,' ERROR: abort_list = ',trim(abort_list) + call abort_ice (subname//' ABORTING on input ERRORS', & + file=__FILE__, line=__LINE__) + endif + + call icepack_init_parameters(ustar_min_in=ustar_min, albicev_in=albicev, albicei_in=albicei, & + albsnowv_in=albsnowv, albsnowi_in=albsnowi, natmiter_in=natmiter, atmiter_conv_in=atmiter_conv, & + emissivity_in=emissivity, & + ahmax_in=ahmax, shortwave_in=shortwave, albedo_type_in=albedo_type, R_ice_in=R_ice, R_pnd_in=R_pnd, & + R_snw_in=R_snw, dT_mlt_in=dT_mlt, rsnw_mlt_in=rsnw_mlt, & + kstrength_in=kstrength, krdg_partic_in=krdg_partic, krdg_redist_in=krdg_redist, mu_rdg_in=mu_rdg, & + atmbndy_in=atmbndy, calc_strair_in=calc_strair, formdrag_in=formdrag, highfreq_in=highfreq, & + kitd_in=kitd, kcatbound_in=kcatbound, hs0_in=hs0, dpscale_in=dpscale, frzpnd_in=frzpnd, & + rfracmin_in=rfracmin, rfracmax_in=rfracmax, pndaspect_in=pndaspect, hs1_in=hs1, hp1_in=hp1, & + ktherm_in=ktherm, calc_Tsfc_in=calc_Tsfc, conduct_in=conduct, & + a_rapid_mode_in=a_rapid_mode, Rac_rapid_mode_in=Rac_rapid_mode, & + aspect_rapid_mode_in=aspect_rapid_mode, dSdt_slow_mode_in=dSdt_slow_mode, & + phi_c_slow_mode_in=phi_c_slow_mode, phi_i_mushy_in=phi_i_mushy, conserv_check_in=conserv_check, & + wave_spec_type_in = wave_spec_type, & + wave_spec_in=wave_spec, nfreq_in=nfreq, & + tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & + Pstar_in=Pstar, Cstar_in=Cstar, & + sw_redist_in=sw_redist, sw_frac_in=sw_frac, sw_dtemp_in=sw_dtemp) + call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & + tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & + tr_fsd_in=tr_fsd, tr_pond_in=tr_pond, & + tr_pond_cesm_in=tr_pond_cesm, tr_pond_lvl_in=tr_pond_lvl, tr_pond_topo_in=tr_pond_topo) + call icepack_init_tracer_sizes(ncat_in=ncat, nilyr_in=nilyr, nslyr_in=nslyr, nblyr_in=nblyr, & + nfsd_in=nfsd, n_algae_in=n_algae, n_iso_in=n_iso, n_aero_in=n_aero, & + n_DOC_in=n_DOC, n_DON_in=n_DON, & + n_DIC_in=n_DIC, n_fed_in=n_fed, n_fep_in=n_fep, n_zaero_in=n_zaero) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + 1000 format (a30,2x,f9.2) ! a30 to align formatted, unformatted statements + 1002 format (a20,1x,f7.2,a) + 1005 format (a30,2x,f12.6) ! float + 1006 format (a20,2x,f10.6,a) + 1007 format (a20,2x,f6.2,a) + 1009 format (a20,2x,d13.6,a) ! float, exponential notation + 1010 format (a30,2x,l6) ! logical + 1012 format (a20,2x,l3,1x,a) ! logical + 1020 format (a30,2x,i6) ! integer + 1022 format (a20,2x,i3,1x,a) ! integer + 1030 format (a30, a8) ! character + 1040 format (a30,2x,6i6) ! integer + 1050 format (a30,2x,6a6) ! character + + end subroutine input_data + +!======================================================================= + +! Initialize state for the itd model +! +! 
authors: C. M. Bitz, UW +! William H. Lipscomb, LANL + + subroutine init_state + + use ice_blocks, only: block, get_block, nx_block, ny_block + use ice_domain, only: nblocks, blocks_ice + use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero, nfsd + use ice_flux, only: sst, Tf, Tair, salinz, Tmltz + use ice_grid, only: tmask, ULON, TLAT + use ice_state, only: trcr_depend, aicen, trcrn, vicen, vsnon, & + aice0, aice, vice, vsno, trcr, aice_init, bound_state, & + n_trcr_strata, nt_strata, trcr_base, uvel, vvel + + integer (kind=int_kind) :: & + ilo, ihi , & ! physical domain indices + jlo, jhi , & ! physical domain indices + iglob(nx_block), & ! global indices + jglob(ny_block), & ! global indices + i, j , & ! horizontal indices + k , & ! vertical index + it , & ! tracer index + iblk ! block index + + logical (kind=log_kind) :: & + heat_capacity ! from icepack + + integer (kind=int_kind) :: ntrcr + logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_fsd + logical (kind=log_kind) :: tr_pond_cesm, tr_pond_lvl, tr_pond_topo + integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY + integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd + integer (kind=int_kind) :: nt_isosno, nt_isoice, nt_aero, nt_fsd + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname='(init_state)' + + !----------------------------------------------------------------- + + call icepack_query_parameters(heat_capacity_out=heat_capacity) + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_fsd_out=tr_fsd, & + tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo) + call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & + nt_qice_out=nt_qice, nt_qsno_out=nt_qsno, nt_iage_out=nt_iage, nt_fy_out=nt_fy, & + nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & + nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Check number of layers in ice and snow. 
+ !----------------------------------------------------------------- + + if (my_task == master_task) then + + if (nilyr < 1) then + write(nu_diag,*) subname//' ERROR: Must have at least one ice layer' + write(nu_diag,*) subname//' ERROR: nilyr =', nilyr + call abort_ice (error_message=subname//' Not enough ice layers', & + file=__FILE__, line=__LINE__) + endif + + if (nslyr < 1) then + write(nu_diag,*) subname//' ERROR: Must have at least one snow layer' + write(nu_diag,*) subname//' ERROR: nslyr =', nslyr + call abort_ice(error_message=subname//' Not enough snow layers', & + file=__FILE__, line=__LINE__) + endif + + if (.not.heat_capacity) then + + if (nilyr > 1) then + write(nu_diag,*) subname//' ERROR: Must have nilyr = 1 if heat_capacity=F' + write(nu_diag,*) subname//' ERROR: nilyr =', nilyr + call abort_ice(error_message=subname//' Too many ice layers', & + file=__FILE__, line=__LINE__) + endif + + if (nslyr > 1) then + write(nu_diag,*) subname//' ERROR: Must have nslyr = 1 if heat_capacity=F' + write(nu_diag,*) subname//' ERROR: nslyr =', nslyr + call abort_ice(error_message=subname//' Too many snow layers', & + file=__FILE__, line=__LINE__) + endif + + endif ! heat_capacity = F + + endif ! my_task + + !----------------------------------------------------------------- + ! Set tracer types + !----------------------------------------------------------------- + + trcr_depend(nt_Tsfc) = 0 ! ice/snow surface temperature + do k = 1, nilyr + trcr_depend(nt_sice + k - 1) = 1 ! volume-weighted ice salinity + trcr_depend(nt_qice + k - 1) = 1 ! volume-weighted ice enthalpy + enddo + do k = 1, nslyr + trcr_depend(nt_qsno + k - 1) = 2 ! volume-weighted snow enthalpy + enddo + if (tr_iage) trcr_depend(nt_iage) = 1 ! volume-weighted ice age + if (tr_FY) trcr_depend(nt_FY) = 0 ! area-weighted first-year ice area + if (tr_lvl) trcr_depend(nt_alvl) = 0 ! level ice area + if (tr_lvl) trcr_depend(nt_vlvl) = 1 ! level ice volume + if (tr_pond_cesm) then + trcr_depend(nt_apnd) = 0 ! melt pond area + trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth + endif + if (tr_pond_lvl) then + trcr_depend(nt_apnd) = 2+nt_alvl ! melt pond area + trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth + trcr_depend(nt_ipnd) = 2+nt_apnd ! refrozen pond lid + endif + if (tr_pond_topo) then + trcr_depend(nt_apnd) = 0 ! melt pond area + trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth + trcr_depend(nt_ipnd) = 2+nt_apnd ! refrozen pond lid + endif + if (tr_fsd) then + do it = 1, nfsd + trcr_depend(nt_fsd + it - 1) = 0 ! area-weighted floe size distribution + enddo + endif + if (tr_iso) then ! isotopes + do it = 1, n_iso + trcr_depend(nt_isosno+it-1) = 2 ! snow + trcr_depend(nt_isoice+it-1) = 1 ! ice + enddo + endif + if (tr_aero) then ! volume-weighted aerosols + do it = 1, n_aero + trcr_depend(nt_aero+(it-1)*4 ) = 2 ! snow + trcr_depend(nt_aero+(it-1)*4+1) = 2 ! snow + trcr_depend(nt_aero+(it-1)*4+2) = 1 ! ice + trcr_depend(nt_aero+(it-1)*4+3) = 1 ! ice + enddo + endif + + trcr_base = c0 + + do it = 1, ntrcr + ! mask for base quantity on which tracers are carried + if (trcr_depend(it) == 0) then ! area + trcr_base(it,1) = c1 + elseif (trcr_depend(it) == 1) then ! ice volume + trcr_base(it,2) = c1 + elseif (trcr_depend(it) == 2) then ! snow volume + trcr_base(it,3) = c1 + else + trcr_base(it,1) = c1 ! default: ice area + trcr_base(it,2) = c0 + trcr_base(it,3) = c0 + endif + + ! initialize number of underlying tracer layers + n_trcr_strata(it) = 0 + ! 
default indices of underlying tracer layers + nt_strata (it,1) = 0 + nt_strata (it,2) = 0 + enddo + + if (tr_pond_cesm) then + n_trcr_strata(nt_hpnd) = 1 ! melt pond depth + nt_strata (nt_hpnd,1) = nt_apnd ! on melt pond area + endif + if (tr_pond_lvl) then + n_trcr_strata(nt_apnd) = 1 ! melt pond area + nt_strata (nt_apnd,1) = nt_alvl ! on level ice area + n_trcr_strata(nt_hpnd) = 2 ! melt pond depth + nt_strata (nt_hpnd,2) = nt_apnd ! on melt pond area + nt_strata (nt_hpnd,1) = nt_alvl ! on level ice area + n_trcr_strata(nt_ipnd) = 2 ! refrozen pond lid + nt_strata (nt_ipnd,2) = nt_apnd ! on melt pond area + nt_strata (nt_ipnd,1) = nt_alvl ! on level ice area + endif + if (tr_pond_topo) then + n_trcr_strata(nt_hpnd) = 1 ! melt pond depth + nt_strata (nt_hpnd,1) = nt_apnd ! on melt pond area + n_trcr_strata(nt_ipnd) = 1 ! refrozen pond lid + nt_strata (nt_ipnd,1) = nt_apnd ! on melt pond area + endif + + !----------------------------------------------------------------- + ! Set state variables + !----------------------------------------------------------------- + +!MHRI: CHECK THIS OMP + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block, & + !$OMP iglob,jglob) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + iglob = this_block%i_glob + jglob = this_block%j_glob + + call set_state_var (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + iglob, jglob, & + ice_ic, tmask(:,:, iblk), & + ULON (:,:, iblk), & + TLAT (:,:, iblk), & + Tair (:,:, iblk), sst (:,:, iblk), & + Tf (:,:, iblk), & + salinz(:,:,:, iblk), Tmltz(:,:,:, iblk), & + aicen(:,:, :,iblk), trcrn(:,:,:,:,iblk), & + vicen(:,:, :,iblk), vsnon(:,:, :,iblk), & + uvel (:,:, iblk), vvel (:,:, iblk)) + + enddo ! iblk + !$OMP END PARALLEL DO + + !----------------------------------------------------------------- + ! ghost cell updates + !----------------------------------------------------------------- + + call bound_state (aicen, & + vicen, vsnon, & + ntrcr, trcrn) + + !----------------------------------------------------------------- + ! compute aggregate ice state and open water area + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk,it,i,j) + do iblk = 1, nblocks + + do j = 1, ny_block + do i = 1, nx_block + aice(i,j,iblk) = c0 + vice(i,j,iblk) = c0 + vsno(i,j,iblk) = c0 + do it = 1, ntrcr + trcr(i,j,it,iblk) = c0 + enddo + + if (tmask(i,j,iblk)) & + call icepack_aggregate(ncat = ncat, & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + ntrcr = ntrcr, & + trcr_depend = trcr_depend(:), & + trcr_base = trcr_base(:,:), & + n_trcr_strata = n_trcr_strata(:), & + nt_strata = nt_strata(:,:)) + + aice_init(i,j,iblk) = aice(i,j,iblk) + + enddo + enddo + + enddo ! iblk + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine init_state + +!======================================================================= + +! Initialize state in each ice thickness category +! +! authors: C. M. Bitz +! William H. 
Lipscomb, LANL + + subroutine set_state_var (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + iglob, jglob, & + ice_ic, tmask, & + ULON, & + TLAT, & + Tair, sst, & + Tf, & + salinz, Tmltz, & + aicen, trcrn, & + vicen, vsnon, & + uvel, vvel) + + use ice_arrays_column, only: hin_max + use ice_domain_size, only: nilyr, nslyr, nx_global, ny_global, ncat + use ice_grid, only: grid_type + use ice_forcing, only: ice_data_type + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + ilo, ihi , & ! physical domain indices + jlo, jhi , & ! + iglob(nx_block) , & ! global indices + jglob(ny_block) ! + + character(len=char_len_long), intent(in) :: & + ice_ic ! method of ice cover initialization + + logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & + tmask ! true for ice/ocean cells + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + ULON , & ! longitude of velocity pts (radians) + TLAT ! latitude of temperature pts (radians) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + Tair , & ! air temperature (K) + Tf , & ! freezing temperature (C) + sst ! sea surface temperature (C) + + real (kind=dbl_kind), dimension (nx_block,ny_block,nilyr), intent(in) :: & + salinz , & ! initial salinity profile + Tmltz ! initial melting temperature profile + + real (kind=dbl_kind), dimension (nx_block,ny_block,ncat), intent(out) :: & + aicen , & ! concentration of ice + vicen , & ! volume per unit area of ice (m) + vsnon ! volume per unit area of snow (m) + + real (kind=dbl_kind), intent(out), dimension (:,:,:,:) :: & ! (nx_block,ny_block,ntrcr,ncat) + trcrn ! ice tracers + ! 1: surface temperature of ice/snow (C) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & + uvel , & ! ice velocity + vvel ! + + ! local variables + + integer (kind=int_kind) :: & + i, j , & ! horizontal indices + ij , & ! horizontal index, combines i and j loops + k , & ! ice layer index + n , & ! thickness category index + it , & ! tracer index + icells ! number of cells initialized with ice + + integer (kind=int_kind), dimension(nx_block*ny_block) :: & + indxi, indxj ! compressed indices for cells with aicen > puny + + real (kind=dbl_kind) :: & + Tsfc, sum, hbar, puny, rhos, Lfresh, rad_to_deg + + real (kind=dbl_kind), dimension(ncat) :: & + ainit, hinit ! initial area, thickness + + real (kind=dbl_kind), dimension(nilyr) :: & + qin ! ice enthalpy (J/m3) + + real (kind=dbl_kind), dimension(nslyr) :: & + qsn ! snow enthalpy (J/m3) + + real (kind=dbl_kind), parameter :: & + hsno_init = 0.20_dbl_kind , & ! initial snow thickness (m) + edge_init_nh = 70._dbl_kind, & ! initial ice edge, N.Hem. (deg) + edge_init_sh = -60._dbl_kind ! initial ice edge, S.Hem. 
(deg) + + logical (kind=log_kind) :: tr_brine, tr_lvl + integer (kind=int_kind) :: ntrcr + integer (kind=int_kind) :: nt_Tsfc, nt_qice, nt_qsno, nt_sice + integer (kind=int_kind) :: nt_fbri, nt_alvl, nt_vlvl + + character(len=*), parameter :: subname='(set_state_var)' + + !----------------------------------------------------------------- + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_lvl_out=tr_lvl) + call icepack_query_tracer_indices( nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & + nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, & + nt_fbri_out=nt_fbri, nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl) + call icepack_query_parameters(rhos_out=rhos, Lfresh_out=Lfresh, puny_out=puny, & + rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + indxi(:) = 0 + indxj(:) = 0 + + ! Initialize state variables. + ! If restarting, these values are overwritten. + + do n = 1, ncat + do j = 1, ny_block + do i = 1, nx_block + aicen(i,j,n) = c0 + vicen(i,j,n) = c0 + vsnon(i,j,n) = c0 + trcrn(i,j,nt_Tsfc,n) = Tf(i,j) ! surface temperature + if (ntrcr >= 2) then + do it = 2, ntrcr + trcrn(i,j,it,n) = c0 + enddo + endif + if (tr_lvl) trcrn(i,j,nt_alvl,n) = c1 + if (tr_lvl) trcrn(i,j,nt_vlvl,n) = c1 + if (tr_brine) trcrn(i,j,nt_fbri,n) = c1 + do k = 1, nilyr + trcrn(i,j,nt_sice+k-1,n) = salinz(i,j,k) + enddo + do k = 1, nslyr + trcrn(i,j,nt_qsno+k-1,n) = -rhos * Lfresh + enddo + enddo + enddo + enddo + + if (trim(ice_ic) == 'default') then + + !----------------------------------------------------------------- + ! Place ice where ocean surface is cold. + ! Note: If SST is not read from a file, then the ocean is assumed + ! to be at its freezing point everywhere, and ice will + ! extend to the prescribed edges. + !----------------------------------------------------------------- + + if (trim(ice_data_type) == 'box2001') then + + hbar = c2 ! initial ice thickness + do n = 1, ncat + hinit(n) = c0 + ainit(n) = c0 + if (hbar > hin_max(n-1) .and. hbar < hin_max(n)) then + hinit(n) = hbar + ainit(n) = p5 !echmod symm + endif + enddo + + elseif (trim(ice_data_type) == 'boxslotcyl') then + + hbar = c1 ! initial ice thickness (1 m) + do n = 1, ncat + hinit(n) = c0 + ainit(n) = c0 + if (hbar > hin_max(n-1) .and. hbar < hin_max(n)) then + hinit(n) = hbar + ainit(n) = c1 !echmod symm + endif + enddo + + else + + ! initial category areas in cells with ice + hbar = c3 ! initial ice thickness with greatest area + ! Note: the resulting average ice thickness + ! tends to be less than hbar due to the + ! nonlinear distribution of ice thicknesses + sum = c0 + do n = 1, ncat + if (n < ncat) then + hinit(n) = p5*(hin_max(n-1) + hin_max(n)) ! m + else ! n=ncat + hinit(n) = (hin_max(n-1) + c1) ! m + endif + ! parabola, max at h=hbar, zero at h=0, 2*hbar + ainit(n) = max(c0, (c2*hbar*hinit(n) - hinit(n)**2)) + sum = sum + ainit(n) + enddo + do n = 1, ncat + ainit(n) = ainit(n) / (sum + puny/ncat) ! normalize + enddo + + endif ! ice_data_type + + if (trim(grid_type) == 'rectangular') then + + ! place ice on left side of domain + icells = 0 + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j)) then + if (ULON(i,j) < -50./rad_to_deg) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif ! ULON + endif ! tmask + enddo ! i + enddo ! j + + else + + ! 
place ice at high latitudes where ocean sfc is cold + icells = 0 + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j)) then + ! place ice in high latitudes where ocean sfc is cold + if ( (sst (i,j) <= Tf(i,j)+p2) .and. & + (TLAT(i,j) < edge_init_sh/rad_to_deg .or. & + TLAT(i,j) > edge_init_nh/rad_to_deg) ) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif ! cold surface + endif ! tmask + enddo ! i + enddo ! j + + endif ! rectgrid + + do n = 1, ncat + + ! ice volume, snow volume + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + + aicen(i,j,n) = ainit(n) + + if (trim(ice_data_type) == 'box2001') then + if (hinit(n) > c0) then +! ! constant slope from 0 to 1 in x direction + aicen(i,j,n) = (real(iglob(i), kind=dbl_kind)-p5) & + / (real(nx_global,kind=dbl_kind)) +! ! constant slope from 0 to 0.5 in x direction +! aicen(i,j,n) = (real(iglob(i), kind=dbl_kind)-p5) & +! / (real(nx_global,kind=dbl_kind)) * p5 + ! quadratic +! aicen(i,j,n) = max(c0,(real(iglob(i), kind=dbl_kind)-p5) & +! / (real(nx_global,kind=dbl_kind)) & +! * (real(jglob(j), kind=dbl_kind)-p5) & +! / (real(ny_global,kind=dbl_kind)) * p5) +! aicen(i,j,n) = max(c0,(real(nx_global, kind=dbl_kind) & +! - real(iglob(i), kind=dbl_kind)-p5) & +! / (real(nx_global,kind=dbl_kind)) & +! * (real(ny_global, kind=dbl_kind) & +! - real(jglob(j), kind=dbl_kind)-p5) & +! / (real(ny_global,kind=dbl_kind)) * p5) + endif + vicen(i,j,n) = hinit(n) * aicen(i,j,n) ! m + elseif (trim(ice_data_type) == 'boxslotcyl') then + if (hinit(n) > c0) then + ! slotted cylinder + call boxslotcyl_data_aice(aicen, i, j, & + nx_block, ny_block, & + n, ainit, & + iglob, jglob) + endif + vicen(i,j,n) = hinit(n) * aicen(i,j,n) ! m + else + vicen(i,j,n) = hinit(n) * ainit(n) ! m + endif + vsnon(i,j,n) = min(aicen(i,j,n)*hsno_init,p2*vicen(i,j,n)) + + call icepack_init_trcr(Tair = Tair(i,j), Tf = Tf(i,j), & + Sprofile = salinz(i,j,:), & + Tprofile = Tmltz(i,j,:), & + Tsfc = Tsfc, & + nilyr = nilyr, nslyr = nslyr, & + qin = qin(:), qsn = qsn(:)) + + ! surface temperature + trcrn(i,j,nt_Tsfc,n) = Tsfc ! deg C + ! ice enthalpy, salinity + do k = 1, nilyr + trcrn(i,j,nt_qice+k-1,n) = qin(k) + trcrn(i,j,nt_sice+k-1,n) = salinz(i,j,k) + enddo + ! snow enthalpy + do k = 1, nslyr + trcrn(i,j,nt_qsno+k-1,n) = qsn(k) + enddo ! nslyr + ! brine fraction + if (tr_brine) trcrn(i,j,nt_fbri,n) = c1 + + enddo ! ij + enddo ! ncat + + ! velocity initialization for special tests + if (trim(ice_data_type) == 'boxslotcyl') then + do j = 1, ny_block + do i = 1, nx_block + call boxslotcyl_data_vel(i, j, & + nx_block, ny_block, & + iglob, jglob, & + uvel, vvel) + enddo ! j + enddo ! i + endif + endif ! ice_ic + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine set_state_var + +!======================================================================= + +! Set ice concentration for slotted cylinder advection test +! +! author: Philippe Blain (ECCC) + + subroutine boxslotcyl_data_aice(aicen, i, j, & + nx_block, ny_block, & + n, ainit, & + iglob, jglob) + + use ice_constants, only: c0, c2, c5, p3, p166, p75, p5 + use ice_domain_size, only: nx_global, ny_global, ncat + use ice_grid, only: dxrect, dyrect + + integer (kind=int_kind), intent(in) :: & + i, j , & ! local indices + nx_block, ny_block, & ! block dimensions + iglob(nx_block) , & ! global indices + jglob(ny_block) , & + n ! thickness category index + + real (kind=dbl_kind), dimension(ncat) :: & + ainit ! 
initial area + + real (kind=dbl_kind), dimension (nx_block,ny_block,ncat), intent(out) :: & + aicen ! concentration of ice + + ! local variables + + logical :: in_slot, in_cyl, in_slotted_cyl + + real (kind=dbl_kind), dimension (2) :: & + slot_x, & ! geometric limits of the slot + slot_y + + real (kind=dbl_kind) :: & + diam , & ! cylinder diameter + radius , & ! cylinder radius + center_x, & ! cylinder center + center_y, & + width , & ! slot width + length ! slot height + + character(len=*), parameter :: subname = '(boxslotcyl_data_aice)' + + ! Geometric configuration of the slotted cylinder + diam = p3 *dxrect*(nx_global-1) + center_x = p5 *dxrect*(nx_global-1) + center_y = p75*dyrect*(ny_global-1) + radius = p5*diam + width = p166*diam + length = c5*p166*diam + + slot_x(1) = center_x - width/c2 + slot_x(2) = center_x + width/c2 + slot_y(1) = center_y - radius + slot_y(2) = center_y + (length - radius) + + ! check if grid point is inside slotted cylinder + in_slot = (dxrect*real(iglob(i)-1, kind=dbl_kind) >= slot_x(1)) .and. & + (dxrect*real(iglob(i)-1, kind=dbl_kind) <= slot_x(2)) .and. & + (dyrect*real(jglob(j)-1, kind=dbl_kind) >= slot_y(1)) .and. & + (dyrect*real(jglob(j)-1, kind=dbl_kind) <= slot_y(2)) + + in_cyl = sqrt((dxrect*real(iglob(i)-1, kind=dbl_kind) - center_x)**c2 + & + (dyrect*real(jglob(j)-1, kind=dbl_kind) - center_y)**c2) <= radius + + in_slotted_cyl = in_cyl .and. .not. in_slot + + if (in_slotted_cyl) then + aicen(i,j,n) = ainit(n) + else + aicen(i,j,n) = c0 + endif + + + end subroutine boxslotcyl_data_aice + +!======================================================================= + +! Set ice velocity for slotted cylinder advection test +! +! author: Philippe Blain (ECCC) + + subroutine boxslotcyl_data_vel(i, j, & + nx_block, ny_block, & + iglob, jglob, & + uvel, vvel) + + use ice_constants, only: c2, c12, p5, cm_to_m + use ice_domain_size, only: nx_global, ny_global + use ice_grid, only: dxrect + + integer (kind=int_kind), intent(in) :: & + i, j, & ! local indices + nx_block, ny_block, & ! block dimensions + iglob(nx_block), & ! global indices + jglob(ny_block) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & + uvel, vvel ! ice velocity + + ! local variables + + real (kind=dbl_kind) :: & + pi , & ! pi + secday , & ! seconds per day + max_vel , & ! max velocity + domain_length , & ! physical domain length + period ! rotational period + + character(len=*), parameter :: subname = '(boxslotcyl_data_vel)' + + call icepack_query_parameters(secday_out=secday, pi_out=pi) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + domain_length = dxrect*cm_to_m*nx_global + period = c12*secday ! 
12 days rotational period + max_vel = pi*domain_length/period + + uvel(i,j) = c2*max_vel*(real(jglob(j), kind=dbl_kind) - p5) & + / real(ny_global - 1, kind=dbl_kind) - max_vel + vvel(i,j) = -c2*max_vel*(real(iglob(i), kind=dbl_kind) - p5) & + / real(nx_global - 1, kind=dbl_kind) + max_vel + + end subroutine boxslotcyl_data_vel + +!======================================================================= + + end module ice_init + +!======================================================================= diff --git a/cicecore/cicedynB/infrastructure/ice_grid.F90 b/cicecore/cicedynB/infrastructure/ice_grid.F90 new file mode 100644 index 000000000..34b37cf29 --- /dev/null +++ b/cicecore/cicedynB/infrastructure/ice_grid.F90 @@ -0,0 +1,2561 @@ +#ifdef ncdf +#define USE_NETCDF +#endif +!======================================================================= + +! Spatial grids, masks, and boundary conditions +! +! authors: Elizabeth C. Hunke and William H. Lipscomb, LANL +! Tony Craig, NCAR +! +! 2004: Block structure added by William Lipscomb +! init_grid split into two parts as in POP 2.0 +! Boundary update routines replaced by POP versions +! 2006: Converted to free source form (F90) by Elizabeth Hunke +! 2007: Option to read from netcdf files (A. Keen, Met Office) +! Grid reading routines reworked by E. Hunke for boundary values + + module ice_grid + + use ice_kinds_mod + use ice_broadcast, only: broadcast_scalar, broadcast_array + use ice_boundary, only: ice_HaloUpdate, ice_HaloExtrapolate + use ice_communicate, only: my_task, master_task + use ice_blocks, only: block, get_block, nx_block, ny_block, nghost + use ice_domain_size, only: nx_global, ny_global, max_blocks + use ice_domain, only: blocks_ice, nblocks, halo_info, distrb_info, & + ew_boundary_type, ns_boundary_type, init_domain_distribution + use ice_fileunits, only: nu_diag, nu_grid, nu_kmt, & + get_fileunit, release_fileunit + use ice_gather_scatter, only: gather_global, scatter_global + use ice_read_write, only: ice_read, ice_read_nc, ice_read_global, & + ice_read_global_nc, ice_open, ice_open_nc, ice_close_nc + use ice_timers, only: timer_bound, ice_timer_start, ice_timer_stop + use ice_exit, only: abort_ice + use ice_global_reductions, only: global_minval, global_maxval + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters + + implicit none + private + public :: init_grid1, init_grid2, & + t2ugrid_vector, u2tgrid_vector, & + to_ugrid, to_tgrid, alloc_grid + + character (len=char_len_long), public :: & + grid_format , & ! file format ('bin'=binary or 'nc'=netcdf) + gridcpl_file , & ! input file for POP coupling grid info + grid_file , & ! input file for POP grid info + kmt_file , & ! input file for POP grid info + bathymetry_file, & ! input bathymetry for basalstress + bathymetry_format, & ! bathymetry file format (default or pop) + grid_spacing , & ! default of 30.e3m or set by user in namelist + grid_type ! current options are rectangular (default), + ! displaced_pole, tripole, regional + + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & + dxt , & ! width of T-cell through the middle (m) + dyt , & ! height of T-cell through the middle (m) + dxu , & ! width of U-cell through the middle (m) + dyu , & ! height of U-cell through the middle (m) + HTE , & ! length of eastern edge of T-cell (m) + HTN , & ! length of northern edge of T-cell (m) + tarea , & ! area of T-cell (m^2) + uarea , & ! area of U-cell (m^2) + tarear , & ! 
1/tarea + uarear , & ! 1/uarea + tinyarea,& ! puny*tarea + tarean , & ! area of NH T-cells + tareas , & ! area of SH T-cells + ULON , & ! longitude of velocity pts (radians) + ULAT , & ! latitude of velocity pts (radians) + TLON , & ! longitude of temp pts (radians) + TLAT , & ! latitude of temp pts (radians) + ANGLE , & ! for conversions between POP grid and lat/lon + ANGLET , & ! ANGLE converted to T-cells + bathymetry , & ! ocean depth, for grounding keels and bergs (m) + ocn_gridcell_frac ! only relevant for lat-lon grids + ! gridcell value of [1 - (land fraction)] (T-cell) + + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & + cyp , & ! 1.5*HTE - 0.5*HTE + cxp , & ! 1.5*HTN - 0.5*HTN + cym , & ! 0.5*HTE - 1.5*HTE + cxm , & ! 0.5*HTN - 1.5*HTN + dxhy , & ! 0.5*(HTE - HTE) + dyhx ! 0.5*(HTN - HTN) + + ! grid dimensions for rectangular grid + real (kind=dbl_kind), public :: & + dxrect, & ! user_specified spacing (cm) in x-direction (uniform HTN) + dyrect ! user_specified spacing (cm) in y-direction (uniform HTE) + + ! Corners of grid boxes for history output + real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & + lont_bounds, & ! longitude of gridbox corners for T point + latt_bounds, & ! latitude of gridbox corners for T point + lonu_bounds, & ! longitude of gridbox corners for U point + latu_bounds ! latitude of gridbox corners for U point + + ! geometric quantities used for remapping transport + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & + xav , & ! mean T-cell value of x + yav , & ! mean T-cell value of y + xxav , & ! mean T-cell value of xx +! xyav , & ! mean T-cell value of xy +! yyav , & ! mean T-cell value of yy + yyav ! mean T-cell value of yy +! xxxav, & ! mean T-cell value of xxx +! xxyav, & ! mean T-cell value of xxy +! xyyav, & ! mean T-cell value of xyy +! yyyav ! mean T-cell value of yyy + + real (kind=dbl_kind), & + dimension (:,:,:,:,:), allocatable, public :: & + mne, & ! matrices used for coordinate transformations in remapping + mnw, & ! ne = northeast corner, nw = northwest, etc. + mse, & + msw + + ! masks + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & + hm , & ! land/boundary mask, thickness (T-cell) + bm , & ! task/block id + uvm , & ! land/boundary mask, velocity (U-cell) + kmt ! ocean topography mask for bathymetry (T-cell) + + logical (kind=log_kind), public :: & + use_bathymetry ! flag for reading in bathymetry_file + + logical (kind=log_kind), & + dimension (:,:,:), allocatable, public :: & + tmask , & ! land/boundary mask, thickness (T-cell) + umask , & ! land/boundary mask, velocity (U-cell) + lmask_n, & ! northern hemisphere mask + lmask_s ! southern hemisphere mask + + real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & + rndex_global ! global index for local subdomain (dbl) + + logical (kind=log_kind), private :: & + l_readCenter ! If anglet exist in grid file read it otherwise calculate it + + +!======================================================================= + + contains + +!======================================================================= +! +! Allocate space for all variables +! + subroutine alloc_grid + + integer (int_kind) :: ierr + + allocate( & + dxt (nx_block,ny_block,max_blocks), & ! width of T-cell through the middle (m) + dyt (nx_block,ny_block,max_blocks), & ! height of T-cell through the middle (m) + dxu (nx_block,ny_block,max_blocks), & ! width of U-cell through the middle (m) + dyu (nx_block,ny_block,max_blocks), & ! 
height of U-cell through the middle (m) + HTE (nx_block,ny_block,max_blocks), & ! length of eastern edge of T-cell (m) + HTN (nx_block,ny_block,max_blocks), & ! length of northern edge of T-cell (m) + tarea (nx_block,ny_block,max_blocks), & ! area of T-cell (m^2) + uarea (nx_block,ny_block,max_blocks), & ! area of U-cell (m^2) + tarear (nx_block,ny_block,max_blocks), & ! 1/tarea + uarear (nx_block,ny_block,max_blocks), & ! 1/uarea + tinyarea (nx_block,ny_block,max_blocks), & ! puny*tarea + tarean (nx_block,ny_block,max_blocks), & ! area of NH T-cells + tareas (nx_block,ny_block,max_blocks), & ! area of SH T-cells + ULON (nx_block,ny_block,max_blocks), & ! longitude of velocity pts (radians) + ULAT (nx_block,ny_block,max_blocks), & ! latitude of velocity pts (radians) + TLON (nx_block,ny_block,max_blocks), & ! longitude of temp pts (radians) + TLAT (nx_block,ny_block,max_blocks), & ! latitude of temp pts (radians) + ANGLE (nx_block,ny_block,max_blocks), & ! for conversions between POP grid and lat/lon + ANGLET (nx_block,ny_block,max_blocks), & ! ANGLE converted to T-cells + bathymetry(nx_block,ny_block,max_blocks),& ! ocean depth, for grounding keels and bergs (m) + ocn_gridcell_frac(nx_block,ny_block,max_blocks),& ! only relevant for lat-lon grids + cyp (nx_block,ny_block,max_blocks), & ! 1.5*HTE - 0.5*HTE + cxp (nx_block,ny_block,max_blocks), & ! 1.5*HTN - 0.5*HTN + cym (nx_block,ny_block,max_blocks), & ! 0.5*HTE - 1.5*HTE + cxm (nx_block,ny_block,max_blocks), & ! 0.5*HTN - 1.5*HTN + dxhy (nx_block,ny_block,max_blocks), & ! 0.5*(HTE - HTE) + dyhx (nx_block,ny_block,max_blocks), & ! 0.5*(HTN - HTN) + xav (nx_block,ny_block,max_blocks), & ! mean T-cell value of x + yav (nx_block,ny_block,max_blocks), & ! mean T-cell value of y + xxav (nx_block,ny_block,max_blocks), & ! mean T-cell value of xx + yyav (nx_block,ny_block,max_blocks), & ! mean T-cell value of yy + hm (nx_block,ny_block,max_blocks), & ! land/boundary mask, thickness (T-cell) + bm (nx_block,ny_block,max_blocks), & ! task/block id + uvm (nx_block,ny_block,max_blocks), & ! land/boundary mask, velocity (U-cell) + kmt (nx_block,ny_block,max_blocks), & ! ocean topography mask for bathymetry (T-cell) + tmask (nx_block,ny_block,max_blocks), & ! land/boundary mask, thickness (T-cell) + umask (nx_block,ny_block,max_blocks), & ! land/boundary mask, velocity (U-cell) + lmask_n (nx_block,ny_block,max_blocks), & ! northern hemisphere mask + lmask_s (nx_block,ny_block,max_blocks), & ! southern hemisphere mask + rndex_global(nx_block,ny_block,max_blocks), & ! global index for local subdomain (dbl) + lont_bounds(4,nx_block,ny_block,max_blocks), & ! longitude of gridbox corners for T point + latt_bounds(4,nx_block,ny_block,max_blocks), & ! latitude of gridbox corners for T point + lonu_bounds(4,nx_block,ny_block,max_blocks), & ! longitude of gridbox corners for U point + latu_bounds(4,nx_block,ny_block,max_blocks), & ! latitude of gridbox corners for U point + mne (2,2,nx_block,ny_block,max_blocks), & ! matrices used for coordinate transformations in remapping + mnw (2,2,nx_block,ny_block,max_blocks), & ! ne = northeast corner, nw = northwest, etc. + mse (2,2,nx_block,ny_block,max_blocks), & + msw (2,2,nx_block,ny_block,max_blocks), & + stat=ierr) + if (ierr/=0) call abort_ice('(alloc_grid): Out of memory') + + end subroutine alloc_grid + +!======================================================================= + +! Distribute blocks across processors. The distribution is optimized +! 
based on latitude and topography, contained in the ULAT and KMT arrays. +! +! authors: William Lipscomb and Phil Jones, LANL + + subroutine init_grid1 + + use ice_blocks, only: nx_block, ny_block + use ice_broadcast, only: broadcast_array + use ice_constants, only: c1 + + integer (kind=int_kind) :: & + fid_grid, & ! file id for netCDF grid file + fid_kmt ! file id for netCDF kmt file + + character (char_len) :: & + fieldname ! field name in netCDF file + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1, work_g2 + + real (kind=dbl_kind) :: & + rad_to_deg + + character(len=*), parameter :: subname = '(init_grid1)' + + !----------------------------------------------------------------- + ! Get global ULAT and KMT arrays used for block decomposition. + !----------------------------------------------------------------- + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + allocate(work_g1(nx_global,ny_global)) + allocate(work_g2(nx_global,ny_global)) + + if (trim(grid_type) == 'displaced_pole' .or. & + trim(grid_type) == 'tripole' .or. & + trim(grid_type) == 'regional' ) then + + if (trim(grid_format) == 'nc') then + + call ice_open_nc(grid_file,fid_grid) + call ice_open_nc(kmt_file,fid_kmt) + + fieldname='ulat' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,.true.) + fieldname='kmt' + call ice_read_global_nc(fid_kmt,1,fieldname,work_g2,.true.) + + if (my_task == master_task) then + call ice_close_nc(fid_grid) + call ice_close_nc(fid_kmt) + endif + + else + + call ice_open(nu_grid,grid_file,64) ! ULAT + call ice_open(nu_kmt, kmt_file, 32) ! KMT + + call ice_read_global(nu_grid,1,work_g1,'rda8',.true.) ! ULAT + call ice_read_global(nu_kmt, 1,work_g2,'ida4',.true.) ! KMT + + if (my_task == master_task) then + close (nu_grid) + close (nu_kmt) + endif + + endif + + else ! rectangular grid + + work_g1(:,:) = 75._dbl_kind/rad_to_deg ! arbitrary polar latitude + work_g2(:,:) = c1 + + endif + + call broadcast_array(work_g1, master_task) ! ULAT + call broadcast_array(work_g2, master_task) ! KMT + + !----------------------------------------------------------------- + ! distribute blocks among processors + !----------------------------------------------------------------- + + call init_domain_distribution(work_g2, work_g1) ! KMT, ULAT + + deallocate(work_g1) + deallocate(work_g2) + + !----------------------------------------------------------------- + ! write additional domain information + !----------------------------------------------------------------- + + if (my_task == master_task) then + write(nu_diag,'(a26,i6)') ' Block size: nx_block = ',nx_block + write(nu_diag,'(a26,i6)') ' ny_block = ',ny_block + endif + + end subroutine init_grid1 + +!======================================================================= + +! Horizontal grid initialization: +! +! U{LAT,LONG} = true {latitude,longitude} of U points +! HT{N,E} = cell widths on {N,E} sides of T cell +! ANGLE = angle between local x direction and true east +! hm = land mask (c1 for ocean points, c0 for land points) +! D{X,Y}{T,U} = {x,y} spacing centered at {T,U} points +! T-grid and ghost cell values +! Various grid quantities needed for dynamics and transport +! +! author: Elizabeth C. 
Hunke, LANL + + subroutine init_grid2 + + use ice_blocks, only: get_block, block, nx_block, ny_block + use ice_constants, only: c0, c1, c2, p5, p25, c1p5, & + field_loc_center, field_loc_NEcorner, & + field_type_scalar, field_type_vector, field_type_angle + use ice_domain_size, only: max_blocks + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + real (kind=dbl_kind) :: & + angle_0, angle_w, angle_s, angle_sw, & + pi, pi2, puny + logical (kind=log_kind), dimension(nx_block,ny_block,max_blocks):: & + out_of_range + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(init_grid2)' + + !----------------------------------------------------------------- + ! lat, lon, cell widths, angle, land mask + !----------------------------------------------------------------- + + call icepack_query_parameters(pi_out=pi, pi2_out=pi2, puny_out=puny) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (trim(grid_type) == 'displaced_pole' .or. & + trim(grid_type) == 'tripole' .or. & + trim(grid_type) == 'regional' ) then + if (trim(grid_format) == 'nc') then + call popgrid_nc ! read POP grid lengths from nc file + else + call popgrid ! read POP grid lengths directly + endif +#ifdef CESMCOUPLED + elseif (trim(grid_type) == 'latlon') then + call latlongrid ! lat lon grid for sequential CESM (CAM mode) + return +#endif + elseif (trim(grid_type) == 'cpom_grid') then + call cpomgrid ! cpom model orca1 type grid + else + call rectgrid ! regular rectangular grid + endif + + !----------------------------------------------------------------- + ! T-grid cell and U-grid cell quantities + !----------------------------------------------------------------- + +! tarea(:,:,:) = c0 + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + tarea(i,j,iblk) = dxt(i,j,iblk)*dyt(i,j,iblk) + uarea(i,j,iblk) = dxu(i,j,iblk)*dyu(i,j,iblk) + if (tarea(i,j,iblk) > c0) then + tarear(i,j,iblk) = c1/tarea(i,j,iblk) + else + tarear(i,j,iblk) = c0 ! possible on boundaries + endif + if (uarea(i,j,iblk) > c0) then + uarear(i,j,iblk) = c1/uarea(i,j,iblk) + else + uarear(i,j,iblk) = c0 ! possible on boundaries + endif + tinyarea(i,j,iblk) = puny*tarea(i,j,iblk) + + dxhy(i,j,iblk) = p5*(HTE(i,j,iblk) - HTE(i-1,j,iblk)) + dyhx(i,j,iblk) = p5*(HTN(i,j,iblk) - HTN(i,j-1,iblk)) + enddo + enddo + + do j = jlo, jhi+1 + do i = ilo, ihi+1 + cyp(i,j,iblk) = (c1p5*HTE(i,j,iblk) - p5*HTE(i-1,j,iblk)) + cxp(i,j,iblk) = (c1p5*HTN(i,j,iblk) - p5*HTN(i,j-1,iblk)) + ! match order of operations in cyp, cxp for tripole grids + cym(i,j,iblk) = -(c1p5*HTE(i-1,j,iblk) - p5*HTE(i,j,iblk)) + cxm(i,j,iblk) = -(c1p5*HTN(i,j-1,iblk) - p5*HTN(i,j,iblk)) + enddo + enddo + + enddo ! iblk + !$OMP END PARALLEL DO + + !----------------------------------------------------------------- + ! Ghost cell updates + ! On the tripole grid, one must be careful with updates of + ! quantities that involve a difference of cell lengths. + ! For example, dyhx and dxhy are cell-centered vector components. + ! Also note that on the tripole grid, cxp and cxm would swap places, + ! as would cyp and cym. 
These quantities are computed only + ! in north and east ghost cells (above), not south and west. + !----------------------------------------------------------------- + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (tarea, halo_info, & + field_loc_center, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (uarea, halo_info, & + field_loc_NEcorner, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (tarear, halo_info, & + field_loc_center, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (uarear, halo_info, & + field_loc_NEcorner, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (tinyarea, halo_info, & + field_loc_center, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (dxhy, halo_info, & + field_loc_center, field_type_vector, & + fillValue=c1) + call ice_HaloUpdate (dyhx, halo_info, & + field_loc_center, field_type_vector, & + fillValue=c1) + call ice_timer_stop(timer_bound) + + !----------------------------------------------------------------- + ! Calculate ANGLET to be compatible with POP ocean model + ! First, ensure that -pi <= ANGLE <= pi + !----------------------------------------------------------------- + + out_of_range = .false. + where (ANGLE < -pi .or. ANGLE > pi) out_of_range = .true. + if (count(out_of_range) > 0) then + write(nu_diag,*) subname,' angle = ',minval(ANGLE),maxval(ANGLE),count(out_of_range) + call abort_ice (subname//' ANGLE out of expected range', & + file=__FILE__, line=__LINE__) + endif + + !----------------------------------------------------------------- + ! Compute ANGLE on T-grid + !----------------------------------------------------------------- + if (trim(grid_type) == 'cpom_grid') then + ANGLET(:,:,:) = ANGLE(:,:,:) + else if (.not. (l_readCenter)) then + ANGLET = c0 + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & + !$OMP angle_0,angle_w,angle_s,angle_sw) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + angle_0 = ANGLE(i ,j ,iblk) ! w----0 + angle_w = ANGLE(i-1,j ,iblk) ! | | + angle_s = ANGLE(i, j-1,iblk) ! | | + angle_sw = ANGLE(i-1,j-1,iblk) ! sw---s + ANGLET(i,j,iblk) = atan2(p25*(sin(angle_0)+ & + sin(angle_w)+ & + sin(angle_s)+ & + sin(angle_sw)),& + p25*(cos(angle_0)+ & + cos(angle_w)+ & + cos(angle_s)+ & + cos(angle_sw))) + enddo + enddo + enddo + !$OMP END PARALLEL DO + endif ! cpom_grid + if (trim(grid_type) == 'regional' .and. & + (.not. (l_readCenter))) then + ! for W boundary extrapolate from interior + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + i = ilo + if (this_block%i_glob(i) == 1) then + do j = jlo, jhi + ANGLET(i,j,iblk) = c2*ANGLET(i+1,j,iblk)-ANGLET(i+2,j,iblk) + enddo + endif + enddo + !$OMP END PARALLEL DO + endif ! regional + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (ANGLET, halo_info, & + field_loc_center, field_type_angle, & + fillValue=c1) + call ice_timer_stop(timer_bound) + + call makemask ! velocity mask, hemisphere masks + if (.not. (l_readCenter)) then + call Tlatlon ! get lat, lon on the T grid + endif + !----------------------------------------------------------------- + ! 
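The ANGLET loop above moves ANGLE from U corners to T cells by averaging the sines and cosines of the four surrounding corner angles and taking atan2 of the means; unlike a plain arithmetic mean, this stays well behaved when the corners straddle the +/-pi branch cut. A self-contained sketch of that averaging with four hypothetical corner values near +/-pi:

program angle_average_sketch
   implicit none
   integer, parameter :: dbl = selected_real_kind(13)
   real (kind=dbl), parameter :: pi = 3.141592653589793_dbl
   real (kind=dbl) :: a0, aw, a_s, asw, anglet_vec, anglet_naive

   ! hypothetical corner angles straddling the +/-pi branch cut
   a0  =  pi - 0.05_dbl
   aw  = -pi + 0.05_dbl
   a_s =  pi - 0.10_dbl
   asw = -pi + 0.10_dbl

   ! sin/cos (vector) average, as in init_grid2
   anglet_vec = atan2(0.25_dbl*(sin(a0) + sin(aw) + sin(a_s) + sin(asw)), &
                      0.25_dbl*(cos(a0) + cos(aw) + cos(a_s) + cos(asw)))

   ! plain arithmetic mean, shown only to illustrate the wrap-around problem
   anglet_naive = 0.25_dbl*(a0 + aw + a_s + asw)

   print *, 'vector-averaged angle: ', anglet_vec    ! close to +/-pi, as expected
   print *, 'plain mean           : ', anglet_naive  ! ~0, wrong for these corners
end program angle_average_sketch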
bathymetry + !----------------------------------------------------------------- + + if (trim(bathymetry_format) == 'default') then + call get_bathymetry + elseif (trim(bathymetry_format) == 'pop') then + call get_bathymetry_popfile + else + call abort_ice(subname//'ERROR: bathymetry_format value must be default or pop', & + file=__FILE__, line=__LINE__) + endif + + !---------------------------------------------------------------- + ! Corner coordinates for CF compliant history files + !---------------------------------------------------------------- + + call gridbox_corners + + !----------------------------------------------------------------- + ! Compute global index (used for unpacking messages from coupler) + !----------------------------------------------------------------- + + if (my_task==master_task) then + allocate(work_g1(nx_global,ny_global)) + do j=1,ny_global + do i=1,nx_global + work_g1(i,j) = real((j-1)*nx_global + i,kind=dbl_kind) + enddo + enddo + else + allocate(work_g1(1,1)) ! to save memory + endif + + call scatter_global(rndex_global, work_g1, & + master_task, distrb_info, & + field_loc_center, field_type_scalar) + + deallocate(work_g1) + + end subroutine init_grid2 + +!======================================================================= + +! POP displaced pole grid and land mask (or tripole). +! Grid record number, field and units are: \\ +! (1) ULAT (radians) \\ +! (2) ULON (radians) \\ +! (3) HTN (cm) \\ +! (4) HTE (cm) \\ +! (5) HUS (cm) \\ +! (6) HUW (cm) \\ +! (7) ANGLE (radians) +! +! Land mask record number and field is (1) KMT. +! +! author: Elizabeth C. Hunke, LANL + + subroutine popgrid + + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: c0, c1, & + field_loc_center, field_loc_NEcorner, & + field_type_scalar, field_type_angle + use ice_domain_size, only: max_blocks + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + logical (kind=log_kind) :: diag + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1 + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(popgrid)' + + call ice_open(nu_grid,grid_file,64) + call ice_open(nu_kmt,kmt_file,32) + + diag = .true. ! write diagnostic info + + !----------------------------------------------------------------- + ! topography + !----------------------------------------------------------------- + + call ice_read(nu_kmt,1,work1,'ida4',diag, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + hm (:,:,:) = c0 + kmt(:,:,:) = c0 + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + kmt(i,j,iblk) = work1(i,j,iblk) + if (kmt(i,j,iblk) >= c1) hm(i,j,iblk) = c1 + enddo + enddo + enddo + !$OMP END PARALLEL DO + + !----------------------------------------------------------------- + ! lat, lon, angle + !----------------------------------------------------------------- + + allocate(work_g1(nx_global,ny_global)) + + call ice_read_global(nu_grid,1,work_g1,'rda8',.true.) ! 
ULAT + call gridbox_verts(work_g1,latt_bounds) + call scatter_global(ULAT, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(ULAT, distrb_info, & + ew_boundary_type, ns_boundary_type) + + call ice_read_global(nu_grid,2,work_g1,'rda8',.true.) ! ULON + call gridbox_verts(work_g1,lont_bounds) + call scatter_global(ULON, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(ULON, distrb_info, & + ew_boundary_type, ns_boundary_type) + + call ice_read_global(nu_grid,7,work_g1,'rda8',.true.) ! ANGLE + call scatter_global(ANGLE, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_angle) + + !----------------------------------------------------------------- + ! cell dimensions + ! calculate derived quantities from global arrays to preserve + ! information on boundaries + !----------------------------------------------------------------- + + call ice_read_global(nu_grid,3,work_g1,'rda8',.true.) ! HTN + call primary_grid_lengths_HTN(work_g1) ! dxu, dxt + + call ice_read_global(nu_grid,4,work_g1,'rda8',.true.) ! HTE + call primary_grid_lengths_HTE(work_g1) ! dyu, dyt + + deallocate(work_g1) + + if (my_task == master_task) then + close (nu_grid) + close (nu_kmt) + endif + + end subroutine popgrid + +!======================================================================= + +! POP displaced pole grid and land mask. +! Grid record number, field and units are: \\ +! (1) ULAT (radians) \\ +! (2) ULON (radians) \\ +! (3) HTN (cm) \\ +! (4) HTE (cm) \\ +! (5) HUS (cm) \\ +! (6) HUW (cm) \\ +! (7) ANGLE (radians) +! +! Land mask record number and field is (1) KMT. +! +! author: Elizabeth C. Hunke, LANL +! Revised for netcdf input: Ann Keen, Met Office, May 2007 + + subroutine popgrid_nc + + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: c0, c1, & + field_loc_center, field_loc_NEcorner, & + field_type_scalar, field_type_angle + use ice_domain_size, only: max_blocks +#ifdef USE_NETCDF + use netcdf +#endif + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + fid_grid, & ! file id for netCDF grid file + fid_kmt ! file id for netCDF kmt file + + logical (kind=log_kind) :: diag + + character (char_len) :: & + fieldname ! field name in netCDF file + + real (kind=dbl_kind) :: & + pi + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1 + + type (block) :: & + this_block ! block information for current block + + integer(kind=int_kind) :: & + varid + integer (kind=int_kind) :: & + status ! status flag + + + character(len=*), parameter :: subname = '(popgrid_nc)' + +#ifdef USE_NETCDF + call icepack_query_parameters(pi_out=pi) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call ice_open_nc(grid_file,fid_grid) + call ice_open_nc(kmt_file,fid_kmt) + + diag = .true. ! write diagnostic info + l_readCenter = .false. + !----------------------------------------------------------------- + ! 
topography + !----------------------------------------------------------------- + + fieldname='kmt' + call ice_read_nc(fid_kmt,1,fieldname,work1,diag, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + + hm (:,:,:) = c0 + kmt(:,:,:) = c0 + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + kmt(i,j,iblk) = work1(i,j,iblk) + if (kmt(i,j,iblk) >= c1) hm(i,j,iblk) = c1 + enddo + enddo + enddo + !$OMP END PARALLEL DO + + !----------------------------------------------------------------- + ! lat, lon, angle + !----------------------------------------------------------------- + + allocate(work_g1(nx_global,ny_global)) + + fieldname='ulat' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ULAT + call gridbox_verts(work_g1,latt_bounds) + call scatter_global(ULAT, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(ULAT, distrb_info, & + ew_boundary_type, ns_boundary_type) + + fieldname='ulon' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ULON + call gridbox_verts(work_g1,lont_bounds) + call scatter_global(ULON, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(ULON, distrb_info, & + ew_boundary_type, ns_boundary_type) + + fieldname='angle' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ANGLE + call scatter_global(ANGLE, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_angle) + ! fix ANGLE: roundoff error due to single precision + where (ANGLE > pi) ANGLE = pi + where (ANGLE < -pi) ANGLE = -pi + + ! if grid file includes anglet then read instead + fieldname='anglet' + if (my_task == master_task) then + status = nf90_inq_varid(fid_grid, trim(fieldname) , varid) + if (status /= nf90_noerr) then + write(nu_diag,*) subname//' CICE will calculate angleT, TLON and TLAT' + else + write(nu_diag,*) subname//' angleT, TLON and TLAT is read from grid file' + l_readCenter = .true. + endif + endif + call broadcast_scalar(l_readCenter,master_task) + if (l_readCenter) then + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) + call scatter_global(ANGLET, work_g1, master_task, distrb_info, & + field_loc_center, field_type_angle) + where (ANGLET > pi) ANGLET = pi + where (ANGLET < -pi) ANGLET = -pi + fieldname="tlon" + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) + call scatter_global(TLON, work_g1, master_task, distrb_info, & + field_loc_center, field_type_scalar) + fieldname="tlat" + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) + call scatter_global(TLAT, work_g1, master_task, distrb_info, & + field_loc_center, field_type_scalar) + endif + !----------------------------------------------------------------- + ! cell dimensions + ! calculate derived quantities from global arrays to preserve + ! information on boundaries + !----------------------------------------------------------------- + + fieldname='htn' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! HTN + call primary_grid_lengths_HTN(work_g1) ! dxu, dxt + fieldname='hte' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! HTE + call primary_grid_lengths_HTE(work_g1) ! 
dyu, dyt + + deallocate(work_g1) + + if (my_task == master_task) then + call ice_close_nc(fid_grid) + call ice_close_nc(fid_kmt) + endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + + end subroutine popgrid_nc + +#ifdef CESMCOUPLED +!======================================================================= + +! Read in kmt file that matches CAM lat-lon grid and has single column +! functionality +! author: Mariana Vertenstein +! 2007: Elizabeth Hunke upgraded to netcdf90 and cice ncdf calls + + subroutine latlongrid + +! use ice_boundary + use ice_domain_size + use ice_scam, only : scmlat, scmlon, single_column + use ice_constants, only: c0, c1, p5, p25, & + field_loc_center, field_type_scalar, radius +#ifdef USE_NETCDF + use netcdf +#endif + + integer (kind=int_kind) :: & + i, j, iblk + + integer (kind=int_kind) :: & + ni, nj, ncid, dimid, varid, ier + + type (block) :: & + this_block ! block information for current block + + integer (kind=int_kind) :: & + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + real (kind=dbl_kind) :: & + closelat, & ! Single-column latitude value + closelon, & ! Single-column longitude value + closelatidx, & ! Single-column latitude index to retrieve + closelonidx ! Single-column longitude index to retrieve + + integer (kind=int_kind) :: & + start(2), & ! Start index to read in + count(2) ! Number of points to read in + + integer (kind=int_kind) :: & + start3(3), & ! Start index to read in + count3(3) ! Number of points to read in + + integer (kind=int_kind) :: & + status ! status flag + + real (kind=dbl_kind), allocatable :: & + lats(:),lons(:),pos_lons(:), glob_grid(:,:) ! temporaries + + real (kind=dbl_kind) :: & + pos_scmlon,& ! temporary + pi, & + puny, & + scamdata ! temporary + + character(len=*), parameter :: subname = '(lonlatgrid)' + +#ifdef USE_NETCDF + !----------------------------------------------------------------- + ! - kmt file is actually clm fractional land file + ! - Determine consistency of dimensions + ! - Read in lon/lat centers in degrees from kmt file + ! - Read in ocean from "kmt" file (1 for ocean, 0 for land) + !----------------------------------------------------------------- + + call icepack_query_parameters(pi_out=pi, puny_out=puny) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! Determine dimension of domain file and check for consistency + + if (my_task == master_task) then + call ice_open_nc(kmt_file, ncid) + + status = nf90_inq_dimid (ncid, 'ni', dimid) + status = nf90_inquire_dimension(ncid, dimid, len=ni) + status = nf90_inq_dimid (ncid, 'nj', dimid) + status = nf90_inquire_dimension(ncid, dimid, len=nj) + end if + + ! Determine start/count to read in for either single column or global lat-lon grid + ! If single_column, then assume that only master_task is used since there is only one task + + if (single_column) then + ! Check for consistency + if (my_task == master_task) then + if ((nx_global /= 1).or. (ny_global /= 1)) then + write(nu_diag,*) 'Because you have selected the column model flag' + write(nu_diag,*) 'Please set nx_global=ny_global=1 in file' + write(nu_diag,*) 'ice_domain_size.F and recompile' + call abort_ice (subname//'ERROR: check nx_global, ny_global') + endif + end if + + ! 
Read in domain file for single column + allocate(lats(nj)) + allocate(lons(ni)) + allocate(pos_lons(ni)) + allocate(glob_grid(ni,nj)) + + start3=(/1,1,1/) + count3=(/ni,nj,1/) + status = nf90_inq_varid(ncid, 'xc' , varid) + if (status /= nf90_noerr) call abort_ice (subname//' inq_varid xc') + status = nf90_get_var(ncid, varid, glob_grid, start3, count3) + if (status /= nf90_noerr) call abort_ice (subname//' get_var xc') + do i = 1,ni + lons(i) = glob_grid(i,1) + end do + + status = nf90_inq_varid(ncid, 'yc' , varid) + if (status /= nf90_noerr) call abort_ice (subname//' inq_varid yc') + status = nf90_get_var(ncid, varid, glob_grid, start3, count3) + if (status /= nf90_noerr) call abort_ice (subname//' get_var yc') + do j = 1,nj + lats(j) = glob_grid(1,j) + end do + + ! convert lons array and scmlon to 0,360 and find index of value closest to 0 + ! and obtain single-column longitude/latitude indices to retrieve + + pos_lons(:)= mod(lons(:) + 360._dbl_kind,360._dbl_kind) + pos_scmlon = mod(scmlon + 360._dbl_kind,360._dbl_kind) + start(1) = (MINLOC(abs(pos_lons-pos_scmlon),dim=1)) + start(2) = (MINLOC(abs(lats -scmlat ),dim=1)) + + deallocate(lats) + deallocate(lons) + deallocate(pos_lons) + deallocate(glob_grid) + + status = nf90_inq_varid(ncid, 'xc' , varid) + if (status /= nf90_noerr) call abort_ice (subname//' inq_varid xc') + status = nf90_get_var(ncid, varid, scamdata, start) + if (status /= nf90_noerr) call abort_ice (subname//' get_var xc') + TLON = scamdata + status = nf90_inq_varid(ncid, 'yc' , varid) + if (status /= nf90_noerr) call abort_ice (subname//' inq_varid yc') + status = nf90_get_var(ncid, varid, scamdata, start) + if (status /= nf90_noerr) call abort_ice (subname//' get_var yc') + TLAT = scamdata + status = nf90_inq_varid(ncid, 'area' , varid) + if (status /= nf90_noerr) call abort_ice (subname//' inq_varid area') + status = nf90_get_var(ncid, varid, scamdata, start) + if (status /= nf90_noerr) call abort_ice (subname//' get_var are') + tarea = scamdata + status = nf90_inq_varid(ncid, 'mask' , varid) + if (status /= nf90_noerr) call abort_ice (subname//' inq_varid mask') + status = nf90_get_var(ncid, varid, scamdata, start) + if (status /= nf90_noerr) call abort_ice (subname//' get_var mask') + hm = scamdata + status = nf90_inq_varid(ncid, 'frac' , varid) + if (status /= nf90_noerr) call abort_ice (subname//' inq_varid frac') + status = nf90_get_var(ncid, varid, scamdata, start) + if (status /= nf90_noerr) call abort_ice (subname//' get_var frac') + ocn_gridcell_frac = scamdata + else + ! Check for consistency + if (my_task == master_task) then + if (nx_global /= ni .and. ny_global /= nj) then + write(nu_diag,*) 'latlongrid: ni,nj = ',ni,nj + write(nu_diag,*) 'latlongrid: nx_g,ny_g = ',nx_global, ny_global + call abort_ice (subname//'ERROR: ni,nj not equal to nx_global,ny_global') + end if + end if + + ! Read in domain file for global lat-lon grid + call ice_read_nc(ncid, 1, 'xc' , TLON , diag=.true.) + call ice_read_nc(ncid, 1, 'yc' , TLAT , diag=.true.) + call ice_read_nc(ncid, 1, 'area', tarea , diag=.true., & + field_loc=field_loc_center,field_type=field_type_scalar) + call ice_read_nc(ncid, 1, 'mask', hm , diag=.true.) + call ice_read_nc(ncid, 1, 'frac', ocn_gridcell_frac, diag=.true.) 
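In single_column mode latlongrid does not read the whole grid: it maps all longitudes (and the requested scmlon) onto [0,360), then uses minloc to pick the domain-file column whose centre is closest to the requested point (latitudes need no wrapping). A standalone sketch of that index search with hypothetical longitude values:

program nearest_point_sketch
   implicit none
   integer, parameter :: dbl = selected_real_kind(13)
   integer, parameter :: ni = 5
   real (kind=dbl) :: lons(ni), pos_lons(ni), scmlon, pos_scmlon
   integer :: istart

   ! hypothetical longitude centres (degrees), mixing negative and positive values
   lons   = (/ -150.0_dbl, -60.0_dbl, 0.0_dbl, 60.0_dbl, 150.0_dbl /)
   scmlon = -155.0_dbl                ! requested single-column longitude

   ! map everything onto [0,360) so the distances are comparable, then take
   ! the index of the closest centre, as latlongrid does for the xc centres
   pos_lons   = mod(lons   + 360.0_dbl, 360.0_dbl)
   pos_scmlon = mod(scmlon + 360.0_dbl, 360.0_dbl)
   istart     = minloc(abs(pos_lons - pos_scmlon), dim=1)

   print *, 'nearest longitude index: ', istart   ! 1 (-150 deg) for these values
end program nearest_point_sketch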
+ end if + + if (my_task == master_task) then + call ice_close_nc(ncid) + end if + + !$OMP PARALLEL DO PRIVATE(iblk,this_block,ilo,ihi,jlo,jhi,i,j) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + ! Convert from degrees to radians + TLON(i,j,iblk) = pi*TLON(i,j,iblk)/180._dbl_kind + + ! Convert from degrees to radians + TLAT(i,j,iblk) = pi*TLAT(i,j,iblk)/180._dbl_kind + + ! Convert from radians^2 to m^2 + ! (area in domain file is in radians^2 and tarea is in m^2) + tarea(i,j,iblk) = tarea(i,j,iblk) * (radius*radius) + end do + end do + end do + !$OMP END PARALLEL DO + + !----------------------------------------------------------------- + ! Calculate various geometric 2d arrays + ! The U grid (velocity) is not used when run with sequential CAM + ! because we only use thermodynamic sea ice. However, ULAT is used + ! in the default initialization of CICE so we calculate it here as + ! a "dummy" so that CICE will initialize with ice. If a no ice + ! initialization is OK (or desired) this can be commented out and + ! ULAT will remain 0 as specified above. ULAT is located at the + ! NE corner of the grid cell, TLAT at the center, so here ULAT is + ! hacked by adding half the latitudinal spacing (in radians) to + ! TLAT. + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk,this_block,ilo,ihi,jlo,jhi,i,j) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + + if (ny_global == 1) then + uarea(i,j,iblk) = tarea(i,j, iblk) + else + uarea(i,j,iblk) = p25* & + (tarea(i,j, iblk) + tarea(i+1,j, iblk) & + + tarea(i,j+1,iblk) + tarea(i+1,j+1,iblk)) + endif + tarear(i,j,iblk) = c1/tarea(i,j,iblk) + uarear(i,j,iblk) = c1/uarea(i,j,iblk) + tinyarea(i,j,iblk) = puny*tarea(i,j,iblk) + + if (single_column) then + ULAT (i,j,iblk) = TLAT(i,j,iblk)+(pi/nj) + else + if (ny_global == 1) then + ULAT (i,j,iblk) = TLAT(i,j,iblk) + else + ULAT (i,j,iblk) = TLAT(i,j,iblk)+(pi/ny_global) + endif + endif + ULON (i,j,iblk) = c0 + ANGLE (i,j,iblk) = c0 + + ANGLET(i,j,iblk) = c0 + HTN (i,j,iblk) = 1.e36_dbl_kind + HTE (i,j,iblk) = 1.e36_dbl_kind + dxt (i,j,iblk) = 1.e36_dbl_kind + dyt (i,j,iblk) = 1.e36_dbl_kind + dxu (i,j,iblk) = 1.e36_dbl_kind + dyu (i,j,iblk) = 1.e36_dbl_kind + dxhy (i,j,iblk) = 1.e36_dbl_kind + dyhx (i,j,iblk) = 1.e36_dbl_kind + cyp (i,j,iblk) = 1.e36_dbl_kind + cxp (i,j,iblk) = 1.e36_dbl_kind + cym (i,j,iblk) = 1.e36_dbl_kind + cxm (i,j,iblk) = 1.e36_dbl_kind + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call makemask +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + + end subroutine latlongrid +#endif + +!======================================================================= + +! Regular rectangular grid and mask +! +! author: Elizabeth C. 
Hunke, LANL + + subroutine rectgrid + + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: c0, c1, c2, radius, cm_to_m, & + field_loc_center, field_loc_NEcorner, field_type_scalar + use ice_domain, only: close_boundaries + + integer (kind=int_kind) :: & + i, j, iblk, & + imid, jmid + + real (kind=dbl_kind) :: & + length, & + rad_to_deg + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + character(len=*), parameter :: subname = '(rectgrid)' + + !----------------------------------------------------------------- + ! Calculate various geometric 2d arrays + !----------------------------------------------------------------- + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + ANGLE(i,j,iblk) = c0 ! "square with the world" + enddo + enddo + enddo + !$OMP END PARALLEL DO + + allocate(work_g1(nx_global,ny_global)) + + ! Weddell Sea + ! lower left corner of grid is 55W, 75S + + ! Barrow AK + ! lower left corner of grid is 156.5W, 71.35N + + if (my_task == master_task) then + work_g1 = c0 + length = dxrect*cm_to_m/radius*rad_to_deg + +! work_g1(1,:) = -55._dbl_kind ! Weddell Sea + work_g1(1,:) = -156.5_dbl_kind ! Barrow AK + + do j = 1, ny_global + do i = 2, nx_global + work_g1(i,j) = work_g1(i-1,j) + length ! ULON + enddo + enddo + work_g1(:,:) = work_g1(:,:) / rad_to_deg + endif + call scatter_global(ULON, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(ULON, distrb_info, & + ew_boundary_type, ns_boundary_type) + + if (my_task == master_task) then + work_g1 = c0 + length = dyrect*cm_to_m/radius*rad_to_deg + +! work_g1(:,1) = -75._dbl_kind ! Weddell Sea + work_g1(:,1) = 71.35_dbl_kind ! Barrow AK + + do i = 1, nx_global + do j = 2, ny_global + work_g1(i,j) = work_g1(i,j-1) + length ! ULAT + enddo + enddo + work_g1(:,:) = work_g1(:,:) / rad_to_deg + endif + call scatter_global(ULAT, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(ULAT, distrb_info, & + ew_boundary_type, ns_boundary_type) + + if (my_task == master_task) then + do j = 1, ny_global + do i = 1, nx_global + work_g1(i,j) = dxrect ! HTN + enddo + enddo + endif + call primary_grid_lengths_HTN(work_g1) ! dxu, dxt + + if (my_task == master_task) then + do j = 1, ny_global + do i = 1, nx_global + work_g1(i,j) = dyrect ! HTE + enddo + enddo + endif + call primary_grid_lengths_HTE(work_g1) ! dyu, dyt + + !----------------------------------------------------------------- + ! Construct T-cell land mask + ! Keyed on ew_boundary_type; ns_boundary_type should be 'open'. + !----------------------------------------------------------------- + + if (my_task == master_task) then + work_g1(:,:) = c0 ! initialize hm as land + + if (trim(ew_boundary_type) == 'cyclic') then + + do j = 3,ny_global-2 ! closed top and bottom + do i = 1,nx_global ! open sides + work_g1(i,j) = c1 ! NOTE nx_global > 5 + enddo + enddo + + elseif (trim(ew_boundary_type) == 'open') then + + ! land in the upper left and lower right corners, + ! otherwise open boundaries + imid = nint(aint(real(nx_global)/c2)) + jmid = nint(aint(real(ny_global)/c2)) + + do j = 3,ny_global-2 + do i = 3,nx_global-2 + work_g1(i,j) = c1 ! open central domain + enddo + enddo + + if (nx_global > 5 .and. 
ny_global > 5) then + + do j = 1, jmid+2 + do i = 1, imid+2 + work_g1(i,j) = c1 ! open lower left corner + enddo + enddo + + do j = max(jmid-2,1), ny_global + do i = max(imid-2,1), nx_global + work_g1(i,j) = c1 ! open upper right corner + enddo + enddo + + endif + + if (close_boundaries) then + work_g1(:, 1:2) = c0 + work_g1(:, ny_global-1:ny_global) = c0 + work_g1(1:2, :) = c0 + work_g1(nx_global-1:nx_global, :) = c0 + endif + + elseif (trim(ew_boundary_type) == 'closed') then + + call abort_ice(subname//'ERROR: closed boundaries not available') + + endif + endif + + call scatter_global(hm, work_g1, master_task, distrb_info, & + field_loc_center, field_type_scalar) + + deallocate(work_g1) + + end subroutine rectgrid + +!======================================================================= + +! CPOM displaced pole grid and land mask. \\ +! Grid record number, field and units are: \\ +! (1) ULAT (degrees) \\ +! (2) ULON (degrees) \\ +! (3) HTN (m) \\ +! (4) HTE (m) \\ +! (7) ANGLE (radians) \\ +! +! Land mask record number and field is (1) KMT. +! +! author: Adrian K. Turner, CPOM, UCL, 09/08/06 + + subroutine cpomgrid + + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: c0, c1, m_to_cm, & + field_loc_NEcorner, field_type_scalar + use ice_domain_size, only: max_blocks + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + logical (kind=log_kind) :: diag + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1 + + real (kind=dbl_kind) :: & + rad_to_deg + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(cpomgrid)' + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call ice_open(nu_grid,grid_file,64) + call ice_open(nu_kmt,kmt_file,32) + + diag = .true. ! write diagnostic info + + ! topography + call ice_read(nu_kmt,1,work1,'ida4',diag) + + hm (:,:,:) = c0 + kmt(:,:,:) = c0 + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + kmt(i,j,iblk) = work1(i,j,iblk) + if (kmt(i,j,iblk) >= c1) hm(i,j,iblk) = c1 + enddo + enddo + enddo + !$OMP END PARALLEL DO + + allocate(work_g1(nx_global,ny_global)) + + ! lat, lon, cell dimensions, angles + call ice_read_global(nu_grid,1,work_g1, 'rda8',diag) + call scatter_global(ULAT, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + + call ice_read_global(nu_grid,2,work_g1, 'rda8',diag) + call scatter_global(ULON, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + + call ice_read_global(nu_grid,3,work_g1, 'rda8',diag) + work_g1 = work_g1 * m_to_cm + call primary_grid_lengths_HTN(work_g1) ! dxu, dxt + + call ice_read_global(nu_grid,4,work_g1, 'rda8',diag) + work_g1 = work_g1 * m_to_cm + call primary_grid_lengths_HTE(work_g1) ! dyu, dyt + + call ice_read_global(nu_grid,7,work_g1,'rda8',diag) + call scatter_global(ANGLE, work_g1, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + + ! 
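In rectgrid above, the idealized lat/lon values are built directly from the uniform spacing: one cell's arc length dxrect*cm_to_m is divided by the Earth's radius to get radians, converted to degrees, and ULON/ULAT are accumulated cell by cell from the lower-left corner (e.g. the Barrow AK value of -156.5). A minimal sketch of that construction (all constants here are hypothetical local values, not the ice_constants ones):

program rectgrid_lon_sketch
   implicit none
   integer, parameter :: dbl = selected_real_kind(13)
   integer, parameter :: nx = 5
   real (kind=dbl), parameter :: dxrect = 16.0e5_dbl       ! 16 km spacing, in cm
   real (kind=dbl), parameter :: cm_to_m = 0.01_dbl
   real (kind=dbl), parameter :: radius = 6.37e6_dbl       ! approx. Earth radius (m)
   real (kind=dbl), parameter :: rad_to_deg = 57.29577951_dbl
   real (kind=dbl) :: lon(nx), dlon
   integer :: i

   ! arc length of one cell expressed in degrees of longitude, as in rectgrid
   dlon = dxrect*cm_to_m/radius*rad_to_deg

   ! accumulate eastward from the western edge of the domain
   lon(1) = -156.5_dbl
   do i = 2, nx
      lon(i) = lon(i-1) + dlon
   enddo

   print *, 'ULON (deg): ', lon
end program rectgrid_lon_sketch

rectgrid then divides by rad_to_deg so that the stored ULON/ULAT are in radians, consistent with the other grid options.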
fix units + ULAT = ULAT / rad_to_deg + ULON = ULON / rad_to_deg + + deallocate(work_g1) + + if (my_task == master_task) then + close (nu_grid) + close (nu_kmt) + endif + + write(nu_diag,*) "min/max HTN: ", minval(HTN), maxval(HTN) + write(nu_diag,*) "min/max HTE: ", minval(HTE), maxval(HTE) + + end subroutine cpomgrid + +!======================================================================= + +! Calculate dxu and dxt from HTN on the global grid, to preserve +! ghost cell and/or land values that might otherwise be lost. Scatter +! dxu, dxt and HTN to all processors. +! +! author: Elizabeth C. Hunke, LANL + + subroutine primary_grid_lengths_HTN(work_g) + + use ice_constants, only: p5, c2, cm_to_m, & + field_loc_center, field_loc_NEcorner, & + field_loc_Nface, field_type_scalar + + real (kind=dbl_kind), dimension(:,:) :: work_g ! global array holding HTN + + ! local variables + + integer (kind=int_kind) :: & + i, j, & + ip1 ! i+1 + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g2 + + character(len=*), parameter :: subname = '(primary_grid_lengths_HTN)' + + if (my_task == master_task) then + allocate(work_g2(nx_global,ny_global)) + else + allocate(work_g2(1,1)) + endif + + if (my_task == master_task) then + do j = 1, ny_global + do i = 1, nx_global + work_g(i,j) = work_g(i,j) * cm_to_m ! HTN + enddo + enddo + do j = 1, ny_global + do i = 1, nx_global + ! assume cyclic; noncyclic will be handled during scatter + ip1 = i+1 + if (i == nx_global) ip1 = 1 + work_g2(i,j) = p5*(work_g(i,j) + work_g(ip1,j)) ! dxu + enddo + enddo + endif + call scatter_global(HTN, work_g, master_task, distrb_info, & + field_loc_Nface, field_type_scalar) + call scatter_global(dxu, work_g2, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + + if (my_task == master_task) then + do j = 2, ny_global + do i = 1, nx_global + work_g2(i,j) = p5*(work_g(i,j) + work_g(i,j-1)) ! dxt + enddo + enddo + ! extrapolate to obtain dxt along j=1 + do i = 1, nx_global + work_g2(i,1) = c2*work_g(i,2) - work_g(i,3) ! dxt + enddo + endif + call scatter_global(dxt, work_g2, master_task, distrb_info, & + field_loc_center, field_type_scalar) + + deallocate(work_g2) + + end subroutine primary_grid_lengths_HTN + +!======================================================================= +! Calculate dyu and dyt from HTE on the global grid, to preserve +! ghost cell and/or land values that might otherwise be lost. Scatter +! dyu, dyt and HTE to all processors. +! +! author: Elizabeth C. Hunke, LANL + + subroutine primary_grid_lengths_HTE(work_g) + + use ice_constants, only: p5, c2, cm_to_m, & + field_loc_center, field_loc_NEcorner, & + field_loc_Eface, field_type_scalar + + real (kind=dbl_kind), dimension(:,:) :: work_g ! global array holding HTE + + ! local variables + + integer (kind=int_kind) :: & + i, j, & + im1 ! i-1 + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g2 + + character(len=*), parameter :: subname = '(primary_grid_lengths_HTE)' + + if (my_task == master_task) then + allocate(work_g2(nx_global,ny_global)) + else + allocate(work_g2(1,1)) + endif + + if (my_task == master_task) then + do j = 1, ny_global + do i = 1, nx_global + work_g(i,j) = work_g(i,j) * cm_to_m ! HTE + enddo + enddo + do j = 1, ny_global-1 + do i = 1, nx_global + work_g2(i,j) = p5*(work_g(i,j) + work_g(i,j+1)) ! dyu + enddo + enddo + ! extrapolate to obtain dyu along j=ny_global + if (ny_global > 1) then + do i = 1, nx_global + work_g2(i,ny_global) = c2*work_g(i,ny_global-1) & + - work_g(i,ny_global-2) ! 
dyu + enddo + endif + endif + call scatter_global(HTE, work_g, master_task, distrb_info, & + field_loc_Eface, field_type_scalar) + call scatter_global(dyu, work_g2, master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + + if (my_task == master_task) then + do j = 1, ny_global + do i = 1, nx_global + ! assume cyclic; noncyclic will be handled during scatter + im1 = i-1 + if (i == 1) im1 = nx_global + work_g2(i,j) = p5*(work_g(i,j) + work_g(im1,j)) ! dyt + enddo + enddo + endif + call scatter_global(dyt, work_g2, master_task, distrb_info, & + field_loc_center, field_type_scalar) + + deallocate(work_g2) + + end subroutine primary_grid_lengths_HTE + +!======================================================================= + +! Sets the boundary values for the T cell land mask (hm) and +! makes the logical land masks for T and U cells (tmask, umask). +! Also creates hemisphere masks (mask-n northern, mask-s southern) +! +! author: Elizabeth C. Hunke, LANL + + subroutine makemask + + use ice_constants, only: c0, p5, & + field_loc_center, field_loc_NEcorner, field_type_scalar + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + real (kind=dbl_kind) :: & + puny + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(makemask)' + + call icepack_query_parameters(puny_out=puny) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (kmt, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloUpdate (hm, halo_info, & + field_loc_center, field_type_scalar) + call ice_timer_stop(timer_bound) + + !----------------------------------------------------------------- + ! construct T-cell and U-cell masks + !----------------------------------------------------------------- + + bm = c0 +! uvm = c0 + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + uvm(i,j,iblk) = min (hm(i,j, iblk), hm(i+1,j, iblk), & + hm(i,j+1,iblk), hm(i+1,j+1,iblk)) + bm(i,j,iblk) = my_task + iblk/100.0_dbl_kind + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (uvm, halo_info, & + field_loc_NEcorner, field_type_scalar) + call ice_HaloUpdate (bm, halo_info, & + field_loc_center, field_type_scalar) + call ice_timer_stop(timer_bound) + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + tmask(i,j,iblk) = .false. + umask(i,j,iblk) = .false. + if ( hm(i,j,iblk) > p5) tmask(i,j,iblk) = .true. + if (uvm(i,j,iblk) > p5) umask(i,j,iblk) = .true. + enddo + enddo + + !----------------------------------------------------------------- + ! create hemisphere masks + !----------------------------------------------------------------- + + lmask_n(:,:,iblk) = .false. + lmask_s(:,:,iblk) = .false. + + tarean(:,:,iblk) = c0 + tareas(:,:,iblk) = c0 + + do j = 1, ny_block + do i = 1, nx_block + + if (ULAT(i,j,iblk) >= -puny) lmask_n(i,j,iblk) = .true. ! N. Hem. + if (ULAT(i,j,iblk) < -puny) lmask_s(i,j,iblk) = .true. ! S. Hem. + + ! 
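makemask above derives the velocity (U-cell) mask from the T-cell land mask by taking the minimum of the four surrounding hm values, so a U point is treated as ocean only when all four neighbouring T cells are ocean; tmask and umask are then just the > 0.5 logical versions of hm and uvm. A small sketch of that rule on a hypothetical 3x3 patch of hm with one land cell:

program umask_sketch
   implicit none
   integer, parameter :: dbl = selected_real_kind(13)
   real (kind=dbl) :: hm(3,3), uvm(2,2)
   logical :: umask(2,2)
   integer :: i, j

   ! hypothetical T-cell mask: 1 = ocean, 0 = land (one land cell at (2,2))
   hm = 1.0_dbl
   hm(2,2) = 0.0_dbl

   ! U point (i,j) sits at the NE corner of T cell (i,j), as in makemask
   do j = 1, 2
      do i = 1, 2
         uvm(i,j)   = min(hm(i,j), hm(i+1,j), hm(i,j+1), hm(i+1,j+1))
         umask(i,j) = uvm(i,j) > 0.5_dbl
      enddo
   enddo

   ! every U point touching the land cell is masked out
   print *, 'umask: ', umask
end program umask_sketch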
N hemisphere area mask (m^2) + if (lmask_n(i,j,iblk)) tarean(i,j,iblk) = tarea(i,j,iblk) & + * hm(i,j,iblk) + + ! S hemisphere area mask (m^2) + if (lmask_s(i,j,iblk)) tareas(i,j,iblk) = tarea(i,j,iblk) & + * hm(i,j,iblk) + + enddo + enddo + + enddo ! iblk + !$OMP END PARALLEL DO + + end subroutine makemask + +!======================================================================= + +! Initializes latitude and longitude on T grid +! +! author: Elizabeth C. Hunke, LANL; code originally based on POP grid +! generation routine + + subroutine Tlatlon + + use ice_constants, only: c0, c1, c2, c4, & + field_loc_center, field_type_scalar + + integer (kind=int_kind) :: & + i, j, iblk , & ! horizontal indices + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + real (kind=dbl_kind) :: & + z1,x1,y1,z2,x2,y2,z3,x3,y3,z4,x4,y4,tx,ty,tz,da, & + rad_to_deg + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(Tlatlon)' + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + TLAT(:,:,:) = c0 + TLON(:,:,:) = c0 + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & + !$OMP x1,y1,z1,x2,y2,z2,x3,y3,z3,x4,y4,z4, & + !$OMP tx,ty,tz,da) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + + z1 = cos(ULAT(i-1,j-1,iblk)) + x1 = cos(ULON(i-1,j-1,iblk))*z1 + y1 = sin(ULON(i-1,j-1,iblk))*z1 + z1 = sin(ULAT(i-1,j-1,iblk)) + + z2 = cos(ULAT(i,j-1,iblk)) + x2 = cos(ULON(i,j-1,iblk))*z2 + y2 = sin(ULON(i,j-1,iblk))*z2 + z2 = sin(ULAT(i,j-1,iblk)) + + z3 = cos(ULAT(i-1,j,iblk)) + x3 = cos(ULON(i-1,j,iblk))*z3 + y3 = sin(ULON(i-1,j,iblk))*z3 + z3 = sin(ULAT(i-1,j,iblk)) + + z4 = cos(ULAT(i,j,iblk)) + x4 = cos(ULON(i,j,iblk))*z4 + y4 = sin(ULON(i,j,iblk))*z4 + z4 = sin(ULAT(i,j,iblk)) + + tx = (x1+x2+x3+x4)/c4 + ty = (y1+y2+y3+y4)/c4 + tz = (z1+z2+z3+z4)/c4 + da = sqrt(tx**2+ty**2+tz**2) + + tz = tz/da + + ! TLON in radians East + TLON(i,j,iblk) = c0 + if (tx /= c0 .or. ty /= c0) TLON(i,j,iblk) = atan2(ty,tx) + + ! TLAT in radians North + TLAT(i,j,iblk) = asin(tz) + + enddo ! i + enddo ! j + enddo ! iblk + !$OMP END PARALLEL DO + if (trim(grid_type) == 'regional') then + ! for W boundary extrapolate from interior + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + i = ilo + if (this_block%i_glob(i) == 1) then + do j = jlo, jhi + TLON(i,j,iblk) = c2*TLON(i+1,j,iblk) - & + TLON(i+2,j,iblk) + TLAT(i,j,iblk) = c2*TLAT(i+1,j,iblk) - & + TLAT(i+2,j,iblk) + enddo + endif + enddo + !$OMP END PARALLEL DO + endif ! 
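Tlatlon above places each T point at the approximate spherical midpoint of its four U-cell corners: each corner (ULAT, ULON) pair is turned into a 3-D unit vector, the four vectors are averaged, and the mean vector is converted back to TLON via atan2 and to TLAT via asin of its normalized vertical component. A standalone sketch of that construction for a single cell, with hypothetical corner coordinates:

program tlatlon_sketch
   implicit none
   integer, parameter :: dbl = selected_real_kind(13)
   real (kind=dbl), parameter :: deg2rad = 3.141592653589793_dbl/180.0_dbl
   real (kind=dbl) :: ulat(4), ulon(4), x, y, z, d, tlon, tlat
   integer :: k

   ! hypothetical corner coordinates (degrees) of one cell, converted to radians
   ulat = (/ 60.0_dbl, 60.0_dbl, 61.0_dbl, 61.0_dbl /) * deg2rad
   ulon = (/ 10.0_dbl, 11.0_dbl, 11.0_dbl, 10.0_dbl /) * deg2rad

   ! average the corners as unit vectors on the sphere, as in Tlatlon
   x = 0.0_dbl;  y = 0.0_dbl;  z = 0.0_dbl
   do k = 1, 4
      x = x + cos(ulon(k))*cos(ulat(k))
      y = y + sin(ulon(k))*cos(ulat(k))
      z = z + sin(ulat(k))
   enddo
   x = 0.25_dbl*x;  y = 0.25_dbl*y;  z = 0.25_dbl*z
   d = sqrt(x*x + y*y + z*z)

   tlon = atan2(y, x)       ! radians East
   tlat = asin(z/d)         ! radians North

   print *, 'T point (deg): ', tlon/deg2rad, tlat/deg2rad   ! roughly (10.5, 60.5)
end program tlatlon_sketch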
regional + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (TLON, halo_info, & + field_loc_center, field_type_scalar, & + fillValue=c1) + call ice_HaloUpdate (TLAT, halo_info, & + field_loc_center, field_type_scalar, & + fillValue=c1) + call ice_HaloExtrapolate(TLON, distrb_info, & + ew_boundary_type, ns_boundary_type) + call ice_HaloExtrapolate(TLAT, distrb_info, & + ew_boundary_type, ns_boundary_type) + call ice_timer_stop(timer_bound) + + x1 = global_minval(TLON, distrb_info, tmask) + x2 = global_maxval(TLON, distrb_info, tmask) + x3 = global_minval(TLAT, distrb_info, tmask) + x4 = global_maxval(TLAT, distrb_info, tmask) + + y1 = global_minval(ULON, distrb_info, umask) + y2 = global_maxval(ULON, distrb_info, umask) + y3 = global_minval(ULAT, distrb_info, umask) + y4 = global_maxval(ULAT, distrb_info, umask) + + if (my_task==master_task) then + write(nu_diag,*) ' ' + if (nx_block > 5+2*nghost .and. ny_block > 5+2*nghost) then + write(nu_diag,*) 'min/max ULON:', y1*rad_to_deg, y2*rad_to_deg + write(nu_diag,*) 'min/max ULAT:', y3*rad_to_deg, y4*rad_to_deg + endif + write(nu_diag,*) 'min/max TLON:', x1*rad_to_deg, x2*rad_to_deg + write(nu_diag,*) 'min/max TLAT:', x3*rad_to_deg, x4*rad_to_deg + endif ! my_task + + end subroutine Tlatlon + +!======================================================================= + +! Transfer vector component from T-cell centers to U-cell centers. +! +! author: Elizabeth C. Hunke, LANL + + subroutine t2ugrid_vector (work) + + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: field_loc_center, field_type_vector + use ice_domain_size, only: max_blocks + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(inout) :: & + work + + ! local variables + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1 + + character(len=*), parameter :: subname = '(t2ugrid_vector)' + + work1(:,:,:) = work(:,:,:) + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (work1, halo_info, & + field_loc_center, field_type_vector) + call ice_timer_stop(timer_bound) + + call to_ugrid(work1,work) + + end subroutine t2ugrid_vector + +!======================================================================= + +! Shifts quantities from the T-cell midpoint (work1) to the U-cell +! midpoint (work2) +! NOTE: Input array includes ghost cells that must be updated before +! calling this routine. +! +! author: Elizabeth C. Hunke, LANL + + subroutine to_ugrid(work1,work2) + + use ice_constants, only: c0, p25 + + real (kind=dbl_kind), intent(in) :: & + work1(nx_block,ny_block,max_blocks) + + real (kind=dbl_kind), intent(out) :: & + work2(nx_block,ny_block,max_blocks) + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(to_ugrid)' + + ! local variables + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi ! 
beginning and end of physical domain + + work2(:,:,:) = c0 + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + work2(i,j,iblk) = p25 * & + (work1(i, j, iblk)*tarea(i, j, iblk) & + + work1(i+1,j, iblk)*tarea(i+1,j, iblk) & + + work1(i, j+1,iblk)*tarea(i, j+1,iblk) & + + work1(i+1,j+1,iblk)*tarea(i+1,j+1,iblk)) & + / uarea(i, j, iblk) + enddo + enddo + enddo + !$OMP END PARALLEL DO + + end subroutine to_ugrid + +!======================================================================= + +! Transfer from U-cell centers to T-cell centers. Writes work into +! another array that has ghost cells +! NOTE: Input array is dimensioned only over physical cells. +! +! author: Elizabeth C. Hunke, LANL + + subroutine u2tgrid_vector (work) + + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: field_loc_NEcorner, field_type_vector + use ice_domain_size, only: max_blocks + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks), intent(inout) :: & + work + + ! local variables + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1 + + character(len=*), parameter :: subname = '(u2tgrid_vector)' + + work1(:,:,:) = work(:,:,:) + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (work1, halo_info, & + field_loc_NEcorner, field_type_vector) + call ice_timer_stop(timer_bound) + + call to_tgrid(work1,work) + + end subroutine u2tgrid_vector + +!======================================================================= + +! Shifts quantities from the U-cell midpoint (work1) to the T-cell +! midpoint (work2) +! NOTE: Input array includes ghost cells that must be updated before +! calling this routine. +! +! author: Elizabeth C. Hunke, LANL + + subroutine to_tgrid(work1, work2) + + use ice_constants, only: p25 + + real (kind=dbl_kind) :: work1(nx_block,ny_block,max_blocks), & + work2(nx_block,ny_block,max_blocks) + + ! local variables + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(to_tgrid)' + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + work2(i,j,iblk) = p25 * & + (work1(i, j ,iblk) * uarea(i, j, iblk) & + + work1(i-1,j ,iblk) * uarea(i-1,j, iblk) & + + work1(i, j-1,iblk) * uarea(i, j-1,iblk) & + + work1(i-1,j-1,iblk) * uarea(i-1,j-1,iblk)) & + / tarea(i, j, iblk) + enddo + enddo + enddo + !$OMP END PARALLEL DO + + end subroutine to_tgrid + +!======================================================================= +! The following code is used for obtaining the coordinates of the grid +! vertices for CF-compliant netCDF history output. Approximate! +!======================================================================= + +! These fields are only used for netcdf history output, and the +! ghost cell values are not needed. +! NOTE: Extrapolations were used: these fields are approximate! +! +! authors: A. McLaren, Met Office +! E. 
Hunke, LANL + + subroutine gridbox_corners + + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: c0, c2, c360, & + field_loc_NEcorner, field_type_scalar + use ice_domain_size, only: max_blocks + + integer (kind=int_kind) :: & + i,j,iblk,icorner,& ! index counters + ilo,ihi,jlo,jhi ! beginning and end of physical domain + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g2 + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1 + + real (kind=dbl_kind) :: & + rad_to_deg + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(gridbox_corners)' + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !------------------------------------------------------------- + ! Get coordinates of grid boxes for each block as follows: + ! (1) SW corner, (2) SE corner, (3) NE corner, (4) NW corner + !------------------------------------------------------------- + + latu_bounds(:,:,:,:) = c0 + lonu_bounds(:,:,:,:) = c0 + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + + latu_bounds(1,i,j,iblk)=TLAT(i ,j ,iblk)*rad_to_deg + latu_bounds(2,i,j,iblk)=TLAT(i+1,j ,iblk)*rad_to_deg + latu_bounds(3,i,j,iblk)=TLAT(i+1,j+1,iblk)*rad_to_deg + latu_bounds(4,i,j,iblk)=TLAT(i ,j+1,iblk)*rad_to_deg + + lonu_bounds(1,i,j,iblk)=TLON(i ,j ,iblk)*rad_to_deg + lonu_bounds(2,i,j,iblk)=TLON(i+1,j ,iblk)*rad_to_deg + lonu_bounds(3,i,j,iblk)=TLON(i+1,j+1,iblk)*rad_to_deg + lonu_bounds(4,i,j,iblk)=TLON(i ,j+1,iblk)*rad_to_deg + + enddo + enddo + enddo + !$OMP END PARALLEL DO + + !---------------------------------------------------------------- + ! extrapolate on global grid to get edge values + !---------------------------------------------------------------- + + if (my_task == master_task) then + allocate(work_g2(nx_global,ny_global)) + else + allocate(work_g2(1,1)) + endif + + work1(:,:,:) = latu_bounds(2,:,:,:) +! 
work_g2 = c0 + + call gather_global(work_g2, work1, master_task, distrb_info) + if (my_task == master_task) then + do j = 1, ny_global + work_g2(nx_global,j) = c2*work_g2(nx_global-1,j) & + - work_g2(nx_global-2,j) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + latu_bounds(2,:,:,:) = work1(:,:,:) + + work1(:,:,:) = latu_bounds(3,:,:,:) + call gather_global(work_g2, work1, master_task, distrb_info) + if (my_task == master_task) then + do i = 1, nx_global + work_g2(i,ny_global) = c2*work_g2(i,ny_global-1) & + - work_g2(i,ny_global-2) + enddo + do j = 1, ny_global + work_g2(nx_global,j) = c2*work_g2(nx_global-1,j) & + - work_g2(nx_global-2,j) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + latu_bounds(3,:,:,:) = work1(:,:,:) + + work1(:,:,:) = latu_bounds(4,:,:,:) + call gather_global(work_g2, work1, master_task, distrb_info) + if (my_task == master_task) then + do i = 1, nx_global + work_g2(i,ny_global) = c2*work_g2(i,ny_global-1) & + - work_g2(i,ny_global-2) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + latu_bounds(4,:,:,:) = work1(:,:,:) + + work1(:,:,:) = lonu_bounds(2,:,:,:) + call gather_global(work_g2, work1, master_task, distrb_info) + if (my_task == master_task) then + do j = 1, ny_global + work_g2(nx_global,j) = c2*work_g2(nx_global-1,j) & + - work_g2(nx_global-2,j) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + lonu_bounds(2,:,:,:) = work1(:,:,:) + + work1(:,:,:) = lonu_bounds(3,:,:,:) + call gather_global(work_g2, work1, master_task, distrb_info) + if (my_task == master_task) then + do i = 1, nx_global + work_g2(i,ny_global) = c2*work_g2(i,ny_global-1) & + - work_g2(i,ny_global-2) + enddo + do j = 1, ny_global + work_g2(nx_global,j) = c2*work_g2(nx_global-1,j) & + - work_g2(nx_global-2,j) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + lonu_bounds(3,:,:,:) = work1(:,:,:) + + work1(:,:,:) = lonu_bounds(4,:,:,:) + call gather_global(work_g2, work1, master_task, distrb_info) + if (my_task == master_task) then + do i = 1, nx_global + work_g2(i,ny_global) = c2*work_g2(i,ny_global-1) & + - work_g2(i,ny_global-2) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + lonu_bounds(4,:,:,:) = work1(:,:,:) + + deallocate(work_g2) + + !---------------------------------------------------------------- + ! Convert longitude to Degrees East >0 for history output + !---------------------------------------------------------------- + + allocate(work_g2(nx_block,ny_block)) ! not used as global here + !OMP fails in this loop + do iblk = 1, nblocks + do icorner = 1, 4 + work_g2(:,:) = lont_bounds(icorner,:,:,iblk) + c360 + where (work_g2 > c360) work_g2 = work_g2 - c360 + where (work_g2 < c0 ) work_g2 = work_g2 + c360 + lont_bounds(icorner,:,:,iblk) = work_g2(:,:) + work_g2(:,:) = lonu_bounds(icorner,:,:,iblk) + c360 + where (work_g2 > c360) work_g2 = work_g2 - c360 + where (work_g2 < c0 ) work_g2 = work_g2 + c360 + lonu_bounds(icorner,:,:,iblk) = work_g2(:,:) + enddo + enddo + deallocate(work_g2) + + end subroutine gridbox_corners + +!======================================================================= + +! 
NOTE: Boundary conditions for fields on NW, SW, SE corners +! have not been implemented; using NE corner location for all. +! Extrapolations are also used: these fields are approximate! +! +! authors: A. McLaren, Met Office +! E. Hunke, LANL + + subroutine gridbox_verts(work_g,vbounds) + + use ice_blocks, only: nx_block, ny_block + use ice_constants, only: c0, c2, & + field_loc_NEcorner, field_type_scalar + use ice_domain_size, only: max_blocks + + real (kind=dbl_kind), dimension(:,:), intent(in) :: & + work_g + + real (kind=dbl_kind), dimension(4,nx_block,ny_block,max_blocks), intent(out) :: & + vbounds + + integer (kind=int_kind) :: & + i,j ! index counters + + real (kind=dbl_kind) :: & + rad_to_deg + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g2 + + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1 + + character(len=*), parameter :: subname = '(gridbox_verts)' + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (my_task == master_task) then + allocate(work_g2(nx_global,ny_global)) + else + allocate(work_g2(1,1)) + endif + + !------------------------------------------------------------- + ! Get coordinates of grid boxes for each block as follows: + ! (1) SW corner, (2) SE corner, (3) NE corner, (4) NW corner + !------------------------------------------------------------- + + work_g2(:,:) = c0 + if (my_task == master_task) then + do j = 2, ny_global + do i = 2, nx_global + work_g2(i,j) = work_g(i-1,j-1) * rad_to_deg + enddo + enddo + ! extrapolate + do j = 1, ny_global + work_g2(1,j) = c2*work_g2(2,j) - work_g2(3,j) + enddo + do i = 1, nx_global + work_g2(i,1) = c2*work_g2(i,2) - work_g2(i,3) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + vbounds(1,:,:,:) = work1(:,:,:) + + work_g2(:,:) = c0 + if (my_task == master_task) then + do j = 2, ny_global + do i = 1, nx_global + work_g2(i,j) = work_g(i,j-1) * rad_to_deg + enddo + enddo + ! extrapolate + do i = 1, nx_global + work_g2(i,1) = (c2*work_g2(i,2) - work_g2(i,3)) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + vbounds(2,:,:,:) = work1(:,:,:) + + work_g2(:,:) = c0 + if (my_task == master_task) then + do j = 1, ny_global + do i = 1, nx_global + work_g2(i,j) = work_g(i,j) * rad_to_deg + enddo + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + vbounds(3,:,:,:) = work1(:,:,:) + + work_g2(:,:) = c0 + if (my_task == master_task) then + do j = 1, ny_global + do i = 2, nx_global + work_g2(i,j) = work_g(i-1,j ) * rad_to_deg + enddo + enddo + ! extrapolate + do j = 1, ny_global + work_g2(1,j) = c2*work_g2(2,j) - work_g2(3,j) + enddo + endif + call scatter_global(work1, work_g2, & + master_task, distrb_info, & + field_loc_NEcorner, field_type_scalar) + vbounds(4,:,:,:) = work1(:,:,:) + + deallocate (work_g2) + + end subroutine gridbox_verts + +!======================================================================= +! ocean bathymetry for grounded sea ice (basalstress) or icebergs +! currently hardwired for 40 levels (gx3, gx1 grids) +! should be read from a file instead (see subroutine read_basalstress_bathy) + + subroutine get_bathymetry + + integer (kind=int_kind) :: & + i, j, k, iblk ! 
loop indices + + integer (kind=int_kind), parameter :: & + nlevel = 40 ! number of layers (gx3 grid) + + real (kind=dbl_kind), dimension(nlevel) :: & + depth ! total depth, m + + real (kind=dbl_kind) :: & + puny + + real (kind=dbl_kind), dimension(nlevel), parameter :: & + thick = (/ & ! ocean layer thickness, m + 10.01244_dbl_kind, 10.11258_dbl_kind, 10.31682_dbl_kind, & + 10.63330_dbl_kind, 11.07512_dbl_kind, 11.66145_dbl_kind, & + 12.41928_dbl_kind, 13.38612_dbl_kind, 14.61401_dbl_kind, & + 16.17561_dbl_kind, 18.17368_dbl_kind, 20.75558_dbl_kind, & + 24.13680_dbl_kind, 28.63821_dbl_kind, 34.74644_dbl_kind, & + 43.20857_dbl_kind, 55.16812_dbl_kind, 72.30458_dbl_kind, & + 96.74901_dbl_kind, 130.0392_dbl_kind, 170.0489_dbl_kind, & + 207.9933_dbl_kind, 233.5694_dbl_kind, 245.2719_dbl_kind, & + 248.9804_dbl_kind, 249.8322_dbl_kind, 249.9787_dbl_kind, & + 249.9979_dbl_kind, 249.9998_dbl_kind, 250.0000_dbl_kind, & + 250.0000_dbl_kind, 250.0000_dbl_kind, 250.0000_dbl_kind, & + 250.0000_dbl_kind, 250.0000_dbl_kind, 250.0000_dbl_kind, & + 250.0000_dbl_kind, 250.0000_dbl_kind, 250.0000_dbl_kind, & + 250.0000_dbl_kind /) + + character(len=*), parameter :: subname = '(get_bathymetry)' + + call icepack_query_parameters(puny_out=puny) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (use_bathymetry) then + + call read_basalstress_bathy + + else + + ! convert to total depth + depth(1) = thick(1) + do k = 2, nlevel + depth(k) = depth(k-1) + thick(k) + enddo + + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + k = kmt(i,j,iblk) + if (k > puny) bathymetry(i,j,iblk) = depth(k) + enddo + enddo + enddo + + endif ! bathymetry_file + + end subroutine get_bathymetry + +!======================================================================= +! with use_bathymetry = false, vertical depth profile generated for max KMT +! with use_bathymetry = true, expects to read in pop vert_grid file + + subroutine get_bathymetry_popfile + + integer (kind=int_kind) :: & + i, j, k, iblk ! loop indices + + integer (kind=int_kind) :: & + ntmp, nlevel , & ! number of levels (max KMT) + k1 , & ! levels + ierr , & ! error tag + fid ! fid unit number + + real (kind=dbl_kind), dimension(:),allocatable :: & + depth , & ! total depth, m + thick ! layer thickness, cm -> m + + character(len=*), parameter :: subname = '(get_bathymetry_popfile)' + + ntmp = maxval(KMT) + nlevel = global_maxval(ntmp,distrb_info) + + if (my_task==master_task) then + write(nu_diag,*) subname,' KMT max = ',nlevel + endif + + allocate(depth(nlevel),thick(nlevel)) + thick = -999999. + depth = -999999. + + if (use_bathymetry) then + + write (nu_diag,*) subname,' Bathymetry file = ', trim(bathymetry_file) + if (my_task == master_task) then + call get_fileunit(fid) + open(fid,file=bathymetry_file,form='formatted',iostat=ierr) + if (ierr/=0) call abort_ice(subname//' open error') + do k = 1,nlevel + read(fid,*,iostat=ierr) thick(k) + if (ierr/=0) call abort_ice(subname//' read error') + enddo + call release_fileunit(fid) + endif + + call broadcast_array(thick,master_task) + + else + + ! create thickness profile + k1 = min(5,nlevel) + do k = 1,k1 + thick(k) = max(10000._dbl_kind/float(nlevel),500._dbl_kind) + enddo + do k = k1+1,nlevel + thick(k) = min(thick(k-1)*1.2_dbl_kind,20000._dbl_kind) + enddo + + endif + + ! convert thick from cm to m + thick = thick / 100._dbl_kind + + ! 
convert to total depth +      depth(1) = thick(1) +      do k = 2, nlevel +         depth(k) = depth(k-1) + thick(k) +         if (depth(k) < 0.) call abort_ice(subname//' negative depth error') +      enddo + +      if (my_task==master_task) then +         do k = 1,nlevel +            write(nu_diag,'(2a,i6,2f13.7)') subname,' k, thick(m), depth(m) = ',k,thick(k),depth(k) +         enddo +      endif + +      bathymetry = 0._dbl_kind +      do iblk = 1, nblocks +      do j = 1, ny_block +      do i = 1, nx_block +         k = kmt(i,j,iblk) +         if (k > nlevel) call abort_ice(subname//' kmt/nlevel error') +         if (k > 0) bathymetry(i,j,iblk) = depth(k) +      enddo +      enddo +      enddo + +      deallocate(depth,thick) + +      end subroutine get_bathymetry_popfile + +!======================================================================= + +! Read bathymetry data for basal stress calculation (grounding scheme for +! landfast ice) in CICE stand-alone mode. When CICE is in coupled mode +! (e.g. CICE-NEMO), hwater should be updated at each time level so that +! it varies with ocean dynamics. +! +! author: Fred Dupont, CMC + +      subroutine read_basalstress_bathy + +      ! use module +      use ice_read_write +      use ice_constants, only: field_loc_center, field_type_scalar + +      ! local variables +      integer (kind=int_kind) :: & +         fid_init  ! file id for netCDF init file + +      character (char_len_long) :: &   ! input data file names +         fieldname + +      logical (kind=log_kind) :: diag=.true. + +      character(len=*), parameter :: subname = '(read_basalstress_bathy)' + +      if (my_task == master_task) then +          write (nu_diag,*) ' ' +          write (nu_diag,*) 'Bathymetry file: ', trim(bathymetry_file) +          call icepack_warnings_flush(nu_diag) +      endif + +      call ice_open_nc(bathymetry_file,fid_init) + +      fieldname='Bathymetry' + +      if (my_task == master_task) then +         write(nu_diag,*) 'reading ',TRIM(fieldname) +         call icepack_warnings_flush(nu_diag) +      endif +      call ice_read_nc(fid_init,1,fieldname,bathymetry,diag, & +                       field_loc=field_loc_center, & +                       field_type=field_type_scalar) + +      call ice_close_nc(fid_init) + +      if (my_task == master_task) then +         write(nu_diag,*) 'closing file ',TRIM(bathymetry_file) +         call icepack_warnings_flush(nu_diag) +      endif + +      end subroutine read_basalstress_bathy + +!======================================================================= + +      end module ice_grid + +!======================================================================= diff --git a/cicecore/cicedynB/infrastructure/ice_read_write.F90 b/cicecore/cicedynB/infrastructure/ice_read_write.F90 new file mode 100644 index 000000000..87d0813cc --- /dev/null +++ b/cicecore/cicedynB/infrastructure/ice_read_write.F90 @@ -0,0 +1,2456 @@ +#ifdef ncdf +#define USE_NETCDF +#endif +!======================================================================= + +! Routines for opening, reading and writing external files +! +! author: Tony Craig, NCAR +! +! 2004: Block structure added by William Lipscomb, LANL +! 2006: Converted to free source form (F90) by Elizabeth Hunke +! 
2007: netcdf versions added by Alison McLaren & Ann Keen, Met Office + + module ice_read_write + + use ice_kinds_mod + use ice_constants, only: c0, spval_dbl, & + field_loc_noupdate, field_type_noupdate + use ice_communicate, only: my_task, master_task + use ice_broadcast, only: broadcast_scalar + use ice_domain, only: distrb_info, orca_halogrid + use ice_domain_size, only: max_blocks, nx_global, ny_global, ncat + use ice_blocks, only: nx_block, ny_block, nghost + use ice_exit, only: abort_ice + use ice_fileunits, only: nu_diag + +#ifdef USE_NETCDF + use netcdf +#endif + + implicit none + + private + + integer (kind=int_kind), parameter, private :: & + bits_per_byte = 8 ! number of bits per byte. + ! used to determine RecSize in ice_open + + public :: ice_open, & + ice_open_ext, & + ice_open_nc, & + ice_read, & + ice_read_ext, & + ice_read_nc, & + ice_read_global, & + ice_read_global_nc, & + ice_read_nc_uv, & + ice_read_nc_xyf, & + ice_write, & + ice_write_nc, & + ice_write_ext, & + ice_read_vec_nc, & + ice_get_ncvarsize, & + ice_close_nc + + interface ice_write + module procedure ice_write_xyt, & + ice_write_xyzt + end interface + + interface ice_read + module procedure ice_read_xyt, & + ice_read_xyzt + end interface + + interface ice_read_nc + module procedure ice_read_nc_xy, & + ice_read_nc_xyz, & + !ice_read_nc_xyf, & + ice_read_nc_point, & + ice_read_nc_z + end interface + + interface ice_write_nc + module procedure ice_write_nc_xy, & + ice_write_nc_xyz + end interface + +!======================================================================= + + contains + +!======================================================================= + +! Opens an unformatted file for reading. +! nbits indicates whether the file is sequential or direct access. +! +! author: Tony Craig, NCAR + + subroutine ice_open(nu, filename, nbits, algn) + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nbits ! no. of bits per variable (0 for sequential access) + + integer (kind=int_kind), intent(in), optional :: algn + integer (kind=int_kind) :: RecSize, Remnant, nbytes + + character (*) :: filename + + character(len=*), parameter :: subname = '(ice_open)' + + if (my_task == master_task) then + + if (nbits == 0) then ! sequential access + + open(nu,file=filename,form='unformatted') + + else ! direct access + + ! use nbytes to compute RecSize. + ! this prevents integer overflow with large global grids using nbits + ! nx*ny*nbits > 2^31 -1 (i.e., global grid 9000x7054x64) + nbytes = nbits/bits_per_byte + RecSize = nx_global*ny_global*nbytes + + if (present(algn)) then + ! If data is keept in blocks using given sizes (=algn) + ! Used in eg. HYCOM binary files, which are stored as "blocks" dividable by 16384 bit (=algn) + if (algn /= 0) then + Remnant = modulo(RecSize,algn) + if (Remnant /= 0) then + RecSize = RecSize + (algn - Remnant) + endif + endif + endif + open(nu,file=filename,recl=RecSize, & + form='unformatted',access='direct') + endif ! nbits = 0 + + endif ! my_task = master_task + + end subroutine ice_open + +!======================================================================= + +! Opens an unformatted file for reading, incl ghost cells (direct access). +! nbits indicates whether the file is sequential or direct access. +! +! authors: Tony Craig, NCAR +! David Hebert, NRLSSC + + subroutine ice_open_ext(nu, filename, nbits) + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nbits ! no. 
of bits per variable (0 for sequential access) + + integer (kind=int_kind) :: RecSize, nbytes + + character (*) :: filename + + integer (kind=int_kind) :: & + nx, ny ! grid dimensions including ghost cells + + character(len=*), parameter :: subname = '(ice_open_ext)' + + if (my_task == master_task) then + + if (nbits == 0) then ! sequential access + + open(nu,file=filename,form='unformatted') + + else ! direct access + + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + + ! use nbytes to compute RecSize. + ! this prevents integer overflow with large global grids using nbits + ! nx*ny*nbits > 2^31 -1 (i.e., global grid 9000x7054x64) + nbytes = nbits/bits_per_byte + RecSize = nx*ny*nbytes + open(nu,file=filename,recl=RecSize, & + form='unformatted',access='direct') + endif ! nbits = 0 + + endif ! my_task = master_task + + end subroutine ice_open_ext + +!======================================================================= + +! Read an unformatted file and scatter to processors. +! work is a real array, atype indicates the format of the data. +! If the optional variables field_loc and field_type are present, +! the ghost cells are filled using values from the global array. +! This prevents them from being filled with zeroes in land cells +! (subroutine ice_HaloUpdate need not be called). +! +! author: Tony Craig, NCAR + + subroutine ice_read_xyt(nu, nrec, work, atype, diag, & + field_loc, field_type, & + ignore_eof, hit_eof) + + use ice_gather_scatter, only: scatter_global + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(out) :: & + work ! output array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for input array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + logical (kind=log_kind), optional, intent(in) :: ignore_eof + logical (kind=log_kind), optional, intent(out) :: hit_eof + + ! local variables + + integer (kind=int_kind) :: i, j, ios + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + + logical (kind=log_kind) :: ignore_eof_use + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + real (kind=real_kind), dimension(:,:), allocatable :: & + work_gr + + integer(kind=int_kind), dimension(:,:), allocatable :: & + work_gi4 + + integer(selected_int_kind(13)), dimension(:,:), allocatable :: & + work_gi8 + + character(len=*), parameter :: subname = '(ice_read_xyt)' + + if (my_task == master_task) then + allocate(work_g1(nx_global,ny_global)) + else + allocate(work_g1(1,1)) ! to save memory + endif + + if (my_task == master_task) then + + !------------------------------------------------------------------- + ! Read global array according to format atype + !------------------------------------------------------------------- + if (present(hit_eof)) hit_eof = .false. 
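+         ! The branches below dispatch on the four-character atype tag:
+         !   'ida4' / 'ida8' : direct-access integer records (4-byte / 8-byte)
+         !   'rda4' / 'rda8' : direct-access real records    (4-byte / 8-byte)
+         !   'ruf8'          : sequential unformatted 8-byte real records
+         ! Each branch promotes the record to dbl_kind in work_g1 before the
+         ! field is scattered to the local blocks.  As a rough, illustrative
+         ! sketch only (the unit number and file name here are placeholders,
+         ! not part of this routine), a caller pairs ice_open with ice_read:
+         !   call ice_open(nu_forcing, 'my_field.bin', 64)
+         !   call ice_read(nu_forcing, 1, work, 'rda8', .true., &
+         !                 field_loc_center, field_type_scalar)
+         ! where 'rda8' selects the 8-byte real branch and the optional
+         ! field_loc/field_type arguments request a halo-filling scatter_global.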
+ + if (atype == 'ida4') then + allocate(work_gi4(nx_global,ny_global)) + read(nu,rec=nrec) work_gi4 + work_g1 = real(work_gi4,kind=dbl_kind) + deallocate(work_gi4) + elseif (atype == 'ida8') then + allocate(work_gi8(nx_global,ny_global)) + read(nu,rec=nrec) work_gi8 + work_g1 = real(work_gi8,kind=dbl_kind) + deallocate(work_gi8) + elseif (atype == 'rda4') then + allocate(work_gr(nx_global,ny_global)) + read(nu,rec=nrec) work_gr + work_g1 = work_gr + deallocate(work_gr) + elseif (atype == 'rda8') then + read(nu,rec=nrec) work_g1 + elseif (atype == 'ruf8') then + if (present(ignore_eof)) then + ignore_eof_use = ignore_eof + else + ignore_eof_use = .false. + endif + if (ignore_eof_use) then + ! Read line from file, checking for end-of-file + read(nu, iostat=ios) ((work_g1(i,j),i=1,nx_global), & + j=1,ny_global) + if (present(hit_eof)) hit_eof = ios < 0 + else + read(nu) ((work_g1(i,j),i=1,nx_global),j=1,ny_global) + endif + else + write(nu_diag,*) ' ERROR: reading unknown atype ',atype + endif + endif ! my_task = master_task + + if (present(hit_eof)) then + call broadcast_scalar(hit_eof,master_task) + if (hit_eof) then + deallocate(work_g1) + return + endif + endif + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + if (my_task==master_task .and. diag) then + amin = minval(work_g1) + amax = maxval(work_g1, mask = work_g1 /= spval_dbl) + asum = sum(work_g1, mask = work_g1 /= spval_dbl) + write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum + endif + + !------------------------------------------------------------------- + ! Scatter data to individual processors. + ! NOTE: Ghost cells are not updated unless field_loc is present. + !------------------------------------------------------------------- + + if (present(field_loc)) then + call scatter_global(work, work_g1, master_task, distrb_info, & + field_loc, field_type) + else + call scatter_global(work, work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + + deallocate(work_g1) + + end subroutine ice_read_xyt + +!======================================================================= +! Read an unformatted file and scatter to processors. +! work is a real array, atype indicates the format of the data. +! If the optional variables field_loc and field_type are present, +! the ghost cells are filled using values from the global array. +! This prevents them from being filled with zeroes in land cells +! (subroutine ice_HaloUpdate need not be called). +! +! author: Tony Craig, NCAR + + subroutine ice_read_xyzt(nu, nrec, work, atype, diag, & + field_loc, field_type, & + ignore_eof, hit_eof) + + use ice_gather_scatter, only: scatter_global + use ice_domain_size, only: nblyr + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,nblyr+2,max_blocks), intent(out) :: & + work ! output array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for input array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + logical (kind=log_kind), optional, intent(in) :: ignore_eof + logical (kind=log_kind), optional, intent(out) :: hit_eof + + ! 
local variables + + integer (kind=int_kind) :: i, j, k, ios + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + + logical (kind=log_kind) :: ignore_eof_use + + + real (kind=dbl_kind), dimension(:,:,:), allocatable :: & + work_g4 + + integer(kind=int_kind), dimension(:,:,:), allocatable :: & + work_gi5 + + integer(selected_int_kind(13)), dimension(:,:,:), allocatable :: & + work_gi9 + + real (kind=real_kind), dimension(:,:,:), allocatable :: & + work_gr3 + + character(len=*), parameter :: subname = '(ice_read_xyzt)' + + if (my_task == master_task) then + allocate(work_g4(nx_global,ny_global,nblyr+2)) + else + allocate(work_g4(1,1,nblyr+2)) ! to save memory + endif + + if (my_task == master_task) then + + !------------------------------------------------------------------- + ! Read global array according to format atype + !------------------------------------------------------------------- + if (present(hit_eof)) hit_eof = .false. + + if (atype == 'ida4') then + allocate(work_gi5(nx_global,ny_global,nblyr+2)) + read(nu,rec=nrec) work_gi5 + work_g4 = real(work_gi5,kind=dbl_kind) + deallocate(work_gi5) + elseif (atype == 'ida8') then + allocate(work_gi9(nx_global,ny_global,nblyr+2)) + read(nu,rec=nrec) work_gi9 + work_g4 = real(work_gi9,kind=dbl_kind) + deallocate(work_gi9) + elseif (atype == 'rda4') then + allocate(work_gr3(nx_global,ny_global,nblyr+2)) + read(nu,rec=nrec) work_gr3 + work_g4 = work_gr3 + deallocate(work_gr3) + elseif (atype == 'rda8') then + read(nu,rec=nrec) work_g4 + elseif (atype == 'ruf8') then + if (present(ignore_eof)) then + ignore_eof_use = ignore_eof + else + ignore_eof_use = .false. + endif + if (ignore_eof_use) then + ! Read line from file, checking for end-of-file + read(nu, iostat=ios) (((work_g4(i,j,k),i=1,nx_global), & + j=1,ny_global), & + k=1,nblyr+2) + if (present(hit_eof)) hit_eof = ios < 0 + else + read(nu) (((work_g4(i,j,k),i=1,nx_global),j=1,ny_global),& + k=1,nblyr+2) + endif + else + write(nu_diag,*) ' ERROR: reading unknown atype ',atype + endif + endif ! my_task = master_task + + if (present(hit_eof)) then + call broadcast_scalar(hit_eof,master_task) + if (hit_eof) then + deallocate(work_g4) + return + endif + endif + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + if (my_task==master_task .and. diag) then + amin = minval(work_g4) + amax = maxval(work_g4, mask = work_g4 /= spval_dbl) + asum = sum (work_g4, mask = work_g4 /= spval_dbl) + write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum + endif + + !------------------------------------------------------------------- + ! Scatter data to individual processors. + ! NOTE: Ghost cells are not updated unless field_loc is present. + !------------------------------------------------------------------- + + do k = 1, nblyr+2 + + if (present(field_loc)) then + call scatter_global(work(:,:,k,:), work_g4(:,:,k), master_task, distrb_info, & + field_loc, field_type) + + else + + call scatter_global(work(:,:,k,:), work_g4(:,:,k), master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + + enddo !k + deallocate(work_g4) + + end subroutine ice_read_xyzt + +!======================================================================= + +! Read an unformatted file +! Just like ice_read except that it returns a global array. +! work_g is a real array, atype indicates the format of the data +! +! 
Adapted by William Lipscomb, LANL, from ice_read + + subroutine ice_read_global (nu, nrec, work_g, atype, diag, & + ignore_eof, hit_eof) + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_global,ny_global), intent(out) :: & + work_g ! output array (real, 8-byte) + + character (len=4) :: & + atype ! format for input array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind) :: & + diag ! if true, write diagnostic output + + logical (kind=log_kind), optional, intent(in) :: ignore_eof + logical (kind=log_kind), optional, intent(out) :: hit_eof + + ! local variables + + integer (kind=int_kind) :: i, j, ios + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + + logical (kind=log_kind) :: ignore_eof_use + + real (kind=real_kind), dimension(:,:), allocatable :: & + work_gr + + integer(kind=int_kind), dimension(:,:), allocatable :: & + work_gi4 + + integer(selected_int_kind(13)), dimension(:,:), allocatable :: & + work_gi8 + + character(len=*), parameter :: subname = '(ice_read_global)' + + work_g(:,:) = c0 + + if (my_task == master_task) then + + !------------------------------------------------------------------- + ! Read global array according to format atype + !------------------------------------------------------------------- + if (present(hit_eof)) hit_eof = .false. + + if (atype == 'ida4') then + allocate(work_gi4(nx_global,ny_global)) + read(nu,rec=nrec) work_gi4 + work_g = real(work_gi4,kind=dbl_kind) + deallocate(work_gi4) + elseif (atype == 'ida8') then + allocate(work_gi8(nx_global,ny_global)) + read(nu,rec=nrec) work_gi8 + work_g = real(work_gi8,kind=dbl_kind) + deallocate(work_gi8) + elseif (atype == 'rda4') then + allocate(work_gr(nx_global,ny_global)) + read(nu,rec=nrec) work_gr + work_g = work_gr + deallocate(work_gr) + elseif (atype == 'rda8') then + read(nu,rec=nrec) work_g + elseif (atype == 'ruf8') then + if (present(ignore_eof)) then + ignore_eof_use = ignore_eof + else + ignore_eof_use = .false. + endif + if (ignore_eof_use) then + ! Read line from file, checking for end-of-file + read(nu, iostat=ios) ((work_g(i,j),i=1,nx_global), & + j=1,ny_global) + if (present(hit_eof)) hit_eof = ios < 0 + else + read(nu) ((work_g(i,j),i=1,nx_global),j=1,ny_global) + endif + else + write(nu_diag,*) ' ERROR: reading unknown atype ',atype + endif + endif ! my_task = master_task + + if (present(hit_eof)) then + call broadcast_scalar(hit_eof,master_task) + if (hit_eof) return + endif + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + if (my_task == master_task .and. diag) then + amin = minval(work_g) + amax = maxval(work_g, mask = work_g /= spval_dbl) + asum = sum (work_g, mask = work_g /= spval_dbl) + write(nu_diag,*) ' read_global ',nu, nrec, amin, amax,asum + endif + + end subroutine ice_read_global + +!======================================================================= + +! Read an unformatted file and scatter to processors, incl ghost cells. +! work is a real array, atype indicates the format of the data. +! (subroutine ice_HaloUpdate need not be called). + + subroutine ice_read_ext(nu, nrec, work, atype, diag, & + ignore_eof, hit_eof) + + use ice_gather_scatter, only: scatter_global_ext + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nrec ! 
record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(out) :: & + work ! output array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for input array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + logical (kind=log_kind), optional, intent(in) :: ignore_eof + logical (kind=log_kind), optional, intent(out) :: hit_eof + + ! local variables + + integer (kind=int_kind) :: i, j, ios, nx, ny + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + + logical (kind=log_kind) :: ignore_eof_use + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + real (kind=real_kind), dimension(:,:), allocatable :: & + work_gr + + integer(kind=int_kind), dimension(:,:), allocatable :: & + work_gi4 + + integer(selected_int_kind(13)), dimension(:,:), allocatable :: & + work_gi8 + + character(len=*), parameter :: subname = '(ice_read_ext)' + + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + + if (my_task == master_task) then + allocate(work_g1(nx,ny)) + else + allocate(work_g1(1,1)) ! to save memory + endif + + if (my_task == master_task) then + + !------------------------------------------------------------------- + ! Read global array according to format atype + !------------------------------------------------------------------- + if (present(hit_eof)) hit_eof = .false. + + if (atype == 'ida4') then + allocate(work_gi4(nx,ny)) + read(nu,rec=nrec) work_gi4 + work_g1 = real(work_gi4,kind=dbl_kind) + deallocate(work_gi4) + elseif (atype == 'ida8') then + allocate(work_gi8(nx,ny)) + read(nu,rec=nrec) work_gi8 + work_g1 = real(work_gi8,kind=dbl_kind) + deallocate(work_gi8) + elseif (atype == 'rda4') then + allocate(work_gr(nx,ny)) + read(nu,rec=nrec) work_gr + work_g1 = work_gr + deallocate(work_gr) + elseif (atype == 'rda8') then + read(nu,rec=nrec) work_g1 + elseif (atype == 'ruf8') then + if (present(ignore_eof)) then + ignore_eof_use = ignore_eof + else + ignore_eof_use = .false. + endif + if (ignore_eof_use) then + ! Read line from file, checking for end-of-file + read(nu, iostat=ios) ((work_g1(i,j),i=1,nx), & + j=1,ny) + if (present(hit_eof)) hit_eof = ios < 0 + else + read(nu) ((work_g1(i,j),i=1,nx),j=1,ny) + endif + else + write(nu_diag,*) ' ERROR: reading unknown atype ',atype + endif + endif ! my_task = master_task + + if (present(hit_eof)) then + call broadcast_scalar(hit_eof,master_task) + if (hit_eof) then + deallocate(work_g1) + return + endif + endif + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + if (my_task==master_task .and. diag) then + amin = minval(work_g1) + amax = maxval(work_g1, mask = work_g1 /= spval_dbl) + asum = sum (work_g1, mask = work_g1 /= spval_dbl) + write(nu_diag,*) ' read_global ',nu, nrec, amin, amax, asum + endif + + !------------------------------------------------------------------- + ! Scatter data to individual processors. + ! NOTE: Ghost cells are always updated + !------------------------------------------------------------------- + + call scatter_global_ext(work, work_g1, master_task, distrb_info) + + deallocate(work_g1) + + end subroutine ice_read_ext + +!======================================================================= + +! Writes an unformatted file +! 
work is a real array, atype indicates the format of the data + + subroutine ice_write_xyt(nu, nrec, work, atype, diag) + + use ice_gather_scatter, only: gather_global + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(in) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + ! local variables + + integer (kind=int_kind) :: i, j + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + real (kind=real_kind), dimension(:,:), allocatable :: & + work_gr + + integer(kind=int_kind), dimension(:,:), allocatable :: & + work_gi4 + + integer(selected_int_kind(13)), dimension(:,:), allocatable :: & + work_gi8 + + character(len=*), parameter :: subname = '(ice_write_xyt)' + + !------------------------------------------------------------------- + ! Gather data from individual processors + !------------------------------------------------------------------- + + if (my_task == master_task) then + allocate(work_g1(nx_global,ny_global)) + else + allocate(work_g1(1,1)) ! to save memory + endif + + call gather_global(work_g1, work, master_task, distrb_info, spc_val=c0) + + if (my_task == master_task) then + + !------------------------------------------------------------------- + ! Write global array according to format atype + !------------------------------------------------------------------- + if (atype == 'ida4') then + allocate(work_gi4(nx_global,ny_global)) + work_gi4 = nint(work_g1) + write(nu,rec=nrec) work_gi4 + deallocate(work_gi4) + elseif (atype == 'ida8') then + allocate(work_gi8(nx_global,ny_global)) + work_gi8 = nint(work_g1) + write(nu,rec=nrec) work_gi8 + deallocate(work_gi8) + elseif (atype == 'rda4') then + allocate(work_gr(nx_global,ny_global)) + work_gr = work_g1 + write(nu,rec=nrec) work_gr + deallocate(work_gr) + elseif (atype == 'rda8') then + write(nu,rec=nrec) work_g1 + elseif (atype == 'ruf8') then + write(nu) ((work_g1(i,j),i=1,nx_global),j=1,ny_global) + else + write(nu_diag,*) ' ERROR: writing unknown atype ',atype + endif + + !------------------------------------------------------------------- + ! diagnostics + !------------------------------------------------------------------- + if (diag) then + amin = minval(work_g1) + amax = maxval(work_g1, mask = work_g1 /= spval_dbl) + asum = sum (work_g1, mask = work_g1 /= spval_dbl) + write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + endif + + endif ! my_task = master_task + + deallocate(work_g1) + + end subroutine ice_write_xyt + +!======================================================================= + +! Writes an unformatted file +! work is a real array, atype indicates the format of the data + + subroutine ice_write_xyzt(nu, nrec, work, atype, diag) + + use ice_gather_scatter, only: gather_global + use ice_domain_size, only: nblyr + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,nblyr+2,max_blocks), & + intent(in) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! 
(real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + ! local variables + + integer (kind=int_kind) :: i, j, k + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + + real (kind=dbl_kind), dimension(:,:,:), allocatable :: & + work_g4 + + real (kind=real_kind), dimension(:,:,:), allocatable :: & + work_gr3 + + integer(kind=int_kind), dimension(:,:,:), allocatable :: & + work_gi5 + + integer(selected_int_kind(13)), dimension(:,:,:), allocatable :: & + work_gi9 + + character(len=*), parameter :: subname = '(ice_write_xyzt)' + + !------------------------------------------------------------------- + ! Gather data from individual processors + !------------------------------------------------------------------- + + if (my_task == master_task) then + allocate(work_g4(nx_global,ny_global,nblyr+2)) + else + allocate(work_g4(1,1,nblyr+2)) ! to save memory + endif + do k = 1,nblyr+2 + call gather_global(work_g4(:,:,k), work(:,:,k,:), master_task, & + distrb_info, spc_val=c0) + enddo !k + + if (my_task == master_task) then + + !------------------------------------------------------------------- + ! Write global array according to format atype + !------------------------------------------------------------------- + if (atype == 'ida4') then + allocate(work_gi5(nx_global,ny_global,nblyr+2)) + work_gi5 = nint(work_g4) + write(nu,rec=nrec) work_gi5 + deallocate(work_gi5) + elseif (atype == 'ida8') then + allocate(work_gi9(nx_global,ny_global,nblyr+2)) + work_gi9 = nint(work_g4) + write(nu,rec=nrec) work_gi9 + deallocate(work_gi9) + elseif (atype == 'rda4') then + allocate(work_gr3(nx_global,ny_global,nblyr+2)) + work_gr3 = work_g4 + write(nu,rec=nrec) work_gr3 + deallocate(work_gr3) + elseif (atype == 'rda8') then + write(nu,rec=nrec) work_g4 + elseif (atype == 'ruf8') then + write(nu)(((work_g4(i,j,k),i=1,nx_global),j=1,ny_global), & + k=1,nblyr+2) + else + write(nu_diag,*) ' ERROR: writing unknown atype ',atype + endif + + !------------------------------------------------------------------- + ! diagnostics + !------------------------------------------------------------------- + if (diag) then + amin = minval(work_g4) + amax = maxval(work_g4, mask = work_g4 /= spval_dbl) + asum = sum (work_g4, mask = work_g4 /= spval_dbl) + write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + endif + + endif ! my_task = master_task + + deallocate(work_g4) + + end subroutine ice_write_xyzt + +!======================================================================= +! +! Writes an unformatted file, including ghost cells +! work is a real array, atype indicates the format of the data +! +! author: Tony Craig, NCAR + + subroutine ice_write_ext(nu, nrec, work, atype, diag) + + use ice_gather_scatter, only: gather_global_ext + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), & + intent(in) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + ! local variables + + integer (kind=int_kind) :: i, j, nx, ny + + real (kind=dbl_kind) :: & + amin, amax, asum ! 
min, max values and sum of input array + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + real (kind=real_kind), dimension(:,:), allocatable :: & + work_gr + + integer(kind=int_kind), dimension(:,:), allocatable :: & + work_gi4 + + integer(selected_int_kind(13)), dimension(:,:), allocatable :: & + work_gi8 + + character(len=*), parameter :: subname = '(ice_write_ext)' + + !------------------------------------------------------------------- + ! Gather data from individual processors + !------------------------------------------------------------------- + + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + + if (my_task == master_task) then + allocate(work_g1(nx,ny)) + else + allocate(work_g1(1,1)) ! to save memory + endif + + call gather_global_ext(work_g1, work, master_task, distrb_info, spc_val=c0) + + if (my_task == master_task) then + + !------------------------------------------------------------------- + ! Write global array according to format atype + !------------------------------------------------------------------- + if (atype == 'ida4') then + allocate(work_gi4(nx,ny)) + work_gi4 = nint(work_g1) + write(nu,rec=nrec) work_gi4 + deallocate(work_gi4) + elseif (atype == 'ida8') then + allocate(work_gi8(nx,ny)) + work_gi8 = nint(work_g1) + write(nu,rec=nrec) work_gi8 + deallocate(work_gi8) + elseif (atype == 'rda4') then + allocate(work_gr(nx,ny)) + work_gr = work_g1 + write(nu,rec=nrec) work_gr + deallocate(work_gr) + elseif (atype == 'rda8') then + write(nu,rec=nrec) work_g1 + elseif (atype == 'ruf8') then + write(nu) ((work_g1(i,j),i=1,nx),j=1,ny) + else + write(nu_diag,*) ' ERROR: writing unknown atype ',atype + endif + + !------------------------------------------------------------------- + ! diagnostics + !------------------------------------------------------------------- + if (diag) then + amin = minval(work_g1) + amax = maxval(work_g1, mask = work_g1 /= spval_dbl) + asum = sum (work_g1, mask = work_g1 /= spval_dbl) + write(nu_diag,*) ' write_global ', nu, nrec, amin, amax, asum + endif + + endif ! my_task = master_task + + deallocate(work_g1) + + end subroutine ice_write_ext + +!======================================================================= + +! Opens a netCDF file for reading +! Adapted by Alison McLaren, Met Office from ice_open + + subroutine ice_open_nc(filename, fid) + + character (char_len_long), intent(in) :: & + filename ! netCDF filename + + integer (kind=int_kind), intent(out) :: & + fid ! unit number + + ! local variables + + character(len=*), parameter :: subname = '(ice_open_nc)' + +#ifdef USE_NETCDF + integer (kind=int_kind) :: & + status ! status variable from netCDF routine + + if (my_task == master_task) then + + status = nf90_open(filename, NF90_NOWRITE, fid) + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot open '//trim(filename) ) + endif + + endif ! my_task = master_task + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined for '//trim(filename), & + file=__FILE__, line=__LINE__) + fid = -999 ! to satisfy intent(out) attribute +#endif + end subroutine ice_open_nc + +!======================================================================= + +! Read a netCDF file and scatter to processors. +! If the optional variables field_loc and field_type are present, +! the ghost cells are filled using values from the global array. +! This prevents them from being filled with zeroes in land cells +! (subroutine ice_HaloUpdate need not be called). +! +! 
Adapted by Alison McLaren, Met Office from ice_read + + subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & + field_loc, field_type, restart_ext) + + use ice_gather_scatter, only: scatter_global, scatter_global_ext + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + nrec ! record number + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (len=*), intent(in) :: & + varname ! field name in netcdf file + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(out) :: & + work ! output array (real, 8-byte) + + logical (kind=log_kind), optional, intent(in) :: & + restart_ext ! if true, read extended grid + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + ! local variables + + character(len=*), parameter :: subname = '(ice_read_nc_xy)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid , & ! variable id + status ! status output from netcdf routines +! ndim, nvar, & ! sizes of netcdf file +! id, & ! dimension index +! dimlen ! dimension size + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + +! character (char_len) :: & +! dimname ! dimension name + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + integer (kind=int_kind) :: nx, ny + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g2 + + if (orca_halogrid .and. .not. present(restart_ext)) then + if (my_task == master_task) then + allocate(work_g2(nx_global+2,ny_global+1)) + else + allocate(work_g2(1,1)) ! to save memory + endif + work_g2(:,:) = c0 + endif + + nx = nx_global + ny = ny_global + + if (present(restart_ext)) then + if (restart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + endif + endif + + if (my_task == master_task) then + allocate(work_g1(nx,ny)) + else + allocate(work_g1(1,1)) ! to save memory + endif + + if (my_task == master_task) then + + !------------------------------------------------------------- + ! Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + endif + + !-------------------------------------------------------------- + ! Read global array + !-------------------------------------------------------------- + + if (orca_halogrid .and. .not. present(restart_ext)) then + status = nf90_get_var( fid, varid, work_g2, & + start=(/1,1,nrec/), & + count=(/nx_global+2,ny_global+1,1/) ) + work_g1 = work_g2(2:nx_global+1,1:ny_global) + else + status = nf90_get_var( fid, varid, work_g1, & + start=(/1,1,nrec/), & + count=(/nx,ny,1/) ) + endif + + endif ! my_task = master_task + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (my_task==master_task .and. diag) then +! write(nu_diag,*) & +! 'ice_read_nc_xy, fid= ',fid, ', nrec = ',nrec, & +! ', varname = ',trim(varname) +! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) +! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! do id=1,ndim +! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) +! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! 
enddo + amin = minval(work_g1) + amax = maxval(work_g1, mask = work_g1 /= spval_dbl) + asum = sum (work_g1, mask = work_g1 /= spval_dbl) + write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + endif + + !------------------------------------------------------------------- + ! Scatter data to individual processors. + ! NOTE: Ghost cells are not updated unless field_loc is present. + !------------------------------------------------------------------- + + if (present(restart_ext)) then + if (restart_ext) then + call scatter_global_ext(work, work_g1, master_task, distrb_info) + endif + else + if (present(field_loc)) then + call scatter_global(work, work_g1, master_task, distrb_info, & + field_loc, field_type) + else + call scatter_global(work, work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + endif + + deallocate(work_g1) + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work = c0 ! to satisfy intent(out) attribute +#endif + end subroutine ice_read_nc_xy + +!======================================================================= + +! Read a netCDF file and scatter to processors. +! If the optional variables field_loc and field_type are present, +! the ghost cells are filled using values from the global array. +! This prevents them from being filled with zeroes in land cells +! (subroutine ice_HaloUpdate need not be called). +! +! Adapted by David Bailey, NCAR from ice_read_nc_xy + + subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & + field_loc, field_type, restart_ext) + + use ice_gather_scatter, only: scatter_global, scatter_global_ext + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + nrec ! record number + + character (len=*), intent(in) :: & + varname ! field name in netcdf file + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + real (kind=dbl_kind), dimension(nx_block,ny_block,ncat,max_blocks), intent(out) :: & + work ! output array (real, 8-byte) + + logical (kind=log_kind), optional, intent(in) :: & + restart_ext ! if true, read extended grid + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + ! local variables + + character(len=*), parameter :: subname = '(ice_read_nc_xyz)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + n, & ! ncat index + varid , & ! variable id + status ! status output from netcdf routines +! ndim, nvar, & ! sizes of netcdf file +! id, & ! dimension index +! dimlen ! size of dimension + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + +! character (char_len) :: & +! dimname ! dimension name + + real (kind=dbl_kind), dimension(:,:,:), allocatable :: & + work_g1 + + integer (kind=int_kind) :: nx, ny + + real (kind=dbl_kind), dimension(:,:,:), allocatable :: & + work_g2 + + if (orca_halogrid .and. .not. present(restart_ext)) then + if (my_task == master_task) then + allocate(work_g2(nx_global+2,ny_global+1,ncat)) + else + allocate(work_g2(1,1,ncat)) ! 
to save memory + endif + work_g2(:,:,:) = c0 + endif + + nx = nx_global + ny = ny_global + + if (present(restart_ext)) then + if (restart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + endif + endif + + if (my_task == master_task) then + allocate(work_g1(nx,ny,ncat)) + else + allocate(work_g1(1,1,ncat)) ! to save memory + endif + + if (my_task == master_task) then + + !------------------------------------------------------------- + ! Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + endif + + !-------------------------------------------------------------- + ! Read global array + !-------------------------------------------------------------- + + if (orca_halogrid .and. .not. present(restart_ext)) then + status = nf90_get_var( fid, varid, work_g2, & + start=(/1,1,1,nrec/), & + count=(/nx_global+2,ny_global+1,ncat,1/) ) + work_g1 = work_g2(2:nx_global+1,1:ny_global,:) + else + status = nf90_get_var( fid, varid, work_g1, & + start=(/1,1,1,nrec/), & + count=(/nx,ny,ncat,1/) ) + endif + + endif ! my_task = master_task + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (my_task==master_task .and. diag) then +! write(nu_diag,*) & +! 'ice_read_nc_xyz, fid= ',fid, ', nrec = ',nrec, & +! ', varname = ',trim(varname) +! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) +! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! do id=1,ndim +! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) +! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! enddo + do n=1,ncat + amin = minval(work_g1(:,:,n)) + amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) + asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) + write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + enddo + endif + + !------------------------------------------------------------------- + ! Scatter data to individual processors. + ! NOTE: Ghost cells are not updated unless field_loc is present. + !------------------------------------------------------------------- + + if (present(restart_ext)) then + if (restart_ext) then + do n=1,ncat + call scatter_global_ext(work(:,:,n,:), work_g1(:,:,n), & + master_task, distrb_info) + enddo + endif + else + if (present(field_loc)) then + do n=1,ncat + call scatter_global(work(:,:,n,:), work_g1(:,:,n), master_task, & + distrb_info, field_loc, field_type) + enddo + else + do n=1,ncat + call scatter_global(work(:,:,n,:), work_g1(:,:,n), master_task, & + distrb_info, field_loc_noupdate, field_type_noupdate) + enddo + endif + endif + + deallocate(work_g1) + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work = c0 ! to satisfy intent(out) attribute +#endif + end subroutine ice_read_nc_xyz + +!======================================================================= + +! Read a netCDF file and scatter to processors. +! If the optional variables field_loc and field_type are present, +! the ghost cells are filled using values from the global array. +! This prevents them from being filled with zeroes in land cells +! 
(subroutine ice_HaloUpdate need not be called). +! +! Adapted by David Bailey, NCAR from ice_read_nc_xy +! Adapted by Lettie Roach, NIWA to read nfreq +! by changing all occurrences of ncat to nfreq + + subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & + field_loc, field_type, restart_ext) + + use ice_fileunits, only: nu_diag + use ice_domain_size, only: nfsd, nfreq + use ice_gather_scatter, only: scatter_global, scatter_global_ext + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + nrec ! record number + + character (len=*), intent(in) :: & + varname ! field name in netcdf file + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + real (kind=dbl_kind), dimension(nx_block,ny_block,nfreq,1,max_blocks), & + intent(out) :: & + work ! output array (real, 8-byte) + + logical (kind=log_kind), optional, intent(in) :: & + restart_ext ! if true, read extended grid + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + ! local variables + +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid, & ! variable id + status, & ! status output from netcdf routines + ndim, nvar, & ! sizes of netcdf file + id, & ! dimension index + n, & ! ncat index + dimlen ! size of dimension + + real (kind=dbl_kind) :: & + missingvalue, & ! missing value + amin, amax, asum ! min, max values and sum of input array + + character (char_len) :: & + dimname ! dimension name + + real (kind=dbl_kind), dimension(:,:,:), allocatable :: & + work_g1 + + integer (kind=int_kind) :: nx, ny + + character(len=*), parameter :: subname = '(ice_read_nc_xyf)' + +#ifdef USE_NETCDF + real (kind=dbl_kind), dimension(:,:,:), allocatable :: & + work_g2 + + if (orca_halogrid .and. .not. present(restart_ext)) then + if (my_task == master_task) then + allocate(work_g2(nx_global+2,ny_global+1,nfreq)) + else + allocate(work_g2(1,1,nfreq)) ! to save memory + endif + work_g2(:,:,:) = c0 + endif + + nx = nx_global + ny = ny_global + + if (present(restart_ext)) then + if (restart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + endif + endif + + if (my_task == master_task) then + allocate(work_g1(nx,ny,nfreq)) + else + allocate(work_g1(1,1,nfreq)) ! to save memory + endif + + if (my_task == master_task) then + + !------------------------------------------------------------- + ! Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice ( & + 'ice_read_nc_xyf: Cannot find variable '//trim(varname) ) + endif + + !-------------------------------------------------------------- + ! Read global array + !-------------------------------------------------------------- + + if (orca_halogrid .and. .not. present(restart_ext)) then + status = nf90_get_var( fid, varid, work_g2, & + start=(/1,1,1,nrec/), & + count=(/nx_global+2,ny_global+1,nfreq,1/) ) + work_g1 = work_g2(2:nx_global+1,1:ny_global,:) + else + status = nf90_get_var( fid, varid, work_g1, & + start=(/1,1,1,nrec/), & + count=(/nx,ny,nfreq,1/) ) + endif + + status = nf90_get_att(fid, varid, "missing_value", missingvalue) + endif ! my_task = master_task + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (my_task==master_task .and. 
diag) then + write(nu_diag,*) & + 'ice_read_nc_xyf, fid= ',fid, ', nrec = ',nrec, & + ', varname = ',trim(varname) + status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) + write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + do id=1,ndim + status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) + write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen + enddo + write(nu_diag,*) 'missingvalue= ',missingvalue + do n = 1, nfreq + amin = minval(work_g1(:,:,n)) + amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) + asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) + write(nu_diag,*) ' min, max, sum =', amin, amax, asum + enddo + endif + + !------------------------------------------------------------------- + ! Scatter data to individual processors. + ! NOTE: Ghost cells are not updated unless field_loc is present. + !------------------------------------------------------------------- + + if (present(restart_ext)) then + if (restart_ext) then + do n = 1, nfreq + call scatter_global_ext(work(:,:,n,1,:), work_g1(:,:,n), & + master_task, distrb_info) + enddo + endif + else + if (present(field_loc)) then + do n = 1, nfreq + call scatter_global(work(:,:,n,1,:), work_g1(:,:,n), master_task, & + distrb_info, field_loc, field_type) + enddo + else + do n = 1, nfreq + call scatter_global(work(:,:,n,1,:), work_g1(:,:,n), master_task, & + distrb_info, field_loc_noupdate, field_type_noupdate) + enddo + endif + endif + +! echmod: this should not be necessary if fill/missing are only on land + where (work > 1.0e+30_dbl_kind) work = c0 + + deallocate(work_g1) + if (orca_halogrid .and. .not. present(restart_ext)) deallocate(work_g2) + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work = c0 ! to satisfy intent(out) attribute +#endif + + end subroutine ice_read_nc_xyf + +!======================================================================= + +! Read a netCDF file +! Adapted by Alison McLaren, Met Office from ice_read + + subroutine ice_read_nc_point(fid, nrec, varname, work, diag, & + field_loc, field_type) + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + nrec ! record number + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (char_len), intent(in) :: & + varname ! field name in netcdf file + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + real (kind=dbl_kind), intent(out) :: & + work ! output variable (real, 8-byte) + + ! local variables + + character(len=*), parameter :: subname = '(ice_read_nc_point)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid, & ! netcdf id for field + status, & ! status output from netcdf routines + ndim, nvar, & ! sizes of netcdf file + id, & ! dimension index + dimlen ! size of dimension + + real (kind=dbl_kind), dimension(1) :: & + workg ! temporary work variable + + character (char_len) :: & + dimname ! dimension name + + if (my_task == master_task) then + + !------------------------------------------------------------- + ! 
Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + endif + + !-------------------------------------------------------------- + ! Read point variable + !-------------------------------------------------------------- + + status = nf90_get_var(fid, varid, workg, & + start= (/ nrec /), & + count=(/ 1 /) ) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot get variable '//trim(varname) ) + endif + endif ! my_task = master_task + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (my_task==master_task .and. diag) then + write(nu_diag,*) & + 'ice_read_nc_point, fid= ',fid, ', nrec = ',nrec, & + ', varname = ',trim(varname) + status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) + write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar + do id=1,ndim + status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) + write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen + enddo + endif + + work = workg(1) + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work = c0 ! to satisfy intent(out) attribute +#endif + end subroutine ice_read_nc_point + +!======================================================================= + +! Adapted by Nicole Jeffery, LANL + + subroutine ice_read_nc_z(fid, nrec, varname, work, diag, & + field_loc, field_type) + + use ice_domain_size, only: nilyr + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + nrec ! record number + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (char_len), intent(in) :: & + varname ! field name in netcdf file + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + real (kind=dbl_kind), dimension(nilyr), intent(out) :: & + work ! output array (real, 8-byte) + + ! local variables + +#ifdef USE_NETCDF + real (kind=dbl_kind), dimension(:), allocatable :: & + work_z + +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid, & ! netcdf id for field + status, & ! status output from netcdf routines + ndim, nvar, & ! sizes of netcdf file + id, & ! dimension index + dimlen ! size of dimension + + character (char_len) :: & + dimname ! dimension name +#endif + + character(len=*), parameter :: subname = '(ice_read_nc_z)' + +#ifdef USE_NETCDF + + allocate(work_z(nilyr)) + + if (my_task == master_task) then + + !------------------------------------------------------------- + ! Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + endif + + !-------------------------------------------------------------- + ! Read global array + !-------------------------------------------------------------- + + status = nf90_get_var( fid, varid, work_z, & + start=(/1,nrec/), & + count=(/nilyr,1/) ) + + endif ! my_task = master_task + + !------------------------------------------------------------------- + ! 
optional diagnostics
+      !-------------------------------------------------------------------
+
+      if (my_task==master_task .and. diag) then
+        write(nu_diag,*) &
+          'ice_read_nc_z, fid= ',fid, ', nrec = ',nrec, &
+          ', varname = ',trim(varname)
+        status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar)
+        write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar
+        do id=1,ndim
+          status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen)
+          write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen
+        enddo
+      endif
+
+      work(:) = work_z(:)
+      deallocate(work_z)
+
+#else
+      call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', &
+          file=__FILE__, line=__LINE__)
+      work = c0 ! to satisfy intent(out) attribute
+#endif
+      end subroutine ice_read_nc_z
+
+!=======================================================================
+
+! Write a netCDF file.
+!
+! Adapted by David Bailey, NCAR
+
+      subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, &
+                                 restart_ext, varname)
+
+      use ice_gather_scatter, only: gather_global, gather_global_ext
+
+      integer (kind=int_kind), intent(in) :: &
+         fid      , & ! file id
+         varid    , & ! variable id
+         nrec         ! record number
+
+      logical (kind=log_kind), intent(in) :: &
+         diag         ! if true, write diagnostic output
+
+      logical (kind=log_kind), optional, intent(in) :: &
+         restart_ext  ! if true, write extended grid
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(in) :: &
+         work         ! output array (real, 8-byte)
+
+      character (len=*), optional, intent(in) :: &
+         varname      ! variable name
+
+      ! local variables
+
+      character(len=*), parameter :: subname = '(ice_write_nc_xy)'
+
+#ifdef USE_NETCDF
+! netCDF file diagnostics:
+      integer (kind=int_kind) :: &
+         status       ! status output from netcdf routines
+!        ndim, nvar, & ! sizes of netcdf file
+!        id,         & ! dimension index
+!        dimlen        ! size of dimension
+
+      real (kind=dbl_kind) :: &
+         amin, amax, asum ! min, max values and sum of input array
+
+      character (char_len) :: &
+         lvarname     ! variable name
+!        dimname      ! dimension name
+
+      real (kind=dbl_kind), dimension(:,:), allocatable :: &
+         work_g1
+
+      integer (kind=int_kind) :: nx, ny
+
+      nx = nx_global
+      ny = ny_global
+
+      if (present(restart_ext)) then
+         if (restart_ext) then
+            nx = nx_global + 2*nghost
+            ny = ny_global + 2*nghost
+         endif
+      endif
+
+      if (present(varname)) then
+         lvarname = trim(varname)
+      else
+         lvarname = ' '
+      endif
+
+      if (my_task == master_task) then
+         allocate(work_g1(nx,ny))
+      else
+         allocate(work_g1(1,1)) ! to save memory
+      endif
+
+      if (present(restart_ext)) then
+         if (restart_ext) then
+            call gather_global_ext(work_g1, work, master_task, distrb_info, spc_val=c0)
+         endif
+      else
+         call gather_global(work_g1, work, master_task, distrb_info, spc_val=c0)
+      endif
+
+      if (my_task == master_task) then
+
+      !--------------------------------------------------------------
+      ! Write global array
+      !--------------------------------------------------------------
+
+        status = nf90_put_var( fid, varid, work_g1, &
+                               start=(/1,1,nrec/), &
+                               count=(/nx,ny,1/) )
+
+      endif ! my_task = master_task
+
+      !-------------------------------------------------------------------
+      ! optional diagnostics
+      !-------------------------------------------------------------------
+
+      if (my_task==master_task .and. diag) then
+!          write(nu_diag,*) &
+!            'ice_write_nc_xy, fid= ',fid, ', nrec = ',nrec, &
+!            ', varid = ',varid
+!          status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar)
+!          write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar
+!          do id=1,ndim
+!            status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen)
+!            write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen
+!          enddo
+        amin = minval(work_g1)
+        amax = maxval(work_g1, mask = work_g1 /= spval_dbl)
+        asum = sum   (work_g1, mask = work_g1 /= spval_dbl)
+        write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(lvarname)
+      endif
+
+      deallocate(work_g1)
+
+#else
+      call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', &
+          file=__FILE__, line=__LINE__)
+#endif
+
+      end subroutine ice_write_nc_xy
+
+!=======================================================================
+
+! Write a netCDF file.
+!
+! Adapted by David Bailey, NCAR
+
+      subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, &
+                                  restart_ext, varname)
+
+      use ice_gather_scatter, only: gather_global, gather_global_ext
+
+      integer (kind=int_kind), intent(in) :: &
+         fid      , & ! file id
+         varid    , & ! variable id
+         nrec         ! record number
+
+      logical (kind=log_kind), intent(in) :: &
+         diag         ! if true, write diagnostic output
+
+      logical (kind=log_kind), optional, intent(in) :: &
+         restart_ext  ! if true, write extended grid
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block,ncat,max_blocks), intent(in) :: &
+         work         ! output array (real, 8-byte)
+
+      character (len=*), optional, intent(in) :: &
+         varname      ! variable name
+
+      ! local variables
+
+      character(len=*), parameter :: subname = '(ice_write_nc_xyz)'
+
+#ifdef USE_NETCDF
+! netCDF file diagnostics:
+      integer (kind=int_kind) :: &
+         n,          & ! ncat index
+         status        ! status output from netcdf routines
+!        ndim, nvar, & ! sizes of netcdf file
+!        id,         & ! dimension index
+!        dimlen        ! size of dimension
+
+      real (kind=dbl_kind) :: &
+         amin, amax, asum ! min, max values and sum of input array
+
+      character (char_len) :: &
+         lvarname     ! variable name
+!        dimname      ! dimension name
+
+      real (kind=dbl_kind), dimension(:,:,:), allocatable :: &
+         work_g1
+
+      integer (kind=int_kind) :: nx, ny
+
+      nx = nx_global
+      ny = ny_global
+
+      if (present(restart_ext)) then
+         if (restart_ext) then
+            nx = nx_global + 2*nghost
+            ny = ny_global + 2*nghost
+         endif
+      endif
+
+      if (my_task == master_task) then
+         allocate(work_g1(nx,ny,ncat))
+      else
+         allocate(work_g1(1,1,ncat)) ! to save memory
+      endif
+
+      if (present(restart_ext)) then
+         if (restart_ext) then
+            do n=1,ncat
+               call gather_global_ext(work_g1(:,:,n), work(:,:,n,:), &
+                    master_task, distrb_info, spc_val=c0)
+            enddo
+         endif
+      else
+         do n=1,ncat
+            call gather_global(work_g1(:,:,n), work(:,:,n,:), &
+                 master_task, distrb_info, spc_val=c0)
+         enddo
+      endif
+
+      if (present(varname)) then
+         lvarname = trim(varname)
+      else
+         lvarname = ' '
+      endif
+
+      if (my_task == master_task) then
+
+      !--------------------------------------------------------------
+      ! Write global array
+      !--------------------------------------------------------------
+
+        status = nf90_put_var( fid, varid, work_g1, &
+                               start=(/1,1,1,nrec/), &
+                               count=(/nx,ny,ncat,1/) )
+
+      endif ! my_task = master_task
+
+      !-------------------------------------------------------------------
+      ! optional diagnostics
+      !-------------------------------------------------------------------
+
+      if (my_task==master_task .and. diag) then
+!          write(nu_diag,*) &
+!            'ice_write_nc_xyz, fid= ',fid, ', nrec = ',nrec, &
+!            ', varid = ',varid
+!          status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar)
+!          write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar
+!          do id=1,ndim
+!            status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen)
+!            write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen
+!
enddo + amin = 10000._dbl_kind + amax = -10000._dbl_kind + do n=1,ncat + amin = minval(work_g1(:,:,n)) + amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) + asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= spval_dbl) + write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(lvarname) + enddo + endif + + deallocate(work_g1) + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + + end subroutine ice_write_nc_xyz + +!======================================================================= + +! Read a netcdf file. +! Just like ice_read_nc except that it returns a global array. +! work_g is a real array +! +! Adapted by William Lipscomb, LANL, from ice_read +! Adapted by Ann Keen, Met Office, to read from a netcdf file + + subroutine ice_read_global_nc (fid, nrec, varname, work_g, diag) + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + nrec ! record number + + character (char_len), intent(in) :: & + varname ! field name in netcdf file + + real (kind=dbl_kind), dimension(nx_global,ny_global), intent(out) :: & + work_g ! output array (real, 8-byte) + + logical (kind=log_kind) :: & + diag ! if true, write diagnostic output + + ! local variables + + character(len=*), parameter :: subname = '(ice_read_global_nc)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid, & ! netcdf id for field + status ! status output from netcdf routines +! ndim, nvar, & ! sizes of netcdf file +! id, & ! dimension index +! dimlen ! size of dimension + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + +! character (char_len) :: & +! dimname ! dimension name +! + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g3 + + if (orca_halogrid) then + if (my_task == master_task) then + allocate(work_g3(nx_global+2,ny_global+1)) + else + allocate(work_g3(1,1)) ! to save memory + endif + work_g3(:,:) = c0 + endif + + work_g(:,:) = c0 + + if (my_task == master_task) then + + !------------------------------------------------------------- + ! Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + endif + + !-------------------------------------------------------------- + ! Read global array + !-------------------------------------------------------------- + + if (orca_halogrid) then + status = nf90_get_var( fid, varid, work_g3, & + start=(/1,1,nrec/), & + count=(/nx_global+2,ny_global+1,1/) ) + work_g=work_g3(2:nx_global+1,1:ny_global) + else + status = nf90_get_var( fid, varid, work_g, & + start=(/1,1,nrec/), & + count=(/nx_global,ny_global,1/) ) + endif + endif ! my_task = master_task + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (my_task == master_task .and. diag) then +! write(nu_diag,*) & +! 'ice_read_global_nc, fid= ',fid, ', nrec = ',nrec, & +! ', varname = ',trim(varname) +! status = nf90_inquire(fid, nDimensions=ndim, nVariables=nvar) +! write(nu_diag,*) 'ndim= ',ndim,', nvar= ',nvar +! do id=1,ndim +! status = nf90_inquire_dimension(fid,id,name=dimname,len=dimlen) +! write(nu_diag,*) 'Dim name = ',trim(dimname),', size = ',dimlen +! 
enddo + amin = minval(work_g) + amax = maxval(work_g, mask = work_g /= spval_dbl) + asum = sum (work_g, mask = work_g /= spval_dbl) + write(nu_diag,*) 'min, max, sum = ', amin, amax, asum, trim(varname) + endif + + if (orca_halogrid) deallocate(work_g3) + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work_g = c0 ! to satisfy intent(out) attribute +#endif + + end subroutine ice_read_global_nc + +!======================================================================= + +! Closes a netCDF file +! author: Alison McLaren, Met Office + + subroutine ice_close_nc(fid) + + integer (kind=int_kind), intent(in) :: & + fid ! unit number + + ! local variables + + character(len=*), parameter :: subname = '(ice_close_nc)' + +#ifdef USE_NETCDF + integer (kind=int_kind) :: & + status ! status variable from netCDF routine + + if (my_task == master_task) then + status = nf90_close(fid) + endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + + end subroutine ice_close_nc + +!======================================================================= + +! Read a netCDF file and scatter to processors. +! If the optional variables field_loc and field_type are present, +! the ghost cells are filled using values from the global array. +! This prevents them from being filled with zeroes in land cells +! (subroutine ice_HaloUpdate need not be called). +! +! Adapted by Elizabeth Hunke for reading 3D ocean currents + + subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & + field_loc, field_type, restart_ext) + + use ice_gather_scatter, only: scatter_global, scatter_global_ext + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + nrec , & ! record number + nzlev ! z level + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (len=*), intent(in) :: & + varname ! field name in netcdf file + + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks), intent(out) :: & + work ! output array (real, 8-byte) + + logical (kind=log_kind), optional, intent(in) :: & + restart_ext ! if true, read extended grid + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + ! local variables + + character(len=*), parameter :: subname = '(ice_read_nc_uv)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid , & ! variable id + status ! status output from netcdf routines +! ndim, nvar, & ! sizes of netcdf file +! id, & ! dimension index +! dimlen ! size of dimension + + real (kind=dbl_kind) :: & + amin, amax, asum ! min, max values and sum of input array + +! character (char_len) :: & +! dimname ! dimension name + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1 + + integer (kind=int_kind) :: nx, ny + + nx = nx_global + ny = ny_global + + if (present(restart_ext)) then + if (restart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + endif + endif + + if (my_task == master_task) then + allocate(work_g1(nx,ny)) + else + allocate(work_g1(1,1)) ! to save memory + endif + + if (my_task == master_task) then + + !------------------------------------------------------------- + ! 
Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + endif + + !-------------------------------------------------------------- + ! Read global array + !-------------------------------------------------------------- + + status = nf90_get_var( fid, varid, work_g1, & + start=(/1,1,nzlev,nrec/), & + count=(/nx,ny,1,1/) ) + + endif ! my_task = master_task + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (my_task==master_task .and. diag) then + amin = minval(work_g1) + amax = maxval(work_g1, mask = work_g1 /= spval_dbl) + asum = sum (work_g1, mask = work_g1 /= spval_dbl) + write(nu_diag,*) ' min, max, sum =', amin, amax, asum, trim(varname) + endif + + !------------------------------------------------------------------- + ! Scatter data to individual processors. + ! NOTE: Ghost cells are not updated unless field_loc is present. + !------------------------------------------------------------------- + + if (present(restart_ext)) then + if (restart_ext) then + call scatter_global_ext(work, work_g1, master_task, distrb_info) + endif + else + if (present(field_loc)) then + call scatter_global(work, work_g1, master_task, distrb_info, & + field_loc, field_type) + else + call scatter_global(work, work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + endif + + deallocate(work_g1) + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work = c0 ! to satisfy intent(out) attribute +#endif + + end subroutine ice_read_nc_uv + +!======================================================================= +! Read a vector in a netcdf file. +! Just like ice_read_global_nc except that it returns a vector. +! work_g is a real vector +! +! Adapted by William Lipscomb, LANL, from ice_read +! Adapted by Ann Keen, Met Office, to read from a netcdf file + + subroutine ice_read_vec_nc (fid, nrec, varname, work_g, diag) + + integer (kind=int_kind), intent(in) :: & + fid , & ! file id + nrec ! record number + + character (char_len), intent(in) :: & + varname ! field name in netcdf file + + real (kind=dbl_kind), dimension(nrec), & + intent(out) :: & + work_g ! output array (real, 8-byte) + + logical (kind=log_kind) :: & + diag ! if true, write diagnostic output + + ! local variables + + character(len=*), parameter :: subname = '(ice_read_vec_nc)' + +#ifdef USE_NETCDF +! netCDF file diagnostics: + integer (kind=int_kind) :: & + varid, & ! netcdf id for field + status ! status output from netcdf routines + + real (kind=dbl_kind) :: & + amin, amax ! min, max values of input vector + + work_g(:) = c0 + + if (my_task == master_task) then + + !------------------------------------------------------------- + ! Find out ID of required variable + !------------------------------------------------------------- + + status = nf90_inq_varid(fid, trim(varname), varid) + + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: Cannot find variable '//trim(varname) ) + endif + + !-------------------------------------------------------------- + ! 
Read global array + !-------------------------------------------------------------- + + status = nf90_get_var( fid, varid, work_g, & + start=(/1/), & + count=(/nrec/) ) + endif ! my_task = master_task + + !------------------------------------------------------------------- + ! optional diagnostics + !------------------------------------------------------------------- + + if (my_task == master_task .and. diag) then + amin = minval(work_g) + amax = maxval(work_g) + write(nu_diag,*) 'min, max, nrec = ', amin, amax, nrec + endif + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + work_g = c0 ! to satisfy intent(out) attribute +#endif + + end subroutine ice_read_vec_nc + +!======================================================================= +! Get number of variables of a given variable + subroutine ice_get_ncvarsize(fid,varname,recsize) + + integer (kind=int_kind), intent(in) :: & + fid ! file id + character (char_len), intent(in) :: & + varname ! field name in netcdf file + integer (kind=int_kind), intent(out) :: & + recsize ! Number of records in file + + ! local variables + +#ifdef USE_NETCDF + integer (kind=int_kind) :: & + ndims, i, status + character (char_len) :: & + cvar +#endif + character(len=*), parameter :: subname = '(ice_get_ncvarsize)' + +#ifdef USE_NETCDF + if (my_task == master_task) then + status=nf90_inquire(fid, nDimensions = nDims) + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: inquire nDimensions' ) + endif + do i=1,nDims + status = nf90_inquire_dimension(fid,i,name=cvar,len=recsize) + if (status /= nf90_noerr) then + call abort_ice (subname//'ERROR: inquire len for variable '//trim(cvar) ) + endif + if (trim(cvar) == trim(varname)) exit + enddo + if (trim(cvar) .ne. trim(varname)) then + call abort_ice (subname//'ERROR: Did not find variable '//trim(varname) ) + endif + endif +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) + recsize = 0 ! to satisfy intent(out) attribute +#endif + + end subroutine ice_get_ncvarsize + +!======================================================================= + + end module ice_read_write + +!======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 new file mode 100644 index 000000000..b3024302e --- /dev/null +++ b/cicecore/cicedynB/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -0,0 +1,1589 @@ +#ifdef ncdf +#define USE_NETCDF +#endif +!======================================================================= +! +! Writes history in netCDF format +! +! authors Tony Craig and Bruce Briegleb, NCAR +! Elizabeth C. Hunke and William H. Lipscomb, LANL +! C. M. Bitz, UW +! +! 2004 WHL: Block structure added +! 2006 ECH: Accepted some CESM code into mainstream CICE +! Added ice_present, aicen, vicen; removed aice1...10, vice1...1. +! Added histfreq_n and histfreq='h' options, removed histfreq='w' +! Converted to free source form (F90) +! Added option for binary output instead of netCDF +! 2009 D Bailey and ECH: Generalized for multiple frequency output +! 2010 Alison McLaren and ECH: Added 3D capability +! 
2013 ECH split from ice_history.F90 + + module ice_history_write + + use ice_constants, only: c0, c360, spval, spval_dbl + use ice_fileunits, only: nu_diag + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters + + implicit none + private + public :: ice_write_hist + +!======================================================================= + + contains + +!======================================================================= +! +! write average ice quantities or snapshots +! +! author: Elizabeth C. Hunke, LANL + + subroutine ice_write_hist (ns) + + use ice_kinds_mod + use ice_arrays_column, only: hin_max, floe_rad_c + use ice_blocks, only: nx_block, ny_block + use ice_broadcast, only: broadcast_scalar + use ice_calendar, only: time, sec, idate, idate0, write_ic, & + histfreq, dayyr, days_per_year, use_leap_years + use ice_communicate, only: my_task, master_task + use ice_domain, only: distrb_info + use ice_domain_size, only: nx_global, ny_global, max_nstrm, max_blocks + use ice_gather_scatter, only: gather_global + use ice_grid, only: TLON, TLAT, ULON, ULAT, hm, bm, tarea, uarea, & + dxu, dxt, dyu, dyt, HTN, HTE, ANGLE, ANGLET, & + lont_bounds, latt_bounds, lonu_bounds, latu_bounds + use ice_history_shared + use ice_restart_shared, only: runid, lcdf64 +#ifdef USE_NETCDF + use netcdf +#endif + + integer (kind=int_kind), intent(in) :: ns + + ! local variables + + real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 + real (kind=real_kind), dimension(:,:), allocatable :: work_gr + real (kind=real_kind), dimension(:,:,:), allocatable :: work_gr3 + real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: & + work1 + + integer (kind=int_kind) :: i,k,ic,n,nn, & + ncid,status,imtid,jmtid,kmtidi,kmtids,kmtidb, cmtid,timid,varid, & + nvertexid,ivertex,kmtida,iflag, fmtid + integer (kind=int_kind), dimension(3) :: dimid + integer (kind=int_kind), dimension(4) :: dimidz + integer (kind=int_kind), dimension(5) :: dimidcz + integer (kind=int_kind), dimension(3) :: dimid_nverts + integer (kind=int_kind), dimension(6) :: dimidex +! real (kind=real_kind) :: ltime + real (kind=dbl_kind) :: ltime2 + character (char_len) :: title + character (char_len_long) :: ncfile(max_nstrm) + real (kind=dbl_kind) :: secday, rad_to_deg + + integer (kind=int_kind) :: ind,boundid + + integer (kind=int_kind) :: lprecision + + character (char_len) :: start_time,current_date,current_time + character (len=8) :: cdate + + ! 4 coordinate variables: TLON, TLAT, ULON, ULAT + INTEGER (kind=int_kind), PARAMETER :: ncoord = 4 + + ! 4 vertices in each grid cell + INTEGER (kind=int_kind), PARAMETER :: nverts = 4 + + ! 4 variables describe T, U grid boundaries: + ! lont_bounds, latt_bounds, lonu_bounds, latu_bounds + INTEGER (kind=int_kind), PARAMETER :: nvar_verts = 4 + + TYPE coord_attributes ! netcdf coordinate attributes + character (len=11) :: short_name + character (len=45) :: long_name + character (len=20) :: units + END TYPE coord_attributes + + TYPE req_attributes ! 
req'd netcdf attributes + type (coord_attributes) :: req + character (len=20) :: coordinates + END TYPE req_attributes + + TYPE(req_attributes), dimension(nvar) :: var + TYPE(coord_attributes), dimension(ncoord) :: coord_var + TYPE(coord_attributes), dimension(nvar_verts) :: var_nverts + TYPE(coord_attributes), dimension(nvarz) :: var_nz + CHARACTER (char_len), dimension(ncoord) :: coord_bounds + + character(len=*), parameter :: subname = '(ice_write_hist)' + +#ifdef USE_NETCDF + call icepack_query_parameters(secday_out=secday, rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + lprecision = nf90_float + if (history_precision == 8) lprecision = nf90_double + + if (my_task == master_task) then + +! ltime=time/int(secday) + ltime2=time/int(secday) + + call construct_filename(ncfile(ns),'nc',ns) + + ! add local directory path name to ncfile + if (write_ic) then + ncfile(ns) = trim(incond_dir)//ncfile(ns) + else + ncfile(ns) = trim(history_dir)//ncfile(ns) + endif + + ! create file + iflag = nf90_clobber + if (lcdf64) iflag = ior(iflag,nf90_64bit_offset) + status = nf90_create(ncfile(ns), iflag, ncid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: creating history ncfile '//ncfile(ns)) + + !----------------------------------------------------------------- + ! define dimensions + !----------------------------------------------------------------- + + if (hist_avg) then + status = nf90_def_dim(ncid,'d2',2,boundid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim d2') + endif + + status = nf90_def_dim(ncid,'ni',nx_global,imtid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim ni') + + status = nf90_def_dim(ncid,'nj',ny_global,jmtid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim nj') + + status = nf90_def_dim(ncid,'nc',ncat_hist,cmtid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim nc') + + status = nf90_def_dim(ncid,'nkice',nzilyr,kmtidi) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim nki') + + status = nf90_def_dim(ncid,'nksnow',nzslyr,kmtids) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim nks') + + status = nf90_def_dim(ncid,'nkbio',nzblyr,kmtidb) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim nkb') + + status = nf90_def_dim(ncid,'nkaer',nzalyr,kmtida) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim nka') + + status = nf90_def_dim(ncid,'time',NF90_UNLIMITED,timid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim time') + + status = nf90_def_dim(ncid,'nvertices',nverts,nvertexid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim nverts') + + status = nf90_def_dim(ncid,'nf',nfsd_hist,fmtid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining dim nf') + + !----------------------------------------------------------------- + ! 
define coordinate variables + !----------------------------------------------------------------- + +!sgl status = nf90_def_var(ncid,'time',nf90_float,timid,varid) + status = nf90_def_var(ncid,'time',nf90_double,timid,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining var time') + + status = nf90_put_att(ncid,varid,'long_name','model time') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ice Error: time long_name') + + write(cdate,'(i8.8)') idate0 + write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + status = nf90_put_att(ncid,varid,'units',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time units') + + if (days_per_year == 360) then + status = nf90_put_att(ncid,varid,'calendar','360_day') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time calendar') + elseif (days_per_year == 365 .and. .not.use_leap_years ) then + status = nf90_put_att(ncid,varid,'calendar','NoLeap') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time calendar') + elseif (use_leap_years) then + status = nf90_put_att(ncid,varid,'calendar','Gregorian') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time calendar') + else + call abort_ice(subname//'ERROR: invalid calendar settings') + endif + + if (hist_avg) then + status = nf90_put_att(ncid,varid,'bounds','time_bounds') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time bounds') + endif + + !----------------------------------------------------------------- + ! Define attributes for time bounds if hist_avg is true + !----------------------------------------------------------------- + + if (hist_avg) then + dimid(1) = boundid + dimid(2) = timid + status = nf90_def_var(ncid,'time_bounds',lprecision,dimid(1:2),varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining var time_bounds') + status = nf90_put_att(ncid,varid,'long_name', & + 'boundaries for time-averaging interval') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time_bounds long_name') + write(cdate,'(i8.8)') idate0 + write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + status = nf90_put_att(ncid,varid,'units',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: time_bounds units') + endif + + !----------------------------------------------------------------- + ! 
define information for required time-invariant variables + !----------------------------------------------------------------- + + ind = 0 + ind = ind + 1 + coord_var(ind) = coord_attributes('TLON', & + 'T grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lont_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('TLAT', & + 'T grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latt_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('ULON', & + 'U grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lonu_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('ULAT', & + 'U grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latu_bounds' + + var_nz(1) = coord_attributes('NCAT', 'category maximum thickness', 'm') + var_nz(2) = coord_attributes('VGRDi', 'vertical ice levels', '1') + var_nz(3) = coord_attributes('VGRDs', 'vertical snow levels', '1') + var_nz(4) = coord_attributes('VGRDb', 'vertical ice-bio levels', '1') + var_nz(5) = coord_attributes('VGRDa', 'vertical snow-ice-bio levels', '1') + var_nz(6) = coord_attributes('NFSD', 'category floe size (center)', 'm') + + !----------------------------------------------------------------- + ! define information for optional time-invariant variables + !----------------------------------------------------------------- + + var(n_tarea)%req = coord_attributes('tarea', & + 'area of T grid cells', 'm^2') + var(n_tarea)%coordinates = 'TLON TLAT' + var(n_uarea)%req = coord_attributes('uarea', & + 'area of U grid cells', 'm^2') + var(n_uarea)%coordinates = 'ULON ULAT' + var(n_dxt)%req = coord_attributes('dxt', & + 'T cell width through middle', 'm') + var(n_dxt)%coordinates = 'TLON TLAT' + var(n_dyt)%req = coord_attributes('dyt', & + 'T cell height through middle', 'm') + var(n_dyt)%coordinates = 'TLON TLAT' + var(n_dxu)%req = coord_attributes('dxu', & + 'U cell width through middle', 'm') + var(n_dxu)%coordinates = 'ULON ULAT' + var(n_dyu)%req = coord_attributes('dyu', & + 'U cell height through middle', 'm') + var(n_dyu)%coordinates = 'ULON ULAT' + var(n_HTN)%req = coord_attributes('HTN', & + 'T cell width on North side','m') + var(n_HTN)%coordinates = 'TLON TLAT' + var(n_HTE)%req = coord_attributes('HTE', & + 'T cell width on East side', 'm') + var(n_HTE)%coordinates = 'TLON TLAT' + var(n_ANGLE)%req = coord_attributes('ANGLE', & + 'angle grid makes with latitude line on U grid', & + 'radians') + var(n_ANGLE)%coordinates = 'ULON ULAT' + var(n_ANGLET)%req = coord_attributes('ANGLET', & + 'angle grid makes with latitude line on T grid', & + 'radians') + var(n_ANGLET)%coordinates = 'TLON TLAT' + + ! These fields are required for CF compliance + ! dimensions (nx,ny,nverts) + var_nverts(n_lont_bnds) = coord_attributes('lont_bounds', & + 'longitude boundaries of T cells', 'degrees_east') + var_nverts(n_latt_bnds) = coord_attributes('latt_bounds', & + 'latitude boundaries of T cells', 'degrees_north') + var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds', & + 'longitude boundaries of U cells', 'degrees_east') + var_nverts(n_latu_bnds) = coord_attributes('latu_bounds', & + 'latitude boundaries of U cells', 'degrees_north') + + !----------------------------------------------------------------- + ! 
define attributes for time-invariant variables + !----------------------------------------------------------------- + + dimid(1) = imtid + dimid(2) = jmtid + dimid(3) = timid + + do i = 1, ncoord + status = nf90_def_var(ncid, coord_var(i)%short_name, lprecision, & + dimid(1:2), varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining short_name for '//coord_var(i)%short_name) + status = nf90_put_att(ncid,varid,'long_name',coord_var(i)%long_name) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//coord_var(i)%short_name) + status = nf90_put_att(ncid, varid, 'units', coord_var(i)%units) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//coord_var(i)%short_name) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//coord_var(i)%short_name) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//coord_var(i)%short_name) + if (coord_var(i)%short_name == 'ULAT') then + status = nf90_put_att(ncid,varid,'comment', & + 'Latitude of NE corner of T grid cell') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining comment for '//coord_var(i)%short_name) + endif + if (f_bounds) then + status = nf90_put_att(ncid, varid, 'bounds', coord_bounds(i)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining bounds for '//coord_var(i)%short_name) + endif + enddo + + ! Extra dimensions (NCAT, NZILYR, NZSLYR, NZBLYR, NZALYR, NFSD) + dimidex(1)=cmtid + dimidex(2)=kmtidi + dimidex(3)=kmtids + dimidex(4)=kmtidb + dimidex(5)=kmtida + dimidex(6)=fmtid + + do i = 1, nvarz + if (igrdz(i)) then + status = nf90_def_var(ncid, var_nz(i)%short_name, & + lprecision, dimidex(i), varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining short_name for '//var_nz(i)%short_name) + status = nf90_put_att(ncid,varid,'long_name',var_nz(i)%long_name) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//var_nz(i)%short_name) + status = nf90_put_att(ncid, varid, 'units', var_nz(i)%units) + if (Status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//var_nz(i)%short_name) + endif + enddo + + ! 
Attributes for tmask, blkmask defined separately, since they have no units + if (igrd(n_tmask)) then + status = nf90_def_var(ncid, 'tmask', lprecision, dimid(1:2), varid) + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining var tmask') + status = nf90_put_att(ncid,varid, 'long_name', 'ocean grid mask') + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: tmask long_name') + status = nf90_put_att(ncid, varid, 'coordinates', 'TLON TLAT') + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: tmask units') + status = nf90_put_att(ncid,varid,'comment', '0 = land, 1 = ocean') + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: tmask comment') + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining missing_value for tmask') + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining _FillValue for tmask') + endif + + if (igrd(n_blkmask)) then + status = nf90_def_var(ncid, 'blkmask', lprecision, dimid(1:2), varid) + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining var blkmask') + status = nf90_put_att(ncid,varid, 'long_name', 'ice grid block mask') + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: blkmask long_name') + status = nf90_put_att(ncid, varid, 'coordinates', 'TLON TLAT') + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: blkmask units') + status = nf90_put_att(ncid,varid,'comment', 'mytask + iblk/100') + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: blkmask comment') + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining missing_value for blkmask') + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname//'ERROR: defining _FillValue for blkmask') + endif + + do i = 3, nvar ! 
note n_tmask=1, n_blkmask=2 + if (igrd(i)) then + status = nf90_def_var(ncid, var(i)%req%short_name, & + lprecision, dimid(1:2), varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//var(i)%req%short_name) + status = nf90_put_att(ncid,varid, 'long_name', var(i)%req%long_name) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//var(i)%req%short_name) + status = nf90_put_att(ncid, varid, 'units', var(i)%req%units) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//var(i)%req%short_name) + status = nf90_put_att(ncid, varid, 'coordinates', var(i)%coordinates) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//var(i)%req%short_name) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//var(i)%req%short_name) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//var(i)%req%short_name) + endif + enddo + + ! Fields with dimensions (nverts,nx,ny) + dimid_nverts(1) = nvertexid + dimid_nverts(2) = imtid + dimid_nverts(3) = jmtid + do i = 1, nvar_verts + if (f_bounds) then + status = nf90_def_var(ncid, var_nverts(i)%short_name, & + lprecision,dimid_nverts, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//var_nverts(i)%short_name) + status = nf90_put_att(ncid,varid, 'long_name', var_nverts(i)%long_name) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//var_nverts(i)%short_name) + status = nf90_put_att(ncid, varid, 'units', var_nverts(i)%units) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//var_nverts(i)%short_name) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//var_nverts(i)%short_name) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//var_nverts(i)%short_name) + endif + enddo + + do n=1,num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & + lprecision, dimid, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + !----------------------------------------------------------------- + ! Add cell_methods attribute to variables if averaged + !----------------------------------------------------------------- + if (hist_avg) then + if (TRIM(avail_hist_fields(n)%vname)/='sig1' & + .or.TRIM(avail_hist_fields(n)%vname)/='sig2' & + .or.TRIM(avail_hist_fields(n)%vname)/='sistreave' & + .or.TRIM(avail_hist_fields(n)%vname)/='sistremax' & + .or.TRIM(avail_hist_fields(n)%vname)/='sigP') then + status = nf90_put_att(ncid,varid,'cell_methods','time: mean') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) + endif + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg & + .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots + .or. n==n_sig1(ns) .or. n==n_sig2(ns) & + .or. n==n_sigP(ns) .or. n==n_trsig(ns) & + .or. n==n_sistreave(ns) .or. n==n_sistremax(ns) & + .or. n==n_mlt_onset(ns) .or. n==n_frz_onset(ns) & + .or. n==n_hisnap(ns) .or. n==n_aisnap(ns)) then + status = nf90_put_att(ncid,varid,'time_rep','instantaneous') + else + status = nf90_put_att(ncid,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_2D + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = cmtid + dimidz(4) = timid + + do n = n2D + 1, n3Dccum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & + lprecision, dimidz, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + !----------------------------------------------------------------- + ! Add cell_methods attribute to variables if averaged + !----------------------------------------------------------------- + if (hist_avg) then + status = nf90_put_att(ncid,varid,'cell_methods','time: mean') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = nf90_put_att(ncid,varid,'time_rep','instantaneous') + else + status = nf90_put_att(ncid,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Dc + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtidi + dimidz(4) = timid + + do n = n3Dccum + 1, n3Dzcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & + lprecision, dimidz, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + endif + enddo ! num_avail_hist_fields_3Dz + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtidb + dimidz(4) = timid + + do n = n3Dzcum + 1, n3Dbcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & + lprecision, dimidz, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + endif + enddo ! 
num_avail_hist_fields_3Db + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtida + dimidz(4) = timid + + do n = n3Dbcum + 1, n3Dacum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & + lprecision, dimidz, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + endif + enddo ! num_avail_hist_fields_3Da + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = fmtid + dimidz(4) = timid + + do n = n3Dacum + 1, n3Dfcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & + lprecision, dimidz, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + endif + enddo ! num_avail_hist_fields_3Df + + dimidcz(1) = imtid + dimidcz(2) = jmtid + dimidcz(3) = kmtidi + dimidcz(4) = cmtid + dimidcz(5) = timid + + do n = n3Dfcum + 1, n4Dicum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & +! lprecision, dimidcz, varid) + lprecision, dimidcz(1:4), varid) ! 
ferret + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + !----------------------------------------------------------------- + ! Add cell_methods attribute to variables if averaged + !----------------------------------------------------------------- + if (hist_avg) then + status = nf90_put_att(ncid,varid,'cell_methods','time: mean') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = nf90_put_att(ncid,varid,'time_rep','instantaneous') + else + status = nf90_put_att(ncid,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_4Di + + dimidcz(1) = imtid + dimidcz(2) = jmtid + dimidcz(3) = kmtids + dimidcz(4) = cmtid + dimidcz(5) = timid + + do n = n4Dicum + 1, n4Dscum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & +! lprecision, dimidcz, varid) + lprecision, dimidcz(1:4), varid) ! 
ferret + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + !----------------------------------------------------------------- + ! Add cell_methods attribute to variables if averaged + !----------------------------------------------------------------- + if (hist_avg) then + status = nf90_put_att(ncid,varid,'cell_methods','time: mean') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = nf90_put_att(ncid,varid,'time_rep','instantaneous') + else + status = nf90_put_att(ncid,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_4Ds + + dimidcz(1) = imtid + dimidcz(2) = jmtid + dimidcz(3) = fmtid + dimidcz(4) = cmtid + dimidcz(5) = timid + + do n = n4Dscum + 1, n4Dfcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = nf90_def_var(ncid, avail_hist_fields(n)%vname, & +! lprecision, dimidcz, varid) + lprecision, dimidcz(1:4), varid) ! 
ferret + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining variable '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'units', & + avail_hist_fields(n)%vunit) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining units for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid, 'long_name', & + avail_hist_fields(n)%vdesc) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining long_name for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'coordinates', & + avail_hist_fields(n)%vcoord) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining coordinates for '//avail_hist_fields(n)%vname) + status = nf90_put_att(ncid,varid,'cell_measures', & + avail_hist_fields(n)%vcellmeas) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell measures for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'missing_value',spval) + else + status = nf90_put_att(ncid,varid,'missing_value',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining missing_value for '//avail_hist_fields(n)%vname) + if (lprecision == nf90_float) then + status = nf90_put_att(ncid,varid,'_FillValue',spval) + else + status = nf90_put_att(ncid,varid,'_FillValue',spval_dbl) + endif + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining _FillValue for '//avail_hist_fields(n)%vname) + + !----------------------------------------------------------------- + ! Add cell_methods attribute to variables if averaged + !----------------------------------------------------------------- + if (hist_avg) then + status = nf90_put_att(ncid,varid,'cell_methods','time: mean') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: defining cell methods for '//avail_hist_fields(n)%vname) + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = nf90_put_att(ncid,varid,'time_rep','instantaneous') + else + status = nf90_put_att(ncid,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_4Df + + !----------------------------------------------------------------- + ! global attributes + !----------------------------------------------------------------- + ! ... the user should change these to something useful ... 
+ !----------------------------------------------------------------- +#ifdef CESMCOUPLED + status = nf90_put_att(ncid,nf90_global,'title',runid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: in global attribute title') +#else + title = 'sea ice model output for CICE' + status = nf90_put_att(ncid,nf90_global,'title',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: in global attribute title') +#endif + title = 'Diagnostic and Prognostic Variables' + status = nf90_put_att(ncid,nf90_global,'contents',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: global attribute contents') + + write(title,'(2a)') 'Los Alamos Sea Ice Model, ', trim(version_name) + status = nf90_put_att(ncid,nf90_global,'source',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: global attribute source') + + if (use_leap_years) then + write(title,'(a,i3,a)') 'This year has ',int(dayyr),' days' + else + write(title,'(a,i3,a)') 'All years have exactly ',int(dayyr),' days' + endif + status = nf90_put_att(ncid,nf90_global,'comment',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: global attribute comment') + + write(title,'(a,i8.8)') 'File written on model date ',idate + status = nf90_put_att(ncid,nf90_global,'comment2',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: global attribute date1') + + write(title,'(a,i6)') 'seconds elapsed into model date: ',sec + status = nf90_put_att(ncid,nf90_global,'comment3',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: global attribute date2') + + title = 'CF-1.0' + status = & + nf90_put_att(ncid,nf90_global,'conventions',title) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: in global attribute conventions') + + call date_and_time(date=current_date, time=current_time) + write(start_time,1000) current_date(1:4), current_date(5:6), & + current_date(7:8), current_time(1:2), & + current_time(3:4), current_time(5:8) +1000 format('This dataset was created on ', & + a,'-',a,'-',a,' at ',a,':',a,':',a) + + status = nf90_put_att(ncid,nf90_global,'history',start_time) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: global attribute history') + + status = nf90_put_att(ncid,nf90_global,'io_flavor','io_netcdf') + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: global attribute io_flavor') + + !----------------------------------------------------------------- + ! end define mode + !----------------------------------------------------------------- + + status = nf90_enddef(ncid) + if (status /= nf90_noerr) call abort_ice(subname//'ERROR in nf90_enddef') + + !----------------------------------------------------------------- + ! write time variable + !----------------------------------------------------------------- + + status = nf90_inq_varid(ncid,'time',varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting time varid') +!sgl status = nf90_put_var(ncid,varid,ltime) + status = nf90_put_var(ncid,varid,ltime2) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing time variable') + + !----------------------------------------------------------------- + ! 
write time_bounds info + !----------------------------------------------------------------- + + if (hist_avg) then + status = nf90_inq_varid(ncid,'time_bounds',varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting time_bounds id') + status = nf90_put_var(ncid,varid,time_beg(ns),start=(/1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing time_beg') + status = nf90_put_var(ncid,varid,time_end(ns),start=(/2/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing time_end') + endif + + endif ! master_task + + if (my_task==master_task) then + allocate(work_g1(nx_global,ny_global)) + allocate(work_gr(nx_global,ny_global)) + else + allocate(work_gr(1,1)) ! to save memory + allocate(work_g1(1,1)) + endif + + work_g1(:,:) = c0 + + !----------------------------------------------------------------- + ! write coordinate variables + !----------------------------------------------------------------- + + do i = 1,ncoord + call broadcast_scalar(coord_var(i)%short_name,master_task) + SELECT CASE (coord_var(i)%short_name) + CASE ('TLON') + ! Convert T grid longitude from -180 -> 180 to 0 to 360 + work1 = TLON*rad_to_deg + c360 + where (work1 > c360) work1 = work1 - c360 + where (work1 < c0 ) work1 = work1 + c360 + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('TLAT') + work1 = TLAT*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('ULON') + work1 = ULON*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('ULAT') + work1 = ULAT*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + END SELECT + + if (my_task == master_task) then + work_gr = work_g1 + status = nf90_inq_varid(ncid, coord_var(i)%short_name, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//coord_var(i)%short_name) + status = nf90_put_var(ncid,varid,work_gr) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing'//coord_var(i)%short_name) + endif + enddo + + ! Extra dimensions (NCAT, NFSD, VGRD*) + + do i = 1, nvarz + if (igrdz(i)) then + call broadcast_scalar(var_nz(i)%short_name,master_task) + if (my_task == master_task) then + status = nf90_inq_varid(ncid, var_nz(i)%short_name, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//var_nz(i)%short_name) + SELECT CASE (var_nz(i)%short_name) + CASE ('NCAT') + status = nf90_put_var(ncid,varid,hin_max(1:ncat_hist)) + CASE ('NFSD') + status = nf90_put_var(ncid,varid,floe_rad_c(1:nfsd_hist)) + CASE ('VGRDi') ! index - needed for Met Office analysis code + status = nf90_put_var(ncid,varid,(/(k, k=1,nzilyr)/)) + CASE ('VGRDs') ! index - needed for Met Office analysis code + status = nf90_put_var(ncid,varid,(/(k, k=1,nzslyr)/)) + CASE ('VGRDb') + status = nf90_put_var(ncid,varid,(/(k, k=1,nzblyr)/)) + CASE ('VGRDa') + status = nf90_put_var(ncid,varid,(/(k, k=1,nzalyr)/)) + END SELECT + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing'//var_nz(i)%short_name) + endif + endif + enddo + + !----------------------------------------------------------------- + ! 
write grid masks, area and rotation angle + !----------------------------------------------------------------- + + if (igrd(n_tmask)) then + call gather_global(work_g1, hm, master_task, distrb_info) + if (my_task == master_task) then + work_gr=work_g1 + status = nf90_inq_varid(ncid, 'tmask', varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for tmask') + status = nf90_put_var(ncid,varid,work_gr) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable tmask') + endif + endif + + if (igrd(n_blkmask)) then + call gather_global(work_g1, bm, master_task, distrb_info) + if (my_task == master_task) then + work_gr=work_g1 + status = nf90_inq_varid(ncid, 'blkmask', varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for blkmask') + status = nf90_put_var(ncid,varid,work_gr) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable blkmask') + endif + endif + + do i = 3, nvar ! note n_tmask=1, n_blkmask=2 + if (igrd(i)) then + call broadcast_scalar(var(i)%req%short_name,master_task) + SELECT CASE (var(i)%req%short_name) + CASE ('tarea') + call gather_global(work_g1, tarea, master_task, distrb_info) + CASE ('uarea') + call gather_global(work_g1, uarea, master_task, distrb_info) + CASE ('dxu') + call gather_global(work_g1, dxu, master_task, distrb_info) + CASE ('dyu') + call gather_global(work_g1, dyu, master_task, distrb_info) + CASE ('dxt') + call gather_global(work_g1, dxt, master_task, distrb_info) + CASE ('dyt') + call gather_global(work_g1, dyt, master_task, distrb_info) + CASE ('HTN') + call gather_global(work_g1, HTN, master_task, distrb_info) + CASE ('HTE') + call gather_global(work_g1, HTE, master_task, distrb_info) + CASE ('ANGLE') + call gather_global(work_g1, ANGLE, master_task, distrb_info) + CASE ('ANGLET') + call gather_global(work_g1, ANGLET,master_task, distrb_info) + END SELECT + + if (my_task == master_task) then + work_gr=work_g1 + status = nf90_inq_varid(ncid, var(i)%req%short_name, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//var(i)%req%short_name) + status = nf90_put_var(ncid,varid,work_gr) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//var(i)%req%short_name) + endif + endif + enddo + + deallocate(work_gr) + + !---------------------------------------------------------------- + ! Write coordinates of grid box vertices + !---------------------------------------------------------------- + + if (f_bounds) then + if (my_task==master_task) then + allocate(work_gr3(nverts,nx_global,ny_global)) + else + allocate(work_gr3(1,1,1)) ! 
to save memory + endif + + work_gr3(:,:,:) = c0 + work1 (:,:,:) = c0 + + do i = 1, nvar_verts + call broadcast_scalar(var_nverts(i)%short_name,master_task) + SELECT CASE (var_nverts(i)%short_name) + CASE ('lont_bounds') + do ivertex = 1, nverts + work1(:,:,:) = lont_bounds(ivertex,:,:,:) + call gather_global(work_g1, work1, master_task, distrb_info) + if (my_task == master_task) work_gr3(ivertex,:,:) = work_g1(:,:) + enddo + CASE ('latt_bounds') + do ivertex = 1, nverts + work1(:,:,:) = latt_bounds(ivertex,:,:,:) + call gather_global(work_g1, work1, master_task, distrb_info) + if (my_task == master_task) work_gr3(ivertex,:,:) = work_g1(:,:) + enddo + CASE ('lonu_bounds') + do ivertex = 1, nverts + work1(:,:,:) = lonu_bounds(ivertex,:,:,:) + call gather_global(work_g1, work1, master_task, distrb_info) + if (my_task == master_task) work_gr3(ivertex,:,:) = work_g1(:,:) + enddo + CASE ('latu_bounds') + do ivertex = 1, nverts + work1(:,:,:) = latu_bounds(ivertex,:,:,:) + call gather_global(work_g1, work1, master_task, distrb_info) + if (my_task == master_task) work_gr3(ivertex,:,:) = work_g1(:,:) + enddo + END SELECT + + if (my_task == master_task) then + status = nf90_inq_varid(ncid, var_nverts(i)%short_name, varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//var_nverts(i)%short_name) + status = nf90_put_var(ncid,varid,work_gr3) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//var_nverts(i)%short_name) + endif + enddo + deallocate(work_gr3) + endif + + !----------------------------------------------------------------- + ! write variable data + !----------------------------------------------------------------- + + if (my_task==master_task) then + allocate(work_gr(nx_global,ny_global)) + else + allocate(work_gr(1,1)) ! to save memory + endif + work_gr(:,:) = c0 + work_g1(:,:) = c0 + + do n=1,num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + call gather_global(work_g1, a2D(:,:,n,:), & + master_task, distrb_info) + if (my_task == master_task) then + work_gr(:,:) = work_g1(:,:) + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + status = nf90_put_var(ncid,varid,work_gr(:,:), & + count=(/nx_global,ny_global/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + endif + enddo ! num_avail_hist_fields_2D + + work_gr(:,:) = c0 + work_g1(:,:) = c0 + + do n = n2D + 1, n3Dccum + nn = n - n2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + endif + do k = 1, ncat_hist + call gather_global(work_g1, a3Dc(:,:,k,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + status = nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + enddo ! k + endif + enddo ! 
num_avail_hist_fields_3Dc + + work_gr(:,:) = c0 + work_g1(:,:) = c0 + + do n = n3Dccum+1, n3Dzcum + nn = n - n3Dccum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + endif + do k = 1, nzilyr + call gather_global(work_g1, a3Dz(:,:,k,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + + if (my_task == master_task) then + status = nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + enddo ! k + endif + enddo ! num_avail_hist_fields_3Dz + + work_gr(:,:) = c0 + work_g1(:,:) = c0 + + do n = n3Dzcum+1, n3Dbcum + nn = n - n3Dzcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + endif + do k = 1, nzblyr + call gather_global(work_g1, a3Db(:,:,k,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + + if (my_task == master_task) then + status = nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + enddo ! k + endif + enddo ! num_avail_hist_fields_3Db + + work_gr(:,:) = c0 + work_g1(:,:) = c0 + + do n = n3Dbcum+1, n3Dacum + nn = n - n3Dbcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + endif + do k = 1, nzalyr + call gather_global(work_g1, a3Da(:,:,k,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + + if (my_task == master_task) then + status = nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + enddo ! k + endif + enddo ! num_avail_hist_fields_3Da + + work_gr(:,:) = c0 + work_g1(:,:) = c0 + + do n = n3Dacum+1, n3Dfcum + nn = n - n3Dacum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + endif + do k = 1, nfsd_hist + call gather_global(work_g1, a3Df(:,:,k,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + if (my_task == master_task) then + status = nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + enddo ! k + endif + enddo ! num_avail_hist_fields_3Df + + work_gr(:,:) = c0 + work_g1(:,:) = c0 + + do n = n3Dfcum+1, n4Dicum + nn = n - n3Dfcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + endif + do ic = 1, ncat_hist + do k = 1, nzilyr + call gather_global(work_g1, a4Di(:,:,k,ic,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + if (my_task == master_task) then + status = nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/ 1, 1,k,ic/), & + count=(/nx_global,ny_global,1, 1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + enddo ! k + enddo ! ic + endif + enddo ! num_avail_hist_fields_4Di + + work_gr(:,:) = c0 + work_g1(:,:) = c0 + + do n = n4Dicum+1, n4Dscum + nn = n - n4Dicum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + endif + do ic = 1, ncat_hist + do k = 1, nzslyr + call gather_global(work_g1, a4Ds(:,:,k,ic,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + if (my_task == master_task) then + status = nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/ 1, 1,k,ic/), & + count=(/nx_global,ny_global,1, 1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + enddo ! k + enddo ! ic + endif + enddo ! num_avail_hist_fields_4Ds + + do n = n4Dscum+1, n4Dfcum + nn = n - n4Dscum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + endif + do ic = 1, ncat_hist + do k = 1, nfsd_hist + call gather_global(work_g1, a4Df(:,:,k,ic,nn,:), & + master_task, distrb_info) + work_gr(:,:) = work_g1(:,:) + if (my_task == master_task) then + status = nf90_put_var(ncid,varid,work_gr(:,:), & + start=(/ 1, 1,k,ic/), & + count=(/nx_global,ny_global,1, 1/)) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: writing variable '//avail_hist_fields(n)%vname) + endif + enddo ! k + enddo ! ic + endif + enddo ! num_avail_hist_fields_4Df + + deallocate(work_gr) + deallocate(work_g1) + + !----------------------------------------------------------------- + ! 
close output dataset + !----------------------------------------------------------------- + + if (my_task == master_task) then + status = nf90_close(ncid) + if (status /= nf90_noerr) call abort_ice(subname// & + 'ERROR: closing netCDF history file') + write(nu_diag,*) ' ' + write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) + endif + +#else + call abort_ice(subname//'ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + + end subroutine ice_write_hist + +!======================================================================= + + end module ice_history_write + +!======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_pio/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio/ice_restart.F90 new file mode 100644 index 000000000..d673c7f7a --- /dev/null +++ b/cicecore/cicedynB/infrastructure/io/io_pio/ice_restart.F90 @@ -0,0 +1,884 @@ +!======================================================================= +! +! Read and write ice model restart files using pio interfaces. +! authors David A Bailey, NCAR + + module ice_restart + + use ice_broadcast + use ice_exit, only: abort_ice + use ice_fileunits, only: nu_diag, nu_restart, nu_rst_pointer + use ice_kinds_mod + use ice_restart_shared, only: & + restart, restart_ext, restart_dir, restart_file, pointer_file, & + runid, runtype, use_restart_time, restart_format, lcdf64, lenstr + use ice_pio + use pio + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_indices + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_sizes + + implicit none + private + public :: init_restart_write, init_restart_read, & + read_restart_field, write_restart_field, final_restart + + type(file_desc_t) :: File + type(var_desc_t) :: vardesc + + type(io_desc_t) :: iodesc2d + type(io_desc_t) :: iodesc3d_ncat + +!======================================================================= + + contains + +!======================================================================= + +! Sets up restart file for reading. +! author David A Bailey, NCAR + + subroutine init_restart_read(ice_ic) + + use ice_calendar, only: istep0, istep1, time, time_forc, nyr, month, & + mday, sec, npt + use ice_communicate, only: my_task, master_task + use ice_domain_size, only: ncat + use ice_read_write, only: ice_open + + character(len=char_len_long), intent(in), optional :: ice_ic + + ! local variables + + character(len=char_len_long) :: & + filename, filename0 + + integer (kind=int_kind) :: status + + character(len=*), parameter :: subname = '(init_restart_read)' + + if (present(ice_ic)) then + filename = trim(ice_ic) + else + if (my_task == master_task) then + open(nu_rst_pointer,file=pointer_file) + read(nu_rst_pointer,'(a)') filename0 + filename = trim(filename0) + close(nu_rst_pointer) + write(nu_diag,*) 'Read ',pointer_file(1:lenstr(pointer_file)) + endif + call broadcast_scalar(filename, master_task) + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Using restart dump=', trim(filename) + end if + + if (restart_format == 'pio') then + File%fh=-1 + call ice_pio_init(mode='read', filename=trim(filename), File=File) + + call ice_pio_initdecomp(iodesc=iodesc2d) + call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat,remap=.true.) 
+ + if (use_restart_time) then + status = pio_get_att(File, pio_global, 'istep1', istep0) + status = pio_get_att(File, pio_global, 'time', time) + status = pio_get_att(File, pio_global, 'time_forc', time_forc) + call pio_seterrorhandling(File, PIO_BCAST_ERROR) + status = pio_get_att(File, pio_global, 'nyr', nyr) + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) + if (status == PIO_noerr) then + status = pio_get_att(File, pio_global, 'month', month) + status = pio_get_att(File, pio_global, 'mday', mday) + status = pio_get_att(File, pio_global, 'sec', sec) + endif + endif ! use namelist values if use_restart_time = F + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc + endif + + call broadcast_scalar(istep0,master_task) + call broadcast_scalar(time,master_task) + call broadcast_scalar(time_forc,master_task) + call broadcast_scalar(nyr,master_task) + + istep1 = istep0 + + ! if runid is bering then need to correct npt for istep0 + if (trim(runid) == 'bering') then + npt = npt - istep0 + endif + + end subroutine init_restart_read + +!======================================================================= + +! Sets up restart file for writing. +! author David A Bailey, NCAR + + subroutine init_restart_write(filename_spec) + + use ice_calendar, only: sec, month, mday, nyr, istep1, & + time, time_forc, year_init + use ice_communicate, only: my_task, master_task + use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, nslyr, & + n_iso, n_aero, nblyr, n_zaero, n_algae, n_doc, & + n_dic, n_don, n_fed, n_fep + use ice_dyn_shared, only: kdyn + use ice_arrays_column, only: oceanmixed_ice + + logical (kind=log_kind) :: & + solve_zsal, skl_bgc, z_tracers + + logical (kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & + tr_pond_topo, tr_pond_lvl, tr_brine, & + tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & + tr_bgc_Sil, tr_bgc_DMS, & + tr_bgc_chl, tr_bgc_Am, & + tr_bgc_PON, tr_bgc_DON, & + tr_zaero, tr_bgc_Fe, & + tr_bgc_hum + + integer (kind=int_kind) :: & + nbtrcr + + character(len=char_len_long), intent(in), optional :: filename_spec + + ! local variables + + integer (kind=int_kind) :: & + iyear, imonth, iday ! year, month, day + + character(len=char_len_long) :: filename + + integer (kind=int_kind) :: dimid_ni, dimid_nj, dimid_ncat, & + dimid_nilyr, dimid_nslyr, dimid_naero + + integer (kind=int_kind), allocatable :: dims(:) + + integer (kind=int_kind) :: & + k, n, & ! loop index + status ! status variable from netCDF routine + + character (len=3) :: nchar, ncharb + + character(len=*), parameter :: subname = '(init_restart_write)' + + call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) + call icepack_query_tracer_flags( & + tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & + tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & + tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & + tr_bgc_chl_out=tr_bgc_chl, tr_bgc_Am_out=tr_bgc_Am, & + tr_bgc_PON_out=tr_bgc_PON, tr_bgc_DON_out=tr_bgc_DON, & + tr_zaero_out=tr_zaero, tr_bgc_Fe_out=tr_bgc_Fe, & + tr_bgc_hum_out=tr_bgc_hum) + call icepack_query_parameters(solve_zsal_out=solve_zsal, skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! 
construct path/file + if (present(filename_spec)) then + filename = trim(filename_spec) + else + iyear = nyr + year_init - 1 + imonth = month + iday = mday + + write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & + restart_dir(1:lenstr(restart_dir)), & + restart_file(1:lenstr(restart_file)),'.', & + iyear,'-',month,'-',mday,'-',sec + end if + + if (restart_format /= 'bin') filename = trim(filename) // '.nc' + + ! write pointer (path/file) + if (my_task == master_task) then + open(nu_rst_pointer,file=pointer_file) + write(nu_rst_pointer,'(a)') filename + close(nu_rst_pointer) + endif + + if (restart_format == 'pio') then + + File%fh=-1 + call ice_pio_init(mode='write',filename=trim(filename), File=File, & + clobber=.true., cdf64=lcdf64 ) + + status = pio_put_att(File,pio_global,'istep1',istep1) + status = pio_put_att(File,pio_global,'time',time) + status = pio_put_att(File,pio_global,'time_forc',time_forc) + status = pio_put_att(File,pio_global,'nyr',nyr) + status = pio_put_att(File,pio_global,'month',month) + status = pio_put_att(File,pio_global,'mday',mday) + status = pio_put_att(File,pio_global,'sec',sec) + + status = pio_def_dim(File,'ni',nx_global,dimid_ni) + status = pio_def_dim(File,'nj',ny_global,dimid_nj) + status = pio_def_dim(File,'ncat',ncat,dimid_ncat) + + !----------------------------------------------------------------- + ! 2D restart fields + !----------------------------------------------------------------- + + allocate(dims(2)) + + dims(1) = dimid_ni + dims(2) = dimid_nj + + call define_rest_field(File,'uvel',dims) + call define_rest_field(File,'vvel',dims) + +#ifdef CESMCOUPLED + call define_rest_field(File,'coszen',dims) +#endif + call define_rest_field(File,'scale_factor',dims) + call define_rest_field(File,'swvdr',dims) + call define_rest_field(File,'swvdf',dims) + call define_rest_field(File,'swidr',dims) + call define_rest_field(File,'swidf',dims) + + call define_rest_field(File,'strocnxT',dims) + call define_rest_field(File,'strocnyT',dims) + + call define_rest_field(File,'stressp_1',dims) + call define_rest_field(File,'stressp_2',dims) + call define_rest_field(File,'stressp_3',dims) + call define_rest_field(File,'stressp_4',dims) + + call define_rest_field(File,'stressm_1',dims) + call define_rest_field(File,'stressm_2',dims) + call define_rest_field(File,'stressm_3',dims) + call define_rest_field(File,'stressm_4',dims) + + call define_rest_field(File,'stress12_1',dims) + call define_rest_field(File,'stress12_2',dims) + call define_rest_field(File,'stress12_3',dims) + call define_rest_field(File,'stress12_4',dims) + + call define_rest_field(File,'iceumask',dims) + + if (oceanmixed_ice) then + call define_rest_field(File,'sst',dims) + call define_rest_field(File,'frzmlt',dims) + endif + + if (tr_FY) then + call define_rest_field(File,'frz_onset',dims) + end if + + if (kdyn == 2) then + call define_rest_field(File,'a11_1',dims) + call define_rest_field(File,'a11_2',dims) + call define_rest_field(File,'a11_3',dims) + call define_rest_field(File,'a11_4',dims) + call define_rest_field(File,'a12_1',dims) + call define_rest_field(File,'a12_2',dims) + call define_rest_field(File,'a12_3',dims) + call define_rest_field(File,'a12_4',dims) + endif + + if (tr_pond_lvl) then + call define_rest_field(File,'fsnow',dims) + endif + + if (nbtrcr > 0) then + if (tr_bgc_N) then + do k=1,n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'algalN'//trim(nchar),dims) + enddo + endif + if (tr_bgc_C) then + do k=1,n_doc + write(nchar,'(i3.3)') k + call 
define_rest_field(File,'doc'//trim(nchar),dims) + enddo + do k=1,n_dic + write(nchar,'(i3.3)') k + call define_rest_field(File,'dic'//trim(nchar),dims) + enddo + endif + call define_rest_field(File,'nit' ,dims) + if (tr_bgc_Am) & + call define_rest_field(File,'amm' ,dims) + if (tr_bgc_Sil) & + call define_rest_field(File,'sil' ,dims) + if (tr_bgc_hum) & + call define_rest_field(File,'hum' ,dims) + if (tr_bgc_DMS) then + call define_rest_field(File,'dmsp' ,dims) + call define_rest_field(File,'dms' ,dims) + endif + if (tr_bgc_DON) then + do k=1,n_don + write(nchar,'(i3.3)') k + call define_rest_field(File,'don'//trim(nchar),dims) + enddo + endif + if (tr_bgc_Fe ) then + do k=1,n_fed + write(nchar,'(i3.3)') k + call define_rest_field(File,'fed'//trim(nchar),dims) + enddo + do k=1,n_fep + write(nchar,'(i3.3)') k + call define_rest_field(File,'fep'//trim(nchar),dims) + enddo + endif + if (tr_zaero) then + do k=1,n_zaero + write(nchar,'(i3.3)') k + call define_rest_field(File,'zaeros'//trim(nchar),dims) + enddo + endif + endif !nbtrcr + + if (solve_zsal) call define_rest_field(File,'sss',dims) + + deallocate(dims) + + !----------------------------------------------------------------- + ! 3D restart fields (ncat) + !----------------------------------------------------------------- + + allocate(dims(3)) + + dims(1) = dimid_ni + dims(2) = dimid_nj + dims(3) = dimid_ncat + + call define_rest_field(File,'aicen',dims) + call define_rest_field(File,'vicen',dims) + call define_rest_field(File,'vsnon',dims) + call define_rest_field(File,'Tsfcn',dims) + + if (tr_iage) then + call define_rest_field(File,'iage',dims) + end if + + if (tr_FY) then + call define_rest_field(File,'FY',dims) + end if + + if (tr_lvl) then + call define_rest_field(File,'alvl',dims) + call define_rest_field(File,'vlvl',dims) + end if + + if (tr_pond_cesm) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + end if + + if (tr_pond_topo) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + call define_rest_field(File,'ipnd',dims) + end if + + if (tr_pond_lvl) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + call define_rest_field(File,'ipnd',dims) + call define_rest_field(File,'dhs',dims) + call define_rest_field(File,'ffrac',dims) + end if + + if (tr_brine) then + call define_rest_field(File,'fbrn',dims) + call define_rest_field(File,'first_ice',dims) + endif + + if (skl_bgc) then + do k = 1, n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_N'//trim(nchar) ,dims) + enddo + if (tr_bgc_C) then + ! do k = 1, n_algae + ! write(nchar,'(i3.3)') k + ! call define_rest_field(File,'bgc_C'//trim(nchar) ,dims) + ! 
enddo + do k = 1, n_doc + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DOC'//trim(nchar) ,dims) + enddo + do k = 1, n_dic + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DIC'//trim(nchar) ,dims) + enddo + endif + if (tr_bgc_chl) then + do k = 1, n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_chl'//trim(nchar) ,dims) + enddo + endif + call define_rest_field(File,'bgc_Nit' ,dims) + if (tr_bgc_Am) & + call define_rest_field(File,'bgc_Am' ,dims) + if (tr_bgc_Sil) & + call define_rest_field(File,'bgc_Sil' ,dims) + if (tr_bgc_hum) & + call define_rest_field(File,'bgc_hum' ,dims) + if (tr_bgc_DMS) then + call define_rest_field(File,'bgc_DMSPp',dims) + call define_rest_field(File,'bgc_DMSPd',dims) + call define_rest_field(File,'bgc_DMS' ,dims) + endif + if (tr_bgc_PON) & + call define_rest_field(File,'bgc_PON' ,dims) + if (tr_bgc_DON) then + do k = 1, n_don + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DON'//trim(nchar) ,dims) + enddo + endif + if (tr_bgc_Fe ) then + do k = 1, n_fed + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fed'//trim(nchar) ,dims) + enddo + do k = 1, n_fep + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fep'//trim(nchar) ,dims) + enddo + endif + endif !skl_bgc + if (solve_zsal) & + call define_rest_field(File,'Rayleigh',dims) + + !----------------------------------------------------------------- + ! 4D restart fields, written as layers of 3D + !----------------------------------------------------------------- + + do k=1,nilyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'sice'//trim(nchar),dims) + call define_rest_field(File,'qice'//trim(nchar),dims) + enddo + + do k=1,nslyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'qsno'//trim(nchar),dims) + enddo + + if (tr_iso) then + do k=1,n_iso + write(nchar,'(i3.3)') k + call define_rest_field(File,'isosno'//nchar, dims) + call define_rest_field(File,'isoice'//nchar, dims) + enddo + endif + + if (tr_aero) then + do k=1,n_aero + write(nchar,'(i3.3)') k + call define_rest_field(File,'aerosnossl'//nchar, dims) + call define_rest_field(File,'aerosnoint'//nchar, dims) + call define_rest_field(File,'aeroicessl'//nchar, dims) + call define_rest_field(File,'aeroiceint'//nchar, dims) + enddo + endif + + if (solve_zsal) then + do k = 1, nblyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'zSalinity'//trim(nchar),dims) + enddo + endif + if (z_tracers) then + if (tr_zaero) then + do n = 1, n_zaero + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'zaero'//trim(ncharb)//trim(nchar),dims) + enddo !k + enddo !n + endif !tr_zaero + if (tr_bgc_Nit) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Nit'//trim(nchar),dims) + enddo + endif + if (tr_bgc_N) then + do n = 1, n_algae + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_N'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_C) then + ! do n = 1, n_algae + ! write(ncharb,'(i3.3)') n + ! do k = 1, nblyr+3 + ! write(nchar,'(i3.3)') k + ! call + ! define_rest_field(File,'bgc_C'//trim(ncharb)//trim(nchar),dims) + ! enddo + ! 
enddo + do n = 1, n_doc + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DOC'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + do n = 1, n_dic + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DIC'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_chl) then + do n = 1, n_algae + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_chl'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_Am) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Am'//trim(nchar),dims) + enddo + endif + if (tr_bgc_Sil) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Sil'//trim(nchar),dims) + enddo + endif + if (tr_bgc_hum) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_hum'//trim(nchar),dims) + enddo + endif + if (tr_bgc_DMS) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DMSPp'//trim(nchar),dims) + call define_rest_field(File,'bgc_DMSPd'//trim(nchar),dims) + call define_rest_field(File,'bgc_DMS'//trim(nchar),dims) + enddo + endif + if (tr_bgc_PON) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_PON'//trim(nchar),dims) + enddo + endif + if (tr_bgc_DON) then + do n = 1, n_don + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DON'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_Fe ) then + do n = 1, n_fed + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fed'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + do n = 1, n_fep + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fep'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + do k = 1, nbtrcr + write(nchar,'(i3.3)') k + call define_rest_field(File,'zbgc_frac'//trim(nchar),dims) + enddo + endif !z_tracers + + deallocate(dims) + status = pio_enddef(File) + + call ice_pio_initdecomp(iodesc=iodesc2d) + call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat, remap=.true.) + + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) + endif + + end subroutine init_restart_write + +!======================================================================= + +! Reads a single restart field +! author David A Bailey, NCAR + + subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & + field_loc, field_type) + + use ice_blocks, only: nx_block, ny_block + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, field_loc_center + use ice_boundary, only: ice_HaloUpdate + use ice_domain, only: halo_info, distrb_info, nblocks + use ice_domain_size, only: max_blocks, ncat + use ice_global_reductions, only: global_minval, global_maxval, global_sum + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number (not used for netcdf) + ndim3 , & ! third dimension + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,ndim3,max_blocks), intent(inout) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! 
if true, write diagnostic output + + character (len=*), intent(in) :: vname + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + ! local variables + + integer (kind=int_kind) :: & + j, & ! dimension counter + n, & ! number of dimensions for variable + ndims, & ! number of variable dimensions + status ! status variable from netCDF routine + + real (kind=dbl_kind) :: amin,amax,asum + + character(len=*), parameter :: subname = '(read_restart_field)' + + if (restart_format == "pio") then + if (my_task == master_task) & + write(nu_diag,*)'Parallel restart file read: ',vname + + call pio_seterrorhandling(File, PIO_BCAST_ERROR) + + status = pio_inq_varid(File,trim(vname),vardesc) + + if (status /= 0) then + call abort_ice(subname//"ERROR: CICE restart? Missing variable: "//trim(vname)) + endif + + status = pio_inq_varndims(File, vardesc, ndims) + + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) + +! if (ndim3 == ncat .and. ncat>1) then + if (ndim3 == ncat .and. ndims == 3) then + call pio_read_darray(File, vardesc, iodesc3d_ncat, work, status) + if (present(field_loc)) then + do n=1,ndim3 + call ice_HaloUpdate (work(:,:,n,:), halo_info, & + field_loc, field_type) + enddo + endif +! elseif (ndim3 == 1) then + elseif (ndim3 == 1 .and. ndims == 2) then + call pio_read_darray(File, vardesc, iodesc2d, work, status) + if (present(field_loc)) then + call ice_HaloUpdate (work(:,:,1,:), halo_info, & + field_loc, field_type) + endif + else + write(nu_diag,*) "ndim3 not supported ",ndim3 + endif + + if (diag) then + if (ndim3 > 1) then + do n=1,ndim3 + amin = global_minval(work(:,:,n,:),distrb_info) + amax = global_maxval(work(:,:,n,:),distrb_info) + asum = global_sum(work(:,:,n,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + enddo + else + amin = global_minval(work(:,:,1,:),distrb_info) + amax = global_maxval(work(:,:,1,:),distrb_info) + asum = global_sum(work(:,:,1,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + write(nu_diag,*) '' + endif + endif + + endif + else + call abort_ice(subname//"ERROR: Invalid restart_format: "//trim(restart_format)) + endif + + end subroutine read_restart_field + +!======================================================================= + +! Writes a single restart field. +! author David A Bailey, NCAR + + subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) + + use ice_blocks, only: nx_block, ny_block + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, field_loc_center + use ice_domain, only: distrb_info, nblocks + use ice_domain_size, only: max_blocks, ncat + use ice_global_reductions, only: global_minval, global_maxval, global_sum + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + ndim3 , & ! third dimension + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,ndim3,max_blocks), intent(in) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (len=*), intent(in) :: vname + + ! local variables + + integer (kind=int_kind) :: & + j, & ! 
dimension counter + n, & ! dimension counter + ndims, & ! number of variable dimensions + status ! status variable from netCDF routine + + real (kind=dbl_kind) :: amin,amax,asum + + character(len=*), parameter :: subname = '(write_restart_field)' + + if (restart_format == "pio") then + if (my_task == master_task) & + write(nu_diag,*)'Parallel restart file write: ',vname + + status = pio_inq_varid(File,trim(vname),vardesc) + + status = pio_inq_varndims(File, vardesc, ndims) + + if (ndims==3) then + call pio_write_darray(File, vardesc, iodesc3d_ncat,work(:,:,:,1:nblocks), & + status, fillval=c0) + elseif (ndims == 2) then + call pio_write_darray(File, vardesc, iodesc2d, work(:,:,1,1:nblocks), & + status, fillval=c0) + else + write(nu_diag,*) "ndims not supported",ndims,ndim3 + endif + + if (diag) then + if (ndim3 > 1) then + do n=1,ndim3 + amin = global_minval(work(:,:,n,:),distrb_info) + amax = global_maxval(work(:,:,n,:),distrb_info) + asum = global_sum(work(:,:,n,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + enddo + else + amin = global_minval(work(:,:,1,:),distrb_info) + amax = global_maxval(work(:,:,1,:),distrb_info) + asum = global_sum(work(:,:,1,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + endif + endif + else + call abort_ice(subname//"ERROR: Invalid restart_format: "//trim(restart_format)) + endif + + end subroutine write_restart_field + +!======================================================================= + +! Finalize the restart file. +! author David A Bailey, NCAR + + subroutine final_restart() + + use ice_calendar, only: istep1, time, time_forc + use ice_communicate, only: my_task, master_task + + character(len=*), parameter :: subname = '(final_restart)' + + if (restart_format == 'pio') then + call PIO_freeDecomp(File,iodesc2d) + call PIO_freeDecomp(File,iodesc3d_ncat) + call pio_closefile(File) + endif + + if (my_task == master_task) & + write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc + + end subroutine final_restart + +!======================================================================= + +! Defines a restart field +! author David A Bailey, NCAR + + subroutine define_rest_field(File, vname, dims) + + type(file_desc_t) , intent(in) :: File + character (len=*) , intent(in) :: vname + integer (kind=int_kind), intent(in) :: dims(:) + + integer (kind=int_kind) :: & + status ! status variable from netCDF routine + + character(len=*), parameter :: subname = '(define_rest_field)' + + status = pio_def_var(File,trim(vname),pio_double,dims,vardesc) + + end subroutine define_rest_field + +!======================================================================= + + end module ice_restart + +!======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 new file mode 100644 index 000000000..7e16f2591 --- /dev/null +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_history_write.F90 @@ -0,0 +1,1305 @@ +!======================================================================= +! +! Writes history in netCDF format +! +! authors Tony Craig and Bruce Briegleb, NCAR +! Elizabeth C. Hunke and William H. Lipscomb, LANL +! C. M. Bitz, UW +! +! 2004 WHL: Block structure added +! 
2006 ECH: Accepted some CESM code into mainstream CICE +! Added ice_present, aicen, vicen; removed aice1...10, vice1...1. +! Added histfreq_n and histfreq='h' options, removed histfreq='w' +! Converted to free source form (F90) +! Added option for binary output instead of netCDF +! 2009 D Bailey and ECH: Generalized for multiple frequency output +! 2010 Alison McLaren and ECH: Added 3D capability +! + module ice_history_write + + use ice_kinds_mod + use ice_fileunits, only: nu_diag + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters + + implicit none + private + public :: ice_write_hist + +!======================================================================= + + contains + +!======================================================================= +! +! write average ice quantities or snapshots +! +! author: Elizabeth C. Hunke, LANL + + subroutine ice_write_hist (ns) + + use ice_blocks, only: nx_block, ny_block + use ice_broadcast, only: broadcast_scalar + use ice_calendar, only: time, sec, idate, idate0, write_ic, & + histfreq, dayyr, days_per_year, use_leap_years + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, c360, spval, spval_dbl + use ice_domain, only: distrb_info, nblocks + use ice_domain_size, only: nx_global, ny_global, max_blocks, max_nstrm + use ice_gather_scatter, only: gather_global + use ice_grid, only: TLON, TLAT, ULON, ULAT, hm, bm, tarea, uarea, & + dxu, dxt, dyu, dyt, HTN, HTE, ANGLE, ANGLET, tmask, & + lont_bounds, latt_bounds, lonu_bounds, latu_bounds + use ice_history_shared + use ice_arrays_column, only: hin_max, floe_rad_c + use ice_restart_shared, only: runid, lcdf64 + use ice_pio + use pio + + integer (kind=int_kind), intent(in) :: ns + + ! local variables + + integer (kind=int_kind) :: i,j,k,ic,n,nn, & + ncid,status,imtid,jmtid,kmtidi,kmtids,kmtidb, cmtid,timid, & + length,nvertexid,ivertex,kmtida,fmtid + integer (kind=int_kind), dimension(2) :: dimid2 + integer (kind=int_kind), dimension(3) :: dimid3 + integer (kind=int_kind), dimension(4) :: dimidz + integer (kind=int_kind), dimension(5) :: dimidcz + integer (kind=int_kind), dimension(3) :: dimid_nverts + integer (kind=int_kind), dimension(6) :: dimidex + real (kind=real_kind) :: ltime + real (kind= dbl_kind) :: ltime2 + character (char_len) :: title + character (char_len_long) :: ncfile(max_nstrm) + integer (kind=int_kind) :: iotype + + integer (kind=int_kind) :: iyear, imonth, iday + integer (kind=int_kind) :: icategory,ind,i_aice,boundid + + character (char_len) :: start_time,current_date,current_time + character (len=16) :: c_aice + character (len=8) :: cdate + + type(file_desc_t) :: File + type(io_desc_t) :: iodesc2d, & + iodesc3dc, iodesc3dv, iodesc3di, iodesc3db, iodesc3da, & + iodesc3df, & + iodesc4di, iodesc4ds, iodesc4df + type(var_desc_t) :: varid + + ! 4 coordinate variables: TLON, TLAT, ULON, ULAT + INTEGER (kind=int_kind), PARAMETER :: ncoord = 4 + + ! 4 vertices in each grid cell + INTEGER (kind=int_kind), PARAMETER :: nverts = 4 + + ! 4 variables describe T, U grid boundaries: + ! lont_bounds, latt_bounds, lonu_bounds, latu_bounds + INTEGER (kind=int_kind), PARAMETER :: nvar_verts = 4 + + TYPE coord_attributes ! netcdf coordinate attributes + character (len=11) :: short_name + character (len=45) :: long_name + character (len=20) :: units + END TYPE coord_attributes + + TYPE req_attributes ! 
req'd netcdf attributes + type (coord_attributes) :: req + character (len=20) :: coordinates + END TYPE req_attributes + + TYPE(req_attributes), dimension(nvar) :: var + TYPE(coord_attributes), dimension(ncoord) :: coord_var + TYPE(coord_attributes), dimension(nvar_verts) :: var_nverts + TYPE(coord_attributes), dimension(nvarz) :: var_nz + CHARACTER (char_len), dimension(ncoord) :: coord_bounds + + real (kind=dbl_kind), allocatable :: workr2(:,:,:) + real (kind=dbl_kind), allocatable :: workr3(:,:,:,:) + real (kind=dbl_kind), allocatable :: workr4(:,:,:,:,:) + real (kind=dbl_kind), allocatable :: workr3v(:,:,:,:) + + character(len=char_len_long) :: & + filename + + integer (kind=int_kind), dimension(1) :: & + tim_start,tim_length ! dimension quantities for netCDF + + integer (kind=int_kind), dimension(2) :: & + bnd_start,bnd_length ! dimension quantities for netCDF + + real (kind=dbl_kind) :: secday + real (kind=dbl_kind) :: rad_to_deg + + integer (kind=int_kind) :: lprecision + + character(len=*), parameter :: subname = '(ice_write_hist)' + + call icepack_query_parameters(secday_out=secday) + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (my_task == master_task) then + call construct_filename(ncfile(ns),'nc',ns) + + ! add local directory path name to ncfile + if (write_ic) then + ncfile(ns) = trim(incond_dir)//ncfile(ns) + else + ncfile(ns) = trim(history_dir)//ncfile(ns) + endif + filename = ncfile(ns) + end if + call broadcast_scalar(filename, master_task) + + ! create file + + iotype = PIO_IOTYPE_NETCDF + if (history_format == 'pio_pnetcdf') iotype = PIO_IOTYPE_PNETCDF + File%fh=-1 + call ice_pio_init(mode='write', filename=trim(filename), File=File, & + clobber=.true., cdf64=lcdf64, iotype=iotype) + + call ice_pio_initdecomp(iodesc=iodesc2d) + call ice_pio_initdecomp(ndim3=ncat_hist, iodesc=iodesc3dc) + call ice_pio_initdecomp(ndim3=nzilyr, iodesc=iodesc3di) + call ice_pio_initdecomp(ndim3=nzblyr, iodesc=iodesc3db) + call ice_pio_initdecomp(ndim3=nzalyr, iodesc=iodesc3da) + call ice_pio_initdecomp(ndim3=nfsd_hist, iodesc=iodesc3df) + call ice_pio_initdecomp(ndim3=nverts, iodesc=iodesc3dv, inner_dim=.true.) + call ice_pio_initdecomp(ndim3=nzilyr, ndim4=ncat_hist, iodesc=iodesc4di) + call ice_pio_initdecomp(ndim3=nzslyr, ndim4=ncat_hist, iodesc=iodesc4ds) + call ice_pio_initdecomp(ndim3=nfsd_hist, ndim4=ncat_hist, iodesc=iodesc4df) + + ltime2 = time/int(secday) + ltime = real(time/int(secday),kind=real_kind) + + ! option of turning on double precision history files + lprecision = pio_real + if (history_precision == 8) lprecision = pio_double + + !----------------------------------------------------------------- + ! define dimensions + !----------------------------------------------------------------- + + if (hist_avg .and. 
histfreq(ns) /= '1') then + status = pio_def_dim(File,'d2',2,boundid) + endif + + status = pio_def_dim(File,'ni',nx_global,imtid) + status = pio_def_dim(File,'nj',ny_global,jmtid) + status = pio_def_dim(File,'nc',ncat_hist,cmtid) + status = pio_def_dim(File,'nkice',nzilyr,kmtidi) + status = pio_def_dim(File,'nksnow',nzslyr,kmtids) + status = pio_def_dim(File,'nkbio',nzblyr,kmtidb) + status = pio_def_dim(File,'nkaer',nzalyr,kmtida) + status = pio_def_dim(File,'time',PIO_UNLIMITED,timid) + status = pio_def_dim(File,'nvertices',nverts,nvertexid) + status = pio_def_dim(File,'nf',nfsd_hist,fmtid) + + !----------------------------------------------------------------- + ! define coordinate variables: time, time_bounds + !----------------------------------------------------------------- + +!sgl status = pio_def_var(File,'time',pio_real,(/timid/),varid) + status = pio_def_var(File,'time',pio_double,(/timid/),varid) + status = pio_put_att(File,varid,'long_name','model time') + + write(cdate,'(i8.8)') idate0 + write(title,'(a,a,a,a,a,a,a)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + status = pio_put_att(File,varid,'units',trim(title)) + + if (days_per_year == 360) then + status = pio_put_att(File,varid,'calendar','360_day') + elseif (days_per_year == 365 .and. .not.use_leap_years ) then + status = pio_put_att(File,varid,'calendar','NoLeap') + elseif (use_leap_years) then + status = pio_put_att(File,varid,'calendar','Gregorian') + else + call abort_ice(subname//'ERROR: invalid calendar settings') + endif + + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'bounds','time_bounds') + endif + + ! Define attributes for time_bounds if hist_avg is true + if (hist_avg .and. histfreq(ns) /= '1') then + dimid2(1) = boundid + dimid2(2) = timid +!sgl status = pio_def_var(File,'time_bounds',pio_real,dimid2,varid) + status = pio_def_var(File,'time_bounds',pio_double,dimid2,varid) + status = pio_put_att(File,varid,'long_name', & + 'boundaries for time-averaging interval') + write(cdate,'(i8.8)') idate0 + write(title,'(a,a,a,a,a,a,a,a)') 'days since ', & + cdate(1:4),'-',cdate(5:6),'-',cdate(7:8),' 00:00:00' + status = pio_put_att(File,varid,'units',trim(title)) + endif + + !----------------------------------------------------------------- + ! define information for required time-invariant variables + !----------------------------------------------------------------- + + ind = 0 + ind = ind + 1 + coord_var(ind) = coord_attributes('TLON', & + 'T grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lont_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('TLAT', & + 'T grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latt_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('ULON', & + 'U grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lonu_bounds' + ind = ind + 1 + coord_var(ind) = coord_attributes('ULAT', & + 'U grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latu_bounds' + + var_nz(1) = coord_attributes('NCAT', 'category maximum thickness', 'm') + var_nz(2) = coord_attributes('VGRDi', 'vertical ice levels', '1') + var_nz(3) = coord_attributes('VGRDs', 'vertical snow levels', '1') + var_nz(4) = coord_attributes('VGRDb', 'vertical ice-bio levels', '1') + var_nz(5) = coord_attributes('VGRDa', 'vertical snow-ice-bio levels', '1') + var_nz(6) = coord_attributes('NFSD', 'category floe size (center)', 'm') + + !----------------------------------------------------------------- + ! 
define information for optional time-invariant variables + !----------------------------------------------------------------- + + var(n_tmask)%req = coord_attributes('tmask', & + 'ocean grid mask', ' ') + var(n_tmask)%coordinates = 'TLON TLAT' + + var(n_blkmask)%req = coord_attributes('blkmask', & + 'ice grid block mask', ' ') + var(n_blkmask)%coordinates = 'TLON TLAT' + + var(n_tarea)%req = coord_attributes('tarea', & + 'area of T grid cells', 'm^2') + var(n_tarea)%coordinates = 'TLON TLAT' + + var(n_uarea)%req = coord_attributes('uarea', & + 'area of U grid cells', 'm^2') + var(n_uarea)%coordinates = 'ULON ULAT' + var(n_dxt)%req = coord_attributes('dxt', & + 'T cell width through middle', 'm') + var(n_dxt)%coordinates = 'TLON TLAT' + var(n_dyt)%req = coord_attributes('dyt', & + 'T cell height through middle', 'm') + var(n_dyt)%coordinates = 'TLON TLAT' + var(n_dxu)%req = coord_attributes('dxu', & + 'U cell width through middle', 'm') + var(n_dxu)%coordinates = 'ULON ULAT' + var(n_dyu)%req = coord_attributes('dyu', & + 'U cell height through middle', 'm') + var(n_dyu)%coordinates = 'ULON ULAT' + var(n_HTN)%req = coord_attributes('HTN', & + 'T cell width on North side','m') + var(n_HTN)%coordinates = 'TLON TLAT' + var(n_HTE)%req = coord_attributes('HTE', & + 'T cell width on East side', 'm') + var(n_HTE)%coordinates = 'TLON TLAT' + var(n_ANGLE)%req = coord_attributes('ANGLE', & + 'angle grid makes with latitude line on U grid', & + 'radians') + var(n_ANGLE)%coordinates = 'ULON ULAT' + var(n_ANGLET)%req = coord_attributes('ANGLET', & + 'angle grid makes with latitude line on T grid', & + 'radians') + var(n_ANGLET)%coordinates = 'TLON TLAT' + + ! These fields are required for CF compliance + ! dimensions (nx,ny,nverts) + var_nverts(n_lont_bnds) = coord_attributes('lont_bounds', & + 'longitude boundaries of T cells', 'degrees_east') + var_nverts(n_latt_bnds) = coord_attributes('latt_bounds', & + 'latitude boundaries of T cells', 'degrees_north') + var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds', & + 'longitude boundaries of U cells', 'degrees_east') + var_nverts(n_latu_bnds) = coord_attributes('latu_bounds', & + 'latitude boundaries of U cells', 'degrees_north') + + !----------------------------------------------------------------- + ! define attributes for time-invariant variables + !----------------------------------------------------------------- + + dimid2(1) = imtid + dimid2(2) = jmtid + + do i = 1, ncoord + status = pio_def_var(File, trim(coord_var(i)%short_name), lprecision, & + dimid2, varid) + status = pio_put_att(File,varid,'long_name',trim(coord_var(i)%long_name)) + status = pio_put_att(File, varid, 'units', trim(coord_var(i)%units)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + if (coord_var(i)%short_name == 'ULAT') then + status = pio_put_att(File,varid,'comment', & + trim('Latitude of NE corner of T grid cell')) + endif + if (f_bounds) then + status = pio_put_att(File, varid, 'bounds', trim(coord_bounds(i))) + endif + enddo + + ! 
Extra dimensions (NCAT, NZILYR, NZSLYR, NZBLYR, NZALYR, NFSD) + dimidex(1)=cmtid + dimidex(2)=kmtidi + dimidex(3)=kmtids + dimidex(4)=kmtidb + dimidex(5)=kmtida + dimidex(6)=fmtid + + do i = 1, nvarz + if (igrdz(i)) then + status = pio_def_var(File, trim(var_nz(i)%short_name), lprecision, & + (/dimidex(i)/), varid) + status = pio_put_att(File, varid, 'long_name', var_nz(i)%long_name) + status = pio_put_att(File, varid, 'units' , var_nz(i)%units) + endif + enddo + + ! Attributes for tmask defined separately, since it has no units + if (igrd(n_tmask)) then + status = pio_def_var(File, 'tmask', lprecision, dimid2, varid) + status = pio_put_att(File,varid, 'long_name', 'ocean grid mask') + status = pio_put_att(File, varid, 'coordinates', 'TLON TLAT') + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + status = pio_put_att(File,varid,'comment', '0 = land, 1 = ocean') + endif + if (igrd(n_blkmask)) then + status = pio_def_var(File, 'blkmask', lprecision, dimid2, varid) + status = pio_put_att(File,varid, 'long_name', 'ice grid block mask') + status = pio_put_att(File, varid, 'coordinates', 'TLON TLAT') + status = pio_put_att(File,varid,'comment', 'mytask + iblk/100') + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + endif + + do i = 3, nvar ! note: n_tmask=1, n_blkmask=2 + if (igrd(i)) then + status = pio_def_var(File, trim(var(i)%req%short_name), & + lprecision, dimid2, varid) + status = pio_put_att(File,varid, 'long_name', trim(var(i)%req%long_name)) + status = pio_put_att(File, varid, 'units', trim(var(i)%req%units)) + status = pio_put_att(File, varid, 'coordinates', trim(var(i)%coordinates)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + endif + enddo + + ! Fields with dimensions (nverts,nx,ny) + dimid_nverts(1) = nvertexid + dimid_nverts(2) = imtid + dimid_nverts(3) = jmtid + do i = 1, nvar_verts + if (f_bounds) then + status = pio_def_var(File, trim(var_nverts(i)%short_name), & + lprecision,dimid_nverts, varid) + status = & + pio_put_att(File,varid, 'long_name', trim(var_nverts(i)%long_name)) + status = & + pio_put_att(File, varid, 'units', trim(var_nverts(i)%units)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + endif + enddo + + !----------------------------------------------------------------- + ! define attributes for time-variant variables + !----------------------------------------------------------------- + + !----------------------------------------------------------------- + ! 
2D + !----------------------------------------------------------------- + + dimid3(1) = imtid + dimid3(2) = jmtid + dimid3(3) = timid + + do n=1,num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimid3, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged (except snapshot-only fields) + if (hist_avg .and. histfreq(ns) /= '1') then + if (TRIM(avail_hist_fields(n)%vname)/='sig1' & + .and.TRIM(avail_hist_fields(n)%vname)/='sig2' & + .and.TRIM(avail_hist_fields(n)%vname)/='sistreave' & + .and.TRIM(avail_hist_fields(n)%vname)/='sistremax' & + .and.TRIM(avail_hist_fields(n)%vname)/='sigP') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg & + .or. n==n_divu(ns) .or. n==n_shear(ns) & ! snapshots + .or. n==n_sig1(ns) .or. n==n_sig2(ns) & + .or. n==n_sigP(ns) .or. n==n_trsig(ns) & + .or. n==n_sistreave(ns) .or. n==n_sistremax(ns) & + .or. n==n_mlt_onset(ns) .or. n==n_frz_onset(ns) & + .or. n==n_hisnap(ns) .or. n==n_aisnap(ns)) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_2D + + !----------------------------------------------------------------- + ! 3D (category) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = cmtid + dimidz(4) = timid + + do n = n2D + 1, n3Dccum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Dc + + !----------------------------------------------------------------- + ! 
3D (ice layers) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtidi + dimidz(4) = timid + + do n = n3Dccum + 1, n3Dzcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Dz + + !----------------------------------------------------------------- + ! 3D (biology ice layers) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtidb + dimidz(4) = timid + + do n = n3Dzcum + 1, n3Dbcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Db + + !----------------------------------------------------------------- + ! 3D (biology snow layers) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = kmtida + dimidz(4) = timid + + do n = n3Dbcum + 1, n3Dacum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Da + + !----------------------------------------------------------------- + ! 3D (fsd) + !----------------------------------------------------------------- + + dimidz(1) = imtid + dimidz(2) = jmtid + dimidz(3) = fmtid + dimidz(4) = timid + + do n = n3Dacum + 1, n3Dfcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimidz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_3Df + + !----------------------------------------------------------------- + ! define attributes for 4D variables + ! time coordinate is dropped + !----------------------------------------------------------------- + + !----------------------------------------------------------------- + ! 4D (ice categories) + !----------------------------------------------------------------- + + dimidcz(1) = imtid + dimidcz(2) = jmtid + dimidcz(3) = kmtidi + dimidcz(4) = cmtid + dimidcz(5) = timid + + do n = n3Dfcum + 1, n4Dicum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimidcz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_4Di + + !----------------------------------------------------------------- + ! 4D (snow layers) + !----------------------------------------------------------------- + + dimidcz(1) = imtid + dimidcz(2) = jmtid + dimidcz(3) = kmtids + dimidcz(4) = cmtid + dimidcz(5) = timid + + do n = n4Dicum + 1, n4Dscum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimidcz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_4Ds + + + !----------------------------------------------------------------- + ! 4D (fsd layers) + !----------------------------------------------------------------- + + dimidcz(1) = imtid + dimidcz(2) = jmtid + dimidcz(3) = fmtid + dimidcz(4) = cmtid + dimidcz(5) = timid + + do n = n4Dscum + 1, n4Dfcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = pio_def_var(File, trim(avail_hist_fields(n)%vname), & + lprecision, dimidcz, varid) + status = pio_put_att(File,varid,'units', & + trim(avail_hist_fields(n)%vunit)) + status = pio_put_att(File,varid, 'long_name', & + trim(avail_hist_fields(n)%vdesc)) + status = pio_put_att(File,varid,'coordinates', & + trim(avail_hist_fields(n)%vcoord)) + status = pio_put_att(File,varid,'cell_measures', & + trim(avail_hist_fields(n)%vcellmeas)) + if (lprecision == pio_real) then + status = pio_put_att(File, varid, 'missing_value', spval) + status = pio_put_att(File, varid,'_FillValue',spval) + else + status = pio_put_att(File, varid, 'missing_value', spval_dbl) + status = pio_put_att(File, varid,'_FillValue',spval_dbl) + endif + + ! Add cell_methods attribute to variables if averaged + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_put_att(File,varid,'cell_methods','time: mean') + endif + + if (histfreq(ns) == '1' .or. .not. hist_avg) then + status = pio_put_att(File,varid,'time_rep','instantaneous') + else + status = pio_put_att(File,varid,'time_rep','averaged') + endif + endif + enddo ! num_avail_hist_fields_4Df + + !----------------------------------------------------------------- + ! global attributes + !----------------------------------------------------------------- + ! ... the user should change these to something useful ... + !----------------------------------------------------------------- +#ifdef CESMCOUPLED + status = pio_put_att(File,pio_global,'title',runid) +#else + title = 'sea ice model output for CICE' + status = pio_put_att(File,pio_global,'title',trim(title)) +#endif + title = 'Diagnostic and Prognostic Variables' + status = pio_put_att(File,pio_global,'contents',trim(title)) + + write(title,'(2a)') 'Los Alamos Sea Ice Model, ', trim(version_name) + status = pio_put_att(File,pio_global,'source',trim(title)) + + if (use_leap_years) then + write(title,'(a,i3,a)') 'This year has ',int(dayyr),' days' + else + write(title,'(a,i3,a)') 'All years have exactly ',int(dayyr),' days' + endif + status = pio_put_att(File,pio_global,'comment',trim(title)) + + write(title,'(a,i8.8)') 'File written on model date ',idate + status = pio_put_att(File,pio_global,'comment2',trim(title)) + + write(title,'(a,i6)') 'seconds elapsed into model date: ',sec + status = pio_put_att(File,pio_global,'comment3',trim(title)) + + title = 'CF-1.0' + status = & + pio_put_att(File,pio_global,'conventions',trim(title)) + + call date_and_time(date=current_date, time=current_time) + write(start_time,1000) current_date(1:4), current_date(5:6), & + current_date(7:8), current_time(1:2), & + current_time(3:4) +1000 format('This dataset was created on ', & + a,'-',a,'-',a,' at ',a,':',a) + status = pio_put_att(File,pio_global,'history',trim(start_time)) + + if (history_format == 'pio_pnetcdf') then + status = pio_put_att(File,pio_global,'io_flavor','io_pio pnetcdf') + else + status = pio_put_att(File,pio_global,'io_flavor','io_pio netcdf') + endif + + !----------------------------------------------------------------- + ! end define mode + !----------------------------------------------------------------- + + status = pio_enddef(File) + + !----------------------------------------------------------------- + ! 
write time variable + !----------------------------------------------------------------- + + status = pio_inq_varid(File,'time',varid) +!sgl status = pio_put_var(File,varid,(/1/),ltime) + status = pio_put_var(File,varid,(/1/),ltime2) + + !----------------------------------------------------------------- + ! write time_bounds info + !----------------------------------------------------------------- + + if (hist_avg .and. histfreq(ns) /= '1') then + status = pio_inq_varid(File,'time_bounds',varid) + time_bounds=(/time_beg(ns),time_end(ns)/) + bnd_start = (/1,1/) + bnd_length = (/2,1/) + status = pio_put_var(File,varid,ival=time_bounds, & + start=bnd_start(:),count=bnd_length(:)) + endif + + !----------------------------------------------------------------- + ! write coordinate variables + !----------------------------------------------------------------- + + allocate(workr2(nx_block,ny_block,nblocks)) + + do i = 1,ncoord + status = pio_inq_varid(File, coord_var(i)%short_name, varid) + SELECT CASE (coord_var(i)%short_name) + CASE ('TLON') + ! Convert T grid longitude from -180 -> 180 to 0 to 360 + workr2(:,:,:) = mod(tlon(:,:,1:nblocks)*rad_to_deg + c360, c360) + CASE ('TLAT') + workr2(:,:,:) = tlat(:,:,1:nblocks)*rad_to_deg + CASE ('ULON') + workr2(:,:,:) = ulon(:,:,1:nblocks)*rad_to_deg + CASE ('ULAT') + workr2(:,:,:) = ulat(:,:,1:nblocks)*rad_to_deg + END SELECT + call pio_write_darray(File, varid, iodesc2d, & + workr2, status, fillval=spval_dbl) + enddo + + ! Extra dimensions (NCAT, NFSD, VGRD*) + + do i = 1, nvarz + if (igrdz(i)) then + status = pio_inq_varid(File, var_nz(i)%short_name, varid) + SELECT CASE (var_nz(i)%short_name) + CASE ('NCAT') + status = pio_put_var(File, varid, hin_max(1:ncat_hist)) + CASE ('NFSD') + status = pio_put_var(File, varid, floe_rad_c(1:nfsd_hist)) + CASE ('VGRDi') + status = pio_put_var(File, varid, (/(k, k=1,nzilyr)/)) + CASE ('VGRDs') + status = pio_put_var(File, varid, (/(k, k=1,nzslyr)/)) + CASE ('VGRDb') + status = pio_put_var(File, varid, (/(k, k=1,nzblyr)/)) + CASE ('VGRDa') + status = pio_put_var(File, varid, (/(k, k=1,nzalyr)/)) + END SELECT + endif + enddo + + !----------------------------------------------------------------- + ! write grid masks, area and rotation angle + !----------------------------------------------------------------- + +! if (igrd(n_tmask)) then +! status = pio_inq_varid(File, 'tmask', varid) +! call pio_write_darray(File, varid, iodesc2d, & +! hm(:,:,1:nblocks), status, fillval=spval_dbl) +! endif +! if (igrd(n_blkmask)) then +! status = pio_inq_varid(File, 'blkmask', varid) +! call pio_write_darray(File, varid, iodesc2d, & +! bm(:,:,1:nblocks), status, fillval=spval_dbl) +! endif + + do i = 1, nvar ! 
note: n_tmask=1, n_blkmask=2 + if (igrd(i)) then + SELECT CASE (var(i)%req%short_name) + CASE ('tmask') + workr2 = hm(:,:,1:nblocks) + CASE ('blkmask') + workr2 = bm(:,:,1:nblocks) + CASE ('tarea') + workr2 = tarea(:,:,1:nblocks) + CASE ('uarea') + workr2 = uarea(:,:,1:nblocks) + CASE ('dxu') + workr2 = dxu(:,:,1:nblocks) + CASE ('dyu') + workr2 = dyu(:,:,1:nblocks) + CASE ('dxt') + workr2 = dxt(:,:,1:nblocks) + CASE ('dyt') + workr2 = dyt(:,:,1:nblocks) + CASE ('HTN') + workr2 = HTN(:,:,1:nblocks) + CASE ('HTE') + workr2 = HTE(:,:,1:nblocks) + CASE ('ANGLE') + workr2 = ANGLE(:,:,1:nblocks) + CASE ('ANGLET') + workr2 = ANGLET(:,:,1:nblocks) + END SELECT + status = pio_inq_varid(File, var(i)%req%short_name, varid) + call pio_write_darray(File, varid, iodesc2d, & + workr2, status, fillval=spval_dbl) + endif + enddo + + !---------------------------------------------------------------- + ! Write coordinates of grid box vertices + !---------------------------------------------------------------- + + if (f_bounds) then + allocate(workr3v(nverts,nx_block,ny_block,nblocks)) + workr3v (:,:,:,:) = c0 + do i = 1, nvar_verts + SELECT CASE (var_nverts(i)%short_name) + CASE ('lont_bounds') + do ivertex = 1, nverts + workr3v(ivertex,:,:,:) = lont_bounds(ivertex,:,:,1:nblocks) + enddo + CASE ('latt_bounds') + do ivertex = 1, nverts + workr3v(ivertex,:,:,:) = latt_bounds(ivertex,:,:,1:nblocks) + enddo + CASE ('lonu_bounds') + do ivertex = 1, nverts + workr3v(ivertex,:,:,:) = lonu_bounds(ivertex,:,:,1:nblocks) + enddo + CASE ('latu_bounds') + do ivertex = 1, nverts + workr3v(ivertex,:,:,:) = latu_bounds(ivertex,:,:,1:nblocks) + enddo + END SELECT + + status = pio_inq_varid(File, var_nverts(i)%short_name, varid) + call pio_write_darray(File, varid, iodesc3dv, & + workr3v, status, fillval=spval_dbl) + enddo + deallocate(workr3v) + endif ! f_bounds + + + !----------------------------------------------------------------- + ! write variable data + !----------------------------------------------------------------- + + ! 2D + do n=1,num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR getting varid for '//avail_hist_fields(n)%vname) + workr2(:,:,:) = a2D(:,:,n,1:nblocks) +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc2d,& + workr2, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_2D + + deallocate(workr2) + + ! 3D (category) + allocate(workr3(nx_block,ny_block,nblocks,ncat_hist)) + do n = n2D + 1, n3Dccum + nn = n - n2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, ncat_hist + workr3(:,:,j,i) = a3Dc(:,:,i,nn,j) + enddo + enddo +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc3dc,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Dc + deallocate(workr3) + + ! 
3D (vertical ice) + allocate(workr3(nx_block,ny_block,nblocks,nzilyr)) + do n = n3Dccum+1, n3Dzcum + nn = n - n3Dccum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, nzilyr + workr3(:,:,j,i) = a3Dz(:,:,i,nn,j) + enddo + enddo +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc3di,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Dz + deallocate(workr3) + + ! 3D (vertical ice biology) + allocate(workr3(nx_block,ny_block,nblocks,nzblyr)) + do n = n3Dzcum+1, n3Dbcum + nn = n - n3Dzcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, nzblyr + workr3(:,:,j,i) = a3Db(:,:,i,nn,j) + enddo + enddo +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc3db,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Db + deallocate(workr3) + + ! 3D (vertical snow biology) + allocate(workr3(nx_block,ny_block,nblocks,nzalyr)) + do n = n3Dbcum+1, n3Dacum + nn = n - n3Dbcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, nzalyr + workr3(:,:,j,i) = a3Da(:,:,i,nn,j) + enddo + enddo +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc3da,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Db + deallocate(workr3) + + ! 3D (fsd) + allocate(workr3(nx_block,ny_block,nblocks,nfsd_hist)) + do n = n3Dacum+1, n3Dfcum + nn = n - n3Dacum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, nfsd_hist + workr3(:,:,j,i) = a3Df(:,:,i,nn,j) + enddo + enddo +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc3df,& + workr3, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_3Df + deallocate(workr3) + + allocate(workr4(nx_block,ny_block,nblocks,ncat_hist,nzilyr)) + ! 4D (categories, fsd) + do n = n3Dfcum+1, n4Dicum + nn = n - n3Dfcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, ncat_hist + do k = 1, nzilyr + workr4(:,:,j,i,k) = a4Di(:,:,k,i,nn,j) + enddo ! k + enddo ! i + enddo ! j +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc4di,& + workr4, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_4Di + deallocate(workr4) + + allocate(workr4(nx_block,ny_block,nblocks,ncat_hist,nzslyr)) + ! 4D (categories, vertical ice) + do n = n4Dicum+1, n4Dscum + nn = n - n4Dicum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, ncat_hist + do k = 1, nzslyr + workr4(:,:,j,i,k) = a4Ds(:,:,k,i,nn,j) + enddo ! k + enddo ! i + enddo ! j +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc4ds,& + workr4, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_4Ds + deallocate(workr4) + + allocate(workr4(nx_block,ny_block,nblocks,ncat_hist,nfsd_hist)) + ! 4D (categories, vertical ice) + do n = n4Dscum+1, n4Dfcum + nn = n - n4Dscum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + if (status /= pio_noerr) call abort_ice(subname// & + 'ERROR: getting varid for '//avail_hist_fields(n)%vname) + do j = 1, nblocks + do i = 1, ncat_hist + do k = 1, nfsd_hist + workr4(:,:,j,i,k) = a4Df(:,:,k,i,nn,j) + enddo ! k + enddo ! i + enddo ! j +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_write_darray(File, varid, iodesc4df,& + workr4, status, fillval=spval_dbl) + endif + enddo ! num_avail_hist_fields_4Df + deallocate(workr4) + +! similarly for num_avail_hist_fields_4Db (define workr4b, iodesc4db) + + + !----------------------------------------------------------------- + ! clean-up PIO descriptors + !----------------------------------------------------------------- + + call pio_freedecomp(File,iodesc2d) + call pio_freedecomp(File,iodesc3dv) + call pio_freedecomp(File,iodesc3dc) + call pio_freedecomp(File,iodesc3di) + call pio_freedecomp(File,iodesc3db) + call pio_freedecomp(File,iodesc3da) + call pio_freedecomp(File,iodesc3df) + call pio_freedecomp(File,iodesc4di) + call pio_freedecomp(File,iodesc4ds) + call pio_freedecomp(File,iodesc4df) + + !----------------------------------------------------------------- + ! 
close output dataset + !----------------------------------------------------------------- + + call pio_closefile(File) + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) + endif + + end subroutine ice_write_hist + +!======================================================================= + + end module ice_history_write + +!======================================================================= diff --git a/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 new file mode 100644 index 000000000..063934fee --- /dev/null +++ b/cicecore/cicedynB/infrastructure/io/io_pio2/ice_restart.F90 @@ -0,0 +1,882 @@ +!======================================================================= +! +! Read and write ice model restart files using pio interfaces. +! authors David A Bailey, NCAR + + module ice_restart + + use ice_broadcast + use ice_exit, only: abort_ice + use ice_fileunits, only: nu_diag, nu_restart, nu_rst_pointer + use ice_kinds_mod + use ice_restart_shared, only: & + restart, restart_ext, restart_dir, restart_file, pointer_file, & + runid, runtype, use_restart_time, restart_format, lcdf64, lenstr, & + restart_coszen + use ice_pio + use pio + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_indices + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_sizes + + implicit none + private + public :: init_restart_write, init_restart_read, & + read_restart_field, write_restart_field, final_restart + + type(file_desc_t) :: File + type(var_desc_t) :: vardesc + + type(io_desc_t) :: iodesc2d + type(io_desc_t) :: iodesc3d_ncat + +!======================================================================= + + contains + +!======================================================================= + +! Sets up restart file for reading. +! author David A Bailey, NCAR + + subroutine init_restart_read(ice_ic) + + use ice_calendar, only: istep0, istep1, time, time_forc, nyr, month, & + mday, sec, npt + use ice_communicate, only: my_task, master_task + use ice_domain_size, only: ncat + use ice_read_write, only: ice_open + + character(len=char_len_long), intent(in), optional :: ice_ic + + ! local variables + + character(len=char_len_long) :: & + filename, filename0 + + integer (kind=int_kind) :: status + + character(len=*), parameter :: subname = '(init_restart_read)' + + if (present(ice_ic)) then + filename = trim(ice_ic) + else + if (my_task == master_task) then + open(nu_rst_pointer,file=pointer_file) + read(nu_rst_pointer,'(a)') filename0 + filename = trim(filename0) + close(nu_rst_pointer) + write(nu_diag,*) 'Read ',pointer_file(1:lenstr(pointer_file)) + endif + call broadcast_scalar(filename, master_task) + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Using restart dump=', trim(filename) + end if + + if (restart_format == 'pio') then + File%fh=-1 + call ice_pio_init(mode='read', filename=trim(filename), File=File) + + call ice_pio_initdecomp(iodesc=iodesc2d) + call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat,remap=.true.) 
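+      ! Note: the two PIO decompositions built above are shared by all of
+      ! the restart fields in this file.  iodesc2d maps 2D fields on the
+      ! nx_global x ny_global grid; iodesc3d_ncat maps fields that carry an
+      ! additional ncat (thickness category) dimension.  The same descriptors
+      ! are reused by read_restart_field/write_restart_field and released in
+      ! final_restart.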
+ + if (use_restart_time) then + status = pio_get_att(File, pio_global, 'istep1', istep0) + status = pio_get_att(File, pio_global, 'time', time) + status = pio_get_att(File, pio_global, 'time_forc', time_forc) + call pio_seterrorhandling(File, PIO_BCAST_ERROR) + status = pio_get_att(File, pio_global, 'nyr', nyr) + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) + if (status == PIO_noerr) then + status = pio_get_att(File, pio_global, 'month', month) + status = pio_get_att(File, pio_global, 'mday', mday) + status = pio_get_att(File, pio_global, 'sec', sec) + endif + endif ! use namelist values if use_restart_time = F + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Restart read at istep=',istep0,time,time_forc + endif + + call broadcast_scalar(istep0,master_task) + call broadcast_scalar(time,master_task) + call broadcast_scalar(time_forc,master_task) + call broadcast_scalar(nyr,master_task) + + istep1 = istep0 + + ! if runid is bering then need to correct npt for istep0 + if (trim(runid) == 'bering') then + npt = npt - istep0 + endif + + end subroutine init_restart_read + +!======================================================================= + +! Sets up restart file for writing. +! author David A Bailey, NCAR + + subroutine init_restart_write(filename_spec) + + use ice_calendar, only: sec, month, mday, nyr, istep1, & + time, time_forc, year_init + use ice_communicate, only: my_task, master_task + use ice_domain_size, only: nx_global, ny_global, ncat, nilyr, nslyr, & + n_iso, n_aero, nblyr, n_zaero, n_algae, n_doc, & + n_dic, n_don, n_fed, n_fep + use ice_dyn_shared, only: kdyn + use ice_arrays_column, only: oceanmixed_ice + + logical (kind=log_kind) :: & + solve_zsal, skl_bgc, z_tracers + + logical (kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero, tr_pond_cesm, & + tr_pond_topo, tr_pond_lvl, tr_brine, & + tr_bgc_N, tr_bgc_C, tr_bgc_Nit, & + tr_bgc_Sil, tr_bgc_DMS, & + tr_bgc_chl, tr_bgc_Am, & + tr_bgc_PON, tr_bgc_DON, & + tr_zaero, tr_bgc_Fe, & + tr_bgc_hum + + integer (kind=int_kind) :: & + nbtrcr + + character(len=char_len_long), intent(in), optional :: filename_spec + + ! local variables + + integer (kind=int_kind) :: & + iyear, imonth, iday ! year, month, day + + character(len=char_len_long) :: filename + + integer (kind=int_kind) :: dimid_ni, dimid_nj, dimid_ncat, & + dimid_nilyr, dimid_nslyr, dimid_naero + + integer (kind=int_kind), allocatable :: dims(:) + + integer (kind=int_kind) :: & + k, n, & ! loop index + status ! status variable from netCDF routine + + character (len=3) :: nchar, ncharb + + character(len=*), parameter :: subname = '(init_restart_write)' + + call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) + call icepack_query_tracer_flags( & + tr_iage_out=tr_iage, tr_FY_out=tr_FY, tr_lvl_out=tr_lvl, & + tr_iso_out=tr_iso, tr_aero_out=tr_aero, tr_pond_cesm_out=tr_pond_cesm, & + tr_pond_topo_out=tr_pond_topo, tr_pond_lvl_out=tr_pond_lvl, tr_brine_out=tr_brine, & + tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, tr_bgc_Nit_out=tr_bgc_Nit, & + tr_bgc_Sil_out=tr_bgc_Sil, tr_bgc_DMS_out=tr_bgc_DMS, & + tr_bgc_chl_out=tr_bgc_chl, tr_bgc_Am_out=tr_bgc_Am, & + tr_bgc_PON_out=tr_bgc_PON, tr_bgc_DON_out=tr_bgc_DON, & + tr_zaero_out=tr_zaero, tr_bgc_Fe_out=tr_bgc_Fe, & + tr_bgc_hum_out=tr_bgc_hum) + call icepack_query_parameters(solve_zsal_out=solve_zsal, skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! 
construct path/file + if (present(filename_spec)) then + filename = trim(filename_spec) + else + iyear = nyr + year_init - 1 + imonth = month + iday = mday + + write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & + restart_dir(1:lenstr(restart_dir)), & + restart_file(1:lenstr(restart_file)),'.', & + iyear,'-',month,'-',mday,'-',sec + end if + + if (restart_format /= 'bin') filename = trim(filename) // '.nc' + + ! write pointer (path/file) + if (my_task == master_task) then + open(nu_rst_pointer,file=pointer_file) + write(nu_rst_pointer,'(a)') filename + close(nu_rst_pointer) + endif + + if (restart_format == 'pio') then + + File%fh=-1 + call ice_pio_init(mode='write',filename=trim(filename), File=File, & + clobber=.true., cdf64=lcdf64 ) + + status = pio_put_att(File,pio_global,'istep1',istep1) + status = pio_put_att(File,pio_global,'time',time) + status = pio_put_att(File,pio_global,'time_forc',time_forc) + status = pio_put_att(File,pio_global,'nyr',nyr) + status = pio_put_att(File,pio_global,'month',month) + status = pio_put_att(File,pio_global,'mday',mday) + status = pio_put_att(File,pio_global,'sec',sec) + + status = pio_def_dim(File,'ni',nx_global,dimid_ni) + status = pio_def_dim(File,'nj',ny_global,dimid_nj) + status = pio_def_dim(File,'ncat',ncat,dimid_ncat) + + !----------------------------------------------------------------- + ! 2D restart fields + !----------------------------------------------------------------- + + allocate(dims(2)) + + dims(1) = dimid_ni + dims(2) = dimid_nj + + call define_rest_field(File,'uvel',dims) + call define_rest_field(File,'vvel',dims) + if (restart_coszen) call define_rest_field(File,'coszen',dims) + call define_rest_field(File,'scale_factor',dims) + call define_rest_field(File,'swvdr',dims) + call define_rest_field(File,'swvdf',dims) + call define_rest_field(File,'swidr',dims) + call define_rest_field(File,'swidf',dims) + + call define_rest_field(File,'strocnxT',dims) + call define_rest_field(File,'strocnyT',dims) + + call define_rest_field(File,'stressp_1',dims) + call define_rest_field(File,'stressp_2',dims) + call define_rest_field(File,'stressp_3',dims) + call define_rest_field(File,'stressp_4',dims) + + call define_rest_field(File,'stressm_1',dims) + call define_rest_field(File,'stressm_2',dims) + call define_rest_field(File,'stressm_3',dims) + call define_rest_field(File,'stressm_4',dims) + + call define_rest_field(File,'stress12_1',dims) + call define_rest_field(File,'stress12_2',dims) + call define_rest_field(File,'stress12_3',dims) + call define_rest_field(File,'stress12_4',dims) + + call define_rest_field(File,'iceumask',dims) + + if (oceanmixed_ice) then + call define_rest_field(File,'sst',dims) + call define_rest_field(File,'frzmlt',dims) + endif + + if (tr_FY) then + call define_rest_field(File,'frz_onset',dims) + end if + + if (kdyn == 2) then + call define_rest_field(File,'a11_1',dims) + call define_rest_field(File,'a11_2',dims) + call define_rest_field(File,'a11_3',dims) + call define_rest_field(File,'a11_4',dims) + call define_rest_field(File,'a12_1',dims) + call define_rest_field(File,'a12_2',dims) + call define_rest_field(File,'a12_3',dims) + call define_rest_field(File,'a12_4',dims) + endif + + if (tr_pond_lvl) then + call define_rest_field(File,'fsnow',dims) + endif + + if (nbtrcr > 0) then + if (tr_bgc_N) then + do k=1,n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'algalN'//trim(nchar),dims) + enddo + endif + if (tr_bgc_C) then + do k=1,n_doc + write(nchar,'(i3.3)') k + call 
define_rest_field(File,'doc'//trim(nchar),dims) + enddo + do k=1,n_dic + write(nchar,'(i3.3)') k + call define_rest_field(File,'dic'//trim(nchar),dims) + enddo + endif + call define_rest_field(File,'nit' ,dims) + if (tr_bgc_Am) & + call define_rest_field(File,'amm' ,dims) + if (tr_bgc_Sil) & + call define_rest_field(File,'sil' ,dims) + if (tr_bgc_hum) & + call define_rest_field(File,'hum' ,dims) + if (tr_bgc_DMS) then + call define_rest_field(File,'dmsp' ,dims) + call define_rest_field(File,'dms' ,dims) + endif + if (tr_bgc_DON) then + do k=1,n_don + write(nchar,'(i3.3)') k + call define_rest_field(File,'don'//trim(nchar),dims) + enddo + endif + if (tr_bgc_Fe ) then + do k=1,n_fed + write(nchar,'(i3.3)') k + call define_rest_field(File,'fed'//trim(nchar),dims) + enddo + do k=1,n_fep + write(nchar,'(i3.3)') k + call define_rest_field(File,'fep'//trim(nchar),dims) + enddo + endif + if (tr_zaero) then + do k=1,n_zaero + write(nchar,'(i3.3)') k + call define_rest_field(File,'zaeros'//trim(nchar),dims) + enddo + endif + endif !nbtrcr + + if (solve_zsal) call define_rest_field(File,'sss',dims) + + deallocate(dims) + + !----------------------------------------------------------------- + ! 3D restart fields (ncat) + !----------------------------------------------------------------- + + allocate(dims(3)) + + dims(1) = dimid_ni + dims(2) = dimid_nj + dims(3) = dimid_ncat + + call define_rest_field(File,'aicen',dims) + call define_rest_field(File,'vicen',dims) + call define_rest_field(File,'vsnon',dims) + call define_rest_field(File,'Tsfcn',dims) + + if (tr_iage) then + call define_rest_field(File,'iage',dims) + end if + + if (tr_FY) then + call define_rest_field(File,'FY',dims) + end if + + if (tr_lvl) then + call define_rest_field(File,'alvl',dims) + call define_rest_field(File,'vlvl',dims) + end if + + if (tr_pond_cesm) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + end if + + if (tr_pond_topo) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + call define_rest_field(File,'ipnd',dims) + end if + + if (tr_pond_lvl) then + call define_rest_field(File,'apnd',dims) + call define_rest_field(File,'hpnd',dims) + call define_rest_field(File,'ipnd',dims) + call define_rest_field(File,'dhs',dims) + call define_rest_field(File,'ffrac',dims) + end if + + if (tr_brine) then + call define_rest_field(File,'fbrn',dims) + call define_rest_field(File,'first_ice',dims) + endif + + if (skl_bgc) then + do k = 1, n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_N'//trim(nchar) ,dims) + enddo + if (tr_bgc_C) then + ! do k = 1, n_algae + ! write(nchar,'(i3.3)') k + ! call define_rest_field(File,'bgc_C'//trim(nchar) ,dims) + ! 
enddo + do k = 1, n_doc + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DOC'//trim(nchar) ,dims) + enddo + do k = 1, n_dic + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DIC'//trim(nchar) ,dims) + enddo + endif + if (tr_bgc_chl) then + do k = 1, n_algae + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_chl'//trim(nchar) ,dims) + enddo + endif + call define_rest_field(File,'bgc_Nit' ,dims) + if (tr_bgc_Am) & + call define_rest_field(File,'bgc_Am' ,dims) + if (tr_bgc_Sil) & + call define_rest_field(File,'bgc_Sil' ,dims) + if (tr_bgc_hum) & + call define_rest_field(File,'bgc_hum' ,dims) + if (tr_bgc_DMS) then + call define_rest_field(File,'bgc_DMSPp',dims) + call define_rest_field(File,'bgc_DMSPd',dims) + call define_rest_field(File,'bgc_DMS' ,dims) + endif + if (tr_bgc_PON) & + call define_rest_field(File,'bgc_PON' ,dims) + if (tr_bgc_DON) then + do k = 1, n_don + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DON'//trim(nchar) ,dims) + enddo + endif + if (tr_bgc_Fe ) then + do k = 1, n_fed + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fed'//trim(nchar) ,dims) + enddo + do k = 1, n_fep + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fep'//trim(nchar) ,dims) + enddo + endif + endif !skl_bgc + if (solve_zsal) & + call define_rest_field(File,'Rayleigh',dims) + + !----------------------------------------------------------------- + ! 4D restart fields, written as layers of 3D + !----------------------------------------------------------------- + + do k=1,nilyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'sice'//trim(nchar),dims) + call define_rest_field(File,'qice'//trim(nchar),dims) + enddo + + do k=1,nslyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'qsno'//trim(nchar),dims) + enddo + + if (tr_iso) then + do k=1,n_iso + write(nchar,'(i3.3)') k + call define_rest_field(File,'isosno'//nchar, dims) + call define_rest_field(File,'isoice'//nchar, dims) + enddo + endif + + if (tr_aero) then + do k=1,n_aero + write(nchar,'(i3.3)') k + call define_rest_field(File,'aerosnossl'//nchar, dims) + call define_rest_field(File,'aerosnoint'//nchar, dims) + call define_rest_field(File,'aeroicessl'//nchar, dims) + call define_rest_field(File,'aeroiceint'//nchar, dims) + enddo + endif + + if (solve_zsal) then + do k = 1, nblyr + write(nchar,'(i3.3)') k + call define_rest_field(File,'zSalinity'//trim(nchar),dims) + enddo + endif + if (z_tracers) then + if (tr_zaero) then + do n = 1, n_zaero + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'zaero'//trim(ncharb)//trim(nchar),dims) + enddo !k + enddo !n + endif !tr_zaero + if (tr_bgc_Nit) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Nit'//trim(nchar),dims) + enddo + endif + if (tr_bgc_N) then + do n = 1, n_algae + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_N'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_C) then + ! do n = 1, n_algae + ! write(ncharb,'(i3.3)') n + ! do k = 1, nblyr+3 + ! write(nchar,'(i3.3)') k + ! call + ! define_rest_field(File,'bgc_C'//trim(ncharb)//trim(nchar),dims) + ! enddo + ! 
enddo + do n = 1, n_doc + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DOC'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + do n = 1, n_dic + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DIC'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_chl) then + do n = 1, n_algae + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_chl'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_Am) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Am'//trim(nchar),dims) + enddo + endif + if (tr_bgc_Sil) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Sil'//trim(nchar),dims) + enddo + endif + if (tr_bgc_hum) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_hum'//trim(nchar),dims) + enddo + endif + if (tr_bgc_DMS) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DMSPp'//trim(nchar),dims) + call define_rest_field(File,'bgc_DMSPd'//trim(nchar),dims) + call define_rest_field(File,'bgc_DMS'//trim(nchar),dims) + enddo + endif + if (tr_bgc_PON) then + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_PON'//trim(nchar),dims) + enddo + endif + if (tr_bgc_DON) then + do n = 1, n_don + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_DON'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + if (tr_bgc_Fe ) then + do n = 1, n_fed + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fed'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + do n = 1, n_fep + write(ncharb,'(i3.3)') n + do k = 1, nblyr+3 + write(nchar,'(i3.3)') k + call define_rest_field(File,'bgc_Fep'//trim(ncharb)//trim(nchar),dims) + enddo + enddo + endif + do k = 1, nbtrcr + write(nchar,'(i3.3)') k + call define_rest_field(File,'zbgc_frac'//trim(nchar),dims) + enddo + endif !z_tracers + + deallocate(dims) + status = pio_enddef(File) + + call ice_pio_initdecomp(iodesc=iodesc2d) + call ice_pio_initdecomp(ndim3=ncat , iodesc=iodesc3d_ncat, remap=.true.) + + endif + + if (my_task == master_task) then + write(nu_diag,*) 'Writing ',filename(1:lenstr(filename)) + endif + + end subroutine init_restart_write + +!======================================================================= + +! Reads a single restart field +! author David A Bailey, NCAR + + subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & + field_loc, field_type) + + use ice_blocks, only: nx_block, ny_block + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, field_loc_center + use ice_boundary, only: ice_HaloUpdate + use ice_domain, only: halo_info, distrb_info, nblocks + use ice_domain_size, only: max_blocks, ncat + use ice_global_reductions, only: global_minval, global_maxval, global_sum + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + ndim3 , & ! third dimension + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,ndim3,max_blocks), intent(inout) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! 
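The restart filename assembled in init_restart_write follows the pattern <restart_dir><restart_file>.YYYY-MM-DD-SSSSS, with '.nc' appended for non-binary formats. The following self-contained sketch only illustrates what that '(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)' edit descriptor produces; the directory, file stem and date values are made up and are not CICE defaults.

   program restart_name_demo
     ! Illustrative only: reproduces the filename format used by
     ! init_restart_write.  All values below are hypothetical.
     implicit none
     character(len=256) :: filename
     integer :: iyear, imonth, iday, isec

     iyear  = 2005      ! model year (nyr + year_init - 1)
     imonth = 3         ! model month
     iday   = 7         ! day of month
     isec   = 43200     ! elapsed seconds in the day

     write(filename,'(a,a,a,i4.4,a,i2.2,a,i2.2,a,i5.5)') &
          './restart/', 'iced', '.', iyear, '-', imonth, '-', iday, '-', isec

     ! prints ./restart/iced.2005-03-07-43200.nc
     print *, trim(filename)//'.nc'
   end program restart_name_demo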
if true, write diagnostic output + + character (len=*), intent(in) :: vname + + integer (kind=int_kind), optional, intent(in) :: & + field_loc, & ! location of field on staggered grid + field_type ! type of field (scalar, vector, angle) + + ! local variables + + integer (kind=int_kind) :: & + j, & ! dimension counter + n, & ! number of dimensions for variable + ndims, & ! number of variable dimensions + status ! status variable from netCDF routine + + real (kind=dbl_kind) :: amin,amax,asum + + character(len=*), parameter :: subname = '(read_restart_field)' + + if (restart_format == "pio") then + if (my_task == master_task) & + write(nu_diag,*)'Parallel restart file read: ',vname + + call pio_seterrorhandling(File, PIO_BCAST_ERROR) + + status = pio_inq_varid(File,trim(vname),vardesc) + + if (status /= 0) then + call abort_ice(subname//"ERROR: CICE restart? Missing variable: "//trim(vname)) + endif + + status = pio_inq_varndims(File, vardesc, ndims) + + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) + +! if (ndim3 == ncat .and. ncat>1) then + if (ndim3 == ncat .and. ndims == 3) then + call pio_read_darray(File, vardesc, iodesc3d_ncat, work, status) + if (present(field_loc)) then + do n=1,ndim3 + call ice_HaloUpdate (work(:,:,n,:), halo_info, & + field_loc, field_type) + enddo + endif +! elseif (ndim3 == 1) then + elseif (ndim3 == 1 .and. ndims == 2) then + call pio_read_darray(File, vardesc, iodesc2d, work, status) + if (present(field_loc)) then + call ice_HaloUpdate (work(:,:,1,:), halo_info, & + field_loc, field_type) + endif + else + write(nu_diag,*) "ndim3 not supported ",ndim3 + endif + + if (diag) then + if (ndim3 > 1) then + do n=1,ndim3 + amin = global_minval(work(:,:,n,:),distrb_info) + amax = global_maxval(work(:,:,n,:),distrb_info) + asum = global_sum(work(:,:,n,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + enddo + else + amin = global_minval(work(:,:,1,:),distrb_info) + amax = global_maxval(work(:,:,1,:),distrb_info) + asum = global_sum(work(:,:,1,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + write(nu_diag,*) '' + endif + endif + + endif + else + call abort_ice(subname//"ERROR: Invalid restart_format: "//trim(restart_format)) + endif + + end subroutine read_restart_field + +!======================================================================= + +! Writes a single restart field. +! author David A Bailey, NCAR + + subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) + + use ice_blocks, only: nx_block, ny_block + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, field_loc_center + use ice_domain, only: distrb_info, nblocks + use ice_domain_size, only: max_blocks, ncat + use ice_global_reductions, only: global_minval, global_maxval, global_sum + + integer (kind=int_kind), intent(in) :: & + nu , & ! unit number + ndim3 , & ! third dimension + nrec ! record number (0 for sequential access) + + real (kind=dbl_kind), dimension(nx_block,ny_block,ndim3,max_blocks), intent(in) :: & + work ! input array (real, 8-byte) + + character (len=4), intent(in) :: & + atype ! format for output array + ! (real/integer, 4-byte/8-byte) + + logical (kind=log_kind), intent(in) :: & + diag ! if true, write diagnostic output + + character (len=*), intent(in) :: vname + + ! local variables + + integer (kind=int_kind) :: & + j, & ! 
dimension counter + n, & ! dimension counter + ndims, & ! number of variable dimensions + status ! status variable from netCDF routine + + real (kind=dbl_kind) :: amin,amax,asum + + character(len=*), parameter :: subname = '(write_restart_field)' + + if (restart_format == "pio") then + if (my_task == master_task) & + write(nu_diag,*)'Parallel restart file write: ',vname + + status = pio_inq_varid(File,trim(vname),vardesc) + + status = pio_inq_varndims(File, vardesc, ndims) + + if (ndims==3) then + call pio_write_darray(File, vardesc, iodesc3d_ncat,work(:,:,:,1:nblocks), & + status, fillval=c0) + elseif (ndims == 2) then + call pio_write_darray(File, vardesc, iodesc2d, work(:,:,1,1:nblocks), & + status, fillval=c0) + else + write(nu_diag,*) "ndims not supported",ndims,ndim3 + endif + + if (diag) then + if (ndim3 > 1) then + do n=1,ndim3 + amin = global_minval(work(:,:,n,:),distrb_info) + amax = global_maxval(work(:,:,n,:),distrb_info) + asum = global_sum(work(:,:,n,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + enddo + else + amin = global_minval(work(:,:,1,:),distrb_info) + amax = global_maxval(work(:,:,1,:),distrb_info) + asum = global_sum(work(:,:,1,:), distrb_info, field_loc_center) + if (my_task == master_task) then + write(nu_diag,*) ' min and max =', amin, amax + write(nu_diag,*) ' sum =',asum + endif + endif + endif + else + call abort_ice(subname//"ERROR: Invalid restart_format: "//trim(restart_format)) + endif + + end subroutine write_restart_field + +!======================================================================= + +! Finalize the restart file. +! author David A Bailey, NCAR + + subroutine final_restart() + + use ice_calendar, only: istep1, time, time_forc + use ice_communicate, only: my_task, master_task + + character(len=*), parameter :: subname = '(final_restart)' + + if (restart_format == 'pio') then + call PIO_freeDecomp(File,iodesc2d) + call PIO_freeDecomp(File,iodesc3d_ncat) + call pio_closefile(File) + endif + + if (my_task == master_task) & + write(nu_diag,*) 'Restart read/written ',istep1,time,time_forc + + end subroutine final_restart + +!======================================================================= + +! Defines a restart field +! author David A Bailey, NCAR + + subroutine define_rest_field(File, vname, dims) + + type(file_desc_t) , intent(in) :: File + character (len=*) , intent(in) :: vname + integer (kind=int_kind), intent(in) :: dims(:) + + integer (kind=int_kind) :: & + status ! status variable from netCDF routine + + character(len=*), parameter :: subname = '(define_rest_field)' + + status = pio_def_var(File,trim(vname),pio_double,dims,vardesc) + + end subroutine define_rest_field + +!======================================================================= + + end module ice_restart + +!======================================================================= diff --git a/cicecore/drivers/direct/hadgem3/CICE.F90 b/cicecore/drivers/direct/hadgem3/CICE.F90 index fac02de9b..72bf1b747 100644 --- a/cicecore/drivers/direct/hadgem3/CICE.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE.F90 @@ -1,21 +1,21 @@ !======================================================================= -! Copyright (c) 2023, Triad National Security, LLC +! Copyright (c) 2020, Triad National Security, LLC ! All rights reserved. -! -! Copyright 2023. Triad National Security, LLC. This software was -! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! +! 
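Throughout the restart writer above, per-layer and per-tracer fields are flattened to 3D (ni, nj, ncat) variables whose names carry a zero-padded index built with the i3.3 edit descriptor, e.g. 'sice001'...'sice007' for ice layers, or a double suffix such as 'zaero001001' for z-tracers. The sketch below is a self-contained illustration of that naming convention only; the layer and tracer counts are example values, not a real CICE configuration.

   program restart_layer_names_demo
     ! Illustrative only: the 'name'//nchar convention used for 4D fields
     ! written as layers of 3D variables.  nilyr, n_zaero, nblyr are made up.
     implicit none
     integer, parameter :: nilyr = 7, n_zaero = 2, nblyr = 3
     integer :: k, n
     character(len=3) :: nchar, ncharb

     ! vertical ice layers: one 3D variable per layer
     do k = 1, nilyr
        write(nchar,'(i3.3)') k
        print *, 'sice'//nchar, '   ', 'qice'//nchar   ! sice001 ... sice007
     enddo

     ! z-tracer aerosols: tracer index and layer index are both encoded
     do n = 1, n_zaero
        write(ncharb,'(i3.3)') n
        do k = 1, nblyr+3
           write(nchar,'(i3.3)') k
           print *, 'zaero'//trim(ncharb)//trim(nchar) ! zaero001001, ...
        enddo
     enddo
   end program restart_layer_names_demo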
Copyright 2020. Triad National Security, LLC. This software was +! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad -! National Security, LLC for the U.S. Department of Energy. The U.S. -! Government has rights to use, reproduce, and distribute this software. -! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY ! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF -! THIS SOFTWARE. If software is modified to produce derivative works, -! such modified software should be clearly marked, so as not to confuse +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse ! it with the version available from LANL. ! ! The full license and distribution policy are available from ! https://github.com/CICE-Consortium -! +! !======================================================================= ! ! Main driver routine for CICE. Initializes and steps through the model. @@ -48,7 +48,7 @@ program icemodel call CICE_Run !----------------------------------------------------------------- - ! Finalize CICE + ! Finalize CICE !----------------------------------------------------------------- call CICE_Finalize @@ -56,3 +56,40 @@ program icemodel end program icemodel !======================================================================= +! +! Wrapper for the print_state debugging routine. +! Useful for debugging in the main driver (see ice.F_debug) +! ip, jp, mtask are set in ice_diagnostics.F +! +! author Elizabeth C. Hunke, LANL +! + subroutine debug_ice(plabeld) + + use ice_kinds_mod + use ice_calendar, only: istep1 + use ice_communicate, only: my_task + use ice_diagnostics, only: check_step, iblkp, ip, jp, mtask, print_state + use ice_domain, only: nblocks + use ice_blocks, only: nx_block, ny_block + + character (char_len), intent(in) :: plabeld + + ! local + integer (kind=int_kind) :: i, j, iblk + + if (istep1 >= check_step) then + + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (iblk==iblkp .and. i==ip .and. j==jp .and. 
my_task==mtask) & + call print_state(plabeld,i,j,iblk) + enddo + enddo + enddo + + endif + + end subroutine debug_ice + +!======================================================================= diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index 3f87f2ca8..dc41ff9fd 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -18,7 +18,7 @@ module CICE_InitMod use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave - use icepack_intfc, only: icepack_configure, icepack_init_radiation + use icepack_intfc, only: icepack_configure use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & icepack_query_tracer_indices, icepack_query_tracer_sizes @@ -64,38 +64,35 @@ subroutine cice_init ocean_bio_all, ice_bio_net, snow_bio_net, alloc_arrays_column use ice_arrays_column, only: floe_rad_l, floe_rad_c, & floe_binwidth, c_fsd_range - use ice_calendar, only: dt, dt_dyn, write_ic, & - init_calendar, advance_timestep, calc_timesteps + use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & + init_calendar, calendar use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap - use ice_dyn_evp, only: init_evp - use ice_dyn_vp, only: init_vp - use ice_dyn_shared, only: kdyn + use ice_dyn_eap, only: init_eap, alloc_dyn_eap + use ice_dyn_shared, only: kdyn, init_evp, basalstress, alloc_dyn_shared use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & get_forcing_atmo, get_forcing_ocn, alloc_forcing, get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_data, faero_default, alloc_forcing_bgc - use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid + faero_data, faero_default, faero_optics, alloc_forcing_bgc + use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runid, runtype use ice_init, only: input_data, init_state - use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers + use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc use ice_kinds_mod use ice_restoring, only: ice_HaloRestore_init use ice_timers, only: timer_total, init_ice_timers, ice_timer_start use ice_transport_driver, only: init_transport - use lib_mpp, only: mpi_comm_opa ! NEMO MPI communicator logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & tr_fsd, wave_spec character(len=*),parameter :: subname = '(cice_init)' - call init_communicate(mpi_comm_opa) ! initial setup for message passing + call init_communicate ! initial setup for message passing call init_fileunits ! unit numbers call icepack_configure() ! initialize icepack @@ -104,14 +101,15 @@ subroutine cice_init file=__FILE__,line= __LINE__) call input_data ! namelist variables - call input_zbgc ! vertical biogeochemistry namelist - call count_tracers ! count tracers + + if (trim(runid) == 'bering') call check_finished_file call init_domain_blocks ! 
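The debug_ice wrapper added above prints the model state only once istep1 reaches check_step, and only at the single point (iblkp, ip, jp) on task mtask set in ice_diagnostics. A hedged, runnable sketch of how a driver might bracket a suspect physics call with labelled dumps follows; the labels are arbitrary, and a local stub stands in for the real wrapper so the example compiles on its own.

   program debug_ice_usage_demo
     ! Illustrative only: labelled debug_ice calls around a physics step.
     ! The stub below replaces the real wrapper, which additionally checks
     ! istep1, iblkp, ip, jp and mtask before calling print_state.
     implicit none
     integer, parameter :: char_len = 80   ! analogous to ice_kinds_mod
     character (char_len) :: plabeld

     plabeld = 'before step_therm1'
     call debug_ice(plabeld)
     ! ... call step_therm1(dt, iblk) here in the real driver ...
     plabeld = 'after step_therm1'
     call debug_ice(plabeld)

   contains

     subroutine debug_ice(plabeld)
       ! stub standing in for the wrapper defined in the patch above
       character (char_len), intent(in) :: plabeld
       print *, 'debug_ice label: ', trim(plabeld)
     end subroutine debug_ice

   end program debug_ice_usage_demo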
set up block decomposition call init_grid1 ! domain distribution call alloc_grid ! allocate grid call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state + call alloc_dyn_shared ! allocate dyn shared (init_uvel,init_vvel) call alloc_flux_bgc ! allocate flux_bgc call alloc_flux ! allocate flux call init_ice_timers ! initialize all timers @@ -122,12 +120,11 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - if (kdyn == 1) then - call init_evp - else if (kdyn == 2) then - call init_eap ! define eap dynamics parameters, variables - else if (kdyn == 3) then - call init_vp ! define vp dynamics parameters, variables + if (kdyn == 2) then + call alloc_dyn_eap ! allocate dyn_eap arrays + call init_eap (dt_dyn) ! define eap dynamics parameters, variables + else ! for both kdyn = 0 or 1 + call init_evp (dt_dyn) ! define evp dynamics parameters, variables endif call init_coupler_flux ! initialize fluxes exchanged with coupler @@ -151,9 +148,11 @@ subroutine cice_init write_diags=(my_task == master_task)) ! write diag on master only call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & file=__FILE__, line=__LINE__) + call calendar(time) ! determine the initial date + #ifndef CICE_IN_NEMO call init_forcing_ocn(dt) ! initialize sss and sst from data #endif @@ -169,22 +168,25 @@ subroutine cice_init call init_diags ! initialize diagnostic output points call init_history_therm ! initialize thermo history variables call init_history_dyn ! initialize dynamic history variables - call calc_timesteps ! update timestep counter if not using npt_unit="1" - call icepack_init_radiation ! initialize icepack shortwave tables call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(subname, & file=__FILE__,line= __LINE__) - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + !property tables + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer - ! determine the time and date at the end of the first timestep - call advance_timestep() + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + call calendar(time) ! at the end of the first timestep !-------------------------------------------------------------------- ! coupler communication or forcing data initialization @@ -213,8 +215,7 @@ subroutine cice_init call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - call dealloc_grid ! deallocate temporary grid arrays - if (write_ic) call accum_hist(dt) ! write initial conditions + if (write_ic) call accum_hist(dt) ! 
write initial conditions end subroutine cice_init @@ -224,20 +225,21 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: calendar + use ice_calendar, only: time, calendar use ice_constants, only: c0 use ice_domain, only: nblocks use ice_domain_size, only: ncat, n_aero, nfsd use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn - use ice_flux, only: sss, Tf + use ice_flux, only: sss use ice_grid, only: tmask use ice_init, only: ice_ic use ice_init_column, only: init_age, init_FY, init_lvl, & - init_meltponds_lvl, init_meltponds_topo, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & restart_fsd, read_restart_fsd, & @@ -252,7 +254,7 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & tr_pond_topo, tr_fsd, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & @@ -261,8 +263,6 @@ subroutine init_restart nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & nt_iage, nt_FY, nt_aero, nt_fsd - character(len=*), parameter :: subname = '(init_restart)' - call icepack_query_tracer_sizes(ntrcr_out=ntrcr) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -271,20 +271,20 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & tr_fsd_out=tr_fsd) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd) call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in - call calendar() ! update time parameters + call calendar(time) ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' @@ -292,17 +292,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! 
iblk endif @@ -313,7 +313,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -324,12 +324,25 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks - call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & + do iblk = 1, nblocks + call init_lvl(trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -337,7 +350,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -352,7 +365,7 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) @@ -373,7 +386,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero @@ -426,8 +439,7 @@ subroutine init_restart trcr_depend = trcr_depend, & trcr_base = trcr_base, & n_trcr_strata = n_trcr_strata, & - nt_strata = nt_strata, & - Tf = Tf(i,j,iblk)) + nt_strata = nt_strata) else ! tcraig, reset all tracer values on land to zero trcrn(i,j,:,:,iblk) = c0 @@ -438,11 +450,39 @@ subroutine init_restart !$OMP END PARALLEL DO call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + if (icepack_warnings_aborted()) call abort_ice(error_message="subname", & file=__FILE__, line=__LINE__) end subroutine init_restart +!======================================================================= +! +! Check whether a file indicating that the previous run finished cleanly +! If so, then do not continue the current restart. This is needed only +! for runs on machine 'bering' (set using runid = 'bering'). +! +! author: Adrian Turner, LANL + + subroutine check_finished_file() + + use ice_communicate, only: my_task, master_task + use ice_restart_shared, only: restart_dir + + character(len=char_len_long) :: filename + logical :: lexist = .false. + + if (my_task == master_task) then + + filename = trim(restart_dir)//"finished" + inquire(file=filename, exist=lexist) + if (lexist) then + call abort_ice("subname"//"ERROR: Found already finished file - quitting") + end if + + endif + + end subroutine check_finished_file + !======================================================================= end module CICE_InitMod diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 deleted file mode 100644 index 78c703c91..000000000 --- a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 +++ /dev/null @@ -1,656 +0,0 @@ -!======================================================================= -! -! Main driver for time stepping of CICE. -! -! 
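check_finished_file, added above, aborts a restart on runid 'bering' when a file named 'finished' already exists in restart_dir. The patch does not show what creates that marker; the sketch below simply assumes some step at the end of a cleanly completed run (for example a run script) touches it. The path and message are illustrative only.

   program touch_finished_demo
     ! Illustrative only: create the <restart_dir>/finished marker that
     ! check_finished_file tests for.  restart_dir is an assumed path and
     ! the marker would normally be written by the run script, not CICE.
     implicit none
     character(len=*), parameter :: restart_dir = './restart/'
     integer :: iunit

     open(newunit=iunit, file=trim(restart_dir)//'finished', status='replace')
     write(iunit,'(a)') 'run completed cleanly'
     close(iunit)
   end program touch_finished_demo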
authors Elizabeth C. Hunke, LANL -! Philip W. Jones, LANL -! William H. Lipscomb, LANL -! -! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency -! 2006 ECH: Converted to free source form (F90) -! 2007 BPB: Modified Delta-Eddington shortwave interface -! 2008 ECH: moved ESMF code to its own driver - - module CICE_RunMod - - use ice_kinds_mod - use ice_fileunits, only: nu_diag - use ice_arrays_column, only: oceanmixed_ice - use ice_constants, only: c0, c1 - use ice_constants, only: field_loc_center, field_type_scalar - use ice_exit, only: abort_ice - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_max_aero - use icepack_intfc, only: icepack_query_parameters - use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes - - implicit none - private - public :: CICE_Run, ice_step - -!======================================================================= - - contains - -!======================================================================= -! -! This is the main driver routine for advancing CICE forward in time. -! -! author Elizabeth C. Hunke, LANL -! Philip W. Jones, LANL -! William H. Lipscomb, LANL - - subroutine CICE_Run - - use ice_calendar, only: stop_now, advance_timestep - use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & - get_wave_spec - use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, fzaero_data, & - faero_default - use ice_flux, only: init_flux_atm, init_flux_ocn - use ice_timers, only: ice_timer_start, ice_timer_stop, & - timer_couple, timer_step - logical (kind=log_kind) :: & - tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd - - character(len=*), parameter :: subname = '(CICE_Run)' - - !-------------------------------------------------------------------- - ! initialize error code and step timer - !-------------------------------------------------------------------- - - call ice_timer_start(timer_step) ! start timing entire run - - call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, & - wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_aero_out=tr_aero, & - tr_zaero_out=tr_zaero, & - tr_fsd_out=tr_fsd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - -#ifndef CICE_IN_NEMO - !-------------------------------------------------------------------- - ! timestep loop - !-------------------------------------------------------------------- - - timeLoop: do -#endif - - call ice_step - - call advance_timestep() ! advance time - -#ifndef CICE_IN_NEMO - if (stop_now >= 1) exit timeLoop -#endif - - call ice_timer_start(timer_couple) ! atm/ocn coupling - -! standalone -! for now, wave_spectrum is constant in time -! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice -! call get_forcing_atmo ! atmospheric forcing from data -! call get_forcing_ocn(dt) ! ocean forcing from data - -! ! aerosols -! ! if (tr_aero) call faero_data ! data file -! ! if (tr_zaero) call fzaero_data ! data file (gx1) -! if (tr_aero .or. tr_zaero) call faero_default ! default values - -! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry - - if (z_tracers) call get_atm_bgc ! biogeochemistry - - call init_flux_atm ! initialize atmosphere fluxes sent to coupler - call init_flux_ocn ! initialize ocean fluxes sent to coupler - - call ice_timer_stop(timer_couple) ! 
atm/ocn coupling - -#ifndef CICE_IN_NEMO - enddo timeLoop -#endif - - !-------------------------------------------------------------------- - ! end of timestep loop - !-------------------------------------------------------------------- - - call ice_timer_stop(timer_step) ! end timestepping loop timer - - end subroutine CICE_Run - -!======================================================================= -! -! Calls drivers for physics components, some initialization, and output -! -! author Elizabeth C. Hunke, LANL -! William H. Lipscomb, LANL - - subroutine ice_step - - use ice_boundary, only: ice_HaloUpdate - use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_diagnostics, only: init_mass_diags, runtime_diags - use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags - use ice_domain, only: halo_info, nblocks - use ice_domain_size, only: nslyr - use ice_dyn_eap, only: write_restart_eap - use ice_dyn_shared, only: kdyn - use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd - use ice_history, only: accum_hist - use ice_history_bgc, only: init_history_bgc - use ice_restart, only: final_restart - use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_lvl, & - write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_bgc, write_restart_hbrine - use ice_restart_driver, only: dumpfile - use ice_restoring, only: restore_ice, ice_HaloRestore - use ice_state, only: trcrn - use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & - update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, step_prep, step_dyn_wave - use ice_timers, only: ice_timer_start, ice_timer_stop, & - timer_diags, timer_column, timer_thermo, timer_bound, & - timer_hist, timer_readwrite - - integer (kind=int_kind) :: & - iblk , & ! block index - k , & ! dynamics supercycling index - ktherm ! thermodynamics is off when ktherm = -1 - - real (kind=dbl_kind) :: & - offset ! d(age)/dt time offset - - logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, & - tr_pond_lvl, tr_pond_topo, tr_brine, tr_aero, & - calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec - - character(len=*), parameter :: subname = '(ice_step)' - - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & - wave_spec_out=wave_spec) - call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & - tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_fsd_out=tr_fsd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - -#ifdef ICE_DA - !--------------------------------------------------------------- - ! Update CICE state variables using data assimilation increments - !--------------------------------------------------------------- - call da_state_update -#endif - - !----------------------------------------------------------------- - ! restoring on grid boundaries - !----------------------------------------------------------------- - - if (restore_ice) call ice_HaloRestore - - !----------------------------------------------------------------- - ! 
initialize diagnostics and save initial state values - !----------------------------------------------------------------- - - call ice_timer_start(timer_diags) ! diagnostics/history - call init_mass_diags ! diagnostics per timestep - call init_history_therm - call init_history_bgc - call ice_timer_stop(timer_diags) ! diagnostics/history - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - - call step_prep - - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) then - - !----------------------------------------------------------------- - ! Scale radiation fields - !----------------------------------------------------------------- - - if (calc_Tsfc) call prep_radiation (iblk) - - !----------------------------------------------------------------- - ! thermodynamics and biogeochemistry - !----------------------------------------------------------------- - - call step_therm1 (dt, iblk) ! vertical thermodynamics - call biogeochemistry (dt, iblk) ! biogeochemistry - call step_therm2 (dt, iblk) ! ice thickness distribution thermo - - endif - - enddo ! iblk - !$OMP END PARALLEL DO - - ! clean up, update tendency diagnostics - offset = dt - call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & - dagedt=dagedtt, offset=offset) - - call ice_timer_stop(timer_thermo) ! thermodynamics - call ice_timer_stop(timer_column) ! column physics - - !----------------------------------------------------------------- - ! dynamics, transport, ridging - !----------------------------------------------------------------- - - ! wave fracture of the floe size distribution - ! note this is called outside of the dynamics subcycling loop - if (tr_fsd .and. wave_spec) call step_dyn_wave(dt) - - do k = 1, ndtd - - ! momentum, stress, transport - call step_dyn_horiz (dt_dyn) - - ! ridging - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - call step_dyn_ridge (dt_dyn, ndtd, iblk) - enddo - !$OMP END PARALLEL DO - - ! clean up, update tendency diagnostics - offset = c0 - call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & - dagedt=dagedtd, offset=offset) - - enddo - - !----------------------------------------------------------------- - ! albedo, shortwave radiation - !----------------------------------------------------------------- - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - - if (ktherm >= 0) call step_radiation (dt, iblk) - - !----------------------------------------------------------------- - ! get ready for coupling and the next time step - !----------------------------------------------------------------- - - call coupling_prep (iblk) - - enddo ! iblk - !$OMP END PARALLEL DO - - call ice_timer_start(timer_bound) - call ice_HaloUpdate (scale_factor, halo_info, & - field_loc_center, field_type_scalar) - call ice_timer_stop(timer_bound) - - call ice_timer_stop(timer_thermo) ! thermodynamics - call ice_timer_stop(timer_column) ! column physics - - !----------------------------------------------------------------- - ! write data - !----------------------------------------------------------------- - - call ice_timer_start(timer_diags) ! diagnostics - if (mod(istep,diagfreq) == 0) then - call runtime_diags(dt) ! log file - if (solve_zsal) call zsal_diags - if (skl_bgc .or. z_tracers) call bgc_diags - if (tr_brine) call hbrine_diags - endif - call ice_timer_stop(timer_diags) ! 
diagnostics - - call ice_timer_start(timer_hist) ! history - call accum_hist (dt) ! history file - call ice_timer_stop(timer_hist) ! history - - call ice_timer_start(timer_readwrite) ! reading/writing - if (write_restart == 1) then - call dumpfile ! core variables for restarting - if (tr_iage) call write_restart_age - if (tr_FY) call write_restart_FY - if (tr_lvl) call write_restart_lvl - if (tr_pond_lvl) call write_restart_pond_lvl - if (tr_pond_topo) call write_restart_pond_topo - if (tr_fsd) call write_restart_fsd - if (tr_aero) call write_restart_aero - if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc - if (tr_brine) call write_restart_hbrine - if (kdyn == 2) call write_restart_eap - call final_restart - endif - - call ice_timer_stop(timer_readwrite) ! reading/writing - - end subroutine ice_step - -!======================================================================= -! -! Prepare for coupling -! -! authors: Elizabeth C. Hunke, LANL - - subroutine coupling_prep (iblk) - - use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn - use ice_blocks, only: nx_block, ny_block, get_block, block - use ice_domain, only: blocks_ice - use ice_calendar, only: dt, nstreams - use ice_domain_size, only: ncat - use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, & - albpnd, albcnt, apeff_ai, coszen, fpond, fresh, l_mpond_fresh, & - alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & - fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & - fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & - swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyt, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & - fsurfn_f, flatn_f, scale_fluxes, frzmlt_init, frzmlt - use ice_flux_bgc, only: faero_ocn, fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai - use ice_grid, only: tmask - use ice_state, only: aicen, aice, aice_init - use ice_step_mod, only: ocean_mixed_layer - use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - - integer (kind=int_kind), intent(in) :: & - iblk ! block index - - ! local variables - - integer (kind=int_kind) :: & - ilo,ihi,jlo,jhi, & ! beginning and end of physical domain - n , & ! thickness category index - i,j , & ! horizontal indices - k , & ! tracer index - nbtrcr ! - - type (block) :: & - this_block ! block information for current block - - logical (kind=log_kind) :: & - calc_Tsfc ! - - real (kind=dbl_kind) :: & - cszn , & ! counter for history averaging - puny , & ! - rhofresh , & ! - netsw ! flag for shortwave radiation presence - - character(len=*), parameter :: subname = '(coupling_prep)' - - call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) - call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - !----------------------------------------------------------------- - ! Save current value of frzmlt for diagnostics. - ! Update mixed layer with heat and radiation from ice. - !----------------------------------------------------------------- - - do j = 1, ny_block - do i = 1, nx_block - frzmlt_init (i,j,iblk) = frzmlt(i,j,iblk) - enddo - enddo - - call ice_timer_start(timer_couple) ! atm/ocn coupling - - if (oceanmixed_ice) & - call ocean_mixed_layer (dt,iblk) ! 
ocean surface fluxes and sst - - !----------------------------------------------------------------- - ! Aggregate albedos - !----------------------------------------------------------------- - - do j = 1, ny_block - do i = 1, nx_block - alvdf(i,j,iblk) = c0 - alidf(i,j,iblk) = c0 - alvdr(i,j,iblk) = c0 - alidr(i,j,iblk) = c0 - - albice(i,j,iblk) = c0 - albsno(i,j,iblk) = c0 - albpnd(i,j,iblk) = c0 - apeff_ai(i,j,iblk) = c0 - snowfrac(i,j,iblk) = c0 - - ! for history averaging - cszn = c0 - netsw = swvdr(i,j,iblk)+swidr(i,j,iblk)+swvdf(i,j,iblk)+swidf(i,j,iblk) - if (netsw > puny) cszn = c1 - do n = 1, nstreams - albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn - enddo - enddo - enddo - - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - - do n = 1, ncat - do j = jlo, jhi - do i = ilo, ihi - if (aicen(i,j,n,iblk) > puny) then - - alvdf(i,j,iblk) = alvdf(i,j,iblk) & - + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) - alidf(i,j,iblk) = alidf(i,j,iblk) & - + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk) - alvdr(i,j,iblk) = alvdr(i,j,iblk) & - + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) - alidr(i,j,iblk) = alidr(i,j,iblk) & - + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) - - netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & - + swvdf(i,j,iblk) + swidf(i,j,iblk) - if (netsw > puny) then ! sun above horizon - albice(i,j,iblk) = albice(i,j,iblk) & - + albicen(i,j,n,iblk)*aicen(i,j,n,iblk) - albsno(i,j,iblk) = albsno(i,j,iblk) & - + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk) - albpnd(i,j,iblk) = albpnd(i,j,iblk) & - + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) - endif - - apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & ! for history - + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) - snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & ! for history - + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) - - endif ! aicen > puny - enddo - enddo - enddo - - do j = 1, ny_block - do i = 1, nx_block - - !----------------------------------------------------------------- - ! reduce fresh by fpond for coupling - !----------------------------------------------------------------- - - if (l_mpond_fresh) then - fpond(i,j,iblk) = fpond(i,j,iblk) * rhofresh/dt - fresh(i,j,iblk) = fresh(i,j,iblk) - fpond(i,j,iblk) - endif - - !---------------------------------------------------------------- - ! Store grid box mean albedos and fluxes before scaling by aice - !---------------------------------------------------------------- - - alvdf_ai (i,j,iblk) = alvdf (i,j,iblk) - alidf_ai (i,j,iblk) = alidf (i,j,iblk) - alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) - alidr_ai (i,j,iblk) = alidr (i,j,iblk) - fresh_ai (i,j,iblk) = fresh (i,j,iblk) - fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) - fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) - fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) - - if (nbtrcr > 0) then - do k = 1, nbtrcr - flux_bio_ai (i,j,k,iblk) = flux_bio (i,j,k,iblk) - enddo - endif - - !----------------------------------------------------------------- - ! Save net shortwave for scaling factor in scale_factor - !----------------------------------------------------------------- - scale_factor(i,j,iblk) = & - swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) & - + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) & - + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) & - + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk)) - - enddo - enddo - - !----------------------------------------------------------------- - ! Divide fluxes by ice area - ! 
- the CESM coupler assumes fluxes are per unit ice area - ! - also needed for global budget in diagnostics - !----------------------------------------------------------------- - -! RM and froy -! Now use aice_init, more consistent, see merge_fluxes - call scale_fluxes (nx_block, ny_block, & - tmask (:,:,iblk), nbtrcr, icepack_max_aero, & - aice_init(:,:,iblk), Tf (:,:,iblk), & - Tair (:,:,iblk), Qa (:,:,iblk), & - strairxT (:,:,iblk), strairyT(:,:,iblk), & - fsens (:,:,iblk), flat (:,:,iblk), & - fswabs (:,:,iblk), flwout (:,:,iblk), & - evap (:,:,iblk), & - Tref (:,:,iblk), Qref (:,:,iblk), & - fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), fswthru (:,:,iblk), & - fswthru_vdr(:,:,iblk), & - fswthru_vdf(:,:,iblk), & - fswthru_idr(:,:,iblk), & - fswthru_idf(:,:,iblk), & - faero_ocn(:,:,:,iblk), & - alvdr (:,:,iblk), alidr (:,:,iblk), & - alvdf (:,:,iblk), alidf (:,:,iblk), & - fzsal (:,:,iblk), fzsal_g (:,:,iblk), & - flux_bio(:,:,1:nbtrcr,iblk)) - -!echmod - comment this out for efficiency, if .not. calc_Tsfc - if (.not. calc_Tsfc) then - - !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. - !--------------------------------------------------------------- - - call sfcflux_to_ocn & - (nx_block, ny_block, & - tmask (:,:,iblk), aice_init(:,:,iblk), & - fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & - fresh (:,:,iblk), fhocn (:,:,iblk)) - endif -!echmod - - call ice_timer_stop(timer_couple) ! atm/ocn coupling - - end subroutine coupling_prep - -!======================================================================= -! -! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can -! be provided at points which do not have ice. (This is could be due to -! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, -! conserve energy and water by passing these fluxes to the ocean. -! -! author: A. McLaren, Met Office - - subroutine sfcflux_to_ocn(nx_block, ny_block, & - tmask, aice, & - fsurfn_f, flatn_f, & - fresh, fhocn) - - use ice_domain_size, only: ncat - - integer (kind=int_kind), intent(in) :: & - nx_block, ny_block ! block dimensions - - logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & - tmask ! land/boundary mask, thickness (T-cell) - - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in):: & - aice ! initial ice concentration - - real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), intent(in) :: & - fsurfn_f, & ! net surface heat flux (provided as forcing) - flatn_f ! latent heat flux (provided as forcing) - - real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout):: & - fresh , & ! fresh water flux to ocean (kg/m2/s) - fhocn ! actual ocn/ice heat flx (W/m**2) - -#ifdef CICE_IN_NEMO - - ! local variables - integer (kind=int_kind) :: & - i, j, n ! horizontal indices - - real (kind=dbl_kind) :: & - puny, & ! - Lsub, & ! - rLsub ! 1/Lsub - - character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - - call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - rLsub = c1 / Lsub - - do n = 1, ncat - do j = 1, ny_block - do i = 1, nx_block - if (tmask(i,j) .and. 
aice(i,j) <= puny) then - fhocn(i,j) = fhocn(i,j) & - + fsurfn_f(i,j,n) + flatn_f(i,j,n) - fresh(i,j) = fresh(i,j) & - + flatn_f(i,j,n) * rLsub - endif - enddo ! i - enddo ! j - enddo ! n - -#endif - - end subroutine sfcflux_to_ocn - -!======================================================================= - - end module CICE_RunMod - -!======================================================================= diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index 419dbacc9..80bb2570e 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -15,11 +15,9 @@ module CICE_InitMod use ice_kinds_mod use ice_exit, only: abort_ice use ice_fileunits, only: init_fileunits, nu_diag - use ice_memusage, only: ice_memusage_init, ice_memusage_print use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave - use icepack_intfc, only: icepack_init_snow, icepack_init_radiation use icepack_intfc, only: icepack_configure use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & @@ -66,23 +64,21 @@ subroutine cice_init(mpicom_ice) floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, istep, istep1, write_ic, & - init_calendar, advance_timestep, calc_timesteps + use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & + init_calendar, calendar use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap - use ice_dyn_evp, only: init_evp - use ice_dyn_vp, only: init_vp - use ice_dyn_shared, only: kdyn + use ice_dyn_eap, only: init_eap, alloc_dyn_eap + use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & - get_forcing_atmo, get_forcing_ocn, get_wave_spec, init_snowtable + get_forcing_atmo, get_forcing_ocn, get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default, alloc_forcing_bgc, fiso_default - use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid + faero_default, faero_optics, alloc_forcing_bgc, fiso_default + use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state @@ -96,16 +92,12 @@ subroutine cice_init(mpicom_ice) mpicom_ice ! communicator for sequential ccsm logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_iso, tr_fsd, wave_spec, tr_snow - character(len=char_len) :: snw_aging_table + tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' call init_communicate(mpicom_ice) ! initial setup for message passing call init_fileunits ! unit numbers - ! tcx debug, this will create a different logfile for each pe - ! if (my_task /= master_task) nu_diag = 100+my_task - call icepack_configure() ! 
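In coupling_prep above, category albedos are aggregated with aicen weights and scale_fluxes then converts fluxes from per unit grid-cell area to per unit ice area, since the CESM coupler expects the latter. The following self-contained sketch mirrors only the intent of that weighting and rescaling for one scalar flux; the numbers are made up and the real scale_fluxes interface handles many more fields and edge cases.

   program flux_scaling_demo
     ! Illustrative only: area-weighted aggregation over thickness categories
     ! followed by rescaling to per-ice-area values, in the spirit of
     ! coupling_prep/scale_fluxes.  All values are hypothetical.
     implicit none
     integer, parameter :: ncat = 5
     real(kind=8), parameter :: puny = 1.0d-11
     real(kind=8) :: aicen(ncat), alvdfn(ncat)
     real(kind=8) :: aice, alvdf, fsens, fsens_ai
     integer :: n

     aicen  = (/ 0.05d0, 0.10d0, 0.20d0, 0.15d0, 0.10d0 /)  ! category areas
     alvdfn = (/ 0.70d0, 0.65d0, 0.60d0, 0.55d0, 0.50d0 /)  ! category albedos
     fsens  = -12.0d0   ! sensible heat flux per unit grid-cell area (W/m2)

     ! aggregate: ice-area-weighted sum over categories
     aice  = sum(aicen)
     alvdf = 0.0d0
     do n = 1, ncat
        alvdf = alvdf + alvdfn(n)*aicen(n)
     enddo

     ! rescale to per unit ice area where ice is present
     if (aice > puny) then
        fsens_ai = fsens / aice
        alvdf    = alvdf / aice
     else
        fsens_ai = 0.0d0
        alvdf    = 0.0d0
     endif

     print *, 'aice                      =', aice
     print *, 'mean vis-dif albedo (ice) =', alvdf
     print *, 'fsens per unit ice area   =', fsens_ai
   end program flux_scaling_demo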
initialize icepack call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & @@ -115,17 +107,12 @@ subroutine cice_init(mpicom_ice) call input_zbgc ! vertical biogeochemistry namelist call count_tracers ! count tracers - ! Call this as early as possible, must be after memory_stats is read - if (my_task == master_task) then - call ice_memusage_init(nu_diag) - call ice_memusage_print(nu_diag,subname//':start') - endif - call init_domain_blocks ! set up block decomposition call init_grid1 ! domain distribution call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays + call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -135,12 +122,11 @@ subroutine cice_init(mpicom_ice) call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - if (kdyn == 1) then - call init_evp - else if (kdyn == 2) then - call init_eap ! define eap dynamics parameters, variables - else if (kdyn == 3) then - call init_vp ! define vp dynamics parameters, variables + if (kdyn == 2) then + call alloc_dyn_eap ! allocate dyn_eap arrays + call init_eap (dt_dyn) ! define eap dynamics parameters, variables + else ! for both kdyn = 0 or 1 + call init_evp (dt_dyn) ! define evp dynamics parameters, variables endif call init_coupler_flux ! initialize fluxes exchanged with coupler @@ -168,13 +154,15 @@ subroutine cice_init(mpicom_ice) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) + call calendar(time) ! determine the initial date + call init_forcing_ocn(dt) ! initialize sss and sst from data call init_state ! initialize the ice state call init_transport ! initialize horizontal transport call ice_HaloRestore_init ! restored boundary conditions call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & - wave_spec_out=wave_spec, snw_aging_table_out=snw_aging_table) + wave_spec_out=wave_spec) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -185,22 +173,26 @@ subroutine cice_init(mpicom_ice) call init_diags ! initialize diagnostic output points call init_history_therm ! initialize thermo history variables call init_history_dyn ! initialize dynamic history variables - call calc_timesteps ! update timestep counter if not using npt_unit="1" - call icepack_init_radiation ! initialize icepack shortwave tables call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) - call icepack_query_tracer_flags(tr_iso_out=tr_iso, tr_snow_out=tr_snow) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + !property tables + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer -! call advance_timestep() +! istep = istep + 1 ! 
update time step counters +! istep1 = istep1 + 1 +! time = time + dt ! determine the time and date +! call calendar(time) ! at the end of the first timestep !-------------------------------------------------------------------- ! coupler communication or forcing data initialization @@ -208,29 +200,19 @@ subroutine cice_init(mpicom_ice) call init_forcing_atmo ! initialize atmospheric forcing (standalone) +! for standalone ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice ! call get_forcing_atmo ! atmospheric forcing from data ! call get_forcing_ocn(dt) ! ocean forcing from data - ! snow aging lookup table initialization - if (tr_snow) then ! advanced snow physics - call icepack_init_snow() - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - if (snw_aging_table(1:4) /= 'test') then - call init_snowtable() - endif - endif - ! ! isotopes ! if (tr_iso) call fiso_default ! default values - ! ! aerosols ! ! if (tr_aero) call faero_data ! data file ! ! if (tr_zaero) call fzaero_data ! data file (gx1) ! if (tr_aero .or. tr_zaero) call faero_default ! default values ! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + if (z_tracers) call get_atm_bgc ! biogeochemistry if (runtype == 'initial' .and. .not. restart) & @@ -239,12 +221,7 @@ subroutine cice_init(mpicom_ice) call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - if (write_ic) call accum_hist(dt) ! write initial conditions - - call dealloc_grid ! deallocate temporary grid arrays - if (my_task == master_task) then - call ice_memusage_print(nu_diag,subname//':end') - endif +! if (write_ic) call accum_hist(dt) ! write initial conditions end subroutine cice_init @@ -254,23 +231,22 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: calendar + use ice_calendar, only: time, calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_iso, n_aero, nfsd, nslyr + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn - use ice_flux, only: Tf use ice_grid, only: tmask use ice_init, only: ice_ic - use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & - init_meltponds_lvl, init_meltponds_topo, & + use ice_init_column, only: init_age, init_FY, init_lvl, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & - restart_snow, read_restart_snow, & restart_fsd, read_restart_fsd, & restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & @@ -284,14 +260,13 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! 
block index logical(kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & - tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_smice, nt_smliq, nt_rhos, nt_rsnw, & nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -304,23 +279,21 @@ subroutine init_restart call icepack_query_parameters(skl_bgc_out=skl_bgc, & z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & - nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & - nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in - call calendar() ! update time parameters + call calendar(time) ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' @@ -328,17 +301,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -349,7 +322,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -360,12 +333,25 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -373,7 +359,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -388,29 +374,13 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) enddo ! iblk endif ! .not. restart_pond endif - - ! snow redistribution/metamorphism - if (tr_snow) then - if (trim(runtype) == 'continue') restart_snow = .true. - if (restart_snow) then - call read_restart_snow - else - do iblk = 1, nblocks - call init_snowtracers(trcrn(:,:,nt_smice:nt_smice+nslyr-1,:,iblk), & - trcrn(:,:,nt_smliq:nt_smliq+nslyr-1,:,iblk), & - trcrn(:,:,nt_rhos :nt_rhos +nslyr-1,:,iblk), & - trcrn(:,:,nt_rsnw :nt_rsnw +nslyr-1,:,iblk)) - enddo ! iblk - endif - endif - ! floe size distribution if (tr_fsd) then if (trim(runtype) == 'continue') restart_fsd = .true. @@ -427,7 +397,7 @@ subroutine init_restart if (restart_iso) then call read_restart_iso else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) enddo ! iblk @@ -439,7 +409,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero @@ -492,8 +462,7 @@ subroutine init_restart trcr_depend = trcr_depend, & trcr_base = trcr_base, & n_trcr_strata = n_trcr_strata, & - nt_strata = nt_strata, & - Tf = Tf(i,j,iblk)) + nt_strata = nt_strata) else ! tcraig, reset all tracer values on land to zero trcrn(i,j,:,:,iblk) = c0 diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index 6ff6b1270..edc22b710 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 
2008 ECH: moved ESMF code to its own driver @@ -15,18 +15,16 @@ module CICE_RunMod use ice_kinds_mod - use ice_communicate, only: my_task, master_task + use perf_mod, only : t_startf, t_stopf, t_barrierf use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice - use ice_memusage, only: ice_memusage_print use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes - use perf_mod, only : t_startf, t_stopf, t_barrierf implicit none private @@ -46,7 +44,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: istep, istep1, dt, stop_now, advance_timestep + use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -75,15 +73,23 @@ subroutine CICE_Run if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) -! tcraig, use advance_timestep now -! istep = istep + 1 ! update time step counters -! istep1 = istep1 + 1 -! time = time + dt ! determine the time and date + !-------------------------------------------------------------------- + ! timestep loop + !-------------------------------------------------------------------- + +! timeLoop: do + +! call ice_step + + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + ! call calendar(time) ! at the end of the timestep - call advance_timestep() ! advance time call ice_timer_start(timer_couple) ! atm/ocn coupling +! for standalone ! for now, wave_spectrum is constant in time ! if (tr_fsd .and. wave_spec) call get_wave_spec ! wave spectrum in ice ! call get_forcing_atmo ! atmospheric forcing from data @@ -95,22 +101,27 @@ subroutine CICE_Run ! ! if (tr_aero) call faero_data ! data file ! ! if (tr_zaero) call fzaero_data ! data file (gx1) ! if (tr_aero .or. tr_zaero) call faero_default ! default values - ! if (skl_bgc .or. z_tracers) call get_forcing_bgc ! biogeochemistry + if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! Initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler + call calendar(time) ! at the end of the timestep + call ice_timer_stop(timer_couple) ! atm/ocn coupling call ice_step +! if (stop_now >= 1) exit timeLoop +! enddo timeLoop + !-------------------------------------------------------------------- ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! 
end timestepping loop timer end subroutine CICE_Run @@ -125,27 +136,26 @@ subroutine ice_step use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_calendar, only: idate, msec - use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice + use ice_calendar, only: idate, sec + use ice_diagnostics, only: init_mass_diags, runtime_diags use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_lvl, & + write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_iso, write_restart_bgc, write_restart_hbrine, & - write_restart_snow + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, step_prep, step_dyn_wave, step_snow + biogeochemistry, save_init, step_dyn_wave use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite @@ -153,7 +163,7 @@ subroutine ice_step use ice_prescribed_mod integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! thermodynamics is off when ktherm = -1 @@ -161,28 +171,19 @@ subroutine ice_step offset ! d(age)/dt time offset logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & - tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & + tr_iage, tr_FY, tr_lvl, tr_fsd, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' - character (len=char_len) :: plabeld - - if (debug_model) then - plabeld = 'beginning time step' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -203,21 +204,22 @@ subroutine ice_step call init_history_bgc call ice_timer_stop(timer_diags) ! diagnostics/history - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - if (prescribed_ice) then ! 
read prescribed ice call t_barrierf('cice_run_presc_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_presc') - call ice_prescribed_run(idate, msec) + call ice_prescribed_run(idate, sec) call t_stopf ('cice_run_presc') endif - call step_prep + call save_init - if (ktherm >= 0) then - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) then !----------------------------------------------------------------- ! scale radiation fields @@ -225,44 +227,23 @@ subroutine ice_step if (calc_Tsfc) call prep_radiation (iblk) - if (debug_model) then - plabeld = 'post prep_radiation' - call debug_ice (iblk, plabeld) - endif - !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics - - if (debug_model) then - plabeld = 'post step_therm1' - call debug_ice (iblk, plabeld) - endif - call biogeochemistry (dt, iblk) ! biogeochemistry + if (.not.prescribed_ice) & + call step_therm2 (dt, iblk) ! ice thickness distribution thermo - if (debug_model) then - plabeld = 'post biogeochemistry' - call debug_ice (iblk, plabeld) - endif - - call step_therm2 (dt, iblk) ! ice thickness distribution thermo - - if (debug_model) then - plabeld = 'post step_therm2' - call debug_ice (iblk, plabeld) - endif + endif ! ktherm > 0 - enddo - !$OMP END PARALLEL DO - endif ! ktherm > 0 + enddo ! iblk + !$OMP END PARALLEL DO ! clean up, update tendency diagnostics offset = dt - call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & - dagedt=dagedtt, offset=offset) + call update_state (dt, daidtt, dvidtt, dagedtt, offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -271,6 +252,8 @@ subroutine ice_step ! dynamics, transport, ridging !----------------------------------------------------------------- + if (.not.prescribed_ice) then + ! wave fracture of the floe size distribution ! note this is called outside of the dynamics subcycling loop if (tr_fsd .and. wave_spec) call step_dyn_wave(dt) @@ -280,82 +263,40 @@ subroutine ice_step ! momentum, stress, transport call step_dyn_horiz (dt_dyn) - if (debug_model) then - plabeld = 'post step_dyn_horiz' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif - ! ridging - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) enddo !$OMP END PARALLEL DO - if (debug_model) then - plabeld = 'post step_dyn_ridge' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif - ! clean up, update tendency diagnostics offset = c0 - call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & - dagedt=dagedtd, offset=offset) + call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) enddo - if (debug_model) then - plabeld = 'post dynamics' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics + endif ! not prescribed ice !----------------------------------------------------------------- - ! snow redistribution and metamorphosis + ! 
albedo, shortwave radiation !----------------------------------------------------------------- - if (tr_snow) then ! advanced snow physics - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks - call step_snow (dt, iblk) - enddo - !$OMP END PARALLEL DO - call update_state (dt=dt) ! clean up - endif + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) +!MHRI: CHECK THIS OMP + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks - !----------------------------------------------------------------- - ! albedo, shortwave radiation - !----------------------------------------------------------------- - if (ktherm >= 0) call step_radiation (dt, iblk) - if (debug_model) then - plabeld = 'post step_radiation' - call debug_ice (iblk, plabeld) - endif - !----------------------------------------------------------------- ! get ready for coupling and the next time step !----------------------------------------------------------------- call coupling_prep (iblk) - if (debug_model) then - plabeld = 'post coupling_prep' - call debug_ice (iblk, plabeld) - endif - enddo ! iblk !$OMP END PARALLEL DO @@ -377,9 +318,6 @@ subroutine ice_step if (solve_zsal) call zsal_diags if (skl_bgc .or. z_tracers) call bgc_diags if (tr_brine) call hbrine_diags - if (my_task == master_task) then - call ice_memusage_print(nu_diag,subname) - endif endif call ice_timer_stop(timer_diags) ! diagnostics @@ -393,22 +331,23 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl + if (tr_pond_cesm) call write_restart_pond_cesm if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo - if (tr_snow) call write_restart_snow if (tr_fsd) call write_restart_fsd if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero if (solve_zsal .or. skl_bgc .or. z_tracers) & - call write_restart_bgc + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart endif + call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -427,13 +366,12 @@ subroutine coupling_prep (iblk) albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, scale_factor, snowfrac, & - fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & - fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai fnit, fsil, famm, fdmsp, fdms, fhum, fdust, falgalN, & fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn use ice_grid, only: tmask @@ -443,12 +381,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! 
beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -470,6 +408,8 @@ subroutine coupling_prep (iblk) character(len=*), parameter :: subname = '(coupling_prep)' + !----------------------------------------------------------------- + call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh) call icepack_query_parameters(skl_bgc_out=skl_bgc) call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) @@ -586,8 +526,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) - fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) - fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -608,7 +548,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -624,12 +564,7 @@ subroutine coupling_prep (iblk) evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), & - fswthru (:,:,iblk), & - fswthru_vdr (:,:,iblk), & - fswthru_vdf (:,:,iblk), & - fswthru_idr (:,:,iblk), & - fswthru_idf (:,:,iblk), & + fhocn (:,:,iblk), fswthru (:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & @@ -639,6 +574,10 @@ subroutine coupling_prep (iblk) fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk), & Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) ) + + !----------------------------------------------------------------- + ! Define ice-ocean bgc fluxes + !----------------------------------------------------------------- if (nbtrcr > 0 .or. skl_bgc) then call bgcflux_ice_to_ocn (nx_block, ny_block, & @@ -656,29 +595,28 @@ subroutine coupling_prep (iblk) if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling end subroutine coupling_prep - !======================================================================= ! ! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -707,19 +645,19 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & fresh , & ! 
fresh water flux to ocean (kg/m2/s) fhocn ! actual ocn/ice heat flx (W/m**2) +#ifdef CICE_IN_NEMO ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! - Lsub, & ! rLsub ! 1/Lsub character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) + call icepack_query_parameters(puny_out=puny) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -738,6 +676,7 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & enddo ! j enddo ! n +#endif end subroutine sfcflux_to_ocn diff --git a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 index b235ebf0e..917774908 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_InitMod.F90 @@ -1,469 +1,444 @@ -module CICE_InitMod - - ! Initialize CICE model. - - use ice_kinds_mod - use ice_exit , only: abort_ice - use ice_fileunits, only: init_fileunits, nu_diag - use icepack_intfc, only: icepack_aggregate - use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist - use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave - use icepack_intfc, only: icepack_init_snow, icepack_init_radiation - use icepack_intfc, only: icepack_configure - use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted - use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags - use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_tracer_sizes +!======================================================================= +! +! This module contains the CICE initialization routine that sets model +! parameters and initializes the grid and CICE state variables. +! +! authors Elizabeth C. Hunke, LANL +! William H. Lipscomb, LANL +! Philip W. Jones, LANL +! +! 2006: Converted to free form source (F90) by Elizabeth Hunke +! 2008: E. Hunke moved ESMF code to its own driver + + module CICE_InitMod + + use ice_kinds_mod + use ice_exit, only: abort_ice + use ice_fileunits, only: init_fileunits, nu_diag + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist + use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave + use icepack_intfc, only: icepack_configure + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & + icepack_query_tracer_indices, icepack_query_tracer_sizes + + implicit none + private + public :: cice_init - implicit none - private - public :: cice_init1 - public :: cice_init2 +!======================================================================= - private :: init_restart + contains !======================================================================= -contains -!======================================================================= - - subroutine cice_init1() - - ! Initialize the basic state, grid and all necessary parameters for - ! running the CICE model. 
- - use ice_init , only: input_data - use ice_init_column , only: input_zbgc, count_tracers - use ice_grid , only: init_grid1, alloc_grid - use ice_domain , only: init_domain_blocks - use ice_arrays_column , only: alloc_arrays_column - use ice_state , only: alloc_state - use ice_flux_bgc , only: alloc_flux_bgc - use ice_flux , only: alloc_flux - use ice_timers , only: timer_total, init_ice_timers, ice_timer_start - - character(len=*), parameter :: subname = '(cice_init1)' - !---------------------------------------------------- - - call init_fileunits ! unit numbers - call icepack_configure() ! initialize icepack - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(trim(subname), & - file=__FILE__,line= __LINE__) - - call input_data ! namelist variables - call input_zbgc ! vertical biogeochemistry namelist - call count_tracers ! count tracers - - call init_domain_blocks ! set up block decomposition - call init_grid1 ! domain distribution - call alloc_grid ! allocate grid arrays - call alloc_arrays_column ! allocate column arrays - call alloc_state ! allocate state arrays - call alloc_flux_bgc ! allocate flux_bgc arrays - call alloc_flux ! allocate flux arrays - call init_ice_timers ! initialize all timers - call ice_timer_start(timer_total) ! start timing entire run - - end subroutine cice_init1 - - !======================================================================= - subroutine cice_init2() - - ! Initialize the basic state, and all necessary parameters for - ! running the CICE model. - - use ice_arrays_column , only: hin_max, c_hi_range - use ice_arrays_column , only: floe_rad_l, floe_rad_c, floe_binwidth, c_fsd_range - use ice_calendar , only: dt, dt_dyn, istep, istep1, write_ic, init_calendar, calendar - use ice_communicate , only: my_task, master_task - use ice_diagnostics , only: init_diags - use ice_domain_size , only: ncat, nfsd, nfreq - use ice_dyn_eap , only: init_eap - use ice_dyn_evp , only: init_evp - use ice_dyn_vp , only: init_vp - use ice_dyn_shared , only: kdyn - use ice_flux , only: init_coupler_flux, init_history_therm - use ice_flux , only: init_history_dyn, init_flux_atm, init_flux_ocn - use ice_forcing , only: init_snowtable - use ice_forcing_bgc , only: get_forcing_bgc, get_atm_bgc - use ice_forcing_bgc , only: faero_default, alloc_forcing_bgc, fiso_default - use ice_grid , only: dealloc_grid - use ice_history , only: init_hist, accum_hist - use ice_restart_shared , only: restart, runtype - use ice_init , only: input_data, init_state - use ice_init_column , only: init_thermo_vertical, init_shortwave, init_zbgc - use ice_restoring , only: ice_HaloRestore_init - use ice_timers , only: timer_total, init_ice_timers, ice_timer_start - use ice_transport_driver , only: init_transport - use ice_arrays_column , only: wavefreq, dwavefreq - - logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers - logical(kind=log_kind) :: tr_iso, tr_fsd, wave_spec, tr_snow - character(len=char_len) :: snw_aging_table - real(kind=dbl_kind), dimension(25) :: wave_spectrum_profile ! hardwire for now - character(len=*), parameter :: subname = '(cice_init2)' - !---------------------------------------------------- - - call init_zbgc ! vertical biogeochemistry initialization - call init_calendar ! initialize some calendar stuff - call init_hist (dt) ! initialize output history file - - if (kdyn == 1) then - call init_evp ! define evp dynamics parameters, variables - elseif (kdyn == 2) then - call init_eap ! 
define eap dynamics parameters, variables - else if (kdyn == 3) then - call init_vp ! define vp dynamics parameters, variables - endif - - call init_coupler_flux ! initialize fluxes exchanged with coupler - call init_thermo_vertical ! initialize vertical thermodynamics - - call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution - if (my_task == master_task) then - call icepack_init_itd_hist(ncat=ncat, hin_max=hin_max, c_hi_range=c_hi_range) ! output - endif - - call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(trim(subname), & - file=__FILE__,line= __LINE__) - - if (tr_fsd) call icepack_init_fsd_bounds (nfsd, & ! floe size distribution +! +! Initialize CICE model. + + subroutine cice_init + + ! Initialize the basic state, grid and all necessary parameters for + ! running the CICE model. + + use ice_arrays_column, only: hin_max, c_hi_range, alloc_arrays_column + use ice_arrays_column, only: floe_rad_l, floe_rad_c, & + floe_binwidth, c_fsd_range + use ice_state, only: alloc_state + use ice_flux_bgc, only: alloc_flux_bgc + use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & + init_calendar, calendar + use ice_communicate, only: my_task, master_task + use ice_diagnostics, only: init_diags + use ice_domain, only: init_domain_blocks + use ice_domain_size, only: ncat, nfsd + use ice_dyn_eap, only: init_eap, alloc_dyn_eap + use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared + use ice_flux, only: init_coupler_flux, init_history_therm, & + init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux + use ice_forcing, only: init_forcing_ocn + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & + faero_default, faero_optics, alloc_forcing_bgc, fiso_default + use ice_grid, only: init_grid1, init_grid2, alloc_grid + use ice_history, only: init_hist, accum_hist + use ice_restart_shared, only: restart, runtype + use ice_init, only: input_data, init_state + use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers + use ice_kinds_mod + use ice_restoring, only: ice_HaloRestore_init + use ice_timers, only: timer_total, init_ice_timers, ice_timer_start + use ice_transport_driver, only: init_transport + + logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & + tr_iso, tr_fsd, wave_spec + character(len=*), parameter :: subname = '(cice_init)' + + call init_fileunits ! unit numbers + + call icepack_configure() ! initialize icepack + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + call input_data ! namelist variables + call input_zbgc ! vertical biogeochemistry namelist + call count_tracers ! count tracers + + call init_domain_blocks ! set up block decomposition + call init_grid1 ! domain distribution + call alloc_grid ! allocate grid arrays + call alloc_arrays_column ! allocate column arrays + call alloc_state ! allocate state arrays + call alloc_dyn_shared ! allocate dyn shared arrays + call alloc_flux_bgc ! allocate flux_bgc arrays + call alloc_flux ! allocate flux arrays + call init_ice_timers ! initialize all timers + call ice_timer_start(timer_total) ! start timing entire run + call init_grid2 ! grid variables + call init_zbgc ! vertical biogeochemistry initialization + call init_calendar ! initialize some calendar stuff + call init_hist (dt) ! initialize output history file + + if (kdyn == 2) then + call alloc_dyn_eap ! 
allocate dyn_eap arrays + call init_eap (dt_dyn) ! define eap dynamics parameters, variables + else ! for both kdyn = 0 or 1 + call init_evp (dt_dyn) ! define evp dynamics parameters, variables + endif + + call init_coupler_flux ! initialize fluxes exchanged with coupler + call init_thermo_vertical ! initialize vertical thermodynamics + + call icepack_init_itd(ncat=ncat, hin_max=hin_max) ! ice thickness distribution + if (my_task == master_task) then + call icepack_init_itd_hist(ncat=ncat, hin_max=hin_max, c_hi_range=c_hi_range) ! output + endif + + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_fsd) call icepack_init_fsd_bounds (nfsd, & ! floe size distribution floe_rad_l, & ! fsd size lower bound in m (radius) floe_rad_c, & ! fsd size bin centre in m (radius) floe_binwidth, & ! fsd size bin width in m (radius) c_fsd_range, & ! string for history output write_diags=(my_task == master_task)) ! write diag on master only - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - call calendar() ! determine the initial date - - call init_state ! initialize the ice state - call init_transport ! initialize horizontal transport - call ice_HaloRestore_init ! restored boundary conditions - - call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & - wave_spec_out=wave_spec, snw_aging_table_out=snw_aging_table) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(trim(subname), & - file=__FILE__,line= __LINE__) - - if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays - - call init_restart ! initialize restart variables - call init_diags ! initialize diagnostic output points - call init_history_therm ! initialize thermo history variables - call init_history_dyn ! initialize dynamic history variables - call icepack_init_radiation ! initialize icepack shortwave tables - - call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) - call icepack_query_tracer_flags(tr_iso_out=tr_iso, tr_snow_out=tr_snow) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(trim(subname), & - file=__FILE__,line= __LINE__) - - ! snow aging lookup table initialization - if (tr_snow) then ! advanced snow physics - call icepack_init_snow() - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - if (snw_aging_table(1:4) /= 'test') then - call init_snowtable() - endif - endif - - if (wave_spec) then - call icepack_init_wave(nfreq=nfreq, & - wave_spectrum_profile=wave_spectrum_profile, wavefreq=wavefreq, dwavefreq=dwavefreq) - end if - - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing - ! in prep_radiation. - - if (trim(runtype) == 'continue' .or. restart) then - call init_shortwave ! initialize radiative transfer - end if - - !-------------------------------------------------------------------- - ! coupler communication or forcing data initialization - !-------------------------------------------------------------------- - - if (z_tracers) call get_atm_bgc ! 
biogeochemistry - - if (runtype == 'initial' .and. .not. restart) then - call init_shortwave ! initialize radiative transfer using current swdn - end if - - call init_flux_atm ! initialize atmosphere fluxes sent to coupler - call init_flux_ocn ! initialize ocean fluxes sent to coupler - - call dealloc_grid ! deallocate temporary grid arrays - - end subroutine cice_init2 - - !======================================================================= - - subroutine init_restart() - - use ice_arrays_column, only: dhsn - use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: calendar - use ice_constants, only: c0 - use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_iso, n_aero, nfsd, nslyr - use ice_dyn_eap, only: read_restart_eap - use ice_dyn_shared, only: kdyn - use ice_flux, only: Tf - use ice_grid, only: tmask - use ice_init, only: ice_ic - use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & - init_meltponds_lvl, init_meltponds_topo, & - init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd - use ice_restart_column, only: restart_age, read_restart_age, & - restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & - restart_pond_lvl, read_restart_pond_lvl, & - restart_pond_topo, read_restart_pond_topo, & - restart_snow, read_restart_snow, & - restart_fsd, read_restart_fsd, & - restart_iso, read_restart_iso, & - restart_aero, read_restart_aero, & - restart_hbrine, read_restart_hbrine, & - restart_bgc - use ice_restart_driver, only: restartfile - use ice_restart_shared, only: runtype, restart - use ice_state ! almost everything - - integer(kind=int_kind) :: & + call calendar(time) ! determine the initial date + + ! TODO: - why is this being called when you are using CMEPS? + call init_forcing_ocn(dt) ! initialize sss and sst from data + + call init_state ! initialize the ice state + call init_transport ! initialize horizontal transport + call ice_HaloRestore_init ! restored boundary conditions + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & + wave_spec_out=wave_spec) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays + + call init_restart ! initialize restart variables + call init_diags ! initialize diagnostic output points + call init_history_therm ! initialize thermo history variables + call init_history_dyn ! initialize dynamic history variables + + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_aero .or. tr_zaero) then + call faero_optics !initialize aerosol optical property tables + end if + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing + ! in prep_radiation. + + if (trim(runtype) == 'continue' .or. restart) then + call init_shortwave ! initialize radiative transfer + end if + + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- + + if (z_tracers) call get_atm_bgc ! biogeochemistry + + if (runtype == 'initial' .and. .not. restart) then + call init_shortwave ! 
initialize radiative transfer using current swdn + end if + + call init_flux_atm ! initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + end subroutine cice_init + +!======================================================================= + + subroutine init_restart + + use ice_arrays_column, only: dhsn + use ice_blocks, only: nx_block, ny_block + use ice_calendar, only: time, calendar + use ice_constants, only: c0 + use ice_domain, only: nblocks + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd + use ice_dyn_eap, only: read_restart_eap + use ice_dyn_shared, only: kdyn + use ice_grid, only: tmask + use ice_init, only: ice_ic + use ice_init_column, only: init_age, init_FY, init_lvl, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd + use ice_restart_column, only: restart_age, read_restart_age, & + restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & + restart_pond_lvl, read_restart_pond_lvl, & + restart_pond_topo, read_restart_pond_topo, & + restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & + restart_aero, read_restart_aero, & + restart_hbrine, read_restart_hbrine, & + restart_zsal, restart_bgc + use ice_restart_driver, only: restartfile + use ice_restart_shared, only: runtype, restart + use ice_state ! almost everything + + integer(kind=int_kind) :: & i, j , & ! horizontal indices iblk ! block index - logical(kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & - tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, tr_snow, & - skl_bgc, z_tracers - integer(kind=int_kind) :: & - ntrcr - integer(kind=int_kind) :: & - nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_smice, nt_smliq, nt_rhos, nt_rsnw, & - nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice - - character(len=*), parameter :: subname = '(init_restart)' - !---------------------------------------------------- - - call icepack_query_tracer_sizes(ntrcr_out=ntrcr) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) + logical(kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & + skl_bgc, z_tracers, solve_zsal + integer(kind=int_kind) :: & + ntrcr + integer(kind=int_kind) :: & + nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice + + character(len=*), parameter :: subname = '(init_restart)' + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) - call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) - call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & - tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) - call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & - nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & - nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & - nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & - nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & - nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) - call 
icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then - ! start from core restart file - call restartfile() ! given by pointer in ice_in - call calendar() ! update time parameters - if (kdyn == 2) call read_restart_eap ! EAP - else if (restart) then ! ice_ic = core restart file - call restartfile (ice_ic) ! or 'default' or 'none' -!!! uncomment to create netcdf - ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file -!!! uncomment if EAP restart data exists - ! if (kdyn == 2) call read_restart_eap - endif - - ! tracers - ! ice age tracer - if (tr_iage) then - if (trim(runtype) == 'continue') & - restart_age = .true. - if (restart_age) then - call read_restart_age - else - do iblk = 1, nblocks - call init_age(trcrn(:,:,nt_iage,:,iblk)) - enddo ! iblk - endif - endif - ! first-year area tracer - if (tr_FY) then - if (trim(runtype) == 'continue') restart_FY = .true. - if (restart_FY) then - call read_restart_FY - else - do iblk = 1, nblocks - call init_FY(trcrn(:,:,nt_FY,:,iblk)) - enddo ! iblk - endif - endif - ! level ice tracer - if (tr_lvl) then - if (trim(runtype) == 'continue') restart_lvl = .true. - if (restart_lvl) then - call read_restart_lvl - else - do iblk = 1, nblocks - call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & - trcrn(:,:,nt_vlvl,:,iblk)) - enddo ! iblk - endif - endif - ! level-ice melt ponds - if (tr_pond_lvl) then - if (trim(runtype) == 'continue') & - restart_pond_lvl = .true. - if (restart_pond_lvl) then - call read_restart_pond_lvl - else - do iblk = 1, nblocks - call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk), & - trcrn(:,:,nt_ipnd,:,iblk), & - dhsn(:,:,:,iblk)) - enddo ! iblk - endif - endif - ! topographic melt ponds - if (tr_pond_topo) then - if (trim(runtype) == 'continue') & - restart_pond_topo = .true. - if (restart_pond_topo) then - call read_restart_pond_topo - else - do iblk = 1, nblocks - call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & - trcrn(:,:,nt_hpnd,:,iblk), & - trcrn(:,:,nt_ipnd,:,iblk)) - enddo ! iblk - endif ! .not. restart_pond - endif - ! snow redistribution/metamorphism - if (tr_snow) then - if (trim(runtype) == 'continue') restart_snow = .true. - if (restart_snow) then - call read_restart_snow - else - do iblk = 1, nblocks - call init_snowtracers(trcrn(:,:,nt_smice:nt_smice+nslyr-1,:,iblk), & - trcrn(:,:,nt_smliq:nt_smliq+nslyr-1,:,iblk), & - trcrn(:,:,nt_rhos :nt_rhos +nslyr-1,:,iblk), & - trcrn(:,:,nt_rsnw :nt_rsnw +nslyr-1,:,iblk)) - enddo ! iblk - endif - endif - - ! floe size distribution - if (tr_fsd) then - if (trim(runtype) == 'continue') restart_fsd = .true. 
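! ----------------------------------------------------------------------
! Editor's note (not part of the patch): every tracer handled in
! init_restart follows the same pattern visible in the surrounding
! hunks: on a 'continue' run the tracer's restart flag is forced on,
! and the tracer is then either read from its restart file or
! initialized block by block with default values. The sketch below
! abstracts that decision; read_tracer/init_tracer and the variable
! names are illustrative placeholders, not CICE routines.
program restart_pattern_sketch
  implicit none
  logical           :: restart_tracer
  character(len=16) :: runtype

  runtype        = 'continue'
  restart_tracer = .false.

  ! A continued run must restore the tracer from its restart file.
  if (trim(runtype) == 'continue') restart_tracer = .true.

  if (restart_tracer) then
     call read_tracer   ! stands in for the read_restart_<tracer> calls
  else
     call init_tracer   ! stands in for the per-block init_<tracer> loops
  end if

contains

  subroutine read_tracer
     print *, 'reading tracer from restart file'
  end subroutine read_tracer

  subroutine init_tracer
     print *, 'initializing tracer with default values'
  end subroutine init_tracer

end program restart_pattern_sketch
! ----------------------------------------------------------------------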
- if (restart_fsd) then - call read_restart_fsd - else - call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) - endif - endif - ! isotopes - if (tr_iso) then - if (trim(runtype) == 'continue') restart_iso = .true. - if (restart_iso) then - call read_restart_iso - else - do iblk = 1, nblocks - call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & - trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) - enddo ! iblk - endif - endif - - if (tr_aero) then ! ice aerosol - if (trim(runtype) == 'continue') restart_aero = .true. - if (restart_aero) then - call read_restart_aero - else - do iblk = 1, nblocks - call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) - enddo ! iblk - endif ! .not. restart_aero - endif - - if (trim(runtype) == 'continue') then - if (tr_brine) & - restart_hbrine = .true. - if (skl_bgc .or. z_tracers) & - restart_bgc = .true. - endif - - if (tr_brine .or. skl_bgc) then ! brine height tracer - call init_hbrine - if (tr_brine .and. restart_hbrine) call read_restart_hbrine - endif - - if (skl_bgc .or. z_tracers) then ! biogeochemistry - if (tr_fsd) then - write (nu_diag,*) 'FSD implementation incomplete for use with BGC' - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + if (trim(runtype) == 'continue') then + ! start from core restart file + call restartfile() ! given by pointer in ice_in + call calendar(time) ! update time parameters + if (kdyn == 2) call read_restart_eap ! EAP + else if (restart) then ! ice_ic = core restart file + call restartfile (ice_ic) ! or 'default' or 'none' + !!! uncomment to create netcdf + ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file + !!! uncomment if EAP restart data exists + ! if (kdyn == 2) call read_restart_eap + endif + + ! tracers + ! ice age tracer + if (tr_iage) then + if (trim(runtype) == 'continue') & + restart_age = .true. + if (restart_age) then + call read_restart_age + else + do iblk = 1, nblocks + call init_age(trcrn(:,:,nt_iage,:,iblk)) + enddo ! iblk + endif + endif + ! first-year area tracer + if (tr_FY) then + if (trim(runtype) == 'continue') restart_FY = .true. + if (restart_FY) then + call read_restart_FY + else + do iblk = 1, nblocks + call init_FY(trcrn(:,:,nt_FY,:,iblk)) + enddo ! iblk + endif + endif + ! level ice tracer + if (tr_lvl) then + if (trim(runtype) == 'continue') restart_lvl = .true. + if (restart_lvl) then + call read_restart_lvl + else + do iblk = 1, nblocks + call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & + trcrn(:,:,nt_vlvl,:,iblk)) + enddo ! iblk + endif + endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif + ! level-ice melt ponds + if (tr_pond_lvl) then + if (trim(runtype) == 'continue') & + restart_pond_lvl = .true. + if (restart_pond_lvl) then + call read_restart_pond_lvl + else + do iblk = 1, nblocks + call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk), & + dhsn(:,:,:,iblk)) + enddo ! iblk + endif + endif + ! topographic melt ponds + if (tr_pond_topo) then + if (trim(runtype) == 'continue') & + restart_pond_topo = .true. 
+ if (restart_pond_topo) then + call read_restart_pond_topo + else + do iblk = 1, nblocks + call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk)) + enddo ! iblk + endif ! .not. restart_pond + endif + ! floe size distribution + if (tr_fsd) then + if (trim(runtype) == 'continue') restart_fsd = .true. + if (restart_fsd) then + call read_restart_fsd + else + call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) + endif + endif + + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! iblk + endif + endif + + if (tr_aero) then ! ice aerosol + if (trim(runtype) == 'continue') restart_aero = .true. + if (restart_aero) then + call read_restart_aero + else + do iblk = 1, nblocks + call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) + enddo ! iblk + endif ! .not. restart_aero + endif + + if (trim(runtype) == 'continue') then + if (tr_brine) & + restart_hbrine = .true. + if (solve_zsal) & + restart_zsal = .true. + if (skl_bgc .or. z_tracers) & + restart_bgc = .true. + endif + + if (tr_brine .or. skl_bgc) then ! brine height tracer + call init_hbrine + if (tr_brine .and. restart_hbrine) call read_restart_hbrine + endif + + if (solve_zsal .or. skl_bgc .or. z_tracers) then ! biogeochemistry + if (tr_fsd) then + write (nu_diag,*) 'FSD implementation incomplete for use with BGC' + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - endif - call init_bgc - endif - - !----------------------------------------------------------------- - ! aggregate tracers - !----------------------------------------------------------------- - - !$OMP PARALLEL DO PRIVATE(iblk) - do iblk = 1, nblocks - do j = 1, ny_block - do i = 1, nx_block - if (tmask(i,j,iblk)) then - call icepack_aggregate(ncat = ncat, & - aicen = aicen(i,j,:,iblk), & - trcrn = trcrn(i,j,:,:,iblk), & - vicen = vicen(i,j,:,iblk), & - vsnon = vsnon(i,j,:,iblk), & - aice = aice (i,j, iblk), & - trcr = trcr (i,j,:,iblk), & - vice = vice (i,j, iblk), & - vsno = vsno (i,j, iblk), & - aice0 = aice0(i,j, iblk), & - ntrcr = ntrcr, & - trcr_depend = trcr_depend, & - trcr_base = trcr_base, & - n_trcr_strata = n_trcr_strata, & - nt_strata = nt_strata, & - Tf = Tf(i,j,iblk)) - else - ! tcraig, reset all tracer values on land to zero - trcrn(i,j,:,:,iblk) = c0 - endif - enddo - enddo - enddo - !$OMP END PARALLEL DO - - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + endif + call init_bgc + endif + + !----------------------------------------------------------------- + ! 
aggregate tracers + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j,iblk)) then + call icepack_aggregate(ncat = ncat, & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + ntrcr = ntrcr, & + trcr_depend = trcr_depend, & + trcr_base = trcr_base, & + n_trcr_strata = n_trcr_strata, & + nt_strata = nt_strata) + else + ! tcraig, reset all tracer values on land to zero + trcrn(i,j,:,:,iblk) = c0 + endif + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - end subroutine init_restart - !======================================================================= + end subroutine init_restart + +!======================================================================= -end module CICE_InitMod + end module CICE_InitMod !======================================================================= diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index c2cae81cb..644ef72fa 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -16,7 +16,6 @@ module CICE_RunMod use ice_kinds_mod use cice_wrapper_mod, only : t_startf, t_stopf, t_barrierf - use cice_wrapper_mod, only : ufs_logfhour use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 @@ -45,7 +44,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: istep, istep1, dt, calendar, advance_timestep + use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -57,9 +56,9 @@ subroutine CICE_Run tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd character(len=*), parameter :: subname = '(CICE_Run)' - !-------------------------------------------------------------------- - ! initialize error code and step timer - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! initialize error code and step timer + !-------------------------------------------------------------------- call ice_timer_start(timer_step) ! start timing entire run @@ -74,28 +73,32 @@ subroutine CICE_Run if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - !-------------------------------------------------------------------- - ! timestep loop - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! 
timestep loop + !-------------------------------------------------------------------- - call ice_timer_start(timer_couple) ! atm/ocn coupling + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date - call advance_timestep() ! advance timestep and update calendar data + call ice_timer_start(timer_couple) ! atm/ocn coupling if (z_tracers) call get_atm_bgc ! biogeochemistry call init_flux_atm ! Initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler + call calendar(time) ! at the end of the timestep + call ice_timer_stop(timer_couple) ! atm/ocn coupling call ice_step - !-------------------------------------------------------------------- - ! end of timestep loop - !-------------------------------------------------------------------- + !-------------------------------------------------------------------- + ! end of timestep loop + !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! end timestepping loop timer end subroutine CICE_Run @@ -108,39 +111,36 @@ end subroutine CICE_Run subroutine ice_step - use ice_constants, only: c3600 use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_calendar, only: idate, myear, mmonth, mday, msec, timesecs - use ice_calendar, only: calendar_sec2hms, write_history, nstreams, histfreq - use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice - use ice_diagnostics_bgc, only: hbrine_diags, bgc_diags + use ice_calendar, only: idate, sec + use ice_diagnostics, only: init_mass_diags, runtime_diags + use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_lvl, & + write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_iso, write_restart_bgc, write_restart_hbrine, & - write_restart_snow + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, step_prep, step_dyn_wave, step_snow + biogeochemistry, save_init, step_dyn_wave use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite - use ice_communicate, only: MPI_COMM_ICE, my_task, master_task + use ice_communicate, only: MPI_COMM_ICE use ice_prescribed_mod integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! thermodynamics is off when ktherm = -1 @@ -148,29 +148,19 @@ subroutine ice_step offset ! 
d(age)/dt time offset logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & - tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & - calc_Tsfc, skl_bgc, z_tracers, wave_spec + tr_iage, tr_FY, tr_lvl, tr_fsd, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & + calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' - character (len=char_len) :: plabeld - integer (kind=int_kind) :: hh,mm,ss,ns - character (len=char_len) :: logmsg - - if (debug_model) then - plabeld = 'beginning time step' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, ktherm_out=ktherm, wave_spec_out=wave_spec) + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & + wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -195,12 +185,12 @@ subroutine ice_step if (prescribed_ice) then ! read prescribed ice call t_barrierf('cice_run_presc_BARRIER',MPI_COMM_ICE) call t_startf ('cice_run_presc') - call ice_prescribed_run(idate, msec) + call ice_prescribed_run(idate, sec) call t_stopf ('cice_run_presc') endif #endif - call step_prep + call save_init call ice_timer_start(timer_column) ! column physics call ice_timer_start(timer_thermo) ! thermodynamics @@ -215,33 +205,15 @@ subroutine ice_step !----------------------------------------------------------------- if (calc_Tsfc) call prep_radiation (iblk) - if (debug_model) then - plabeld = 'post prep_radiation' - call debug_ice (iblk, plabeld) - endif !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics - if (debug_model) then - plabeld = 'post step_therm1' - call debug_ice (iblk, plabeld) - endif - call biogeochemistry (dt, iblk) ! biogeochemistry - if (debug_model) then - plabeld = 'post biogeochemistry' - call debug_ice (iblk, plabeld) - endif - if (.not.prescribed_ice) & call step_therm2 (dt, iblk) ! ice thickness distribution thermo - if (debug_model) then - plabeld = 'post step_therm2' - call debug_ice (iblk, plabeld) - endif endif ! ktherm > 0 @@ -250,8 +222,7 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = dt - call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & - dagedt=dagedtt, offset=offset) + call update_state (dt, daidtt, dvidtt, dagedtt, offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -270,12 +241,6 @@ subroutine ice_step ! momentum, stress, transport call step_dyn_horiz (dt_dyn) - if (debug_model) then - plabeld = 'post step_dyn_horiz' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif ! 
ridging !$OMP PARALLEL DO PRIVATE(iblk) @@ -283,25 +248,12 @@ subroutine ice_step if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) enddo !$OMP END PARALLEL DO - if (debug_model) then - plabeld = 'post step_dyn_ridge' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif ! clean up, update tendency diagnostics offset = c0 - call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & - dagedt=dagedtd, offset=offset) + call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) enddo - if (debug_model) then - plabeld = 'post dynamics' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif endif ! not prescribed ice @@ -312,36 +264,18 @@ subroutine ice_step call ice_timer_start(timer_column) ! column physics call ice_timer_start(timer_thermo) ! thermodynamics - !----------------------------------------------------------------- - ! snow redistribution and metamorphosis - !----------------------------------------------------------------- - - if (tr_snow) then ! advanced snow physics - do iblk = 1, nblocks - call step_snow (dt, iblk) - enddo - call update_state (dt=dt) ! clean up - endif - !MHRI: CHECK THIS OMP !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks if (ktherm >= 0) call step_radiation (dt, iblk) - if (debug_model) then - plabeld = 'post step_radiation' - call debug_ice (iblk, plabeld) - endif !----------------------------------------------------------------- ! get ready for coupling and the next time step !----------------------------------------------------------------- call coupling_prep (iblk) - if (debug_model) then - plabeld = 'post coupling_prep' - call debug_ice (iblk, plabeld) - endif + enddo ! iblk !$OMP END PARALLEL DO @@ -360,6 +294,7 @@ subroutine ice_step call ice_timer_start(timer_diags) ! diagnostics if (mod(istep,diagfreq) == 0) then call runtime_diags(dt) ! log file + if (solve_zsal) call zsal_diags if (skl_bgc .or. z_tracers) call bgc_diags if (tr_brine) call hbrine_diags endif @@ -375,31 +310,23 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl + if (tr_pond_cesm) call write_restart_pond_cesm if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo - if (tr_snow) call write_restart_snow if (tr_fsd) call write_restart_fsd if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero - if (skl_bgc .or. z_tracers) & - call write_restart_bgc + if (solve_zsal .or. skl_bgc .or. z_tracers) & + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart endif call ice_timer_stop(timer_readwrite) ! reading/writing - if (my_task == master_task) then - do ns = 1,nstreams - if (write_history(ns) .and. histfreq(ns) .eq. 'h') then - call calendar_sec2hms(msec,hh,mm,ss) - write(logmsg,'(6(i4,2x))')myear,mmonth,mday,hh,mm,ss - call ufs_logfhour(trim(logmsg),timesecs/c3600) - end if - end do - end if - end subroutine ice_step + end subroutine ice_step + !======================================================================= ! ! 
Prepare for coupling @@ -409,7 +336,7 @@ end subroutine ice_step subroutine coupling_prep (iblk) use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, snowfracn + albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn use ice_blocks, only: nx_block, ny_block, get_block, block use ice_domain, only: blocks_ice use ice_calendar, only: dt, nstreams @@ -424,8 +351,9 @@ subroutine coupling_prep (iblk) fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & - flux_bio, flux_bio_ai, fnit, fsil, famm, fdmsp, fdms, fhum, & - fdust, falgalN, fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai, & + fnit, fsil, famm, fdmsp, fdms, fhum, fdust, falgalN, & + fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn use ice_grid, only: tmask use ice_state, only: aicen, aice use ice_state, only: aice_init @@ -433,12 +361,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -578,6 +506,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -598,7 +528,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -623,12 +553,13 @@ subroutine coupling_prep (iblk) faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & + fzsal (:,:,iblk), fzsal_g (:,:,iblk), & flux_bio (:,:,1:nbtrcr,iblk), & Qref_iso =Qref_iso (:,:,:,iblk), & fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk), & Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) ) - + !----------------------------------------------------------------- ! Define ice-ocean bgc fluxes !----------------------------------------------------------------- @@ -649,16 +580,16 @@ subroutine coupling_prep (iblk) if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling @@ -667,10 +598,10 @@ end subroutine coupling_prep !======================================================================= ! ! 
If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -704,15 +635,14 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! - Lsub, & ! rLsub ! 1/Lsub character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) + call icepack_query_parameters(puny_out=puny) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) diff --git a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 index d0aafbb43..0da2ed491 100644 --- a/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/cice_wrapper_mod.F90 @@ -1,93 +1,25 @@ module cice_wrapper_mod #ifdef CESMCOUPLED - use perf_mod , only : t_startf, t_stopf, t_barrierf - use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit + use perf_mod , only : t_startf, t_stopf, t_barrierf + use shr_file_mod , only : shr_file_getlogunit, shr_file_setlogunit - use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long - - implicit none - - real(dbl_kind) :: wtime = 0.0 -contains - ! Define stub routines that do nothing - they are just here to avoid - ! having cppdefs in the main program - subroutine ufs_settimer(timevalue) - real(dbl_kind), intent(inout) :: timevalue - end subroutine ufs_settimer - subroutine ufs_logtimer(nunit,elapsedsecs,string,runtimelog,time0) - integer, intent(in) :: nunit - integer(int_kind), intent(in) :: elapsedsecs - character(len=*), intent(in) :: string - logical, intent(in) :: runtimelog - real(dbl_kind), intent(in) :: time0 - end subroutine ufs_logtimer - subroutine ufs_file_setLogUnit(filename,nunit,runtimelog) - character(len=*), intent(in) :: filename - logical, intent(in) :: runtimelog - integer, intent(out) :: nunit - end subroutine ufs_file_setLogUnit - subroutine ufs_logfhour(msg,hour) - character(len=*), intent(in) :: msg - real(dbl_kind), intent(in) :: hour - end subroutine ufs_logfhour #else - - use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long - - implicit none - - real(dbl_kind) :: wtime = 0.0 contains - subroutine ufs_settimer(timevalue) - real(dbl_kind), intent(inout) :: timevalue - real(dbl_kind) :: MPI_Wtime - timevalue = MPI_Wtime() - end subroutine ufs_settimer - - subroutine ufs_logtimer(nunit,elapsedsecs,string,runtimelog,time0) - integer, intent(in) :: nunit - integer(int_kind), intent(in) :: elapsedsecs - character(len=*), intent(in) :: string - logical, intent(in) :: runtimelog - real(dbl_kind), intent(in) :: time0 - real(dbl_kind) :: MPI_Wtime, timevalue - if (.not. runtimelog) return - if (time0 > 0.) 
then - timevalue = MPI_Wtime()-time0 - write(nunit,*)elapsedsecs,' CICE '//trim(string),timevalue - end if - end subroutine ufs_logtimer - subroutine ufs_file_setLogUnit(filename,nunit,runtimelog) - character(len=*), intent(in) :: filename - logical, intent(in) :: runtimelog - integer, intent(out) :: nunit - if (.not. runtimelog) return - open (newunit=nunit, file=trim(filename)) - end subroutine ufs_file_setLogUnit + ! These are just stub routines put in place to remove - subroutine ufs_logfhour(msg,hour) - character(len=*), intent(in) :: msg - real(dbl_kind), intent(in) :: hour - character(len=char_len) :: filename - integer(int_kind) :: nunit - write(filename,'(a,i3.3)')'log.ice.f',int(hour) - open(newunit=nunit,file=trim(filename)) - write(nunit,'(a)')'completed: cice' - write(nunit,'(a,f10.3)')'forecast hour:',hour - write(nunit,'(a)')'valid time: '//trim(msg) - close(nunit) - end subroutine ufs_logfhour - - ! Define stub routines that do nothing - they are just here to avoid - ! having cppdefs in the main program subroutine shr_file_setLogUnit(nunit) integer, intent(in) :: nunit + ! do nothing for this stub - its just here to replace + ! having cppdefs in the main program end subroutine shr_file_setLogUnit subroutine shr_file_getLogUnit(nunit) integer, intent(in) :: nunit + ! do nothing for this stub - its just here to replace + ! having cppdefs in the main program end subroutine shr_file_getLogUnit + subroutine t_startf(string) character(len=*) :: string end subroutine t_startf diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index 6228c0bdd..587e9f5a2 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -15,46 +15,41 @@ module ice_comp_nuopc use NUOPC_Model , only : model_label_SetRunClock => label_SetRunClock use NUOPC_Model , only : model_label_Finalize => label_Finalize use NUOPC_Model , only : NUOPC_ModelGet, SetVM - use ice_constants , only : ice_init_constants, c0 + use ice_constants , only : ice_init_constants use ice_shr_methods , only : chkerr, state_setscalar, state_getscalar, state_diagnose, alarmInit - use ice_shr_methods , only : get_component_instance, state_flddebug - - use ice_import_export , only : ice_import, ice_export, ice_advertise_fields, ice_realize_fields + use ice_shr_methods , only : set_component_logging, get_component_instance + use ice_shr_methods , only : state_flddebug + use ice_import_export , only : ice_import, ice_export + use ice_import_export , only : ice_advertise_fields, ice_realize_fields use ice_domain_size , only : nx_global, ny_global - use ice_grid , only : grid_format, init_grid2 + use ice_domain , only : nblocks, blocks_ice, distrb_info + use ice_blocks , only : block, get_block, nx_block, ny_block, nblocks_x, nblocks_y + use ice_blocks , only : nblocks_tot, get_block_parameter + use ice_distribution , only : ice_distributiongetblockloc + use ice_grid , only : tlon, tlat, hm, tarea, ULON, ULAT use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice use ice_calendar , only : force_restart_now, write_ic - use ice_calendar , only : idate, idate0, mday, mmonth, myear, year_init, month_init, day_init - use ice_calendar , only : msec, dt, calendar, calendar_type, nextsw_cday, istep - use ice_calendar , only : ice_calendar_noleap, ice_calendar_gregorian, use_leap_years + use ice_calendar , only : idate, mday, time, month, daycal, time2sec, year_init + use ice_calendar , only : sec, dt, 
calendar, calendar_type, nextsw_cday, istep use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long - use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name - use ice_fileunits , only : inst_suffix, release_all_fileunits, flush_fileunit - use ice_restart_shared , only : runid, runtype, restart, use_restart_time, restart_dir, restart_file, restart_format, restart_chunksize + use ice_scam , only : scmlat, scmlon, single_column + use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name, inst_suffix, release_all_fileunits, flush_fileunit + use ice_restart_shared , only : runid, runtype, restart_dir, restart_file use ice_history , only : accum_hist - use ice_history_shared , only : history_format, history_chunksize + use CICE_InitMod , only : cice_init + use CICE_RunMod , only : cice_run use ice_exit , only : abort_ice use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only : icepack_init_orbit, icepack_init_parameters, icepack_query_orbit use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters + use ice_timers use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf use cice_wrapper_mod , only : shr_file_getlogunit, shr_file_setlogunit - use cice_wrapper_mod , only : ufs_settimer, ufs_logtimer, ufs_file_setlogunit, wtime #ifdef CESMCOUPLED use shr_const_mod use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT - use ice_scam , only : scmlat, scmlon, scol_mask, scol_frac, scol_ni, scol_nj, scol_area - use nuopc_shr_methods , only : set_component_logging -#else - use ice_shr_methods , only : set_component_logging #endif - use ice_timers - use CICE_InitMod , only : cice_init1, cice_init2 - use CICE_RunMod , only : cice_run - use ice_mesh_mod , only : ice_mesh_set_distgrid, ice_mesh_setmask_from_maskfile, ice_mesh_check - use ice_mesh_mod , only : ice_mesh_init_tlon_tlat_area_hm, ice_mesh_create_scolumn use ice_prescribed_mod , only : ice_prescribed_init - use ice_scam , only : scol_valid, single_column implicit none private @@ -87,22 +82,10 @@ module ice_comp_nuopc character(len=*) , parameter :: orb_variable_year = 'variable_year' character(len=*) , parameter :: orb_fixed_parameters = 'fixed_parameters' - type(ESMF_Mesh) :: ice_mesh + character(len=*),parameter :: shr_cal_noleap = 'NO_LEAP' + character(len=*),parameter :: shr_cal_gregorian = 'GREGORIAN' - integer :: nthrds ! Number of threads to use in this component - integer :: nu_timer = 6 ! Simple timer log, unused except by UFS integer :: dbug = 0 - logical :: profile_memory = .false. - logical :: mastertask - logical :: runtimelog = .false. - integer :: start_ymd ! Start date (YYYYMMDD) - integer :: start_tod ! start time of day (s) - integer :: curr_ymd ! Current date (YYYYMMDD) - integer :: curr_tod ! Current time of day (s) - integer :: stop_ymd ! stop date (YYYYMMDD) - integer :: stop_tod ! stop time of day (sec) - integer :: ref_ymd ! Reference date (YYYYMMDD) - integer :: ref_tod ! reference time of day (s) integer , parameter :: debug_import = 0 ! internal debug level integer , parameter :: debug_export = 0 ! 
internal debug level character(*), parameter :: modName = "(ice_comp_nuopc)" @@ -172,10 +155,6 @@ subroutine InitializeP0(gcomp, importState, exportState, clock, rc) type(ESMF_State) :: importState, exportState type(ESMF_Clock) :: clock integer, intent(out) :: rc - - logical :: isPresent, isSet - character(len=64) :: value - character(len=char_len_long) :: logmsg !-------------------------------- rc = ESMF_SUCCESS @@ -185,14 +164,6 @@ subroutine InitializeP0(gcomp, importState, exportState, clock, rc) acceptStringList=(/"IPDv01p"/), rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - profile_memory = .false. - call NUOPC_CompAttributeGet(gcomp, name="ProfileMemory", value=value, & - isPresent=isPresent, isSet=isSet, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) profile_memory=(trim(value)=="true") - write(logmsg,*) profile_memory - call ESMF_LogWrite('CICE_cap:ProfileMemory = '//trim(logmsg), ESMF_LOGMSG_INFO) - end subroutine InitializeP0 !=============================================================================== @@ -207,49 +178,11 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) ! Local variables character(len=char_len_long) :: cvalue - character(len=char_len_long) :: ice_meshfile - character(len=char_len_long) :: ice_maskfile - character(len=char_len_long) :: errmsg - logical :: isPresent, isSet - real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp - type(ESMF_DistGrid) :: ice_distGrid - real(kind=dbl_kind) :: atmiter_conv - real(kind=dbl_kind) :: atmiter_conv_driver - integer (kind=int_kind) :: natmiter - integer (kind=int_kind) :: natmiter_driver - integer :: localPet - integer :: npes - type(ESMF_VM) :: vm - integer :: lmpicom ! local communicator - type(ESMF_Time) :: currTime ! Current time - type(ESMF_Time) :: startTime ! Start time - type(ESMF_Time) :: stopTime ! Stop time - type(ESMF_Time) :: refTime ! Ref time - type(ESMF_TimeInterval) :: timeStep ! Model timestep - type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type - integer :: yy,mm,dd ! Temporaries for time query - integer :: dtime ! time step - integer :: shrlogunit ! original log unit - character(len=char_len) :: starttype ! infodata start type - integer :: lsize ! local size of coupling array - integer :: n,c,g,i,j,m ! indices - integer :: iblk, jblk ! indices - integer :: ig, jg ! indices - integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain - - character(len=char_len_long) :: diag_filename = 'unset' character(len=char_len_long) :: logmsg - character(len=char_len_long) :: single_column_lnd_domainfile - real(dbl_kind) :: scol_lon - real(dbl_kind) :: scol_lat - real(dbl_kind) :: scol_spval - character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist - character(len=char_len) :: tfrz_option_driver ! tfrz_option from cice namelist + logical :: isPresent, isSet character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' !-------------------------------- - call ufs_settimer(wtime) - call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldName", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent .and. 
isSet) then @@ -310,11 +243,90 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) write(logmsg,'(i6)') dbug call ESMF_LogWrite('CICE_cap: dbug = '//trim(logmsg), ESMF_LOGMSG_INFO) - call NUOPC_CompAttributeGet(gcomp, name="RunTimeLog", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) runtimelog=(trim(cvalue)=="true") - write(logmsg,*) runtimelog - call ESMF_LogWrite('CICE_cap:RunTimeLog = '//trim(logmsg), ESMF_LOGMSG_INFO) + + end subroutine InitializeAdvertise + + !=============================================================================== + + subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) + + ! Arguments + type(ESMF_GridComp) :: gcomp + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + type(ESMF_Clock) :: clock + integer, intent(out) :: rc + + ! Local variables + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + type(ESMF_DistGrid) :: distGrid + type(ESMF_Mesh) :: Emesh, EmeshTemp + integer :: spatialDim + integer :: numOwnedElements + real(dbl_kind), pointer :: ownedElemCoords(:) + real(dbl_kind), pointer :: lat(:), latMesh(:) + real(dbl_kind), pointer :: lon(:), lonMesh(:) + integer , allocatable :: gindex_ice(:) + integer , allocatable :: gindex_elim(:) + integer , allocatable :: gindex(:) + integer :: globalID + character(ESMF_MAXSTR) :: cvalue + character(len=char_len) :: tfrz_option + character(ESMF_MAXSTR) :: convCIM, purpComp + type(ESMF_VM) :: vm + type(ESMF_Time) :: currTime ! Current time + type(ESMF_Time) :: startTime ! Start time + type(ESMF_Time) :: stopTime ! Stop time + type(ESMF_Time) :: refTime ! Ref time + type(ESMF_TimeInterval) :: timeStep ! Model timestep + type(ESMF_Calendar) :: esmf_calendar ! esmf calendar + type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type + integer :: start_ymd ! Start date (YYYYMMDD) + integer :: start_tod ! start time of day (s) + integer :: curr_ymd ! Current date (YYYYMMDD) + integer :: curr_tod ! Current time of day (s) + integer :: stop_ymd ! stop date (YYYYMMDD) + integer :: stop_tod ! stop time of day (sec) + integer :: ref_ymd ! Reference date (YYYYMMDD) + integer :: ref_tod ! reference time of day (s) + integer :: yy,mm,dd ! Temporaries for time query + integer :: iyear ! yyyy + integer :: dtime ! time step + integer :: lmpicom + integer :: shrlogunit ! original log unit + character(len=char_len) :: starttype ! infodata start type + integer :: lsize ! local size of coupling array + logical :: isPresent + logical :: isSet + integer :: localPet + integer :: n,c,g,i,j,m ! indices + integer :: iblk, jblk ! indices + integer :: ig, jg ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type(block) :: this_block ! block information for current block + integer :: compid ! component id + character(len=char_len_long) :: tempc1,tempc2 + real(dbl_kind) :: diff_lon + integer :: npes + integer :: num_elim_global + integer :: num_elim_local + integer :: num_elim + integer :: num_ice + integer :: num_elim_gcells ! local number of eliminated gridcells + integer :: num_elim_blocks ! 
local number of eliminated blocks + integer :: num_total_blocks + integer :: my_elim_start, my_elim_end + real(dbl_kind) :: rad_to_deg + integer(int_kind) :: ktherm + character(len=char_len_long) :: diag_filename = 'unset' + character(len=*), parameter :: F00 = "('(ice_comp_nuopc) ',2a,1x,d21.14)" + character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' + !-------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) !---------------------------------------------------------------------------- ! generate local mpi comm @@ -322,28 +334,10 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_GridCompGet(gcomp, vm=vm, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_VMGet(vm, mpiCommunicator=lmpicom, localPet=localPet, PetCount=npes, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#ifdef CESMCOUPLED - call ESMF_VMGet(vm, pet=localPet, peCount=nthrds, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (nthrds==1) then - call NUOPC_CompAttributeGet(gcomp, "nthreads", value=cvalue, rc=rc) - if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=u_FILE_u)) return - read(cvalue,*) nthrds - endif -!$ call omp_set_num_threads(nthrds) -#endif - - !---------------------------------------------------------------------------- - ! Initialize cice communicators - !---------------------------------------------------------------------------- - - call init_communicate(lmpicom) ! initial setup for message passing - mastertask = .false. - if (my_task == master_task) mastertask = .true. - !---------------------------------------------------------------------------- ! determine instance information !---------------------------------------------------------------------------- @@ -351,8 +345,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call get_component_instance(gcomp, inst_suffix, inst_index, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return -! inst_name = "ICE"//trim(inst_suffix) - inst_name = "ICE" + inst_name = "ICE"//trim(inst_suffix) !---------------------------------------------------------------------------- ! start cice timers @@ -368,8 +361,6 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ice_init_constants(omega_in=SHR_CONST_OMEGA, radius_in=SHR_CONST_REARTH, & spval_dbl_in=SHR_CONST_SPVAL) - ! TODO: get tfrz_option from driver - call icepack_init_parameters( & secday_in = SHR_CONST_CDAY, & rhoi_in = SHR_CONST_RHOICE, & @@ -393,7 +384,8 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) depressT_in = 0.054_dbl_kind, & Tocnfrz_in = -34.0_dbl_kind*0.054_dbl_kind, & pi_in = SHR_CONST_PI, & - snowpatch_in = 0.005_dbl_kind) + snowpatch_in = 0.005_dbl_kind, & + dragio_in = 0.00962_dbl_kind) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -405,10 +397,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) !---------------------------------------------------------------------------- ! Get orbital values - ! Note that these values are obtained in a call to init_orbit in ice_shortwave.F90 - ! if CESMCOUPLED is not defined - - call ice_orbital_init(gcomp, clock, nu_diag, mastertask, rc) + call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 
Determine runtype and possibly nextsw_cday @@ -420,12 +409,8 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) runtype = "initial" else if (trim(starttype) == trim('continue') ) then runtype = "continue" - restart = .true. - use_restart_time = .true. else if (trim(starttype) == trim('branch')) then runtype = "continue" - restart = .true. - use_restart_time = .true. else call abort_ice( subname//' ERROR: unknown starttype' ) end if @@ -445,6 +430,23 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) runtype = 'initial' ! determined from the namelist in ice_init if CESMCOUPLED is not defined end if + ! Determine if single column + call NUOPC_CompAttributeGet(gcomp, name='single_column', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) single_column + if (single_column) then + call NUOPC_CompAttributeGet(gcomp, name='scmlon', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlon + call NUOPC_CompAttributeGet(gcomp, name='scmlat', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlat + end if + else + single_column = .false. + end if + ! Determine runid call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (isPresent .and. isSet) then @@ -483,320 +485,83 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_TimeGet( currTime, calkindflag=esmf_caltype, rc=rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (esmf_caltype == ESMF_CALKIND_NOLEAP) then - calendar_type = ice_calendar_noleap + calendar_type = shr_cal_noleap else if (esmf_caltype == ESMF_CALKIND_GREGORIAN) then - calendar_type = ice_calendar_gregorian + calendar_type = shr_cal_gregorian else call abort_ice( subname//'ERROR:: bad calendar for ESMF' ) end if !---------------------------------------------------------------------------- - ! Set cice logging + ! Initialize cice communicators !---------------------------------------------------------------------------- - ! Note - this must be done AFTER the communicators are set - ! Note that sets the nu_diag module variable in ice_fileunits - ! Set the nu_diag_set flag so it's not reset later - - call shr_file_setLogUnit (shrlogunit) - call ufs_file_setLogUnit('./log.ice.timer',nu_timer,runtimelog) - call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, & - isPresent=isPresent, isSet=isSet, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) then - diag_filename = trim(cvalue) - end if - call NUOPC_CompAttributeGet(gcomp, name="logfile", value=cvalue, & - isPresent=isPresent, isSet=isSet, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) then - diag_filename = trim(diag_filename) // '/' // trim(cvalue) - end if - - if (trim(diag_filename) /= 'unset') then - call set_component_logging(gcomp, mastertask, nu_diag, shrlogunit, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - nu_diag_set = .true. - end if + call init_communicate(lmpicom) ! initial setup for message passing !---------------------------------------------------------------------------- - ! First cice initialization phase - before initializing grid info + ! Set cice logging !---------------------------------------------------------------------------- + ! Note - this must be done AFTER the communicators are set + ! 
Note that sets the nu_diag module variable in ice_fileunits + ! Set the nu_diag_set flag so it's not reset later -#ifdef CESMCOUPLED - ! Determine if single column - - call NUOPC_CompAttributeGet(gcomp, name='scol_lon', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlon - call NUOPC_CompAttributeGet(gcomp, name='scol_lat', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scmlat - call NUOPC_CompAttributeGet(gcomp, name='scol_spval', value=cvalue, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_spval - - if (scmlon > scol_spval .and. scmlat > scol_spval) then - call NUOPC_CompAttributeGet(gcomp, name='single_column_lnd_domainfile', & - value=single_column_lnd_domainfile, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (trim(single_column_lnd_domainfile) /= 'UNSET') then - single_column = .true. - else - call abort_ice('single_column_domainfile cannot be null for single column mode') - end if - call NUOPC_CompAttributeGet(gcomp, name='scol_ocnmask', value=cvalue, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_mask - call NUOPC_CompAttributeGet(gcomp, name='scol_ocnfrac', value=cvalue, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_frac - call NUOPC_CompAttributeGet(gcomp, name='scol_ni', value=cvalue, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_ni - call NUOPC_CompAttributeGet(gcomp, name='scol_nj', value=cvalue, rc=rc) + if (my_task == master_task) then + call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_nj - call NUOPC_CompAttributeGet(gcomp, name='scol_area', value=cvalue, rc=rc) + if (isPresent .and. isSet) then + diag_filename = trim(cvalue) + end if + call NUOPC_CompAttributeGet(gcomp, name="logfile", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (chkerr(rc,__LINE__,u_FILE_u)) return - read(cvalue,*) scol_area - - call ice_mesh_create_scolumn(scmlon, scmlat, ice_mesh, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - scol_valid = (scol_mask == 1) - if (.not. scol_valid) then - ! Read the cice namelist as part of the call to cice_init1 - ! Note that if single_column is true and scol_valid is not - will never get here - call t_startf ('cice_init1') - call cice_init1 - call t_stopf ('cice_init1') - ! Advertise fields - call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - call t_stopf ('cice_init_total') - - ! ******************* - ! *** RETURN HERE *** - ! ******************* - RETURN + if (isPresent .and. isSet) then + diag_filename = trim(diag_filename) // '/' // trim(cvalue) end if - end if - ! Read the cice namelist as part of the call to cice_init1 - ! Note that if single_column is true and scol_valid is not - will never get here - call t_startf ('cice_init1') - call cice_init1 - call t_stopf ('cice_init1') - - !----------------------------------------------------------------- - ! Advertise fields - !----------------------------------------------------------------- - call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - - ! Form of ocean freezing temperature - ! 'minus1p8' = -1.8 C - ! 'linear_salt' = -depressT * sss - ! 
'mushy' conforms with ktherm=2 - call NUOPC_CompAttributeGet(gcomp, name="tfreeze_option", value=tfrz_option_driver, & - isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (.not. isPresent) then - tfrz_option_driver = 'linear_salt' - end if - call icepack_query_parameters( tfrz_option_out=tfrz_option) - if (tfrz_option_driver /= tfrz_option) then - write(errmsg,'(a)') trim(subname)//'WARNING: tfrz_option from driver '//trim(tfrz_option_driver)//& - ' is overwriting tfrz_option from cice namelist '//trim(tfrz_option) - if (mastertask) write(nu_diag,*) trim(errmsg) - call icepack_warnings_flush(nu_diag) - call icepack_init_parameters(tfrz_option_in=tfrz_option_driver) - endif - - ! Flux convergence tolerance - always use the driver attribute value - call NUOPC_CompAttributeGet(gcomp, name="flux_convergence", value=cvalue, & - isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) atmiter_conv_driver - call icepack_query_parameters( atmiter_conv_out=atmiter_conv) - if (atmiter_conv_driver /= atmiter_conv) then - write(errmsg,'(a,d13.5,a,d13.5)') trim(subname)//'WARNING: atmiter_ from driver ',& - atmiter_conv_driver,' is overwritting atmiter_conv from cice namelist ',atmiter_conv - if(mastertask) write(nu_diag,*) trim(errmsg) - call icepack_warnings_flush(nu_diag) - call icepack_init_parameters(atmiter_conv_in=atmiter_conv_driver) + if (trim(diag_filename) /= 'unset') then + open(newunit=nu_diag, file=trim(diag_filename)) + nu_diag_set = .true. end if end if - ! Number of iterations for boundary layer calculations - call NUOPC_CompAttributeGet(gcomp, name="flux_max_iteration", value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent) then - read(cvalue,*) natmiter_driver - else - natmiter_driver = 5 - end if - call icepack_query_parameters( natmiter_out=natmiter) - if (natmiter_driver /= natmiter) then - write(errmsg,'(a,i8,a,i8)') trim(subname)//'error: natmiter_driver ',natmiter_driver, & - ' must be the same as natmiter from cice namelist ',natmiter - call abort_ice(trim(errmsg)) - endif - - ! Netcdf output created by PIO - call NUOPC_CompAttributeGet(gcomp, name="pio_typename", value=cvalue, & - isPresent=isPresent, isSet=isSet, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) then - if (trim(history_format)/='cdf1' .and. mastertask) then - write(nu_diag,*) trim(subname)//history_format//'WARNING: history_format from cice_namelist ignored' - write(nu_diag,*) trim(subname)//'WARNING: using '//trim(cvalue)//' from ICE_modelio' - endif - if (trim(restart_format)/='cdf1' .and. mastertask) then - write(nu_diag,*) trim(subname)//restart_format//'WARNING: restart_format from cice_namelist ignored' - write(nu_diag,*) trim(subname)//'WARNING: using '//trim(cvalue)//' from ICE_modelio' - endif - - ! The only reason to set these is to detect in ice_history_write if the chunk/deflate settings are ok. - select case (trim(cvalue)) - case ('netcdf4p') - history_format='hdf5' - restart_format='hdf5' - case ('netcdf4c') - if (mastertask) write(nu_diag,*) trim(subname)//'WARNING: pio_typename = netcdf4c is superseded, use netcdf4p' - history_format='hdf5' - restart_format='hdf5' - case default !pio_typename=netcdf or pnetcdf - ! do nothing - end select - else - if(mastertask) write(nu_diag,*) trim(subname)//'WARNING: pio_typename from driver needs to be set for netcdf output to work' - end if - - - -#else - - ! 
Read the cice namelist as part of the call to cice_init1 - call t_startf ('cice_init1') - call cice_init1 - call t_stopf ('cice_init1') - - !----------------------------------------------------------------- - ! Advertise fields - !----------------------------------------------------------------- - call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - -#endif - !---------------------------------------------------------------------------- - ! Initialize grid info + ! Initialize cice !---------------------------------------------------------------------------- - if (single_column .and. scol_valid) then - call ice_mesh_init_tlon_tlat_area_hm() - else - ! Determine mesh input file - call NUOPC_CompAttributeGet(gcomp, name='mesh_ice', value=ice_meshfile, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - ! Determine mask input file - call NUOPC_CompAttributeGet(gcomp, name='mesh_mask', value=cvalue, isPresent=isPresent, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) then - ice_maskfile = trim(cvalue) - else - ice_maskfile = ice_meshfile - end if - if (my_task == master_task) then - write(nu_diag,*)'mesh file for cice domain is ',trim(ice_meshfile) - write(nu_diag,*)'mask file for cice domain is ',trim(ice_maskfile) - end if - - ! Determine the model distgrid using the decomposition obtained in - ! call to init_grid1 called from cice_init1 - call ice_mesh_set_distgrid(localpet, npes, ice_distgrid, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - ! Read in the ice mesh on the cice distribution - ice_mesh = ESMF_MeshCreate(filename=trim(ice_meshfile), fileformat=ESMF_FILEFORMAT_ESMFMESH, & - elementDistGrid=ice_distgrid, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - ! Initialize the cice mesh and the cice mask - if (trim(grid_format) == 'meshnc') then - ! In this case cap code determines the mask file - call ice_mesh_setmask_from_maskfile(ice_maskfile, ice_mesh, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ice_mesh_init_tlon_tlat_area_hm() - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - ! In this case init_grid2 will initialize tlon, tlat, area and hm - call init_grid2() - call ice_mesh_check(gcomp,ice_mesh, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if - end if - - call t_stopf ('cice_init_total') - if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeAdvertise time: ',runtimelog,wtime) - end subroutine InitializeAdvertise - - !=============================================================================== - - subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) + ! Note that cice_init also sets time manager info as well as mpi communicator info, + ! including master_task and my_task - ! Arguments - type(ESMF_GridComp) :: gcomp - type(ESMF_State) :: importState - type(ESMF_State) :: exportState - type(ESMF_Clock) :: clock - integer, intent(out) :: rc + call t_startf ('cice_init') + call cice_init() + call t_stopf ('cice_init') - ! Local variables - integer :: n - integer :: fieldcount - type(ESMF_Field) :: lfield - character(len=char_len_long) :: cvalue - real(dbl_kind) :: scol_lon - real(dbl_kind) :: scol_lat - real(dbl_kind) :: scol_spval - real(dbl_kind), pointer :: fldptr1d(:) - real(dbl_kind), pointer :: fldptr2d(:,:) - integer :: rank - character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist - integer(int_kind) :: ktherm + !---------------------------------------------------------------------------- + ! 
reset shr logging to my log file + !---------------------------------------------------------------------------- - character(len=char_len_long) :: single_column_lnd_domainfile - character(len=char_len_long) , pointer :: lfieldnamelist(:) => null() - character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) ' - !-------------------------------- + call icepack_query_parameters(ktherm_out=ktherm) + call icepack_query_parameters(tfrz_option_out=tfrz_option) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) - rc = ESMF_SUCCESS - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + ! Now write output to nu_diag - this must happen AFTER call to cice_init + if (localPet == 0) then + write(nu_diag,F00) trim(subname),' cice init nextsw_cday = ',nextsw_cday + write(nu_diag,*) trim(subname),' tfrz_option = ',trim(tfrz_option) + if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then + write(nu_diag,*) trim(subname),' Warning: Using ktherm = 2 and tfrz_option = ', trim(tfrz_option) + endif + write(nu_diag,*) trim(subname),' inst_name = ',trim(inst_name) + write(nu_diag,*) trim(subname),' inst_index = ',inst_index + write(nu_diag,*) trim(subname),' inst_suffix = ',trim(inst_suffix) + endif - call ufs_settimer(wtime) - !---------------------------------------------------------------------------- - ! Second cice initialization phase -after initializing grid info - !---------------------------------------------------------------------------- - ! Note that cice_init2 also sets time manager info as well as mpi communicator info, - ! including master_task and my_task - ! Note that cice_init2 calls ice_init() which in turn calls icepack_init_parameters - ! which sets the tfrz_option - call t_startf ('cice_init2') - call cice_init2() - call t_stopf ('cice_init2') !--------------------------------------------------------------------------- - ! use EClock to reset calendar information + ! use EClock to reset calendar information on initial start !--------------------------------------------------------------------------- ! - on initial run ! - iyear, month and mday obtained from sync clock - ! - time determined from myear, month and mday + ! - time determined from iyear, month and mday ! - istep0 and istep1 are set to 0 ! - on restart run ! - istep0, time and time_forc are read from restart file @@ -807,7 +572,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ref_ymd /= start_ymd .or. ref_tod /= start_tod) then if (my_task == master_task) then write(nu_diag,*) trim(subname),': ref_ymd ',ref_ymd, ' must equal start_ymd ',start_ymd - write(nu_diag,*) trim(subname),': ref_tod',ref_tod, ' must equal start_tod ',start_tod + write(nu_diag,*) trim(subname),': ref_ymd ',ref_tod, ' must equal start_ymd ',start_tod end if end if @@ -825,118 +590,268 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) end if call abort_ice(subname//' :: ERROR idate lt zero') endif - myear = (idate/10000) ! integer year of basedate - mmonth= (idate-myear*10000)/100 ! integer month of basedate - mday = idate-myear*10000-mmonth*100 ! day of month of basedate + iyear = (idate/10000) ! integer year of basedate + month = (idate-iyear*10000)/100 ! integer month of basedate + mday = idate-iyear*10000-month*100 ! 
day of month of basedate if (my_task == master_task) then write(nu_diag,*) trim(subname),' curr_ymd = ',curr_ymd write(nu_diag,*) trim(subname),' cice year_init = ',year_init write(nu_diag,*) trim(subname),' cice start date = ',idate - write(nu_diag,*) trim(subname),' cice start ymds = ',myear,mmonth,mday,start_tod + write(nu_diag,*) trim(subname),' cice start ymds = ',iyear,month,mday,start_tod write(nu_diag,*) trim(subname),' cice calendar_type = ',trim(calendar_type) endif +#ifdef CESMCOUPLED + if (calendar_type == "GREGORIAN" .or. & + calendar_type == "Gregorian" .or. & + calendar_type == "gregorian") then + call time2sec(iyear-(year_init-1),month,mday,time) + else + call time2sec(iyear-year_init,month,mday,time) + endif +#endif + time = time+start_tod end if - ! - start time from ESMF clock. Used to set history time units - idate0 = start_ymd - year_init = (idate0/10000) - month_init= (idate0-year_init*10000)/100 ! integer month of basedate - day_init = idate0-year_init*10000-month_init*100 + call calendar(time) ! update calendar info + if (write_ic) then + call accum_hist(dt) ! write initial conditions + end if + + !--------------------------------------------------------------------------- + ! Determine the global index space needed for the distgrid + !--------------------------------------------------------------------------- + + ! number the local grid to get allocation size for gindex_ice + lsize = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + lsize = lsize + 1 + enddo + enddo + enddo + + ! set global index array + allocate(gindex_ice(lsize)) + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + ig = this_block%i_glob(i) + jg = this_block%j_glob(j) + gindex_ice(n) = (jg-1)*nx_global + ig + enddo + enddo + enddo + + ! Determine total number of eliminated blocks globally + globalID = 0 + num_elim_global = 0 ! number of eliminated blocks + num_total_blocks = 0 + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + num_total_blocks = num_total_blocks + 1 + if (distrb_info%blockLocation(globalID) == 0) then + num_elim_global = num_elim_global + 1 + end if + end do + end do + + if (num_elim_global > 0) then + + ! Distribute the eliminated blocks in a round robin fashion amoung processors + num_elim_local = num_elim_global / npes + my_elim_start = num_elim_local*localPet + min(localPet, mod(num_elim_global, npes)) + 1 + if (localPet < mod(num_elim_global, npes)) then + num_elim_local = num_elim_local + 1 + end if + my_elim_end = my_elim_start + num_elim_local - 1 + + ! Determine the number of eliminated gridcells locally + globalID = 0 + num_elim_blocks = 0 ! local number of eliminated blocks + num_elim_gcells = 0 + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + if (distrb_info%blockLocation(globalID) == 0) then + num_elim_blocks = num_elim_blocks + 1 + if (num_elim_blocks >= my_elim_start .and. num_elim_blocks <= my_elim_end) then + this_block = get_block(globalID, globalID) + num_elim_gcells = num_elim_gcells + & + (this_block%jhi-this_block%jlo+1) * (this_block%ihi-this_block%ilo+1) + end if + end if + end do + end do + + ! 
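For reference, the start/end arithmetic used above to spread the eliminated blocks over the PEs is a standard balanced contiguous partition (the first mod(num_elim_global,npes) ranks get one extra block), even though the comment calls it round-robin. A minimal standalone sketch with hypothetical values for num_elim_global and npes, verifying that the per-PE ranges cover every eliminated block exactly once:

program check_partition
  implicit none
  integer, parameter :: num_elim_global = 10   ! hypothetical number of eliminated blocks
  integer, parameter :: npes = 4               ! hypothetical number of PEs
  integer :: localPet, num_elim_local, my_elim_start, my_elim_end, total

  total = 0
  do localPet = 0, npes-1
     ! same arithmetic as in InitializeRealize above
     num_elim_local = num_elim_global / npes
     my_elim_start  = num_elim_local*localPet + min(localPet, mod(num_elim_global, npes)) + 1
     if (localPet < mod(num_elim_global, npes)) num_elim_local = num_elim_local + 1
     my_elim_end    = my_elim_start + num_elim_local - 1
     total = total + (my_elim_end - my_elim_start + 1)
     write(*,'(a,i0,a,i0,a,i0)') 'pe ', localPet, ': blocks ', my_elim_start, ' to ', my_elim_end
  end do
  if (total /= num_elim_global) stop 'partition does not cover all eliminated blocks'
end program check_partition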
Determine the global index space of the eliminated gridcells + allocate(gindex_elim(num_elim_gcells)) + globalID = 0 + num_elim_gcells = 0 ! local number of eliminated gridcells + num_elim_blocks = 0 ! local number of eliminated blocks + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + if (distrb_info%blockLocation(globalID) == 0) then + this_block = get_block(globalID, globalID) + num_elim_blocks = num_elim_blocks + 1 + if (num_elim_blocks >= my_elim_start .and. num_elim_blocks <= my_elim_end) then + do j=this_block%jlo,this_block%jhi + do i=this_block%ilo,this_block%ihi + num_elim_gcells = num_elim_gcells + 1 + ig = this_block%i_glob(i) + jg = this_block%j_glob(j) + gindex_elim(num_elim_gcells) = (jg-1)*nx_global + ig + end do + end do + end if + end if + end do + end do + + ! create a global index that includes both active and eliminated gridcells + num_ice = size(gindex_ice) + num_elim = size(gindex_elim) + allocate(gindex(num_elim + num_ice)) + do n = 1,num_ice + gindex(n) = gindex_ice(n) + end do + do n = num_ice+1,num_ice+num_elim + gindex(n) = gindex_elim(n-num_ice) + end do + + deallocate(gindex_elim) - ! - Set use_leap_years based on calendar (as some CICE calls use this instead of the calendar type) - if (calendar_type == ice_calendar_gregorian) then - use_leap_years = .true. else - use_leap_years = .false. ! no_leap calendars - endif - call calendar() ! update calendar info + ! No eliminated land blocks + num_ice = size(gindex_ice) + allocate(gindex(num_ice)) + do n = 1,num_ice + gindex(n) = gindex_ice(n) + end do - !---------------------------------------------------------------------------- - ! reset shr logging to my log file - !---------------------------------------------------------------------------- + end if - call icepack_query_parameters(ktherm_out=ktherm) - call icepack_query_parameters(tfrz_option_out=tfrz_option) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) + !--------------------------------------------------------------------------- + ! Create distGrid from global index array + !--------------------------------------------------------------------------- - ! Now write output to nu_diag - this must happen AFTER call to cice_init - if (mastertask) then - write(nu_diag,'(a,d21.14)') trim(subname)//' cice init nextsw_cday = ',nextsw_cday - write(nu_diag,'(a)') trim(subname)//' tfrz_option = '//trim(tfrz_option) - if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then - write(nu_diag,*) trim(subname),' Warning: Using ktherm = 2 and tfrz_option = ', trim(tfrz_option) - endif - write(nu_diag,'(a )') trim(subname)//' inst_name = '//trim(inst_name) - write(nu_diag,'(a,i8 )') trim(subname)//' inst_index = ',inst_index - write(nu_diag,'(a )') trim(subname)//' inst_suffix = ',trim(inst_suffix) - endif + DistGrid = ESMF_DistGridCreate(arbSeqIndexList=gindex, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + !--------------------------------------------------------------------------- + ! Create the CICE mesh + !--------------------------------------------------------------------------- - if (write_ic) then - call accum_hist(dt) ! write initial conditions + ! 
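The arbitrary sequence indices passed to ESMF_DistGridCreate above are simply the row-major (i fastest) linearization of the global (ig,jg) grid, applied to both active and eliminated cells. A minimal sketch with a hypothetical 6x3 global grid, showing the forward map used in the loops above and its inverse:

program global_index_demo
  implicit none
  integer, parameter :: nx_global = 6, ny_global = 3   ! hypothetical global grid size
  integer :: ig, jg, gidx, ig2, jg2

  do jg = 1, ny_global
     do ig = 1, nx_global
        ! forward map used when filling gindex_ice/gindex_elim
        gidx = (jg-1)*nx_global + ig
        ! inverse map, recovering (ig,jg) from the sequence index
        jg2 = (gidx-1)/nx_global + 1
        ig2 = gidx - (jg2-1)*nx_global
        if (ig2 /= ig .or. jg2 /= jg) stop 'index mapping is not invertible'
     end do
  end do
  write(*,'(a,i0)') 'largest sequence index = ', ny_global*nx_global
end program global_index_demo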
read in the mesh + call NUOPC_CompAttributeGet(gcomp, name='mesh_ice', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + EMeshTemp = ESMF_MeshCreate(filename=trim(cvalue), fileformat=ESMF_FILEFORMAT_ESMFMESH, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (my_task == master_task) then + write(nu_diag,*)'mesh file for cice domain is ',trim(cvalue) end if - !----------------------------------------------------------------- - ! Prescribed ice initialization - !----------------------------------------------------------------- + ! recreate the mesh using the above distGrid + EMesh = ESMF_MeshCreate(EMeshTemp, elementDistgrid=Distgrid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ice_prescribed_init(clock, ice_mesh, rc) + ! obtain mesh lats and lons + call ESMF_MeshGet(Emesh, spatialDim=spatialDim, numOwnedElements=numOwnedElements, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(ownedElemCoords(spatialDim*numOwnedElements)) + allocate(lonMesh(numOwnedElements), latMesh(numOwnedElements)) + call ESMF_MeshGet(Emesh, ownedElemCoords=ownedElemCoords) if (ChkErr(rc,__LINE__,u_FILE_u)) return -#ifdef CESMCOUPLED - ! if single column is not valid - set all export state fields to zero and return - if (single_column .and. .not. scol_valid) then - write(nu_diag,'(a)')' (ice_comp_nuopc) single column mode point does not contain any ocn/ice '& - //' - setting all export data to 0' - call ice_realize_fields(gcomp, mesh=ice_mesh, & - flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ESMF_StateGet(exportState, itemCount=fieldCount, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - allocate(lfieldnamelist(fieldCount)) - call ESMF_StateGet(exportState, itemNameList=lfieldnamelist, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - do n = 1, fieldCount - if (trim(lfieldnamelist(n)) /= flds_scalar_name) then - call ESMF_StateGet(exportState, itemName=trim(lfieldnamelist(n)), field=lfield, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - call ESMF_FieldGet(lfield, rank=rank, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - if (rank == 2) then - call ESMF_FieldGet(lfield, farrayPtr=fldptr2d, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - fldptr2d(:,:) = 0._dbl_kind - else - call ESMF_FieldGet(lfield, farrayPtr=fldptr1d, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - fldptr1d(:) = 0._dbl_kind - end if - end if + do n = 1,numOwnedElements + lonMesh(n) = ownedElemCoords(2*n-1) + latMesh(n) = ownedElemCoords(2*n) + end do + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! obtain internally generated cice lats and lons for error checks + allocate(lon(lsize)) + allocate(lat(lsize)) + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + lon(n) = tlon(i,j,iblk)*rad_to_deg + lat(n) = tlat(i,j,iblk)*rad_to_deg + enddo enddo - deallocate(lfieldnamelist) - ! ******************* - ! *** RETURN HERE *** - ! ******************* - RETURN - else if(single_column) then - write(nu_diag,'(a,3(f10.5,2x))')' (ice_comp_nuopc) single column mode lon/lat/frac is ',& - scmlon,scmlat,scol_frac - end if -#endif + enddo + + ! 
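ESMF_MeshGet returns the element centres as a flat interleaved (lon1, lat1, lon2, lat2, ...) array, which is why the loop above strides by two; spatialDim is assumed to be 2 here. A minimal sketch of that unpacking with hypothetical coordinate values:

program unpack_coords
  implicit none
  integer, parameter :: dbl_kind = selected_real_kind(13)
  integer, parameter :: numOwnedElements = 3
  ! hypothetical interleaved (lon,lat) pairs in degrees
  real(dbl_kind), parameter :: ownedElemCoords(2*numOwnedElements) = &
       (/ 10.0_dbl_kind, 60.0_dbl_kind, 11.0_dbl_kind, 60.5_dbl_kind, 12.0_dbl_kind, 61.0_dbl_kind /)
  real(dbl_kind) :: lonMesh(numOwnedElements), latMesh(numOwnedElements)
  integer :: n

  do n = 1, numOwnedElements
     lonMesh(n) = ownedElemCoords(2*n-1)   ! odd entries are longitudes
     latMesh(n) = ownedElemCoords(2*n)     ! even entries are latitudes
  end do
  write(*,'(3(2f8.2,2x))') (lonMesh(n), latMesh(n), n=1,numOwnedElements)
end program unpack_coords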
error check differences between internally generated lons and those read in + do n = 1,lsize + diff_lon = abs(lonMesh(n) - lon(n)) + if ( (diff_lon > 1.e2 .and. abs(diff_lon - 360_dbl_kind) > 1.e-1) .or.& + (diff_lon > 1.e-3 .and. diff_lon < 1._dbl_kind) ) then + !write(6,100)n,lonMesh(n),lon(n), diff_lon +100 format('ERROR: CICE n, lonmesh(n), lon(n), diff_lon = ',i6,2(f21.13,3x),d21.5) + !call abort_ice() + end if + if (abs(latMesh(n) - lat(n)) > 1.e-1) then + !write(6,101)n,latMesh(n),lat(n), abs(latMesh(n)-lat(n)) +101 format('ERROR: CICE n, latmesh(n), lat(n), diff_lat = ',i6,2(f21.13,3x),d21.5) + !call abort_ice() + end if + end do + + ! deallocate memory + deallocate(ownedElemCoords) + deallocate(lon, lonMesh) + deallocate(lat, latMesh) !----------------------------------------------------------------- ! Realize the actively coupled fields !----------------------------------------------------------------- - call ice_realize_fields(gcomp, mesh=ice_mesh, & + call ice_realize_fields(gcomp, mesh=Emesh, & flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + !----------------------------------------------------------------- + ! Prescribed ice initialization - first get compid + !----------------------------------------------------------------- + + call NUOPC_CompAttributeGet(gcomp, name='MCTID', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) compid ! convert from string to integer + else + compid = 0 + end if + call ice_prescribed_init(lmpicom, compid, gindex_ice) + !----------------------------------------------------------------- ! Create cice export state !----------------------------------------------------------------- @@ -951,16 +866,16 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) flds_scalar_name, flds_scalar_num, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - !-------------------------------- - ! diagnostics - !-------------------------------- - ! TODO (mvertens, 2018-12-21): fill in iceberg_prognostic as .false. if (debug_export > 0 .and. my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & - idate, msec, nu_diag, rc=rc) + idate, sec, nu_diag, rc=rc) end if + !-------------------------------- + ! 
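The longitude comparison above tolerates differences near 360 degrees, presumably because the mesh file and the internally generated tlon can use different longitude conventions (0..360 versus -180..180). A minimal sketch of an equivalent periodic difference; this illustrates the wrap-around idea only, not the cap's exact tolerances:

program lon_diff_demo
  implicit none
  integer, parameter :: dbl_kind = selected_real_kind(13)
  real(dbl_kind) :: a, b, d

  a = 359.9_dbl_kind    ! hypothetical mesh longitude
  b = -0.1_dbl_kind     ! same point expressed on the -180..180 branch
  d = abs(a - b)
  d = mod(d, 360.0_dbl_kind)
  d = min(d, 360.0_dbl_kind - d)   ! shortest angular distance around the circle
  write(*,'(a,f10.6)') 'periodic lon difference (deg) = ', d   ! ~0.0
end program lon_diff_demo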
diagnostics + !-------------------------------- + if (dbug > 0) then call state_diagnose(exportState,subname//':ES',rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -968,9 +883,13 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + call t_stopf ('cice_init_total') + + deallocate(gindex_ice) + deallocate(gindex) + call flush_fileunit(nu_diag) - if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeRealize time: ',runtimelog,wtime) end subroutine InitializeRealize !=============================================================================== @@ -1011,23 +930,12 @@ subroutine ModelAdvance(gcomp, rc) character(char_len_long) :: restart_date character(char_len_long) :: restart_filename logical :: isPresent, isSet + character(*) , parameter :: F00 = "('(ice_comp_nuopc) ',2a,i8,d21.14)" character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) ' character(char_len_long) :: msgString !-------------------------------- rc = ESMF_SUCCESS - if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time since last step: ',runtimelog,wtime) - call ufs_settimer(wtime) - - call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) - - if (single_column .and. .not. scol_valid) then - ! ******************* - ! *** RETURN HERE *** - ! ******************* - RETURN - end if - if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) ! query the Component for its clock, importState and exportState @@ -1088,7 +996,7 @@ subroutine ModelAdvance(gcomp, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if if (my_task == master_task) then - write(nu_diag,'(a,2x,i8,2x,d24.14)') trim(subname)//' cice istep, nextsw_cday = ',istep, nextsw_cday + write(nu_diag,F00) trim(subname),' cice istep, nextsw_cday = ',istep, nextsw_cday end if !-------------------------------- @@ -1102,7 +1010,7 @@ subroutine ModelAdvance(gcomp, rc) !-------------------------------- ! cice clock - tod = msec + tod = sec ymd = idate ! model clock @@ -1163,7 +1071,7 @@ subroutine ModelAdvance(gcomp, rc) ! write Debug output if (debug_import > 0 .and. my_task==master_task) then call State_fldDebug(importState, flds_scalar_name, 'cice_import:', & - idate, msec, nu_diag, rc=rc) + idate, sec, nu_diag, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if if (dbug > 0) then @@ -1175,9 +1083,7 @@ subroutine ModelAdvance(gcomp, rc) ! Advance cice and timestep update !-------------------------------- - if(profile_memory) call ESMF_VMLogMemInfo("Entering CICE_Run : ") call CICE_Run() - if(profile_memory) call ESMF_VMLogMemInfo("Leaving CICE_Run : ") !-------------------------------- ! Create export state @@ -1192,7 +1098,7 @@ subroutine ModelAdvance(gcomp, rc) ! write Debug output if (debug_export > 0 .and. 
my_task==master_task) then call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', & - idate, msec, nu_diag, rc=rc) + idate, sec, nu_diag, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if if (dbug > 0) then @@ -1238,9 +1144,6 @@ subroutine ModelAdvance(gcomp, rc) if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time: ',runtimelog,wtime) - call ufs_settimer(wtime) - end subroutine ModelAdvance !=============================================================================== @@ -1371,35 +1274,35 @@ end subroutine ModelSetRunClock !=============================================================================== subroutine ModelFinalize(gcomp, rc) - - !-------------------------------- - ! Finalize routine - !-------------------------------- - type(ESMF_GridComp) :: gcomp integer, intent(out) :: rc ! local variables - character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" + character(*), parameter :: F00 = "('(ice_comp_nuopc) ',8a)" + character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" character(len=*),parameter :: subname=trim(modName)//':(ModelFinalize) ' !-------------------------------- + !-------------------------------- + ! Finalize routine + !-------------------------------- + rc = ESMF_SUCCESS - call ufs_settimer(wtime) if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + if (my_task == master_task) then write(nu_diag,F91) - write(nu_diag,'(a)') 'CICE: end of main integration loop' + write(nu_diag,F00) 'CICE: end of main integration loop' write(nu_diag,F91) end if - if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) - if(mastertask) call ufs_logtimer(nu_timer,msec,'ModelFinalize time: ',runtimelog,wtime) + if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) end subroutine ModelFinalize !=============================================================================== +#ifdef CESMCOUPLED subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) !---------------------------------------------------------- @@ -1427,9 +1330,6 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) rc = ESMF_SUCCESS -#ifndef CESMCOUPLED - return -#else if (first_time) then ! Determine orbital attributes from input @@ -1536,18 +1436,55 @@ subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) file=__FILE__, line=__LINE__) first_time = .false. -#endif end subroutine ice_orbital_init +#else + + subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) + + ! dummy input/output arguments + type(ESMF_GridComp) :: gcomp + type(ESMF_Clock) , intent(in) :: clock + integer , intent(in) :: logunit + logical , intent(in) :: mastertask + integer , intent(out) :: rc ! output error + + ! local variables + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + logical :: first_time = .true. + character(len=*) , parameter :: subname = "(cice_orbital_init)" + !-------------------------------- + + rc = ESMF_SUCCESS + + if (first_time) then + ! Start with icepack values then update with values defined in configure file if they exist + ! tcx, This should be identical with initialization, why do it again? Get rid of it + call icepack_query_orbit(eccen_out=eccen, mvelpp_out=mvelpp, lambm0_out=lambm0, obliqr_out=obliqr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + first_time = .false. 
+ end if + + end subroutine ice_orbital_init + +#endif !=============================================================================== + subroutine ice_cal_ymd2date(year, month, day, date) - ! input/output parameters: + implicit none + + ! !INPUT/OUTPUT PARAMETERS: + integer,intent(in ) :: year,month,day ! calendar year,month,day integer,intent(out) :: date ! coded (yyyymmdd) calendar date !--- local --- character(*),parameter :: subName = "(ice_cal_ymd2date)" + !------------------------------------------------------------------------------- ! NOTE: ! this calendar has a year zero (but no day or month zero) @@ -1558,4 +1495,7 @@ subroutine ice_cal_ymd2date(year, month, day, date) end subroutine ice_cal_ymd2date + !=============================================================================== + + end module ice_comp_nuopc diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 47abb0373..b32085143 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -3,15 +3,14 @@ module ice_import_export use ESMF use NUOPC use NUOPC_Model - use ice_kinds_mod , only : int_kind, dbl_kind, char_len, char_len_long, log_kind - use ice_constants , only : c0, c1, spval_dbl, radius + use ice_kinds_mod , only : int_kind, dbl_kind, char_len, log_kind + use ice_constants , only : c0, c1, spval_dbl use ice_constants , only : field_loc_center, field_type_scalar, field_type_vector use ice_blocks , only : block, get_block, nx_block, ny_block use ice_domain , only : nblocks, blocks_ice, halo_info, distrb_info use ice_domain_size , only : nx_global, ny_global, block_size_x, block_size_y, max_blocks, ncat - use ice_domain_size , only : nfreq, nfsd use ice_exit , only : abort_ice - use ice_flux , only : strairxT, strairyT, strocnxT_iavg, strocnyT_iavg + use ice_flux , only : strairxt, strairyt, strocnxt, strocnyt use ice_flux , only : alvdr, alidr, alvdf, alidf, Tref, Qref, Uref use ice_flux , only : flat, fsens, flwout, evap, fswabs, fhocn, fswthru use ice_flux , only : fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf @@ -22,13 +21,10 @@ module ice_import_export use ice_flux , only : fresh, fsalt, zlvl, uatm, vatm, potT, Tair, Qa use ice_flux , only : rhoa, swvdr, swvdf, swidr, swidf, flw, frain use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt - use ice_flux , only : send_i2x_per_cat use ice_flux , only : sss, Tf, wind, fsw - use ice_arrays_column , only : floe_rad_c, wave_spectrum - use ice_state , only : vice, vsno, aice, aicen_init, trcr, trcrn - use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm - use ice_grid , only : grid_format - use ice_mesh_mod , only : ocn_gridcell_frac + use ice_state , only : vice, vsno, aice, aicen_init, trcr + use ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm, ocn_gridcell_frac + use ice_grid , only : grid_type, t2ugrid_vector use ice_boundary , only : ice_HaloUpdate use ice_fileunits , only : nu_diag, flush_fileunit use ice_communicate , only : my_task, master_task, MPI_COMM_ICE @@ -38,12 +34,9 @@ module ice_import_export use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags use icepack_intfc , only : icepack_liquidus_temperature use icepack_intfc , only : icepack_sea_freezing_temperature - use icepack_intfc , only : icepack_query_tracer_indices - use icepack_parameters , only : puny, c2 - use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf + use cice_wrapper_mod , only : t_startf, 
t_stopf, t_barrierf #ifdef CESMCOUPLED use shr_frz_mod , only : shr_frz_freezetemp - use shr_mpi_mod , only : shr_mpi_min, shr_mpi_max #endif implicit none @@ -61,18 +54,20 @@ module ice_import_export interface state_getfldptr module procedure state_getfldptr_1d module procedure state_getfldptr_2d + module procedure state_getfldptr_3d + module procedure state_getfldptr_4d end interface state_getfldptr private :: state_getfldptr interface state_getimport - module procedure state_getimport_4d - module procedure state_getimport_3d + module procedure state_getimport_4d_output + module procedure state_getimport_3d_output end interface state_getimport private :: state_getimport interface state_setexport - module procedure state_setexport_4d - module procedure state_setexport_3d + module procedure state_setexport_4d_input + module procedure state_setexport_3d_input end interface state_setexport private :: state_setexport @@ -84,15 +79,12 @@ module ice_import_export integer :: ungridded_ubound = 0 end type fld_list_type - ! area correction factors for fluxes send and received from mediator - real(dbl_kind), allocatable :: mod2med_areacor(:) ! ratios of model areas to input mesh areas - real(dbl_kind), allocatable :: med2mod_areacor(:) ! ratios of input mesh areas to model areas - integer, parameter :: fldsMax = 100 integer :: fldsToIce_num = 0 integer :: fldsFrIce_num = 0 type (fld_list_type) :: fldsToIce(fldsMax) type (fld_list_type) :: fldsFrIce(fldsMax) + type(ESMF_GeomType_Flag) :: geomtype integer , parameter :: io_dbug = 10 ! i/o debug messages character(*), parameter :: u_FILE_u = & @@ -116,7 +108,7 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam character(char_len) :: stdname character(char_len) :: cvalue logical :: flds_wiso ! use case - logical :: flds_wave ! use case + logical :: flds_i2o_per_cat ! .true. => select per ice thickness category logical :: isPresent, isSet character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)' !------------------------------------------------------------------------------- @@ -124,44 +116,21 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam rc = ESMF_SUCCESS if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) - ! Determine if ice sends multiple ice category info back to mediator - send_i2x_per_cat = .false. - call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, & - isPresent=isPresent, isSet=isSet, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (isPresent .and. isSet) then - read(cvalue,*) send_i2x_per_cat - end if - if (my_task == master_task) then - write(nu_diag,*)'send_i2x_per_cat = ',send_i2x_per_cat - end if - if (.not.send_i2x_per_cat) then - if (allocated(fswthrun_ai)) then - deallocate(fswthrun_ai) - end if - end if - ! Determine if the following attributes are sent by the driver and if so read them in flds_wiso = .false. - call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, & - isPresent=isPresent, isSet=isSet, rc=rc) + call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent .and. isSet) then read(cvalue,*) flds_wiso - end if - if (my_task == master_task) then - write(nu_diag,*)'flds_wiso = ',flds_wiso + call ESMF_LogWrite('flds_wiso = '// trim(cvalue), ESMF_LOGMSG_INFO) end if - flds_wave = .false. 
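The flds_wiso and flds_i2o_per_cat handling below relies on a Fortran internal (list-directed) read to convert the attribute string returned by NUOPC_CompAttributeGet into a logical. A minimal sketch of that idiom with a hypothetical attribute value, independent of NUOPC:

program attr_read_demo
  implicit none
  character(len=16) :: cvalue
  logical :: flds_wiso

  flds_wiso = .false.         ! default when the attribute is absent
  cvalue = '.true.'           ! hypothetical value returned by the driver
  read(cvalue,*) flds_wiso    ! internal (list-directed) read converts string -> logical
  write(*,'(a,l1)') 'flds_wiso = ', flds_wiso
end program attr_read_demo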
- call NUOPC_CompAttributeGet(gcomp, name='wav_coupling_to_cice', value=cvalue, & - isPresent=isPresent, isSet=isSet, rc=rc) + flds_i2o_per_cat = .false. + call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return if (isPresent .and. isSet) then - read(cvalue,*) flds_wave - end if - if (my_task == master_task) then - write(nu_diag,*)'flds_wave = ',flds_wave + read(cvalue,*) send_i2x_per_cat + call ESMF_LogWrite('flds_i2o_per_cat = '// trim(cvalue), ESMF_LOGMSG_INFO) end if !----------------- @@ -171,51 +140,43 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsToIce_num, fldsToIce, trim(flds_scalar_name)) ! from ocean - call fldlist_add(fldsToIce_num, fldsToIce, 'So_dhdx' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'So_dhdy' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'So_t' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'So_s' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'So_u' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'So_v' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Fioo_q' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'sea_surface_slope_zonal' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'sea_surface_slope_merid' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'sea_surface_temperature' ) + call fldlist_add(fldsToIce_num, fldsToIce, 's_surf' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'ocn_current_zonal' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'ocn_current_merid' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'freezing_melting_potential') if (flds_wiso) then call fldlist_add(fldsToIce_num, fldsToIce, 'So_roce_wiso', ungridded_lbound=1, ungridded_ubound=3) end if ! from atmosphere - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_z' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_u' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_v' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_shum' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_tbot' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_pbot' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_swvdr' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_swvdf' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_swndr' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_swndf' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_lwdn' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_rain' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_snow' ) - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) !cesm - call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_dens' ) !cesm + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_zonal_wind_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_merid_wind_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_spec_humid_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_temp_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'inst_pres_height_lowest' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_vis_dir_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_ir_dir_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_vis_dif_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_sw_ir_dif_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_down_lw_flx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'mean_prec_rate' ) + call 
fldlist_add(fldsToIce_num, fldsToIce, 'mean_fprec_rate' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) !cesm + call fldlist_add(fldsToIce_num, fldsToIce, 'air_density_height_lowest' ) !cesm ! the following are advertised but might not be connected if they are not present ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific ! from atm - black carbon deposition fluxes (3) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) - ! from atm - wet dust deposition fluxes (4 sizes) + ! from atm - wet dust deposition frluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstwet', ungridded_lbound=1, ungridded_ubound=4) - ! from atm - dry dust deposition fluxes (4 sizes) + ! from - atm dry dust deposition frluxes (4 sizes) call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4) - ! the following are advertised but might not be connected if they are not advertised in the - ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific - ! from wave - if (flds_wave) then - call fldlist_add(fldsToIce_num, fldsToIce, 'Sw_elevation_spectrum', ungridded_lbound=1, & - ungridded_ubound=25) - end if - do n = 1,fldsToIce_num call NUOPC_Advertise(importState, standardName=fldsToIce(n)%stdname, & TransferOfferGeomObject='will provide', rc=rc) @@ -229,67 +190,63 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam call fldlist_add(fldsFrIce_num, fldsFrIce, trim(flds_scalar_name)) ! ice states - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_imask' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_ifrac' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_t' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_vice' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_vsno' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_tref' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_snowh' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_u10' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_avsdr' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_avsdf' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_anidr' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_anidf' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_mask' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'sea_ice_surface_temperature' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_ice_volume' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_snow_volume' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_tref' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_snowh' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_u10' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dir_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dir_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_vis_dif_albedo' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'inst_ice_ir_dif_albedo' ) ! the following are advertised but might not be connected if they are not present ! 
in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific if (send_i2x_per_cat) then - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_ifrac_n', & + call fldlist_add(fldsFrIce_num, fldsFrIce, 'ice_fraction_n', & ungridded_lbound=1, ungridded_ubound=ncat) end if - if (flds_wave) then - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_thick' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_floediam' ) - end if ! ice/atm fluxes computed by ice - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_taux' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_tauy' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_lat' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_sen' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_lwup' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_evap' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_swnet' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'stress_on_air_ice_zonal' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'stress_on_air_ice_merid' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_laten_heat_flx_atm_into_ice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sensi_heat_flx_atm_into_ice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_up_lw_flx_ice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_swnet' ) ! ice/ocn fluxes computed by ice - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_melth' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_vdr' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_vdf' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_idr' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_idf' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'net_heat_flx_to_ocn' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_vis_dir_flx' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_vis_dif_flx' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dir_flx' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ir_dif_flx' ) if (send_i2x_per_cat) then - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_ifrac_n', & + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_sw_pen_to_ocn_ifrac_n', & ungridded_lbound=1, ungridded_ubound=ncat) end if - call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_meltw' ) - call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_salt' ) - call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_taux' ) - call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_tauy' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_fresh_water_to_ocean_rate' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'mean_salt_rate' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_zonal' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'stress_on_ocn_ice_merid' ) ! the following are advertised but might not be connected if they are not present ! 
in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific - call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcpho' ) - call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcphi' ) - call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_flxdst' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcpho' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcphi' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_flxdst' ) if (flds_wiso) then - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_meltw_wiso', & + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_fresh_water_to_ocean_rate_wiso', & ungridded_lbound=1, ungridded_ubound=3) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_evap_wiso', & + call fldlist_add(fldsFrIce_num, fldsFrIce, 'mean_evap_rate_atm_into_ice_wiso', & ungridded_lbound=1, ungridded_ubound=3) call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref_wiso', & ungridded_lbound=1, ungridded_ubound=3) @@ -305,36 +262,21 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam end subroutine ice_advertise_fields - !============================================================================== - subroutine ice_realize_fields(gcomp, mesh, flds_scalar_name, flds_scalar_num, rc) - use ice_scam, only : single_column +!============================================================================== + + subroutine ice_realize_fields(gcomp, mesh, grid, flds_scalar_name, flds_scalar_num, rc) ! input/output variables - type(ESMF_GridComp) :: gcomp - type(ESMF_Mesh) , intent(in) :: mesh - character(len=*) , intent(in) :: flds_scalar_name - integer , intent(in) :: flds_scalar_num - integer , intent(out) :: rc + type(ESMF_GridComp) :: gcomp + type(ESMF_Mesh) , optional , intent(in) :: mesh + type(ESMF_Grid) , optional , intent(in) :: grid + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(in) :: flds_scalar_num + integer , intent(out) :: rc ! local variables - type(ESMF_State) :: importState - type(ESMF_State) :: exportState - type(ESMF_Field) :: lfield - integer :: numOwnedElements - integer :: i, j, iblk, n - integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain - type(block) :: this_block ! 
block information for current block - real(dbl_kind), allocatable :: mesh_areas(:) - real(dbl_kind), allocatable :: model_areas(:) - real(dbl_kind), pointer :: dataptr(:) - real(dbl_kind) :: max_mod2med_areacor - real(dbl_kind) :: max_med2mod_areacor - real(dbl_kind) :: min_mod2med_areacor - real(dbl_kind) :: min_med2mod_areacor - real(dbl_kind) :: max_mod2med_areacor_glob - real(dbl_kind) :: max_med2mod_areacor_glob - real(dbl_kind) :: min_mod2med_areacor_glob - real(dbl_kind) :: min_med2mod_areacor_glob + type(ESMF_State) :: importState + type(ESMF_State) :: exportState character(len=*), parameter :: subname='(ice_import_export:realize_fields)' !--------------------------------------------------------------------------- @@ -343,96 +285,60 @@ subroutine ice_realize_fields(gcomp, mesh, flds_scalar_name, flds_scalar_num, rc call NUOPC_ModelGet(gcomp, importState=importState, exportState=exportState, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call fldlist_realize( & - state=ExportState, & - fldList=fldsFrIce, & - numflds=fldsFrIce_num, & - flds_scalar_name=flds_scalar_name, & - flds_scalar_num=flds_scalar_num, & - tag=subname//':CICE_Export',& - mesh=mesh, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (present(mesh)) then - call fldlist_realize( & - state=importState, & - fldList=fldsToIce, & - numflds=fldsToIce_num, & - flds_scalar_name=flds_scalar_name, & - flds_scalar_num=flds_scalar_num, & - tag=subname//':CICE_Import',& - mesh=mesh, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return -#ifdef CESMCOUPLED + geomtype = ESMF_GEOMTYPE_MESH - ! allocate area correction factors - call ESMF_MeshGet(mesh, numOwnedElements=numOwnedElements, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - allocate (mod2med_areacor(numOwnedElements)) - allocate (med2mod_areacor(numOwnedElements)) + call fldlist_realize( & + state=ExportState, & + fldList=fldsFrIce, & + numflds=fldsFrIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Export',& + mesh=mesh, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (single_column) then + call fldlist_realize( & + state=importState, & + fldList=fldsToIce, & + numflds=fldsToIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Import',& + mesh=mesh, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - mod2med_areacor(:) = 1._dbl_kind - med2mod_areacor(:) = 1._dbl_kind + else if (present(grid)) then - else + geomtype = ESMF_GEOMTYPE_GRID - ! Get mesh areas from second field - using second field since the - ! first field is the scalar field + call fldlist_realize( & + state=ExportState, & + fldList=fldsFrIce, & + numflds=fldsFrIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Export',& + grid=grid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ESMF_StateGet(exportState, itemName=trim(fldsFrIce(2)%stdname), field=lfield, rc=rc) + call fldlist_realize( & + state=importState, & + fldList=fldsToIce, & + numflds=fldsToIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Import',& + grid=grid, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ESMF_FieldRegridGetArea(lfield, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - call ESMF_FieldGet(lfield, farrayPtr=dataptr, rc=rc) - if (chkerr(rc,__LINE__,u_FILE_u)) return - allocate(mesh_areas(numOwnedElements)) - mesh_areas(:) = dataptr(:) - - ! 
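The area-correction factors referenced in this hunk are per-cell ratios between the model's own cell areas (tarea normalized by the Earth radius squared, so the units are comparable to the areas ESMF reports for the exchange mesh) and the mesh cell areas; fluxes are scaled by one ratio or its inverse depending on direction so that area integrals are preserved. A minimal sketch of the ratio arithmetic with hypothetical areas:

program areacor_demo
  implicit none
  integer, parameter :: dbl_kind = selected_real_kind(13)
  integer, parameter :: n = 3
  ! hypothetical cell areas (any consistent units)
  real(dbl_kind) :: model_areas(n) = (/ 1.00_dbl_kind, 0.98_dbl_kind, 1.05_dbl_kind /)
  real(dbl_kind) :: mesh_areas(n)  = (/ 1.00_dbl_kind, 1.00_dbl_kind, 1.00_dbl_kind /)
  real(dbl_kind) :: mod2med_areacor(n), med2mod_areacor(n), flux(n)

  mod2med_areacor = model_areas / mesh_areas   ! applied to fluxes sent to the mediator
  med2mod_areacor = mesh_areas / model_areas   ! applied to fluxes received from the mediator
  flux = 5.0_dbl_kind                          ! hypothetical incoming flux
  flux = flux * med2mod_areacor                ! flux*model_area now matches flux*mesh_area as received
  write(*,'(3f8.4)') flux
end program areacor_demo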
Determine flux correction factors (module variables) - allocate(model_areas(numOwnedElements)) - mod2med_areacor(:) = 1._dbl_kind - med2mod_areacor(:) = 1._dbl_kind - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - model_areas(n) = tarea(i,j,iblk)/(radius*radius) - mod2med_areacor(n) = model_areas(n) / mesh_areas(n) - med2mod_areacor(n) = mesh_areas(n) / model_areas(n) - enddo - enddo - enddo - deallocate(model_areas) - deallocate(mesh_areas) - end if - min_mod2med_areacor = minval(mod2med_areacor) - max_mod2med_areacor = maxval(mod2med_areacor) - min_med2mod_areacor = minval(med2mod_areacor) - max_med2mod_areacor = maxval(med2mod_areacor) - call shr_mpi_max(max_mod2med_areacor, max_mod2med_areacor_glob, mpi_comm_ice) - call shr_mpi_min(min_mod2med_areacor, min_mod2med_areacor_glob, mpi_comm_ice) - call shr_mpi_max(max_med2mod_areacor, max_med2mod_areacor_glob, mpi_comm_ice) - call shr_mpi_min(min_med2mod_areacor, min_med2mod_areacor_glob, mpi_comm_ice) - - if (my_task == master_task) then - write(nu_diag,'(2A,2g23.15,A )') trim(subname),' : min_mod2med_areacor, max_mod2med_areacor ',& - min_mod2med_areacor_glob, max_mod2med_areacor_glob, 'CICE6' - write(nu_diag,'(2A,2g23.15,A )') trim(subname),' : min_med2mod_areacor, max_med2mod_areacor ',& - min_med2mod_areacor_glob, max_med2mod_areacor_glob, 'CICE6' end if -#endif end subroutine ice_realize_fields !============================================================================== + subroutine ice_import( importState, rc ) ! input/output variables @@ -442,18 +348,14 @@ subroutine ice_import( importState, rc ) ! local variables integer,parameter :: nflds=16 integer,parameter :: nfldv=6 - integer :: i, j, iblk, n, k + integer :: i, j, iblk, n integer :: ilo, ihi, jlo, jhi !beginning and end of physical domain type(block) :: this_block ! block information for current block real (kind=dbl_kind),allocatable :: aflds(:,:,:,:) real (kind=dbl_kind) :: workx, worky real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP real (kind=dbl_kind) :: Tffresh - real (kind=dbl_kind) :: inst_pres_height_lowest - real (kind=dbl_kind), pointer :: dataptr2d(:,:) - real (kind=dbl_kind), pointer :: dataptr1d(:) - real (kind=dbl_kind), pointer :: dataptr2d_dstwet(:,:) - real (kind=dbl_kind), pointer :: dataptr2d_dstdry(:,:) + real (kind=dbl_kind) :: inst_pres_height_lowest character(len=char_len) :: tfrz_option integer(int_kind) :: ktherm character(len=*), parameter :: subname = 'ice_import' @@ -463,20 +365,17 @@ subroutine ice_import( importState, rc ) call icepack_query_parameters(Tffresh_out=Tffresh) call icepack_query_parameters(tfrz_option_out=tfrz_option) call icepack_query_parameters(ktherm_out=ktherm) - - if (io_dbug > 5) then - write(msgString,'(A,i8)')trim(subname)//' tfrz_option = ' & - // trim(tfrz_option)//', ktherm = ',ktherm - call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + if (io_dbug > 5) then + write(msgString,'(A,i8)')trim(subname)//' tfrz_option = ' & + // trim(tfrz_option)//', ktherm = ',ktherm + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) end if - - ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & - ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & - ! Tffresh_out=Tffresh) - ! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & - ! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & - ! 
tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) - +! call icepack_query_parameters(tfrz_option_out=tfrz_option, & +! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & +! Tffresh_out=Tffresh) +! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & +! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & +! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=u_FILE_u, line=__LINE__) @@ -498,70 +397,62 @@ subroutine ice_import( importState, rc ) ! import ocean states - call state_getimport(importState, 'So_t', output=aflds, index=1, rc=rc) + call state_getimport(importState, 'sea_surface_temperature', output=aflds, index=1, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_s', output=aflds, index=2, rc=rc) + call state_getimport(importState, 's_surf', output=aflds, index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! import atm states - call state_getimport(importState, 'Sa_z', output=aflds, index=3, rc=rc) + call state_getimport(importState, 'inst_height_lowest', output=aflds, index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (State_FldChk(importState, 'Sa_ptem') .and. State_fldchk(importState, 'Sa_dens')) then + if (State_FldChk(importState, 'Sa_ptem') .and. State_fldchk(importState, 'air_density_height_lowest')) then call state_getimport(importState, 'Sa_ptem', output=aflds, index=4, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Sa_dens', output=aflds, index=5, rc=rc) + call state_getimport(importState, 'air_density_height_lowest', output=aflds, index=5, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - else if (State_FldChk(importState, 'Sa_pbot')) then - call state_getimport(importState, 'Sa_pbot', output=aflds, index=6, rc=rc) + else if (State_FldChk(importState, 'inst_pres_height_lowest')) then + call state_getimport(importState, 'inst_pres_height_lowest', output=aflds, index=6, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return else call abort_ice(trim(subname)//& - ": ERROR either Sa_ptem and Sa_dens OR Sa_pbot must be in import state") + ": ERROR either Sa_ptem and air_density_height_lowest OR inst_pres_height_lowest must be in import state") end if - call state_getimport(importState, 'Sa_tbot', output=aflds, index=7, rc=rc) + call state_getimport(importState, 'inst_temp_height_lowest', output=aflds, index=7, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Sa_shum', output=aflds, index=8, rc=rc) + call state_getimport(importState, 'inst_spec_humid_height_lowest', output=aflds, index=8, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! import ocn/ice fluxes - call state_getimport(importState, 'Fioo_q', output=aflds, index=9, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'freezing_melting_potential', output=aflds, index=9, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 
import atm fluxes - call state_getimport(importState, 'Faxa_swvdr', output=aflds, index=10, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'mean_down_sw_vis_dir_flx', output=aflds, index=10, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Faxa_swndr', output=aflds, index=11, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'mean_down_sw_ir_dir_flx', output=aflds, index=11, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Faxa_swvdf', output=aflds, index=12, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'mean_down_sw_vis_dif_flx', output=aflds, index=12, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Faxa_swndf', output=aflds, index=13, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'mean_down_sw_ir_dif_flx', output=aflds, index=13, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Faxa_lwdn', output=aflds, index=14, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'mean_down_lw_flx', output=aflds, index=14, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Faxa_rain', output=aflds, index=15, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'mean_prec_rate', output=aflds, index=15, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Faxa_snow', output=aflds, index=16, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'mean_fprec_rate', output=aflds, index=16, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! perform a halo update @@ -597,30 +488,7 @@ subroutine ice_import( importState, rc ) end do !$OMP END PARALLEL DO - ! import wave elevation spectrum from wave (frequencies 1-25, assume that nfreq is 25) - if (State_FldChk(importState, 'Sw_elevation_spectrum')) then - if (nfreq /= 25) then - call abort_ice(trim(subname)//": ERROR nfreq not equal to 25 ") - end if - call state_getfldptr(importState, 'Sw_elevation_spectrum', fldptr=dataPtr2d, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - do k = 1,nfreq - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo; ihi = this_block%ihi - jlo = this_block%jlo; jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - wave_spectrum(i,j,k,iblk) = dataPtr2d(k,n) - end do - end do - end do - end do - end if - - if ( State_fldChk(importState, 'Sa_ptem') .and. State_fldchk(importState,'Sa_dens')) then + if ( State_fldChk(importState, 'Sa_ptem') .and. State_fldchk(importState,'air_density_height_lowest')) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1,ny_block @@ -631,7 +499,7 @@ subroutine ice_import( importState, rc ) end do end do !$OMP END PARALLEL DO - else if (State_fldChk(importState, 'Sa_pbot')) then + else if (State_fldChk(importState, 'inst_pres_height_lowest')) then !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks do j = 1,ny_block @@ -646,11 +514,11 @@ subroutine ice_import( importState, rc ) rhoa(i,j,iblk) = inst_pres_height_lowest / & (287.058_ESMF_KIND_R8*(1._ESMF_KIND_R8+0.608_ESMF_KIND_R8*Qa(i,j,iblk))*Tair(i,j,iblk)) else - rhoa(i,j,iblk) = 1.2_ESMF_KIND_R8 + rhoa(i,j,iblk) = 0._ESMF_KIND_R8 endif end do !i end do !j - end do !iblk + end do !iblk !$OMP END PARALLEL DO end if @@ -660,19 +528,19 @@ subroutine ice_import( importState, rc ) ! 
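When only inst_pres_height_lowest is available, the density fallback used below is the ideal gas law with a virtual-temperature correction for humidity, rhoa = p / (Rd * (1 + 0.608*Qa) * Tair). A minimal standalone sketch with hypothetical near-surface values:

program rhoa_demo
  implicit none
  integer, parameter :: dbl_kind = selected_real_kind(13)
  real(dbl_kind), parameter :: Rd = 287.058_dbl_kind   ! gas constant for dry air (J kg-1 K-1)
  real(dbl_kind) :: p, Tair, Qa, rhoa

  p    = 101325.0_dbl_kind   ! hypothetical surface pressure (Pa)
  Tair = 263.15_dbl_kind     ! hypothetical air temperature (K)
  Qa   = 0.002_dbl_kind      ! hypothetical specific humidity (kg/kg)

  ! same form as the fallback in ice_import: virtual temperature accounts for water vapour
  rhoa = p / (Rd * (1.0_dbl_kind + 0.608_dbl_kind*Qa) * Tair)
  write(*,'(a,f8.4,a)') 'rhoa = ', rhoa, ' kg m-3'
end program rhoa_demo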
Get velocity fields from ocean and atm and slope fields from ocean - call state_getimport(importState, 'So_u', output=aflds, index=1, rc=rc) + call state_getimport(importState, 'ocn_current_zonal', output=aflds, index=1, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_v', output=aflds, index=2, rc=rc) + call state_getimport(importState, 'ocn_current_merid', output=aflds, index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Sa_u', output=aflds, index=3, rc=rc) + call state_getimport(importState, 'inst_zonal_wind_height_lowest', output=aflds, index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Sa_v', output=aflds, index=4, rc=rc) + call state_getimport(importState, 'inst_merid_wind_height_lowest', output=aflds, index=4, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_dhdx', output=aflds, index=5, rc=rc) + call state_getimport(importState, 'sea_surface_slope_zonal', output=aflds, index=5, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_dhdy', output=aflds, index=6, rc=rc) + call state_getimport(importState, 'sea_surface_slope_merid', output=aflds, index=6, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -709,45 +577,34 @@ subroutine ice_import( importState, rc ) ! bcphodry ungridded_index=2 ! bcphiwet ungridded_index=3 - call state_getfldptr(importState, 'Faxa_bcph', fldptr=dataPtr2d, rc=rc) + ! bcphodry + call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=1, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! bcphidry + bcphiwet + call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=2, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_bcph', output=faero_atm, index=2, do_sum=.true., ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo; ihi = this_block%ihi - jlo = this_block%jlo; jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - faero_atm(i,j,1,iblk) = dataPtr2d(2,n) * med2mod_areacor(n) ! bcphodry - faero_atm(i,j,2,iblk) = (dataptr2d(1,n) + dataPtr2d(3,n)) * med2mod_areacor(n) ! bcphidry + bcphiwet - end do - end do - end do end if ! Sum over all dry and wet dust fluxes from ath atmosphere if (State_FldChk(importState, 'Faxa_dstwet') .and. 
State_FldChk(importState, 'Faxa_dstdry')) then - call state_getfldptr(importState, 'Faxa_dstwet', fldptr=dataPtr2d_dstwet, rc=rc) + call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, ungridded_index=1, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getfldptr(importState, 'Faxa_dstdry', fldptr=dataPtr2d_dstdry, rc=rc) + call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstwet', output=faero_atm, index=3, do_sum=.true., ungridded_index=4, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_dstdry', output=faero_atm, index=3, do_sum=.true., ungridded_index=4, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo; ihi = this_block%ihi - jlo = this_block%jlo; jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - faero_atm(i,j,3,iblk) = dataPtr2d_dstwet(1,n) + dataptr2d_dstdry(1,n) + & - dataPtr2d_dstwet(2,n) + dataptr2d_dstdry(2,n) + & - dataPtr2d_dstwet(3,n) + dataptr2d_dstdry(3,n) + & - dataPtr2d_dstwet(4,n) + dataptr2d_dstdry(4,n) - faero_atm(i,j,3,iblk) = faero_atm(i,j,3,iblk) * med2mod_areacor(n) - end do - end do - end do end if !------------------------------------------------------- @@ -759,38 +616,32 @@ subroutine ice_import( importState, rc ) ! HDO => ungridded_index=3 if (State_FldChk(importState, 'shum_wiso')) then - call state_getimport(importState, 'Sa_shum_wiso', output=Qa_iso, index=1, ungridded_index=3, rc=rc) + call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=1, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Sa_shum_wiso', output=Qa_iso, index=2, ungridded_index=1, rc=rc) + call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=2, ungridded_index=1, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Sa_shum_wiso', output=Qa_iso, index=3, ungridded_index=2, rc=rc) + call state_getimport(importState, 'inst_spec_humid_height_lowest_wiso', output=Qa_iso, index=3, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, & - ! areacor=med2mod_areacor, rc=rc) - ! if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, & - ! areacor=med2mod_areacor, rc=rc) - ! if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, & - ! 
areacor=med2mod_areacor, rc=rc) - ! if (ChkErr(rc,__LINE__,u_FILE_u)) return - - call state_getimport(importState, 'Faxa_snow_wiso', output=fiso_atm, index=1, ungridded_index=3, rc=rc) +! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return +! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return +! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, rc=rc) +! if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=1, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Faxa_snow_wiso', output=fiso_atm, index=2, ungridded_index=1, rc=rc) + call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=2, ungridded_index=1, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'Faxa_snow_wiso', output=fiso_atm, index=3, ungridded_index=2, rc=rc) + call state_getimport(importState, 'mean_fprec_rate_wiso', output=fiso_atm, index=3, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_roce_wiso', output=HDO_ocn , ungridded_index=3, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'So_roce_wiso', output=HDO_ocn , ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_roce_wiso', output=H2_16O_ocn, ungridded_index=1, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'So_roce_wiso', output=H2_16O_ocn, ungridded_index=1, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'So_roce_wiso', output=H2_18O_ocn, ungridded_index=2, & - areacor=med2mod_areacor, rc=rc) + call state_getimport(importState, 'So_roce_wiso', output=H2_18O_ocn, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if @@ -839,11 +690,9 @@ subroutine ice_import( importState, rc ) #ifdef CESMCOUPLED ! Use shr_frz_mod for this - do iblk = 1, nblocks - Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) - end do -#else - !$OMP PARALLEL DO PRIVATE(iblk,i,j) + Tf(:,:,iblk) = shr_frz_freezetemp(sss(:,:,iblk)) +#else + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) do iblk = 1, nblocks do j = 1,ny_block do i = 1,nx_block @@ -861,10 +710,10 @@ subroutine ice_import( importState, rc ) if (.not.prescribed_ice) then call t_startf ('cice_imp_t2u') - call ice_HaloUpdate(uocn, halo_info, field_loc_center, field_type_vector) - call ice_HaloUpdate(vocn, halo_info, field_loc_center, field_type_vector) - call ice_HaloUpdate(ss_tltx, halo_info, field_loc_center, field_type_vector) - call ice_HaloUpdate(ss_tlty, halo_info, field_loc_center, field_type_vector) + call t2ugrid_vector(uocn) + call t2ugrid_vector(vocn) + call t2ugrid_vector(ss_tltx) + call t2ugrid_vector(ss_tlty) call t_stopf ('cice_imp_t2u') end if @@ -898,9 +747,8 @@ subroutine ice_import( importState, rc ) end subroutine ice_import !=============================================================================== - subroutine ice_export( exportState, rc ) - use ice_scam, only : single_column + subroutine ice_export( exportState, rc ) ! input/output variables type(ESMF_State), intent(inout) :: exportState @@ -908,7 +756,7 @@ subroutine ice_export( exportState, rc ) ! 
local variables type(block) :: this_block ! block information for current block - integer :: i, j, iblk, n, k ! indices + integer :: i, j, iblk, n ! indices integer :: n2 ! thickness category index integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain real (kind=dbl_kind) :: workx, worky ! tmps for converting grid @@ -922,15 +770,8 @@ subroutine ice_export( exportState, rc ) real (kind=dbl_kind) :: tauxo (nx_block,ny_block,max_blocks) ! ice/ocean stress real (kind=dbl_kind) :: tauyo (nx_block,ny_block,max_blocks) ! ice/ocean stress real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) ! fractional ice area - real (kind=dbl_kind) :: floediam(nx_block,ny_block,max_blocks) - real (kind=dbl_kind) :: floethick(nx_block,ny_block,max_blocks) ! ice thickness - logical (kind=log_kind) :: tr_fsd - integer (kind=int_kind) :: nt_fsd - real (kind=dbl_kind) :: Tffresh real (kind=dbl_kind), allocatable :: tempfld(:,:,:) - real (kind=dbl_kind), pointer :: dataptr_ifrac_n(:,:) - real (kind=dbl_kind), pointer :: dataptr_swpen_n(:,:) - logical (kind=log_kind), save :: first_call = .true. + real (kind=dbl_kind) :: Tffresh character(len=*),parameter :: subname = 'ice_export' !----------------------------------------------------- @@ -938,16 +779,12 @@ subroutine ice_export( exportState, rc ) if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) call icepack_query_parameters(Tffresh_out=Tffresh) - ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & - ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & - ! Tffresh_out=Tffresh) - ! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & - ! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & - ! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) - - call icepack_query_tracer_indices(nt_fsd_out=nt_fsd) - call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) - +! call icepack_query_parameters(tfrz_option_out=tfrz_option, & +! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & +! Tffresh_out=Tffresh) +! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & +! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & +! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=u_FILE_u, line=__LINE__) @@ -961,10 +798,8 @@ subroutine ice_export( exportState, rc ) tauya(:,:,:) = c0 tauxo(:,:,:) = c0 tauyo(:,:,:) = c0 - floediam(:,:,:) = c0 - floethick(:,:,:) = c0 - !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,workx,worky, this_block, ilo, ihi, jlo, jhi) + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky, this_block, ilo, ihi, jlo, jhi) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -980,27 +815,6 @@ subroutine ice_export( exportState, rc ) ! surface temperature Tsrf(i,j,iblk) = Tffresh + trcr(i,j,1,iblk) !Kelvin (original ???) - if (tr_fsd) then - ! floe thickness (m) - if (aice(i,j,iblk) > puny) then - floethick(i,j,iblk) = vice(i,j,iblk) / aice(i,j,iblk) - else - floethick(i,j,iblk) = c0 - end if - - ! floe diameter (m) - workx = c0 - worky = c0 - do n = 1, ncat - do k = 1, nfsd - workx = workx + floe_rad_c(k) * aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) - worky = worky + aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) - end do - end do - if (worky > c0) workx = c2*workx / worky - floediam(i,j,iblk) = MAX(c2*floe_rad_c(1),workx) - endif - !
wind stress (on POP T-grid: convert to lat-lon) workx = strairxT(i,j,iblk) ! N/m^2 worky = strairyT(i,j,iblk) ! N/m^2 @@ -1008,8 +822,8 @@ subroutine ice_export( exportState, rc ) tauya(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) + workx*sin(ANGLET(i,j,iblk)) ! ice/ocean stress (on POP T-grid: convert to lat-lon) - workx = -strocnxT_iavg(i,j,iblk) ! N/m^2 - worky = -strocnyT_iavg(i,j,iblk) ! N/m^2 + workx = -strocnxT(i,j,iblk) ! N/m^2 + worky = -strocnyT(i,j,iblk) ! N/m^2 tauxo(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) - worky*sin(ANGLET(i,j,iblk)) tauyo(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) + workx*sin(ANGLET(i,j,iblk)) enddo @@ -1056,21 +870,18 @@ subroutine ice_export( exportState, rc ) !--------------------------------- ! Zero out fields with tmask for proper coupler accumulation in ice free areas - if (first_call .or. .not.single_column) then - call state_reset(exportState, c0, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - first_call = .false. - endif + call state_reset(exportState, c0, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Create a temporary field allocate(tempfld(nx_block,ny_block,nblocks)) ! Fractions and mask - call state_setexport(exportState, 'Si_ifrac', input=ailohi, rc=rc) + call state_setexport(exportState, 'ice_fraction', input=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (trim(grid_format) == 'meshnc') then - call state_setexport(exportState, 'Si_imask', input=ocn_gridcell_frac, rc=rc) + if (trim(grid_type) == 'latlon') then + call state_setexport(exportState, 'ice_mask', input=ocn_gridcell_frac, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return else do iblk = 1, nblocks @@ -1085,7 +896,7 @@ subroutine ice_export( exportState, rc ) end do end do end do - call state_setexport(exportState, 'Si_imask', input=tempfld, rc=rc) + call state_setexport(exportState, 'ice_mask', input=tempfld, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if @@ -1094,23 +905,23 @@ subroutine ice_export( exportState, rc ) ! ---- ! surface temperature of ice covered portion (degK) - call state_setexport(exportState, 'Si_t', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'sea_ice_surface_temperature', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! albedo vis dir - call state_setexport(exportState, 'Si_avsdr', input=alvdr, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'inst_ice_vis_dir_albedo', input=alvdr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! albedo nir dir - call state_setexport(exportState, 'Si_anidr', input=alidr, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'inst_ice_ir_dir_albedo', input=alidr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! albedo vis dif - call state_setexport(exportState, 'Si_avsdf', input=alvdf, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'inst_ice_vis_dif_albedo', input=alvdf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! albedo nir dif - call state_setexport(exportState, 'Si_anidf', input=alidf, lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'inst_ice_ir_dif_albedo', input=alidf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 10m atm reference wind speed (m/s) @@ -1126,11 +937,11 @@ subroutine ice_export( exportState, rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 
Snow volume - call state_setexport(exportState, 'Si_vsno' , input=vsno , lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_snow_volume' , input=vsno , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Ice volume - call state_setexport(exportState, 'Si_vice' , input=vice , lmask=tmask, ifrac=ailohi, rc=rc) + call state_setexport(exportState, 'mean_ice_volume' , input=vice , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Snow height @@ -1151,59 +962,36 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'Si_snowh' , input=tempfld , lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! ------ - ! optional floe diameter and ice thickness to wave - ! ------ - - ! Sea ice thickness (m) - if (State_FldChk(exportState, 'Si_thick')) then - call state_setexport(exportState, 'Si_thick' , input=floethick , lmask=tmask, ifrac=ailohi, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if - - ! Sea ice floe diameter (m) - if (State_FldChk(exportState, 'Si_floediam')) then - call state_setexport(exportState, 'Si_floediam' , input=floediam , lmask=tmask, ifrac=ailohi, rc=rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if - ! ------ ! ice/atm fluxes computed by ice ! ------ ! Zonal air/ice stress - call state_setexport(exportState, 'Faii_taux' , input=tauxa, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'stress_on_air_ice_zonal' , input=tauxa, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Meridional air/ice stress - call state_setexport(exportState, 'Faii_tauy' , input=tauya, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'stress_on_air_ice_merid' , input=tauya, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Latent heat flux (atm into ice) - call state_setexport(exportState, 'Faii_lat' , input=flat, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_laten_heat_flx_atm_into_ice' , input=flat, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Sensible heat flux (atm into ice) - call state_setexport(exportState, 'Faii_sen' , input=fsens, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_sensi_heat_flx_atm_into_ice' , input=fsens, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! longwave outgoing (upward), average over ice fraction only - call state_setexport(exportState, 'Faii_lwup' , input=flwout, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_up_lw_flx_ice' , input=flwout, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Evaporative water flux (kg/m^2/s) - call state_setexport(exportState, 'Faii_evap' , input=evap, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice' , input=evap, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Shortwave flux absorbed in ice and ocean (W/m^2) - call state_setexport(exportState, 'Faii_swnet' , input=fswabs, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'Faii_swnet' , input=fswabs, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 
------ @@ -1211,53 +999,43 @@ subroutine ice_export( exportState, rc ) ! ------ ! flux of shortwave through ice to ocean - call state_setexport(exportState, 'Fioi_swpen' , input=fswthru, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn' , input=fswthru, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of vis dir shortwave through ice to ocean - call state_setexport(exportState, 'Fioi_swpen_vdr' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dir_flx' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of vis dif shortwave through ice to ocean - call state_setexport(exportState, 'Fioi_swpen_vdf' , input=fswthru_vdf, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_vis_dif_flx' , input=fswthru_vdf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of ir dir shortwave through ice to ocean - call state_setexport(exportState, 'Fioi_swpen_idr' , input=fswthru_idr, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dir_flx' , input=fswthru_idr, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! flux of ir dif shortwave through ice to ocean - call state_setexport(exportState, 'Fioi_swpen_idf' , input=fswthru_idf, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ir_dif_flx' , input=fswthru_idf, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! flux of heat exchange with ocean - call state_setexport(exportState, 'Fioi_melth' , input=fhocn, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + ! heat exchange with ocean + call state_setexport(exportState, 'net_heat_flx_to_ocn' , input=fhocn, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! flux fresh water to ocean (h2o flux from melting) - call state_setexport(exportState, 'Fioi_meltw' , input=fresh, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + ! fresh water to ocean (h2o flux from melting) + call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate' , input=fresh, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! flux of salt to ocean (salt flux from melting) - call state_setexport(exportState, 'Fioi_salt' , input=fsalt, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + ! salt to ocean (salt flux from melting) + call state_setexport(exportState, 'mean_salt_rate' , input=fsalt, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! stress n i/o zonal - call state_setexport(exportState, 'Fioi_taux' , input=tauxo, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'stress_on_ocn_ice_zonal' , input=tauxo, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! stress n i/o meridional - call state_setexport(exportState, 'Fioi_tauy' , input=tauyo, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'stress_on_ocn_ice_merid' , input=tauyo, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 
------ @@ -1266,22 +1044,19 @@ subroutine ice_export( exportState, rc ) ! hydrophobic bc if (State_FldChk(exportState, 'Fioi_bcpho')) then - call state_setexport(exportState, 'Fioi_bcpho' , input=faero_ocn, index=1, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'Fioi_bcpho' , input=faero_ocn, index=1, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if ! hydrophilic bc if (State_FldChk(exportState, 'Fioi_bcphi')) then - call state_setexport(exportState, 'Fioi_bcphi' , input=faero_ocn, index=2, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'Fioi_bcphi' , input=faero_ocn, index=2, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if ! dust if (State_FldChk(exportState, 'Fioi_flxdst')) then - call state_setexport(exportState, 'Fioi_flxdst' , input=faero_ocn, index=3, lmask=tmask, ifrac=ailohi, & - areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'Fioi_flxdst' , input=faero_ocn, index=3, lmask=tmask, ifrac=ailohi, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if @@ -1289,19 +1064,19 @@ subroutine ice_export( exportState, rc ) ! optional water isotope fluxes to ocean ! ------ - if (State_FldChk(exportState, 'Fioi_meltw_wiso')) then + if (State_FldChk(exportState, 'mean_fresh_water_to_ocean_rate_wiso')) then ! 16O => ungridded_index=1 ! 18O => ungridded_index=2 ! HDO => ungridded_index=3 - call state_setexport(exportState, 'Fioi_meltw_wiso' , input=fiso_ocn, index=1, & - lmask=tmask, ifrac=ailohi, ungridded_index=3, areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=1, & + lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_setexport(exportState, 'Fioi_meltw_wiso' , input=fiso_ocn, index=2, & - lmask=tmask, ifrac=ailohi, ungridded_index=1, areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=2, & + lmask=tmask, ifrac=ailohi, ungridded_index=1, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_setexport(exportState, 'Fioi_meltw_wiso' , input=fiso_ocn, index=3, & - lmask=tmask, ifrac=ailohi, ungridded_index=2, areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_fresh_water_to_ocean_rate_wiso' , input=fiso_ocn, index=3, & + lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end if @@ -1309,19 +1084,19 @@ subroutine ice_export( exportState, rc ) ! optional water isotope fluxes to atmospehre ! ------ - if (State_FldChk(exportState, 'Faii_evap_wiso')) then + if (State_FldChk(exportState, 'mean_evap_rate_atm_into_ice_wiso')) then ! 
Isotope evap to atm - call state_setexport(exportState, 'Faii_evap_wiso' , input=fiso_evap, index=1, & - lmask=tmask, ifrac=ailohi, ungridded_index=3, areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=1, & + lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_setexport(exportState, 'Faii_evap_wiso' , input=fiso_evap, index=2, & - lmask=tmask, ifrac=ailohi, ungridded_index=1, areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=2, & + lmask=tmask, ifrac=ailohi, ungridded_index=1, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_setexport(exportState, 'Faii_evap_wiso' , input=fiso_evap, index=3, & - lmask=tmask, ifrac=ailohi, ungridded_index=2, areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_evap_rate_atm_into_ice_wiso' , input=fiso_evap, index=3, & + lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! qref to atm + ! Isotope qref to atm call state_setexport(exportState, 'Si_qref_wiso' , input=Qref_iso, index=1, & lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -1338,18 +1113,18 @@ subroutine ice_export( exportState, rc ) ! ------ ! ice fraction by category - if ( State_FldChk(exportState, 'Si_ifrac_n') .and. & - State_FldChk(exportState, 'Fioi_swpen_ifrac_n')) then + if ( State_FldChk(exportState, 'ice_fraction_n') .and. & + State_FldChk(exportState, 'mean_sw_pen_to_ocn_ifrac_n')) then do n = 1,ncat - call state_setexport(exportState, 'Si_ifrac_n', input=aicen_init, index=n, & + call state_setexport(exportState, 'ice_fraction_n', input=aicen_init, index=n, & ungridded_index=n, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! penetrative shortwave by category ! Note: no need zero out pass-through fields over land for benefit of x2oacc fields in cpl hist files since ! the export state has been zeroed out at the beginning - call state_setexport(exportState, 'Fioi_swpen_ifrac_n', input=fswthrun_ai, index=n, & - lmask=tmask, ifrac=ailohi, ungridded_index=n, areacor=mod2med_areacor, rc=rc) + call state_setexport(exportState, 'mean_sw_pen_to_ocn_ifrac_n', input=fswthrun_ai, index=n, & + lmask=tmask, ifrac=ailohi, ungridded_index=n, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return end do end if @@ -1357,6 +1132,7 @@ subroutine ice_export( exportState, rc ) end subroutine ice_export !=============================================================================== + subroutine fldlist_add(num, fldlist, stdname, ungridded_lbound, ungridded_ubound) ! input/output variables @@ -1386,6 +1162,7 @@ subroutine fldlist_add(num, fldlist, stdname, ungridded_lbound, ungridded_ubound end subroutine fldlist_add !=============================================================================== + subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scalar_num, mesh, grid, tag, rc) use NUOPC, only : NUOPC_IsConnected, NUOPC_Realize @@ -1410,7 +1187,6 @@ subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scala integer :: n type(ESMF_Field) :: field character(len=80) :: stdname - character(ESMF_MAXSTR) :: msg character(len=*),parameter :: subname='(ice_import_export:fld_list_realize)' !
---------------------------------------------- @@ -1427,6 +1203,8 @@ subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scala if (ChkErr(rc,__LINE__,u_FILE_u)) return else if (present(mesh)) then + call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected using mesh", & + ESMF_LOGMSG_INFO) ! Create the field if (fldlist(n)%ungridded_lbound > 0 .and. fldlist(n)%ungridded_ubound > 0) then field = ESMF_FieldCreate(mesh, ESMF_TYPEKIND_R8, name=stdname, meshloc=ESMF_MESHLOC_ELEMENT, & @@ -1434,16 +1212,9 @@ subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scala ungriddedUbound=(/fldlist(n)%ungridded_ubound/), & gridToFieldMap=(/2/), rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - write(msg, '(a,i4,2x,i4)') trim(subname)//trim(tag)//" Field = "//trim(stdname)//& - " is connected using mesh with lbound, ubound = ",& - fldlist(n)%ungridded_lbound,fldlist(n)%ungridded_ubound - call ESMF_LogWrite(msg, ESMF_LOGMSG_INFO) else field = ESMF_FieldCreate(mesh, ESMF_TYPEKIND_R8, name=stdname, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - write(msg, '(a,i4,a,i4)') trim(subname)//trim(tag)//" Field = "//trim(stdname)//& - " is connected using mesh without ungridded dimension" - call ESMF_LogWrite(msg, ESMF_LOGMSG_INFO) end if else if (present(grid)) then call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected using grid", & @@ -1516,6 +1287,7 @@ end subroutine SetScalarField end subroutine fldlist_realize !=============================================================================== + logical function State_FldChk(State, fldname) ! ---------------------------------------------- ! Determine if field is in state @@ -1530,25 +1302,27 @@ logical function State_FldChk(State, fldname) ! ---------------------------------------------- call ESMF_StateGet(State, trim(fldname), itemType) + State_FldChk = (itemType /= ESMF_STATEITEM_NOTFOUND) end function State_FldChk !=============================================================================== - subroutine state_getimport_4d(state, fldname, output, index, ungridded_index, areacor, rc) + + subroutine state_getimport_4d_output(state, fldname, output, index, do_sum, ungridded_index, rc) ! ---------------------------------------------- ! Map import state field to output array ! ---------------------------------------------- ! input/output variables - type(ESMF_State) , intent(in) :: state - character(len=*) , intent(in) :: fldname - real (kind=dbl_kind) , intent(inout) :: output(:,:,:,:) - integer , intent(in) :: index - integer, optional , intent(in) :: ungridded_index - real(kind=dbl_kind), optional , intent(in) :: areacor(:) - integer , intent(out) :: rc + type(ESMF_State) , intent(in) :: state + character(len=*) , intent(in) :: fldname + real (kind=dbl_kind) , intent(inout) :: output(:,:,:,:) + integer , intent(in) :: index + logical, optional , intent(in) :: do_sum + integer, optional , intent(in) :: ungridded_index + integer , intent(out) :: rc ! local variables type(block) :: this_block ! block information for current block @@ -1556,7 +1330,9 @@ subroutine state_getimport_4d(state, fldname, output, index, ungridded_index, ar integer :: i, j, iblk, n, i1, j1 ! incides real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh - character(len=*), parameter :: subname='(ice_import_export:state_getimport_4d)' + real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! 
grid + real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid + character(len=*), parameter :: subname='(ice_import_export:state_getimport)' ! ---------------------------------------------- rc = ESMF_SUCCESS @@ -1564,65 +1340,103 @@ subroutine state_getimport_4d(state, fldname, output, index, ungridded_index, ar ! check that fieldname exists if (.not. State_FldChk(state, trim(fldname))) return - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataPtr2d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataPtr1d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if + if (geomtype == ESMF_GEOMTYPE_MESH) then - ! set values of output array - n=0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - if (present(ungridded_index)) then - output(i,j,index,iblk) = dataPtr2d(ungridded_index,n) - else - output(i,j,index,iblk) = dataPtr1d(n) - end if - end do - end do - end do - if (present(areacor)) then - n = 0 + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! set values of output array + n=0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo; ihi = this_block%ihi - jlo = this_block%jlo; jhi = this_block%jhi + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi do j = jlo, jhi do i = ilo, ihi - n = n + 1 - output(i,j,index,iblk) = output(i,j,index,iblk) * areacor(n) + n = n+1 + if (present(do_sum)) then ! do sum + if (present(ungridded_index)) then + output(i,j,index,iblk) = output(i,j,index, iblk) + dataPtr2d(ungridded_index,n) + else + output(i,j,index,iblk) = output(i,j,index, iblk) + dataPtr1d(n) + end if + else ! do not do sum + if (present(ungridded_index)) then + output(i,j,index,iblk) = dataPtr2d(ungridded_index,n) + else + output(i,j,index,iblk) = dataPtr1d(n) + end if + end if + end do + end do + end do + + else if (geomtype == ESMF_GEOMTYPE_GRID) then + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataptr4d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataptr3d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! 
set values of output array + do iblk = 1,nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + i1 = i - ilo + 1 + j1 = j - jlo + 1 + if (present(do_sum)) then + if (present(ungridded_index)) then + output(i,j,index,iblk) = output(i,j,index,iblk) + dataPtr4d(i1,j1,iblk,ungridded_index) + else + output(i,j,index,iblk) = output(i,j,index,iblk) + dataPtr3d(i1,j1,iblk) + end if + else + if (present(ungridded_index)) then + output(i,j,index,iblk) = dataPtr4d(i1,j1,iblk,ungridded_index) + else + output(i,j,index,iblk) = dataPtr3d(i1,j1,iblk) + end if + end if end do end do end do + end if - end subroutine state_getimport_4d + end subroutine state_getimport_4d_output !=============================================================================== - subroutine state_getimport_3d(state, fldname, output, ungridded_index, areacor, rc) + + subroutine state_getimport_3d_output(state, fldname, output, do_sum, ungridded_index, rc) ! ---------------------------------------------- ! Map import state field to output array ! ---------------------------------------------- ! input/output variables - type(ESMF_State) , intent(in) :: state - character(len=*) , intent(in) :: fldname - real (kind=dbl_kind) , intent(inout) :: output(:,:,:) - integer, optional , intent(in) :: ungridded_index - real(kind=dbl_kind), optional , intent(in) :: areacor(:) - integer , intent(out) :: rc + type(ESMF_State) , intent(in) :: state + character(len=*) , intent(in) :: fldname + real (kind=dbl_kind) , intent(inout) :: output(:,:,:) + logical, optional , intent(in) :: do_sum + integer, optional , intent(in) :: ungridded_index + integer , intent(out) :: rc ! local variables type(block) :: this_block ! block information for current block @@ -1630,7 +1444,9 @@ subroutine state_getimport_3d(state, fldname, output, ungridded_index, areacor, integer :: i, j, iblk, n, i1, j1 ! incides real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh - character(len=*) , parameter :: subname='(ice_import_export:state_getimport_3d)' + real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid + real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid + character(len=*) , parameter :: subname='(ice_import_export:state_getimport)' ! ---------------------------------------------- rc = ESMF_SUCCESS @@ -1638,53 +1454,83 @@ subroutine state_getimport_3d(state, fldname, output, ungridded_index, areacor, ! check that fieldname exists if (.not. State_FldChk(state, trim(fldname))) return - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataPtr2d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataPtr1d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if + if (geomtype == ESMF_GEOMTYPE_MESH) then - ! determine output array - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - if (present(ungridded_index)) then - output(i,j,iblk) = dataPtr2d(ungridded_index,n) - else - output(i,j,iblk) = dataPtr1d(n) - end if - end do - end do - end do - if (present(areacor)) then - n = 0 + ! 
get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! determine output array + n=0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo; ihi = this_block%ihi - jlo = this_block%jlo; jhi = this_block%jhi + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi do j = jlo, jhi do i = ilo, ihi - n = n + 1 - output(i,j,iblk) = output(i,j,iblk) * areacor(n) + n = n+1 + if (present(do_sum) .and. present(ungridded_index)) then + output(i,j,iblk) = output(i,j,iblk) + dataPtr2d(ungridded_index,n) + else if (present(do_sum)) then + output(i,j,iblk) = output(i,j,iblk) + dataPtr1d(n) + else if (present(ungridded_index)) then + output(i,j,iblk) = dataPtr2d(ungridded_index,n) + else + output(i,j,iblk) = dataPtr1d(n) + end if end do end do end do + + else if (geomtype == ESMF_GEOMTYPE_GRID) then + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataptr4d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataptr3d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! set values of output array + do iblk = 1,nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + i1 = i - ilo + 1 + j1 = j - jlo + 1 + if (present(do_sum) .and. present(ungridded_index)) then + output(i,j,iblk) = output(i,j,iblk) + dataPtr4d(i1,j1,iblk,ungridded_index) + else if (present(do_sum)) then + output(i,j,iblk) = output(i,j,iblk) + dataPtr3d(i1,j1,iblk) + else if (present(ungridded_index)) then + output(i,j,iblk) = dataPtr4d(i1,j1,iblk, ungridded_index) + else + output(i,j,iblk) = dataPtr3d(i1,j1,iblk) + end if + end do + end do + end do + end if - end subroutine state_getimport_3d + end subroutine state_getimport_3d_output !=============================================================================== - subroutine state_setexport_4d(state, fldname, input, index, lmask, ifrac, ungridded_index, areacor, rc) + + subroutine state_setexport_4d_input(state, fldname, input, index, lmask, ifrac, ungridded_index, rc) ! ---------------------------------------------- ! Map 4d input array to export state field @@ -1698,7 +1544,6 @@ subroutine state_setexport_4d(state, fldname, input, index, lmask, ifrac, ungrid logical , optional, intent(in) :: lmask(:,:,:) real(kind=dbl_kind) , optional, intent(in) :: ifrac(:,:,:) integer , optional, intent(in) :: ungridded_index - real(kind=dbl_kind) , optional, intent(in) :: areacor(:) integer , intent(out) :: rc ! local variables @@ -1707,8 +1552,9 @@ subroutine state_setexport_4d(state, fldname, input, index, lmask, ifrac, ungrid integer :: i, j, iblk, n, i1, j1 ! indices real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh - integer :: ice_num - character(len=*), parameter :: subname='(ice_import_export:state_setexport_4d)' + real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid + real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid + character(len=*), parameter :: subname='(ice_import_export:state_setexport)' ! 
---------------------------------------------- rc = ESMF_SUCCESS @@ -1716,83 +1562,93 @@ subroutine state_setexport_4d(state, fldname, input, index, lmask, ifrac, ungrid ! check that fieldname exists if (.not. State_FldChk(state, trim(fldname))) return - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataPtr2d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - if (ungridded_index == 1) then - dataptr2d(:,:) = c0 + if (geomtype == ESMF_GEOMTYPE_MESH) then + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return end if + + ! set values of field pointer n = 0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo; ihi = this_block%ihi - jlo = this_block%jlo; jhi = this_block%jhi - if (present(lmask) .and. present(ifrac)) then - do j = jlo, jhi - do i = ilo, ihi - n = n+1 + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(lmask) .and. present(ifrac)) then if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then + if (present(ungridded_index)) then + dataPtr2d(ungridded_index,n) = input(i,j,index,iblk) + else + dataPtr1d(n) = input(i,j,index,iblk) + end if + end if + else + if (present(ungridded_index)) then dataPtr2d(ungridded_index,n) = input(i,j,index,iblk) else - dataPtr2d(ungridded_index,n) = c0 + dataPtr1d(n) = input(i,j,index,iblk) end if - end do - end do - else - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - dataPtr2d(ungridded_index,n) = input(i,j,index,iblk) - end do + end if end do - end if - end do - ice_num = n - if (present(areacor)) then - do n = 1,ice_num - dataPtr2d(ungridded_index,n) = dataPtr2d(ungridded_index,n) * areacor(n) end do + end do + + else if (geomtype == ESMF_GEOMTYPE_GRID) then + + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataptr4d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataptr3d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return end if - else - call state_getfldptr(state, trim(fldname), dataPtr1d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - dataptr1d(:) = c0 - n = 0 - do iblk = 1, nblocks + + do iblk = 1,nblocks this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo; ihi = this_block%ihi - jlo = this_block%jlo; jhi = this_block%jhi - if (present(lmask) .and. present(ifrac)) then - do j = jlo, jhi - do i = ilo, ihi - n = n+1 + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + i1 = i - ilo + 1 + j1 = j - jlo + 1 + if (present(lmask) .and. present(ifrac)) then if ( lmask(i,j,iblk) .and. 
ifrac(i,j,iblk) > c0 ) then - dataPtr1d(n) = input(i,j,index,iblk) + if (present(ungridded_index)) then + dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,index,iblk) + end if + else + dataPtr3d(i1,j1,iblk) = input(i,j,index,iblk) end if - end do - end do - else - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - dataPtr1d(n) = input(i,j,index,iblk) - end do + else + if (present(ungridded_index)) then + dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,index,iblk) + else + dataPtr3d(i1,j1,iblk) = input(i,j,index,iblk) + end if + end if end do - end if - end do - ice_num = n - if (present(areacor)) then - do n = 1,ice_num - dataPtr1d(n) = dataPtr1d(n) * areacor(n) end do - end if + end do + end if - end subroutine state_setexport_4d + end subroutine state_setexport_4d_input !=============================================================================== - subroutine state_setexport_3d(state, fldname, input, lmask, ifrac, ungridded_index, areacor, rc) + + subroutine state_setexport_3d_input(state, fldname, input, lmask, ifrac, ungridded_index, rc) ! ---------------------------------------------- ! Map 3d input array to export state field @@ -1805,7 +1661,6 @@ subroutine state_setexport_3d(state, fldname, input, lmask, ifrac, ungridded_ind logical , optional , intent(in) :: lmask(:,:,:) real(kind=dbl_kind) , optional , intent(in) :: ifrac(:,:,:) integer , optional , intent(in) :: ungridded_index - real(kind=dbl_kind) , optional , intent(in) :: areacor(:) integer , intent(out) :: rc ! local variables @@ -1814,8 +1669,9 @@ subroutine state_setexport_3d(state, fldname, input, lmask, ifrac, ungridded_ind integer :: i, j, iblk, n, i1, j1 ! incides real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh - integer :: num_ice - character(len=*), parameter :: subname='(ice_import_export:state_setexport_3d)' + real(kind=dbl_kind), pointer :: dataPtr3d(:,:,:) ! grid + real(kind=dbl_kind), pointer :: dataPtr4d(:,:,:,:) ! grid + character(len=*), parameter :: subname='(ice_import_export:state_setexport)' ! ---------------------------------------------- rc = ESMF_SUCCESS @@ -1823,59 +1679,92 @@ subroutine state_setexport_3d(state, fldname, input, lmask, ifrac, ungridded_ind ! check that fieldname exists if (.not. State_FldChk(state, trim(fldname))) return - ! get field pointer - if (present(ungridded_index)) then - call state_getfldptr(state, trim(fldname), dataPtr2d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - else - call state_getfldptr(state, trim(fldname), dataPtr1d, rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - end if + if (geomtype == ESMF_GEOMTYPE_MESH) then - n = 0 - do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = jlo, jhi - do i = ilo, ihi - n = n+1 - if (present(lmask) .and. present(ifrac)) then - if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(lmask) .and. present(ifrac)) then + if ( lmask(i,j,iblk) .and. 
ifrac(i,j,iblk) > c0 ) then + if (present(ungridded_index)) then + dataPtr2d(ungridded_index,n) = input(i,j,iblk) + else + dataPtr1d(n) = input(i,j,iblk) + end if + end if + else if (present(ungridded_index)) then dataPtr2d(ungridded_index,n) = input(i,j,iblk) else dataPtr1d(n) = input(i,j,iblk) end if end if - else - if (present(ungridded_index)) then - dataPtr2d(ungridded_index,n) = input(i,j,iblk) - else - dataPtr1d(n) = input(i,j,iblk) - end if - end if + end do end do end do - end do - num_ice = n - if (present(areacor)) then + + else if (geomtype == ESMF_GEOMTYPE_GRID) then + + ! get field pointer if (present(ungridded_index)) then - do n = 1,num_ice - dataPtr2d(:,n) = dataPtr2d(:,n) * areacor(n) - end do + call state_getfldptr(state, trim(fldname), dataptr4d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return else - do n = 1,num_ice - dataPtr1d(n) = dataPtr1d(n) * areacor(n) - end do + call state_getfldptr(state, trim(fldname), dataptr3d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return end if + + do iblk = 1,nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + i1 = i - ilo + 1 + j1 = j - jlo + 1 + if (present(lmask) .and. present(ifrac)) then + if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then + if (present(ungridded_index)) then + dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,iblk) + else + dataPtr3d(i1,j1,iblk) = input(i,j,iblk) + end if + end if + else + if (present(ungridded_index)) then + dataPtr4d(i1,j1,iblk,ungridded_index) = input(i,j,iblk) + else + dataPtr3d(i1,j1,iblk) = input(i,j,iblk) + end if + end if + end do + end do + end do + end if - end subroutine state_setexport_3d + end subroutine state_setexport_3d_input !=============================================================================== + subroutine State_GetFldPtr_1d(State, fldname, fldptr, rc) ! ---------------------------------------------- ! Get pointer to a state field @@ -1899,10 +1788,10 @@ subroutine State_GetFldPtr_1d(State, fldname, fldptr, rc) call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - end subroutine State_GetFldPtr_1d !=============================================================================== + subroutine State_GetFldPtr_2d(State, fldname, fldptr, rc) ! ---------------------------------------------- ! Get pointer to a state field @@ -1926,7 +1815,60 @@ subroutine State_GetFldPtr_2d(State, fldname, fldptr, rc) call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - end subroutine State_GetFldPtr_2d + !=============================================================================== + + subroutine State_GetFldPtr_3d(State, fldname, fldptr, rc) + ! ---------------------------------------------- + ! Get pointer to a state field + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: State + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:,:,:) + integer , optional , intent(out) :: rc + + ! local variables + type(ESMF_Field) :: lfield + character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_3d)' + ! 
---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end subroutine State_GetFldPtr_3d + + !=============================================================================== + + subroutine State_GetFldPtr_4d(State, fldname, fldptr, rc) + ! ---------------------------------------------- + ! Get pointer to a state field + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: State + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:,:,:,:) + integer , optional , intent(out) :: rc + + ! local variables + type(ESMF_Field) :: lfield + character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_4d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end subroutine State_GetFldPtr_4d + end module ice_import_export diff --git a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 index b46f22ff7..78ea39b4e 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_prescribed_mod.F90 @@ -7,35 +7,39 @@ module ice_prescribed_mod ! Ice/ocean fluxes are set to zero, and ice dynamics are not calculated. ! Regridding and data cycling capabilities are included. - use ESMF, only : ESMF_Clock, ESMF_Mesh, ESMF_SUCCESS, ESMF_FAILURE - use ESMF, only : ESMF_LogFoundError, ESMF_LOGERR_PASSTHRU, ESMF_Finalize, ESMF_END_ABORT - #ifndef CESMCOUPLED use ice_kinds_mod + implicit none private ! except + public :: ice_prescribed_init ! initialize input data stream logical(kind=log_kind), parameter, public :: prescribed_ice = .false. ! true if prescribed ice + contains ! This is a stub routine for now - subroutine ice_prescribed_init(clock, mesh, rc) - type(ESMF_Clock) , intent(in) :: clock - type(ESMF_Mesh) , intent(in) :: mesh - integer , intent(out) :: rc + subroutine ice_prescribed_init(mpicom, compid, gindex) + integer(kind=int_kind), intent(in) :: mpicom + integer(kind=int_kind), intent(in) :: compid + integer(kind=int_kind), intent(in) :: gindex(:) !
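A note on how the rank-specific helpers above are reached from the import/export code: the single names used throughout ice_import and ice_export (state_getimport, state_setexport, state_getfldptr) presumably resolve to the _1d/_2d/_3d/_4d specifics through Fortran generic interfaces declared near the top of the module, outside the hunks shown here; that is an assumption, and every name in the sketch below is hypothetical. A minimal, self-contained example of the dispatch-by-rank pattern:

! Hypothetical illustration only -- not part of this patch or of ice_import_export.
! One public name resolves to a rank-specific routine at compile time.
module getfldptr_sketch
  implicit none
  private
  public :: getfldptr

  interface getfldptr
     module procedure getfldptr_1d
     module procedure getfldptr_2d
  end interface getfldptr

contains

  subroutine getfldptr_1d(fldptr)
    real, pointer, intent(inout) :: fldptr(:)   ! rank-1 target (flat field)
    print *, 'resolved to the rank-1 specific, size =', size(fldptr)
  end subroutine getfldptr_1d

  subroutine getfldptr_2d(fldptr)
    real, pointer, intent(inout) :: fldptr(:,:) ! rank-2 target (field with an ungridded dimension)
    print *, 'resolved to the rank-2 specific, shape =', shape(fldptr)
  end subroutine getfldptr_2d

end module getfldptr_sketch

program demo_getfldptr
  use getfldptr_sketch, only : getfldptr
  implicit none
  real, pointer :: p1(:), p2(:,:)
  allocate(p1(4), p2(2,3))
  call getfldptr(p1)   ! compiler picks getfldptr_1d
  call getfldptr(p2)   ! compiler picks getfldptr_2d
  deallocate(p1, p2)
end program demo_getfldptr

Calling the generic name with a rank-1 pointer binds to the rank-1 specific and with a rank-2 pointer to the rank-2 specific, which is why one call site can serve both flat fields and fields with an ungridded dimension.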
do nothing - rc = ESMF_SUCCESS end subroutine ice_prescribed_init -#else - - use ice_kinds_mod - use shr_nl_mod , only : shr_nl_find_group_name - use dshr_strdata_mod , only : shr_strdata_type, shr_strdata_print - use dshr_strdata_mod , only : shr_strdata_init_from_inline, shr_strdata_advance - use dshr_methods_mod , only : dshr_fldbun_getfldptr +#else + + use shr_nl_mod , only : shr_nl_find_group_name + use shr_strdata_mod + use shr_dmodel_mod + use shr_string_mod + use shr_ncread_mod + use shr_sys_mod + use shr_mct_mod + use mct_mod + use pio use ice_broadcast use ice_communicate , only : my_task, master_task, MPI_COMM_ICE + use ice_kinds_mod use ice_fileunits use ice_exit , only : abort_ice use ice_domain_size , only : nx_global, ny_global, ncat, nilyr, nslyr, max_blocks @@ -43,295 +47,313 @@ end subroutine ice_prescribed_init use ice_blocks , only : nx_block, ny_block, block, get_block use ice_domain , only : nblocks, distrb_info, blocks_ice use ice_grid , only : TLAT, TLON, hm, tmask, tarea, grid_type, ocn_gridcell_frac - use ice_calendar , only : idate, calendar_type + use ice_calendar , only : idate, sec, calendar_type use ice_arrays_column , only : hin_max use ice_read_write use ice_exit , only: abort_ice use icepack_intfc , only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc , only: icepack_query_tracer_indices, icepack_query_tracer_sizes use icepack_intfc , only: icepack_query_parameters - use ice_shr_methods , only: chkerr implicit none private ! except - ! public member functions: - public :: ice_prescribed_init ! initialize input data stream - public :: ice_prescribed_run ! get time slices and time interp - public :: ice_prescribed_phys ! set prescribed ice state and fluxes - - ! public data members: - logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice - - ! private data members: - type(shr_strdata_type) :: sdat ! prescribed data stream - real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! ice cover - - character(*), parameter :: u_FILE_u = & - __FILE__ + ! MEMBER FUNCTIONS: + public :: ice_prescribed_init ! initialize input data stream + public :: ice_prescribed_run ! get time slices and time interp + public :: ice_prescribed_phys ! set prescribed ice state and fluxes + + ! !PUBLIC DATA MEMBERS: + logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice + integer(kind=int_kind),parameter :: nFilesMaximum = 400 ! max number of files + integer(kind=int_kind) :: stream_year_first ! first year in stream to use + integer(kind=int_kind) :: stream_year_last ! last year in stream to use + integer(kind=int_kind) :: model_year_align ! align stream_year_first with this model year + character(len=char_len_long) :: stream_fldVarName + character(len=char_len_long) :: stream_fldFileName(nFilesMaximum) + character(len=char_len_long) :: stream_domTvarName + character(len=char_len_long) :: stream_domXvarName + character(len=char_len_long) :: stream_domYvarName + character(len=char_len_long) :: stream_domAreaName + character(len=char_len_long) :: stream_domMaskName + character(len=char_len_long) :: stream_domFileName + character(len=char_len_long) :: stream_mapread + logical(kind=log_kind) :: prescribed_ice_fill ! true if data fill required + type(shr_strdata_type) :: sdat ! prescribed data stream + character(len=char_len_long) :: fldList ! list of fields in data stream + real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! 
ice cover -!======================================================================= contains -!=============================================================================== - subroutine ice_prescribed_init(clock, mesh, rc) + subroutine ice_prescribed_init(mpicom, compid, gindex) - ! Prescribed ice initialization + ! Prescribed ice initialization - needed to + ! work with new shr_strdata module derived type - include 'mpif.h' + use shr_pio_mod, only : shr_pio_getiotype, shr_pio_getiosys, shr_pio_getioformat - ! input/output parameters - type(ESMF_Clock) , intent(in) :: clock - type(ESMF_Mesh) , intent(in) :: mesh - integer , intent(out) :: rc - - ! local parameters - integer(kind=int_kind),parameter :: nFilesMaximum = 400 ! max number of files - integer(kind=int_kind) :: n, nFile, ierr - integer(kind=int_kind) :: nml_error ! namelist i/o error flag - character(len=char_len_long) :: stream_meshFile - character(len=char_len_long) :: stream_dataFiles(nFilesMaximum) - character(len=char_len_long) :: stream_varname - character(len=char_len_long) :: stream_mapalgo - integer(kind=int_kind) :: stream_yearfirst ! first year in stream to use - integer(kind=int_kind) :: stream_yearlast ! last year in stream to use - integer(kind=int_kind) :: stream_yearalign ! align stream_year_first - integer(kind=int_kind) :: nu_nml - logical :: prescribed_ice_mode - character(*),parameter :: subName = "('ice_prescribed_init')" - character(*),parameter :: F00 = "('(ice_prescribed_init) ',4a)" - character(*),parameter :: F01 = "('(ice_prescribed_init) ',a,i0)" - character(*),parameter :: F02 = "('(ice_prescribed_init) ',2a,i0,)" - !-------------------------------- + implicit none + include 'mpif.h' - namelist /ice_prescribed_nml/ & - prescribed_ice_mode, & - stream_meshfile, & - stream_varname , & - stream_datafiles, & - stream_mapalgo, & - stream_yearalign, & - stream_yearfirst , & - stream_yearlast + ! !nput/output parameters: + integer(kind=int_kind), intent(in) :: mpicom + integer(kind=int_kind), intent(in) :: compid + integer(kind=int_kind), intent(in) :: gindex(:) - rc = ESMF_SUCCESS + !----- Local ------ + type(mct_gsMap) :: gsmap_ice + type(mct_gGrid) :: dom_ice + integer(kind=int_kind) :: lsize + integer(kind=int_kind) :: gsize + integer(kind=int_kind) :: nml_error ! namelist i/o error flag + integer(kind=int_kind) :: n, nFile, ierr + character(len=8) :: fillalgo + character(*),parameter :: subName = '(ice_prescribed_init)' + + namelist /ice_prescribed_nml/ & + prescribed_ice, & + model_year_align, & + stream_year_first , & + stream_year_last , & + stream_fldVarName , & + stream_fldFileName, & + stream_domTvarName, & + stream_domXvarName, & + stream_domYvarName, & + stream_domAreaName, & + stream_domMaskName, & + stream_domFileName, & + stream_mapread, & + prescribed_ice_fill ! default values for namelist - prescribed_ice_mode = .false. ! if true, prescribe ice - stream_yearfirst = 1 ! first year in pice stream to use - stream_yearlast = 1 ! last year in pice stream to use - stream_yearalign = 1 ! align stream_year_first with this model year - stream_varname = 'ice_cov' - stream_meshfile = ' ' - stream_datafiles(:) = ' ' - stream_mapalgo = 'bilinear' - - ! read namelist on master task + prescribed_ice = .false. ! if true, prescribe ice + stream_year_first = 1 ! first year in pice stream to use + stream_year_last = 1 ! last year in pice stream to use + model_year_align = 1 ! 
align stream_year_first with this model year + stream_fldVarName = 'ice_cov' + stream_fldFileName(:) = ' ' + stream_domTvarName = 'time' + stream_domXvarName = 'lon' + stream_domYvarName = 'lat' + stream_domAreaName = 'area' + stream_domMaskName = 'mask' + stream_domFileName = ' ' + stream_mapread = 'NOT_SET' + prescribed_ice_fill = .false. ! true if pice data fill required + + ! read from input file + call get_fileunit(nu_nml) if (my_task == master_task) then - open (newunit=nu_nml, file=nml_filename, status='old',iostat=nml_error) + open (nu_nml, file=nml_filename, status='old',iostat=nml_error) call shr_nl_find_group_name(nu_nml, 'ice_prescribed_nml', status=nml_error) - if (nml_error /= 0) then - write(nu_diag,F00) "ERROR: problem on read of ice_prescribed_nml namelist" - call abort_ice(subName) + if (nml_error == 0) then + read(nu_nml, ice_prescribed_nml, iostat=nml_error) + if (nml_error > 0) then + call shr_sys_abort( 'problem on read of ice_prescribed namelist in ice_prescribed_mod' ) + endif endif - read(nu_nml, ice_prescribed_nml, iostat=nml_error) - close(nu_nml) end if + call release_fileunit(nu_nml) + call broadcast_scalar(prescribed_ice, master_task) + + ! *** If not prescribed ice then return *** + if (.not. prescribed_ice) RETURN + + call broadcast_scalar(model_year_align,master_task) + call broadcast_scalar(stream_year_first,master_task) + call broadcast_scalar(stream_year_last,master_task) + call broadcast_scalar(stream_fldVarName,master_task) + call broadcast_scalar(stream_domTvarName,master_task) + call broadcast_scalar(stream_domXvarName,master_task) + call broadcast_scalar(stream_domYvarName,master_task) + call broadcast_scalar(stream_domAreaName,master_task) + call broadcast_scalar(stream_domMaskName,master_task) + call broadcast_scalar(stream_domFileName,master_task) + call broadcast_scalar(stream_mapread,master_task) + call broadcast_scalar(prescribed_ice_fill,master_task) + call mpi_bcast(stream_fldFileName, len(stream_fldFileName(1))*NFilesMaximum, & + MPI_CHARACTER, 0, MPI_COMM_ICE, ierr) + + nFile = 0 + do n=1,nFilesMaximum + if (stream_fldFileName(n) /= ' ') nFile = nFile + 1 + end do - ! broadcast namelist input - call broadcast_scalar(prescribed_ice_mode, master_task) - - ! set module variable 'prescribed_ice' - prescribed_ice = prescribed_ice_mode - - ! -------------------------------------------------- - ! only do the following if prescribed ice mode is on - ! -------------------------------------------------- - - if (prescribed_ice_mode) then - - call broadcast_scalar(stream_yearalign , master_task) - call broadcast_scalar(stream_yearfirst , master_task) - call broadcast_scalar(stream_yearlast , master_task) - call broadcast_scalar(stream_meshfile , master_task) - call broadcast_scalar(stream_mapalgo , master_task) - call broadcast_scalar(stream_varname , master_task) - call mpi_bcast(stream_dataFiles, len(stream_datafiles(1))*NFilesMaximum, MPI_CHARACTER, 0, MPI_COMM_ICE, ierr) + ! Read shr_strdata_nml namelist + if (prescribed_ice_fill) then + fillalgo='nn' + else + fillalgo='none' + endif - nFile = 0 - do n = 1,nFilesMaximum - if (stream_datafiles(n) /= ' ') nFile = nFile + 1 + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,*) 'This is the prescribed ice coverage option.' 
+ write(nu_diag,*) ' stream_year_first = ',stream_year_first + write(nu_diag,*) ' stream_year_last = ',stream_year_last + write(nu_diag,*) ' model_year_align = ',model_year_align + write(nu_diag,*) ' stream_fldVarName = ',trim(stream_fldVarName) + do n = 1,nFile + write(nu_diag,*) ' stream_fldFileName = ',trim(stream_fldFileName(n)),n end do + write(nu_diag,*) ' stream_domTvarName = ',trim(stream_domTvarName) + write(nu_diag,*) ' stream_domXvarName = ',trim(stream_domXvarName) + write(nu_diag,*) ' stream_domYvarName = ',trim(stream_domYvarName) + write(nu_diag,*) ' stream_domFileName = ',trim(stream_domFileName) + write(nu_diag,*) ' stream_mapread = ',trim(stream_mapread) + write(nu_diag,*) ' stream_fillalgo = ',trim(fillalgo) + write(nu_diag,*) ' ' + endif + + gsize = nx_global*ny_global + lsize = size(gindex) + call mct_gsMap_init( gsmap_ice, gindex, MPI_COMM_ICE, compid, lsize, gsize) + call ice_prescribed_set_domain( lsize, MPI_COMM_ICE, gsmap_ice, dom_ice ) + + call shr_strdata_create(sdat,name="prescribed_ice", & + mpicom=MPI_COMM_ICE, compid=compid, & + gsmap=gsmap_ice, ggrid=dom_ice, & + nxg=nx_global,nyg=ny_global, & + yearFirst=stream_year_first, & + yearLast=stream_year_last, & + yearAlign=model_year_align, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_domFileName), & + domTvarName=stream_domTvarName, & + domXvarName=stream_domXvarName, & + domYvarName=stream_domYvarName, & + domAreaName=stream_domAreaName, & + domMaskName=stream_domMaskName, & + filePath='', & + filename=stream_fldFileName(1:nFile), & + fldListFile=stream_fldVarName, & + fldListModel=stream_fldVarName, & + fillalgo=trim(fillalgo), & + calendar=trim(calendar_type), & + mapread=trim(stream_mapread)) - if (my_task == master_task) then - write(nu_diag,*) ' ' - write(nu_diag,F00) 'This is the prescribed ice coverage option.' - write(nu_diag,F01) ' stream_yearfirst = ',stream_yearfirst - write(nu_diag,F01) ' stream_yearlast = ',stream_yearlast - write(nu_diag,F01) ' stream_yearalign = ',stream_yearalign - write(nu_diag,F00) ' stream_meshfile = ',trim(stream_meshfile) - write(nu_diag,F00) ' stream_varname = ',trim(stream_varname) - write(nu_diag,F00) ' stream_mapalgo = ',trim(stream_mapalgo) - do n = 1,nFile - write(nu_diag,F00) ' stream_datafiles = ',trim(stream_dataFiles(n)) - end do - write(nu_diag,*) ' ' - endif - - ! initialize sdat - call shr_strdata_init_from_inline(sdat, & - my_task = my_task, & - logunit = nu_diag, & - compname = 'ICE', & - model_clock = clock, & - model_mesh = mesh, & - stream_meshfile = stream_meshfile, & - stream_lev_dimname = 'null', & - stream_mapalgo = trim(stream_mapalgo), & - stream_filenames = stream_datafiles(1:nfile), & - stream_fldlistFile = (/'ice_cov'/), & - stream_fldListModel = (/'ice_cov'/), & - stream_yearFirst = stream_yearFirst, & - stream_yearLast = stream_yearLast, & - stream_yearAlign = stream_yearAlign , & - stream_offset = 0, & - stream_taxmode = 'cycle', & - stream_dtlimit = 1.5_dbl_kind, & - stream_tintalgo = 'linear', & - rc = rc) - if (ChkErr(rc,__LINE__,u_FILE_u)) return - - ! print out sdat info - if (my_task == master_task) then - call shr_strdata_print(sdat,'ice coverage prescribed data') - endif - - ! For one ice category, set hin_max(1) to something big - if (ncat == 1) then - hin_max(1) = 999._dbl_kind - end if - - end if ! end of if prescribed ice mode + if (my_task == master_task) then + call shr_strdata_print(sdat,'SPRESICE data') + endif + !----------------------------------------------------------------- + ! 
For one ice category, set hin_max(1) to something big + !----------------------------------------------------------------- + if (ncat == 1) then + hin_max(1) = 999._dbl_kind + end if end subroutine ice_prescribed_init !======================================================================= subroutine ice_prescribed_run(mDateIn, secIn) - ! Finds two time slices bounding current model time, remaps if necessary - ! Interpolate to new ice coverage + ! !DESCRIPTION: + ! Finds two time slices bounding current model time, remaps if necessary - ! input/output parameters: - integer(kind=int_kind), intent(in) :: mDateIn ! Current model date (yyyymmdd) - integer(kind=int_kind), intent(in) :: secIn ! Elapsed seconds on model date - - ! local variables - integer(kind=int_kind) :: i,j,n,iblk ! loop indices and counter - integer(kind=int_kind) :: ilo,ihi,jlo,jhi ! beginning and end of physical domain - type (block) :: this_block - real(kind=dbl_kind) :: aice_max ! maximun ice concentration - real(kind=dbl_kind), pointer :: dataptr(:) - integer :: rc ! ESMF return code - character(*),parameter :: subName = "('ice_prescribed_run')" - character(*),parameter :: F00 = "('(ice_prescribed_run) ',a,2g20.13)" - logical :: first_time = .true. - !------------------------------------------------------------------------ + implicit none - rc = ESMF_SUCCESS + ! !INPUT/OUTPUT PARAMETERS: + integer(kind=int_kind), intent(in) :: mDateIn ! Current model date (yyyymmdd) + integer(kind=int_kind), intent(in) :: secIn ! Elapsed seconds on model date - ! Advance sdat stream - call shr_strdata_advance(sdat, ymd=mDateIn, tod=SecIn, logunit=nu_diag, istr='cice_pice', rc=rc) - if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then - call ESMF_Finalize(endflag=ESMF_END_ABORT) - end if + ! local variables + integer(kind=int_kind) :: i,j,n,iblk ! loop indices and counter + integer(kind=int_kind) :: ilo,ihi,jlo,jhi ! beginning and end of physical domain + type (block) :: this_block + real(kind=dbl_kind) :: aice_max ! maximum ice concentration + logical, save :: first_time = .true. + character(*),parameter :: subName = '(ice_prescribed_run)' + character(*),parameter :: F00 = "(a,2g20.13)" - ! Get pointer for stream data that is time and spatially interpolate to model time and grid - call dshr_fldbun_getFldPtr(sdat%pstrm(1)%fldbun_model, 'ice_cov', dataptr, rc=rc) - if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then - call ESMF_Finalize(endflag=ESMF_END_ABORT) - end if + !------------------------------------------------------------------------ + ! Interpolate to new ice coverage + !------------------------------------------------------------------------ + + call shr_strdata_advance(sdat,mDateIn,SecIn,MPI_COMM_ICE,'cice_pice') - ! Fill in module ice_cov array - if (.not. allocated(ice_cov)) then + if (first_time) then allocate(ice_cov(nx_block,ny_block,max_blocks)) - end if + endif + ice_cov(:,:,:) = c0 ! This initializes ghost cells as well - n = 0 + + n=0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi + do j = jlo, jhi do i = ilo, ihi n = n+1 - ice_cov(i,j,iblk) = dataptr(n) + ice_cov(i,j,iblk) = sdat%avs(1)%rAttr(1,n) end do end do end do + !-------------------------------------------------------------------- ! 
Check to see that ice concentration is in fraction, not percent + !-------------------------------------------------------------------- if (first_time) then aice_max = maxval(ice_cov) + if (aice_max > c10) then - write(nu_diag,F00) "ERROR: Ice conc data must be in fraction, aice_max= ", aice_max - rc = ESMF_FAILURE - if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then - call ESMF_Finalize(endflag=ESMF_END_ABORT) - end if + write(nu_diag,F00) subname//" ERROR: Ice conc data must be in fraction, aice_max= ",& + aice_max + call abort_ice(subName) end if first_time = .false. end if + !----------------------------------------------------------------- ! Set prescribed ice state and fluxes + !----------------------------------------------------------------- + call ice_prescribed_phys() end subroutine ice_prescribed_run - !======================================================================= - subroutine ice_prescribed_phys() + !=============================================================================== + subroutine ice_prescribed_phys ! Set prescribed ice state using input ice concentration; ! set surface ice temperature to atmospheric value; use ! linear temperature gradient in ice to ocean temperature. + ! !USES: use ice_flux use ice_state use icepack_intfc, only : icepack_aggregate use ice_dyn_evp + implicit none !----- Local ------ integer(kind=int_kind) :: layer ! level index integer(kind=int_kind) :: nc ! ice category index integer(kind=int_kind) :: i,j,k ! longitude, latitude and level indices integer(kind=int_kind) :: iblk - integer(kind=int_kind) :: nt_Tsfc - integer(kind=int_kind) :: nt_sice - integer(kind=int_kind) :: nt_qice - integer(kind=int_kind) :: nt_qsno - integer(kind=int_kind) :: ntrcr - real(kind=dbl_kind) :: slope ! diff in underlying ocean tmp and ice surface tmp - real(kind=dbl_kind) :: Ti ! ice level temperature - real(kind=dbl_kind) :: Tmlt ! ice level melt temperature - real(kind=dbl_kind) :: qin_save(nilyr) - real(kind=dbl_kind) :: qsn_save(nslyr) - real(kind=dbl_kind) :: hi ! ice prescribed (hemispheric) ice thickness - real(kind=dbl_kind) :: hs ! snow thickness - real(kind=dbl_kind) :: zn ! normalized ice thickness - real(kind=dbl_kind) :: salin(nilyr) ! salinity (ppt) - real(kind=dbl_kind) :: rad_to_deg, pi, puny - real(kind=dbl_kind) :: rhoi - real(kind=dbl_kind) :: rhos - real(kind=dbl_kind) :: cp_ice - real(kind=dbl_kind) :: cp_ocn - real(kind=dbl_kind) :: lfresh - real(kind=dbl_kind) :: depressT + integer(kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, ntrcr + + real(kind=dbl_kind) :: slope ! diff in underlying ocean tmp and ice surface tmp + real(kind=dbl_kind) :: Ti ! ice level temperature + real(kind=dbl_kind) :: Tmlt ! ice level melt temperature + real(kind=dbl_kind) :: qin_save(nilyr) + real(kind=dbl_kind) :: qsn_save(nslyr) + real(kind=dbl_kind) :: hi ! ice prescribed (hemispheric) ice thickness + real(kind=dbl_kind) :: hs ! snow thickness + real(kind=dbl_kind) :: zn ! normalized ice thickness + real(kind=dbl_kind) :: salin(nilyr) ! salinity (ppt) + real(kind=dbl_kind) :: rad_to_deg, pi, puny + real(kind=dbl_kind) :: rhoi, rhos, cp_ice, cp_ocn, lfresh, depressT + real(kind=dbl_kind), parameter :: nsal = 0.407_dbl_kind real(kind=dbl_kind), parameter :: msal = 0.573_dbl_kind real(kind=dbl_kind), parameter :: saltmax = 3.2_dbl_kind ! 
max salinity at ice base (ppm) character(*),parameter :: subName = '(ice_prescribed_phys)' - !----------------------------------------------------------------- call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) @@ -436,7 +458,7 @@ subroutine ice_prescribed_phys() trcrn(i,j,nt_sice:nt_sice+nilyr-1,:,iblk) = c0 trcrn(i,j,nt_qice:nt_qice+nilyr-1,:,iblk) = c0 trcrn(i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk) = c0 - end if ! ice_cov >= eps04 + end if ! ice_cov >= eps04 !-------------------------------------------------------------------- ! compute aggregate ice state and open water area @@ -455,13 +477,11 @@ subroutine ice_prescribed_phys() trcr_depend = trcr_depend(1:ntrcr), & trcr_base = trcr_base(1:ntrcr,:), & n_trcr_strata = n_trcr_strata(1:ntrcr), & - nt_strata = nt_strata(1:ntrcr,:), & - Tf = Tf(i,j,iblk)) - - end if ! tmask - enddo ! i - enddo ! j - enddo ! iblk + nt_strata = nt_strata(1:ntrcr,:)) + end if ! tmask + enddo ! i + enddo ! j + enddo ! iblk do iblk = 1, nblocks do j = 1, ny_block @@ -475,11 +495,11 @@ subroutine ice_prescribed_phys() ! set non-computed fluxes, ice velocities, ice-ocn stresses to zero !-------------------------------------------------------------------- - frzmlt (:,:,:) = c0 - uvel (:,:,:) = c0 - vvel (:,:,:) = c0 - strocnxT_iavg(:,:,:) = c0 - strocnyT_iavg(:,:,:) = c0 + frzmlt (:,:,:) = c0 + uvel (:,:,:) = c0 + vvel (:,:,:) = c0 + strocnxT (:,:,:) = c0 + strocnyT (:,:,:) = c0 !----------------------------------------------------------------- ! other atm and ocn fluxes @@ -489,6 +509,105 @@ subroutine ice_prescribed_phys() end subroutine ice_prescribed_phys + !=============================================================================== + subroutine ice_prescribed_set_domain( lsize, mpicom, gsmap_i, dom_i ) + + ! Arguments + integer , intent(in) :: lsize + integer , intent(in) :: mpicom + type(mct_gsMap), intent(in) :: gsMap_i + type(mct_ggrid), intent(inout) :: dom_i + + ! Local Variables + integer :: i, j, iblk, n ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + real(dbl_kind), pointer :: data1(:) ! temporary + real(dbl_kind), pointer :: data2(:) ! temporary + real(dbl_kind), pointer :: data3(:) ! temporary + real(dbl_kind), pointer :: data4(:) ! temporary + real(dbl_kind), pointer :: data5(:) ! temporary + real(dbl_kind), pointer :: data6(:) ! temporary + integer , pointer :: idata(:) ! temporary + real(kind=dbl_kind) :: rad_to_deg + type(block) :: this_block ! block information for current block + character(*),parameter :: subName = '(ice_prescribed_set_domain)' + !-------------------------------- + + call icepack_query_parameters(rad_to_deg_out=rad_to_deg) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! Initialize mct domain type + call mct_gGrid_init(GGrid=dom_i, & + CoordChars='lat:lon:hgt', OtherChars='area:aream:mask:frac', lsize=lsize ) + call mct_aVect_zero(dom_i%data) + + ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT + call mct_gsMap_orderedPoints(gsMap_i, my_task, idata) + call mct_gGrid_importIAttr(dom_i,'GlobGridNum',idata,lsize) + deallocate(idata) + + ! Determine domain (numbering scheme is: West to East and South to North to South pole) + ! 
Initialize attribute vector with special value + + allocate(data1(lsize)) + allocate(data2(lsize)) + allocate(data3(lsize)) + allocate(data4(lsize)) + allocate(data5(lsize)) + allocate(data6(lsize)) + + data1(:) = -9999.0_dbl_kind + data2(:) = -9999.0_dbl_kind + data3(:) = -9999.0_dbl_kind + data4(:) = -9999.0_dbl_kind + call mct_gGrid_importRAttr(dom_i,"lat" ,data1,lsize) + call mct_gGrid_importRAttr(dom_i,"lon" ,data2,lsize) + call mct_gGrid_importRAttr(dom_i,"area" ,data3,lsize) + call mct_gGrid_importRAttr(dom_i,"aream",data4,lsize) + data5(:) = 0.0_dbl_kind + data6(:) = 0.0_dbl_kind + call mct_gGrid_importRAttr(dom_i,"mask" ,data5,lsize) + call mct_gGrid_importRAttr(dom_i,"frac" ,data6,lsize) + + ! Fill in correct values for domain components + ! lat/lon in degrees, area in radians^2, mask is 1 (ocean), 0 (non-ocean) + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + + data1(n) = TLON(i,j,iblk)*rad_to_deg + data2(n) = TLAT(i,j,iblk)*rad_to_deg + data3(n) = tarea(i,j,iblk)/(radius*radius) + + data5(n) = real(nint(hm(i,j,iblk)),kind=dbl_kind) + if (trim(grid_type) == 'latlon') then + data6(n) = ocn_gridcell_frac(i,j,iblk) + else + data6(n) = real(nint(hm(i,j,iblk)),kind=dbl_kind) + end if + + enddo !i + enddo !j + enddo !iblk + call mct_gGrid_importRattr(dom_i,"lon" ,data1,lsize) + call mct_gGrid_importRattr(dom_i,"lat" ,data2,lsize) + call mct_gGrid_importRattr(dom_i,"area",data3,lsize) + call mct_gGrid_importRattr(dom_i,"mask",data5,lsize) + call mct_gGrid_importRattr(dom_i,"frac",data6,lsize) + + deallocate(data1, data2, data3, data4, data5, data6) + + end subroutine ice_prescribed_set_domain + #endif end module ice_prescribed_mod diff --git a/cicecore/drivers/nuopc/dmi/CICE.F90 b/cicecore/drivers/nuopc/dmi/CICE.F90 index 5ace27736..ec1963d38 100644 --- a/cicecore/drivers/nuopc/dmi/CICE.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE.F90 @@ -1,21 +1,21 @@ !======================================================================= -! Copyright (c) 2023, Triad National Security, LLC +! Copyright (c) 2020, Triad National Security, LLC ! All rights reserved. -! -! Copyright 2023. Triad National Security, LLC. This software was -! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! +! Copyright 2020. Triad National Security, LLC. This software was +! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad -! National Security, LLC for the U.S. Department of Energy. The U.S. -! Government has rights to use, reproduce, and distribute this software. -! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY ! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF -! THIS SOFTWARE. If software is modified to produce derivative works, -! such modified software should be clearly marked, so as not to confuse +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse ! it with the version available from LANL. ! ! The full license and distribution policy are available from ! https://github.com/CICE-Consortium -! +! 
!======================================================================= ! ! Main driver routine for CICE. Initializes and steps through the model. @@ -49,7 +49,7 @@ program icemodel call CICE_Run !----------------------------------------------------------------- - ! Finalize CICE + ! Finalize CICE !----------------------------------------------------------------- call CICE_Finalize @@ -57,3 +57,39 @@ program icemodel end program icemodel !======================================================================= +! +! Wrapper for the print_state debugging routine. +! Useful for debugging in the main driver (see ice.F_debug) +! ip, jp, mtask are set in ice_diagnostics.F +! +! author Elizabeth C. Hunke, LANL +! + subroutine debug_ice(iblk, plabeld) + + use ice_kinds_mod + use ice_calendar, only: istep1 + use ice_communicate, only: my_task + use ice_diagnostics, only: check_step, iblkp, ip, jp, mtask, print_state + use ice_blocks, only: nx_block, ny_block + + character (char_len), intent(in) :: plabeld + integer (kind=int_kind), intent(in) :: iblk + + ! local + integer (kind=int_kind) :: i, j + character(len=*), parameter :: subname='(debug_ice)' + + if (istep1 >= check_step .and. & + iblk==iblkp .and. my_task==mtask) then + + do j = 1, ny_block + do i = 1, nx_block + if (i==ip .and. j==jp) call print_state(plabeld,i,j,iblk) + enddo + enddo + + endif + + end subroutine debug_ice + +!======================================================================= diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index 4577113f1..f79464ba8 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -15,11 +15,9 @@ module CICE_InitMod use ice_kinds_mod use ice_exit, only: abort_ice use ice_fileunits, only: init_fileunits, nu_diag - use ice_memusage, only: ice_memusage_init, ice_memusage_print use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave - use icepack_intfc, only: icepack_init_snow, icepack_init_radiation use icepack_intfc, only: icepack_configure use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & @@ -71,23 +69,21 @@ subroutine cice_init(mpi_comm) floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, write_ic, & - init_calendar, advance_timestep, calc_timesteps + use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & + init_calendar, calendar use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap - use ice_dyn_evp, only: init_evp - use ice_dyn_vp, only: init_vp - use ice_dyn_shared, only: kdyn + use ice_dyn_eap, only: init_eap, alloc_dyn_eap + use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & - get_forcing_atmo, get_forcing_ocn, get_wave_spec, init_snowtable + get_forcing_atmo, get_forcing_ocn, get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default, alloc_forcing_bgc, fiso_default - use ice_grid, 
only: init_grid1, init_grid2, alloc_grid, dealloc_grid + faero_default, faero_optics, alloc_forcing_bgc, fiso_default + use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state @@ -101,8 +97,7 @@ subroutine cice_init(mpi_comm) mpi_comm ! communicator for sequential ccsm logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_iso, tr_fsd, wave_spec, tr_snow - character(len=char_len) :: snw_aging_table + tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' if (present(mpi_comm)) then @@ -124,17 +119,12 @@ subroutine cice_init(mpi_comm) call input_zbgc ! vertical biogeochemistry namelist call count_tracers ! count tracers - ! Call this as early as possible, must be after memory_stats is read - if (my_task == master_task) then - call ice_memusage_init(nu_diag) - call ice_memusage_print(nu_diag,subname//':start') - endif - call init_domain_blocks ! set up block decomposition call init_grid1 ! domain distribution call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays + call alloc_dyn_shared ! allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -144,12 +134,11 @@ subroutine cice_init(mpi_comm) call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - if (kdyn == 1) then - call init_evp - else if (kdyn == 2) then - call init_eap ! define eap dynamics parameters, variables - else if (kdyn == 3) then - call init_vp ! define vp dynamics parameters, variables + if (kdyn == 2) then + call alloc_dyn_eap ! allocate dyn_eap arrays + call init_eap (dt_dyn) ! define eap dynamics parameters, variables + else ! for both kdyn = 0 or 1 + call init_evp (dt_dyn) ! define evp dynamics parameters, variables endif call init_coupler_flux ! initialize fluxes exchanged with coupler @@ -176,15 +165,16 @@ subroutine cice_init(mpi_comm) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) -#ifndef CICE_IN_NEMO + +! call calendar(time) ! determine the initial date + call init_forcing_ocn(dt) ! initialize sss and sst from data -#endif call init_state ! initialize the ice state call init_transport ! initialize horizontal transport call ice_HaloRestore_init ! restored boundary conditions call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & - wave_spec_out=wave_spec, snw_aging_table_out=snw_aging_table) + wave_spec_out=wave_spec) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -195,30 +185,32 @@ subroutine cice_init(mpi_comm) call init_diags ! initialize diagnostic output points call init_history_therm ! initialize thermo history variables call init_history_dyn ! initialize dynamic history variables - call calc_timesteps ! update timestep counter if not using npt_unit="1" - call icepack_init_radiation ! 
initialize icepack shortwave tables call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) - call icepack_query_tracer_flags(tr_iso_out=tr_iso, tr_snow_out=tr_snow) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + !property tables + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer - call advance_timestep() + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + call calendar(time) ! at the end of the first timestep !-------------------------------------------------------------------- ! coupler communication or forcing data initialization !-------------------------------------------------------------------- -#ifndef CICE_IN_NEMO call init_forcing_atmo ! initialize atmospheric forcing (standalone) -#endif #ifndef coupled #ifndef CESMCOUPLED @@ -228,20 +220,8 @@ subroutine cice_init(mpi_comm) call get_forcing_ocn(dt) ! ocean forcing from data #endif - ! snow aging lookup table initialization - if (tr_snow) then ! advanced snow physics - call icepack_init_snow() - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - if (snw_aging_table(1:4) /= 'test') then - call init_snowtable() - endif - endif - ! isotopes if (tr_iso) call fiso_default ! default values - ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -257,12 +237,7 @@ subroutine cice_init(mpi_comm) call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - if (write_ic) call accum_hist(dt) ! write initial conditions - - call dealloc_grid ! deallocate temporary grid arrays - if (my_task == master_task) then - call ice_memusage_print(nu_diag,subname//':end') - endif + if (write_ic) call accum_hist(dt) ! 
write initial conditions end subroutine cice_init @@ -272,28 +247,27 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: calendar + use ice_calendar, only: time, calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_iso, n_aero, nfsd, nslyr + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn use ice_grid, only: tmask use ice_init, only: ice_ic - use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & - init_meltponds_lvl, init_meltponds_topo, & + use ice_init_column, only: init_age, init_FY, init_lvl, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & - restart_snow, read_restart_snow, & restart_fsd, read_restart_fsd, & restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & restart_hbrine, read_restart_hbrine, & - restart_bgc - use ice_flux, only: Tf + restart_zsal, restart_bgc use ice_restart_driver, only: restartfile use ice_restart_shared, only: runtype, restart use ice_state ! almost everything @@ -302,14 +276,13 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! block index logical(kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & - tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & - skl_bgc, z_tracers + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & + skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_smice, nt_smliq, nt_rhos, nt_rsnw, & nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -320,25 +293,23 @@ subroutine init_restart file=__FILE__, line=__LINE__) call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers) + z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & - nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & - nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in - call calendar() ! update time parameters + call calendar(time) ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! 
ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' @@ -346,17 +317,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -367,7 +338,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -378,12 +349,25 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif ! level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -391,7 +375,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -406,29 +390,13 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) enddo ! iblk endif ! .not. restart_pond endif - - ! snow redistribution/metamorphism - if (tr_snow) then - if (trim(runtype) == 'continue') restart_snow = .true. - if (restart_snow) then - call read_restart_snow - else - do iblk = 1, nblocks - call init_snowtracers(trcrn(:,:,nt_smice:nt_smice+nslyr-1,:,iblk), & - trcrn(:,:,nt_smliq:nt_smliq+nslyr-1,:,iblk), & - trcrn(:,:,nt_rhos :nt_rhos +nslyr-1,:,iblk), & - trcrn(:,:,nt_rsnw :nt_rsnw +nslyr-1,:,iblk)) - enddo ! iblk - endif - endif - ! floe size distribution if (tr_fsd) then if (trim(runtype) == 'continue') restart_fsd = .true. @@ -445,7 +413,7 @@ subroutine init_restart if (restart_iso) then call read_restart_iso else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) enddo ! iblk @@ -457,7 +425,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero @@ -466,6 +434,8 @@ subroutine init_restart if (trim(runtype) == 'continue') then if (tr_brine) & restart_hbrine = .true. + if (solve_zsal) & + restart_zsal = .true. if (skl_bgc .or. z_tracers) & restart_bgc = .true. endif @@ -475,7 +445,7 @@ subroutine init_restart if (tr_brine .and. restart_hbrine) call read_restart_hbrine endif - if (skl_bgc .or. z_tracers) then ! biogeochemistry + if (solve_zsal .or. skl_bgc .or. z_tracers) then ! 
biogeochemistry if (tr_fsd) then write (nu_diag,*) 'FSD implementation incomplete for use with BGC' call icepack_warnings_flush(nu_diag) @@ -508,8 +478,7 @@ subroutine init_restart trcr_depend = trcr_depend, & trcr_base = trcr_base, & n_trcr_strata = n_trcr_strata, & - nt_strata = nt_strata, & - Tf = Tf(i,j,iblk)) + nt_strata = nt_strata) else ! tcraig, reset all tracer values on land to zero trcrn(i,j,:,:,iblk) = c0 diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index 5f8fb52bc..ad575f714 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -15,13 +15,11 @@ module CICE_RunMod use ice_kinds_mod - use ice_communicate, only: my_task, master_task use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice - use ice_memusage, only: ice_memusage_print use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters @@ -43,9 +41,9 @@ module CICE_RunMod ! Philip W. Jones, LANL ! William H. Lipscomb, LANL - subroutine CICE_Run(stop_now_cpl) + subroutine CICE_Run - use ice_calendar, only: dt, stop_now, advance_timestep + use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -56,7 +54,6 @@ subroutine CICE_Run(stop_now_cpl) logical (kind=log_kind) :: & tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd character(len=*), parameter :: subname = '(CICE_Run)' - logical (kind=log_kind), optional, intent(in) :: stop_now_cpl !-------------------------------------------------------------------- ! initialize error code and step timer @@ -80,16 +77,17 @@ subroutine CICE_Run(stop_now_cpl) ! timestep loop !-------------------------------------------------------------------- #ifndef CICE_DMI - timeLoop: do + timeLoop: do #endif #endif call ice_step - call advance_timestep() ! advance time + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + + call calendar(time) ! at the end of the timestep - if (present(stop_now_cpl)) then - if (stop_now_cpl) return - endif #ifndef CICE_IN_NEMO #ifndef CICE_DMI if (stop_now >= 1) exit timeLoop @@ -133,7 +131,7 @@ subroutine CICE_Run(stop_now_cpl) ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! 
end timestepping loop timer end subroutine CICE_Run @@ -148,32 +146,31 @@ subroutine ice_step use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice - use ice_diagnostics_bgc, only: hbrine_diags, bgc_diags + use ice_diagnostics, only: init_mass_diags, runtime_diags + use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_lvl, & + write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_iso, write_restart_bgc, write_restart_hbrine, & - write_restart_snow + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, step_prep, step_dyn_wave, step_snow + biogeochemistry, save_init, step_dyn_wave use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! thermodynamics is off when ktherm = -1 @@ -181,27 +178,19 @@ subroutine ice_step offset ! d(age)/dt time offset logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & - tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & - calc_Tsfc, skl_bgc, z_tracers, wave_spec + tr_iage, tr_FY, tr_lvl, tr_fsd, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & + calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' - character (len=char_len) :: plabeld - - if (debug_model) then - plabeld = 'beginning time step' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, ktherm_out=ktherm, wave_spec_out=wave_spec) + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & + wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -225,11 +214,12 @@ subroutine ice_step call ice_timer_start(timer_column) ! column physics call ice_timer_start(timer_thermo) ! 
thermodynamics - call step_prep + call save_init - if (ktherm >= 0) then - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) then !----------------------------------------------------------------- ! scale radiation fields @@ -237,44 +227,22 @@ subroutine ice_step if (calc_Tsfc) call prep_radiation (iblk) - if (debug_model) then - plabeld = 'post prep_radiation' - call debug_ice (iblk, plabeld) - endif - !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics - - if (debug_model) then - plabeld = 'post step_therm1' - call debug_ice (iblk, plabeld) - endif - call biogeochemistry (dt, iblk) ! biogeochemistry - - if (debug_model) then - plabeld = 'post biogeochemistry' - call debug_ice (iblk, plabeld) - endif - call step_therm2 (dt, iblk) ! ice thickness distribution thermo - if (debug_model) then - plabeld = 'post step_therm2' - call debug_ice (iblk, plabeld) - endif + endif ! ktherm > 0 - enddo - !$OMP END PARALLEL DO - endif ! ktherm > 0 + enddo ! iblk + !$OMP END PARALLEL DO ! clean up, update tendency diagnostics offset = dt - call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & - dagedt=dagedtt, offset=offset) + call update_state (dt, daidtt, dvidtt, dagedtt, offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -292,83 +260,38 @@ subroutine ice_step ! momentum, stress, transport call step_dyn_horiz (dt_dyn) - if (debug_model) then - plabeld = 'post step_dyn_horiz' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif - ! ridging - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) enddo !$OMP END PARALLEL DO - if (debug_model) then - plabeld = 'post step_dyn_ridge' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif - ! clean up, update tendency diagnostics offset = c0 - call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & - dagedt=dagedtd, offset=offset) + call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) enddo - if (debug_model) then - plabeld = 'post dynamics' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - !----------------------------------------------------------------- - ! snow redistribution and metamorphosis + ! albedo, shortwave radiation !----------------------------------------------------------------- - if (tr_snow) then ! advanced snow physics - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks - call step_snow (dt, iblk) - enddo - !$OMP END PARALLEL DO - call update_state (dt=dt) ! clean up - endif + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics !MHRI: CHECK THIS OMP !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks - !----------------------------------------------------------------- - ! 
albedo, shortwave radiation - !----------------------------------------------------------------- - if (ktherm >= 0) call step_radiation (dt, iblk) - if (debug_model) then - plabeld = 'post step_radiation' - call debug_ice (iblk, plabeld) - endif - !----------------------------------------------------------------- ! get ready for coupling and the next time step !----------------------------------------------------------------- call coupling_prep (iblk) - if (debug_model) then - plabeld = 'post coupling_prep' - call debug_ice (iblk, plabeld) - endif - enddo ! iblk !$OMP END PARALLEL DO @@ -387,11 +310,9 @@ subroutine ice_step call ice_timer_start(timer_diags) ! diagnostics if (mod(istep,diagfreq) == 0) then call runtime_diags(dt) ! log file + if (solve_zsal) call zsal_diags if (skl_bgc .or. z_tracers) call bgc_diags if (tr_brine) call hbrine_diags - if (my_task == master_task) then - call ice_memusage_print(nu_diag,subname) - endif endif call ice_timer_stop(timer_diags) ! diagnostics @@ -405,22 +326,23 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl + if (tr_pond_cesm) call write_restart_pond_cesm if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo - if (tr_snow) call write_restart_snow if (tr_fsd) call write_restart_fsd if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero - if (skl_bgc .or. z_tracers) & - call write_restart_bgc + if (solve_zsal .or. skl_bgc .or. z_tracers) & + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart endif + call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -430,7 +352,7 @@ end subroutine ice_step subroutine coupling_prep (iblk) use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, snowfracn + albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn use ice_blocks, only: nx_block, ny_block, get_block, block use ice_domain, only: blocks_ice use ice_calendar, only: dt, nstreams @@ -439,13 +361,12 @@ subroutine coupling_prep (iblk) albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, scale_factor, snowfrac, & - fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & - flux_bio, flux_bio_ai + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai use ice_grid, only: tmask use ice_state, only: aicen, aice #ifdef CICE_IN_NEMO @@ -455,12 +376,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -499,7 +420,7 @@ subroutine coupling_prep (iblk) enddo enddo - call ice_timer_start(timer_couple,iblk) ! 
atm/ocn coupling + call ice_timer_start(timer_couple) ! atm/ocn coupling if (oceanmixed_ice) & call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst @@ -596,6 +517,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -616,7 +539,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -632,38 +555,34 @@ subroutine coupling_prep (iblk) evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), & - fswthru (:,:,iblk), & - fswthru_vdr (:,:,iblk), & - fswthru_vdf (:,:,iblk), & - fswthru_idr (:,:,iblk), & - fswthru_idf (:,:,iblk), & + fhocn (:,:,iblk), fswthru (:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & - flux_bio =flux_bio (:,:,1:nbtrcr,iblk), & + fzsal (:,:,iblk), fzsal_g (:,:,iblk), & + flux_bio (:,:,1:nbtrcr,iblk), & Qref_iso =Qref_iso (:,:,:,iblk), & fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk)) - + #ifdef CICE_IN_NEMO !echmod - comment this out for efficiency, if .not. calc_Tsfc if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod #endif - call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling + call ice_timer_stop(timer_couple) ! atm/ocn coupling end subroutine coupling_prep @@ -672,10 +591,10 @@ end subroutine coupling_prep !======================================================================= ! ! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -708,15 +627,14 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! - Lsub, & ! rLsub ! 
1/Lsub character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) + call icepack_query_parameters(puny_out=puny) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) diff --git a/cicecore/drivers/standalone/cice/CICE.F90 b/cicecore/drivers/standalone/cice/CICE.F90 index 5ace27736..ec1963d38 100644 --- a/cicecore/drivers/standalone/cice/CICE.F90 +++ b/cicecore/drivers/standalone/cice/CICE.F90 @@ -1,21 +1,21 @@ !======================================================================= -! Copyright (c) 2023, Triad National Security, LLC +! Copyright (c) 2020, Triad National Security, LLC ! All rights reserved. -! -! Copyright 2023. Triad National Security, LLC. This software was -! produced under U.S. Government contract DE-AC52-06NA25396 for Los +! +! Copyright 2020. Triad National Security, LLC. This software was +! produced under U.S. Government contract DE-AC52-06NA25396 for Los ! Alamos National Laboratory (LANL), which is operated by Triad -! National Security, LLC for the U.S. Department of Energy. The U.S. -! Government has rights to use, reproduce, and distribute this software. -! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY +! National Security, LLC for the U.S. Department of Energy. The U.S. +! Government has rights to use, reproduce, and distribute this software. +! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY ! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF -! THIS SOFTWARE. If software is modified to produce derivative works, -! such modified software should be clearly marked, so as not to confuse +! THIS SOFTWARE. If software is modified to produce derivative works, +! such modified software should be clearly marked, so as not to confuse ! it with the version available from LANL. ! ! The full license and distribution policy are available from ! https://github.com/CICE-Consortium -! +! !======================================================================= ! ! Main driver routine for CICE. Initializes and steps through the model. @@ -49,7 +49,7 @@ program icemodel call CICE_Run !----------------------------------------------------------------- - ! Finalize CICE + ! Finalize CICE !----------------------------------------------------------------- call CICE_Finalize @@ -57,3 +57,39 @@ program icemodel end program icemodel !======================================================================= +! +! Wrapper for the print_state debugging routine. +! Useful for debugging in the main driver (see ice.F_debug) +! ip, jp, mtask are set in ice_diagnostics.F +! +! author Elizabeth C. Hunke, LANL +! + subroutine debug_ice(iblk, plabeld) + + use ice_kinds_mod + use ice_calendar, only: istep1 + use ice_communicate, only: my_task + use ice_diagnostics, only: check_step, iblkp, ip, jp, mtask, print_state + use ice_blocks, only: nx_block, ny_block + + character (char_len), intent(in) :: plabeld + integer (kind=int_kind), intent(in) :: iblk + + ! local + integer (kind=int_kind) :: i, j + character(len=*), parameter :: subname='(debug_ice)' + + if (istep1 >= check_step .and. & + iblk==iblkp .and. my_task==mtask) then + + do j = 1, ny_block + do i = 1, nx_block + if (i==ip .and. 
j==jp) call print_state(plabeld,i,j,iblk) + enddo + enddo + + endif + + end subroutine debug_ice + +!======================================================================= diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 194293118..0a8614eb2 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -15,11 +15,9 @@ module CICE_InitMod use ice_kinds_mod use ice_exit, only: abort_ice use ice_fileunits, only: init_fileunits, nu_diag - use ice_memusage, only: ice_memusage_init, ice_memusage_print use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave - use icepack_intfc, only: icepack_init_snow, icepack_init_radiation use icepack_intfc, only: icepack_configure use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags, & @@ -66,23 +64,21 @@ subroutine cice_init floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, write_ic, & - init_calendar, advance_timestep, calc_timesteps + use ice_calendar, only: dt, dt_dyn, time, istep, istep1, write_ic, & + init_calendar, calendar use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags use ice_domain, only: init_domain_blocks use ice_domain_size, only: ncat, nfsd - use ice_dyn_eap, only: init_eap - use ice_dyn_evp, only: init_evp - use ice_dyn_vp, only: init_vp - use ice_dyn_shared, only: kdyn + use ice_dyn_eap, only: init_eap, alloc_dyn_eap + use ice_dyn_shared, only: kdyn, init_evp, alloc_dyn_shared use ice_flux, only: init_coupler_flux, init_history_therm, & init_history_dyn, init_flux_atm, init_flux_ocn, alloc_flux use ice_forcing, only: init_forcing_ocn, init_forcing_atmo, & - get_forcing_atmo, get_forcing_ocn, get_wave_spec, init_snowtable + get_forcing_atmo, get_forcing_ocn, get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & - faero_default, alloc_forcing_bgc, fiso_default - use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid + faero_default, faero_optics, alloc_forcing_bgc, fiso_default + use ice_grid, only: init_grid1, init_grid2, alloc_grid use ice_history, only: init_hist, accum_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state @@ -93,8 +89,7 @@ subroutine cice_init use ice_transport_driver, only: init_transport logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers, & - tr_iso, tr_fsd, wave_spec, tr_snow - character(len=char_len) :: snw_aging_table + tr_iso, tr_fsd, wave_spec character(len=*), parameter :: subname = '(cice_init)' call init_communicate ! initial setup for message passing @@ -112,17 +107,12 @@ subroutine cice_init call input_zbgc ! vertical biogeochemistry namelist call count_tracers ! count tracers - ! Call this as early as possible, must be after memory_stats is read - if (my_task == master_task) then - call ice_memusage_init(nu_diag) - call ice_memusage_print(nu_diag,subname//':start') - endif - call init_domain_blocks ! set up block decomposition call init_grid1 ! domain distribution call alloc_grid ! allocate grid arrays call alloc_arrays_column ! allocate column arrays call alloc_state ! allocate state arrays + call alloc_dyn_shared ! 
allocate dyn shared arrays call alloc_flux_bgc ! allocate flux_bgc arrays call alloc_flux ! allocate flux arrays call init_ice_timers ! initialize all timers @@ -132,12 +122,11 @@ subroutine cice_init call init_calendar ! initialize some calendar stuff call init_hist (dt) ! initialize output history file - if (kdyn == 1) then - call init_evp - else if (kdyn == 2) then - call init_eap ! define eap dynamics parameters, variables - else if (kdyn == 3) then - call init_vp ! define vp dynamics parameters, variables + if (kdyn == 2) then + call alloc_dyn_eap ! allocate dyn_eap arrays + call init_eap (dt_dyn) ! define eap dynamics parameters, variables + else ! for both kdyn = 0 or 1 + call init_evp (dt_dyn) ! define evp dynamics parameters, variables endif call init_coupler_flux ! initialize fluxes exchanged with coupler @@ -165,13 +154,15 @@ subroutine cice_init if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) +! call calendar(time) ! determine the initial date + call init_forcing_ocn(dt) ! initialize sss and sst from data call init_state ! initialize the ice state call init_transport ! initialize horizontal transport call ice_HaloRestore_init ! restored boundary conditions call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & - wave_spec_out=wave_spec, snw_aging_table_out=snw_aging_table) + wave_spec_out=wave_spec) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) @@ -182,29 +173,26 @@ subroutine cice_init call init_diags ! initialize diagnostic output points call init_history_therm ! initialize thermo history variables call init_history_dyn ! initialize dynamic history variables - call calc_timesteps ! update timestep counter if not using npt_unit="1" - call icepack_init_radiation ! initialize icepack shortwave tables call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) - call icepack_query_tracer_flags(tr_iso_out=tr_iso, tr_snow_out=tr_snow) + call icepack_query_tracer_flags(tr_iso_out=tr_iso) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(trim(subname), & file=__FILE__,line= __LINE__) - ! Initialize shortwave components using swdn from previous timestep - ! if restarting. These components will be scaled to current forcing + if (tr_aero .or. tr_zaero) call faero_optics !initialize aerosol optical + !property tables + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing ! in prep_radiation. if (trim(runtype) == 'continue' .or. restart) & call init_shortwave ! initialize radiative transfer - if (write_ic) call accum_hist(dt) ! write initial conditions - -! tcraig, use advance_timestep here -! istep = istep + 1 ! update time step counters -! istep1 = istep1 + 1 -! time = time + dt ! determine the time and date -! call calendar(time) ! at the end of the first timestep - call advance_timestep() + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + call calendar(time) ! at the end of the first timestep !-------------------------------------------------------------------- ! coupler communication or forcing data initialization @@ -216,20 +204,8 @@ subroutine cice_init call get_forcing_atmo ! atmospheric forcing from data call get_forcing_ocn(dt) ! ocean forcing from data - ! snow aging lookup table initialization - if (tr_snow) then ! 
advanced snow physics - call icepack_init_snow() - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - if (snw_aging_table(1:4) /= 'test') then - call init_snowtable() - endif - endif - ! isotopes if (tr_iso) call fiso_default ! default values - ! aerosols ! if (tr_aero) call faero_data ! data file ! if (tr_zaero) call fzaero_data ! data file (gx1) @@ -243,11 +219,7 @@ subroutine cice_init call init_flux_atm ! initialize atmosphere fluxes sent to coupler call init_flux_ocn ! initialize ocean fluxes sent to coupler - call dealloc_grid ! deallocate temporary grid arrays - - if (my_task == master_task) then - call ice_memusage_print(nu_diag,subname//':end') - endif + if (write_ic) call accum_hist(dt) ! write initial conditions end subroutine cice_init @@ -257,28 +229,27 @@ subroutine init_restart use ice_arrays_column, only: dhsn use ice_blocks, only: nx_block, ny_block - use ice_calendar, only: calendar + use ice_calendar, only: time, calendar use ice_constants, only: c0 use ice_domain, only: nblocks - use ice_domain_size, only: ncat, n_iso, n_aero, nfsd, nslyr + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd use ice_dyn_eap, only: read_restart_eap use ice_dyn_shared, only: kdyn use ice_grid, only: tmask use ice_init, only: ice_ic - use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & - init_meltponds_lvl, init_meltponds_topo, & + use ice_init_column, only: init_age, init_FY, init_lvl, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd use ice_restart_column, only: restart_age, read_restart_age, & restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_cesm, read_restart_pond_cesm, & restart_pond_lvl, read_restart_pond_lvl, & restart_pond_topo, read_restart_pond_topo, & - restart_snow, read_restart_snow, & restart_fsd, read_restart_fsd, & restart_iso, read_restart_iso, & restart_aero, read_restart_aero, & restart_hbrine, read_restart_hbrine, & - restart_bgc - use ice_flux, only: Tf + restart_zsal, restart_bgc use ice_restart_driver, only: restartfile use ice_restart_shared, only: runtype, restart use ice_state ! almost everything @@ -287,14 +258,13 @@ subroutine init_restart i, j , & ! horizontal indices iblk ! 
block index logical(kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & - tr_pond_topo, tr_snow, tr_fsd, tr_iso, tr_aero, tr_brine, & - skl_bgc, z_tracers + tr_iage, tr_FY, tr_lvl, tr_pond_cesm, tr_pond_lvl, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, & + skl_bgc, z_tracers, solve_zsal integer(kind=int_kind) :: & ntrcr integer(kind=int_kind) :: & nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & - nt_smice, nt_smliq, nt_rhos, nt_rsnw, & nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice character(len=*), parameter :: subname = '(init_restart)' @@ -305,25 +275,23 @@ subroutine init_restart file=__FILE__, line=__LINE__) call icepack_query_parameters(skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers) + z_tracers_out=z_tracers, solve_zsal_out=solve_zsal) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & - tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & - nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & - nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(runtype) == 'continue') then + if (trim(runtype) == 'continue') then ! start from core restart file call restartfile() ! given by pointer in ice_in - call calendar() ! update time parameters + call calendar(time) ! update time parameters if (kdyn == 2) call read_restart_eap ! EAP else if (restart) then ! ice_ic = core restart file call restartfile (ice_ic) ! or 'default' or 'none' @@ -331,17 +299,17 @@ subroutine init_restart ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file !!! uncomment if EAP restart data exists ! if (kdyn == 2) call read_restart_eap - endif + endif ! tracers - ! ice age tracer - if (tr_iage) then + ! ice age tracer + if (tr_iage) then if (trim(runtype) == 'continue') & restart_age = .true. if (restart_age) then call read_restart_age else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_age(trcrn(:,:,nt_iage,:,iblk)) enddo ! iblk endif @@ -352,7 +320,7 @@ subroutine init_restart if (restart_FY) then call read_restart_FY else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_FY(trcrn(:,:,nt_FY,:,iblk)) enddo ! iblk endif @@ -363,12 +331,25 @@ subroutine init_restart if (restart_lvl) then call read_restart_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & trcrn(:,:,nt_vlvl,:,iblk)) enddo ! iblk endif endif + ! CESM melt ponds + if (tr_pond_cesm) then + if (trim(runtype) == 'continue') & + restart_pond_cesm = .true. + if (restart_pond_cesm) then + call read_restart_pond_cesm + else + do iblk = 1, nblocks + call init_meltponds_cesm(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk)) + enddo ! iblk + endif + endif ! 
level-ice melt ponds if (tr_pond_lvl) then if (trim(runtype) == 'continue') & @@ -376,7 +357,7 @@ subroutine init_restart if (restart_pond_lvl) then call read_restart_pond_lvl else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk), & @@ -391,29 +372,13 @@ subroutine init_restart if (restart_pond_topo) then call read_restart_pond_topo else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & trcrn(:,:,nt_hpnd,:,iblk), & trcrn(:,:,nt_ipnd,:,iblk)) enddo ! iblk endif ! .not. restart_pond endif - - ! snow redistribution/metamorphism - if (tr_snow) then - if (trim(runtype) == 'continue') restart_snow = .true. - if (restart_snow) then - call read_restart_snow - else - do iblk = 1, nblocks - call init_snowtracers(trcrn(:,:,nt_smice:nt_smice+nslyr-1,:,iblk), & - trcrn(:,:,nt_smliq:nt_smliq+nslyr-1,:,iblk), & - trcrn(:,:,nt_rhos :nt_rhos +nslyr-1,:,iblk), & - trcrn(:,:,nt_rsnw :nt_rsnw +nslyr-1,:,iblk)) - enddo ! iblk - endif - endif - ! floe size distribution if (tr_fsd) then if (trim(runtype) == 'continue') restart_fsd = .true. @@ -430,7 +395,7 @@ subroutine init_restart if (restart_iso) then call read_restart_iso else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) enddo ! iblk @@ -442,7 +407,7 @@ subroutine init_restart if (restart_aero) then call read_restart_aero else - do iblk = 1, nblocks + do iblk = 1, nblocks call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) enddo ! iblk endif ! .not. restart_aero @@ -451,6 +416,8 @@ subroutine init_restart if (trim(runtype) == 'continue') then if (tr_brine) & restart_hbrine = .true. + if (solve_zsal) & + restart_zsal = .true. if (skl_bgc .or. z_tracers) & restart_bgc = .true. endif @@ -460,7 +427,7 @@ subroutine init_restart if (tr_brine .and. restart_hbrine) call read_restart_hbrine endif - if (skl_bgc .or. z_tracers) then ! biogeochemistry + if (solve_zsal .or. skl_bgc .or. z_tracers) then ! biogeochemistry if (tr_fsd) then write (nu_diag,*) 'FSD implementation incomplete for use with BGC' call icepack_warnings_flush(nu_diag) @@ -493,8 +460,7 @@ subroutine init_restart trcr_depend = trcr_depend, & trcr_base = trcr_base, & n_trcr_strata = n_trcr_strata, & - nt_strata = nt_strata, & - Tf = Tf(i,j,iblk)) + nt_strata = nt_strata) else ! tcraig, reset all tracer values on land to zero trcrn(i,j,:,:,iblk) = c0 diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 59213f728..b45db2514 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 
2008 ECH: moved ESMF code to its own driver @@ -15,13 +15,11 @@ module CICE_RunMod use ice_kinds_mod - use ice_communicate, only: my_task, master_task use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice - use ice_memusage, only: ice_memusage_print use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters @@ -45,7 +43,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: dt, stop_now, advance_timestep + use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -84,12 +82,11 @@ subroutine CICE_Run call ice_step -! tcraig, use advance_timestep now -! istep = istep + 1 ! update time step counters -! istep1 = istep1 + 1 -! time = time + dt ! determine the time and date -! call calendar(time) ! at the end of the timestep - call advance_timestep() ! advance time + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + + call calendar(time) ! at the end of the timestep #ifndef CICE_IN_NEMO if (stop_now >= 1) exit timeLoop @@ -125,7 +122,7 @@ subroutine CICE_Run ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! end timestepping loop timer end subroutine CICE_Run @@ -140,32 +137,31 @@ subroutine ice_step use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice - use ice_diagnostics_bgc, only: hbrine_diags, bgc_diags + use ice_diagnostics, only: init_mass_diags, runtime_diags + use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_lvl, & + write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_iso, write_restart_bgc, write_restart_hbrine, & - write_restart_snow + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, step_prep, step_dyn_wave, step_snow + biogeochemistry, save_init, step_dyn_wave use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! 
thermodynamics is off when ktherm = -1 @@ -173,27 +169,19 @@ subroutine ice_step offset ! d(age)/dt time offset logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & - tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & - calc_Tsfc, skl_bgc, z_tracers, wave_spec + tr_iage, tr_FY, tr_lvl, tr_fsd, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & + calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' - character (len=char_len) :: plabeld - - if (debug_model) then - plabeld = 'beginning time step' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif - call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, ktherm_out=ktherm, wave_spec_out=wave_spec) + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & + wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -217,11 +205,12 @@ subroutine ice_step call ice_timer_start(timer_column) ! column physics call ice_timer_start(timer_thermo) ! thermodynamics - call step_prep + call save_init - if (ktherm >= 0) then - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) then !----------------------------------------------------------------- ! scale radiation fields @@ -229,44 +218,22 @@ subroutine ice_step if (calc_Tsfc) call prep_radiation (iblk) - if (debug_model) then - plabeld = 'post prep_radiation' - call debug_ice (iblk, plabeld) - endif - !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics - - if (debug_model) then - plabeld = 'post step_therm1' - call debug_ice (iblk, plabeld) - endif - call biogeochemistry (dt, iblk) ! biogeochemistry - - if (debug_model) then - plabeld = 'post biogeochemistry' - call debug_ice (iblk, plabeld) - endif - call step_therm2 (dt, iblk) ! ice thickness distribution thermo - if (debug_model) then - plabeld = 'post step_therm2' - call debug_ice (iblk, plabeld) - endif + endif ! ktherm > 0 - enddo - !$OMP END PARALLEL DO - endif ! ktherm > 0 + enddo ! iblk + !$OMP END PARALLEL DO ! clean up, update tendency diagnostics offset = dt - call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & - dagedt=dagedtt, offset=offset) + call update_state (dt, daidtt, dvidtt, dagedtt, offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -284,82 +251,38 @@ subroutine ice_step ! momentum, stress, transport call step_dyn_horiz (dt_dyn) - if (debug_model) then - plabeld = 'post step_dyn_horiz' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif - ! 
ridging - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) enddo !$OMP END PARALLEL DO - if (debug_model) then - plabeld = 'post step_dyn_ridge' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif - ! clean up, update tendency diagnostics offset = c0 - call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & - dagedt=dagedtd, offset=offset) + call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) enddo - if (debug_model) then - plabeld = 'post dynamics' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics - !----------------------------------------------------------------- - ! snow redistribution and metamorphosis + ! albedo, shortwave radiation !----------------------------------------------------------------- - if (tr_snow) then ! advanced snow physics - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks - call step_snow (dt, iblk) - enddo - !$OMP END PARALLEL DO - call update_state (dt=dt) ! clean up - endif + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) +!MHRI: CHECK THIS OMP + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks - !----------------------------------------------------------------- - ! albedo, shortwave radiation - !----------------------------------------------------------------- - if (ktherm >= 0) call step_radiation (dt, iblk) - if (debug_model) then - plabeld = 'post step_radiation' - call debug_ice (iblk, plabeld) - endif - !----------------------------------------------------------------- ! get ready for coupling and the next time step !----------------------------------------------------------------- call coupling_prep (iblk) - if (debug_model) then - plabeld = 'post coupling_prep' - call debug_ice (iblk, plabeld) - endif - enddo ! iblk !$OMP END PARALLEL DO @@ -378,11 +301,9 @@ subroutine ice_step call ice_timer_start(timer_diags) ! diagnostics if (mod(istep,diagfreq) == 0) then call runtime_diags(dt) ! log file + if (solve_zsal) call zsal_diags if (skl_bgc .or. z_tracers) call bgc_diags if (tr_brine) call hbrine_diags - if (my_task == master_task) then - call ice_memusage_print(nu_diag,subname) - endif endif call ice_timer_stop(timer_diags) ! diagnostics @@ -396,22 +317,23 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl + if (tr_pond_cesm) call write_restart_pond_cesm if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo - if (tr_snow) call write_restart_snow if (tr_fsd) call write_restart_fsd if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero - if (skl_bgc .or. z_tracers) & - call write_restart_bgc + if (solve_zsal .or. skl_bgc .or. z_tracers) & + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart endif + call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! 
Prepare for coupling @@ -421,7 +343,7 @@ end subroutine ice_step subroutine coupling_prep (iblk) use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, snowfracn + albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn use ice_blocks, only: nx_block, ny_block, get_block, block use ice_domain, only: blocks_ice use ice_calendar, only: dt, nstreams @@ -430,13 +352,14 @@ subroutine coupling_prep (iblk) albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, scale_factor, snowfrac, & + fswthru_ai, fhocn, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + scale_factor, snowfrac, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & - flux_bio, flux_bio_ai + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai use ice_grid, only: tmask use ice_state, only: aicen, aice #ifdef CICE_IN_NEMO @@ -446,12 +369,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -490,7 +413,7 @@ subroutine coupling_prep (iblk) enddo enddo - call ice_timer_start(timer_couple,iblk) ! atm/ocn coupling + call ice_timer_start(timer_couple) ! atm/ocn coupling if (oceanmixed_ice) & call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst @@ -587,6 +510,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -607,7 +532,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! - also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -624,7 +549,7 @@ subroutine coupling_prep (iblk) Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & fhocn (:,:,iblk), & - fswthru (:,:,iblk), & + fswthru (:,:,iblk), & fswthru_vdr (:,:,iblk), & fswthru_vdf (:,:,iblk), & fswthru_idr (:,:,iblk), & @@ -632,29 +557,30 @@ subroutine coupling_prep (iblk) faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & - flux_bio =flux_bio (:,:,1:nbtrcr,iblk), & + fzsal (:,:,iblk), fzsal_g (:,:,iblk), & + flux_bio (:,:,1:nbtrcr,iblk), & Qref_iso =Qref_iso (:,:,:,iblk), & fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk)) - + #ifdef CICE_IN_NEMO !echmod - comment this out for efficiency, if .not. calc_Tsfc if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! 
free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod #endif - call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling + call ice_timer_stop(timer_couple) ! atm/ocn coupling end subroutine coupling_prep @@ -663,10 +589,10 @@ end subroutine coupling_prep !======================================================================= ! ! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -699,15 +625,14 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! - Lsub, & ! rLsub ! 1/Lsub character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) + call icepack_query_parameters(puny_out=puny) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) diff --git a/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 b/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 index 59213f728..c7ae7601f 100644 --- a/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 +++ b/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 @@ -7,7 +7,7 @@ ! William H. Lipscomb, LANL ! ! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep -! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Streamlined for efficiency ! 2006 ECH: Converted to free source form (F90) ! 2007 BPB: Modified Delta-Eddington shortwave interface ! 2008 ECH: moved ESMF code to its own driver @@ -15,13 +15,11 @@ module CICE_RunMod use ice_kinds_mod - use ice_communicate, only: my_task, master_task use ice_fileunits, only: nu_diag use ice_arrays_column, only: oceanmixed_ice use ice_constants, only: c0, c1 use ice_constants, only: field_loc_center, field_type_scalar use ice_exit, only: abort_ice - use ice_memusage, only: ice_memusage_print use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_max_iso, icepack_max_aero use icepack_intfc, only: icepack_query_parameters @@ -45,7 +43,7 @@ module CICE_RunMod subroutine CICE_Run - use ice_calendar, only: dt, stop_now, advance_timestep + use ice_calendar, only: istep, istep1, time, dt, stop_now, calendar use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & get_wave_spec use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & @@ -84,12 +82,11 @@ subroutine CICE_Run call ice_step -! tcraig, use advance_timestep now -! istep = istep + 1 ! update time step counters -! istep1 = istep1 + 1 -! time = time + dt ! determine the time and date -! call calendar(time) ! at the end of the timestep - call advance_timestep() ! advance time + istep = istep + 1 ! 
update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + + call calendar(time) ! at the end of the timestep #ifndef CICE_IN_NEMO if (stop_now >= 1) exit timeLoop @@ -125,7 +122,7 @@ subroutine CICE_Run ! end of timestep loop !-------------------------------------------------------------------- - call ice_timer_stop(timer_step) ! end timestepping loop timer + call ice_timer_stop(timer_step) ! end timestepping loop timer end subroutine CICE_Run @@ -140,32 +137,31 @@ subroutine ice_step use ice_boundary, only: ice_HaloUpdate use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep - use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice - use ice_diagnostics_bgc, only: hbrine_diags, bgc_diags + use ice_diagnostics, only: init_mass_diags, runtime_diags + use ice_diagnostics_bgc, only: hbrine_diags, zsal_diags, bgc_diags use ice_domain, only: halo_info, nblocks use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart use ice_restart_column, only: write_restart_age, write_restart_FY, & - write_restart_lvl, write_restart_pond_lvl, & + write_restart_lvl, write_restart_pond_cesm, write_restart_pond_lvl, & write_restart_pond_topo, write_restart_aero, write_restart_fsd, & - write_restart_iso, write_restart_bgc, write_restart_hbrine, & - write_restart_snow + write_restart_iso, write_restart_bgc, write_restart_hbrine use ice_restart_driver, only: dumpfile use ice_restoring, only: restore_ice, ice_HaloRestore use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & - biogeochemistry, step_prep, step_dyn_wave, step_snow + biogeochemistry, save_init, step_dyn_wave use ice_timers, only: ice_timer_start, ice_timer_stop, & timer_diags, timer_column, timer_thermo, timer_bound, & timer_hist, timer_readwrite integer (kind=int_kind) :: & - iblk , & ! block index + iblk , & ! block index k , & ! dynamics supercycling index ktherm ! thermodynamics is off when ktherm = -1 @@ -173,27 +169,26 @@ subroutine ice_step offset ! 
d(age)/dt time offset logical (kind=log_kind) :: & - tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & - tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & - calc_Tsfc, skl_bgc, z_tracers, wave_spec + tr_iage, tr_FY, tr_lvl, tr_fsd, & + tr_pond_cesm, tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & + calc_Tsfc, skl_bgc, solve_zsal, z_tracers, wave_spec character(len=*), parameter :: subname = '(ice_step)' character (len=char_len) :: plabeld - if (debug_model) then - plabeld = 'beginning time step' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif + plabeld = 'beginning time step' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & - z_tracers_out=z_tracers, ktherm_out=ktherm, wave_spec_out=wave_spec) + solve_zsal_out=solve_zsal, z_tracers_out=z_tracers, ktherm_out=ktherm, & + wave_spec_out=wave_spec) call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & - tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_lvl_out=tr_lvl, tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & - tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -217,11 +212,12 @@ subroutine ice_step call ice_timer_start(timer_column) ! column physics call ice_timer_start(timer_thermo) ! thermodynamics - call step_prep + call save_init - if (ktherm >= 0) then - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) then !----------------------------------------------------------------- ! scale radiation fields @@ -229,44 +225,36 @@ subroutine ice_step if (calc_Tsfc) call prep_radiation (iblk) - if (debug_model) then - plabeld = 'post prep_radiation' - call debug_ice (iblk, plabeld) - endif + plabeld = 'post prep_radiation' + call debug_ice (iblk, plabeld) !----------------------------------------------------------------- ! thermodynamics and biogeochemistry !----------------------------------------------------------------- - + call step_therm1 (dt, iblk) ! vertical thermodynamics - if (debug_model) then - plabeld = 'post step_therm1' - call debug_ice (iblk, plabeld) - endif + plabeld = 'post step_therm1' + call debug_ice (iblk, plabeld) call biogeochemistry (dt, iblk) ! biogeochemistry - if (debug_model) then - plabeld = 'post biogeochemistry' - call debug_ice (iblk, plabeld) - endif + plabeld = 'post biogeochemistry' + call debug_ice (iblk, plabeld) call step_therm2 (dt, iblk) ! ice thickness distribution thermo - if (debug_model) then - plabeld = 'post step_therm2' - call debug_ice (iblk, plabeld) - endif + plabeld = 'post step_therm2' + call debug_ice (iblk, plabeld) - enddo - !$OMP END PARALLEL DO - endif ! ktherm > 0 + endif + + enddo ! iblk + !$OMP END PARALLEL DO ! clean up, update tendency diagnostics offset = dt - call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & - dagedt=dagedtt, offset=offset) + call update_state (dt, daidtt, dvidtt, dagedtt, offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -284,70 +272,49 @@ subroutine ice_step ! 
momentum, stress, transport call step_dyn_horiz (dt_dyn) - if (debug_model) then - plabeld = 'post step_dyn_horiz' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif + do iblk = 1, nblocks + plabeld = 'post step_dyn_horiz' + call debug_ice (iblk, plabeld) + enddo ! iblk ! ridging - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) enddo !$OMP END PARALLEL DO - if (debug_model) then - plabeld = 'post step_dyn_ridge' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo ! iblk - endif + do iblk = 1, nblocks + plabeld = 'post step_dyn_ridge' + call debug_ice (iblk, plabeld) + enddo ! iblk ! clean up, update tendency diagnostics offset = c0 - call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & - dagedt=dagedtd, offset=offset) + call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) enddo - if (debug_model) then - plabeld = 'post dynamics' - do iblk = 1, nblocks - call debug_ice (iblk, plabeld) - enddo - endif - - call ice_timer_start(timer_column) ! column physics - call ice_timer_start(timer_thermo) ! thermodynamics + plabeld = 'post dynamics' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo !----------------------------------------------------------------- - ! snow redistribution and metamorphosis + ! albedo, shortwave radiation !----------------------------------------------------------------- - if (tr_snow) then ! advanced snow physics - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) - do iblk = 1, nblocks - call step_snow (dt, iblk) - enddo - !$OMP END PARALLEL DO - call update_state (dt=dt) ! clean up - endif + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics - !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) +!MHRI: CHECK THIS OMP + !$OMP PARALLEL DO PRIVATE(iblk) do iblk = 1, nblocks - !----------------------------------------------------------------- - ! albedo, shortwave radiation - !----------------------------------------------------------------- - if (ktherm >= 0) call step_radiation (dt, iblk) - if (debug_model) then - plabeld = 'post step_radiation' - call debug_ice (iblk, plabeld) - endif + plabeld = 'post step_radiation' + call debug_ice (iblk, plabeld) !----------------------------------------------------------------- ! get ready for coupling and the next time step @@ -355,10 +322,8 @@ subroutine ice_step call coupling_prep (iblk) - if (debug_model) then - plabeld = 'post coupling_prep' - call debug_ice (iblk, plabeld) - endif + plabeld = 'post coupling_prep' + call debug_ice (iblk, plabeld) enddo ! iblk !$OMP END PARALLEL DO @@ -378,11 +343,9 @@ subroutine ice_step call ice_timer_start(timer_diags) ! diagnostics if (mod(istep,diagfreq) == 0) then call runtime_diags(dt) ! log file + if (solve_zsal) call zsal_diags if (skl_bgc .or. z_tracers) call bgc_diags if (tr_brine) call hbrine_diags - if (my_task == master_task) then - call ice_memusage_print(nu_diag,subname) - endif endif call ice_timer_stop(timer_diags) ! 
diagnostics @@ -396,22 +359,23 @@ subroutine ice_step if (tr_iage) call write_restart_age if (tr_FY) call write_restart_FY if (tr_lvl) call write_restart_lvl + if (tr_pond_cesm) call write_restart_pond_cesm if (tr_pond_lvl) call write_restart_pond_lvl if (tr_pond_topo) call write_restart_pond_topo - if (tr_snow) call write_restart_snow if (tr_fsd) call write_restart_fsd if (tr_iso) call write_restart_iso if (tr_aero) call write_restart_aero - if (skl_bgc .or. z_tracers) & - call write_restart_bgc + if (solve_zsal .or. skl_bgc .or. z_tracers) & + call write_restart_bgc if (tr_brine) call write_restart_hbrine if (kdyn == 2) call write_restart_eap call final_restart endif + call ice_timer_stop(timer_readwrite) ! reading/writing end subroutine ice_step - + !======================================================================= ! ! Prepare for coupling @@ -421,7 +385,7 @@ end subroutine ice_step subroutine coupling_prep (iblk) use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, & - albicen, albsnon, albpndn, apeffn, snowfracn + albicen, albsnon, albpndn, apeffn, fzsal_g, fzsal, snowfracn use ice_blocks, only: nx_block, ny_block, get_block, block use ice_domain, only: blocks_ice use ice_calendar, only: dt, nstreams @@ -430,13 +394,12 @@ subroutine coupling_prep (iblk) albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, & alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, & fresh_ai, fsalt_ai, fsalt, & - fswthru_ai, fhocn, scale_factor, snowfrac, & - fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & + fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & fsens, flat, fswabs, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & - flux_bio, flux_bio_ai + fzsal_ai, fzsal_g_ai, flux_bio, flux_bio_ai use ice_grid, only: tmask use ice_state, only: aicen, aice #ifdef CICE_IN_NEMO @@ -446,12 +409,12 @@ subroutine coupling_prep (iblk) use ice_step_mod, only: ocean_mixed_layer use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop - integer (kind=int_kind), intent(in) :: & - iblk ! block index + integer (kind=int_kind), intent(in) :: & + iblk ! block index ! local variables - integer (kind=int_kind) :: & + integer (kind=int_kind) :: & ilo,ihi,jlo,jhi, & ! beginning and end of physical domain n , & ! thickness category index i,j , & ! horizontal indices @@ -490,7 +453,7 @@ subroutine coupling_prep (iblk) enddo enddo - call ice_timer_start(timer_couple,iblk) ! atm/ocn coupling + call ice_timer_start(timer_couple) ! atm/ocn coupling if (oceanmixed_ice) & call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst @@ -587,6 +550,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fzsal_ai (i,j,iblk) = fzsal (i,j,iblk) + fzsal_g_ai(i,j,iblk) = fzsal_g(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr @@ -607,7 +572,7 @@ subroutine coupling_prep (iblk) enddo !----------------------------------------------------------------- - ! Divide fluxes by ice area + ! Divide fluxes by ice area ! - the CESM coupler assumes fluxes are per unit ice area ! 
- also needed for global budget in diagnostics !----------------------------------------------------------------- @@ -623,38 +588,34 @@ subroutine coupling_prep (iblk) evap (:,:,iblk), & Tref (:,:,iblk), Qref (:,:,iblk), & fresh (:,:,iblk), fsalt (:,:,iblk), & - fhocn (:,:,iblk), & - fswthru (:,:,iblk), & - fswthru_vdr (:,:,iblk), & - fswthru_vdf (:,:,iblk), & - fswthru_idr (:,:,iblk), & - fswthru_idf (:,:,iblk), & + fhocn (:,:,iblk), fswthru (:,:,iblk), & faero_ocn(:,:,:,iblk), & alvdr (:,:,iblk), alidr (:,:,iblk), & alvdf (:,:,iblk), alidf (:,:,iblk), & - flux_bio =flux_bio (:,:,1:nbtrcr,iblk), & + fzsal (:,:,iblk), fzsal_g (:,:,iblk), & + flux_bio (:,:,1:nbtrcr,iblk), & Qref_iso =Qref_iso (:,:,:,iblk), & fiso_evap=fiso_evap(:,:,:,iblk), & fiso_ocn =fiso_ocn (:,:,:,iblk)) - + #ifdef CICE_IN_NEMO !echmod - comment this out for efficiency, if .not. calc_Tsfc if (.not. calc_Tsfc) then !--------------------------------------------------------------- - ! If surface fluxes were provided, conserve these fluxes at ice - ! free points by passing to ocean. + ! If surface fluxes were provided, conserve these fluxes at ice + ! free points by passing to ocean. !--------------------------------------------------------------- - call sfcflux_to_ocn & + call sfcflux_to_ocn & (nx_block, ny_block, & tmask (:,:,iblk), aice_init(:,:,iblk), & fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), & fresh (:,:,iblk), fhocn (:,:,iblk)) - endif + endif !echmod #endif - call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling + call ice_timer_stop(timer_couple) ! atm/ocn coupling end subroutine coupling_prep @@ -663,10 +624,10 @@ end subroutine coupling_prep !======================================================================= ! ! If surface heat fluxes are provided to CICE instead of CICE calculating -! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can +! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can ! be provided at points which do not have ice. (This is could be due to ! the heat fluxes being calculated on a lower resolution grid or the -! heat fluxes not recalculated at every CICE timestep.) At ice free points, +! heat fluxes not recalculated at every CICE timestep.) At ice free points, ! conserve energy and water by passing these fluxes to the ocean. ! ! author: A. McLaren, Met Office @@ -699,15 +660,14 @@ subroutine sfcflux_to_ocn(nx_block, ny_block, & ! local variables integer (kind=int_kind) :: & i, j, n ! horizontal indices - + real (kind=dbl_kind) :: & puny, & ! - Lsub, & ! rLsub ! 
1/Lsub character(len=*), parameter :: subname = '(sfcflux_to_ocn)' - call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub) + call icepack_query_parameters(puny_out=puny) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 22cd3184a..58c541eef 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -7,10 +7,9 @@ module ice_init_column use ice_kinds_mod - use ice_blocks, only: nx_block, ny_block use ice_constants use ice_communicate, only: my_task, master_task, ice_barrier - use ice_domain_size, only: ncat, max_blocks + use ice_domain_size, only: ncat, max_blocks use ice_domain_size, only: nblyr, nilyr, nslyr use ice_domain_size, only: n_aero, n_zaero, n_algae use ice_domain_size, only: n_doc, n_dic, n_don @@ -35,7 +34,7 @@ module ice_init_column use icepack_intfc, only: icepack_init_zbgc use icepack_intfc, only: icepack_init_thermo use icepack_intfc, only: icepack_step_radiation, icepack_init_orbit - use icepack_intfc, only: icepack_init_bgc + use icepack_intfc, only: icepack_init_bgc, icepack_init_zsalinity use icepack_intfc, only: icepack_init_ocean_bio, icepack_load_ocean_bio_array use icepack_intfc, only: icepack_init_hbrine @@ -44,9 +43,9 @@ module ice_init_column private public :: init_thermo_vertical, init_shortwave, & init_age, init_FY, init_lvl, init_fsd, & - init_meltponds_lvl, init_meltponds_topo, & + init_meltponds_cesm, init_meltponds_lvl, init_meltponds_topo, & init_aerosol, init_bgc, init_hbrine, init_zbgc, input_zbgc, & - count_tracers, init_isotope, init_snowtracers + count_tracers, init_isotope ! namelist parameters needed locally @@ -130,6 +129,7 @@ module ice_init_column subroutine init_thermo_vertical + use ice_blocks, only: nx_block, ny_block use ice_flux, only: salinz, Tmltz integer (kind=int_kind) :: & @@ -145,7 +145,7 @@ subroutine init_thermo_vertical character(len=*), parameter :: subname='(init_thermo_vertical)' !----------------------------------------------------------------- - ! initialize + ! initialize heat_capacity, l_brine, and salinity profile !----------------------------------------------------------------- call icepack_query_parameters(depressT_out=depressT) @@ -184,10 +184,11 @@ subroutine init_shortwave albsnon, alvdrn, alidrn, alvdfn, alidfn, fswsfcn, & fswthrun, fswthrun_vdr, fswthrun_vdf, fswthrun_idr, fswthrun_idf, & fswintn, albpndn, apeffn, trcrn_sw, dhsn, ffracn, snowfracn, & + kaer_tab, waer_tab, gaer_tab, kaer_bc_tab, waer_bc_tab, gaer_bc_tab, bcenh, & swgrid, igrid - use ice_blocks, only: block, get_block + use ice_blocks, only: block, get_block, nx_block, ny_block use ice_calendar, only: dt, calendar_type, & - days_per_year, nextsw_cday, yday, msec + days_per_year, nextsw_cday, yday, sec use ice_diagnostics, only: npnt, print_points, pmloc, piloc, pjloc use ice_domain, only: nblocks, blocks_ice use ice_flux, only: alvdf, alidf, alvdr, alidr, & @@ -213,9 +214,8 @@ subroutine init_shortwave logical (kind=log_kind) :: & l_print_point, & ! flag to print designated grid point diagnostics debug, & ! if true, print diagnostics - dEdd_algae, & ! use prognostic chla in dEdd radiation - modal_aero, & ! use modal aerosol optical treatment - snwgrain ! use variable snow radius + dEdd_algae, & ! from icepack + modal_aero ! 
from icepack character (char_len) :: shortwave @@ -225,13 +225,12 @@ subroutine init_shortwave real (kind=dbl_kind), dimension(ncat) :: & fbri ! brine height to ice thickness - real(kind= dbl_kind), dimension(:,:), allocatable :: & - ztrcr_sw, & ! zaerosols (kg/m^3) and chla (mg/m^3) - rsnow ! snow grain radius tracer (10^-6 m) + real(kind=dbl_kind), allocatable :: & + ztrcr_sw(:,:) ! logical (kind=log_kind) :: tr_brine, tr_zaero, tr_bgc_n integer (kind=int_kind) :: nt_alvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero, & - nt_fbri, nt_tsfc, ntrcr, nbtrcr, nbtrcr_sw, nt_rsnw + nt_fbri, nt_tsfc, ntrcr, nbtrcr, nbtrcr_sw integer (kind=int_kind), dimension(icepack_max_algae) :: & nt_bgc_N integer (kind=int_kind), dimension(icepack_max_aero) :: & @@ -244,19 +243,17 @@ subroutine init_shortwave call icepack_query_parameters(shortwave_out=shortwave) call icepack_query_parameters(dEdd_algae_out=dEdd_algae) call icepack_query_parameters(modal_aero_out=modal_aero) - call icepack_query_parameters(snwgrain_out=snwgrain) call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr, nbtrcr_sw_out=nbtrcr_sw) call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_zaero_out=tr_zaero, & tr_bgc_n_out=tr_bgc_n) call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, nt_fbri_out=nt_fbri, nt_tsfc_out=nt_tsfc, & - nt_bgc_N_out=nt_bgc_N, nt_zaero_out=nt_zaero, nt_rsnw_out=nt_rsnw) + nt_bgc_N_out=nt_bgc_N, nt_zaero_out=nt_zaero) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__,line= __LINE__) allocate(ztrcr_sw(nbtrcr_sw, ncat)) - allocate(rsnow(nslyr,ncat)) do iblk=1,nblocks @@ -265,12 +262,12 @@ subroutine init_shortwave Iswabsn(:,:,:,:,iblk) = c0 Sswabsn(:,:,:,:,iblk) = c0 - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - + do j = 1, ny_block ! can be jlo, jhi do i = 1, nx_block ! can be ilo, ihi @@ -319,7 +316,7 @@ subroutine init_shortwave do j = jlo, jhi do i = ilo, ihi - if (shortwave(1:4) == 'dEdd') then ! delta Eddington + if (trim(shortwave) == 'dEdd') then ! delta Eddington #ifndef CESMCOUPLED ! 
initialize orbital parameters @@ -333,18 +330,14 @@ subroutine init_shortwave fbri(:) = c0 ztrcr_sw(:,:) = c0 - rsnow (:,:) = c0 do n = 1, ncat - if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) - if (snwgrain) then - do k = 1, nslyr - rsnow(k,n) = trcrn(i,j,nt_rsnw+k-1,n,iblk) - enddo - endif + if (tr_brine) fbri(n) = trcrn(i,j,nt_fbri,n,iblk) enddo if (tmask(i,j,iblk)) then - call icepack_step_radiation (dt=dt, & + call icepack_step_radiation (dt=dt, ncat=ncat, & + nblyr=nblyr, nilyr=nilyr, nslyr=nslyr, & + dEdd_algae=dEdd_algae, & swgrid=swgrid(:), igrid=igrid(:), & fbri=fbri(:), & aicen=aicen(i,j,:,iblk), & @@ -363,7 +356,12 @@ subroutine init_shortwave calendar_type=calendar_type, & days_per_year=days_per_year, & nextsw_cday=nextsw_cday, yday=yday, & - sec=msec, & + sec=sec, & + kaer_tab=kaer_tab, kaer_bc_tab=kaer_bc_tab(:,:), & + waer_tab=waer_tab, waer_bc_tab=waer_bc_tab(:,:), & + gaer_tab=gaer_tab, gaer_bc_tab=gaer_bc_tab(:,:), & + bcenh=bcenh(:,:,:), & + modal_aero=modal_aero, & swvdr=swvdr(i,j,iblk), swvdf=swvdf(i,j,iblk),& swidr=swidr(i,j,iblk), swidf=swidf(i,j,iblk),& coszen=coszen(i,j,iblk), fsnow=fsnow(i,j,iblk),& @@ -381,11 +379,10 @@ subroutine init_shortwave albpndn=albpndn(i,j,:,iblk), apeffn=apeffn(i,j,:,iblk), & snowfracn=snowfracn(i,j,:,iblk), & dhsn=dhsn(i,j,:,iblk), ffracn=ffracn(i,j,:,iblk), & - rsnow=rsnow(:,:), & l_print_point=l_print_point, & initonly = .true.) endif - + !----------------------------------------------------------------- ! Define aerosol tracer on shortwave grid !----------------------------------------------------------------- @@ -402,7 +399,7 @@ subroutine init_shortwave enddo ! j !----------------------------------------------------------------- - ! Aggregate albedos + ! Aggregate albedos ! Match loop order in coupling_prep for same order of operations !----------------------------------------------------------------- @@ -411,7 +408,7 @@ subroutine init_shortwave do i = ilo, ihi if (aicen(i,j,n,iblk) > puny) then - + alvdf(i,j,iblk) = alvdf(i,j,iblk) & + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk) alidf(i,j,iblk) = alidf(i,j,iblk) & @@ -420,7 +417,7 @@ subroutine init_shortwave + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk) alidr(i,j,iblk) = alidr(i,j,iblk) & + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk) - + netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) & + swvdf(i,j,iblk) + swidf(i,j,iblk) if (netsw > puny) then ! sun above horizon @@ -431,12 +428,12 @@ subroutine init_shortwave albpnd(i,j,iblk) = albpnd(i,j,iblk) & + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk) endif - + apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) & + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk) snowfrac(i,j,iblk) = snowfrac(i,j,iblk) & + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk) - + endif ! aicen > puny enddo ! i @@ -478,7 +475,6 @@ subroutine init_shortwave enddo ! iblk deallocate(ztrcr_sw) - deallocate(rsnow) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -516,7 +512,7 @@ end subroutine init_FY ! Initialize ice lvl tracers (call prior to reading restart data) - subroutine init_lvl(iblk, alvl, vlvl) + subroutine init_lvl(iblk, alvl, vlvl) use ice_constants, only: c0, c1 use ice_arrays_column, only: ffracn, dhsn @@ -537,6 +533,22 @@ end subroutine init_lvl !======================================================================= +! Initialize melt ponds. + + subroutine init_meltponds_cesm(apnd, hpnd) + + real(kind=dbl_kind), dimension(:,:,:), intent(out) :: & + apnd , & ! melt pond area fraction + hpnd ! 
melt pond depth + character(len=*),parameter :: subname='(init_meltponds_cesm)' + + apnd(:,:,:) = c0 + hpnd(:,:,:) = c0 + + end subroutine init_meltponds_cesm + +!======================================================================= + ! Initialize melt ponds. subroutine init_meltponds_lvl(apnd, hpnd, ipnd, dhsn) @@ -570,34 +582,11 @@ subroutine init_meltponds_topo(apnd, hpnd, ipnd) apnd(:,:,:) = c0 hpnd(:,:,:) = c0 ipnd(:,:,:) = c0 - + end subroutine init_meltponds_topo !======================================================================= -! Initialize snow redistribution/metamorphosis tracers (call prior to reading restart data) - - subroutine init_snowtracers(smice, smliq, rhos_cmp, rsnw) - - real(kind=dbl_kind), dimension(:,:,:,:), intent(out) :: & - smice, smliq, rhos_cmp, rsnw - character(len=*),parameter :: subname='(init_snowtracers)' - - real (kind=dbl_kind) :: & - rsnw_fall, & ! snow grain radius of new fallen snow (10^-6 m) - rhos ! snow density (kg/m^3) - - call icepack_query_parameters(rsnw_fall_out=rsnw_fall, rhos_out=rhos) - - rsnw (:,:,:,:) = rsnw_fall - rhos_cmp(:,:,:,:) = rhos - smice (:,:,:,:) = rhos - smliq (:,:,:,:) = c0 - - end subroutine init_snowtracers - -!======================================================================= - ! Initialize floe size distribution tracer (call prior to reading restart data) subroutine init_fsd(floesize) @@ -605,6 +594,7 @@ subroutine init_fsd(floesize) use ice_arrays_column, only: floe_rad_c, floe_binwidth, & wavefreq, dwavefreq, wave_sig_ht, wave_spectrum, & d_afsd_newi, d_afsd_latg, d_afsd_latm, d_afsd_wave, d_afsd_weld + use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: ncat, max_blocks, nfsd use ice_init, only: ice_ic use ice_state, only: aicen @@ -722,18 +712,20 @@ end subroutine init_aerosol ! Initialize vertical profile for biogeochemistry - subroutine init_bgc() + subroutine init_bgc() use ice_arrays_column, only: zfswin, trcrn_sw, & ocean_bio_all, ice_bio_net, snow_bio_net, & - cgrid, igrid, bphi, iDi, bTiz, iki + cgrid, igrid, bphi, iDi, bTiz, iki, & + Rayleigh_criteria, Rayleigh_real use ice_blocks, only: block, get_block use ice_domain, only: nblocks, blocks_ice use ice_flux, only: sss use ice_flux_bgc, only: nit, amm, sil, dmsp, dms, algalN, & doc, don, dic, fed, fep, zaeros, hum use ice_forcing_bgc, only: init_bgc_data, get_forcing_bgc - use ice_restart_column, only: read_restart_bgc, restart_bgc + use ice_restart_column, only: restart_zsal, & + read_restart_bgc, restart_bgc use ice_state, only: trcrn ! local variables @@ -747,24 +739,33 @@ subroutine init_bgc() integer (kind=int_kind) :: & max_nbtrcr, max_algae, max_don, max_doc, max_dic, max_aero, max_fe + logical (kind=log_kind) :: & + RayleighC , & + solve_zsal + type (block) :: & this_block ! block information for current block real(kind=dbl_kind), allocatable :: & trcrn_bgc(:,:) - + real(kind=dbl_kind), dimension(nilyr,ncat) :: & - sicen + sicen + + real(kind=dbl_kind) :: & + RayleighR integer (kind=int_kind) :: & - nbtrcr, ntrcr, ntrcr_o, nt_sice + nbtrcr, ntrcr, ntrcr_o, & + nt_sice, nt_bgc_S character(len=*), parameter :: subname='(init_bgc)' ! 
Initialize + call icepack_query_parameters(solve_zsal_out=solve_zsal) call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr, ntrcr_out=ntrcr, ntrcr_o_out=ntrcr_o) - call icepack_query_tracer_indices(nt_sice_out=nt_sice) + call icepack_query_tracer_indices(nt_sice_out=nt_sice, nt_bgc_S_out=nt_bgc_S) call icepack_query_tracer_sizes(max_nbtrcr_out=max_nbtrcr, & max_algae_out=max_algae, max_don_out=max_don, max_doc_out=max_doc, & max_dic_out=max_dic, max_aero_out=max_aero, max_fe_out=max_fe) @@ -774,31 +775,78 @@ subroutine init_bgc() allocate(trcrn_bgc(ntrcr,ncat)) - bphi(:,:,:,:,:) = c0 ! initial porosity for no ice + bphi(:,:,:,:,:) = c0 ! initial porosity for no ice iDi (:,:,:,:,:) = c0 ! interface diffusivity bTiz(:,:,:,:,:) = c0 ! initial bio grid ice temperature iki (:,:,:,:,:) = c0 ! permeability ocean_bio_all(:,:,:,:) = c0 - ice_bio_net (:,:,:,:) = c0 ! integrated ice tracer conc (mmol/m^2 or mg/m^2) + ice_bio_net (:,:,:,:) = c0 ! integrated ice tracer conc (mmol/m^2 or mg/m^2) snow_bio_net (:,:,:,:) = c0 ! integrated snow tracer conc (mmol/m^2 or mg/m^2) zfswin (:,:,:,:,:) = c0 ! shortwave flux on bio grid trcrn_sw (:,:,:,:,:) = c0 ! tracers active in the shortwave calculation trcrn_bgc (:,:) = c0 + RayleighR = c0 + RayleighC = .false. !----------------------------------------------------------------- - ! biogeochemistry initialization + ! zsalinity initialization !----------------------------------------------------------------- + + if (solve_zsal) then ! default values + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,trcrn_bgc) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + call icepack_init_zsalinity(nblyr=nblyr, ntrcr_o=ntrcr_o, & + Rayleigh_criteria = RayleighC, & + Rayleigh_real = RayleighR, & + trcrn_bgc = trcrn_bgc, & + nt_bgc_S = nt_bgc_S, & + ncat = ncat, & + sss = sss(i,j,iblk)) + if (.not. restart_zsal) then + Rayleigh_real (i,j,iblk) = RayleighR + Rayleigh_criteria(i,j,iblk) = RayleighC + do n = 1,ncat + do k = 1, nblyr + trcrn (i,j,nt_bgc_S+k-1, n,iblk) = & + trcrn_bgc( nt_bgc_S+k-1-ntrcr_o,n) + enddo + enddo + endif + enddo ! i + enddo ! j + enddo ! iblk + !$OMP END PARALLEL DO + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif ! solve_zsal - if (.not. restart_bgc) then + if (.not. solve_zsal) restart_zsal = .false. + !----------------------------------------------------------------- + ! biogeochemistry initialization + !----------------------------------------------------------------- + + if (.not. restart_bgc) then + !----------------------------------------------------------------- ! 
Initial Ocean Values if not coupled to the ocean bgc !----------------------------------------------------------------- !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo @@ -832,14 +880,14 @@ subroutine init_bgc() !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi call icepack_load_ocean_bio_array(max_nbtrcr=icepack_max_nbtrcr, & max_algae=icepack_max_algae, max_don=icepack_max_don, & @@ -847,7 +895,7 @@ subroutine init_bgc() max_dic=icepack_max_dic, max_aero=icepack_max_aero, & nit =nit (i,j, iblk), amm=amm(i,j, iblk), sil =sil (i,j, iblk), & dmsp=dmsp(i,j, iblk), dms=dms(i,j, iblk), algalN=algalN(i,j,:,iblk), & - doc =doc (i,j,:,iblk), don=don(i,j,:,iblk), dic =dic (i,j,:,iblk), & + doc =doc (i,j,:,iblk), don=don(i,j,:,iblk), dic =dic (i,j,:,iblk), & fed =fed (i,j,:,iblk), fep=fep(i,j,:,iblk), zaeros=zaeros(i,j,:,iblk), & hum=hum (i,j, iblk), ocean_bio_all=ocean_bio_all(i,j,:,iblk)) @@ -861,18 +909,18 @@ subroutine init_bgc() if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (.not. restart_bgc) then + if (.not. restart_bgc) then !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,n,ilo,ihi,jlo,jhi,this_block,sicen,trcrn_bgc) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi do n = 1, ncat do k = 1, nilyr sicen(k,n) = trcrn(i,j,nt_sice+k-1,n,iblk) @@ -900,7 +948,7 @@ subroutine init_bgc() ! read restart to complete BGC initialization !----------------------------------------------------------------- - if (restart_bgc) call read_restart_bgc + if (restart_zsal .or. restart_bgc) call read_restart_bgc deallocate(trcrn_bgc) @@ -943,7 +991,7 @@ subroutine init_hbrine() if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__,line= __LINE__) - first_ice(:,:,:,:) = .true. + first_ice(:,:,:,:) = .true. if (tr_brine) trcrn(:,:,nt_fbri,:,:) = c1 end subroutine init_hbrine @@ -951,7 +999,7 @@ end subroutine init_hbrine !======================================================================= ! Namelist variables, set to default values; may be altered at run time -! +! ! author Elizabeth C. Hunke, LANL ! 
Nicole Jeffery, LANL @@ -959,7 +1007,8 @@ subroutine input_zbgc use ice_arrays_column, only: restore_bgc use ice_broadcast, only: broadcast_scalar - use ice_restart_column, only: restart_bgc, restart_hbrine + use ice_restart_column, only: restart_bgc, restart_zsal, & + restart_hbrine use ice_restart_shared, only: restart character (len=char_len) :: & @@ -972,13 +1021,13 @@ subroutine input_zbgc tr_bgc_N, tr_bgc_C, tr_bgc_chl, & tr_bgc_DON, tr_bgc_Fe, tr_zaero, & tr_bgc_hum, tr_aero - + integer (kind=int_kind) :: & ktherm logical (kind=log_kind) :: & solve_zsal, skl_bgc, z_tracers, scale_bgc, solve_zbgc, dEdd_algae, & - modal_aero, restart_zsal + modal_aero character (char_len) :: & bgc_flux_type @@ -999,7 +1048,7 @@ subroutine input_zbgc restore_bgc, restart_bgc, scale_bgc, solve_zsal, restart_zsal, & tr_bgc_Nit, tr_bgc_C, tr_bgc_chl, tr_bgc_Am, tr_bgc_Sil, & tr_bgc_DMS, tr_bgc_PON, tr_bgc_hum, tr_bgc_DON, tr_bgc_Fe, & - grid_o, grid_o_t, l_sk, grid_oS, & + grid_o, grid_o_t, l_sk, grid_oS, & l_skS, phi_snow, initbio_frac, frazil_scav, & ratio_Si2N_diatoms , ratio_Si2N_sp , ratio_Si2N_phaeo , & ratio_S2N_diatoms , ratio_S2N_sp , ratio_S2N_phaeo , & @@ -1036,9 +1085,9 @@ subroutine input_zbgc fedtype_1 , feptype_1 , zaerotype_bc1 , & zaerotype_bc2 , zaerotype_dust1 , zaerotype_dust2 , & zaerotype_dust3 , zaerotype_dust4 , ratio_C2N_diatoms , & - ratio_C2N_sp , ratio_C2N_phaeo , ratio_chl2N_diatoms, & + ratio_C2N_sp , ratio_C2N_phaeo , ratio_chl2N_diatoms, & ratio_chl2N_sp , ratio_chl2N_phaeo , F_abs_chl_diatoms , & - F_abs_chl_sp , F_abs_chl_phaeo , ratio_C2N_proteins + F_abs_chl_sp , F_abs_chl_phaeo , ratio_C2N_proteins !----------------------------------------------------------------- @@ -1061,22 +1110,22 @@ subroutine input_zbgc restart_bgc = .false. ! biogeochemistry restart restart_zsal = .false. ! salinity restart restart_hbrine = .false. ! hbrine restart - scale_bgc = .false. ! initial bgc tracers proportional to S - skl_bgc = .false. ! solve skeletal biochemistry + scale_bgc = .false. ! initial bgc tracers proportional to S + skl_bgc = .false. ! solve skeletal biochemistry z_tracers = .false. ! solve vertically resolved tracers dEdd_algae = .false. ! dynamic algae contributes to shortwave absorption ! in delta-Eddington calculation - solve_zbgc = .false. ! turn on z layer biochemistry - tr_bgc_PON = .false. !--------------------------------------------- + solve_zbgc = .false. ! turn on z layer biochemistry + tr_bgc_PON = .false. !--------------------------------------------- tr_bgc_Nit = .false. ! biogeochemistry (skl or zbgc) tr_bgc_C = .false. ! if skl_bgc = .true. then skl tr_bgc_chl = .false. ! if z_tracers = .true. then vertically resolved tr_bgc_Sil = .false. ! if z_tracers + solve_zbgc = .true. then - tr_bgc_Am = .false. ! vertically resolved with reactions + tr_bgc_Am = .false. ! vertically resolved with reactions tr_bgc_DMS = .false. !------------------------------------------------ - tr_bgc_DON = .false. ! + tr_bgc_DON = .false. ! tr_bgc_hum = .false. ! - tr_bgc_Fe = .false. ! + tr_bgc_Fe = .false. ! tr_bgc_N = .true. ! ! brine height parameter @@ -1085,17 +1134,17 @@ subroutine input_zbgc ! skl biology parameters bgc_flux_type = 'Jin2006'! type of ocean-ice poston velocity ('constant') - ! z biology parameters - grid_o = c5 ! for bottom flux - grid_o_t = c5 ! for top flux - l_sk = 7.0_dbl_kind ! characteristic diffusive scale (m) + ! z biology parameters + grid_o = c5 ! for bottom flux + grid_o_t = c5 ! for top flux + l_sk = 7.0_dbl_kind ! 
characteristic diffusive scale (m) initbio_frac = c1 ! fraction of ocean trcr concentration in bio trcrs - frazil_scav = c1 ! increase in initial bio tracer from ocean scavenging - ratio_Si2N_diatoms = 1.8_dbl_kind ! algal Si to N (mol/mol) + frazil_scav = c1 ! increase in initial bio tracer from ocean scavenging + ratio_Si2N_diatoms = 1.8_dbl_kind ! algal Si to N (mol/mol) ratio_Si2N_sp = c0 ! diatoms, small plankton, phaeocystis ratio_Si2N_phaeo = c0 ratio_S2N_diatoms = 0.03_dbl_kind ! algal S to N (mol/mol) - ratio_S2N_sp = 0.03_dbl_kind + ratio_S2N_sp = 0.03_dbl_kind ratio_S2N_phaeo = 0.03_dbl_kind ratio_Fe2C_diatoms = 0.0033_dbl_kind ! algal Fe to C (umol/mol) ratio_Fe2C_sp = 0.0033_dbl_kind @@ -1106,7 +1155,7 @@ subroutine input_zbgc ratio_Fe2DON = 0.023_dbl_kind ! Fe to N of DON (nmol/umol) ratio_Fe2DOC_s = p1 ! Fe to C of DOC (nmol/umol) saccharids ratio_Fe2DOC_l = 0.033_dbl_kind ! Fe to C of DOC (nmol/umol) lipids - fr_resp = 0.05_dbl_kind ! frac of algal growth lost due to respiration + fr_resp = 0.05_dbl_kind ! frac of algal growth lost due to respiration tau_min = 5200.0_dbl_kind ! rapid mobile to stationary exchanges (s) tau_max = 1.73e5_dbl_kind ! long time mobile to stationary exchanges (s) algal_vel = 1.11e-8_dbl_kind! 0.5 cm/d(m/s) Lavoie 2005 1.5 cm/day @@ -1115,13 +1164,13 @@ subroutine input_zbgc chlabs_diatoms = 0.03_dbl_kind ! chl absorption (1/m/(mg/m^3)) chlabs_sp = 0.01_dbl_kind chlabs_phaeo = 0.05_dbl_kind - alpha2max_low_diatoms = 0.8_dbl_kind ! light limitation (1/(W/m^2)) + alpha2max_low_diatoms = 0.8_dbl_kind ! light limitation (1/(W/m^2)) alpha2max_low_sp = 0.67_dbl_kind alpha2max_low_phaeo = 0.67_dbl_kind - beta2max_diatoms = 0.018_dbl_kind ! light inhibition (1/(W/m^2)) + beta2max_diatoms = 0.018_dbl_kind ! light inhibition (1/(W/m^2)) beta2max_sp = 0.0025_dbl_kind beta2max_phaeo = 0.01_dbl_kind - mu_max_diatoms = 1.2_dbl_kind ! maximum growth rate (1/day) + mu_max_diatoms = 1.2_dbl_kind ! maximum growth rate (1/day) mu_max_sp = 0.851_dbl_kind mu_max_phaeo = 0.851_dbl_kind grow_Tdep_diatoms = 0.06_dbl_kind ! Temperature dependence of growth (1/C) @@ -1151,10 +1200,10 @@ subroutine input_zbgc K_Fe_diatoms = c1 ! iron half saturation (nM) K_Fe_sp = 0.2_dbl_kind K_Fe_phaeo = p1 - f_don_protein = 0.6_dbl_kind ! fraction of spilled grazing to proteins - kn_bac_protein = 0.03_dbl_kind ! Bacterial degredation of DON (1/d) - f_don_Am_protein = 0.25_dbl_kind ! fraction of remineralized DON to ammonium - f_doc_s = 0.4_dbl_kind ! fraction of mortality to DOC + f_don_protein = 0.6_dbl_kind ! fraction of spilled grazing to proteins + kn_bac_protein = 0.03_dbl_kind ! Bacterial degredation of DON (1/d) + f_don_Am_protein = 0.25_dbl_kind ! fraction of remineralized DON to ammonium + f_doc_s = 0.4_dbl_kind ! fraction of mortality to DOC f_doc_l = 0.4_dbl_kind f_exude_s = c1 ! fraction of exudation to DOC f_exude_l = c1 @@ -1164,15 +1213,15 @@ subroutine input_zbgc fsal = c1 ! Salinity limitation (ppt) op_dep_min = p1 ! Light attenuates for optical depths exceeding min fr_graze_s = p5 ! fraction of grazing spilled or slopped - fr_graze_e = p5 ! fraction of assimilation excreted + fr_graze_e = p5 ! fraction of assimilation excreted fr_mort2min = p5 ! fractionation of mortality to Am fr_dFe = 0.3_dbl_kind ! fraction of remineralized nitrogen ! (in units of algal iron) - k_nitrif = c0 ! nitrification rate (1/day) + k_nitrif = c0 ! nitrification rate (1/day) t_iron_conv = 3065.0_dbl_kind ! desorption loss pFe to dFe (day) - max_loss = 0.9_dbl_kind ! 
restrict uptake to % of remaining value - max_dfe_doc1 = 0.2_dbl_kind ! max ratio of dFe to saccharides in the ice - !(nM Fe/muM C) + max_loss = 0.9_dbl_kind ! restrict uptake to % of remaining value + max_dfe_doc1 = 0.2_dbl_kind ! max ratio of dFe to saccharides in the ice + !(nM Fe/muM C) fr_resp_s = 0.75_dbl_kind ! DMSPd fraction of respiration loss as DMSPd y_sk_DMS = p5 ! fraction conversion given high yield t_sk_conv = 3.0_dbl_kind ! Stefels conversion time (d) @@ -1206,47 +1255,46 @@ subroutine input_zbgc F_abs_chl_diatoms = 2.0_dbl_kind ! scales absorbed radiation for dEdd F_abs_chl_sp = 4.0_dbl_kind F_abs_chl_phaeo = 5.0 - ratio_C2N_proteins = 7.0_dbl_kind ! ratio of C to N in proteins (mol/mol) + ratio_C2N_proteins = 7.0_dbl_kind ! ratio of C to N in proteins (mol/mol) ! z salinity parameters - grid_oS = c5 ! for bottom flux - l_skS = 7.0_dbl_kind ! characteristic diffusive scale (m) + grid_oS = c5 ! for bottom flux + l_skS = 7.0_dbl_kind ! characteristic diffusive scale (m) !----------------------------------------------------------------- ! read from input file !----------------------------------------------------------------- - if (my_task == master_task) then - write(nu_diag,*) subname,' Reading zbgc_nml' + call get_fileunit(nu_nml) - call get_fileunit(nu_nml) + if (my_task == master_task) then open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) if (nml_error /= 0) then - call abort_ice(subname//'ERROR: zbgc_nml open file '// & - trim(nml_filename), & - file=__FILE__, line=__LINE__) - endif + nml_error = -1 + else + nml_error = 1 + endif - nml_error = 1 + print*,'Reading zbgc_nml' do while (nml_error > 0) read(nu_nml, nml=zbgc_nml,iostat=nml_error) end do - if (nml_error /= 0) then - call abort_ice(subname//'ERROR: zbgc_nml reading ', & - file=__FILE__, line=__LINE__) - endif - close(nu_nml) - call release_fileunit(nu_nml) + if (nml_error == 0) close(nu_nml) + endif + call broadcast_scalar(nml_error, master_task) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: reading zbgc namelist') endif + call release_fileunit(nu_nml) !----------------------------------------------------------------- ! 
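! --- Editor's illustrative sketch (not part of the patch) -----------------
! The hunk above reworks input_zbgc so that only the master task opens and
! reads zbgc_nml, the resulting iostat is broadcast, and every rank aborts
! consistently on a nonzero value.  The minimal standalone program below
! mirrors the same open/read/iostat idiom in serial form; the namelist
! group, file name, and variables here are hypothetical, and the CICE
! routines broadcast_scalar/abort_ice are only referenced in comments.
program nml_read_demo
  implicit none
  integer :: nu_nml, nml_error
  real    :: grid_o, l_sk
  namelist /zbgc_demo_nml/ grid_o, l_sk

  grid_o = 5.0
  l_sk   = 7.0

  ! write a small input file first so the example is self-contained
  open (newunit=nu_nml, file='zbgc_demo_in', status='replace')
  write(nu_nml, nml=zbgc_demo_nml)
  close(nu_nml)

  ! master-task style read: open, then loop until the read succeeds or fails
  open (newunit=nu_nml, file='zbgc_demo_in', status='old', iostat=nml_error)
  if (nml_error /= 0) then
     nml_error = -1          ! open failed
  else
     nml_error =  1          ! force at least one read attempt
  endif
  do while (nml_error > 0)
     read(nu_nml, nml=zbgc_demo_nml, iostat=nml_error)
  end do
  if (nml_error == 0) close(nu_nml)

  ! in the parallel model this value would be broadcast to all ranks;
  ! a nonzero value then triggers a collective abort rather than a hang
  if (nml_error /= 0) then
     print *, 'ERROR reading zbgc_demo_nml, iostat =', nml_error
  else
     print *, 'read ok: grid_o =', grid_o, ' l_sk =', l_sk
  endif
end program nml_read_demo
! ---------------------------------------------------------------------------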
broadcast !----------------------------------------------------------------- - call broadcast_scalar(solve_zsal, master_task) - call broadcast_scalar(restart_zsal, master_task) + call broadcast_scalar(solve_zsal, master_task) + call broadcast_scalar(restart_zsal, master_task) call broadcast_scalar(tr_brine, master_task) - call broadcast_scalar(restart_hbrine, master_task) + call broadcast_scalar(restart_hbrine, master_task) call broadcast_scalar(phi_snow, master_task) call broadcast_scalar(grid_oS, master_task) @@ -1264,14 +1312,14 @@ subroutine input_zbgc call broadcast_scalar(tr_bgc_Am, master_task) call broadcast_scalar(tr_bgc_Sil, master_task) call broadcast_scalar(tr_bgc_hum, master_task) - call broadcast_scalar(tr_bgc_DMS, master_task) - call broadcast_scalar(tr_bgc_PON, master_task) - call broadcast_scalar(tr_bgc_DON, master_task) - call broadcast_scalar(tr_bgc_Fe, master_task) + call broadcast_scalar(tr_bgc_DMS, master_task) + call broadcast_scalar(tr_bgc_PON, master_task) + call broadcast_scalar(tr_bgc_DON, master_task) + call broadcast_scalar(tr_bgc_Fe, master_task) call broadcast_scalar(z_tracers, master_task) call broadcast_scalar(tr_zaero, master_task) - call broadcast_scalar(dEdd_algae, master_task) + call broadcast_scalar(dEdd_algae, master_task) call broadcast_scalar(modal_aero, master_task) call broadcast_scalar(grid_o, master_task) call broadcast_scalar(grid_o_t, master_task) @@ -1303,31 +1351,31 @@ subroutine input_zbgc call broadcast_scalar(chlabs_diatoms , master_task) call broadcast_scalar(chlabs_sp , master_task) call broadcast_scalar(chlabs_phaeo , master_task) - call broadcast_scalar(alpha2max_low_diatoms , master_task) + call broadcast_scalar(alpha2max_low_diatoms , master_task) call broadcast_scalar(alpha2max_low_sp , master_task) call broadcast_scalar(alpha2max_low_phaeo , master_task) - call broadcast_scalar(beta2max_diatoms , master_task) - call broadcast_scalar(beta2max_sp , master_task) - call broadcast_scalar(beta2max_phaeo , master_task) - call broadcast_scalar(mu_max_diatoms , master_task) - call broadcast_scalar(mu_max_sp , master_task) - call broadcast_scalar(mu_max_phaeo , master_task) - call broadcast_scalar(grow_Tdep_diatoms, master_task) - call broadcast_scalar(grow_Tdep_sp , master_task) - call broadcast_scalar(grow_Tdep_phaeo , master_task) - call broadcast_scalar(fr_graze_diatoms , master_task) - call broadcast_scalar(fr_graze_sp , master_task) - call broadcast_scalar(fr_graze_phaeo , master_task) - call broadcast_scalar(mort_pre_diatoms , master_task) - call broadcast_scalar(mort_pre_sp , master_task) - call broadcast_scalar(mort_pre_phaeo , master_task) - call broadcast_scalar(mort_Tdep_diatoms, master_task) - call broadcast_scalar(mort_Tdep_sp , master_task) - call broadcast_scalar(mort_Tdep_phaeo , master_task) - call broadcast_scalar(k_exude_diatoms , master_task) - call broadcast_scalar(k_exude_sp , master_task) - call broadcast_scalar(k_exude_phaeo , master_task) - call broadcast_scalar(K_Nit_diatoms , master_task) + call broadcast_scalar(beta2max_diatoms , master_task) + call broadcast_scalar(beta2max_sp , master_task) + call broadcast_scalar(beta2max_phaeo , master_task) + call broadcast_scalar(mu_max_diatoms , master_task) + call broadcast_scalar(mu_max_sp , master_task) + call broadcast_scalar(mu_max_phaeo , master_task) + call broadcast_scalar(grow_Tdep_diatoms, master_task) + call broadcast_scalar(grow_Tdep_sp , master_task) + call broadcast_scalar(grow_Tdep_phaeo , master_task) + call broadcast_scalar(fr_graze_diatoms , master_task) 
+ call broadcast_scalar(fr_graze_sp , master_task) + call broadcast_scalar(fr_graze_phaeo , master_task) + call broadcast_scalar(mort_pre_diatoms , master_task) + call broadcast_scalar(mort_pre_sp , master_task) + call broadcast_scalar(mort_pre_phaeo , master_task) + call broadcast_scalar(mort_Tdep_diatoms, master_task) + call broadcast_scalar(mort_Tdep_sp , master_task) + call broadcast_scalar(mort_Tdep_phaeo , master_task) + call broadcast_scalar(k_exude_diatoms , master_task) + call broadcast_scalar(k_exude_sp , master_task) + call broadcast_scalar(k_exude_phaeo , master_task) + call broadcast_scalar(K_Nit_diatoms , master_task) call broadcast_scalar(K_Nit_sp , master_task) call broadcast_scalar(K_Nit_phaeo , master_task) call broadcast_scalar(K_Am_diatoms , master_task) @@ -1343,17 +1391,17 @@ subroutine input_zbgc call broadcast_scalar(kn_bac_protein , master_task) call broadcast_scalar(f_don_Am_protein , master_task) call broadcast_scalar(f_doc_s , master_task) - call broadcast_scalar(f_doc_l , master_task) + call broadcast_scalar(f_doc_l , master_task) call broadcast_scalar(f_exude_s , master_task) call broadcast_scalar(f_exude_l , master_task) - call broadcast_scalar(k_bac_s , master_task) + call broadcast_scalar(k_bac_s , master_task) call broadcast_scalar(k_bac_l , master_task) call broadcast_scalar(T_max , master_task) call broadcast_scalar(fsal , master_task) call broadcast_scalar(op_dep_min , master_task) - call broadcast_scalar(fr_graze_s , master_task) - call broadcast_scalar(fr_graze_e , master_task) - call broadcast_scalar(fr_mort2min , master_task) + call broadcast_scalar(fr_graze_s , master_task) + call broadcast_scalar(fr_graze_e , master_task) + call broadcast_scalar(fr_mort2min , master_task) call broadcast_scalar(fr_dFe , master_task) call broadcast_scalar(k_nitrif , master_task) call broadcast_scalar(t_iron_conv , master_task) @@ -1361,18 +1409,18 @@ subroutine input_zbgc call broadcast_scalar(max_dfe_doc1 , master_task) call broadcast_scalar(fr_resp_s , master_task) call broadcast_scalar(y_sk_DMS , master_task) - call broadcast_scalar(t_sk_conv , master_task) + call broadcast_scalar(t_sk_conv , master_task) call broadcast_scalar(t_sk_ox , master_task) call broadcast_scalar(algaltype_diatoms, master_task) - call broadcast_scalar(algaltype_sp , master_task) - call broadcast_scalar(algaltype_phaeo , master_task) + call broadcast_scalar(algaltype_sp , master_task) + call broadcast_scalar(algaltype_phaeo , master_task) call broadcast_scalar(nitratetype , master_task) call broadcast_scalar(ammoniumtype , master_task) call broadcast_scalar(silicatetype , master_task) - call broadcast_scalar(dmspptype , master_task) - call broadcast_scalar(dmspdtype , master_task) + call broadcast_scalar(dmspptype , master_task) + call broadcast_scalar(dmspdtype , master_task) call broadcast_scalar(humtype , master_task) - call broadcast_scalar(doctype_s , master_task) + call broadcast_scalar(doctype_s , master_task) call broadcast_scalar(doctype_l , master_task) call broadcast_scalar(dontype_protein , master_task) call broadcast_scalar(fedtype_1 , master_task) @@ -1392,7 +1440,7 @@ subroutine input_zbgc call broadcast_scalar(F_abs_chl_diatoms , master_task) call broadcast_scalar(F_abs_chl_sp , master_task) call broadcast_scalar(F_abs_chl_phaeo , master_task) - call broadcast_scalar(ratio_C2N_proteins , master_task) + call broadcast_scalar(ratio_C2N_proteins , master_task) !----------------------------------------------------------------- ! 
zsalinity and brine @@ -1403,13 +1451,21 @@ subroutine input_zbgc write(nu_diag,*) subname//' WARNING: restart = false, setting bgc restart flags to false' restart_bgc = .false. restart_hbrine = .false. + restart_zsal = .false. endif - if (solve_zsal) then + if (solve_zsal .and. nblyr < 1) then if (my_task == master_task) then - write(nu_diag,*) subname,' ERROR: solve_zsal=T deprecated' + write(nu_diag,*) subname,' ERROR: solve_zsal=T but 0 zsalinity tracers' endif abort_flag = 101 + endif + + if (solve_zsal .and. ((.not. tr_brine) .or. (ktherm /= 1))) then + if (my_task == master_task) then + write(nu_diag,*) subname,' ERROR: solve_zsal needs tr_brine=T and ktherm=1' + endif + abort_flag = 102 endif if (tr_brine .and. nblyr < 1 ) then @@ -1417,7 +1473,7 @@ subroutine input_zbgc write(nu_diag,*) subname,' ERROR: tr_brine=T but no biology layers compiled' endif abort_flag = 103 - endif + endif !----------------------------------------------------------------- ! biogeochemistry @@ -1452,14 +1508,14 @@ subroutine input_zbgc abort_flag = 107 endif - if (dEdd_algae .AND. shortwave(1:4) /= 'dEdd') then + if (dEdd_algae .AND. trim(shortwave) /= 'dEdd') then if (my_task == master_task) then - write(nu_diag,*) subname,' ERROR: dEdd_algae = T but shortwave /= dEdd or dEdd_snicar_ad' + write(nu_diag,*) subname,' ERROR: dEdd_algae = T but shortwave /= dEdd' endif abort_flag = 108 endif - if (dEdd_algae .AND. (.NOT. tr_bgc_N) .AND. (.NOT. tr_zaero)) then + if (dEdd_algae .AND. (.NOT. tr_bgc_N) .AND. (.NOT. tr_zaero)) then if (my_task == master_task) then write(nu_diag,*) subname,' ERROR: need tr_bgc_N or tr_zaero for dEdd_algae' endif @@ -1472,10 +1528,10 @@ subroutine input_zbgc endif abort_flag = 110 endif - - if (modal_aero .AND. shortwave(1:4) /= 'dEdd') then + + if (modal_aero .AND. trim(shortwave) /= 'dEdd') then if (my_task == master_task) then - write(nu_diag,*) subname,' ERROR: modal_aero = T but shortwave /= dEdd or dEdd_snicar_ad' + write(nu_diag,*) subname,' ERROR: modal_aero = T but shortwave /= dEdd' endif abort_flag = 111 endif @@ -1592,9 +1648,12 @@ subroutine input_zbgc write(nu_diag,1010) ' restart_hbrine = ', restart_hbrine write(nu_diag,1005) ' phi_snow = ', phi_snow endif - write(nu_diag,1010) ' solve_zsal (deprecated) = ', solve_zsal - write(nu_diag,* ) ' WARNING: zsalinity has been deprecated. Namelists and interfaces' - write(nu_diag,* ) ' will be removed in a future version' + write(nu_diag,1010) ' solve_zsal = ', solve_zsal + if (solve_zsal) then + write(nu_diag,1010) ' restart_zsal = ', restart_zsal + write(nu_diag,1000) ' grid_oS = ', grid_oS + write(nu_diag,1005) ' l_skS = ', l_skS + endif write(nu_diag,1010) ' skl_bgc = ', skl_bgc write(nu_diag,1010) ' restart_bgc = ', restart_bgc @@ -1660,7 +1719,7 @@ subroutine input_zbgc !----------------------------------------------------------------- call icepack_init_parameters( & - ktherm_in=ktherm, shortwave_in=shortwave, & + ktherm_in=ktherm, shortwave_in=shortwave, solve_zsal_in=solve_zsal, & skl_bgc_in=skl_bgc, z_tracers_in=z_tracers, scale_bgc_in=scale_bgc, & dEdd_algae_in=dEdd_algae, & solve_zbgc_in=solve_zbgc, & @@ -1688,7 +1747,6 @@ subroutine input_zbgc 1010 format (a30,2x,l6) ! logical 1020 format (a30,2x,i6) ! integer 1030 format (a30, a8) ! character - 1031 format (a30, a ) ! 
character end subroutine input_zbgc @@ -1712,18 +1770,16 @@ subroutine count_tracers integer (kind=int_kind) :: ntrcr logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond, tr_aero, tr_fsd - logical (kind=log_kind) :: tr_snow - logical (kind=log_kind) :: tr_iso, tr_pond_lvl, tr_pond_topo + logical (kind=log_kind) :: tr_iso, tr_pond_cesm, tr_pond_lvl, tr_pond_topo integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, nt_aero integer (kind=int_kind) :: nt_fsd, nt_isosno, nt_isoice - integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw integer (kind=int_kind) :: & nbtrcr, nbtrcr_sw, & ntrcr_o, nt_fbri, & nt_bgc_Nit, nt_bgc_Am, nt_bgc_Sil, & - nt_bgc_DMS, nt_bgc_PON, & + nt_bgc_DMS, nt_bgc_PON, nt_bgc_S, & nt_bgc_DMSPp, nt_bgc_DMSPd, & nt_zbgc_frac, nlt_chl_sw, & nlt_bgc_Nit, nlt_bgc_Am, nlt_bgc_Sil, & @@ -1780,15 +1836,16 @@ subroutine count_tracers tr_bgc_N, tr_bgc_C, tr_bgc_chl, & tr_bgc_DON, tr_bgc_Fe, tr_zaero, & tr_bgc_hum - + logical (kind=log_kind) :: & - skl_bgc, z_tracers + solve_zsal, skl_bgc, z_tracers character(len=*), parameter :: subname='(count_tracers)' !----------------------------------------------------------------- call icepack_query_parameters( & + solve_zsal_out=solve_zsal, & skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) call icepack_warnings_flush(nu_diag) @@ -1797,9 +1854,9 @@ subroutine count_tracers call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & tr_lvl_out=tr_lvl, tr_aero_out=tr_aero, tr_pond_out=tr_pond, & - tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_cesm_out=tr_pond_cesm, tr_pond_lvl_out=tr_pond_lvl, & tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_fsd_out=tr_fsd, & - tr_snow_out=tr_snow, tr_iso_out=tr_iso, & + tr_iso_out=tr_iso, & tr_bgc_Nit_out=tr_bgc_Nit, tr_bgc_Am_out =tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & tr_bgc_DMS_out=tr_bgc_DMS, tr_bgc_PON_out=tr_bgc_PON, & tr_bgc_N_out =tr_bgc_N, tr_bgc_C_out =tr_bgc_C, tr_bgc_chl_out=tr_bgc_chl, & @@ -1857,26 +1914,11 @@ subroutine count_tracers nt_ipnd = ntrcr ! on level-ice ponds (if frzpnd='hlid') endif if (tr_pond_topo) then - ntrcr = ntrcr + 1 ! + ntrcr = ntrcr + 1 ! nt_ipnd = ntrcr ! refrozen pond ice lid thickness endif endif - nt_smice = 0 - nt_smliq = 0 - nt_rhos = 0 - nt_rsnw = 0 - if (tr_snow) then - nt_smice = ntrcr + 1 - ntrcr = ntrcr + nslyr ! mass of ice in nslyr snow layers - nt_smliq = ntrcr + 1 - ntrcr = ntrcr + nslyr ! mass of liquid in nslyr snow layers - nt_rhos = ntrcr + 1 - ntrcr = ntrcr + nslyr ! snow density in nslyr layers - nt_rsnw = ntrcr + 1 - ntrcr = ntrcr + nslyr ! snow grain radius in nslyr layers - endif - nt_fsd = 0 if (tr_fsd) then nt_fsd = ntrcr + 1 ! floe size distribution @@ -1900,14 +1942,13 @@ subroutine count_tracers !tcx, modify code so we don't have to reset n_aero here n_aero = 0 !echmod - this is not getting set correctly (overwritten later?) endif - + !----------------------------------------------------------------- ! initialize zbgc tracer indices !----------------------------------------------------------------- nbtrcr = 0 nbtrcr_sw = 0 - nt_zbgc_frac = 0 ! vectors of size icepack_max_algae nlt_bgc_N(:) = 0 @@ -1966,6 +2007,12 @@ subroutine count_tracers ntrcr = ntrcr + 1 endif + nt_bgc_S = 0 + if (solve_zsal) then ! .true. only if tr_brine = .true. + nt_bgc_S = ntrcr + 1 + ntrcr = ntrcr + nblyr + endif + if (skl_bgc .or. z_tracers) then if (skl_bgc) then @@ -2149,6 +2196,7 @@ subroutine count_tracers enddo ! mm endif ! 
tr_zaero + nt_zbgc_frac = 0 if (nbtrcr > 0) then nt_zbgc_frac = ntrcr + 1 ntrcr = ntrcr + nbtrcr @@ -2158,7 +2206,7 @@ subroutine count_tracers !tcx, +1 here is the unused tracer, want to get rid of it ntrcr = ntrcr + 1 -!tcx, reset unused tracer index, eventually get rid of it. +!tcx, reset unusaed tracer index, eventually get rid of it. if (nt_iage <= 0) nt_iage = ntrcr if (nt_FY <= 0) nt_FY = ntrcr if (nt_alvl <= 0) nt_alvl = ntrcr @@ -2166,16 +2214,12 @@ subroutine count_tracers if (nt_apnd <= 0) nt_apnd = ntrcr if (nt_hpnd <= 0) nt_hpnd = ntrcr if (nt_ipnd <= 0) nt_ipnd = ntrcr - if (nt_smice <= 0) nt_smice = ntrcr - if (nt_smliq <= 0) nt_smliq = ntrcr - if (nt_rhos <= 0) nt_rhos = ntrcr - if (nt_rsnw <= 0) nt_rsnw = ntrcr if (nt_fsd <= 0) nt_fsd = ntrcr if (nt_isosno<= 0) nt_isosno= ntrcr if (nt_isoice<= 0) nt_isoice= ntrcr if (nt_aero <= 0) nt_aero = ntrcr if (nt_fbri <= 0) nt_fbri = ntrcr -! if (nt_bgc_S <= 0) nt_bgc_S = ntrcr + if (nt_bgc_S <= 0) nt_bgc_S = ntrcr if (my_task == master_task) then write(nu_diag,*) ' ' @@ -2196,10 +2240,9 @@ subroutine count_tracers nt_qice_in=nt_qice, nt_qsno_in=nt_qsno, nt_iage_in=nt_iage, nt_fy_in=nt_fy, & nt_alvl_in=nt_alvl, nt_vlvl_in=nt_vlvl, nt_apnd_in=nt_apnd, nt_hpnd_in=nt_hpnd, & nt_ipnd_in=nt_ipnd, nt_fsd_in=nt_fsd, nt_aero_in=nt_aero, & - nt_smice_in=nt_smice, nt_smliq_in=nt_smliq, nt_rhos_in=nt_rhos, nt_rsnw_in=nt_rsnw, & nt_isosno_in=nt_isosno, nt_isoice_in=nt_isoice, nt_fbri_in=nt_fbri, & nt_bgc_Nit_in=nt_bgc_Nit, nt_bgc_Am_in=nt_bgc_Am, nt_bgc_Sil_in=nt_bgc_Sil, & - nt_bgc_DMS_in=nt_bgc_DMS, nt_bgc_PON_in=nt_bgc_PON, & + nt_bgc_DMS_in=nt_bgc_DMS, nt_bgc_PON_in=nt_bgc_PON, nt_bgc_S_in=nt_bgc_S, & nt_bgc_N_in=nt_bgc_N, nt_bgc_chl_in=nt_bgc_chl, & nt_bgc_DOC_in=nt_bgc_DOC, nt_bgc_DON_in=nt_bgc_DON, nt_bgc_DIC_in=nt_bgc_DIC, & nt_zaero_in=nt_zaero, nt_bgc_DMSPp_in=nt_bgc_DMSPp, nt_bgc_DMSPd_in=nt_bgc_DMSPd, & @@ -2237,12 +2280,12 @@ subroutine init_zbgc use ice_state, only: trcr_base, trcr_depend, n_trcr_strata, & nt_strata - use ice_arrays_column, only: R_C2N, R_chl2N, R_C2N_DON, R_Si2N, trcrn_sw + use ice_arrays_column, only: R_C2N, R_chl2N, R_C2N_DON, R_Si2N integer (kind=int_kind) :: & nbtrcr, nbtrcr_sw, nt_fbri, & nt_bgc_Nit, nt_bgc_Am, nt_bgc_Sil, & - nt_bgc_DMS, nt_bgc_PON, & + nt_bgc_DMS, nt_bgc_PON, nt_bgc_S, & nt_bgc_DMSPp, nt_bgc_DMSPd, & nt_zbgc_frac, nlt_chl_sw, & nlt_bgc_Nit, nlt_bgc_Am, nlt_bgc_Sil, & @@ -2323,7 +2366,7 @@ subroutine init_zbgc tau_rel ! release timescale (s), stationary to mobile phase logical (kind=log_kind) :: & - skl_bgc, z_tracers, dEdd_algae + skl_bgc, z_tracers, dEdd_algae, solve_zsal real (kind=dbl_kind), dimension(icepack_max_algae) :: & F_abs_chl ! 
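! --- Editor's illustrative sketch (not part of the patch) -----------------
! count_tracers above assigns tracer indices by bumping a running counter:
! each tracer family records "first index = ntrcr + 1", then adds the slots
! it needs, and unused families are later parked on a final throwaway slot.
! A minimal standalone version of that bookkeeping, with made-up sizes and
! flags (tr_pond, solve_zsal, nblyr are assumptions for the demo only):
program tracer_count_demo
  implicit none
  integer :: ntrcr, nt_apnd, nt_hpnd, nt_bgc_S
  integer, parameter :: nblyr = 7
  logical, parameter :: tr_pond = .true., solve_zsal = .false.

  ntrcr   = 1          ! slot 1: surface temperature, say
  nt_apnd = 0
  nt_hpnd = 0
  nt_bgc_S = 0

  if (tr_pond) then
     ntrcr = ntrcr + 1
     nt_apnd = ntrcr   ! melt pond area fraction
     ntrcr = ntrcr + 1
     nt_hpnd = ntrcr   ! melt pond depth
  endif

  if (solve_zsal) then
     nt_bgc_S = ntrcr + 1
     ntrcr = ntrcr + nblyr   ! one salinity tracer per bio layer
  endif

  ntrcr = ntrcr + 1    ! extra unused slot, as in count_tracers
  if (nt_bgc_S <= 0) nt_bgc_S = ntrcr   ! park unused indices on that slot

  print *, 'ntrcr =', ntrcr, ' nt_apnd =', nt_apnd, &
           ' nt_hpnd =', nt_hpnd, ' nt_bgc_S =', nt_bgc_S
end program tracer_count_demo
! ---------------------------------------------------------------------------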
to scale absorption in Dedd @@ -2393,10 +2436,12 @@ subroutine init_zbgc !----------------------------------------------------------------- call icepack_query_parameters( & + solve_zsal_out=solve_zsal, & skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & dEdd_algae_out=dEdd_algae, & grid_o_out=grid_o, l_sk_out=l_sk, & initbio_frac_out=initbio_frac, & + grid_oS_out=grid_oS, l_skS_out=l_skS, & phi_snow_out=phi_snow, frazil_scav_out = frazil_scav) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & @@ -2422,7 +2467,7 @@ subroutine init_zbgc call icepack_query_tracer_indices( & nt_fbri_out=nt_fbri, & nt_bgc_Nit_out=nt_bgc_Nit, nt_bgc_Am_out=nt_bgc_Am, nt_bgc_Sil_out=nt_bgc_Sil, & - nt_bgc_DMS_out=nt_bgc_DMS, nt_bgc_PON_out=nt_bgc_PON, & + nt_bgc_DMS_out=nt_bgc_DMS, nt_bgc_PON_out=nt_bgc_PON, nt_bgc_S_out=nt_bgc_S, & nt_bgc_N_out=nt_bgc_N, nt_bgc_chl_out=nt_bgc_chl, & nt_bgc_DOC_out=nt_bgc_DOC, nt_bgc_DON_out=nt_bgc_DON, nt_bgc_DIC_out=nt_bgc_DIC, & nt_zaero_out=nt_zaero, nt_bgc_DMSPp_out=nt_bgc_DMSPp, nt_bgc_DMSPd_out=nt_bgc_DMSPd, & @@ -2606,6 +2651,18 @@ subroutine init_zbgc ntd = 0 ! if nt_fbri /= 0 then use fbri dependency if (nt_fbri == 0) ntd = -1 ! otherwise make tracers depend on ice volume + if (solve_zsal) then ! .true. only if tr_brine = .true. + do k = 1,nblyr + trcr_depend(nt_bgc_S + k - 1) = 2 + nt_fbri + ntd + trcr_base (nt_bgc_S,1) = c0 ! default: ice area + trcr_base (nt_bgc_S,2) = c1 + trcr_base (nt_bgc_S,3) = c0 + n_trcr_strata(nt_bgc_S) = 1 + nt_strata(nt_bgc_S,1) = nt_fbri + nt_strata(nt_bgc_S,2) = 0 + enddo + endif + bio_index(:) = 0 bio_index_o(:) = 0 @@ -2621,7 +2678,7 @@ subroutine init_zbgc if (skl_bgc .or. z_tracers) then if (tr_bgc_N) then - do mm = 1, n_algae + do mm = 1, n_algae call init_bgc_trcr(nk, nt_fbri, & nt_bgc_N(mm), nlt_bgc_N(mm), & algaltype(mm), nt_depend, & @@ -2641,14 +2698,14 @@ subroutine init_zbgc nt_strata, bio_index) bio_index_o(nlt_bgc_Nit) = icepack_max_algae + 1 endif ! tr_bgc_Nit - + if (tr_bgc_C) then ! ! Algal C is not yet distinct from algal N ! * Reqires exudation and/or changing C:N ratios ! for implementation ! - ! do mm = 1,n_algae + ! do mm = 1,n_algae ! call init_bgc_trcr(nk, nt_fbri, & ! nt_bgc_C(mm), nlt_bgc_C(mm), & ! algaltype(mm), nt_depend, & @@ -2698,7 +2755,7 @@ subroutine init_zbgc trcr_base, n_trcr_strata, & nt_strata, bio_index) bio_index_o(nlt_bgc_Am) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 2 - endif + endif if (tr_bgc_Sil) then call init_bgc_trcr(nk, nt_fbri, & nt_bgc_Sil, nlt_bgc_Sil, & @@ -2707,7 +2764,7 @@ subroutine init_zbgc trcr_base, n_trcr_strata, & nt_strata, bio_index) bio_index_o(nlt_bgc_Sil) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 3 - endif + endif if (tr_bgc_DMS) then ! all together call init_bgc_trcr(nk, nt_fbri, & nt_bgc_DMSPp, nlt_bgc_DMSPp, & @@ -2732,7 +2789,7 @@ subroutine init_zbgc trcr_base, n_trcr_strata, & nt_strata, bio_index) bio_index_o(nlt_bgc_DMS) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic + 6 - endif + endif if (tr_bgc_PON) then call init_bgc_trcr(nk, nt_fbri, & nt_bgc_PON, nlt_bgc_PON, & @@ -2774,8 +2831,8 @@ subroutine init_zbgc bio_index_o(nlt_bgc_Fep(mm)) = 2*icepack_max_algae + icepack_max_doc + icepack_max_dic & + icepack_max_don + icepack_max_fe + 7 + mm enddo ! mm - endif ! tr_bgc_Fe - + endif ! 
tr_bgc_Fe + if (tr_bgc_hum) then call init_bgc_trcr(nk, nt_fbri, & nt_bgc_hum, nlt_bgc_hum, & @@ -2784,7 +2841,7 @@ subroutine init_zbgc trcr_base, n_trcr_strata, & nt_strata, bio_index) bio_index_o(nlt_bgc_hum) = 2*icepack_max_algae + icepack_max_doc + 8 + icepack_max_dic & - + icepack_max_don + 2*icepack_max_fe + icepack_max_aero + + icepack_max_don + 2*icepack_max_fe + icepack_max_aero endif endif ! skl_bgc or z_tracers @@ -2808,7 +2865,7 @@ subroutine init_zbgc ! and 2 snow layers (snow surface + interior) nk = nblyr + 1 - nt_depend = 2 + nt_fbri + ntd + nt_depend = 2 + nt_fbri + ntd ! z layer aerosols if (tr_zaero) then @@ -2829,15 +2886,15 @@ subroutine init_zbgc endif ! tr_zaero if (nbtrcr > 0) then - do k = 1,nbtrcr - zbgc_frac_init(k) = c1 - trcr_depend(nt_zbgc_frac+k-1) = 2+nt_fbri + do k = 1,nbtrcr + zbgc_frac_init(k) = c1 + trcr_depend(nt_zbgc_frac+k-1) = 2+nt_fbri trcr_base(nt_zbgc_frac+ k - 1,1) = c0 trcr_base(nt_zbgc_frac+ k - 1,2) = c1 trcr_base(nt_zbgc_frac+ k - 1,3) = c0 - n_trcr_strata(nt_zbgc_frac+ k - 1)= 1 + n_trcr_strata(nt_zbgc_frac+ k - 1)= 1 nt_strata(nt_zbgc_frac+ k - 1,1) = nt_fbri - nt_strata(nt_zbgc_frac+ k - 1,2) = 0 + nt_strata(nt_zbgc_frac+ k - 1,2) = 0 tau_ret(k) = c1 tau_rel(k) = c1 if (bgc_tracer_type(k) >= c0 .and. bgc_tracer_type(k) < p5) then @@ -2865,7 +2922,7 @@ subroutine init_zbgc do k = 1, nbtrcr zbgc_init_frac(k) = frazil_scav if (bgc_tracer_type(k) < c0) zbgc_init_frac(k) = initbio_frac - enddo + enddo !----------------------------------------------------------------- ! set values in icepack @@ -2882,7 +2939,7 @@ subroutine init_zbgc !----------------------------------------------------------------- ! final consistency checks - !----------------------------------------------------------------- + !----------------------------------------------------------------- if (nbtrcr > icepack_max_nbtrcr) then write (nu_diag,*) subname,' ' write (nu_diag,*) subname,'nbtrcr > icepack_max_nbtrcr' @@ -2891,10 +2948,6 @@ subroutine init_zbgc endif if (.NOT. dEdd_algae) nbtrcr_sw = 1 - ! tcraig, added 6/1/21, why is nbtrcr_sw set here? - call icepack_init_tracer_sizes(nbtrcr_sw_in=nbtrcr_sw) - allocate(trcrn_sw(nx_block,ny_block,nbtrcr_sw,ncat,max_blocks)) ! bgc tracers active in the delta-Eddington shortwave - !----------------------------------------------------------------- ! spew !----------------------------------------------------------------- @@ -2903,13 +2956,13 @@ subroutine init_zbgc write(nu_diag,1020) ' number of bio tracers = ', nbtrcr write(nu_diag,1020) ' number of Isw tracers = ', nbtrcr_sw - + elseif (z_tracers) then - + write(nu_diag,1020) ' number of ztracers = ', nbtrcr write(nu_diag,1020) ' number of Isw tracers = ', nbtrcr_sw write(nu_diag,1000) ' initbio_frac = ', initbio_frac - write(nu_diag,1000) ' frazil_scav = ', frazil_scav + write(nu_diag,1000) ' frazil_scav = ', frazil_scav endif ! skl_bgc or solve_bgc endif ! master_task @@ -2958,7 +3011,7 @@ subroutine init_bgc_trcr(nk, nt_fbri, & integer (kind=int_kind) :: & k , & ! loop index n_strata , & ! temporary values - nt_strata1, & ! + nt_strata1, & ! nt_strata2 real (kind=dbl_kind) :: & @@ -2971,7 +3024,7 @@ subroutine init_bgc_trcr(nk, nt_fbri, & !-------- bgc_tracer_type(nlt_bgc) = bgctype - + if (nk > 1) then ! include vertical bgc in snow do k = nk, nk+1 trcr_depend (nt_bgc + k ) = 2 ! 
snow volume @@ -2983,10 +3036,10 @@ subroutine init_bgc_trcr(nk, nt_fbri, & nt_strata (nt_bgc + k,2) = 0 enddo - trcr_base1 = c0 - trcr_base2 = c1 + trcr_base1 = c0 + trcr_base2 = c1 trcr_base3 = c0 - n_strata = 1 + n_strata = 1 nt_strata1 = nt_fbri nt_strata2 = 0 else ! nk = 1 diff --git a/cicecore/shared/ice_restart_column.F90 b/cicecore/shared/ice_restart_column.F90 index 2c5b18c36..e819b1098 100644 --- a/cicecore/shared/ice_restart_column.F90 +++ b/cicecore/shared/ice_restart_column.F90 @@ -12,7 +12,7 @@ module ice_restart_column use ice_communicate, only: my_task, master_task use ice_constants, only: c0, c1, p5 use ice_constants, only: field_loc_center, field_type_scalar - use ice_domain_size, only: ncat, nslyr, nfsd, nblyr + use ice_domain_size, only: ncat, nfsd, nblyr use ice_restart,only: read_restart_field, write_restart_field use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag @@ -29,25 +29,26 @@ module ice_restart_column public :: write_restart_age, read_restart_age, & write_restart_FY, read_restart_FY, & write_restart_lvl, read_restart_lvl, & + write_restart_pond_cesm, read_restart_pond_cesm, & write_restart_pond_lvl, read_restart_pond_lvl, & write_restart_pond_topo, read_restart_pond_topo, & - write_restart_snow, read_restart_snow, & write_restart_fsd, read_restart_fsd, & write_restart_iso, read_restart_iso, & write_restart_aero, read_restart_aero, & write_restart_bgc, read_restart_bgc, & write_restart_hbrine, read_restart_hbrine - logical (kind=log_kind), public :: & + logical (kind=log_kind), public :: & restart_age , & ! if .true., read age tracer restart file restart_FY , & ! if .true., read FY tracer restart file restart_lvl , & ! if .true., read lvl tracer restart file + restart_pond_cesm, & ! if .true., read meltponds restart file restart_pond_lvl , & ! if .true., read meltponds restart file restart_pond_topo, & ! if .true., read meltponds restart file - restart_snow , & ! if .true., read snow tracer restart file restart_fsd , & ! if .true., read floe size restart file restart_iso , & ! if .true., read isotope tracer restart file restart_aero , & ! if .true., read aerosol tracer restart file + restart_zsal , & ! if .true., read Salinity from restart file restart_hbrine , & ! if .true., read hbrine from restart file restart_bgc ! if .true., read bgc restart file @@ -253,6 +254,73 @@ subroutine read_restart_lvl() end subroutine read_restart_lvl +!======================================================================= +! +! Dumps all values needed for restarting +! +! authors Elizabeth C. Hunke, LANL +! David A. Bailey, NCAR + + subroutine write_restart_pond_cesm() + + use ice_fileunits, only: nu_dump_pond + use ice_state, only: trcrn + + ! local variables + + logical (kind=log_kind) :: diag + integer (kind=int_kind) :: nt_apnd, nt_hpnd + character(len=*),parameter :: subname='(write_restart_pond_cesm)' + + call icepack_query_tracer_indices(nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + diag = .true. + + call write_restart_field(nu_dump_pond,0,trcrn(:,:,nt_apnd,:,:),'ruf8', & + 'apnd',ncat,diag) + call write_restart_field(nu_dump_pond,0,trcrn(:,:,nt_hpnd,:,:),'ruf8', & + 'hpnd',ncat,diag) + + end subroutine write_restart_pond_cesm + +!======================================================================= + +! Reads all values needed for a meltpond volume restart +! +! authors Elizabeth C. Hunke, LANL +! David A. 
Bailey, NCAR + + subroutine read_restart_pond_cesm() + + use ice_fileunits, only: nu_restart_pond + use ice_state, only: trcrn + + ! local variables + + logical (kind=log_kind) :: & + diag + integer (kind=int_kind) :: nt_apnd, nt_hpnd + character(len=*),parameter :: subname='(read_restart_pond_cesm)' + + call icepack_query_tracer_indices(nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + diag = .true. + + if (my_task == master_task) write(nu_diag,*) subname,'min/max cesm ponds' + + call read_restart_field(nu_restart_pond,0,trcrn(:,:,nt_apnd,:,:),'ruf8', & + 'apnd',ncat,diag,field_loc_center,field_type_scalar) + call read_restart_field(nu_restart_pond,0,trcrn(:,:,nt_hpnd,:,:),'ruf8', & + 'hpnd',ncat,diag,field_loc_center,field_type_scalar) + + end subroutine read_restart_pond_cesm + !======================================================================= ! ! Dumps all values needed for restarting @@ -304,7 +372,7 @@ end subroutine write_restart_pond_lvl subroutine read_restart_pond_lvl() use ice_arrays_column, only: dhsn, ffracn - use ice_fileunits, only: nu_restart_pond + use ice_fileunits, only: nu_restart_pond use ice_flux, only: fsnow use ice_state, only: trcrn @@ -384,7 +452,7 @@ end subroutine write_restart_pond_topo subroutine read_restart_pond_topo() - use ice_fileunits, only: nu_restart_pond + use ice_fileunits, only: nu_restart_pond use ice_state, only: trcrn ! local variables @@ -415,93 +483,6 @@ end subroutine read_restart_pond_topo !======================================================================= -! Dumps all values needed for restarting snow redistribution/metamorphism -! author Elizabeth C. Hunke, LANL - - subroutine write_restart_snow() - - use ice_fileunits, only: nu_dump_snow - use ice_state, only: trcrn - - ! local variables - - logical (kind=log_kind) :: diag - integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw, k - character(len=3) :: ck - character(len=*),parameter :: subname='(write_restart_snow)' - - call icepack_query_tracer_indices(nt_smice_out=nt_smice, & - nt_smliq_out=nt_smliq, nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - diag = .true. - - !----------------------------------------------------------------- - - do k = 1,nslyr - write(ck,'(i3.3)') k - call write_restart_field(nu_dump_snow,0, trcrn(:,:,nt_smice+k-1,:,:), & - 'ruf8','smice'//trim(ck),ncat,diag) - call write_restart_field(nu_dump_snow,0, trcrn(:,:,nt_smliq+k-1,:,:), & - 'ruf8','smliq'//trim(ck),ncat,diag) - call write_restart_field(nu_dump_snow,0, trcrn(:,:,nt_rhos+k-1,:,:), & - 'ruf8','rhos'//trim(ck),ncat,diag) - call write_restart_field(nu_dump_snow,0, trcrn(:,:,nt_rsnw+k-1,:,:), & - 'ruf8','rsnw'//trim(ck),ncat,diag) - enddo - - end subroutine write_restart_snow - -!======================================================================= - -! Reads all values needed for a restart with snow redistribution/metamorphism -! author Elizabeth C. Hunke, LANL - - subroutine read_restart_snow() - - use ice_fileunits, only: nu_restart_snow - use ice_state, only: trcrn - - ! 
local variables - - logical (kind=log_kind) :: & - diag - integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw, k - character(len=3) :: ck - character(len=*),parameter :: subname='(read_restart_snow)' - - call icepack_query_tracer_indices(nt_smice_out=nt_smice, & - nt_smliq_out=nt_smliq, nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) - call icepack_warnings_flush(nu_diag) - if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & - file=__FILE__, line=__LINE__) - - diag = .true. - - if (my_task == master_task) write(nu_diag,*) subname,'min/max snow tracers' - - do k=1,nslyr - write(ck,'(i3.3)') k - call read_restart_field(nu_restart_snow,0,trcrn(:,:,nt_smice+k-1,:,:), & - 'ruf8','smice'//trim(ck),ncat,diag, & - field_type=field_type_scalar,field_loc=field_loc_center) - call read_restart_field(nu_restart_snow,0,trcrn(:,:,nt_smliq+k-1,:,:), & - 'ruf8','smliq'//trim(ck),ncat,diag, & - field_type=field_type_scalar,field_loc=field_loc_center) - call read_restart_field(nu_restart_snow,0,trcrn(:,:,nt_rhos+k-1,:,:), & - 'ruf8','rhos'//trim(ck),ncat,diag, & - field_type=field_type_scalar,field_loc=field_loc_center) - call read_restart_field(nu_restart_snow,0,trcrn(:,:,nt_rsnw+k-1,:,:), & - 'ruf8','rsnw'//trim(ck),ncat,diag, & - field_type=field_type_scalar,field_loc=field_loc_center) - enddo - - end subroutine read_restart_snow - -!======================================================================= - ! Dumps all values needed for restarting ! author Elizabeth C. Hunke, LANL @@ -514,7 +495,7 @@ subroutine write_restart_fsd() logical (kind=log_kind) :: diag integer (kind=int_kind) :: nt_fsd, k - character(len=3) :: ck + character*3 ck character(len=*),parameter :: subname='(write_restart_fsd)' call icepack_query_tracer_indices(nt_fsd_out=nt_fsd) @@ -549,7 +530,7 @@ subroutine read_restart_fsd() logical (kind=log_kind) :: & diag integer (kind=int_kind) :: nt_fsd, k - character(len=3) :: ck + character*3 ck character(len=*),parameter :: subname='(read_restart_fsd)' call icepack_query_tracer_indices(nt_fsd_out=nt_fsd) @@ -585,7 +566,7 @@ subroutine write_restart_iso() logical (kind=log_kind) :: diag integer (kind=int_kind) :: nt_isosno, nt_isoice, k - character(len=3) :: ck + character*3 ck character(len=*),parameter :: subname='(write_restart_iso)' call icepack_query_tracer_indices(nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) @@ -627,7 +608,7 @@ subroutine read_restart_iso() logical (kind=log_kind) :: & diag integer (kind=int_kind) :: nt_isosno, nt_isoice, k - character(len=3) :: ck + character*3 ck character(len=*),parameter :: subname='(read_restart_iso)' call icepack_query_tracer_indices(nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) @@ -810,14 +791,14 @@ subroutine read_restart_hbrine() !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi do n = 1, ncat if (first_ice_real(i,j,n,iblk) >= p5) then first_ice (i,j,n,iblk) = .true. @@ -825,7 +806,7 @@ subroutine read_restart_hbrine() first_ice (i,j,n,iblk) = .false. endif enddo ! ncat - enddo ! i + enddo ! i enddo ! j enddo ! 
iblk !$OMP END PARALLEL DO @@ -843,7 +824,6 @@ subroutine write_restart_hbrine() use ice_blocks, only: block, get_block use ice_domain, only: nblocks, blocks_ice use ice_fileunits, only: nu_dump_hbrine - use ice_grid, only: tmask use ice_state, only: trcrn use ice_restart,only: write_restart_field @@ -871,17 +851,16 @@ subroutine write_restart_hbrine() !$OMP PARALLEL DO PRIVATE(iblk,i,j,n,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi - do i = ilo, ihi + do i = ilo, ihi do n = 1, ncat - ! zero out first_ice over land - if (tmask(i,j,iblk) .and. first_ice (i,j,n,iblk)) then + if (first_ice (i,j,n,iblk)) then first_ice_real(i,j,n,iblk) = c1 else first_ice_real(i,j,n,iblk) = c0 @@ -907,6 +886,7 @@ end subroutine write_restart_hbrine subroutine write_restart_bgc() + use ice_arrays_column, only: Rayleigh_criteria, Rayleigh_real use ice_blocks, only: block, get_block use ice_domain, only: nblocks, blocks_ice use ice_domain_size, only: ncat, n_algae, n_doc, n_dic, & @@ -914,9 +894,8 @@ subroutine write_restart_bgc() use ice_fileunits, only: nu_dump_bgc use ice_flux_bgc, only: nit, amm, sil, dmsp, dms, algalN, & doc, don, dic, fed, fep, zaeros, hum - use ice_grid, only: tmask use ice_state, only: trcrn - use ice_flux, only: sss + use ice_flux, only: sss use ice_restart, only: write_restart_field ! local variables @@ -930,45 +909,45 @@ subroutine write_restart_bgc() character (len=3) :: nchar, ncharb - integer (kind=int_kind) :: nt_bgc_Am, & + integer (kind=int_kind) :: nt_bgc_S, nt_bgc_Am, & nt_bgc_DMS, nt_bgc_DMSPd, & nt_bgc_DMSPp, nt_bgc_Nit, nt_bgc_Sil, & nt_bgc_PON, nt_zbgc_frac, nt_bgc_hum, nbtrcr - integer (kind=int_kind), dimension(icepack_max_algae) :: & - nt_bgc_N , & ! diatoms, phaeocystis, pico/small - nt_bgc_C , & ! diatoms, phaeocystis, pico/small - nt_bgc_chl ! diatoms, phaeocystis, pico/small + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nt_bgc_N , & ! diatoms, phaeocystis, pico/small + nt_bgc_C , & ! diatoms, phaeocystis, pico/small + nt_bgc_chl ! diatoms, phaeocystis, pico/small - integer (kind=int_kind), dimension(icepack_max_doc) :: & + integer (kind=int_kind), dimension(icepack_max_doc) :: & nt_bgc_DOC ! dissolved organic carbon - integer (kind=int_kind), dimension(icepack_max_don) :: & + integer (kind=int_kind), dimension(icepack_max_don) :: & nt_bgc_DON ! dissolved organic nitrogen - integer (kind=int_kind), dimension(icepack_max_dic) :: & + integer (kind=int_kind), dimension(icepack_max_dic) :: & nt_bgc_DIC ! dissolved inorganic carbon - integer (kind=int_kind), dimension(icepack_max_fe) :: & + integer (kind=int_kind), dimension(icepack_max_fe) :: & nt_bgc_Fed, & ! dissolved iron nt_bgc_Fep ! particulate iron - integer (kind=int_kind), dimension(icepack_max_aero) :: & + integer (kind=int_kind), dimension(icepack_max_aero) :: & nt_zaero ! black carbon and other aerosols - + logical (kind=log_kind) :: tr_bgc_Nit, tr_bgc_Am, tr_bgc_Sil,& tr_bgc_DMS, tr_bgc_PON, tr_bgc_N, tr_bgc_C, & tr_bgc_DON, tr_bgc_Fe, tr_zaero , tr_bgc_chl, & tr_bgc_hum - logical (kind=log_kind) :: skl_bgc + logical (kind=log_kind) :: skl_bgc, solve_zsal type (block) :: & this_block ! 
block information for current block character(len=*),parameter :: subname='(write_restart_bgc)' - call icepack_query_parameters(skl_bgc_out=skl_bgc) + call icepack_query_parameters(skl_bgc_out=skl_bgc, solve_zsal_out=solve_zsal) call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) call icepack_query_tracer_flags(tr_bgc_Nit_out=tr_bgc_Nit, & tr_bgc_Am_out=tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & @@ -976,7 +955,7 @@ subroutine write_restart_bgc() tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, & tr_bgc_DON_out=tr_bgc_DON, tr_bgc_Fe_out=tr_bgc_Fe, tr_zaero_out=tr_zaero, & tr_bgc_chl_out=tr_bgc_chl, tr_bgc_hum_out=tr_bgc_hum) - call icepack_query_tracer_indices(nt_bgc_Am_out=nt_bgc_Am, & + call icepack_query_tracer_indices(nt_bgc_S_out=nt_bgc_S, nt_bgc_Am_out=nt_bgc_Am, & nt_bgc_DMS_out=nt_bgc_DMS, nt_bgc_DMSPd_out=nt_bgc_DMSPd, & nt_bgc_C_out=nt_bgc_C, nt_bgc_chl_out=nt_bgc_chl, & nt_bgc_DMSPp_out=nt_bgc_DMSPp, nt_bgc_Nit_out=nt_bgc_Nit, & @@ -991,37 +970,41 @@ subroutine write_restart_bgc() diag = .true. !----------------------------------------------------------------- - ! Zero out tracers over land + ! Salinity and extras !----------------------------------------------------------------- + if (solve_zsal) then + + do k = 1,nblyr + write(nchar,'(i3.3)') k + call write_restart_field(nu_dump_bgc,0,trcrn(:,:,nt_bgc_S+k-1,:,:),'ruf8', & + 'zSalinity'//trim(nchar),ncat,diag) + enddo + + call write_restart_field(nu_dump_bgc,0,sss,'ruf8','sss',1,diag) !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) + this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi do j = jlo, jhi do i = ilo, ihi - if (.not. tmask(i,j,iblk)) then - if (tr_bgc_N ) algalN(i,j,:,iblk) = c0 - if (tr_bgc_C ) doc (i,j,:,iblk) = c0 - if (tr_bgc_C ) dic (i,j,:,iblk) = c0 - if (tr_bgc_Nit) nit (i,j ,iblk) = c0 - if (tr_bgc_Am ) amm (i,j ,iblk) = c0 - if (tr_bgc_Sil) sil (i,j ,iblk) = c0 - if (tr_bgc_hum) hum (i,j ,iblk) = c0 - if (tr_bgc_DMS) dms (i,j ,iblk) = c0 - if (tr_bgc_DMS) dmsp (i,j ,iblk) = c0 - if (tr_bgc_DON) don (i,j,:,iblk) = c0 - if (tr_bgc_Fe ) fed (i,j,:,iblk) = c0 - if (tr_bgc_Fe ) fep (i,j,:,iblk) = c0 + if (Rayleigh_criteria(i,j,iblk)) then + Rayleigh_real (i,j,iblk) = c1 + elseif (.NOT. Rayleigh_criteria(i,j,iblk)) then + Rayleigh_real (i,j,iblk) = c0 endif enddo enddo enddo !$OMP END PARALLEL DO + call write_restart_field(nu_dump_bgc,0,Rayleigh_real,'ruf8','Rayleigh',1,diag) + + endif ! solve_zsal + !----------------------------------------------------------------- ! Skeletal layer BGC !----------------------------------------------------------------- @@ -1075,7 +1058,7 @@ subroutine write_restart_bgc() if (tr_bgc_PON) & call write_restart_field(nu_dump_bgc,0,trcrn(:,:,nt_bgc_PON,:,:), & 'ruf8','bgc_PON',ncat,diag) - + if (tr_bgc_DON) then do k = 1, n_don write(nchar,'(i3.3)') k @@ -1084,19 +1067,19 @@ subroutine write_restart_bgc() enddo endif if (tr_bgc_Fe ) then - do k = 1, n_fed + do k = 1, n_fed write(nchar,'(i3.3)') k call write_restart_field(nu_dump_bgc,0,trcrn(:,:,nt_bgc_Fed (k),:,:), & 'ruf8','bgc_Fed'//trim(nchar),ncat,diag) enddo - do k = 1, n_fep + do k = 1, n_fep write(nchar,'(i3.3)') k call write_restart_field(nu_dump_bgc,0,trcrn(:,:,nt_bgc_Fep (k),:,:), & 'ruf8','bgc_Fep'//trim(nchar),ncat,diag) enddo endif - else + else !----------------------------------------------------------------- ! 
Z layer BGC @@ -1267,7 +1250,7 @@ subroutine write_restart_bgc() write(nchar,'(i3.3)') k call write_restart_field(nu_dump_bgc,0,dic(:,:,k,:),'ruf8','dic'//trim(nchar),1,diag) enddo !k - endif + endif if (tr_bgc_Nit) & call write_restart_field(nu_dump_bgc,0,nit, 'ruf8','nit', 1,diag) if (tr_bgc_Am) & @@ -1313,13 +1296,14 @@ end subroutine write_restart_bgc subroutine read_restart_bgc() + use ice_arrays_column, only: Rayleigh_real, Rayleigh_criteria use ice_blocks, only: block, get_block use ice_communicate, only: my_task, master_task use ice_domain, only: nblocks, blocks_ice use ice_domain_size, only: ncat, n_algae, n_doc, n_dic,& n_don, n_zaero, n_fed, n_fep use ice_fileunits, only: nu_restart_bgc - use ice_flux, only: sss + use ice_flux, only: sss use ice_flux_bgc, only: nit, amm, sil, dmsp, dms, algalN, & doc, don, dic, fed, fep, zaeros, hum use ice_state, only: trcrn @@ -1337,44 +1321,44 @@ subroutine read_restart_bgc() logical (kind=log_kind) :: diag - integer (kind=int_kind) :: nt_bgc_Am, & + integer (kind=int_kind) :: nt_bgc_S, nt_bgc_Am, & nt_bgc_DMS, nt_bgc_DMSPd, & nt_bgc_DMSPp, nt_bgc_Nit, nt_bgc_Sil, & nt_bgc_PON, nt_zbgc_frac, nt_bgc_hum, nbtrcr - integer (kind=int_kind), dimension(icepack_max_algae) :: & - nt_bgc_N , & ! diatoms, phaeocystis, pico/small - nt_bgc_C , & ! diatoms, phaeocystis, pico/small - nt_bgc_chl ! diatoms, phaeocystis, pico/small + integer (kind=int_kind), dimension(icepack_max_algae) :: & + nt_bgc_N , & ! diatoms, phaeocystis, pico/small + nt_bgc_C , & ! diatoms, phaeocystis, pico/small + nt_bgc_chl ! diatoms, phaeocystis, pico/small - integer (kind=int_kind), dimension(icepack_max_doc) :: & + integer (kind=int_kind), dimension(icepack_max_doc) :: & nt_bgc_DOC ! dissolved organic carbon - integer (kind=int_kind), dimension(icepack_max_don) :: & + integer (kind=int_kind), dimension(icepack_max_don) :: & nt_bgc_DON ! dissolved organic nitrogen - integer (kind=int_kind), dimension(icepack_max_dic) :: & + integer (kind=int_kind), dimension(icepack_max_dic) :: & nt_bgc_DIC ! dissolved inorganic carbon - integer (kind=int_kind), dimension(icepack_max_fe) :: & + integer (kind=int_kind), dimension(icepack_max_fe) :: & nt_bgc_Fed, & ! dissolved iron nt_bgc_Fep ! particulate iron - integer (kind=int_kind), dimension(icepack_max_aero) :: & + integer (kind=int_kind), dimension(icepack_max_aero) :: & nt_zaero ! 
black carbon and other aerosols - + logical (kind=log_kind) :: tr_bgc_Nit, tr_bgc_Am, tr_bgc_Sil,& tr_bgc_DMS, tr_bgc_PON, tr_bgc_N, tr_bgc_C, & tr_bgc_DON, tr_bgc_Fe, tr_zaero , tr_bgc_chl, & tr_bgc_hum - logical (kind=log_kind) :: skl_bgc + logical (kind=log_kind) :: skl_bgc, solve_zsal character (len=3) :: nchar, ncharb character(len=*),parameter :: subname='(read_restart_bgc)' - call icepack_query_parameters(skl_bgc_out=skl_bgc) + call icepack_query_parameters(skl_bgc_out=skl_bgc, solve_zsal_out=solve_zsal) call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr) call icepack_query_tracer_flags(tr_bgc_Nit_out=tr_bgc_Nit, & tr_bgc_Am_out=tr_bgc_Am, tr_bgc_Sil_out=tr_bgc_Sil, & @@ -1382,7 +1366,7 @@ subroutine read_restart_bgc() tr_bgc_N_out=tr_bgc_N, tr_bgc_C_out=tr_bgc_C, & tr_bgc_DON_out=tr_bgc_DON, tr_bgc_Fe_out=tr_bgc_Fe, tr_zaero_out=tr_zaero, & tr_bgc_chl_out=tr_bgc_chl, tr_bgc_hum_out=tr_bgc_hum) - call icepack_query_tracer_indices(nt_bgc_Am_out=nt_bgc_Am, & + call icepack_query_tracer_indices(nt_bgc_S_out=nt_bgc_S, nt_bgc_Am_out=nt_bgc_Am, & nt_bgc_DMS_out=nt_bgc_DMS, nt_bgc_DMSPd_out=nt_bgc_DMSPd, & nt_bgc_C_out=nt_bgc_C, nt_bgc_chl_out=nt_bgc_chl, & nt_bgc_DMSPp_out=nt_bgc_DMSPp, nt_bgc_Nit_out=nt_bgc_Nit, & @@ -1396,6 +1380,44 @@ subroutine read_restart_bgc() diag = .true. + !----------------------------------------------------------------- + ! Salinity and extras + !----------------------------------------------------------------- + + if (restart_zsal) then + + if (my_task == master_task) write(nu_diag,*) subname,'zSalinity restart' + do k = 1,nblyr + write(nchar,'(i3.3)') k + call read_restart_field(nu_restart_bgc,0,trcrn(:,:,nt_bgc_S+k-1,:,:),'ruf8', & + 'zSalinity'//trim(nchar),ncat,diag,field_loc_center,field_type_scalar) + enddo + + if (my_task == master_task) write(nu_diag,*) subname,'sea surface salinity' + call read_restart_field(nu_restart_bgc,0,sss,'ruf8','sss',1,diag) + call read_restart_field(nu_restart_bgc,0,Rayleigh_real,'ruf8','Rayleigh',1,diag) + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + if (Rayleigh_real (i,j,iblk) .GE. c1) then + Rayleigh_criteria (i,j,iblk) = .true. + elseif (Rayleigh_real (i,j,iblk) < c1) then + Rayleigh_criteria (i,j,iblk) = .false. + endif + enddo + enddo + enddo ! iblk + !$OMP END PARALLEL DO + endif ! restart_zsal + !----------------------------------------------------------------- ! Skeletal Layer BGC !----------------------------------------------------------------- @@ -1466,13 +1488,13 @@ subroutine read_restart_bgc() enddo endif if (tr_bgc_Fe) then - do k = 1, n_fed + do k = 1, n_fed write(nchar,'(i3.3)') k call read_restart_field(nu_restart_bgc,0, & trcrn(:,:,nt_bgc_Fed (k),:,:), & 'ruf8','bgc_Fed'//trim(nchar),ncat,diag) enddo - do k = 1, n_fep + do k = 1, n_fep write(nchar,'(i3.3)') k call read_restart_field(nu_restart_bgc,0, & trcrn(:,:,nt_bgc_Fep (k),:,:), & @@ -1710,7 +1732,7 @@ subroutine read_restart_bgc() enddo !k endif endif ! 
restart_bgc - + end subroutine read_restart_bgc !======================================================================= diff --git a/cicecore/version.txt b/cicecore/version.txt index c908e44d9..83a606cb9 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.5.0 +CICE 6.1.3 diff --git a/configuration/scripts/Makefile b/configuration/scripts/Makefile index d109472b0..7b39d5c8d 100644 --- a/configuration/scripts/Makefile +++ b/configuration/scripts/Makefile @@ -73,8 +73,9 @@ RM := rm AR := ar .SUFFIXES: +.SUFFIXES: .F90 .F .c .o -.PHONY: all cice libcice targets target db_files db_flags clean realclean helloworld calchk sumchk bcstchk gridavgchk halochk optargs opticep +.PHONY: all cice libcice targets target db_files db_flags clean realclean all: $(EXEC) cice: $(EXEC) @@ -91,9 +92,7 @@ cice: $(EXEC) targets: @echo " " - @echo "Supported Makefile Targets are: cice, libcice, makdep, depends, clean, realclean" - @echo " Diagnostics: targets, db_files, db_flags" - @echo " Unit Tests : helloworld, calchk, sumchk, bcstchk, gridavgchk, halochk, optargs, opticep" + @echo "Supported Makefile Targets are: cice, libcice, makdep, depends, clean, realclean, targets, db_files, db_flags" target: targets db_files: @@ -135,36 +134,6 @@ $(DEPGEN): $(OBJS_DEPGEN) @ echo "Building makdep" $(SCC) -o $@ $(CFLAGS_HOST) $< -#------------------------------------------------------------------------------- -# unit tests -#------------------------------------------------------------------------------- - -# this builds all dependent source code automatically even though only a subset might actually be used -# this is no different than the cice target and in fact the binary is called cice -# it exists just to create separation as needed for unit tests - -calchk: $(EXEC) - -sumchk: $(EXEC) - -bcstchk: $(EXEC) - -gridavgchk: $(EXEC) - -halochk: $(EXEC) - -opticep: $(EXEC) - -# this builds just a subset of source code specified explicitly and requires a separate target - -HWOBJS := helloworld.o -helloworld: $(HWOBJS) - $(LD) -o $(EXEC) $(LDFLAGS) $(HWOBJS) $(ULIBS) $(SLIBS) - -OAOBJS := optargs.o optargs_subs.o -optargs: $(OAOBJS) - $(LD) -o $(EXEC) $(LDFLAGS) $(OAOBJS) $(ULIBS) $(SLIBS) - #------------------------------------------------------------------------------- # build rules: MACFILE, cmd-line, or env vars must provide the needed macros #------------------------------------------------------------------------------- @@ -176,13 +145,13 @@ libcice: $(OBJS) @ echo "$(AR) -r $(EXEC) $(OBJS)" $(AR) -r $(EXEC) $(OBJS) -%.o : %.c +.c.o: $(CC) $(CFLAGS) $(CPPDEFS) $(INCLDIR) $< -%.o : %.F %.d +.F.o: $(FC) -c $(FFLAGS) $(FIXEDFLAGS) $(CPPDEFS) $(INCLDIR) $< -%.o : %.F90 %.d +.F90.o: $(FC) -c $(FFLAGS) $(FREEFLAGS) $(CPPDEFS) $(MODDIR) $(INCLDIR) $< clean: diff --git a/configuration/scripts/cice.build b/configuration/scripts/cice.build index 33411158b..b9aed44fe 100755 --- a/configuration/scripts/cice.build +++ b/configuration/scripts/cice.build @@ -124,19 +124,16 @@ else if (${ICE_IOTYPE} =~ pio*) then else set IODIR = io_binary endif -if (${ICE_SNICARHC} == 'false') then - setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DNO_SNICARHC" -endif ### List of source code directories (in order of importance). cat >! 
Filepath << EOF ${ICE_SANDBOX}/cicecore/drivers/${ICE_DRVOPT} -${ICE_SANDBOX}/cicecore/cicedyn/dynamics -${ICE_SANDBOX}/cicecore/cicedyn/general -${ICE_SANDBOX}/cicecore/cicedyn/analysis -${ICE_SANDBOX}/cicecore/cicedyn/infrastructure -${ICE_SANDBOX}/cicecore/cicedyn/infrastructure/io/$IODIR -${ICE_SANDBOX}/cicecore/cicedyn/infrastructure/comm/${ICE_COMMDIR} +${ICE_SANDBOX}/cicecore/cicedynB/dynamics +${ICE_SANDBOX}/cicecore/cicedynB/general +${ICE_SANDBOX}/cicecore/cicedynB/analysis +${ICE_SANDBOX}/cicecore/cicedynB/infrastructure +${ICE_SANDBOX}/cicecore/cicedynB/infrastructure/io/$IODIR +${ICE_SANDBOX}/cicecore/cicedynB/infrastructure/comm/${ICE_COMMDIR} ${ICE_SANDBOX}/cicecore/shared ${ICE_SANDBOX}/icepack/columnphysics EOF @@ -145,10 +142,6 @@ if !($?ICE_MACHINE_BLDTHRDS) then set ICE_MACHINE_BLDTHRDS = 1 endif -if (${directmake} == 0) then - set target = ${ICE_TARGET} -endif - if (${directmake} == 1) then echo "make ${target}" ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ @@ -192,12 +185,12 @@ if (${quiet} == "true") then echo " quiet mode on... patience" ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ - DEPFILE=${ICE_CASEDIR}/makdep.c ${target} >& ${ICE_BLDLOG_FILE} + DEPFILE=${ICE_CASEDIR}/makdep.c cice >& ${ICE_BLDLOG_FILE} set bldstat = ${status} else ${ICE_MACHINE_MAKE} -j ${ICE_MACHINE_BLDTHRDS} VPFILE=Filepath EXEC=${ICE_RUNDIR}/cice \ -f ${ICE_CASEDIR}/Makefile MACFILE=${ICE_CASEDIR}/Macros.${ICE_MACHCOMP} \ - DEPFILE=${ICE_CASEDIR}/makdep.c ${target} |& tee ${ICE_BLDLOG_FILE} + DEPFILE=${ICE_CASEDIR}/makdep.c cice |& tee ${ICE_BLDLOG_FILE} set bldstat = ${status} endif diff --git a/configuration/scripts/cice.settings b/configuration/scripts/cice.settings old mode 100644 new mode 100755 index ee4709940..7d9bce65c --- a/configuration/scripts/cice.settings +++ b/configuration/scripts/cice.settings @@ -1,9 +1,9 @@ -#!/bin/csh -f +#! 
/bin/csh -f setenv ICE_CASENAME undefined setenv ICE_SANDBOX undefined setenv ICE_MACHINE undefined -setenv ICE_ENVNAME undefined +setenv ICE_COMPILER undefined setenv ICE_MACHCOMP undefined setenv ICE_SCRIPTS undefined setenv ICE_CASEDIR undefined @@ -13,15 +13,12 @@ setenv ICE_RSTDIR ${ICE_RUNDIR}/restart setenv ICE_HSTDIR ${ICE_RUNDIR}/history setenv ICE_LOGDIR ${ICE_CASEDIR}/logs setenv ICE_DRVOPT standalone/cice -setenv ICE_TARGET cice -setenv ICE_IOTYPE netcdf # binary, netcdf, pio1, pio2 +setenv ICE_IOTYPE netcdf # set to none if netcdf library is unavailable setenv ICE_CLEANBUILD true -setenv ICE_CPPDEFS "" setenv ICE_QUIETMODE false setenv ICE_GRID undefined setenv ICE_NTASKS undefined setenv ICE_NTHRDS undefined -setenv ICE_OMPSCHED "static,1" setenv ICE_TEST undefined setenv ICE_TESTNAME undefined setenv ICE_TESTID undefined @@ -29,10 +26,8 @@ setenv ICE_BASELINE undefined setenv ICE_BASEGEN undefined setenv ICE_BASECOM undefined setenv ICE_BFBCOMP undefined -setenv ICE_BFBTYPE restart setenv ICE_SPVAL undefined setenv ICE_RUNLENGTH -1 -setenv ICE_MEMUSE -1 setenv ICE_ACCOUNT undefined setenv ICE_QUEUE undefined @@ -44,7 +39,6 @@ setenv ICE_COMMDIR mpi if (${ICE_NTASKS} == 1) setenv ICE_COMMDIR serial ### Specialty code -setenv ICE_SNICARHC false # compile with big hardcoded snicar table setenv ICE_BLDDEBUG false # build debug flags -setenv ICE_COVERAGE false # build coverage flags +setenv ICE_CODECOV false # build debug flags diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.clean b/configuration/scripts/forapps/ufs/comp_ice.backend.clean new file mode 100755 index 000000000..d75d381b4 --- /dev/null +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.clean @@ -0,0 +1,46 @@ +#! /bin/csh -f + +### Expect to find the following environment variables set on entry: +# MACHINE_ID +# SYSTEM_USERDIR +# SRCDIR +# EXEDIR + +setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR + +if (${MACHINE_ID} =~ cheyenne*) then + setenv ARCH cheyenne_intel +else if (${MACHINE_ID} =~ orion*) then + setenv ARCH orion_intel +else if (${MACHINE_ID} =~ hera*) then + setenv ARCH hera_intel +else if (${MACHINE_ID} =~ wcoss*) then + setenv ARCH wcoss_dell_p3_intel +else if (${MACHINE_ID} =~ stampede*) then + setenv ARCH stampede_intel +else + echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" + exit -2 +endif + +echo "CICE6 ${0}: ARCH = $ARCH" + +cd $OBJDIR + +setenv MAKENAME gmake +setenv MAKETHRDS 1 +setenv MAKEFILE ${SRCDIR}/configuration/scripts/Makefile +setenv MACROSFILE ${SRCDIR}/configuration/scripts/machines/Macros.$ARCH + +echo "CICE6 ${0}: EXEDIR = ${EXEDIR}" +echo "CICE6 ${0}: OBJDIR = ${OBJDIR}" +echo "CICE6 ${0}: MAKEFILE = ${MAKEFILE}" +echo "CICE6 ${0}: MACROSFILE = ${MACROSFILE}" +echo "CICE6 ${0}: ESMFMKFILE = ${ESMFMKFILE}" + +#clean +${MAKENAME} EXEC=${OBJDIR}/libcice6.a \ + -f ${MAKEFILE} MACFILE=${MACROSFILE} clean + +#clean install +rm -r -f ${BINDIR} diff --git a/configuration/scripts/forapps/ufs/comp_ice.backend.libcice b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice new file mode 100755 index 000000000..47985bef2 --- /dev/null +++ b/configuration/scripts/forapps/ufs/comp_ice.backend.libcice @@ -0,0 +1,149 @@ +#! 
/bin/csh -f + +### Expect to find the following environment variables set on entry: +# MACHINE_ID +# SYSTEM_USERDIR +# SRCDIR +# EXEDIR + +### local variable that begin with ICE_ are needed in the Macros file +# ICE_COMMDIR +# ICE_BLDDEBUG +# ICE_THREADED +# ICE_CPPDEFS + +setenv OBJDIR $EXEDIR/compile ; if !(-d $OBJDIR) mkdir -p $OBJDIR + +setenv THRD no # set to yes for OpenMP threading + +if (${MACHINE_ID} =~ cheyenne*) then + setenv ARCH cheyenne_intel +else if (${MACHINE_ID} =~ orion*) then + setenv ARCH orion_intel +else if (${MACHINE_ID} =~ hera*) then + setenv ARCH hera_intel +else if (${MACHINE_ID} =~ wcoss*) then + setenv ARCH wcoss_dell_p3_intel +else if (${MACHINE_ID} =~ stampede*) then + setenv ARCH stampede_intel +else + echo "CICE6 ${0}: ERROR in ARCH setup, ${hname}" + exit -2 +endif + +echo "CICE6 ${0}: ARCH = $ARCH" + +cd $OBJDIR + +setenv SHRDIR csm_share # location of CCSM shared code +setenv DRVDIR nuopc/cmeps + +#if ($NTASK == 1) then +# setenv ICE_COMMDIR serial +#else + setenv ICE_COMMDIR mpi +#endif + +if ($THRD == 'yes') then + setenv ICE_THREADED true +else + setenv ICE_THREADED false +endif + +if ($?ICE_CPPDEFS) then + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -Dcoupled" +else + setenv ICE_CPPDEFS "-Dcoupled" +endif + +if !($?IO_TYPE) then + setenv IO_TYPE netcdf4 # set to none if netcdf library is unavailable +endif +if ($IO_TYPE == 'netcdf3' || $IO_TYPE == 'netcdf4') then + setenv IODIR io_netcdf + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" +else if ($IO_TYPE == 'pio') then + setenv IODIR io_pio + setenv ICE_CPPDEFS "${ICE_CPPDEFS} -DUSE_NETCDF" +else + setenv IODIR io_binary +endif + +# Build in debug mode. If DEBUG=Y, enable DEBUG compilation. This +# flag is set in ${ROOTDIR}/coupledFV3_MOM6_CICE_debug.appBuilder file. +if (! $?DEBUG) then + setenv ICE_BLDDEBUG false +else + if ($DEBUG == "Y") then + setenv ICE_BLDDEBUG true + else + setenv ICE_BLDDEBUG false + endif +endif +echo "CICE6 ${0}: DEBUG = ${ICE_BLDDEBUG}" + +### List of source code directories (in order of importance). +cat >! 
Filepath << EOF +${SRCDIR}/cicecore/drivers/${DRVDIR} +${SRCDIR}/cicecore/cicedynB/dynamics +${SRCDIR}/cicecore/cicedynB/general +${SRCDIR}/cicecore/cicedynB/analysis +${SRCDIR}/cicecore/cicedynB/infrastructure +${SRCDIR}/cicecore/cicedynB/infrastructure/io/${IODIR} +${SRCDIR}/cicecore/cicedynB/infrastructure/comm/${ICE_COMMDIR} +${SRCDIR}/cicecore/shared +${SRCDIR}/icepack/columnphysics +${SRCDIR}/$SHRDIR +EOF + +setenv MAKENAME gmake +setenv MAKETHRDS 1 +setenv MAKEFILE ${SRCDIR}/configuration/scripts/Makefile +setenv MACROSFILE ${SRCDIR}/configuration/scripts/machines/Macros.$ARCH +setenv DEPFILE ${SRCDIR}/configuration/scripts/makdep.c + +echo "CICE6 ${0}: EXEDIR = ${EXEDIR}" +echo "CICE6 ${0}: OBJDIR = ${OBJDIR}" +echo "CICE6 ${0}: MAKEFILE = ${MAKEFILE}" +echo "CICE6 ${0}: MACROSFILE = ${MACROSFILE}" +echo "CICE6 ${0}: DEPFILE = ${DEPFILE}" +echo "CICE6 ${0}: ESMFMKFILE = ${ESMFMKFILE}" + +#diagnostics +#${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/cice \ +# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} db_files +#${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/cice \ +# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} db_flags + +#clean +#${MAKENAME} VPFILE=Filepath EXEC=${OBJDIR}/cice \ +# -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} clean + +#needed to trigger a failed build to rest of system +rm ${BINDIR}/cice6.mk + +#build lib (includes dependencies) +${MAKENAME} -j ${MAKETHRDS} VPFILE=Filepath EXEC=${OBJDIR}/libcice6.a \ + -f ${MAKEFILE} MACFILE=${MACROSFILE} DEPFILE=${DEPFILE} libcice + +if ($status != 0) then + echo "CICE6 ${0}: gmake failed, exiting" + exit -2 +endif + +#install +mkdir -p ${BINDIR} +cp -f ${OBJDIR}/libcice6.a ${BINDIR}/ +cp -f ${OBJDIR}/ice_comp_nuopc.mod ${BINDIR}/ +cp -f ${OBJDIR}/ice_timers.mod ${BINDIR}/ + +cat >! ${BINDIR}/cice6.mk << EOF +# ESMF self-describing build dependency makefile fragment + +ESMF_DEP_FRONT = ice_comp_nuopc +ESMF_DEP_INCPATH = ${BINDIR} +ESMF_DEP_CMPL_OBJS = +ESMF_DEP_LINK_OBJS = ${BINDIR}/libcice6.a + +EOF + diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 63a97d7d8..eb96db66f 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -1,72 +1,42 @@ &setup_nml days_per_year = 365 - use_leap_years = .true. - year_init = 2005 - month_init = 1 - day_init = 1 - sec_init = 0 + use_leap_years = .false. + year_init = 1997 istep0 = 0 dt = 3600.0 - npt_unit = '1' npt = 24 ndtd = 1 runtype = 'initial' ice_ic = './restart/iced_gx3_v5.nc' + restart = .true. restart_ext = .false. - use_restart_time = .false. - restart_format = 'default' - restart_rearranger = 'default' - restart_iotasks = -99 - restart_root = -99 - restart_stride = -99 - restart_deflate = 0 - restart_chunksize = 0, 0 + use_restart_time = .true. + restart_format = 'nc' lcdf64 = .false. numin = 21 numax = 89 restart_dir = './restart/' restart_file = 'iced' pointer_file = './ice.restart_file' - dumpfreq = 'd','x','x','x','x' - dumpfreq_n = 1 , 1 , 1 , 1 , 1 - dumpfreq_base = 'init','init','init','init','init' + dumpfreq = 'd' + dumpfreq_n = 1 dump_last = .false. bfbflag = 'off' diagfreq = 24 diag_type = 'stdout' diag_file = 'ice_diag.d' - debug_model = .false. - debug_model_step = 0 - debug_model_i = -1 - debug_model_j = -1 - debug_model_iblk = -1 - debug_model_task = -1 - debug_forcing = .false. print_global = .true. print_points = .true. - timer_stats = .false. - memory_stats = .false. - conserv_check = .false. latpnt(1) = 90. lonpnt(1) = 0. latpnt(2) = -65. 
lonpnt(2) = -45. + dbug = .false. histfreq = 'm','x','x','x','x' histfreq_n = 1 , 1 , 1 , 1 , 1 - histfreq_base = 'zero','zero','zero','zero','zero' - hist_avg = .true.,.true.,.true.,.true.,.true. - hist_suffix = 'x','x','x','x','x' + hist_avg = .true. history_dir = './history/' history_file = 'iceh' - history_precision = 4 - history_format = 'default' - history_rearranger = 'default' - history_iotasks = -99 - history_root = -99 - history_stride = -99 - history_deflate = 0 - history_chunksize = 0, 0 - hist_time_axis = 'end' write_ic = .true. incond_dir = './history/' incond_file = 'iceh_ic' @@ -76,11 +46,7 @@ &grid_nml grid_format = 'bin' grid_type = 'displaced_pole' - grid_ice = 'B' - grid_atm = 'A' - grid_ocn = 'A' grid_file = 'grid' - kmt_type = 'file' kmt_file = 'kmt' bathymetry_file = 'unknown_bathymetry_file' bathymetry_format = 'default' @@ -89,17 +55,12 @@ kcatbound = 0 dxrect = 30.e5 dyrect = 30.e5 - lonrefrect = -156.50d0 - latrefrect = 71.35d0 - scale_dxdy = .false. - dxscale = 1.d0 - dyscale = 1.d0 close_boundaries = .false. ncat = 5 nfsd = 1 nilyr = 7 nslyr = 1 - nblyr = 1 + nblyr = 7 orca_halogrid = .false. / @@ -119,12 +80,12 @@ restart_FY = .false. tr_lvl = .true. restart_lvl = .false. + tr_pond_cesm = .false. + restart_pond_cesm = .false. tr_pond_topo = .false. restart_pond_topo = .false. tr_pond_lvl = .true. restart_pond_lvl = .false. - tr_snow = .false. - restart_snow = .false. tr_iso = .false. restart_iso = .false. tr_aero = .false. @@ -138,23 +99,22 @@ ktherm = 2 conduct = 'bubbly' ksno = 0.3d0 - hi_min = 0.01d0 a_rapid_mode = 0.5e-3 Rac_rapid_mode = 10.0 aspect_rapid_mode = 1.0 dSdt_slow_mode = -5.0e-8 phi_c_slow_mode = 0.05 phi_i_mushy = 0.85 - Tliquidus_max = -0.1d0 - hfrazilmin = 0.05d0 - floediam = 300.0d0 + sw_redist = .false. + sw_frac = 0.9d0 + sw_dtemp = 0.02d0 / &dynamics_nml kdyn = 1 ndte = 240 revised_evp = .false. - evp_algorithm = 'standard_2d' + kevp_kernel = 0 brlx = 300.0 arlx = 300.0 advection = 'remap' @@ -162,20 +122,11 @@ krdg_partic = 1 krdg_redist = 1 mu_rdg = 3 - Pstar = 2.75e4 - Cstar = 20 Cf = 17. Ktens = 0. - e_yieldcurve = 2. - e_plasticpot = 2. - visc_method = 'avg_zeta' - elasticDamp = 0.36d0 - deltaminEVP = 1e-11 - deltaminVP = 2e-9 - capping_method = 'max' - seabed_stress = .false. - seabed_stress_method = 'LKD' - k1 = 7.5 + e_ratio = 2. + basalstress = .false. + k1 = 8. k2 = 15. alphab = 20. threshold_hw = 30. @@ -183,26 +134,10 @@ kridge = 1 ktransport = 1 ssh_stress = 'geostrophic' - maxits_nonlin = 10 - precond = 'pgmres' - dim_fgmres = 50 - dim_pgmres = 5 - maxits_fgmres = 1 - maxits_pgmres = 1 - monitor_nonlin = .false. - monitor_fgmres = .false. - monitor_pgmres = .false. - ortho_type = 'mgs' - reltol_nonlin = 1e-8 - reltol_fgmres = 1e-1 - reltol_pgmres = 1e-6 - algo_nonlin = 'picard' - use_mean_vrel = .true. / &shortwave_nml shortwave = 'dEdd' - snw_ssp_table = 'test' albedo_type = 'ccsm3' albicev = 0.78 albicei = 0.36 @@ -215,9 +150,6 @@ dT_mlt = 1.5 rsnw_mlt = 1500. kalg = 0.6 - sw_redist = .false. - sw_frac = 0.9d0 - sw_dtemp = 0.02d0 / &ponds_nml @@ -231,48 +163,20 @@ pndaspect = 0.8 / -&snow_nml - snwredist = 'none' - snwgrain = .false. - use_smliq_pnd = .false. 
- rsnw_fall = 100.0 - rsnw_tmax = 1500.0 - rhosnew = 100.0 - rhosmin = 100.0 - rhosmax = 450.0 - windmin = 10.0 - drhosdwind = 27.3 - snwlvlfac = 0.3 - snw_aging_table = 'test' - snw_filename = 'unknown' - snw_rhos_fname = 'unknown' - snw_Tgrd_fname = 'unknown' - snw_T_fname = 'unknown' - snw_tau_fname = 'unknown' - snw_kappa_fname = 'unknown' - snw_drdt0_fname = 'unknown' -/ - &forcing_nml formdrag = .false. - atmbndy = 'similarity' - rotate_wind = .true. + atmbndy = 'default' calc_strair = .true. calc_Tsfc = .true. highfreq = .false. natmiter = 5 atmiter_conv = 0.0d0 ustar_min = 0.0005 - iceruf = 0.0005 - calc_dragio = .false. - iceruf_ocn = 0.03 - emissivity = 0.985 + emissivity = 0.95 fbot_xfer_type = 'constant' update_ocn_f = .false. l_mpond_fresh = .false. tfrz_option = 'mushy' - saltflux_option = 'constant' - ice_ref_salinity = 4.0 oceanmixed_ice = .true. wave_spec_type = 'none' wave_spec_file = 'unknown_wave_spec_file' @@ -284,17 +188,14 @@ precip_units = 'mm_per_month' default_season = 'winter' atm_data_type = 'ncar' - atm_data_version = '_undef' ocn_data_type = 'default' bgc_data_type = 'default' fe_data_type = 'default' - ice_data_type = 'latsst' - ice_data_conc = 'parabolic' - ice_data_dist = 'uniform' - fyear_init = 2005 + ice_data_type = 'default' + fyear_init = 1997 ycycle = 1 atm_data_format = 'bin' - atm_data_dir = 'unknown_atm_data_dir' + atm_data_dir = '/glade/u/home/tcraig/cice_data/' bgc_data_dir = 'unknown_bgc_data_dir' ocn_data_format = 'bin' ocn_data_dir = '/unknown_ocn_data_dir' @@ -311,14 +212,12 @@ processor_shape = 'slenderX2' distribution_type = 'cartesian' distribution_wght = 'latitude' - distribution_wght_file = 'unknown' ew_boundary_type = 'cyclic' ns_boundary_type = 'open' maskhalo_dyn = .false. maskhalo_remap = .false. maskhalo_bound = .false. add_mpi_barriers = .false. - debug_blocks = .false. / &zbgc_nml @@ -471,31 +370,14 @@ / &icefields_nml - f_tlon = .true. - f_tlat = .true. - f_ulon = .true. - f_ulat = .true. - f_nlon = .true. - f_nlat = .true. - f_elon = .true. - f_elat = .true. f_tmask = .true. - f_umask = .false. - f_nmask = .false. - f_emask = .false. f_blkmask = .true. f_tarea = .true. f_uarea = .true. - f_narea = .false. - f_earea = .false. f_dxt = .false. f_dyt = .false. f_dxu = .false. f_dyu = .false. - f_dxe = .false. - f_dye = .false. - f_dxn = .false. - f_dyn = .false. f_HTN = .false. f_HTE = .false. f_ANGLE = .true. 
@@ -591,7 +473,6 @@ f_strength = 'm' f_divu = 'm' f_shear = 'm' - f_vort = 'x' f_sig1 = 'm' f_sig2 = 'm' f_sigP = 'm' @@ -621,8 +502,7 @@ f_fcondtopn_ai = 'x' f_fmelttn_ai = 'x' f_flatn_ai = 'x' - f_fsensn_ai = 'x' - f_CMIP = 'x' + f_fsensn_ai = 'x' / &icefields_mechred_nml @@ -661,21 +541,6 @@ f_apeff_ai = 'm' / -&icefields_snow_nml - f_smassicen = 'x' - f_smassliqn = 'x' - f_rhos_cmpn = 'x' - f_rhos_cntn = 'x' - f_rsnwn = 'x' - f_smassice = 'm' - f_smassliq = 'm' - f_rhos_cmp = 'm' - f_rhos_cnt = 'm' - f_rsnw = 'm' - f_meltsliq = 'm' - f_fsloss = 'm' -/ - &icefields_bgc_nml f_fiso_atm = 'x' f_fiso_ocn = 'x' diff --git a/configuration/scripts/machines/Macros.cheyenne_intel b/configuration/scripts/machines/Macros.cheyenne_intel index b1726558d..243295487 100644 --- a/configuration/scripts/machines/Macros.cheyenne_intel +++ b/configuration/scripts/machines/Macros.cheyenne_intel @@ -12,9 +12,7 @@ FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -trace FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) -# FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg - FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -link_mpi=dbg -stand f08 -# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays -link_mpi=dbg + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif @@ -46,7 +44,7 @@ LIB_NETCDF := $(NETCDF_PATH)/lib #LIB_PNETCDF := $(PNETCDF_PATH)/lib LIB_MPI := $(IMPILIBDIR) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff ifeq ($(ICE_THREADED), true) @@ -61,8 +59,8 @@ ifeq ($(ICE_IOTYPE), pio1) endif ifeq ($(ICE_IOTYPE), pio2) - CPPDEFS := $(CPPDEFS) + CPPDEFS := $(CPPDEFS) -DGPTL LIB_PIO := $(PIO_LIBDIR) - SLIBS := $(SLIBS) -L$(LIB_PIO) -lpiof -lpioc + SLIBS := $(SLIBS) -L$(LIB_PIO) -lpiof -lpioc -lgptl endif diff --git a/configuration/scripts/machines/Macros.gaffney_gnu b/configuration/scripts/machines/Macros.gaffney_gnu index 4ae235bc9..0d13560de 100644 --- a/configuration/scripts/machines/Macros.gaffney_gnu +++ b/configuration/scripts/machines/Macros.gaffney_gnu @@ -16,14 +16,14 @@ ifeq ($(ICE_BLDDEBUG), true) CFLAGS += -O0 endif -ifeq ($(ICE_COVERAGE), true) +ifeq ($(ICE_CODECOV), true) FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage CFLAGS += -O0 -g -coverage LDFLAGS += -g -ftest-coverage -fprofile-arcs endif ifneq ($(ICE_BLDDEBUG), true) -ifneq ($(ICE_COVERAGE), true) +ifneq ($(ICE_CODECOV), true) FFLAGS += -O2 CFLAGS += -O2 endif @@ -66,3 +66,10 @@ ifeq ($(ICE_THREADED), true) FFLAGS += -fopenmp endif +### if using parallel I/O, load all 3 libraries. PIO must be first! 
+ifeq ($(ICE_IOTYPE), pio) + PIO_PATH:=/glade/u/home/jedwards/pio1_6_5/pio + INCLDIR += -I$(PIO_PATH) + SLIBS := $(SLIBS) -L$(PIO_PATH) -lpio +endif + diff --git a/configuration/scripts/machines/Macros.izumi_gnu b/configuration/scripts/machines/Macros.izumi_gnu index 99df7a033..cdc6620f4 100644 --- a/configuration/scripts/machines/Macros.izumi_gnu +++ b/configuration/scripts/machines/Macros.izumi_gnu @@ -16,14 +16,14 @@ ifeq ($(ICE_BLDDEBUG), true) CFLAGS += -O0 endif -ifeq ($(ICE_COVERAGE), true) +ifeq ($(ICE_CODECOV), true) FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage CFLAGS += -O0 -g -coverage LDFLAGS += -g -ftest-coverage -fprofile-arcs endif ifneq ($(ICE_BLDDEBUG), true) -ifneq ($(ICE_COVERAGE), true) +ifneq ($(ICE_CODECOV), true) FFLAGS += -O2 CFLAGS += -O2 endif @@ -55,3 +55,10 @@ ifeq ($(ICE_THREADED), true) FFLAGS += -fopenmp endif +## if using parallel I/O, load all 3 libraries. PIO must be first! +ifeq ($(ICE_IOTYPE), pio) + PIO_PATH:= + INCLDIR += -I + SLIBS := $(SLIBS) -L$(PIO_PATH) -lpio +endif + diff --git a/configuration/scripts/machines/Macros.onyx_gnu b/configuration/scripts/machines/Macros.onyx_gnu index 31d0e64aa..d423cd9ab 100644 --- a/configuration/scripts/machines/Macros.onyx_gnu +++ b/configuration/scripts/machines/Macros.onyx_gnu @@ -8,7 +8,7 @@ CFLAGS := -c FIXEDFLAGS := -ffixed-line-length-132 FREEFLAGS := -ffree-form -FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -fallow-argument-mismatch +FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) @@ -16,14 +16,14 @@ ifeq ($(ICE_BLDDEBUG), true) CFLAGS += -O0 endif -ifeq ($(ICE_COVERAGE), true) +ifeq ($(ICE_CODECOV), true) FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage CFLAGS += -O0 -g -coverage LDFLAGS += -g -ftest-coverage -fprofile-arcs endif ifneq ($(ICE_BLDDEBUG), true) -ifneq ($(ICE_COVERAGE), true) +ifneq ($(ICE_CODECOV), true) FFLAGS += -O2 CFLAGS += -O2 endif @@ -65,3 +65,10 @@ ifeq ($(ICE_THREADED), true) FFLAGS += -fopenmp endif +### if using parallel I/O, load all 3 libraries. PIO must be first!
+ifeq ($(ICE_IOTYPE), pio) + PIO_PATH:=/glade/u/home/jedwards/pio1_6_5/pio + INCLDIR += -I$(PIO_PATH) + SLIBS := $(SLIBS) -L$(PIO_PATH) -lpio +endif + diff --git a/configuration/scripts/machines/Macros.perlmutter_gnu b/configuration/scripts/machines/Macros.perlmutter_gnu index 220d2dd80..2e80f7364 100644 --- a/configuration/scripts/machines/Macros.perlmutter_gnu +++ b/configuration/scripts/machines/Macros.perlmutter_gnu @@ -1,5 +1,5 @@ #============================================================================== -# Macros file for NERSC perlmutter, gnu compiler +# Macros file for NAVYDSRC gordon, gnu compiler #============================================================================== CPP := ftn -E @@ -8,18 +8,28 @@ CFLAGS := -c FIXEDFLAGS := -ffixed-line-length-132 FREEFLAGS := -ffree-form -FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -fallow-argument-mismatch +FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow - CFLAGS += -O0 -else + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + CFLAGS += -O0 +endif + +ifeq ($(ICE_CODECOV), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_CODECOV), true) FFLAGS += -O2 CFLAGS += -O2 endif +endif -SCC := cc +SCC := cc SFC := ftn MPICC := cc MPIFC := ftn @@ -55,3 +65,10 @@ ifeq ($(ICE_THREADED), true) FFLAGS += -fopenmp endif +### if using parallel I/O, load all 3 libraries. PIO must be first! +ifeq ($(ICE_IOTYPE), pio) + PIO_PATH:=/glade/u/home/jedwards/pio1_6_5/pio + INCLDIR += -I$(PIO_PATH) + SLIBS := $(SLIBS) -L$(PIO_PATH) -lpio +endif + diff --git a/configuration/scripts/machines/Macros.travisCI_gnu b/configuration/scripts/machines/Macros.travisCI_gnu index 5d68fdceb..aa7b12c05 100644 --- a/configuration/scripts/machines/Macros.travisCI_gnu +++ b/configuration/scripts/machines/Macros.travisCI_gnu @@ -16,14 +16,14 @@ ifeq ($(ICE_BLDDEBUG), true) CFLAGS += -O0 endif -ifeq ($(ICE_COVERAGE), true) +ifeq ($(ICE_CODECOV), true) FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage CFLAGS += -O0 -g -coverage LDFLAGS += -g -ftest-coverage -fprofile-arcs endif ifneq ($(ICE_BLDDEBUG), true) -ifneq ($(ICE_COVERAGE), true) +ifneq ($(ICE_CODECOV), true) FFLAGS += -O2 CFLAGS += -O2 endif diff --git a/configuration/scripts/machines/env.badger_intel b/configuration/scripts/machines/env.badger_intel new file mode 100755 index 000000000..8fe69148b --- /dev/null +++ b/configuration/scripts/machines/env.badger_intel @@ -0,0 +1,45 @@ +#!/bin/tcsh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +#source /usr/share/Modules/init/csh + +#module purge +#module load intel +#module load openmpi +module unload hdf5-serial +module unload hdf5-parallel +module unload netcdf-serial +module unload netcdf-h5parallel +module load hdf5-serial +module load netcdf-serial/4.4.0 +module load hdf5-parallel +module load netcdf-h5parallel/4.4.0 + +#setenv OMP_STACKSIZE 256M +#setenv MP_LABELIO yes +#setenv MP_INFOLEVEL 2 +#setenv MP_SHARED_MEMORY yes +#setenv MP_EUILIB us +#setenv MP_EAGER_LIMIT 0 + +endif + +setenv ICE_MACHINE_ENVNAME badger +setenv ICE_MACHINE_COMPILER intel +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR /net/scratch3/$user/CICE_RUNS 
+setenv ICE_MACHINE_INPUTDATA /usr/projects/climate/eclare/DATA/Consortium +setenv ICE_MACHINE_BASELINE /net/scratch3/$user/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "sbatch " +#setenv ICE_MACHINE_ACCT e3sm +setenv ICE_MACHINE_ACCT climatehilat +setenv ICE_MACHINE_QUEUE "default" +setenv ICE_MACHINE_TPNODE 16 +setenv ICE_MACHINE_BLDTHRDS 1 +setenv ICE_MACHINE_QSTAT "squeue --jobs=" diff --git a/configuration/scripts/options/set_nml.alt04 b/configuration/scripts/options/set_nml.alt04 index 273f7d87d..96a3a9c1b 100644 --- a/configuration/scripts/options/set_nml.alt04 +++ b/configuration/scripts/options/set_nml.alt04 @@ -21,7 +21,7 @@ evp_algorithm = 'shared_mem_1d' fbot_xfer_type = 'Cdn_ocn' shortwave = 'dEdd' formdrag = .true. -advection = 'upwind' +advection = 'remap' kstrength = 0 krdg_partic = 0 krdg_redist = 0 diff --git a/configuration/scripts/parse_namelist.sh b/configuration/scripts/parse_namelist.sh index dcb0d1ccc..ea539a2d0 100755 --- a/configuration/scripts/parse_namelist.sh +++ b/configuration/scripts/parse_namelist.sh @@ -10,7 +10,7 @@ filename=$1 filemods=$2 #echo "$0 $1 $2" -echo "running ${scriptname}" +echo "running parse_namelist.sh" foundstring="FoundSTRING" vnamearray=() valuearray=() @@ -43,9 +43,11 @@ do fi done - grep -q "^[[:space:]]*${vname}[[:space:]]*=" $filename - grepout=$? - if [ ${grepout} -eq 0 ]; then + #sed -i 's|\(^\s*'"$vname"'\s*\=\s*\)\(.*$\)|\1'"$value"'|g' $filename + cp ${filename} ${filename}.check + sed -i.sedbak -e 's|\(^[[:space:]]*'"$vname"'[[:space:]]*=[[:space:]]*\)\(.*$\)|\1'"$foundstring"'|g' ${filename}.check + grep -q ${foundstring} ${filename}.check + if [ $? -eq 0 ]; then sed -i.sedbak -e 's|\(^[[:space:]]*'"$vname"'[[:space:]]*=[[:space:]]*\)\(.*$\)|\1'"$value"'|g' ${filename} if [[ "${found}" == "${foundstring}" ]]; then vnamearray+=($vname) @@ -53,17 +55,17 @@ do else valuearray[$found]=${value} fi + if [[ -e "${filename}.sedbak" ]]; then + rm ${filename}.sedbak + fi else echo "${scriptname} ERROR: parsing error for ${vname}" exit -99 fi + rm ${filename}.check ${filename}.check.sedbak fi done < "$filemods" -if [[ -e "${filename}.sedbak" ]]; then - rm ${filename}.sedbak -fi - exit 0 diff --git a/configuration/scripts/parse_settings.sh b/configuration/scripts/parse_settings.sh index a3f432801..d6ed31c15 100755 --- a/configuration/scripts/parse_settings.sh +++ b/configuration/scripts/parse_settings.sh @@ -10,7 +10,7 @@ filename=$1 filemods=$2 #echo "$0 $1 $2" -echo "running ${scriptname}" +echo "running parse_settings.sh" foundstring="FoundSTRING" vnamearray=() valuearray=() @@ -23,11 +23,8 @@ do else #vname=`echo $line | sed "s|\(^\s*set\S*\)\s\{1,100\}\(\S*\)\s\{1,100\}\(\S*\).*$|\2|g"` #value=`echo $line | sed "s|\(^\s*set\S*\)\s\{1,100\}\(\S*\)\s\{1,100\}\(\S*\).*$|\3|g"` - vname=`echo $line | sed "s|\(^[[:space:]]*set[^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\).*$|\2|g"` + vname=`echo $line | sed "s|\(^[[:space:]]*set[^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\).*$|\2|g"` value=`echo $line | sed "s|\(^[[:space:]]*set[^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\)[[:space:]][[:space:]]*\([^[:space:]]*\).*$|\3|g"` - if [[ "${value}" == "${line}" ]]; then - value="" - fi # echo "$line $vname $value" found=${foundstring} @@ -46,27 +43,22 @@ do fi done - grep -q "^[[:space:]]*set.* ${vname}[[:space:]]*" $filename - grepout=$? 
- if [ ${grepout} -eq 0 ]; then - sed -i.sedbak -e 's|\(^[[:space:]]*set.* '"$vname"' \)[^#]*\(#*.*$\)|\1 '"$value"' \2|g' $filename - if [[ "${found}" == "${foundstring}" ]]; then - vnamearray+=($vname) - valuearray+=($value) - else - valuearray[$found]=${value} - fi + #sed -i 's|\(^\s*set.* '"$vname"' \)[^#]*\(#*.*$\)|\1 '"$value"' \2|g' $filename + sed -i.sedbak -e 's|\(^[[:space:]]*set.* '"$vname"' \)[^#]*\(#*.*$\)|\1 '"$value"' \2|g' $filename + + if [[ "${found}" == "${foundstring}" ]]; then + vnamearray+=($vname) + valuearray+=($value) else - echo "${scriptname} ERROR: parsing error for ${vname}" - exit -99 + valuearray[$found]=${value} + fi + + if [[ -e "${filename}.sedbak" ]]; then + rm ${filename}.sedbak fi fi done < "$filemods" -if [[ -e "${filename}.sedbak" ]]; then - rm ${filename}.sedbak -fi - exit 0 diff --git a/configuration/scripts/tests/QC/cice.t-test.py b/configuration/scripts/tests/QC/cice.t-test.py index b941c4912..987175245 100755 --- a/configuration/scripts/tests/QC/cice.t-test.py +++ b/configuration/scripts/tests/QC/cice.t-test.py @@ -57,15 +57,6 @@ def gen_filenames(base_dir, test_dir): " # of files: {}".format(len(files_b))) sys.exit(-1) - if len(files_a) < 1825: - logger.error("Number of output files too small, expecting at least 1825." + \ - " Exiting...\n" + \ - "Baseline directory: {}\n".format(path_a) + \ - " # of files: {}\n".format(len(files_a)) + \ - "Test directory: {}\n".format(path_b) + \ - " # of files: {}".format(len(files_b))) - sys.exit(-1) - logger.info("Number of files: %d", len(files_a)) return path_a, path_b, files_a, files_b @@ -177,10 +168,7 @@ def stage_one(data_d, num_files, mean_d, variance_d): df = n_eff - 1 # Read in t_crit table - if os.path.exists('./CICE_t_critical_p0.8.nc'): - nfid = nc.Dataset("./CICE_t_critical_p0.8.nc", 'r') - else: - nfid = nc.Dataset("configuration/scripts/tests/QC/CICE_t_critical_p0.8.nc", 'r') + nfid = nc.Dataset("configuration/scripts/tests/QC/CICE_t_critical_p0.8.nc", 'r') df_table = nfid.variables['df'][:] t_crit_table = nfid.variables['tcrit'][:] nfid.close() @@ -241,10 +229,7 @@ def stage_one(data_d, num_files, mean_d, variance_d): t_val = mean_d / np.sqrt(variance_d / num_files) # Find t_crit from the nearest value on the Lookup Table Test - if os.path.exists('./CICE_Lookup_Table_p0.8_n1825.nc'): - nfid = nc.Dataset("./CICE_Lookup_Table_p0.8_n1825.nc", 'r') - else: - nfid = nc.Dataset("configuration/scripts/tests/QC/CICE_Lookup_Table_p0.8_n1825.nc", 'r') + nfid = nc.Dataset("configuration/scripts/tests/QC/CICE_Lookup_Table_p0.8_n1825.nc", 'r') r1_table = nfid.variables['r1'][:] t_crit_table = nfid.variables['tcrit'][:] nfid.close() @@ -379,8 +364,8 @@ def plot_data(data, lat, lon, units, case, plot_type): try: # Load the necessary plotting libraries import matplotlib.pyplot as plt - import cartopy.crs as ccrs - import cartopy.feature as cfeature + from mpl_toolkits.basemap import Basemap + from mpl_toolkits.axes_grid1 import make_axes_locatable except ImportError: logger.warning('Error loading necessary Python modules in plot_data function') return @@ -389,200 +374,87 @@ def plot_data(data, lat, lon, units, case, plot_type): import warnings warnings.filterwarnings("ignore", category=UserWarning) - # define north and south polar stereographic coord ref system - npstereo = ccrs.NorthPolarStereo(central_longitude=-90.0) # define projection - spstereo = ccrs.SouthPolarStereo(central_longitude= 90.0) # define projection - - # define figure - fig = plt.figure(figsize=[14,7]) - - # add axis for each hemishpere - 
ax1 = fig.add_subplot(121,projection=npstereo) - ax2 = fig.add_subplot(122,projection=spstereo) - - # set plot extents - ax1.set_extent([-180.,180.,35.,90.],ccrs.PlateCarree()) - ax2.set_extent([-180.,180.,-90.,-35.],ccrs.PlateCarree()) - - # add land features NH plot - ax1.add_feature(cfeature.LAND, color='lightgray') - ax1.add_feature(cfeature.BORDERS) - ax1.add_feature(cfeature.COASTLINE) - - # add land features SH plot - ax2.add_feature(cfeature.LAND, color='lightgray') - ax2.add_feature(cfeature.BORDERS) - ax2.add_feature(cfeature.COASTLINE) - - # add grid lines - dlon = 30.0 - dlat = 15.0 - mpLons = np.arange(-180. ,180.0+dlon,dlon) - mpLats = np.arange(-90.,90.0+dlat ,dlat) - - g1 = ax1.gridlines(xlocs=mpLons,ylocs=mpLats, - draw_labels=True, - x_inline=False,y_inline=False) - - g2 = ax2.gridlines(xlocs=mpLons,ylocs=mpLats, - draw_labels=True, - x_inline=False,y_inline=False) - - - # Specify Min/max colors for each hemisphere - # check for minus to see if it is a difference plot - if '\n- ' in case: # this is a difference plot - # specify colormap - mycmap = 'seismic' # blue,white,red with white centered colormap - - # determine max absolute value to use for color range - # intent is use same min/max with center zero - dmin = np.abs(data.min()) - dmax = np.abs(data.max()) - clim = np.max([dmin,dmax]) - - # this specifies both hemishperes the same range. - cminNH = -clim - cmaxNH = clim - cminSH = -clim - cmaxSH = clim - - else: # not a difference plot - # specify colormap - mycmap = 'jet' - - # arbitrary limits for each Hemishpere - cminNH = 0.0 - cmaxNH = 5.0 - cminSH = 0.0 - cmaxSH = 2.0 + # Create the figure and axis + fig, axes = plt.subplots(nrows=1, ncols=2,figsize=(14, 8)) + + # Plot the northern hemisphere data as a scatter plot + # Create the basemap, and draw boundaries + plt.sca(axes[0]) + m = Basemap(projection='npstere', boundinglat=35,lon_0=270, resolution='l') + m.drawcoastlines() + m.fillcontinents() + m.drawcountries() if plot_type == 'scatter': - # plot NH - scNH = ax1.scatter(lon,lat,c=data,cmap=mycmap,s=4,edgecolors='none', - vmin=cminNH, vmax=cmaxNH, - transform=ccrs.PlateCarree()) - - # plot SH - scSH = ax2.scatter(lon,lat,c=data,cmap=mycmap,s=4,edgecolors='none', - vmin=cminSH, vmax=cmaxSH, - transform=ccrs.PlateCarree()) + x, y = m(lon,lat) + sc = m.scatter(x, y, c=data, cmap='jet', lw=0, s=4) + else: + # Create new arrays to add 1 additional longitude value to prevent a + # small amount of whitespace around longitude of 0/360 degrees. + lon_cyc = np.zeros((lon.shape[0],lon.shape[1]+1)) + mask = np.zeros((data.shape[0],data.shape[1]+1)) + lat_cyc = np.zeros((lat.shape[0],lat.shape[1]+1)) + + mask[:,0:-1] = data.mask[:,:] + mask[:,-1] = data.mask[:,0] + lon_cyc[:,0:-1] = lon[:,:]; lon_cyc[:,-1] = lon[:,0] + lat_cyc[:,0:-1] = lat[:,:]; lat_cyc[:,-1] = lat[:,0] + + lon1 = np.ma.masked_array(lon_cyc, mask=mask) + lat1 = np.ma.masked_array(lat_cyc, mask=mask) + + d = np.zeros((data.shape[0],data.shape[1]+1)) + d[:,0:-1] = data[:,:] + d[:,-1] = data[:,0] + d1 = np.ma.masked_array(d,mask=mask) + + x, y = m(lon1.data, lat1.data) - else: if plot_type == 'contour': - print("contour plot depreciated. 
using pcolor.") - - scNH = ax1.pcolormesh(lon,lat,data,cmap=mycmap, - vmin=cminNH, vmax=cmaxNH, - transform=ccrs.PlateCarree()) - - scSH = ax2.pcolormesh(lon,lat,data,cmap=mycmap, - vmin=cminSH, vmax=cmaxSH, - transform=ccrs.PlateCarree()) - - #else: - # # Create new arrays to add 1 additional longitude value to prevent a - # # small amount of whitespace around seam - # lon_cyc = np.zeros((lon.shape[0],lon.shape[1]+1)) - # lat_cyc = np.zeros((lat.shape[0],lat.shape[1]+1)) - # data1 = np.zeros((data.shape[0],data.shape[1]+1)) - # mask = np.zeros((data.shape[0],data.shape[1]+1)) - - # mask[:,0:-1] = data.mask[:,:] - # mask[:,-1] = data.mask[:,0] - # lon_cyc[:,0:-1] = lon[:,:] - # lon_cyc[:,-1] = lon[:,0] - # lat_cyc[:,0:-1] = lat[:,:] - # lat_cyc[:,-1] = lat[:,0] - # data1[:,0:-1] = data[:,:] - # data1[:,-1] = data[:,0] - - # lon1 = np.ma.masked_array(lon_cyc, mask=mask) - # lat1 = np.ma.masked_array(lat_cyc, mask=mask) - # data1 = np.ma.masked_array(data1, mask=mask) - - # if plot_type == 'contour': - # # plotting around -180/180 and 0/360 is a challenge. - # # need to use lons in both 0-360 and +- 180 - # # make lons +/- 180 - # lon1_pm180 = np.where(lon1 < 180.0, lon1, lon1-360.0) - # lon1_pm180 = np.ma.masked_where(lon1.mask,lon1_pm180) - - # # get 90-270 lons from the lon 0-360 array (lon1) - # # note: use 91, 269 to prevent small amount of white space in contour plots - # lonmask = np.logical_or(lon1 <= 91.0,lon1 >= 269.0) - # lons_90_270 = np.ma.masked_where(lonmask,lon1) - # lats_90_270 = np.ma.MaskedArray(lat1,mask=lons_90_270.mask) - # data_90_270 = np.ma.MaskedArray(data1,mask=lons_90_270.mask) - # data_90_270.mask = np.logical_or(data1.mask,data_90_270.mask) - - # # get -92-92 lons from +/- 180 (lon1_pm180) - # # note: use 92 to prevent small amount of white space in contour plots - # lonmask = np.logical_or(lon1_pm180 <= -92.0, lon1_pm180 >= 92.0) - # lons_m90_90 = np.ma.masked_where(lonmask,lon1_pm180) - # lats_m90_90 = np.ma.MaskedArray(lat1,mask=lons_m90_90.mask) - # data_m90_90 = np.ma.MaskedArray(data1,mask=lons_m90_90.mask) - # data_m90_90.mask = np.logical_or(data1.mask,data_m90_90.mask) - - # # plot NH 90-270 - # sc = ax1.contourf(lons_90_270, lats_90_270, data_90_270, cmap=mycmap, - # transform=ccrs.PlateCarree(), - # extend='both') - # # plot NH -90-90 - # sc = ax1.contourf(lons_m90_90, lats_m90_90, data_m90_90, cmap=mycmap, - # transform=ccrs.PlateCarree(), - # extend='both') - - # # plot SH 90-270 - # sc = ax2.contourf(lons_90_270, lats_90_270, data_90_270, cmap=mycmap, - # transform=ccrs.PlateCarree(), - # extend='both') - # # plot SH -90-90 - # sc = ax2.contourf(lons_m90_90, lats_m90_90, data_m90_90, cmap=mycmap, - # transform=ccrs.PlateCarree(), - # extend='both') - - - #plt.suptitle('CICE Mean Ice Thickness\n{}'.format(case), y=0.95) - plt.suptitle(f'CICE Mean Ice Thickness\n{case:s}') - - # add more whitespace between plots for colorbar. 
- plt.subplots_adjust(wspace=0.4) - - # add separate axes for colorbars - # first get position/size of current axes - pos1 = ax1.get_position() - pos2 = ax2.get_position() - - # now add new colormap axes using the position ax1, ax2 as reference - cax1 = fig.add_axes([pos1.x0+pos1.width+0.03, - pos1.y0, - 0.02, - pos1.height]) - - cax2 = fig.add_axes([pos2.x0+pos2.width+0.03, - pos2.y0, - 0.02, - pos2.height]) + sc = m.contourf(x, y, d1, cmap='jet') + else: # pcolor + sc = m.pcolor(x, y, d1, cmap='jet') + m.drawparallels(np.arange(-90.,120.,15.),labels=[1,0,0,0]) # draw parallels + m.drawmeridians(np.arange(0.,420.,30.),labels=[1,1,1,1]) # draw meridians - if '\n- ' in case: - # If making a difference plot, use scientific notation for colorbar - cbNH = plt.colorbar(scNH, cax=cax1, orientation="vertical", - pad=0.1, format="%.1e") - cbSH = plt.colorbar(scSH, cax=cax2, orientation="vertical", - pad=0.1, format="%.1e") + # Plot the southern hemisphere data as a scatter plot + plt.sca(axes[1]) + m = Basemap(projection='spstere', boundinglat=-45,lon_0=270, resolution='l') + m.drawcoastlines() + m.fillcontinents() + m.drawcountries() + if plot_type == 'scatter': + x, y = m(lon,lat) + sc = m.scatter(x, y, c=data, cmap='jet', lw=0, s=4) else: - #pass - # If plotting non-difference data, do not use scientific notation for colorbar - cbNH = plt.colorbar(scNH, cax=cax1, orientation="vertical", - pad=0.1, format="%.2f") - cbSH = plt.colorbar(scSH, cax=cax2, orientation="vertical", - pad=0.1, format="%.2f") + x, y = m(lon1.data, lat1.data) + + # Bandaid for a bug in the version of Basemap used during development + outside = (x <= m.xmin) | (x >= m.xmax) | (y <= m.ymin) | (y >= m.ymax) + tmp = np.ma.masked_where(outside,d1) + + if plot_type == 'contour': + sc = m.contourf(x, y, tmp, cmap='jet') + else: # pcolor + sc = m.pcolor(x, y, tmp, cmap='jet') + + m.drawparallels(np.arange(-90.,120.,15.),labels=[1,0,0,0]) # draw parallels + m.drawmeridians(np.arange(0.,420.,30.),labels=[1,1,1,1]) # draw meridians - cbNH.set_label(units, loc='center') - cbSH.set_label(units, loc='center') + plt.suptitle('CICE Mean Ice Thickness\n{}'.format(case), y=0.95) + + # Make some room at the bottom of the figure, and create a colorbar + fig.subplots_adjust(bottom=0.2) + cbar_ax = fig.add_axes([0.11,0.1,0.8,0.05]) + if '\n- ' in case: + # If making a difference plot, use scientific notation for colorbar + cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2e") + else: + # If plotting non-difference data, do not use scientific notation for colorbar + cb = plt.colorbar(sc, cax=cbar_ax, orientation="horizontal", format="%.2f") + cb.set_label(units, x=1.0) outfile = 'ice_thickness_{}.png'.format(case.replace('\n- ','_minus_')) logger.info('Creating map of the data ({})'.format(outfile)) @@ -602,8 +474,7 @@ def plot_two_stage_failures(data, lat, lon): logger.info('Creating map of the failures (two_stage_test_failure_map.png)') # Load the necessary plotting libraries import matplotlib.pyplot as plt - import cartopy.crs as ccrs - import cartopy.feature as cfeature + from mpl_toolkits.basemap import Basemap from mpl_toolkits.axes_grid1 import make_axes_locatable from matplotlib.colors import LinearSegmentedColormap @@ -611,19 +482,15 @@ def plot_two_stage_failures(data, lat, lon): import warnings warnings.filterwarnings("ignore", category=UserWarning) - # Create the figure + # Create the figure and axis fig = plt.figure(figsize=(12, 8)) - - # define plot projection and create axis - pltprj = 
ccrs.Mollweide(central_longitude=0.0) - ax = fig.add_subplot(111,projection=pltprj) - - # add land - ax.add_feature(cfeature.LAND, color='lightgray') - ax.add_feature(cfeature.BORDERS) - ax.add_feature(cfeature.COASTLINE) - #gshhs = cfeature.GSHHSFeature(scale='auto',facecolor='lightgray',edgecolor='none') - #ax.add_feature(gshhs) + ax = fig.add_axes([0.05, 0.08, 0.9, 0.9]) + + # Create the basemap, and draw boundaries + m = Basemap(projection='moll', lon_0=0., resolution='l') + m.drawmapboundary(fill_color='white') + m.drawcoastlines() + m.drawcountries() # Create the custom colormap colors = [(0, 0, 1), (1, 0, 0)] # Blue, Red @@ -631,20 +498,11 @@ def plot_two_stage_failures(data, lat, lon): cm = LinearSegmentedColormap.from_list(cmap_name, colors, N=2) # Plot the data as a scatter plot - sc = ax.scatter(lon,lat,c=int_data,cmap=cm,s=4,lw=0, - vmin=0.,vmax=1., - transform=ccrs.PlateCarree()) - - # add grid lines - dlon = 60.0 - dlat = 30.0 - mpLons = np.arange(-180. ,180.0+dlon,dlon) - mpLats = np.arange(-90.,90.0+dlat ,dlat) - mpLabels = {"left": "y", - "bottom": "x"} - - ax.gridlines(xlocs=mpLons,ylocs=mpLats, - draw_labels=mpLabels) + x, y = m(lon, lat) + sc = m.scatter(x, y, c=int_data, cmap=cm, lw=0, vmin=0, vmax=1, s=4) + + m.drawmeridians(np.arange(0, 360, 60), labels=[0, 0, 0, 1], fontsize=10) + m.drawparallels(np.arange(-90, 90, 30), labels=[1, 0, 0, 0], fontsize=10) plt.title('CICE Two-Stage Test Failures') diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts old mode 100644 new mode 100755 index 3a18d8548..fad8b22f3 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -4,84 +4,47 @@ smoke gx3 1x1 debug,diag1,run2day smoke gx3 1x4 debug,diag1,run2day smoke gx3 4x1 debug,diag1,run5day restart gx3 8x2 debug -restart gx3 8x2 debug,gx3nc smoke gx3 8x2 diag24,run1year,medium -smoke gx3 7x2 diag1,bigdiag,run1day,diagpt1 -decomp gx3 4x2x25x29x5 none -smoke gx3 4x2 diag1,run5day smoke_gx3_8x2_diag1_run5day -smoke gx3 4x1 diag1,run5day,thread smoke_gx3_8x2_diag1_run5day -smoke gx3 1x8 diag1,run5day,evp1d -restart gx1 40x4 droundrobin,medium -restart tx1 40x4 dsectrobin,medium -restart tx1 40x4 dsectrobin,medium,jra55do +decomp gx3 4x2x25x29x5 +smoke gx3 4x2 diag1,run5day smoke_gx3_8x2_diag1_run5day +smoke gx3 4x1 diag1,run5day,thread smoke_gx3_8x2_diag1_run5day +restart gx1 40x4 droundrobin,medium +restart tx1 40x4 dsectrobin,medium restart gx3 4x4 none -restart gx3 4x4 gx3nc -restart gx3 10x4 maskhalo +restart gx3 4x4 iobinary restart gx3 6x2 alt01 restart gx3 8x2 alt02 restart gx3 4x2 alt03 -restart gx3 12x2 alt03,maskhalo,droundrobin restart gx3 4x4 alt04 restart gx3 4x4 alt05 -restart gx3 8x2 alt06 -restart gx3 8x3 alt07 -restart gx3 16x2 snicar -restart gx3 12x2 snicartest -restart gx3 8x3 saltflux -restart gx3 18x2 debug,maskhalo restart gx3 6x2 alt01,debug,short restart gx3 8x2 alt02,debug,short restart gx3 4x2 alt03,debug,short -smoke gx3 12x2 alt03,debug,short,maskhalo,droundrobin smoke gx3 4x4 alt04,debug,short smoke gx3 4x4 alt05,debug,short -smoke gx3 8x2 alt06,debug,short -smoke gx3 8x3 alt07,debug,short -smoke gx3 16x2 snicar,debug,short -smoke gx3 12x2 snicartest,debug,short -smoke gx3 10x2 debug,diag1,run5day,gx3sep2 -smoke gx3 7x2x5x29x12 diag1,bigdiag,run1day,debug -restart gbox128 4x2 short -restart gbox128 4x2 boxnodyn,short -restart gbox128 4x2 boxnodyn,short,debug -restart gbox128 2x2 boxadv,short -smoke gbox128 2x2 boxadv,short,debug -restart gbox128 4x4 boxrestore,short 
-smoke gbox128 4x4 boxrestore,short,debug -restart gbox80 1x1 box2001 -smoke gbox80 1x1 boxslotcyl -smoke gbox12 1x1x12x12x1 boxchan,diag1,debug -restart gx3 8x2 modal +restart gbox128 4x2 short +restart gbox128 4x2 boxdyn,short +restart gbox128 4x2 boxdyn,short,debug +restart gbox128 2x2 boxadv,short +smoke gbox128 2x2 boxadv,short,debug +restart gbox128 4x4 boxrestore,short +smoke gbox128 4x4 boxrestore,short,debug +restart gbox80 1x1 box2001 +smoke gbox80 1x1 boxslotcyl smoke gx3 8x2 bgcz -smoke gx3 8x2 jra55do -smoke gx3 8x2 bgczm,debug +smoke gx3 8x2 bgcz,debug smoke gx3 8x1 bgcskl,debug -#smoke gx3 4x1 bgcz,thread smoke_gx3_8x2_bgcz +#smoke gx3 4x1 bgcz,thread smoke_gx3_8x2_bgcz restart gx1 4x2 bgcsklclim,medium restart gx1 8x1 bgczclim,medium -smoke gx1 24x1 medium,run90day,yi2008 -smoke gx1 24x1 medium,run90day,yi2008,jra55do -smoke gx3 8x1 medium,run90day,yi2008 -restart gx1 24x1 short -restart gx1 16x2 seabedLKD,gx1apr,short,debug -restart gx1 15x2 seabedprob -restart gx1 32x1 gx1prod +smoke gx1 24x1 jra55_gx1_2008,medium,run90day +smoke gx3 8x1 jra55_gx3_2008,medium,run90day +restart gx1 24x1 jra55_gx1,short +restart gx3 8x1 jra55_gx3,short smoke gx3 4x2 fsd1,diag24,run5day,debug -smoke gx3 8x2 fsd12,diag24,run5day +smoke gx3 8x2 fsd12,diag24,run5day,short restart gx3 4x2 fsd12,debug,short -smoke gx3 8x2 fsd12ww3,diag24,run1day +smoke gx3 8x2 fsd12ww3,diag24,run1day,medium smoke gx3 4x1 isotope,debug restart gx3 8x2 isotope -smoke gx3 4x1 snwitdrdg,snwgrain,icdefault,debug -smoke gx3 4x1 snw30percent,icdefault,debug -restart gx3 8x2 snwitdrdg,icdefault,snwgrain -restart gx3 4x4 gx3ncarbulk,iobinary -restart gx3 4x4 histall,precision8,cdf64 -smoke gx3 30x1 bgcz,histall -smoke gx3 14x2 fsd12,histall -smoke gx3 4x1 dynpicard -restart gx3 8x2 gx3ncarbulk,debug -restart gx3 4x4 gx3ncarbulk,diag1 -smoke gx3 4x1 calcdragio -restart gx3 4x2 atmbndyconstant -restart gx3 4x2 atmbndymixed + diff --git a/configuration/scripts/tests/io_suite.ts b/configuration/scripts/tests/io_suite.ts old mode 100644 new mode 100755 index e5e7feee6..3e98642e9 --- a/configuration/scripts/tests/io_suite.ts +++ b/configuration/scripts/tests/io_suite.ts @@ -1,61 +1,68 @@ # Test Grid PEs Sets BFB-compare # some iobinary configurations fail due to bathymetry netcdf file requirement, remove them -# iobinary cannot work with JRA55 because netcdf is turned off -restart gx3 8x4 gx3ncarbulk,debug,histall,iobinary,precision8 -#restart gx3 12x2 gx3ncarbulk,alt01,histall,iobinary -restart gx3 16x2 gx3ncarbulk,alt02,histall,iobinary,precision8 -#restart gx3 4x2 gx3ncarbulk,alt03,histall,iobinary -restart gx3 8x4 gx3ncarbulk,alt04,histall,iobinary,precision8 -restart gx3 4x4 gx3ncarbulk,alt05,histall,iobinary -restart gx3 14x2 gx3ncarbulk,alt06,histall,iobinary,precision8 -restart gx3 14x2 gx3ncarbulk,alt07,histall,iobinary,precision8 -restart gx3 32x1 gx3ncarbulk,bgcz,histall,iobinary,precision8 -restart gx3 16x2 gx3ncarbulk,bgcskl,histall,iobinary -restart gx3 14x2 gx3ncarbulk,isotope,histall,iobinary,precision8 -restart gx3 16x2 gx3ncarbulk,fsd12,histall,iobinary -restart gx3 8x4 gx3ncarbulk,debug,histall,iobinary,precision8,histinst +restart gx3 8x4 debug,histall,iobinary,precision8 +#restart gx3 12x2 alt01,histall,iobinary +restart gx3 16x2 alt02,histall,iobinary,precision8 +#restart gx3 4x2 alt03,histall,iobinary +restart gx3 8x4 alt04,histall,iobinary,precision8 +restart gx3 4x4 alt05,histall,iobinary +restart gx3 32x1 bgcz,histall,iobinary,precision8 +restart gx3 16x2 bgcskl,histall,iobinary +restart gx3 14x2 
isotope,histall,iobinary,precision8 +restart gx3 16x2 fsd12,histall,iobinary -restart gx3 32x1 debug,histall,ionetcdf,iocdf1,precision8 -restart gx3 15x2 alt01,histall,ionetcdf,iocdf2,precision8 -restart gx3 15x2 alt02,histall,ionetcdf,iocdf5 -restart gx3 24x1 alt03,histall,ionetcdf,iohdf5,iohdf5opts -restart gx3 8x4 alt04,histall,ionetcdf,iocdf1 -restart gx3 8x4 alt05,histall,ionetcdf,iocdf2 -restart gx3 16x2 alt06,histall,ionetcdf,iocdf5,precision8 -restart gx3 16x2 alt07,histall,ionetcdf,iohdf5,precision8 -restart gx3 30x1 bgczm,histall,ionetcdf,iocdf1 -restart gx3 15x2 bgcskl,histall,ionetcdf,iocdf2,precision8 -restart gx3 31x1 isotope,histall,ionetcdf,iocdf5,precision8 -restart gx3 14x2 fsd12,histall,ionetcdf,iohdf5 -restart gx3 32x1 debug,histall,ionetcdf,iohdf5,histinst +restart gx3 32x1 debug,histall,ionetcdf +restart gx3 15x2 alt01,histall,ionetcdf,precision8,cdf64 +restart gx3 15x2 alt02,histall,ionetcdf +restart gx3 24x1 alt03,histall,ionetcdf,precision8 +restart gx3 8x4 alt04,histall,ionetcdf,cdf64 +restart gx3 8x4 alt05,histall,ionetcdf,precision8,cdf64 +restart gx3 30x1 bgcz,histall,ionetcdf +restart gx3 15x2 bgcskl,histall,ionetcdf,precision8 +restart gx3 31x1 isotope,histall,ionetcdf,cdf64 +restart gx3 14x2 fsd12,histall,ionetcdf,precision8 -restart gx3 16x2x100x2x4 histall,iopio1,iopioopts -restart gx3 16x2 debug,histall,iopio1,iocdf2 -restart gx3 14x2 alt01,histall,iopio1,iocdf5 -restart gx3 32x1 alt02,histall,iopio1,iohdf5 -restart gx3 24x1 alt03,histall,iopio1,iopnetcdf1,precision8 -restart gx3 8x4 alt04,histall,iopio1,iopnetcdf2,precision8 -restart gx3 8x4 alt05,histall,iopio1,iopnetcdf5,precision8 -restart gx3 32x1 alt06,histall,iopio1,iocdf1 -restart gx3 32x1 alt07,histall,iopio1,iocdf2,precision8 -restart gx3 16x2 bgczm,histall,iopio1,iocdf5,precision8 -restart gx3 30x1 bgcskl,histall,iopio1,iohdf5,precision8 -restart gx3 8x4 isotope,histall,iopio1,iopnetcdf1 -restart gx3 12x2 fsd12,histall,iopio1,iopnetcdf2 -restart gx3 16x2 debug,histall,iopio1,iopnetcdf5,histinst +restart gx3 16x2 debug,histall,iopio1,precision8,cdf64 +restart gx3 14x2 alt01,histall,iopio1,cdf64 +restart gx3 32x1 alt02,histall,iopio1,precision8 +restart gx3 24x1 alt03,histall,iopio1 +restart gx3 8x4 alt04,histall,iopio1,precision8,cdf64 +restart gx3 8x4 alt05,histall,iopio1,cdf64 +restart gx3 16x2 bgcz,histall,iopio1,precision8 +restart gx3 30x1 bgcskl,histall,iopio1 +restart gx3 8x4 isotope,histall,iopio1,precision8,cdf64 +restart gx3 12x2 fsd12,histall,iopio1,cdf64 -restart gx3 16x2x100x2x4 debug,histall,iopio2,iopioopts,run5day -restart gx3 16x2 debug,histall,iopio2,iopnetcdf1,precision8 -restart gx3 14x2 alt01,histall,iopio2,iopnetcdf2,precision8 -restart gx3 32x1 alt02,histall,iopio2,iopnetcdf5,precision8 -restart gx3 24x1 alt03,histall,iopio2,iocdf1 -restart gx3 8x4 alt04,histall,iopio2,iocdf2 -restart gx3 8x4 alt05,histall,iopio2,iocdf5 -restart gx3 16x2 alt06,histall,iopio2,iohdf5,iohdf5opts -restart gx3 16x2 alt07,histall,iopio2,iopnetcdf1 -restart gx3 16x2 bgczm,histall,iopio2,iopnetcdf2 -restart gx3 30x1 bgcskl,histall,iopio2,iopnetcdf5 -restart gx3 8x4 isotope,histall,iopio2,iohdf5,precision8 -restart gx3 12x2 fsd12,histall,iopio2,iocdf1,precision8 -restart gx3 16x2 debug,histall,iopio2,iocdf2,histinst,precision8 +restart gx3 16x2 debug,histall,iopio2 +restart gx3 14x2 alt01,histall,iopio2,precision8,cdf64 +restart gx3 32x1 alt02,histall,iopio2,cdf64 +restart gx3 24x1 alt03,histall,iopio2,precision8 +restart gx3 8x4 alt04,histall,iopio2 +restart gx3 8x4 
alt05,histall,iopio2,precision8,cdf64 +restart gx3 16x2 bgcz,histall,iopio2,cdf64 +restart gx3 30x1 bgcskl,histall,iopio2,precision8 +restart gx3 8x4 isotope,histall,iopio2 +restart gx3 12x2 fsd12,histall,iopio2,precision8,cdf64 + +restart gx3 16x2 debug,histall,iopio1p,precision8 +restart gx3 14x2 alt01,histall,iopio1p +restart gx3 32x1 alt02,histall,iopio1p,precision8,cdf64 +restart gx3 24x1 alt03,histall,iopio1p,cdf64 +restart gx3 8x4 alt04,histall,iopio1p,precision8 +restart gx3 8x4 alt05,histall,iopio1p +restart gx3 16x2 bgcz,histall,iopio1p,precision8,cdf64 +restart gx3 30x1 bgcskl,histall,iopio1p,cdf64 +restart gx3 8x4 isotope,histall,iopio1p,precision8 +restart gx3 12x2 fsd12,histall,iopio1p + +restart gx3 16x2 debug,histall,iopio2p,cdf64 +restart gx3 14x2 alt01,histall,iopio2p,precision8 +restart gx3 32x1 alt02,histall,iopio2p +restart gx3 24x1 alt03,histall,iopio2p,precision8,cdf64 +restart gx3 8x4 alt04,histall,iopio2p,cdf64 +restart gx3 8x4 alt05,histall,iopio2p,precision8 +restart gx3 16x2 bgcz,histall,iopio2p +restart gx3 30x1 bgcskl,histall,iopio2p,precision8,cdf64 +restart gx3 8x4 isotope,histall,iopio2p,cdf64 +restart gx3 12x2 fsd12,histall,iopio2p,precision8 diff --git a/configuration/scripts/tests/nothread_suite.ts b/configuration/scripts/tests/nothread_suite.ts index a262ec135..49f834a98 100644 --- a/configuration/scripts/tests/nothread_suite.ts +++ b/configuration/scripts/tests/nothread_suite.ts @@ -1,7 +1,7 @@ # Test Grid PEs Sets BFB-compare restart gx3 8x1x25x29x2 dslenderX2 -smoke gx3 8x1x25x29x2 dslenderX2,diag1,reprosum +logbfb gx3 8x1x25x29x2 dslenderX2,diag1,reprosum smoke gx3 16x1 diag1,run5day smoke gx3 1x1 debug,diag1,run2day @@ -14,22 +14,18 @@ smoke gx3 16x1 diag24,run1year,medium #restart tx1 160x1 dsectrobin,medium restart gx3 16x1 none -restart gx3 16x1 gx3ncarbulk,iobinary +restart gx3 16x1 iobinary restart gx3 12x1 alt01 restart gx3 16x1 alt02 restart gx3 8x1 alt03 -restart gx3 16x1x5x29x6 alt04 +restart gx3 16x1 alt04 restart gx3 16x1 alt05 -restart gx3 20x1 alt06 -restart gx3 18x1 alt07 restart gx3 18x1 alt01,debug,short restart gx3 20x1 alt02,debug,short restart gx3 24x1 alt03,debug,short smoke gx3 24x1 alt04,debug,short smoke gx3 32x1 alt05,debug,short -smoke gx3 16x1 alt06,debug,short -smoke gx3 20x1 alt07,debug,short restart gx3 16x1 isotope smoke gx3 6x1 isotope,debug smoke gx3 8x1 fsd1,diag24,run5day,debug @@ -38,8 +34,8 @@ restart gx3 12x1 fsd12,debug,short smoke gx3 20x1 fsd12ww3,diag24,run1day,medium restart gbox128 8x1 short -restart gbox128 16x1 boxnodyn,short -restart gbox128 24x1 boxnodyn,short,debug +restart gbox128 16x1 boxdyn,short +restart gbox128 24x1 boxdyn,short,debug restart gbox128 12x1 boxadv,short smoke gbox128 20x1 boxadv,short,debug restart gbox128 32x1 boxrestore,short @@ -47,20 +43,20 @@ smoke gbox128 24x1 boxrestore,short,debug restart gbox80 1x1 box2001 smoke gbox80 1x1 boxslotcyl -smoke gx3 16x1 medium,run90day,yi2008 -restart gx3 12x1 short +smoke gx3 16x1 jra55_gx3_2008,medium,run90day +restart gx3 12x1 jra55_gx3,short #tcraig, hangs nodes intermittently on izumi -#smoke gx1 24x1 medium,run90day,yi2008 -#restart gx1 24x1 short +#smoke gx1 24x1 jra55_gx1_2008,medium,run90day +#restart gx1 24x1 jra55_gx1,short smoke gx3 16x1 bgcz -smoke gx3 16x1 bgczm,debug +smoke gx3 16x1 bgcz,debug smoke gx3 24x1 bgcskl,debug #tcraig, hangs nodes intermittently on izumi #restart gx1 128x1 bgcsklclim,medium #restart gx1 256x1 bgczclim,medium -decomp gx3 8x1x5x29x20 none +decomp gx3 8x1x5x29x20 restart gx3 1x1x50x58x4 droundrobin 
restart_gx3_8x1x25x29x2_dslenderX2 restart gx3 4x1x25x116x1 dslenderX1 restart_gx3_8x1x25x29x2_dslenderX2 restart gx3 12x1x4x29x9 dspacecurve restart_gx3_8x1x25x29x2_dslenderX2 @@ -68,13 +64,13 @@ restart gx3 16x1x8x10x10 droundrobin restart_gx3_8x1x25x29x2_ restart gx3 6x1x50x58x1 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 restart gx3 8x1x19x19x5 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 restart gx3 20x1x5x29x20 dsectrobin,short restart_gx3_8x1x25x29x2_dslenderX2 -restart gx3 32x1x5x10x12 drakeX2 restart_gx3_8x1x25x29x2_dslenderX2 +restart gx3 32x1x5x10x10 drakeX2 restart_gx3_8x1x25x29x2_dslenderX2 restart gx3 16x1x8x10x10 droundrobin,maskhalo restart_gx3_8x1x25x29x2_dslenderX2 restart gx3 4x1x25x29x4 droundrobin restart_gx3_8x1x25x29x2_dslenderX2 -smoke gx3 1x1x50x58x4 droundrobin,diag1,maskhalo,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -smoke gx3 4x1x25x116x1 dslenderX1,diag1,maskhalo,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -smoke gx3 20x1x5x29x20 dsectrobin,diag1,short,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -smoke gx3 16x1x8x10x10 droundrobin,diag1,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -smoke gx3 6x1x50x58x1 droundrobin,diag1,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum -smoke gx3 12x1x4x29x9 dspacecurve,diag1,maskhalo,reprosum,cmplog smoke_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 1x1x50x58x4 droundrobin,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 4x1x25x116x1 dslenderX1,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 20x1x5x29x20 dsectrobin,diag1,short,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 16x1x8x10x10 droundrobin,diag1,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 6x1x50x58x1 droundrobin,diag1,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum +logbfb gx3 12x1x4x29x9 dspacecurve,diag1,maskhalo,reprosum logbfb_gx3_8x1x25x29x2_diag1_dslenderX2_reprosum diff --git a/configuration/scripts/tests/report_results.csh b/configuration/scripts/tests/report_results.csh index e1f3a7342..d04cc5d59 100755 --- a/configuration/scripts/tests/report_results.csh +++ b/configuration/scripts/tests/report_results.csh @@ -49,7 +49,7 @@ set totl = `grep "#totl = " results.log | cut -c 9-` set pass = `grep "#pass = " results.log | cut -c 9-` set fail = `grep "#fail = " results.log | cut -c 9-` set cases = `grep -v "#" results.log | grep ${mach}_ | cut -d " " -f 2 | sort -u` -set envnames = `grep -v "#" results.log | grep ${mach}_ | cut -d "_" -f 2 | sort -u` +set compilers = `grep -v "#" results.log | grep ${mach}_ | cut -d "_" -f 2 | sort -u` #echo "debug ${repo}" #echo "debug ${bran}" @@ -82,26 +82,23 @@ if ("${shrepo}" !~ "*cice-consortium*") then endif set noglob -set green = "\![#00C000](images/00C000.png)" -set red = "\![#F00000](images/F00000.png)" -set orange = "\![#FFA500](images/FFA500.png)" -set yellow = "\![#FFE600](images/FFE600.png)" -set gray = "\![#AAAAAA](images/AAAAAA.png)" +set green = "\![#00C000](https://placehold.it/15/00C000/000000?text=+)" +set red = "\![#F00000](https://placehold.it/15/F00000/000000?text=+)" +set orange = "\![#FFA500](https://placehold.it/15/FFA500/000000?text=+)" +set yellow = "\![#FFE600](https://placehold.it/15/FFE600/000000?text=+)" +set gray = "\![#AAAAAA](https://placehold.it/15/AAAAAA/000000?text=+)" unset noglob #============================================================== # Create 
results table #============================================================== -foreach envname ( ${envnames} ) - - set machinfo = `grep -m 1 "#machinfo = " results.log | cut -d = -f 2` - set envinfo = `grep -m 1 "#envinfo ${envname} = " results.log | cut -d = -f 2` +foreach compiler ( ${compilers} ) set cnt = 0 set found = 1 while ($found == 1) - set ofile = "${shhash}.${mach}.${envname}.${xcdat}.${xctim}.$cnt" + set ofile = "${shhash}.${mach}.${compiler}.${xcdat}.${xctim}.$cnt" set outfile = "${wikiname}/${tsubdir}/${ofile}.md" if (-e ${outfile}) then @ cnt = $cnt + 1 @@ -129,7 +126,7 @@ EOF @ rothr = 0 foreach case ( ${cases} ) -if ( ${case} =~ *_${envname}_* ) then +if ( ${case} =~ *_${compiler}_* ) then # check that case results are meaningful set fbuild = `grep " ${case} " results.log | grep " build" | cut -c 1-4` @@ -295,9 +292,7 @@ cat >! ${outfile} << EOF - hash = ${hash} - hash created by ${hashuser} ${hashdate} - vers = ${vers} -- tested by ${user}, ${cdat} ${ctim} UTC -- ${mach} : ${machinfo} -- ${envname} : ${envinfo} +- tested on ${mach}, ${compiler}, ${user}, ${cdat} ${ctim} UTC - ${ttotl} total tests: ${tpass} pass, ${tfail} fail - ${ttotl} total regressions: ${rpass} pass, ${rfail} fail, ${rothr} other EOF @@ -331,9 +326,9 @@ if ($chk == 0) then cat >! ${hashfile} << EOF #### ${hash} -| machine | envname | version | date | test fail | comp fail | total | +| machine | compiler | version | date | test fail | comp fail | total | | ------ | ------ | ------ | ------ | ------ | ------ | ------ | -| ${mach} | ${envname} | ${vers} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | +| ${mach} | ${compiler} | ${vers} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | EOF if (-e ${hashfile}.prev) cat ${hashfile}.prev >> ${hashfile} @@ -341,7 +336,7 @@ if (-e ${hashfile}.prev) cat ${hashfile}.prev >> ${hashfile} else set oline = `grep -n "#### ${hash}" ${hashfile} | head -1 | cut -d : -f 1` @ nline = ${oline} + 3 - sed -i "$nline a | ${mach} | ${envname} | ${vers} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | " ${hashfile} + sed -i "$nline a | ${mach} | ${compiler} | ${vers} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | " ${hashfile} endif #===================== @@ -355,9 +350,9 @@ if ($chk == 0) then cat >! 
${machfile} << EOF
 #### ${mach}

-| version | hash | envname | date | test fail | comp fail | total |
+| version | hash | compiler | date | test fail | comp fail | total |
 | ------ | ------ | ------ | ------ | ------ | ------ | ------ |
-| ${vers} | ${shhash} | ${envname} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) |
+| ${vers} | ${shhash} | ${compiler} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) |
 EOF

 if (-e ${machfile}.prev) cat ${machfile}.prev >> ${machfile}
@@ -365,10 +360,10 @@ if (-e ${machfile}.prev) cat ${machfile}.prev >> ${machfile}
 else
   set oline = `grep -n "#### ${mach}" ${machfile} | head -1 | cut -d : -f 1`
   @ nline = ${oline} + 3
-  sed -i "$nline a | ${vers} | ${shhash} | ${envname} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | " ${machfile}
+  sed -i "$nline a | ${vers} | ${shhash} | ${compiler} | ${cdat} | ${tcolor} ${tfail}, ${tunkn} | ${rcolor} ${rfail}, ${rothr} | [${ttotl}](${ofile}) | " ${machfile}
 endif

-#foreach envname
+#foreach compiler
 end

 #=====================
diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst
index 6b97d2b8f..c32f1cdbf 100644
--- a/doc/source/cice_index.rst
+++ b/doc/source/cice_index.rst
@@ -5,12 +5,12 @@
 Index of primary variables and parameters
 ==========================================

-This index defines many (but not all) of the symbols used frequently in the CICE model
-code. All quantities in the code are expressed in MKS units (temperatures may take
-either Celsius or Kelvin units). Deprecated parameters are listed at the end.
-
-Namelist variables are partly included here, but they are fully documented in
-section :ref:`tabnamelist`.
+This index defines many of the symbols used frequently in the CICE model
+code. Values appearing in this list are fixed or recommended; most
+namelist parameters are indicated ( :math:`\bullet` ) with their default
+values. For other namelist options, see Section :ref:`tabnamelist`. All
+quantities in the code are expressed in MKS units (temperatures may take
+either Celsius or Kelvin units).

 .. csv-table:: *Alphabetical Index*
    :header: " ", " ", " "
@@ -28,25 +28,25 @@ section :ref:`tabnamelist`.
"a4Ds", "history field accumulations, 4D categories, vertical snow", "" "a4Df", "history field accumulations, 4D categories, fsd", "" "a_min", "minimum area concentration for computing velocity", "0.001" - "a_rapid_mode", "brine channel diameter", "" - "add_mpi_barriers", "turns on MPI barriers for communication throttling", "" - "advection", "type of advection algorithm used (‘remap’ or ‘upwind’)", "remap" + "a_rapid_mode", ":math:`{\bullet}` brine channel diameter", "" + "add_mpi_barriers", ":math:`\bullet` turns on MPI barriers for communication throttling", "" + "advection", ":math:`\bullet` type of advection algorithm used (‘remap’ or ‘upwind’)", "remap" "afsd(n)", "floe size distribution (in category n)", "" - "ahmax", "thickness above which ice albedo is constant", "0.3m" + "ahmax", ":math:`\bullet` thickness above which ice albedo is constant", "0.3m" "aice_extmin", "minimum value for ice extent diagnostic", "0.15" "aice_init", "concentration of ice at beginning of timestep", "" "aice0", "fractional open water area", "" "aice(n)", "total concentration of ice in grid cell (in category n)", "" - "albedo_type", "type of albedo parameterization (‘ccsm3’ or ‘constant’)", "" + "albedo_type", ":math:`\bullet` type of albedo parameterization (‘ccsm3’ or ‘constant’)", "" "albcnt", "counter for averaging albedo", "" "albice", "bare ice albedo", "" - "albicei", "near infrared ice albedo for thicker ice", "" - "albicev", "visible ice albedo for thicker ice", "" + "albicei", ":math:`\bullet` near infrared ice albedo for thicker ice", "" + "albicev", ":math:`\bullet` visible ice albedo for thicker ice", "" "albocn", "ocean albedo", "0.06" "albpnd", "melt pond albedo", "" "albsno", "snow albedo", "" - "albsnowi", "near infrared, cold snow albedo", "" - "albsnowv", "visible, cold snow albedo", "" + "albsnowi", ":math:`\bullet` near infrared, cold snow albedo", "" + "albsnowv", ":math:`\bullet` visible, cold snow albedo", "" "algalN", "algal nitrogen concentration", mmol/m\ :math:`^3` "alv(n)dr(f)", "albedo: visible (near IR), direct (diffuse)", "" "alv(n)dr(f)_ai", "grid-box-mean value of alv(n)dr(f)", "" @@ -60,23 +60,23 @@ section :ref:`tabnamelist`. 
"araftn", "area fraction of rafted ice", "" "aredistrn", "redistribution function: fraction of new ridge area", "" "ardgn", "fractional area of ridged ice", "" - "aspect_rapid_mode", "brine convection aspect ratio", "1" + "aspect_rapid_mode", ":math:`\bullet` brine convection aspect ratio", "1" "astar", "e-folding scale for participation function", "0.05" - "atmiter_conv", "convergence criteria for ustar", "0.00" - "atm_data_dir", "directory for atmospheric forcing data", "" - "atm_data_format", "format of atmospheric forcing files", "" - "atm_data_type", "type of atmospheric forcing", "" - "atmbndy", "atmo boundary layer parameterization ('similarity', ‘constant’, or 'mixed')", "" + "atmiter_conv", ":math:`\bullet` convergence criteria for ustar", "0.00" + "atm_data_dir", ":math:`\bullet` directory for atmospheric forcing data", "" + "atm_data_format", ":math:`\bullet` format of atmospheric forcing files", "" + "atm_data_type", ":math:`\bullet` type of atmospheric forcing", "" + "atmbndy", ":math:`\bullet` atmo boundary layer parameterization (‘default’ or ‘constant’)", "" "avail_hist_fields", "type for history field data", "" "awtidf", "weighting factor for near-ir, diffuse albedo", "0.36218" "awtidr", "weighting factor for near-ir, direct albedo", "0.00182" "awtvdf", "weighting factor for visible, diffuse albedo", "0.63282" "awtvdr", "weighting factor for visible, direct albedo", "0.00318" "**B**", "", "" - "bfbflag", "for bit-for-bit reproducible diagnostics, and reproducible outputs when using the VP solver", "" - "bgc_data_dir", "data directory for bgc", "" - "bgc_data_type", "source of silicate, nitrate data", "" - "bgc_flux_type", "ice–ocean flux velocity", "" + "bfb_flag", ":math:`\bullet` for bit-for-bit reproducible diagnostics", "" + "bgc_data_dir", ":math:`\bullet` data directory for bgc", "" + "bgc_data_type", ":math:`\bullet` source of silicate, nitrate data", "" + "bgc_flux_type", ":math:`\bullet` ice–ocean flux velocity", "" "bgc_tracer_type", "tracer_type for bgc tracers", "" "bgrid", "nondimensional vertical grid points for bio grid", "" "bignum", "a large number", ":math:`10^{30}`" @@ -92,15 +92,11 @@ section :ref:`tabnamelist`. "bTiz", "temperature of ice layers on bio grid", "" "**C**", "", "" "c", "real(\ :math:`n`)", "" - "rotate_wind", "if true, rotate wind/stress components to computational grid", "T" - "calc_dragio", "if true, calculate ``dragio`` from ``iceruf_ocn`` and ``thickness_ocn_layer1``", "F" - "calc_strair", "if true, calculate wind stress", "T" - "calc_Tsfc", "if true, calculate surface temperature", "T" - "capping", "parameter associated with capping method of viscosities", "1.0" - "capping_method", "namelist to specify capping method", "hibler" + "calc_strair", ":math:`\bullet` if true, calculate wind stress", "T" + "calc_Tsfc", ":math:`\bullet` if true, calculate surface temperature", "T" "Cdn_atm", "atmospheric drag coefficient", "" "Cdn_ocn", "ocean drag coefficient", "" - "Cf", "ratio of ridging work to PE change in ridging", "17." + "Cf", ":math:`\bullet` ratio of ridging work to PE change in ridging", "17." "cgrid", "vertical grid points for ice grid (compare bgrid)", "" "char_len", "length of character variable strings", "80" "char_len_long", "length of longer character variable strings", "256" @@ -110,13 +106,12 @@ section :ref:`tabnamelist`. 
"cm_to_m", "cm to meters conversion", "0.01" "coldice", "value for constant albedo parameterization", "0.70" "coldsnow", "value for constant albedo parameterization", "0.81" - "conduct", "conductivity parameterization", "" + "conduct", ":math:`\bullet` conductivity parameterization", "" "congel", "basal ice growth", "m" "conserv_check", "if true, check conservation", "" "cosw", "cosine of the turning angle in water", "1." "coszen", "cosine of the zenith angle", "" "Cp", "proportionality constant for potential energy", "kg/m\ :math:`^2`/s\ :math:`^2`" - "cpl_frazil", ":math:`\bullet` type of frazil ice coupling", "" "cp_air", "specific heat of air", "1005.0 J/kg/K" "cp_ice", "specific heat of fresh ice", "2106. J/kg/K" "cp_ocn", "specific heat of sea water", "4218. J/kg/K" @@ -142,88 +137,67 @@ section :ref:`tabnamelist`. "dardg2(n)dt", "rate of fractional area gain by new ridges (category n)", "1/s" "daymo", "number of days in one month", "" "daycal", "day number at end of month", "" - "days_per_year", "number of days in one year", "365" - "day_init", "the initial day of the month", "" + "days_per_year", ":math:`\bullet` number of days in one year", "365" "dbl_kind", "definition of double precision", "selected_real_kind(13)" - "debug_blocks", "write extra diagnostics for blocks and decomposition", ".false." - "debug_forcing", "write extra diagnostics for forcing inputs", ".false." - "debug_model", "Logical that controls extended model point debugging.", "" - "debug_model_i", "Local i gridpoint that defines debug_model point output.", "" - "debug_model_iblk", "Local iblk value that defines debug_model point output.", "" - "debug_model_j", "Local j gridpoint that defines debug_model point output.", "" - "debug_model_task", "Local mpi task value that defines debug_model point output.", "" - "debug_model_step", "Initial timestep for output from the debug_model flag.", "" + "dbug", ":math:`\bullet` write extra diagnostics", ".false." 
"Delta", "function of strain rates (see Section :ref:`dynam`)", "1/s" - "deltaminEVP", "minimum value of Delta for EVP (see Section :ref:`dynam`)", "1/s" - "deltaminVP", "minimum value of Delta for VP (see Section :ref:`dynam`)", "1/s" "default_season", "Season from which initial values of forcing are set.", "winter" "denom1", "combination of constants for stress equation", "" "depressT", "ratio of freezing temperature to salinity of brine", "0.054 deg/ppt" "dhbr_bt", "change in brine height at the bottom of the column", "" "dhbr_top", "change in brine height at the top of the column", "" "dhsn", "depth difference for snow on sea ice and pond ice", "" - "diag_file", "diagnostic output file (alternative to standard out)", "" - "diag_type", "where diagnostic output is written", "stdout" - "diagfreq", "how often diagnostic output is written (10 = once per 10 dt)", "" + "diag_file", ":math:`\bullet` diagnostic output file (alternative to standard out)", "" + "diag_type", ":math:`\bullet` where diagnostic output is written", "stdout" + "diagfreq", ":math:`\bullet` how often diagnostic output is written (10 = once per 10 dt)", "" "distrb", "distribution data type", "" "distrb_info", "block distribution information", "" - "distribution_type", "method used to distribute blocks on processors", "" - "distribution_weight", "weighting method used to compute work per block", "" + "distribution_type", ":math:`\bullet` method used to distribute blocks on processors", "" + "distribution_weight", ":math:`\bullet` weighting method used to compute work per block", "" "divu", "strain rate I component, velocity divergence", "1/s" "divu_adv", "divergence associated with advection", "1/s" - "DminTarea", "deltamin \* tarea", "m\ :math:`^2`/s" "dms", "dimethyl sulfide concentration", "mmol/m\ :math:`^3`" "dmsp", "dimethyl sulfoniopropionate concentration", "mmol/m\ :math:`^3`" - "dpscale", "time scale for flushing in permeable ice", ":math:`1\times 10^{-3}`" - "drhosdwind", "wind compaction factor for snow", "27.3 kg s/m\ :math:`^{4}`" + "dpscale", ":math:`\bullet` time scale for flushing in permeable ice", ":math:`1\times 10^{-3}`" "dragio", "drag coefficient for water on ice", "0.00536" - "dSdt_slow_mode", "drainage strength parameter", "" + "dSdt_slow_mode", ":math:`\bullet` drainage strength parameter", "" "dsnow", "change in snow thickness", "m" - "dt", "thermodynamics time step", "3600. s" + "dt", ":math:`\bullet` thermodynamics time step", "3600. s" "dt_dyn", "dynamics/ridging/transport time step", "" - "dT_mlt", ":math:`\Delta` temperature per :math:`\Delta` snow grain radius", "1. deg" + "dT_mlt", ":math:`\bullet` :math:`\Delta` temperature per :math:`\Delta` snow grain radius", "1. 
deg" "dte", "subcycling time step for EVP dynamics (:math:`\Delta t_e`)", "s" "dte2T", "dte / 2(damping time scale)", "" "dtei", "1/dte, where dte is the EVP subcycling time step", "1/s" - "dump_file", "output file for restart dump", "" - "dumpfreq", "dump frequency for restarts, y, m, d, h or 1", "" - "dumpfreq_base", "reference date for restart output, zero or init", "" - "dumpfreq_n", "restart output frequency", "" - "dump_last", "if true, write restart on last time step of simulation", "" + "dump_file", ":math:`\bullet` output file for restart dump", "" + "dumpfreq", ":math:`\bullet` dump frequency for restarts, y, m, d, h or 1", "" + "dumpfreq_n", ":math:`\bullet` restart output frequency", "" + "dump_last", ":math:`\bullet` if true, write restart on last time step of simulation", "" "dwavefreq", "widths of wave frequency bins", "1/s" - "dxE", "width of E cell (:math:`\Delta x`) through the middle", "m" - "dxN", "width of N cell (:math:`\Delta x`) through the middle", "m" - "dxT", "width of T cell (:math:`\Delta x`) through the middle", "m" - "dxU", "width of U cell (:math:`\Delta x`) through the middle", "m" "dxhy", "combination of HTE values", "" - "dyE", "height of E cell (:math:`\Delta y`) through the middle", "m" - "dyN", "height of N cell (:math:`\Delta y`) through the middle", "m" - "dyT", "height of T cell (:math:`\Delta y`) through the middle", "m" - "dyU", "height of U cell (:math:`\Delta y`) through the middle", "m" + "dxt", "width of T cell (:math:`\Delta x`) through the middle", "m" + "dxu", "width of U cell (:math:`\Delta x`) through the middle", "m" "dyhx", "combination of HTN values", "" + "dyn_dt", "dynamics and transport time step (:math:`\Delta t_{dyn}`)", "s" + "dyt", "height of T cell (:math:`\Delta y`) through the middle", "m" + "dyu", "height of U cell (:math:`\Delta y`) through the middle", "m" "dvidtd", "ice volume tendency due to dynamics/transport", "m/s" "dvidtt", "ice volume tendency due to thermodynamics", "m/s" "dvirdg(n)dt", "ice volume ridging rate (category n)", "m/s" "**E**", "", "" "e11, e12, e22", "strain rate tensor components", "" - "earea", "area of E-cell", "m\ :math:`^2`" "ecci", "yield curve minor/major axis ratio, squared", "1/4" "eice(n)", "energy of melting of ice per unit area (in category n)", "J/m\ :math:`^2`" - "emask", "land/boundary mask, T east edge (E-cell)", "" - "emissivity", "emissivity of snow and ice", "0.985" + "emissivity", "emissivity of snow and ice", "0.95" "eps13", "a small number", "10\ :math:`^{-13}`" "eps16", "a small number", "10\ :math:`^{-16}`" "esno(n)", "energy of melting of snow per unit area (in category n)", "J/m\ :math:`^2`" - "etax2", "2 x eta (shear viscosity)", "kg/s" "evap", "evaporative water flux", "kg/m\ :math:`^2`/s" - "ew_boundary_type", "type of east-west boundary condition", "" - "elasticDamp", "coefficient for calculating the parameter E, 0\ :math:`<` elasticDamp :math:`<`\ 1", "0.36" - "e_yieldcurve", "yield curve minor/major axis ratio", "2" - "e_plasticpot", "plastic potential minor/major axis ratio", "2" + "ew_boundary_type", ":math:`\bullet` type of east-west boundary condition", "" + "eyc", "coefficient for calculating the parameter E, 0\ :math:`<` eyc :math:`<`\ 1", "0.36" "**F**", "", "" "faero_atm", "aerosol deposition rate", "kg/m\ :math:`^2`/s" "faero_ocn", "aerosol flux to the ocean", "kg/m\ :math:`^2`/s" - "fbot_xfer_type", "type of heat transfer coefficient under ice", "" + "fbot_xfer_type", ":math:`\bullet` type of heat transfer coefficient under ice", "" "fcondtop(n)(_f)", 
"conductive heat flux", "W/m\ :math:`^2`" "fcor_blk", "Coriolis parameter", "1/s" "ferrmax", "max allowed energy flux error (thermodynamics)", "1x :math:`10^{-3}` W/m\ :math:`^2`" @@ -253,8 +227,8 @@ section :ref:`tabnamelist`. "flux_bio_ai", "all biogeochemistry fluxes passed to ocean, grid cell mean", "" "flw", "incoming longwave radiation", "W/m\ :math:`^2`" "flwout", "outgoing longwave radiation", "W/m\ :math:`^2`" - "fmU", "Coriolis parameter * mass in U cell", "kg/s" - "formdrag", "calculate form drag", "" + "fm", "Coriolis parameter * mass in U cell", "kg/s" + "formdrag", ":math:`\bullet` calculate form drag", "" "fpond", "fresh water flux to ponds", "kg/m\ :math:`^2`/s" "fr_resp", "bgc respiration fraction", "0.05" "frain", "rainfall rate", "kg/m\ :math:`^2`/s" @@ -265,14 +239,13 @@ section :ref:`tabnamelist`. "frzmlt", "freezing/melting potential", "W/m\ :math:`^2`" "frzmlt_init", "freezing/melting potential at beginning of time step", "W/m\ :math:`^2`" "frzmlt_max", "maximum magnitude of freezing/melting potential", "1000. W/m\ :math:`^2`" - "frzpnd", "Stefan refreezing of melt ponds", "‘hlid’" + "frzpnd", ":math:`\bullet` Stefan refreezing of melt ponds", "‘hlid’" "fsalt", "net salt flux to ocean", "kg/m\ :math:`^2`/s" "fsalt_ai", "grid-box-mean salt flux to ocean (fsalt)", "kg/m\ :math:`^2`/s" "fsens", "sensible heat flux", "W/m\ :math:`^2`" "fsnow", "snowfall rate", "kg/m\ :math:`^2`/s" "fsnowrdg", "snow fraction that survives in ridging", "0.5" "fsurf(n)(_f)", "net surface heat flux excluding fcondtop", "W/m\ :math:`^2`" - "fsloss", "rate of snow loss to leads", "kg/m\ :math:`^{2}` s" "fsw", "incoming shortwave radiation", "W/m\ :math:`^2`" "fswabs", "total absorbed shortwave radiation", "W/m\ :math:`^2`" "fswfac", "scaling factor to adjust ice quantities for updated data", "" @@ -284,64 +257,44 @@ section :ref:`tabnamelist`. 
"fswthru_idr", "near IR direct shortwave penetrating to ocean", "W/m\ :math:`^2`" "fswthru_idf", "near IR diffuse shortwave penetrating to ocean", "W/m\ :math:`^2`" "fswthru_ai", "grid-box-mean shortwave penetrating to ocean (fswthru)", "W/m\ :math:`^2`" - "fyear", "current forcing data year", "" - "fyear_final", "last forcing data year", "" - "fyear_init", "initial forcing data year", "" + "fyear", "current data year", "" + "fyear_final", "last data year", "" + "fyear_init", ":math:`\bullet` initial data year", "" "**G**", "", "" "gravit", "gravitational acceleration", "9.80616 m/s\ :math:`^2`" - "grid_atm", "grid structure for atm forcing/coupling fields, 'A', 'B', 'C', etc", "" - "grid_atm_dynu", "grid for atm dynamic-u forcing/coupling fields, 'T', 'U', 'N', 'E'", "" - "grid_atm_dynv", "grid for atm dynamic-v forcing/coupling fields, 'T', 'U', 'N', 'E'", "" - "grid_atm_thrm", "grid for atm thermodynamic forcing/coupling fields, 'T', 'U', 'N', 'E'", "" - "grid_file", "input file for grid info", "" - "grid_format", "format of grid files", "" - "grid_ice", "structure of the model ice grid, ‘B’, ‘C’, etc", "" - "grid_ice_dynu", "grid for ice dynamic-u model fields, 'T', 'U', 'N', 'E'", "" - "grid_ice_dynv", "grid for ice dynamic-v model fields, 'T', 'U', 'N', 'E'", "" - "grid_ice_thrm", "grid for ice thermodynamic model fields, 'T', 'U', 'N', 'E'", "" - "grid_ocn", "grid structure for ocn forcing/coupling fields, 'A', 'B', 'C', etc", "" - "grid_ocn_dynu", "grid for ocn dynamic-u forcing/coupling fields, 'T', 'U', 'N', 'E'", "" - "grid_ocn_dynv", "grid for ocn dynamic-v forcing/coupling fields, 'T', 'U', 'N', 'E'", "" - "grid_ocn_thrm", "grid for ocn thermodynamic forcing/coupling fields, 'T', 'U', 'N', 'E'", "" - "grid_type", "‘rectangular’, ‘displaced_pole’, ‘column’ or ‘regional’", "" - "gridcpl_file", "input file for coupling grid info", "" + "grid_file", ":math:`\bullet` input file for grid info", "" + "grid_format", ":math:`\bullet` format of grid files", "" + "grid_type", ":math:`\bullet` ‘rectangular’, ‘displaced_pole’, ‘column’ or ‘regional’", "" + "gridcpl_file", ":math:`\bullet` input file for coupling grid info", "" "grow_net", "specific biogeochemistry growth rate per grid cell", "s :math:`^{-1}`" "Gstar", "piecewise-linear ridging participation function parameter", "0.15" "**H**", "", "" "halo_info", "information for updating ghost cells", "" + "heat_capacity", ":math:`\bullet` if true, use salinity-dependent thermodynamics", "T" "hfrazilmin", "minimum thickness of new frazil ice", "0.05 m" "hi_min", "minimum ice thickness for thinnest ice category", "0.01 m" "hi_ssl", "ice surface scattering layer thickness", "0.05 m" "hicen", "ice thickness in category n", "m" - "highfreq", "high-frequency atmo coupling", "F" + "highfreq", ":math:`\bullet` high-frequency atmo coupling", "F" "hin_old", "ice thickness prior to growth/melt", "m" "hin_max", "category thickness limits", "m" - "hist_avg", "if true, write averaged data instead of snapshots", "T,T,T,T,T" - "histfreq", "units of history output frequency: y, m, w, d or 1", "m,x,x,x,x" - "histfreq_base", "reference date for history output, zero or init", "" - "histfreq_n", "integer output frequency in histfreq units", "1,1,1,1,1" - "history_chunksize", "history chunksizes in x,y directions (_format='hdf5' only)", "0,0" - "history_deflate", "compression level for history (_format='hdf5' only)", "0" - "history_dir", "path to history output files", "" - "history_file", "history output file prefix", "" - "history_format", "history file 
format", "" - "history_iotasks", "history output total number of tasks used", "" - "history_precision", "history output precision: 4 or 8 byte", "4" - "history_rearranger", "history output io rearranger method", "" - "history_root", "history output io root task id", "" - "history_stride", "history output io task stride", "" - "hist_time_axis", "history file time axis interval location: begin, middle, end", "end" - "hist_suffix", "suffix to `history_file` in filename. x means no suffix", "x,x,x,x,x" + "hist_avg", ":math:`\bullet` if true, write averaged data instead of snapshots", "T" + "histfreq", ":math:`\bullet` units of history output frequency: y, m, w, d or 1", "" + "histfreq_n", ":math:`\bullet` integer output frequency in histfreq units", "" + "history_dir", ":math:`\bullet` path to history output files", "" + "history_file", ":math:`\bullet` history output file prefix", "" + "history_format", ":math:`\bullet` history file format", "" + "history_precision", ":math:`\bullet` history output precision: 4 or 8 byte", "4" "hm", "land/boundary mask, thickness (T-cell)", "" "hmix", "ocean mixed layer depth", "20. m" "hour", "hour of the year", "" "hp0", "pond depth at which shortwave transition to bare ice occurs", "0.2 m" - "hp1", "critical ice lid thickness for topo ponds (dEdd)", "0.01 m" + "hp1", ":math:`\bullet` critical ice lid thickness for topo ponds (dEdd)", "0.01 m" "hpmin", "minimum melt pond depth (shortwave)", "0.005 m" "hpondn", "melt pond depth", "m" "hs_min", "minimum thickness for which :math:`T_s` is computed", "1.\ :math:`\times`\ 10\ :math:`^{-4}` m" - "hs0", "snow depth at which transition to ice occurs (dEdd)", "m" - "hs1", "snow depth of transition to pond ice", "0.03 m" + "hs0", ":math:`\bullet` snow depth at which transition to ice occurs (dEdd)", "0.03 m" + "hs1", ":math:`\bullet` snow depth of transition to pond ice", "0.03 m" "hs_ssl", "snow surface scattering layer thickness", "0.04 m" "Hstar", "determines mean thickness of ridged ice", "25. m" "HTE", "length of eastern edge (:math:`\Delta y`) of T-cell", "m" @@ -353,66 +306,60 @@ section :ref:`tabnamelist`. "i0vis","fraction of penetrating visible solar radiation", "0.70" "iblkp","block on which to write debugging data", "" "i(j)block", "Cartesian i,j position of block", "" - "ice_data_conc", "ice initialization concentration, used mainly for box tests", "" - "ice_data_dist", "ice initialization distribution, used mainly for box tests", "" - "ice_data_type", "ice initialization mask, used mainly for box tests", "" "ice_hist_field", "type for history variables", "" - "ice_ic", "choice of initial conditions (see :ref:`tab-ic`)", "" + "ice_ic", ":math:`\bullet` choice of initial conditions (see :ref:`tab-ic`)", "" "ice_stdout", "unit number for standard output", "" "ice_stderr", "unit number for standard error output", "" - "ice_ref_salinity", "reference salinity for ice–ocean exchanges", "" + "ice_ref_salinity", "reference salinity for ice–ocean exchanges", "4. 
ppt" "icells", "number of grid cells with specified property (for vectorization)", "" - "iceruf", "ice surface roughness at atmosphere interface", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m" - "iceruf_ocn", "under-ice roughness (at ocean interface)", "0.03 m" - "iceEmask", "dynamics ice extent mask (E-cell)", "" - "iceNmask", "dynamics ice extent mask (N-cell)", "" - "iceTmask", "dynamics ice extent mask (T-cell)", "" - "iceUmask", "dynamics ice extent mask (U-cell)", "" + "iceruf", "ice surface roughness", "5.\ :math:`\times`\ 10\ :math:`^{-4}` m" + "icetmask", "ice extent mask (T-cell)", "" + "iceumask", "ice extent mask (U-cell)", "" "idate", "the date at the end of the current time step (yyyymmdd)", "" "idate0", "initial date", "" "ierr", "general-use error flag", "" "igrid", "interface points for vertical bio grid", "" "i(j)hi", "last i(j) index of physical domain (local)", "" "i(j)lo", "first i(j) index of physical domain (local)", "" - "incond_dir", "directory to write snapshot of initial condition", "" - "incond_file", "prefix for initial condition file name", "" + "incond_dir", ":math:`\bullet` directory to write snapshot of initial condition", "" + "incond_file", ":math:`\bullet` prefix for initial condition file name", "" "int_kind", "definition of an integer", "selected_real_kind(6)" "integral_order", "polynomial order of quadrature integrals in remapping", "3" "ip, jp", "local processor coordinates on which to write debugging data", "" "istep", "local step counter for time loop", "" - "istep0", "number of steps taken in previous run", "0" + "istep0", ":math:`\bullet` number of steps taken in previous run", "0" "istep1", "total number of steps at current time step", "" "Iswabs", "shortwave radiation absorbed in ice layers", "W/m\ :math:`^2`" "**J**", "", "" "**K**", "", "" - "kalg", "absorption coefficient for algae", "" + "kalg", ":math:`\bullet` absorption coefficient for algae", "" "kappav", "visible extinction coefficient in ice, wavelength\ :math:`<`\ 700nm", "1.4 m\ :math:`^{-1}`" - "kcatbound", "category boundary formula", "" - "kdyn", "type of dynamics (1 = EVP, 2 = EAP, 3 = VP, 0,-1 = off)", "1" + "kcatbound", ":math:`\bullet` category boundary formula", "" + "kdyn", ":math:`\bullet` type of dynamics (1 = EVP, 0 = off)", "1" "kg_to_g", "kg to g conversion factor", "1000." 
"kice", "thermal conductivity of fresh ice (:cite:`Bitz99`)", "2.03 W/m/deg" - "kitd", "type of itd conversions (0 = delta function, 1 = linear remap)", "1" - "kmt_file", "input file for land mask info", "" - "kmt_type", "file, default, channel, wall, or boxislands", "file" - "krdg_partic", "ridging participation function", "1" - "krdg_redist", "ridging redistribution function", "1" + "kitd", ":math:`\bullet` type of itd conversions (0 = delta function, 1 = linear remap)", "1" + "kmt_file", ":math:`\bullet` input file for land mask info", "" + "krdg_partic", ":math:`\bullet` ridging participation function", "1" + "krdg_redist", ":math:`\bullet` ridging redistribution function", "1" "krgdn", "mean ridge thickness per thickness of ridging ice", "" + "kseaice", "thermal conductivity of ice for zero-layer thermodynamics", "2.0 W/m/deg" "ksno", "thermal conductivity of snow", "0.30 W/m/deg" - "kstrength", "ice stength formulation (1= :cite:`Rothrock75`, 0 = :cite:`Hibler79`)", "1" - "ktherm", "thermodynamic formulation (-1 = off, 1 = :cite:`Bitz99`, 2 = mushy)", "" + "kstrength", ":math:`\bullet` ice stength formulation (1= :cite:`Rothrock75`, 0 = :cite:`Hibler79`)", "1" + "ktherm", ":math:`\bullet` thermodynamic formulation (0 = zero-layer, 1 = :cite:`Bitz99`, 2 = mushy)", "" "**L**", "", "" "l_brine", "flag for brine pocket effects", "" "l_fixed_area", "flag for prescribing remapping fluxes", "" - "l_mpond_fresh", "if true, retain (topo) pond water until ponds drain", "" - "latpnt", "desired latitude of diagnostic points", "degrees N" + "l_mpond_fresh", ":math:`\bullet` if true, retain (topo) pond water until ponds drain", "" + "latpnt", ":math:`\bullet` desired latitude of diagnostic points", "degrees N" "latt(u)_bounds", "latitude of T(U) grid cell corners", "degrees N" - "lcdf64", "if true, use 64-bit  format", "" + "lcdf64", ":math:`\bullet` if true, use 64-bit  format", "" "Lfresh", "latent heat of melting of fresh ice = Lsub - Lvap", "J/kg" "lhcoef", "transfer coefficient for latent heat", "" "lmask_n(s)", "northern (southern) hemisphere mask", "" "local_id", "local address of block in current distribution", "" "log_kind", "definition of a logical variable", "kind(.true.)" - "lonpnt", "desired longitude of diagnostic points", "degrees E" + "lonpnt", ":math:`\bullet` desired longitude of diagnostic points", "degrees E" "lont(u)_bounds", "longitude of T(U) grid cell corners", "degrees E" "Lsub", "latent heat of sublimation for fresh water", "2.835\ :math:`\times` 10\ :math:`^6` J/kg" "ltripole_grid", "flag to signal use of tripole grid", "" @@ -423,44 +370,37 @@ section :ref:`tabnamelist`. "m1", "constant for lateral melt rate", "1.6\ :math:`\times`\ 10\ :math:`^{-6}` m/s deg\ :math:`^{-m2}`" "m2", "constant for lateral melt rate", "1.36" "m2_to_km2", "m\ :math:`^2` to km\ :math:`^2` conversion", "1\ :math:`\times`\ 10\ :math:`^{-6}`" - "maskhalo_bound", "turns on *bound_state* halo masking", "" - "maskhalo_dyn", "turns on dynamics halo masking", "" - "maskhalo_remap", "turns on transport halo masking", "" + "maskhalo_bound", ":math:`\bullet` turns on *bound_state* halo masking", "" + "maskhalo_dyn", ":math:`\bullet` turns on dynamics halo masking", "" + "maskhalo_remap", ":math:`\bullet` turns on transport halo masking", "" "master_task", "task ID for the controlling processor", "" "max_blocks", "maximum number of blocks per processor", "" "max_ntrcr", "maximum number of tracers available", "5" "maxraft", "maximum thickness of ice that rafts", "1. 
m" - "mday", "model day of the month", "" + "mday", "day of the month", "" "meltb", "basal ice melt", "m" "meltl", "lateral ice melt", "m" "melts", "snow melt", "m" - "meltsliq", "snow melt mass", "kg/m\ :math:`^{2}`" - "meltsliqn", "snow melt mass in category n", "kg/m\ :math:`^{2}`" "meltt", "top ice melt", "m" "min_salin", "threshold for brine pockets", "0.1 ppt" "mlt_onset", "day of year that surface melt begins", "" - "mmonth", "model month number", "" + "month", "the month number", "" "monthp", "previous month number", "" - "month_init", "the initial month", "" "mps_to_cmpdy", "m per s to cm per day conversion", "8.64\ :math:`\times`\ 10\ :math:`^6`" - "msec", "model seconds elasped into day", "" "mtask", "local processor number that writes debugging data", "" - "mu_rdg", "e-folding scale of ridged ice", "" - "myear", "model year", "" - "myear_max", "maximum allowed model year", "" + "mu_rdg", ":math:`\bullet` e-folding scale of ridged ice", "" "my_task", "task ID for the current processor", "" "**N**", "", "" "n_aero", "number of aerosol species", "" - "narea", "area of N-cell", "m\ :math:`^2`" - "natmiter", "number of atmo boundary layer iterations", "5" + "natmiter", ":math:`\bullet` number of atmo boundary layer iterations", "5" "nblocks", "number of blocks on current processor", "" "nblocks_tot", "total number of blocks in decomposition", "" "nblocks_x(y)", "total number of blocks in x(y) direction", "" "nbtrcr", "number of biology tracers", "" "ncat", "number of ice categories", "5" "ncat_hist", "number of categories written to history", "" - "ndte", "number of subcycles", "120" - "ndtd", "number of dynamics/advection steps under thermo", "1" + "ndte", ":math:`\bullet` number of subcycles", "120" + "ndtd", ":math:`\bullet` number of dynamics/advection steps under thermo", "1" "new_day", "flag for beginning new day", "" "new_hour", "flag for beginning new hour", "" "new_month", "flag for beginning new month", "" @@ -473,12 +413,10 @@ section :ref:`tabnamelist`. "nilyr", "number of ice layers in each category", "7" "nit", "nitrate concentration", "mmol/m\ :math:`^3`" "nlt_bgc_[chem]", "ocean sources and sinks for biogeochemistry", "" - "nmask", "land/boundary mask, T north edge (N-cell)", "" "nml_filename", "namelist file name", "" - "nprocs", "total number of processors", "" - "npt", "total run length values associate with npt_unit", "" - "npt_unit", "units of the run length, number set by npt", "" - "ns_boundary_type", "type of north-south boundary condition", "" + "nprocs", ":math:`\bullet` total number of processors", "" + "npt", ":math:`\bullet` total number of time steps (dt)", "" + "ns_boundary_type", ":math:`\bullet` type of north-south boundary condition", "" "nslyr", "number of snow layers in each category", "" "nspint", "number of solar spectral intervals", "" "nstreams", "number of history output streams (frequencies)", "" @@ -504,13 +442,14 @@ section :ref:`tabnamelist`. 
"nvarz", "number of category, vertical grid fields written to history", "" "nx(y)_block", "total number of gridpoints on block in x(y) direction", "" "nx(y)_global", "number of physical gridpoints in x(y) direction, global domain", "" + "nyr", "year number", "" "**O**", "", "" "ocean_bio", "concentrations of bgc constituents in the ocean", "" - "oceanmixed_file", "data file containing ocean forcing data", "" - "oceanmixed_ice", "if true, use internal ocean mixed layer", "" - "ocn_data_dir", "directory for ocean forcing data", "" - "ocn_data_format", "format of ocean forcing files", "" - "ocn_data_type", "source of surface temperature, salinity data", "" + "oceanmixed_file", ":math:`\bullet` data file containing ocean forcing data", "" + "oceanmixed_ice", ":math:`\bullet` if true, use internal ocean mixed layer", "" + "ocn_data_dir", ":math:`\bullet` directory for ocean forcing data", "" + "ocn_data_format", ":math:`\bullet` format of ocean forcing files", "" + "ocn_data_type", ":math:`\bullet` source of surface temperature, salinity data", "" "omega", "angular velocity of Earth", "7.292\ :math:`\times`\ 10\ :math:`^{-5}` rad/s" "opening", "rate of ice opening due to divergence and shear", "1/s" "**P**", "", "" @@ -535,10 +474,10 @@ section :ref:`tabnamelist`. "p6", "3/5", "" "p666", "2/3", "" "p75", "3/4", "" - "phi_c_slow_mode", "critical liquid fraction", "" - "phi_i_mushy", "solid fraction at lower boundary", "" + "phi_c_slow_mode", ":math:`\bullet` critical liquid fraction", "" + "phi_i_mushy", ":math:`\bullet` solid fraction at lower boundary", "" "phi_sk", "skeletal layer porosity", "" - "phi_snow", "snow porosity for brine height tracer", "" + "phi_snow", ":math:`\bullet` snow porosity for brine height tracer", "" "pi", ":math:`\pi`", "" "pi2", ":math:`2\pi`", "" "pih", ":math:`\pi /2`", "" @@ -546,15 +485,16 @@ section :ref:`tabnamelist`. "pi(j,b,m)loc", "x (y, block, task) location of diagnostic points", "" "plat", "grid latitude of diagnostic points", "" "plon", "grid longitude of diagnostic points", "" - "pndaspect", "aspect ratio of pond changes (depth:area)", "0.8" - "pointer_file", "input file for restarting", "" + "pndaspect", ":math:`\bullet` aspect ratio of pond changes (depth:area)", "0.8" + "pointer_file", ":math:`\bullet` input file for restarting", "" "potT", "atmospheric potential temperature", "K" "PP_net", "total primary productivity per grid cell", "mg C/m\ :math:`^2`/s" - "precip_units", "liquid precipitation data units", "" - "print_global", "if true, print global data", "F" - "print_points", "if true, print point data", "F" - "processor_shape", "descriptor for processor aspect ratio", "" - "Pstar", "ice strength parameter", "2.75\ :math:`\times`\ 10\ :math:`^4`\ N/m\ :math:`^2`" + "precip_units", ":math:`\bullet` liquid precipitation data units", "" + "print_global", ":math:`\bullet` if true, print global data", "F" + "print_points", ":math:`\bullet` if true, print point data", "F" + "processor_shape", ":math:`\bullet` descriptor for processor aspect ratio", "" + "prs_sig", "replacement pressure", "N/m" + "Pstar", "ice strength parameter", "2.75\ :math:`\times`\ 10\ :math:`^4`\ N/m" "puny", "a small positive number", "1\ :math:`\times`\ 10\ :math:`^{-11}`" "**Q**", "", "" "Qa", "specific humidity at 10 m", "kg/kg" @@ -566,102 +506,66 @@ section :ref:`tabnamelist`. "R_C2N", "algal carbon to nitrate factor", "7. mole/mole" "R_gC2molC", "mg/mmol carbon", "12.01 mg/mole" "R_chl2N", "algal chlorophyll to nitrate factor", "3. 
mg/mmol" - "R_ice", "parameter for Delta-Eddington ice albedo", "" - "R_pnd", "parameter for Delta-Eddington pond albedo", "" + "R_ice", ":math:`\bullet` parameter for Delta-Eddington ice albedo", "" + "R_pnd", ":math:`\bullet` parameter for Delta-Eddington pond albedo", "" "R_S2N", "algal silicate to nitrate factor", "0.03 mole/mole" - "R_snw", "parameter for Delta-Eddington snow albedo", "" + "R_snw", ":math:`\bullet` parameter for Delta-Eddington snow albedo", "" "r16_kind", "definition of quad precision", "selected_real_kind(26)", "" - "Rac_rapid_mode", "critical Rayleigh number", "10" + "Rac_rapid_mode", ":math:`\bullet` critical Rayleigh number", "10" "rad_to_deg", "degree-radian conversion", ":math:`180/\pi`" "radius", "earth radius", "6.37\ :math:`\times`\ 10\ :math:`^6` m" "rdg_conv", "convergence for ridging", "1/s" "rdg_shear", "shear for ridging", "1/s" "real_kind", "definition of single precision real", "selected_real_kind(6)" "refindx", "refractive index of sea ice", "1.310" - "rep_prs", "replacement pressure", "N/m" "revp", "real(revised_evp)", "" - "restart", "if true, initialize ice state from file", "T" - "restart_age", "if true, read age restart file", "" - "restart_bgc", "if true, read bgc restart file", "" - "restart_chunksize", "restart chunksizes in x,y directions (_format='hdf5' only)", "0,0" - "restart_deflate", "compression level for restart (_format='hdf5' only)", "0" - "restart_dir", "path to restart/dump files", "" - "restart_file", "restart file prefix", "" - "restart_format", "restart file format", "" - "restart_iotasks", "restart output total number of tasks used", "" - "restart_rearranger", "restart output io rearranger method", "" - "restart_root", "restart output io root task id", "" - "restart_stride", "restart output io task stride", "" - "restart_[tracer]", "if true, read tracer restart file", "" - "restart_ext", "if true, read/write halo cells in restart file", "" - "restart_coszen", "if true, read/write coszen in restart file", "" - "restore_bgc", "if true, restore nitrate/silicate to data", "" - "restore_ice", "if true, restore ice state along lateral boundaries", "" - "restore_ocn", "restore sst to data", "" - "revised_evp", "if true, use revised EVP parameters and approach", "" - "rfracmin", "minimum melt water fraction added to ponds", "0.15" - "rfracmax", "maximum melt water fraction added to ponds", "1.0" + "restart", ":math:`\bullet` if true, initialize using restart file instead of defaults", "T" + "restart_age", ":math:`\bullet` if true, read age restart file", "" + "restart_bgc", ":math:`\bullet` if true, read bgc restart file", "" + "restart_dir", ":math:`\bullet` path to restart/dump files", "" + "restart_file", ":math:`\bullet` restart file prefix", "" + "restart_format", ":math:`\bullet` restart file format", "" + "restart_[tracer]", ":math:`\bullet` if true, read tracer restart file", "" + "restart_ext", ":math:`\bullet` if true, read/write halo cells in restart file", "" + "restart_coszen", ":math:`\bullet` if true, read/write coszen in restart file", "" + "restore_bgc", ":math:`\bullet` if true, restore nitrate/silicate to data", "" + "restore_ice", ":math:`\bullet` if true, restore ice state along lateral boundaries", "" + "restore_ocn", ":math:`\bullet` restore sst to data", "" + "revised_evp", ":math:`\bullet` if true, use revised EVP parameters and approach", "" + "rfracmin", ":math:`\bullet` minimum melt water fraction added to ponds", "0.15" + "rfracmax", ":math:`\bullet` maximum melt water fraction added to ponds", "1.0" 
"rhoa", "air density", "kg/m\ :math:`^3`" "rhofresh", "density of fresh water", "1000.0 kg/m\ :math:`^3`" "rhoi", "density of ice", "917. kg/m\ :math:`^3`" "rhos", "density of snow", "330. kg/m\ :math:`^3`" - "rhos_cmp", "density of snow due to wind compaction", "kg/m\ :math:`^3`" - "rhos_cnt", "density of ice and liquid content of snow", "kg/m\ :math:`^3`" "rhosi", "average sea ice density (for hbrine tracer)", "940. kg/m\ :math:`^3`" - "rhosmax", "maximum snow density", "450 kg/m\ :math:`^{3}`" - "rhosmin", "minimum snow density", "100 kg/m\ :math:`^{3}`" - "rhosnew", "new snow density", "100 kg/m\ :math:`^{3}`" "rhow", "density of seawater", "1026. kg/m\ :math:`^3`" "rnilyr", "real(nlyr)", "" "rside", "fraction of ice that melts laterally", "" - "rsnw", "snow grain radius", "10\ :math:`^{-6}` m" - "rsnw_fall", "freshly fallen snow grain radius", "100. :math:`\times` 10\ :math:`^{-6}` m" - "rsnw_mlt", "melting snow grain radius", "1000. :math:`\times` 10\ :math:`^{-6}` m" + "rsnw_fresh", "freshly fallen snow grain radius", "100. :math:`\times` 10\ :math:`^{-6}` m" + "rsnw_melt", ":math:`\bullet` melting snow grain radius", "1000. :math:`\times` 10\ :math:`^{-6}` m" "rsnw_nonmelt", "nonmelting snow grain radius", "500. :math:`\times` 10\ :math:`^{-6}` m" "rsnw_sig", "standard deviation of snow grain radius", "250. :math:`\times` 10\ :math:`^{-6}` m" - "rsnw_tmax", "maximum snow radius", "1500. :math:`\times` 10\ :math:`^{-6}` m" - "runid", "identifier for run", "" - "runtype", "type of initialization used", "" + "runid", ":math:`\bullet` identifier for run", "" + "runtype", ":math:`\bullet` type of initialization used", "" "**S**", "", "" "s11, s12, s22", "stress tensor components", "" "saltmax", "max salinity, at ice base (:cite:`Bitz99`)", "3.2 ppt" "scale_factor", "scaling factor for shortwave radiation components", "" - "seabed_stress", "if true, calculate seabed stress", "F" - "seabed_stress_method", "method for calculating seabed stress (‘LKD’ or ‘probabilistic’)", "LKD" + "sec", "seconds elasped into idate", "" "secday", "number of seconds in a day", "86400." - "sec_init", "the initial second", "" "shcoef", "transfer coefficient for sensible heat", "" "shear", "strain rate II component", "1/s" "shlat", "southern latitude of artificial mask edge", "30\ :math:`^\circ`\ N" - "shortwave", "flag for shortwave parameterization (‘ccsm3’ or ‘dEdd’ or 'dEdd_snicar_ad')", "" - "sig1(2)", "principal stress components :math:`\sigma_{n,1}`, :math:`\sigma_{n,2}` (diagnostic)", "" - "sigP", "internal ice pressure", "N/m" + "shortwave", ":math:`\bullet` flag for shortwave parameterization (‘ccsm3’ or ‘dEdd’)", "" + "sig1(2)", "principal stress components (diagnostic)", "" "sil", "silicate concentration", "mmol/m\ :math:`^3`" "sinw", "sine of the turning angle in water", "0." 
"Sinz", "ice salinity profile", "ppt" "sk_l", "skeletal layer thickness", "0.03 m" "snoice", "snow–ice formation", "m" "snowpatch", "length scale for parameterizing nonuniform snow coverage", "0.02 m" - "skl_bgc", "biogeochemistry on/off", "" - "smassice", "mass of ice in snow from smice tracer", "kg/m\ :math:`^2`" - "smassliq", "mass of liquid in snow from smliq tracer", "kg/m\ :math:`^2`" - "snowage_drdt0", "initial rate of change of effective snow radius", " " - "snowage_rhos", "snow aging parameter (density)", " " - "snowage_kappa", "snow aging best-fit parameter", " " - "snowage_tau", "snow aging best-fit parameter", " " - "snowage_T", "snow aging parameter (temperature)", " " - "snowage_Tgrd", "snow aging parameter (temperature gradient)", " " - "snw_aging_table", "snow aging lookup table", " " - "snw_filename", "snowtable filename", " " - "snw_tau_fname", "snowtable file tau fieldname", " " - "snw_kappa_fname", "snowtable file kappa fieldname", " " - "snw_drdt0_fname", "snowtable file drdt0 fieldname", " " - "snw_rhos_fname", "snowtable file rhos fieldname", " " - "snw_Tgrd_fname", "snowtable file Tgrd fieldname", " " - "snw_T_fname", "snowtable file T fieldname", " " - "snwgrain", "activate snow metamorphosis", " " - "snwlvlfac", "fractional increase in snow depth for redistribution on ridges", "0.3" - "snwredist", "type of snow redistribution", " " + "skl_bgc", ":math:`\bullet` biogeochemistry on/off", "" "spval", "special value (single precision)", ":math:`10^{30}`", "" "spval_dbl", "special value (double precision)", ":math:`10^{30}`", "" "ss_tltx(y)", "sea surface in the x(y) direction", "m/m" @@ -670,17 +574,17 @@ section :ref:`tabnamelist`. "Sswabs", "shortwave radiation absorbed in snow layers", "W/m\ :math:`^2`" "stefan-boltzmann", "Stefan-Boltzmann constant", "5.67\ :math:`\times`\ 10\ :math:`^{-8}` W/m\ :math:`^2`\ K\ :math:`^4`" "stop_now", "if 1, end program execution", "" - "strairx(y)U", "stress on ice by air in the x(y)-direction (centered in U cell)", "N/m\ :math:`^2`" + "strairx(y)", "stress on ice by air in the x(y)-direction (centered in U cell)", "N/m\ :math:`^2`" "strairx(y)T", "stress on ice by air, x(y)-direction (centered in T cell)", "N/m\ :math:`^2`" "strax(y)", "wind stress components from data", "N/m\ :math:`^2`" "strength", "ice strength", "N/m" "stress12", "internal ice stress, :math:`\sigma_{12}`", "N/m" - "stressm", "internal ice stress, :math:`\sigma_{11}-\sigma_{22}` (:math:`\sigma_2` in the doc)", "N/m" - "stressp", "internal ice stress, :math:`\sigma_{11}+\sigma_{22}` (:math:`\sigma_1` in the doc)", "N/m" - "strintx(y)U", "divergence of internal ice stress, x(y)", "N/m\ :math:`^2`" - "strocnx(y)U", "ice–ocean stress in the x(y)-direction (U-cell)", "N/m\ :math:`^2`" + "stressm", "internal ice stress, :math:`\sigma_{11}-\sigma_{22}`", "N/m" + "stressp", "internal ice stress, :math:`\sigma_{11}+\sigma_{22}`", "N/m" + "strintx(y)", "divergence of internal ice stress, x(y)", "N/m\ :math:`^2`" + "strocnx(y)", "ice–ocean stress in the x(y)-direction (U-cell)", "N/m\ :math:`^2`" "strocnx(y)T", "ice–ocean stress, x(y)-dir. (T-cell)", "N/m\ :math:`^2`" - "strtltx(y)U", "surface stress due to sea surface slope", "N/m\ :math:`^2`" + "strtltx(y)", "surface stress due to sea surface slope", "N/m\ :math:`^2`" "swv(n)dr(f)", "incoming shortwave radiation, visible (near IR), direct (diffuse)", "W/m\ :math:`^2`" "**T**", "", "" "Tair", "air temperature at 10 m", "K" @@ -689,39 +593,39 @@ section :ref:`tabnamelist`. 
"tarear", "1/tarea", "1/m\ :math:`^2`" "tareas", "area of southern hemisphere T-cells", "m\ :math:`^2`" "tcstr", "string identifying T grid for history variables", "" + "tday", "absolute day number", "" "Tf", "freezing temperature", "C" "Tffresh", "freezing temp of fresh ice", "273.15 K" - "tfrz_option", "form of ocean freezing temperature", "" - "saltflux_option", "form of coupled salt flux ", "" + "tfrz_option", ":math:`\bullet` form of ocean freezing temperature", "" "thinS", "minimum ice thickness for brine tracer", "" - "timer_stats", "logical to turn on extra timer statistics", ".false." - "timesecs", "total elapsed time in seconds", "s" + "time", "total elapsed time", "s" "time_beg", "beginning time for history averages", "" "time_bounds", "beginning and ending time for history averages", "" "time_end", "ending time for history averages", "" "time_forc", "time of last forcing update", "s" "Timelt", "melting temperature of ice top surface", "0. C" + "tinyarea", "puny \* tarea", "m\ :math:`^2`" "Tinz", "Internal ice temperature", "C" "TLAT", "latitude of cell center", "radians" - "Tliquidus_max", "maximum liquidus temperature of mush", "0. C" "TLON", "longitude of cell center", "radians" "tmask", "land/boundary mask, thickness (T-cell)", "" "tmass", "total mass of ice and snow", "kg/m\ :math:`^2`" "Tmin", "minimum allowed internal temperature", "-100. C" "Tmltz", "melting temperature profile of ice", "" "Tocnfrz", "temperature of constant freezing point parameterization", "-1.8 C" - "tr_aero", "if true, use aerosol tracers", "" - "tr_bgc_[tracer]", "if true, use biogeochemistry tracers", "" - "tr_brine", "if true, use brine height tracer", "" - "tr_FY", "if true, use first-year area tracer", "" - "tr_iage", "if true, use ice age tracer", "" - "tr_lvl", "if true, use level ice area and volume tracers", "" - "tr_pond_lvl", "if true, use level-ice melt pond scheme", "" - "tr_pond_topo", "if true, use topo melt pond scheme", "" + "tr_aero", ":math:`\bullet` if true, use aerosol tracers", "" + "tr_bgc_[tracer]", ":math:`\bullet` if true, use biogeochemistry tracers", "" + "tr_brine", ":math:`\bullet` if true, use brine height tracer", "" + "tr_FY", ":math:`\bullet` if true, use first-year area tracer", "" + "tr_iage", ":math:`\bullet` if true, use ice age tracer", "" + "tr_lvl", ":math:`\bullet` if true, use level ice area and volume tracers", "" + "tr_pond_cesm", ":math:`\bullet` if true, use CESM melt pond scheme", "" + "tr_pond_lvl", ":math:`\bullet` if true, use level-ice melt pond scheme", "" + "tr_pond_topo", ":math:`\bullet` if true, use topo melt pond scheme", "" "trcr", "ice tracers", "" "trcr_depend", "tracer dependency on basic state variables", "" "Tref", "2m atmospheric reference temperature", "K" - "trestore", "restoring time scale", "days" + "trestore", ":math:`\bullet` restoring time scale", "days" "tripole", "if true, block lies along tripole boundary", "" "tripoleT", "if true, tripole boundary is T-fold; if false, U-fold", "" "Tsf_errmax", "max allowed :math:`T_{\mathit sf}` error (thermodynamics)", "5.\ :math:`\times`\ 10\ :math:`^{-4}`\ deg" @@ -736,15 +640,14 @@ section :ref:`tabnamelist`. "uatm", "wind velocity in the x direction", "m/s" "ULAT", "latitude of U-cell centers", "radians" "ULON", "longitude of U-cell centers", "radians" - "umask", "land/boundary mask, velocity corner (U-cell)", "" + "umask", "land/boundary mask, velocity (U-cell)", "" "umax_stab", "ice speed threshold (diagnostics)", "1. m/s" "umin", "min wind speed for turbulent fluxes", "1. 
m/s" "uocn", "ocean current in the x-direction", "m/s" - "update_ocn_f", "if true, include frazil ice fluxes in ocean flux fields", "" - "use_leap_years", "if true, include leap days", "" - "use_restart_time", "if true, use date from restart file", "" - "use_smliq_pnd", "use liquid in snow for ponds", " " - "ustar_min", "minimum friction velocity under ice", "" + "update_ocn_f", ":math:`\bullet` if true, include frazil ice fluxes in ocean flux fields", "" + "use_leap_years", ":math:`\bullet` if true, include leap days", "" + "use_restart_time", ":math:`\bullet` if true, use date from restart file", "" + "ustar_min", ":math:`\bullet` minimum friction velocity under ice", "" "ucstr", "string identifying U grid for history variables", "" "uvel", "x-component of ice velocity", "m/s" "uvel_init", "x-component of ice velocity at beginning of time step", "m/s" @@ -754,10 +657,8 @@ section :ref:`tabnamelist`. "vice(n)", "volume per unit area of ice (in category n)", "m" "vicen_init", "ice volume at beginning of timestep", "m" "viscosity_dyn", "dynamic viscosity of brine", ":math:`1.79\times 10^{-3}` kg/m/s" - "visc_method", "method for calculating viscosities (‘avg_strength’ or ‘avg_zeta’)", "avg_zeta" "vocn", "ocean current in the y-direction", "m/s" "vonkar", "von Karman constant", "0.4" - "vort", "vorticity", "1/s" "vraftn", "volume of rafted ice", "m" "vrdgn", "volume of ridged ice", "m" "vredistrn", "redistribution function: fraction of new ridge volume", "" @@ -771,29 +672,18 @@ section :ref:`tabnamelist`. "wave_spectrum", "wave spectrum", "m\ :math:`^2`/s" "wavefreq", "wave frequencies", "1/s" "wind", "wind speed", "m/s" - "windmin", "minimum wind speed to compact snow", "10 m/s" "write_history", "if true, write history now", "" - "write_ic", "if true, write initial conditions", "" + "write_ic", ":math:`\bullet` if true, write initial conditions", "" "write_restart", "if 1, write restart now", "" "**X**", "", "" "**Y**", "", "" - "ycycle", "number of years in forcing data cycle", "" - "yday", "day of the year, computed in the model calendar", "" + "ycycle", ":math:`\bullet` number of years in forcing data cycle", "" + "yday", "day of the year", "" "yield_curve", "type of yield curve", "ellipse" "yieldstress11(12, 22)", "yield stress tensor components", "" - "year_init", "the initial year", "" + "year_init", ":math:`\bullet` the initial year", "" "**Z**", "", "" - "zetax2", "2 x zeta (bulk viscosity)", "kg/s" - "zlvl", "atmospheric level height (momentum)", "m" - "zlvs", "atmospheric level height (scalars)", "m" + "zlvl", "atmospheric level height", "m" "zref", "reference height for stability", "10. m" "zTrf", "reference height for :math:`T_{ref}`, :math:`Q_{ref}`, :math:`U_{ref}`", "2. m" "zvir", "gas constant (water vapor)/gas constant (air) - 1", "0.606" - "**Deprecated options and parameters**", "", "" - "heat_capacity", "if true, use salinity-dependent thermodynamics", "T" - "kseaice", "thermal conductivity of ice for zero-layer thermodynamics", "2.0 W/m/deg" - "ktherm", "thermodynamic formulation (0 = zero-layer, 1 = :cite:`Bitz99`, 2 = mushy)", "" - "tr_pond_cesm", "if true, use CESM melt pond scheme", "" - -.. - new deprecation comments diff --git a/doc/source/conf.py b/doc/source/conf.py index 0e7ce0886..8d0df9777 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -38,9 +38,6 @@ 'sphinxcontrib.bibtex', ] -# Name of the bibliography file for sphinxcontrib.bibtex. -bibtex_bibfiles = ['master_list.bib'] - # Add any paths that contain templates here, relative to this directory. 
templates_path = ['_templates'] @@ -57,7 +54,7 @@ # General information about the project. project = u'CICE' -copyright = u'2023, Triad National Security, LLC (code) and National Center for Atmospheric Research (documentation)' +copyright = u'2020, Triad National Security, LLC (code) and National Center for Atmospheric Research (documentation)' author = u'CICE-Consortium' # The version info for the project you're documenting, acts as replacement for @@ -65,9 +62,9 @@ # built documents. # # The short X.Y version. -version = u'6.5.0' +version = u'6.1.3' # The full version, including alpha/beta/rc tags. -version = u'6.5.0' +version = u'6.1.3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/developer_guide/dg_dynamics.rst b/doc/source/developer_guide/dg_dynamics.rst index 2c886a95f..3551763b5 100644 --- a/doc/source/developer_guide/dg_dynamics.rst +++ b/doc/source/developer_guide/dg_dynamics.rst @@ -6,17 +6,17 @@ Dynamics ============================ -The CICE **cicecore/** directory consists of the non-Icepack source code. Within that +The CICE **cicecore/** directory consists of the non-Icepack source code. Within that directory there are the following subdirectories -**cicecore/cicedyn/analysis** contains higher level history and diagnostic routines. +**cicecore/cicedynB/analysis** contains higher level history and diagnostic routines. -**cicecore/cicedyn/dynamics** contains all the dynamical evp, eap, and transport routines. +**cicecore/cicedynB/dynamics** contains all the dynamical evp, eap, and transport routines. -**cicecore/cicedyn/general** contains routines associated with forcing, flux calculation, +**cicecore/cicedynB/general** contains routines associated with forcing, flux calculation, initialization, and model timestepping. -**cicecore/cicedyn/infrastructure** contains most of the low-level infrastructure associated +**cicecore/cicedynB/infrastructure** contains most of the low-level infrastructure associated with communication (halo updates, gather, scatter, global sums, etc) and I/O reading and writing binary and netcdf files. @@ -29,20 +29,112 @@ coupling layers. Dynamical Solvers -------------------- -The dynamics solvers are found in **cicecore/cicedyn/dynamics/**. A couple of different solvers are -available including EVP, EAP, and VP. The dynamics solver is specified in namelist with the -``kdyn`` variable. ``kdyn=1`` is evp, ``kdyn=2`` is eap, ``kdyn=3`` is VP. +The dynamics solvers are found in **cicecore/cicedynB/dynamics/**. A couple of different solvers are +available including EVP, revised EVP, and EAP. The dynamics solver is specified in namelist with the +``kdyn`` variable. ``kdyn=1`` is evp, ``kdyn=2`` is eap, and revised evp requires the ``revised_evp`` +namelist flag be set to true. -Two alternative implementations of EVP are included. The first alternative is the Revised EVP, triggered when ``revised_evp`` is set to true. The second alternative is the 1d EVP solver, triggered when ``evp_algorithm`` is set to ``shared_mem_1d`` as opposed to the default setting of ``evp_standard_2d``. The solutions with ``evp_algorithm`` set to ``standard_2d`` or ``shared_mem_1d`` will -not be bit-for-bit identical when compared to each other. The reason for this is floating point round off errors that occur unless strict compiler flags are used. ``evp_algorithm=shared_mem_1d`` is primarily built for OpenMP.
If MPI domain splitting is used then the solver will only run on the master processor. ``evp_algorithm=shared_mem_1d`` is not supported -with the tripole grid. +Multiple evp solvers are supported thru the namelist flag ``kevp_kernel``. The standard implementation +and current default is ``kevp_kernel=0``. In this case, the stress is solved on the regular decomposition +via subcycling and calls to subroutine stress and subroutine stepu with MPI global sums required in each +subcycling call. With ``kevp_kernel=2``, the data required to compute the stress is gathered to the root +MPI process and the stress calculation is performed on the root task without any MPI global sums. OpenMP +parallelism is supported in ``kevp_kernel=2``. The solutions with ``kevp_kernel`` set to 0 or 2 will +not be bit-for-bit +identical but should be the same to roundoff and produce the same climate. ``kevp_kernel=2`` may perform +better for some configurations, some machines, and some pe counts. ``kevp_kernel=2`` is not supported +with the tripole grid and is still being validated. Until ``kevp_kernel=2`` is fully validated, it will +abort if set. To override the abort, use value 102 for testing. Transport ----------------- -The transport (advection) methods are found in **cicecore/cicedyn/dynamics/**. Two methods are supported, -upwind and remap. These are set in namelist via the ``advection`` variable. -Transport can be disabled with the ``ktransport`` namelist variable. +The transport (advection) methods are found in **cicecore/cicedynB/dynamics/**. Only the incremental +remapping method is supported at this time, and is set in namelist via the ``advection`` variable. +Transport can be turned off by setting ``advection = none`` or ``ktransport = -1``. +Infrastructure +======================= + +Kinds +------------------ + +**cicecore/shared/ice_kinds_mod.F90** defines the kinds datatypes used in CICE. These kinds are +used throughout CICE code to define variable types. The CICE kinds are adopted from the kinds +defined in Icepack for consistency in interfaces. + +Constants +------------------ + +**cicecore/shared/ice_constants.F90** defines several model constants. Some are hardwired parameters +while others have internal defaults and can be set thru namelist. + +Dynamic Array Allocation +------------------------------- + +CICE v5 and earlier was implemented using mainly static arrays and required several CPPs to be set to define grid size, +blocks sizes, tracer numbers, and so forth. With CICE v6 and later, arrays are dynamically allocated and those +parameters are namelist settings. The following CPPs are no longer used in CICE v6 and later versions, + + -DNXGLOB=100 -DNYGLOB=116 -DBLCKX=25 -DBLCKY=29 -DMXBLCKS=4 -DNICELYR=7 -DNSNWLYR=1 -DNICECAT=5 -DTRAGE=1 -DTRFY=1 -DTRLVL=1 -DTRPND=1 -DTRBRI=0 -DNTRAERO=1 -DTRZS=0 -DNBGCLYR=7 -DTRALG=0 -DTRBGCZ=0 -DTRDOC=0 -DTRDOC=0 -DTRDIC=0 -DTRDON=0 -DTRFED=0 -DTRFEP=0 -DTRZAERO=0 -DTRBGCS=0 -DNUMIN=11 -DNUMAX=99 + +as they have been migrated to :ref:`tabnamelist` + + nx_global, ny_global, block_size_x, block_size_y, max_blocks, nilyr, nslyr, ncat, nblyr, n_aero, n_zaero, n_algae, n_doc, n_dic, n_don, n_fed, n_fep, numin, numax + + +Time Manager +------------------ + +Time manager data is module data in **cicecore/shared/ice_calendar.F90**. Much of the time manager +data is public and operated on during the model timestepping. 
The model timestepping actually takes +place in the **CICE_RunMod.F90** file which is part of the driver code and tends to look like this:: + + call ice_step + istep = istep + 1 ! update time step counters + istep1 = istep1 + 1 + time = time + dt ! determine the time and date + + + +Communication +------------------ + +Two low-level communications packages, mpi and serial, are provided as part of CICE. This software +provides a middle layer between the model and the underlying libraries. Only the CICE mpi or +serial directories are compiled with CICE, not both. + +**cicedynB/infrastructure/comm/mpi/** +is based on MPI and provides various methods to do halo updates, global sums, gather/scatter, broadcasts +and similar using some fairly generic interfaces to isolate the MPI calls in the code. + +**cicedynB/infrastructure/comm/serial/** supports the same interfaces, but operates +in shared memory mode with no MPI. The serial library will be used, by default in the CICE scripts, +if the number of MPI tasks is set to 1. The serial library allows the model to be run on a single +core or with OpenMP parallelism only without requiring an MPI library. + +I/O +------------------ + +There are three low-level IO packages in CICE, io_netcdf, io_binary, and io_pio. This software +provides a middle layer between the model and the underlying IO libraries. +Only one of the three IO directories can be built with CICE. The CICE scripts will build with the io_netcdf +by default, but other options can be selected by setting ``ICE_IOTYPE`` in **cice.settings** in the +case. This has to be set before CICE is built. + +**cicedynB/infrastructure/io/io_netcdf/** is the +default for the standalone CICE model, and it supports writing history and restart files in netcdf +format using standard netcdf calls. It does this by writing and reading from the root task and +gathering and scattering fields to and from the root task to support model parallelism. + +**cicedynB/infrastructure/io/io_binary/** supports files in binary format using a gather/scatter +approach and reading and writing from the root task. + +**cicedynB/infrastructure/io/io_pio/** supports reading and writing through the pio interface. pio +is a parallel io library (https://github.com/NCAR/ParallelIO) that supports reading and writing of +binary and netcdf files through various interfaces including netcdf and pnetcdf. pio is generally +more parallel in memory than the standard gather/scatter methods, even when using serial netcdf, +and it provides parallel read/write capabilities by optionally linking and using pnetcdf. + diff --git a/doc/source/intro/citing.rst b/doc/source/intro/citing.rst index 593041b21..c128bc4e6 100644 --- a/doc/source/intro/citing.rst +++ b/doc/source/intro/citing.rst @@ -15,7 +15,7 @@ More information can be found by following the DOI link to zenodo. If you use CICE, please cite the version number of the code you are using or modifying. -If using code from the CICE-Consortium repository ``main`` branch +If using code from the CICE-Consortium repository ``master`` branch that includes modifications that have not yet been released with a version number, then in addition to the most recent version number, the hash at time of diff --git a/doc/source/science_guide/sg_horiztrans.rst b/doc/source/science_guide/sg_horiztrans.rst index 4ccf00e9b..2cca81469 100644 --- a/doc/source/science_guide/sg_horiztrans.rst +++ b/doc/source/science_guide/sg_horiztrans.rst @@ -33,7 +33,7 @@ introductory comments in **ice\_transport\_remap.F90**.
Prognostic equations for ice and/or snow density may be included in future model versions but have not yet been implemented. -Two transport schemes are available: upwind and the incremental +One transport scheme is available, the incremental remapping scheme of :cite:`Dukowicz00` as modified for sea ice by :cite:`Lipscomb04`. diff --git a/doc/source/science_guide/sg_tracers.rst b/doc/source/science_guide/sg_tracers.rst index 5935fe67e..bbd18eb1f 100644 --- a/doc/source/science_guide/sg_tracers.rst +++ b/doc/source/science_guide/sg_tracers.rst @@ -78,6 +78,8 @@ is not in use. "tr_FY", "1", "aice", "nt_FY", " " "tr_lvl", "2", "aice", "nt_alvl", " " " ", " ", "vice", "nt_vlvl", " " + "tr_pond_cesm", "2", "aice", "nt_apnd", " " + " ", " ", "apnd", "nt_vpnd", " " "tr_pond_lvl", "3", "aice", "nt_apnd", " " " ", " ", "apnd", "nt_vpnd", " " " ", " ", "apnd", "nt_ipnd", " " @@ -88,10 +90,7 @@ is not in use. "tr_iso", "n_iso", "vice, vsno", "nt_iso"," " "tr_brine", " ", "vice", "nt_fbri", " " "tr_fsd","nfsd","aice","nt_fsd"," " - "tr_snow","nslyr","vsno","nt_rsnw"," " - " ","nslyr","vsno","nt_rhos"," " - " ","nslyr","vsno","nt_smice"," " - " ","nslyr","vsno","nt_smliq"," " + "solve_zsal", "n_trzs", "fbri or (a,v)ice", "nt_bgc_S", " " "tr_bgc_N", "n_algae", "fbri or (a,v)ice", "nt_bgc_N", "nlt_bgc_N" "tr_bgc_Nit", " ", "fbri or (a,v)ice", "nt_bgc_Nit", "nlt_bgc_Nit" "tr_bgc_C", "n_doc", "fbri or (a,v)ice", "nt_bgc_DOC", "nlt_bgc_DOC" @@ -110,13 +109,10 @@ is not in use. "tr_zaero", "n_zaero", "fbri or (a,v)ice", "nt_zaero", "nlt_zaero" " ", "1", "fbri", "nt_zbgc_frac", " " -.. - "solve_zsal", "n_trzs", "fbri or (a,v)ice", "nt_bgc_S", " " - "tr_pond_cesm", "2", "aice", "nt_apnd", " " - " ", " ", "apnd", "nt_vpnd", " " + Users may add any number of additional tracers that are transported conservatively, provided that the dependency ``trcr_depend`` is defined appropriately. See Section :ref:`addtrcr` for guidance on adding tracers. -Please see the `Icepack documentation `_ for additional information about tracers that depend on other tracers, the floe size distribution, advanced snow physics, age of the ice, aerosols, water isotopes, brine height, and the sea ice ecosystem. +Please see the `Icepack documentation `_ for additional information about tracers that depend on other tracers, the floe size distribution, age of the ice, aerosols, water isotopes, brine height, and the sea ice ecosystem. diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 9f1f8a259..032c8b529 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -2,8 +2,8 @@ .. _case_settings: -Case Settings, Model Namelist, and CPPs -========================================== +Case Settings +===================== There are two important files that define the case, **cice.settings** and **ice_in**. **cice.settings** is a list of env variables that define many @@ -33,12 +33,14 @@ can be found in :ref:`cicecpps`. The following CPPs are available. "ESMF_INTERFACE", "Turns on ESMF support in a subset of driver code. Also USE_ESMF_LIB and USE_ESMF_METADATA" "FORTRANUNDERSCORE", "Used in ice_shr_reprosum86.c to support Fortran-C interfaces. This should generally be turned on at all times. 
There are other CPPs (FORTRANDOUBULEUNDERSCORE, FORTRANCAPS, etc) in ice_shr_reprosum.c that are generally not used in CICE but could be useful if problems arise in the Fortran-C interfaces" "GPTL", "Turns on GPTL initialization if needed for PIO" + "key_oasis3", "Leverages Oasis CPPs to define the local MPI communicator" + "key_oasis3mct", "Leverages Oasis CPPs to define the local MPI communicator" + "key_oasis4", "Leverages Oasis CPPs to define the local MPI communicator" + "key_iomput", "Leverages Oasis CPPs to define the local MPI communicator" "NO_F2003", "Turns off some Fortran 2003 features" "NO_I8", "Converts integer*8 to integer*4. This could have adverse affects for certain algorithms including the ddpdd implementation associated with the ``bfbflag``" "NO_R16", "Converts real*16 to real*8. This could have adverse affects for certain algorithms including the lsum16 implementation associated with the ``bfbflag``" - "NO_SNICARHC", "Does not compile hardcoded (HC) 5 band snicar tables tables needed by ``shortwave=dEdd_snicar_ad``. May reduce compile time." - "USE_NETCDF", "Turns on netCDF code. This is normally on and is needed for released configurations. An older value, ncdf, is still supported." - "USE_PIO1", "Modifies CICE PIO implementation to be compatible with PIO1. By default, code is compatible with PIO2" + "USE_NETCDF", "Turns on netcdf code. This is normally on and is needed for released configurations. An older value, ncdf, is still supported" "","" "**Application Macros**", "" "CESMCOUPLED", "Turns on code changes for the CESM coupled application " @@ -80,39 +82,27 @@ can be modified as needed. "ICE_HSTDIR", "string", "unused", "${ICE_RUNDIR}/history" "ICE_LOGDIR", "string", "log directory", "${ICE_CASEDIR}/logs" "ICE_DRVOPT", "string", "unused", "standalone/cice" - "ICE_TARGET", "string", "build target", "set by cice.setup" - "ICE_IOTYPE", "string", "I/O source code", "set by cice.setup" - " ", "binary", "uses io_binary directory, no support for netCDF files" - " ", "netcdf", "uses io_netCDF directory, supports netCDF files" - " ", "pio1", "uses io_pio directory with PIO1 library, supports netCDF and parallel netCDF thru PIO interfaces" - " ", "pio2", "uses io_pio directory with PIO2 library, supports netCDF and parallel netCDF thru PIO interfaces" + "ICE_IOTYPE", "string", "I/O format", "set by cice.setup" + " ", "netcdf", "serial netCDF" + " ", "pio", "parallel netCDF" + " ", "none", "netCDF library is not available" "ICE_CLEANBUILD", "true, false", "automatically clean before building", "true" "ICE_CPPDEFS", "user defined preprocessor macros for build", "null" "ICE_QUIETMODE", "true, false", "reduce build output to the screen", "false" "ICE_GRID", "string (see below)", "grid", "set by cice.setup" - " ", "gbox12", "12x12 box", " " - " ", "gbox80", "80x80 box", " " - " ", "gbox128", "128x128 box", " " - " ", "gbox180", "180x180 box", " " - " ", "gx1", "1-deg displace-pole (Greenland) global grid", " " " ", "gx3", "3-deg displace-pole (Greenland) global grid", " " + " ", "gx1", "1-deg displace-pole (Greenland) global grid", " " " ", "tx1", "1-deg tripole global grid", " " - "ICE_NTASKS", "integer", "number of MPI tasks", "set by cice.setup" - "ICE_NTHRDS", "integer", "number of threads per task", "set by cice.setup" - "ICE_OMPSCHED", "string", "OpenMP SCHEDULE env setting", "static,1" + " ", "gbox80", "80x80 box", " " + " ", "gbox128", "128x128 box", " " + "ICE_NTASKS", "integer", "number of tasks, must be set to 1", "set by cice.setup" + "ICE_NTHRDS", "integer", 
"number of threads per task, must be set to 1", "set by cice.setup" "ICE_TEST", "string", "test setting if using a test", "set by cice.setup" "ICE_TESTNAME", "string", "test name if using a test", "set by cice.setup" - "ICE_TESTID", "string", "test name testid", "set by cice.setup" - "ICE_BASELINE", "string", "baseline directory name, associated with cice.setup --bdir ", "set by cice.setup" + "ICE_BASELINE", "string", "baseline directory name, associated with cice.setup -bdir ", "set by cice.setup" "ICE_BASEGEN", "string", "baseline directory name for regression generation, associated with cice.setup -bgen ", "set by cice.setup" "ICE_BASECOM", "string", "baseline directory name for regression comparison, associated with cice.setup -bcmp ", "set by cice.setup" - "ICE_BFBCOMP", "string", "location of case for comparison, associated with cice.setup --bcmp", "set by cice.setup" - "ICE_BFBTYPE", "string", "type and files used in BFBCOMP", "restart" - " ", "log", "log file comparison for bit for bit", " " - " ", "logrest", "log and restart files for bit for bit", " " - " ", "qcchk", "QC test for same climate", " " - " ", "qcchkf", "QC test for different climate", " " - " ", "restart", "restart files for bit for bit", " " + "ICE_BFBCOMP", "string", "location of case for comparison, associated with cice.setup -td", "set by cice.setup" "ICE_SPVAL", "string", "special value for cice.settings strings", "set by cice.setup" "ICE_RUNLENGTH", "integer (see below)", "batch run length default", "set by cice.setup" " ", "-1", "15 minutes (default)", " " @@ -124,8 +114,6 @@ can be modified as needed. "ICE_ACCOUNT", "string", "batch account number", "set by cice.setup, .cice_proj or by default" "ICE_QUEUE", "string", "batch queue name", "set by cice.setup or by default" "ICE_THREADED", "true, false", "force threading in compile, will always compile threaded if ICE_NTHRDS :math:`> 1`", "false" - "ICE_COMMDIR", "mpi, serial", "specify infrastructure comm version", "set by ICE_NTASKS" - "ICE_SNICARHC", "true, false", "turn on hardcoded (HC) SNICAR tables in Icepack", "false" "ICE_BLDDEBUG", "true, false", "turn on compile debug flags", "false" "ICE_COVERAGE", "true, false", "turn on code coverage flags", "false" @@ -133,7 +121,7 @@ can be modified as needed. .. _tabnamelist: -Tables of Namelist Options +Table of namelist options ------------------------------- CICE reads a namelist input file, **ice_in**, consisting of several namelist groups. 
The tables below @@ -159,122 +147,59 @@ setup_nml "``conserv_check``", "logical", "check conservation", "``.false.``" "``cpl_bgc``", "logical", "couple bgc thru driver", "``.false.``" "``days_per_year``", "integer", "number of days in a model year", "365" - "``day_init``", "integer", "the initial day of the month if not using restart", "1" - "``debug_forcing``", "logical", "write extra forcing diagnostics", "``.false.``" - "``debug_model``", "logical", "write extended model point diagnostics", "``.false.``" - "``debug_model_i``", "integer", "local i index of debug_model point", "-1" - "``debug_model_iblk``", "integer", "iblk value for debug_model point", "-1" - "``debug_model_j``", "integer", "local j index of debug_model point", "-1" - "``debug_model_task``", "integer", "mpi task value for debug_model point", "-1" - "``debug_model_step``", "logical", "initial timestep to write ``debug_model`` output", "0" + "``dbug``", "logical", "write extra diagnostics", "``.false.``" "``diagfreq``", "integer", "frequency of diagnostic output in timesteps", "24" "``diag_type``", "``stdout``", "write diagnostic output to stdout", "``stdout``" "", "``file``", "write diagnostic output to file", "" "``diag_file``", "string", "diagnostic output file", "'ice_diag.d'" "``dt``", "real", "thermodynamics time step length in seconds", "3600." - "``dumpfreq``", "``d``", "write restart every ``dumpfreq_n`` days", "'y','x','x','x','x'" - "", "``d1``", "write restart once after ``dumpfreq_n`` days", "" + "``dumpfreq``", "``d``", "write restart every ``dumpfreq_n`` days", "``y``" "", "``h``", "write restart every ``dumpfreq_n`` hours", "" - "", "``h1``", "write restart once after ``dumpfreq_n`` hours", "" "", "``m``", "write restart every ``dumpfreq_n`` months", "" - "", "``m1``", "write restart once after ``dumpfreq_n`` months", "" "", "``y``", "write restart every ``dumpfreq_n`` years", "" - "", "``y1``", "write restart once after ``dumpfreq_n`` years", "" - "", "``1``", "write restart every ``dumpfreq_n`` time steps", "" - "", "``11``", "write restart once after ``dumpfreq_n`` time steps", "" - "``dumpfreq_base``", "init", "restart output frequency relative to year_init, month_init, day_init", "'init','init','init','init','init'" - "", "zero", "restart output frequency relative to year-month-day of 0000-01-01", "" - "``dumpfreq_n``", "integer array", "write restart frequency with ``dumpfreq``", "1,1,1,1,1" + "", "``1``", "write restart every ``dumpfreq_n`` time step", "" + "``dumpfreq_n``", "integer", "write restart frequency with ``dumpfreq``", "1" "``dump_last``", "logical", "write restart on last time step of simulation", "``.false.``" + "``hist_avg``", "logical", "write time-averaged data", "``.true.``" "``histfreq``", "``d``", "write history every ``histfreq_n`` days", "'1','h','d','m','y'" "", "``h``", "write history every ``histfreq_n`` hours", "" "", "``m``", "write history every ``histfreq_n`` months", "" "", "``x``", "unused frequency stream (not written)", "" "", "``y``", "write history every ``histfreq_n`` years", "" "", "``1``", "write history every ``histfreq_n`` time step", "" - "``histfreq_base``", "init", "history output frequency relative to year_init, month_init, day_init", "'zero','zero','zero','zero','zero'" - "", "zero", "history output frequency relative to year-month-day of 0000-01-01", "" "``histfreq_n``", "integer array", "frequency history output is written with ``histfreq``", "1,1,1,1,1" - "``history_chunksize``", "integer array", "chunksizes (x,y) for history output (hdf5 only)", "0,0" - 
"``history_deflate``", "integer", "compression level (0 to 9) for history output (hdf5 only)", "0" "``history_dir``", "string", "path to history output directory", "'./'" "``history_file``", "string", "output file for history", "'iceh'" - "``history_format``", "``binary``", "write history files with binary format", "``cdf1``" - "", "``cdf1``", "write history files with netcdf cdf1 (netcdf3-classic) format", "" - "", "``cdf2``", "write history files with netcdf cdf2 (netcdf3-64bit-offset) format", "" - "", "``cdf5``", "write history files with netcdf cdf5 (netcdf3-64bit-data) format", "" - "", "``default``", "write history files in default format", "" - "", "``hdf5``", "write history files with netcdf hdf5 (netcdf4) format", "" - "", "``pio_pnetcdf``", "write history files with pnetcdf in PIO, deprecated", "" - "", "``pio_netcdf``", "write history files with netcdf in PIO, deprecated", "" - "", "``pnetcdf1``", "write history files with pnetcdf cdf1 (netcdf3-classic) format", "" - "", "``pnetcdf2``", "write history files with pnetcdf cdf2 (netcdf3-64bit-offset) format", "" - "", "``pnetcdf5``", "write history files with pnetcdf cdf5 (netcdf3-64bit-data) format", "" - "``history_iotasks``", "integer", "pe io tasks for history output with history_root and history_stride (PIO only), -99=internal default", "-99" + "``history_format``", "``default``", "read/write history files in default format", "``default``" + "", "``pio_pnetcdf``", "read/write restart files with pnetcdf in pio", "" "``history_precision``", "integer", "history file precision: 4 or 8 byte", "4" - "``history_rearranger``", "box", "box io rearranger option for history output (PIO only)", "default" - "", "default", "internal default io rearranger option for history output", "" - "", "subset", "subset io rearranger option for history output", "" - "``history_root``", "integer", "pe root task for history output with history_iotasks and history_stride (PIO only), -99=internal default", "-99" - "``history_stride``", "integer", "pe stride for history output with history_iotasks and history_root (PIO only), -99=internal default", "-99" - "``hist_avg``", "logical", "write time-averaged data", "``.true.,.true.,.true.,.true.,.true.``" - "``hist_suffix``", "character array", "appended to history_file when not x", "``x,x,x,x,x``" - "``hist_time_axis``","character","history file time axis interval location: begin, middle, end","end" - "``ice_ic``", "``default``", "equal to internal", "``default``" - "", "``internal``", "initial conditions set based on ice\_data\_type,conc,dist inputs", "" + "``ice_ic``", "``default``", "latitude and sst dependent initial condition", "``default``" "", "``none``", "no ice", "" "", "'path/file'", "restart file name", "" "``incond_dir``", "string", "path to initial condition directory", "'./'" "``incond_file``", "string", "output file prefix for initial condition", "‘iceh_ic’" "``istep0``", "integer", "initial time step number", "0" "``latpnt``", "real", "latitude of (2) diagnostic points", "90.0,-65.0" - "``lcdf64``", "logical", "use 64-bit netCDF format, deprecated, see history_format, restart_format", "``.false.``" + "``lcdf64``", "logical", "use 64-bit netcdf format", "``.false.``" "``lonpnt``", "real", "longitude of (2) diagnostic points", "0.0,-45.0" - "``memory_stats``", "logical", "turns on memory use diagnostics", "``.false.``" - "``month_init``", "integer", "the initial month if not using restart", "1" "``ndtd``", "integer", "number of dynamics/advection/ridging/steps per thermo timestep", "1" - 
"``npt``", "integer", "total number of npt_units to run the model", "99999" - "``npt_unit``", "``d``", "run ``npt`` days", "1" - "", "``h``", "run ``npt`` hours", "" - "", "``m``", "run ``npt`` months", "" - "", "``s``", "run ``npt`` seconds", "" - "", "``y``", "run ``npt`` years", "" - "", "``1``", "run ``npt`` timesteps", "" + "``npt``", "integer", "total number of time steps to take", "99999" "``numin``", "integer", "minimum internal IO unit number", "11" "``numax``", "integer", "maximum internal IO unit number", "99" "``pointer_file``", "string", "restart pointer filename", "'ice.restart_file'" "``print_global``", "logical", "print global sums diagnostic data", "``.true.``" "``print_points``", "logical", "print diagnostic data for two grid points", "``.false.``" - "``restart``", "logical", "exists but deprecated, now set internally based on other inputs", "" - "``restart_chunksize``", "integer array", "chunksizes (x,y) for restart output (hdf5 only)", "0,0" - "``restart_deflate``", "integer", "compression level (0 to 9) for restart output (hdf5 only)", "0" + "``restart``", "logical", "initialize using restart file", "``.false.``" "``restart_dir``", "string", "path to restart directory", "'./'" "``restart_ext``", "logical", "read/write halo cells in restart files", "``.false.``" "``restart_file``", "string", "output file prefix for restart dump", "'iced'" - "``restart_format``", "``binary``", "write restart files with binary format", "``cdf1``" - "", "``cdf1``", "write restart files with netcdf cdf1 (netcdf3-classic) format", "" - "", "``cdf2``", "write restart files with netcdf cdf2 (netcdf3-64bit-offset) format", "" - "", "``cdf5``", "write restart files with netcdf cdf5 (netcdf3-64bit-data) format", "" - "", "``default``", "write restart files in default format", "" - "", "``hdf5``", "write restart files with netcdf hdf5 (netcdf4) format", "" - "", "``pio_pnetcdf``", "write restart files with pnetcdf in PIO, deprecated", "" - "", "``pio_netcdf``", "write restart files with netcdf in PIO, deprecated", "" - "", "``pnetcdf1``", "write restart files with pnetcdf cdf1 (netcdf3-classic) format", "" - "", "``pnetcdf2``", "write restart files with pnetcdf cdf2 (netcdf3-64bit-offset) format", "" - "", "``pnetcdf5``", "write restart files with pnetcdf cdf5 (netcdf3-64bit-data) format", "" - "``restart_iotasks``", "integer", "pe io tasks for restart output with restart_root and restart_stride (PIO only), -99=internal default", "-99" - "``restart_rearranger``", "box", "box io rearranger option for restart output (PIO only)", "default" - "", "default", "internal default io rearranger option for restart output", "" - "", "subset", "subset io rearranger option for restart output", "" - "``restart_root``", "integer", "pe root task for restart output with restart_iotasks and restart_stride (PIO only), -99=internal default", "-99" - "``restart_stride``", "integer", "pe stride for restart output with restart_iotasks and restart_root (PIO only), -99=internal default", "-99" + "``restart_format``", "``default``", "read/write restart file with default format", "``default``" + "", "``pio_pnetcdf``", "read/write restart files with pnetcdf in pio", "" "``runid``", "string", "label for run (currently CESM only)", "'unknown'" "``runtype``", "``continue``", "restart using ``pointer_file``", "``initial``" "", "``initial``", "start from ``ice_ic``", "" - "``sec_init``", "integer", "the initial second if not using restart", "0" - "``timer_stats``", "logical", "controls extra timer output", "``.false.``" 
"``use_leap_years``", "logical", "include leap days", "``.false.``" - "``use_restart_time``", "logical", "set initial date using restart file on initial runtype only", "``.false.``" + "``use_restart_time``", "logical", "set initial date using restart file", "``.true.``" "``version_name``", "string", "model version", "'unknown_version_name'" "``write_ic``", "logical", "write initial condition", "``.false.``" "``year_init``", "integer", "the initial year if not using restart", "0" @@ -288,28 +213,16 @@ grid_nml :widths: 15, 15, 30, 15 "", "", "", "" - "``bathymetry_file``", "string", "name of bathymetry file to be read", "'unknown_bathymetry_file'" - "``bathymetry_format``", "``default``", "NetCDF depth field", "'default'" + "``bathymetry_file``", "string", "name of bathymetry file to be read", "‘unknown_bathymetry_file’" + "``bathymetry_format``", "``default``", "NetCDF depth field", "‘default’" "", "``pop``", "pop thickness file in cm in ascii format", "" - "``close_boundaries``", "logical", "force two gridcell wide land mask on boundaries for rectangular grids", "``.false.``" + "``close_boundaries``", "logical", "set land on edges of grid", "``.false.``" "``dxrect``", "real", "x-direction grid spacing for rectangular grid in cm", "0.0" - "``dxscale``", "real", "user defined rectgrid x-grid scale factor", "1.0" "``dyrect``", "real", "y-direction grid spacing for rectangular grid in cm", "0.0" - "``dyscale``", "real", "user defined rectgrid y-grid scale factor", "1.0" "``gridcpl_file``", "string", "input file for coupling grid info", "'unknown_gridcpl_file'" - "``grid_atm``", "``A``", "atm forcing/coupling grid, all fields on T grid", "``A``" - "", "``B``", "atm forcing/coupling grid, thermo fields on T grid, dyn fields on U grid", "" - "", "``C``", "atm forcing/coupling grid, thermo fields on T grid, dynu fields on E grid, dynv fields on N grid", "" - "", "``CD``", "atm forcing/coupling grid, thermo fields on T grid, dyn fields on N and E grid", "" "``grid_file``", "string", "name of grid file to be read", "'unknown_grid_file'" "``grid_format``", "``bin``", "read direct access grid and kmt files", "``bin``" "", "``nc``", "read grid and kmt files", "" - "``grid_ice``", "``B``", "use B grid structure with T at center and U at NE corner", "``B``" - "", "``C``", "use C grid structure with T at center, U at E edge, V at N edge", "" - "``grid_ocn``", "``A``", "ocn forcing/coupling grid, all fields on T grid", "``A``" - "", "``B``", "ocn forcing/coupling grid, thermo fields on T grid, dyn fields on U grid", "" - "", "``C``", "ocn forcing/coupling grid, thermo fields on T grid, dynu fields on E grid, dynv fields on N grid", "" - "", "``CD``", "ocn forcing/coupling grid, thermo fields on T grid, dyn fields on N and E grid", "" "``grid_type``", "``displaced_pole``", "read from file in *popgrid*", "``rectangular``" "", "``rectangular``", "defined in *rectgrid*", "" "", "``regional``", "read from file in *popgrid*", "" @@ -319,24 +232,14 @@ grid_nml "", "``1``", "new formulation with round numbers", "" "", "``2``", "WMO standard categories", "" "", "``3``", "asymptotic scheme", "" - "``kmt_file``", "string", "name of land mask file to be read", "``unknown_kmt_file``" - "``kmt_type``", "boxislands", "ocean/land mask set internally, complex test geometory", "file" - "", "channel", "ocean/land mask set internally as zonal channel", "" - "", "channel_oneeast", "ocean/land mask set internally as single gridcell east-west zonal channel", "" - "", "channel_onenorth", "ocean/land mask set internally as single 
gridcell north-south zonal channel", "" - "", "default", "ocean/land mask set internally, land in upper left and lower right of domain, ", "" - "", "file", "ocean/land mask setup read from file, see kmt_file", "" - "", "wall", "ocean/land mask set at right edge of domain", "" - "``latrefrect``","real","lower left corner lat for rectgrid in deg", "71.35" - "``lonrefrect``","real","lower left corner lon for rectgrid in deg", "-156.5" + "``kmt_file``", "string", "name of land mask file to be read", "'unknown_kmt_file'" "``nblyr``", "integer", "number of zbgc layers", "0" "``ncat``", "integer", "number of ice thickness categories", "0" "``nfsd``", "integer", "number of floe size categories", "1" "``nilyr``", "integer", "number of vertical layers in ice", "0" "``nslyr``", "integer", "number of vertical layers in snow", "0" "``orca_halogrid``", "logical", "use orca haloed grid for data/grid read", "``.false.``" - "``scale_dxdy``", "logical", "apply dxscale, dyscale to rectgrid", "``false``" - "``use_bathymetry``", "logical", "use read in bathymetry file for seabedstress option", "``.false.``" + "``use_bathymetry``", "logical", "use read in bathymetry file for basalstress option", "``.false.``" "", "", "", "" domain_nml @@ -350,7 +253,6 @@ domain_nml "``add_mpi_barriers``", "logical", "throttle communication", "``.false.``" "``block_size_x``", "integer", "block size in x direction", "-1" "``block_size_y``", "integer", "block size in y direction", "-1" - "``debug_blocks``", "logical", "add additional print statements to debug the block decomposition", "``.false.``" "``distribution_type``", "``cartesian``", "2D cartesian block distribution method", "``cartesian``" "", "``rake``", "redistribute blocks among neighbors", "" "", "``roundrobin``", "1 block per proc until blocks are used", "" @@ -359,8 +261,7 @@ domain_nml "", "``spacecurve``", "distribute blocks via space-filling curves", "" "", "``spiralcenter``", "distribute blocks via roundrobin from center of grid outward in a spiral", "" "", "``wghtfile``", "distribute blocks based on weights specified in ``distribution_wght_file``", "" - "``distribution_wght``", "``block``", "full block weight method with land block elimination", "``latitude``" - "", "``blockall``", "full block weight method without land block elimination", "" + "``distribution_wght``", "``block``", "full block size distribution weight method", "``latitude``" "", "``latitude``", "latitude/ocean sets ``work_per_block``", "" "``distribution_wght_file``", "string", "distribution weight file when distribution_type is ``wghtfile``", "'unknown'" "``ew_boundary_type``", "``cyclic``", "periodic boundary conditions in x-direction", "``cyclic``" @@ -406,25 +307,20 @@ tracer_nml "``tr_iage``", "logical", "ice age", "``.false.``" "``tr_iso``", "logical", "isotopes", "``.false.``" "``tr_lvl``", "logical", "level ice area and volume", "``.false.``" + "``tr_pond_cesm``", "logical", "CESM melt ponds", "``.false.``" "``tr_pond_lvl``", "logical", "level-ice melt ponds", "``.false.``" - "``tr_pond_cesm``", " ", "DEPRECATED", " " "``tr_pond_topo``", "logical", "topo melt ponds", "``.false.``" - "``tr_snow``", "logical", "advanced snow physics", "``.false.``" "``restart_aero``", "logical", "restart tracer values from file", "``.false.``" "``restart_age``", "logical", "restart tracer values from file", "``.false.``" "``restart_fsd``", "logical", "restart floe size distribution values from file", "``.false.``" "``restart_FY``", "logical", "restart tracer values from file", "``.false.``" 
"``restart_iso``", "logical", "restart tracer values from file", "``.false.``" "``restart_lvl``", "logical", "restart tracer values from file", "``.false.``" + "``restart_pond_cesm``", "logical", "restart tracer values from file", "``.false.``" "``restart_pond_lvl``", "logical", "restart tracer values from file", "``.false.``" "``restart_pond_topo``", "logical", "restart tracer values from file", "``.false.``" - "``restart_snow``", "logical", "restart snow tracer values from file", "``.false.``" "", "", "", "" -.. - "``tr_pond_cesm``", "logical", "CESM melt ponds", "``.false.``" - "``restart_pond_cesm``", "logical", "restart tracer values from file", "``.false.``" - thermo_nml ~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -438,70 +334,47 @@ thermo_nml "``conduct``", "``bubbly``", "conductivity scheme :cite:`Pringle07`", "``bubbly``" "", "``MU71``", "conductivity :cite:`Maykut71`", "" "``dSdt_slow_mode``", "real", "slow drainage strength parameter m/s/K", "-1.5e-7" - "``floediam``", "real", "effective floe diameter for lateral melt in m", "300.0" - "``hfrazilmin``", "real", "min thickness of new frazil ice in m", "0.05" - "``hi_min``", "real", "minimum ice thickness in m", "0.01" "``kitd``", "``0``", "delta function ITD approximation", "1" "", "``1``", "linear remapping ITD approximation", "" "``ksno``", "real", "snow thermal conductivity", "0.3" "``ktherm``", "``-1``", "thermodynamic model disabled", "1" + "", "``0``", "zero-layer thermodynamic model", "" "", "``1``", "Bitz and Lipscomb thermodynamic model", "" "", "``2``", "mushy-layer thermodynamic model", "" "``phi_c_slow_mode``", ":math:`0<\phi_c < 1`", "critical liquid fraction", "0.05" "``phi_i_mushy``", ":math:`0<\phi_i < 1`", "solid fraction at lower boundary", "0.85" "``Rac_rapid_mode``", "real", "critical Rayleigh number", "10.0" - "``Tliquidus_max``", "real", "maximum liquidus temperature of mush (C)", "0.0" + "``sw_redist``", "logical", "redistribute internal shortwave to surface", "``.false.``" + "``sw_frac``", "real", "fraction redistributed", "0.9" + "``sw_dtemp``", "real", "temperature difference from melt to start redistributing", "0.02" "", "", "", "" - -.. _dynamics_nml: - dynamics_nml ~~~~~~~~~~~~~~~~~~~~~~~~~ -.. - commented out - "``damping_andacc``", "integer", "damping factor for Anderson acceleration", "0" - "``dim_andacc``", "integer", "size of Anderson minimization matrix", "5" - "``fpfunc_andacc``", "``1``", "fix point function for Anderson acceleration, FMGRES(A(x),b(x))", "1" - "", "``2``", "fix point function for Anderson acceleration, x-A(x)x+b(x)", "" - "``reltol_andacc``", "real", "relative tolerance for Anderson acceleration", "1e-6" - "``start_andacc``", "integer", "acceleration delay factor for Anderson acceleration", "0" - commented out - .. 
csv-table:: **dynamics_nml namelist options** :header: "variable", "options/format", "description", "default value" :widths: 15, 15, 30, 15 "", "", "", "" "``advection``", "``remap``", "linear remapping advection scheme", "``remap``" - "", "``upwind``", "donor cell advection", "" - "``algo_nonlin``", "``anderson``", "use nonlinear anderson algorithm for implicit solver", "picard" - "", "``picard``", "use picard algorithm", "" + "", "``none``", "advection off", "" "``alphab``", "real", ":math:`\alpha_{b}` factor in :cite:`Lemieux16`", "20.0" "``arlx``", "real", "revised_evp value", "300.0" "``brlx``", "real", "revised_evp value", "300.0" - "``capping_method``", "``max``", "max capping in :cite:`Hibler79`", "max" - "", "``sum``", "sum capping in :cite:`Kreyscher00`", "" + "``basalstress``", "logical", "use basal stress parameterization for landfast ice", "``.false.``" "``Cf``", "real", "ratio of ridging work to PE change in ridging", "17.0" "``coriolis``", "``constant``", "constant coriolis value = 1.46e-4 s\ :math:`^{-1}`", "``latitude``" "", "``latitude``", "coriolis variable by latitude", "" "", "``zero``", "zero coriolis", "" "``Cstar``", "real", "constant in Hibler strength formula", "20" - "``deltaminEVP``", "real", "minimum delta for viscosities", "1e-11" - "``deltaminVP``", "real", "minimum delta for viscosities", "2e-9" - "``dim_fgmres``", "integer", "maximum number of Arnoldi iterations for FGMRES solver", "50" - "``dim_pgmres``", "integer", "maximum number of Arnoldi iterations for PGMRES preconditioner", "5" - "``e_plasticpot``", "real", "aspect ratio of elliptical plastic potential", "2.0" - "``e_yieldcurve``", "real", "aspect ratio of elliptical yield curve", "2.0" - "``elasticDamp``", "real", "elastic damping parameter", "0.36" - "``evp_algorithm``", "``standard_2d``", "standard 2d EVP memory parallel solver", "standard_2d" - "", "``shared_mem_1d``", "1d shared memory solver", "" + "``e_ratio``", "real", "EVP ellipse aspect ratio", "2.0" "``kdyn``", "``-1``", "dynamics algorithm OFF", "1" "", "``0``", "dynamics OFF", "" "", "``1``", "EVP dynamics", "" "", "``2``", "EAP dynamics", "" - "", "``3``", "VP dynamics", "" + "``kevp_kernel``", "``0``", "standard 2D EVP memory parallel solver", "0" + "", "``2``", "1D shared memory solver (not fully validated)", "" "``kstrength``", "``0``", "ice strength formulation :cite:`Hibler79`", "1" "", "``1``", "ice strength formulation :cite:`Rothrock75`", "" "``krdg_partic``", "``0``", "old ridging participation function", "1" @@ -513,35 +386,15 @@ dynamics_nml "``ktransport``", "``-1``", "transport disabled", "1" "", "``1``", "transport enabled", "" "``Ktens``", "real", "Tensile strength factor (see :cite:`Konig10`)", "0.0" - "``k1``", "real", "1st free parameter for landfast parameterization", "7.5" + "``k1``", "real", "1st free parameter for landfast parameterization", "8.0" "``k2``", "real", "2nd free parameter (N/m\ :math:`^3`) for landfast parameterization", "15.0" - "``maxits_fgmres``", "integer", "maximum number of restarts for FGMRES solver", "1" - "``maxits_nonlin``", "integer", "maximum number of nonlinear iterations for VP solver", "10" - "``maxits_pgmres``", "integer", "maximum number of restarts for PGMRES preconditioner", "1" - "``monitor_fgmres``", "logical", "write velocity norm at each FGMRES iteration", "``.false.``" - "``monitor_nonlin``", "logical", "write velocity norm at each nonlinear iteration", "``.false.``" - "``monitor_pgmres``", "logical", "write velocity norm at each PGMRES iteration", "``.false.``" "``mu_rdg``", 
"real", "e-folding scale of ridged ice for ``krdg_partic`` = 1 in m^0.5", "3.0" "``ndte``", "integer", "number of EVP subcycles", "120" - "``ortho_type``", "``cgs``", "Use classical Gram-Shchmidt in FGMRES solver", "``mgs``" - "", "``mgs``", "Use modified Gram-Shchmidt in FGMRES solver", "" - "``precond``", "``diag``", "Use Jacobi preconditioner for the FGMRES solver", "``pgmres``" - "", "``ident``", "Don't use a preconditioner for the FGMRES solver", "" - "", "``pgmres``", "Use GMRES as preconditioner for FGMRES solver", "" "``Pstar``", "real", "constant in Hibler strength formula (N/m\ :math:`^2`)", "2.75e4" - "``reltol_fgmres``", "real", "relative tolerance for FGMRES solver", "1e-1" - "``reltol_nonlin``", "real", "relative tolerance for nonlinear solver", "1e-8" - "``reltol_pgmres``", "real", "relative tolerance for PGMRES preconditioner", "1e-6" "``revised_evp``", "logical", "use revised EVP formulation", "``.false.``" - "``seabed_stress``", "logical", "use seabed stress parameterization for landfast ice", "``.false.``" - "``seabed_stress_method``", "``LKD``", "linear keel draft method :cite:`Lemieux16`", "``LKD``" - "", "``probabilistic``", "probability of contact method :cite:`Dupont22`", "" "``ssh_stress``", "``coupled``", "computed from coupled sea surface height gradient", "``geostrophic``" "", "``geostropic``", "computed from ocean velocity", "" "``threshold_hw``", "real", "Max water depth for grounding (see :cite:`Amundrud04`)", "30." - "``use_mean_vrel``", "logical", "Use mean of two previous iterations for vrel in VP", "``.true.``" - "``visc_method``", "``avg_strength``", "average strength for viscosities on U grid", "``avg_zeta``" - "", "``avg_zeta``", "average zeta for viscosities on U grid", "" "``yield_curve``", "``ellipse``", "elliptical yield curve", "``ellipse``" "", "", "", "" @@ -567,13 +420,7 @@ shortwave_nml "``R_pnd``", "real", "tuning parameter for ponded sea ice albedo from Delta-Eddington shortwave", "0.0" "``R_snw``", "real", "tuning parameter for snow (broadband albedo) from Delta-Eddington shortwave", "1.5" "``shortwave``", "``ccsm3``", "NCAR CCSM3 shortwave distribution method", "``ccsm3``" - "", "``dEdd``", "Delta-Eddington method (3-band)", "" - "", "``dEdd_snicar_ad``", "Delta-Eddington method with 5 band snow", "" - "``snw_ssp_table``", "``snicar``", "lookup table for `dEdd_snicar_ad`", "``test``" - "", "``test``", "reduced lookup table for `dEdd_snicar_ad` testing", "" - "``sw_dtemp``", "real", "temperature difference from melt to start redistributing", "0.02" - "``sw_frac``", "real", "fraction redistributed", "0.9" - "``sw_redist``", "logical", "redistribute internal shortwave to surface", "``.false.``" + "", "``dEdd``", "Delta-Eddington method", "" "", "", "", "" ponds_nml @@ -588,120 +435,58 @@ ponds_nml "``frzpnd``", "``cesm``", "CESM pond refreezing forumulation", "``cesm``" "", "``hlid``", "Stefan refreezing with pond ice thickness", "" "``hp1``", "real", "critical ice lid thickness for topo ponds in m", "0.01" - "``hs0``", "real", "snow depth of transition to bare sea ice in m", "" + "``hs0``", "real", "snow depth of transition to bare sea ice in m", "0.03" "``hs1``", "real", "snow depth of transition to pond ice in m", "0.03" "``pndaspect``", "real", "aspect ratio of pond changes (depth:area)", "0.8" "``rfracmax``", ":math:`0 \le r_{max} \le 1`", "maximum melt water added to ponds", "0.85" "``rfracmin``", ":math:`0 \le r_{min} \le 1`", "minimum melt water added to ponds", "0.15" "", "", "", "" -snow_nml -~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. 
csv-table:: **snow_nml namelist options** - :header: "variable", "options/format", "description", "default value" - :widths: 15, 15, 30, 15 - - "", "", "", "" - "``drhosdwind``", "real", "wind compaction factor for snow in kg-s/m^4", "27.3" - "``rhosmax``", "real", "maximum snow density in kg/m^3", "450." - "``rhosmin``", "real", "minimum snow density in kg/m^3", "100." - "``rhosnew``", "real", "new snow density in kg/m^3", "100." - "``rsnw_fall``", "real", "radius of new snow in 1.0e-6 m", "100." - "``rsnw_tmax``", "real", "maximum snow radius in 1.0e-6 m", "1500." - "``snwgrain``", "logical", "snow metamorphosis flag", "``.false.``" - "``snwlvlfac``", "real", "fractional increase in snow depth", "0.3" - "``snwredist``", "``bulk``", "bulk snow redistribution scheme", "``none``" - "", "``ITD``", "ITD snow redistribution scheme", "" - "", "``ITDrdg``", "ITDrdg snow redistribution scheme", "" - "", "``none``", "snow redistribution scheme off", "" - "``snw_aging_table``", "file", "read 1D and 3D fields for dry metamorphosis lookup table", "test" - "", "snicar", "read 3D fields for dry metamorphosis lookup table", "" - "", "test", "internally generated dry metamorphosis lookup table for testing", "" - "``snw_drdt0_fname``", "string", "snow aging file drdt0 fieldname", "unknown" - "``snw_filename``", "string", "snow aging table data filename", "unknown" - "``snw_kappa_fname``", "string", "snow aging file kappa fieldname", "unknown" - "``snw_rhos_fname``", "string", "snow aging file rhos fieldname", "unknown" - "``snw_T_fname``", "string", "snow aging file T fieldname", "unknown" - "``snw_tau_fname``", "string", "snow aging file tau fieldname", "unknown" - "``snw_Tgrd_fname``", "string", "snow aging file Tgrd fieldname", "unknown" - "``use_smliq_pnd``", "logical", "use liquid in snow for ponds", "``.false.``" - "``windmin``", "real", "minimum wind speed to compact snow in m/s", "10." - "", "", "", "" - forcing_nml ~~~~~~~~~~~~~~~~~~~~~~~~~ -.. - commented out - "``calc_dragio``", "logical", "compute dragio from iceruf_ocean and thickness of first ocean level, not supported", "``.false.``" - "``iceruf_ocn``", "real", "under ice roughness in meters, not supported", "0.03" - commented out - ..
csv-table:: **forcing_nml namelist options** :header: "variable", "options/format", "description", "default value" :widths: 15, 15, 30, 15 "", "", "", "" - "``atmbndy``", "string", "bulk transfer coefficients", "``similarity``" - "", "``similarity``", "stability-based boundary layer", "" - "", "``constant``", "constant-based boundary layer", "" - "", "``mixed``", "stability-based boundary layer for wind stress, constant-based for sensible+latent heat fluxes", "" + "``atmbndy``", "``constant``", "bulk transfer coefficients", "``default``" + "", "``default``", "stability-based boundary layer", "" "``atmiter_conv``", "real", "convergence criteria for ustar", "0.0" - "``atm_data_dir``", "string", "path or partial path to atmospheric forcing data directory", "" + "``atm_data_dir``", "string", "path to atmospheric forcing data directory", "" "``atm_data_format``", "``bin``", "read direct access binary atmo forcing file format", "``bin``" "", "``nc``", "read netcdf atmo forcing files", "" "``atm_data_type``", "``box2001``", "forcing data for :cite:`Hunke01` box problem", "``default``" "", "``default``", "constant values defined in the code", "" - "", "``hycom``", "HYCOM atm forcing data in netCDF format", "" - "", "``JRA55``", "JRA55 forcing data :cite:`Tsujino18`", "" - "", "``JRA55do``", "JRA55do forcing data :cite:`Tsujino18`", "" + "", "``hycom``", "HYCOM atm forcing data in netcdf format", "" + "", "``JRA55_gx1``", "JRA55 forcing data for gx1 grid :cite:`Tsujino18`", "" + "", "``JRA55_gx3``", "JRA55 forcing data for gx3 grid :cite:`Tsujino18`", "" + "", "``JRA55_tx1``", "JRA55 forcing data for tx1 grid :cite:`Tsujino18`", "" + "", "``LYq``", "COREII Large-Yeager (AOMIP) forcing data :cite:`Large09`", "" "", "``monthly``", "monthly forcing data", "" "", "``ncar``", "NCAR bulk forcing data", "" "", "``oned``", "column forcing data", "" - "``atm_data_version``","string", "date of atm data forcing file creation", "``_undef``" "``bgc_data_dir``", "string", "path to oceanic forcing data directory", "'unknown_bgc_data_dir'" "``bgc_data_type``", "``clim``", "bgc climatological data", "``default``" "", "``default``", "constant values defined in the code", "" - "", "``hycom``", "HYCOM ocean forcing data in netCDF format", "" + "", "``hycom``", "HYCOM ocean forcing data in netcdf format", "" "", "``ncar``", "POP ocean forcing data", "" "``calc_strair``", "``.false.``", "read wind stress and speed from files", "``.true.``" "", "``.true.``", "calculate wind stress and speed", "" "``calc_Tsfc``", "logical", "calculate surface temperature", "``.true.``" - "``cpl_frazil``", "``external``", "frazil water/salt fluxes are handled outside of Icepack", "``fresh_ice_correction``" - "", "``fresh_ice_correction``", "correct fresh-ice frazil water/salt fluxes for mushy physics", "" - "", "``internal``", "send full frazil water/salt fluxes for mushy physics", "" "``default_season``", "``summer``", "forcing initial summer values", "``winter``" "", "``winter``", "forcing initial winter values", "" - "``emissivity``", "real", "emissivity of snow and ice", "0.985" - "``fbot_xfer_type``", "``Cdn_ocn``", "variable ocean heat transfer coefficient scheme", "``constant``" + "``emissivity``", "real", "emissivity of snow and ice", "0.95" + "``fbot_xfer_type``", "``Cdn_ocn``", "variabler ocean heat transfer coefficient scheme", "``constant``" "", "``constant``", "constant ocean heat transfer coefficient", "" "``fe_data_type``", "``clim``", "ocean climatology forcing value for iron", "``default``" "", "``default``", "default 
forcing value for iron", "" "``formdrag``", "logical", "calculate form drag", "``.false.``" "``fyear_init``", "integer", "first year of atmospheric forcing data", "1900" "``highfreq``", "logical", "high-frequency atmo coupling", "``.false.``" - "``ice_data_conc``", "``box2001``", "ice distribution ramped from 0 to 1 west to east consistent with :ref:`box2001` test (:cite:`Hunke01`)", "``default``" - "", "``c1``", "initial ice concentation of 1.0", "" - "", "``default``", "same as parabolic", "" - "", "``p5``", "initial concentration of 0.5", "" - "", "``p8``", "initial concentration of 0.8", "" - "", "``p9``", "initial concentration of 0.9", "" - "", "``parabolic``", "parabolic in ice thickness space with sum of aicen=1.0", "" - "``ice_data_dist``", "``box2001``", "ice distribution ramped from 0 to 1 west to east consistent with :ref:`box2001` test (:cite:`Hunke01`)", "``default``" - "", "``default``", "uniform distribution, equivalent to uniform", "" - "", "``gauss``", "gauss distbution of ice with a peak in the center of the domain", "" - "", "``uniform``", "uniform distribution, equivalent to default", "" - "``ice_data_type``", "``block``", "ice block covering about 25 percent of the area in center of domain", "``default``" - "", "``boxslotcyl``", "slot cylinder ice mask associated with :ref:`boxslotcyl` test (:cite:`Zalesak79`)", "" - "", "``box2001``", "box2001 ice mask associate with :ref:`box2001` test (:cite:`Hunke01`)", "" - "", "``channel``", "ice defined on entire grid in i-direction and 50% in j-direction in center of domain", "" - "", "``default``", "same as latsst", "" - "", "``eastblock``", "ice block covering about 25 percent of domain at the east edge of the domain", "" - "", "``latsst``", "ice dependent on latitude and ocean temperature", "" - "", "``uniform``", "ice defined at all grid points", "" - "``ice_ref_salinity``", "real", "sea ice salinity for coupling fluxes (ppt)", "4.0" - "``iceruf``", "real", "ice surface roughness at atmosphere interface in meters", "0.0005" + "``ice_data_type``", "``boxslotcyl``", "initialize ice concentration and velocity for :ref:`boxslotcyl` test (:cite:`Zalesak79`)", "``default``" + "", "``box2001``", "initialize ice concentration for :ref:`box2001` test (:cite:`Hunke01`)", "" + "", "``default``", "no special initialization", "" "``l_mpond_fresh``", "``.false.``", "release pond water immediately to ocean", "``.false.``" "", "``true``", "retain (topo) pond water until ponds drain", "" "``natmiter``", "integer", "number of atmo boundary layer iterations", "5" @@ -710,10 +495,10 @@ forcing_nml "``oceanmixed_ice``", "logical", "active ocean mixed layer calculation", "``.false.``" "``ocn_data_dir``", "string", "path to oceanic forcing data directory", "'unknown_ocn_data_dir'" "``ocn_data_format``", "``bin``", "read direct access binary ocean forcing files", "``bin``" - "", "``nc``", "read netCDF ocean forcing files", "" + "", "``nc``", "read netcdf ocean forcing files", "" "``ocn_data_type``", "``clim``", "ocean climatological data formulation", "``default``" "", "``default``", "constant values defined in the code", "" - "", "``hycom``", "HYCOM ocean forcing data in netCDF format", "" + "", "``hycom``", "HYCOM ocean forcing data in netcdf format", "" "", "``ncar``", "POP ocean forcing data", "" "``precip_units``", "``mks``", "liquid precipitation data units", "``mks``" "", "``mm_per_month``", "", "" @@ -722,15 +507,11 @@ forcing_nml "``restart_coszen``", "logical", "read/write coszen in restart files", "``.false.``" "``restore_ocn``",
"logical", "restore sst to data", "``.false.``" "``restore_ice``", "logical", "restore ice state along lateral boundaries", "``.false.``" - "``rotate_wind``", "logical", "rotate wind from east/north to computation grid", "``.true.``" - "``saltflux_option``", "``constant``", "computed using ice_ref_salinity", "``constant``" - "", "``prognostic``", "computed using prognostic salinity", "" - "``tfrz_option``","``constant``", "constant ocean freezing temperature (Tocnfrz)","``mushy``" - "", "``linear_salt``", "linear function of salinity (ktherm=1)", + "``tfrz_option``", "``linear_salt``", "linear function of salinity (ktherm=1)", "``mushy``" "", "``minus1p8``", "constant ocean freezing temperature (:math:`-1.8^{\circ} C`)", "" "", "``mushy``", "matches mushy-layer thermo (ktherm=2)", "" "``trestore``", "integer", "sst restoring time scale (days)", "90" - "``ustar_min``", "real", "minimum value of ocean friction velocity in m/s", "0.0005" + "``ustar_min``", "real", "minimum value of ocean friction velocity", "0.0005 m/s" "``update_ocn_f``", "``.false.``", "do not include frazil water/salt fluxes in ocn fluxes", "``.false.``" "", "``true``", "include frazil water/salt fluxes in ocn fluxes", "" "``wave_spec_file``", "string", "data file containing wave spectrum forcing data", "" @@ -796,7 +577,7 @@ zbgc_nml "``f_exude_s``", "real", "fraction of exudation to DOC saccharids", "1.0" "``grid_o``", "real", "z biology for bottom flux", "5.0" "``grid_o_t``", "real", "z biology for top flux", "5.0" - "``grid_oS``", "real", "zsalinity DEPRECATED", "" + "``grid_oS``", "real", "z salinity for bottom flux", "5.0" "``grow_Tdep_diatoms``", "real", "temperature dependence growth diatoms per degC", "0.06" "``grow_Tdep_phaeo``", "real", "temperature dependence growth phaeocystis per degC", "0.06" "``grow_Tdep_sp``", "real", "temperature dependence growth small plankton per degC", "0.06" @@ -822,10 +603,10 @@ zbgc_nml "``K_Sil_sp``", "real", "silicate half saturation small plankton mmol/m^3", "0.0" "``kn_bac_protein``", "real", "bacterial degradation of DON per day", "0.03" "``l_sk``", "real", "characteristic diffusive scale in m", "7.0" - "``l_skS``", "real", "zsalinity DEPRECATED", "" + "``l_skS``", "real", "z salinity characteristic diffusive scale in m", "7.0" "``max_dfe_doc1``", "real", "max ratio of dFe to saccharides in the ice in nm Fe / muM C", "0.2" "``max_loss``", "real", "restrict uptake to percent of remaining value", "0.9" - "``modal_aero``", "logical", "modal aerosols", "``.false.``" + "``modal_aero``", "logical", "modal aerosols", "``.false.``" "``mort_pre_diatoms``", "real", "mortality diatoms", "0.007" "``mort_pre_phaeo``", "real", "mortality phaeocystis", "0.007" "``mort_pre_sp``", "real", "mortality small plankton", "0.007" @@ -862,14 +643,14 @@ zbgc_nml "``ratio_S2N_sp``", "real", "algal S to N in mol/mol small plankton", "0.03" "``restart_bgc``", "logical", "restart tracer values from file", "``.false.``" "``restart_hbrine``", "logical", "", "``.false.``" - "``restart_zsal``", "logical", "zsalinity DEPRECATED", "``.false.``" + "``restart_zsal``", "logical", "", "``.false.``" "``restore_bgc``", "logical", "restore bgc to data", "``.false.``" "``R_dFe2dust``", "real", "g/g :cite:`Tagliabue09`", "0.035" "``scale_bgc``", "logical", "", "``.false.``" "``silicatetype``", "real", "mobility type between stationary and mobile silicate", "-1.0" "``skl_bgc``", "logical", "biogeochemistry", "``.false.``" "``solve_zbgc``", "logical", "", "``.false.``" - "``solve_zsal``", "logical", "zsalinity DEPRECATED,
update salinity tracer profile", "``.false.``" + "``solve_zsal``", "logical", "update salinity tracer profile", "``.false.``" "``tau_max``", "real", "long time mobile to stationary exchanges", "1.73e-5" "``tau_min``", "real", "rapid module to stationary exchanges", "5200." "``tr_bgc_Am``", "logical", "ammonium tracer", "``.false.``" @@ -902,17 +683,6 @@ zbgc_nml icefields_nml ~~~~~~~~~~~~~~~~~~~~~~~~~ -There are several icefield namelist groups to control model history output. See the -source code for a full list of supported output fields. - -* ``icefields_nml`` is in **cicecore/cicedyn/analysis/ice_history_shared.F90** -* ``icefields_bgc_nml`` is in **cicecore/cicedyn/analysis/ice_history_bgc.F90** -* ``icefields_drag_nml`` is in **cicecore/cicedyn/analysis/ice_history_drag.F90** -* ``icefields_fsd_nml`` is in **cicecore/cicedyn/analysis/ice_history_fsd.F90** -* ``icefields_mechred_nml`` is in **cicecore/cicedyn/analysis/ice_history_mechred.F90** -* ``icefields_pond_nml`` is in **cicecore/cicedyn/analysis/ice_history_pond.F90** -* ``icefields_snow_nml`` is in **cicecore/cicedyn/analysis/ice_history_snow.F90** - .. csv-table:: **icefields_nml namelist options** :header: "variable", "options/format", "description", "default value" :widths: 15, 15, 30, 15 @@ -934,4 +704,3 @@ source code for a full list of supported output fields. "", "``md``", "*e.g.,* write both monthly and daily files", "" "", "", "", "" - diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index 7d172e91d..cbfe37b0c 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -5,9 +5,7 @@ Implementation ======================== CICE is written in FORTRAN90 and runs on platforms using UNIX, LINUX, -and other operating systems. The current coding standard is Fortran2003 -with use of Fortran2008 feature CONTIGUOUS in the 1d evp solver. -The code is based on a two-dimensional +and other operating systems. The code is based on a two-dimensional horizontal orthogonal grid that is broken into two-dimensional horizontal blocks and parallelized over blocks with MPI and OpenMP threads. The code also includes some optimizations @@ -47,7 +45,7 @@ as follows **cicecore/** CICE source code -**cicecore/cicedyn/** +**cicecore/cicedynB/** routines associated with the dynamics core **cicecore/drivers/** @@ -81,94 +79,34 @@ this tool. Grid, boundary conditions and masks ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The spatial discretization of the original implementation is specialized -for a generalized orthogonal B-grid as in :cite:`Murray96` or -:cite:`Smith95`. Figure :ref:`fig-Bgrid` is a schematic of CICE -B-grid. This cell with the tracer point :math:`t(i,j)` in the middle -is referred to as T-cell. The ice and snow area, volume and energy are -given at the t-point. The velocity :math:`{\bf u}(i,j)` -associated with :math:`t(i,j)` is defined in the northeast (NE) -corner. The other corners of the T-cell are northwest (NW), southwest -(SW) and southeast (SE). The lengths of the four edges of the T-cell -are respectively HTN, HTW, HTS and HTE for the northern, western, -southern and eastern edges. The lengths of the T-cell through the -middle are respectively dxT and dyT along the x and y axis. - -We also occasionally refer to “U-cells,” which are centered on the -northeast corner of the corresponding T-cells and have velocity in the -center of each. The velocity components are aligned along grid lines. 
- -The internal ice stress tensor takes four different values within a grid -cell with the B-grid implementation; bilinear approximations are used for the stress tensor and the ice +The spatial discretization is specialized for a generalized orthogonal +B-grid as in :cite:`Murray96` or +:cite:`Smith95`. The ice and snow area, volume and energy are +given at the center of the cell, velocity is defined at the corners, and +the internal ice stress tensor takes four different values within a grid +cell; bilinear approximations are used for the stress tensor and the ice velocity across the cell, as described in :cite:`Hunke02`. This tends to avoid the grid decoupling problems associated with the -B-grid. - -.. _fig-Bgrid: - -.. figure:: ./figures/CICE_Bgrid.png - :align: center - :scale: 55% - - Schematic of CICE B-grid. - -The ability to solve on the C and CD grids was added later. With the C-grid, -the u velocity points are located on the E edges and the v velocity points -are located on the N edges of the T cell rather than at the T cell corners. -On the CD-grid, the u and v velocity points are located on both the N and E edges. -To support this capability, N and E grids were added to the existing T and U grids, -and the N and E grids are defined at the northern and eastern edge of the T cell. -This is shown in Figure :ref:`fig-Cgrid`. - -.. _fig-Cgrid: - -.. figure:: ./figures/CICE_Cgrid.png - :align: center - :scale: 55% - - Schematic of CICE CD-grid. +B-grid. EVP is available on the C-grid through the MITgcm code +distribution, http://mitgcm.org/viewvc/MITgcm/MITgcm/pkg/seaice/. +Since ice thickness and thermodynamic variables such as temperature are given +in the center of each cell, the grid cells are referred to as “T cells.” +We also occasionally refer to “U cells,” which are centered on the +northeast corner of the corresponding T cells and have velocity in the +center of each. The velocity components are aligned along grid lines. -The user has several ways to initialize the grid: *popgrid* reads grid +The user has several choices of grid routines: *popgrid* reads grid lengths and other parameters for a nonuniform grid (including tripole and regional grids), and *rectgrid* creates a regular rectangular grid. -The input files **global_gx3.grid** and **global_gx3.kmt** contain the +The input files **global\_gx3.grid** and **global\_gx3.kmt** contain the :math:`\left<3^\circ\right>` POP grid and land mask; -**global_gx1.grid** and **global_gx1.kmt** contain the -:math:`\left<1^\circ\right>` grid and land mask, and **global_tx1.grid** -and **global_tx1.kmt** contain the :math:`\left<1^\circ\right>` POP +**global\_gx1.grid** and **global\_gx1.kmt** contain the +:math:`\left<1^\circ\right>` grid and land mask, and **global\_tx1.grid** +and **global\_tx1.kmt** contain the :math:`\left<1^\circ\right>` POP tripole grid and land mask. These are binary unformatted, direct access, Big Endian files. -The input grid file for the B-grid and CD-grid is identical. That file -contains each cells' HTN, HTE, ULON, ULAT, and kmt value. From those -variables, the longitude, latitude, grid lengths (dx and dy), areas, -and masks can be derived for all grids. Table :ref:`tab-gridvars` lists -the primary prognostic grid variable names on the different grids. - -.. _tab-gridvars: - -.. 
table:: Primary CICE Prognostic Grid Variable Names - - +----------------+-------+-------+-------+-------+ - | variable | T | U | N | E | - +================+=======+=======+=======+=======+ - | longitude | TLON | ULON | NLON | ELON | - +----------------+-------+-------+-------+-------+ - | latitude | TLAT | ULAT | NLAT | ELAT | - +----------------+-------+-------+-------+-------+ - | dx | dxT | dxU | dxN | dxE | - +----------------+-------+-------+-------+-------+ - | dy | dyT | dyU | dyN | dyE | - +----------------+-------+-------+-------+-------+ - | area | tarea | uarea | narea | earea | - +----------------+-------+-------+-------+-------+ - | mask (logical) | tmask | umask | nmask | emask | - +----------------+-------+-------+-------+-------+ - | mask (real) | hm | uvm | npm | epm | - +----------------+-------+-------+-------+-------+ - - In CESM, the sea ice model may exchange coupling fluxes using a different grid than the computational grid. This functionality is activated using the namelist variable ``gridcpl_file``. @@ -183,7 +121,7 @@ block distribution are ``nx_block`` :math:`\times`\ ``ny_block``. The physical portion of a subdomain is indexed as [``ilo:ihi``, ``jlo:jhi``], with nghost “ghost” or “halo" cells outside the domain used for boundary conditions. These parameters are illustrated in :ref:`fig-grid` in one -dimension. The routines *global_scatter* and *global_gather* +dimension. The routines *global\_scatter* and *global\_gather* distribute information from the global domain to the local domains and back, respectively. If MPI is not being used for grid decomposition in the ice model, these routines simply adjust the indexing on the global @@ -192,9 +130,7 @@ recommend that the user choose the local domains so that the global domain is evenly divided, if this is not possible then the furthest east and/or north blocks will contain nonphysical points (“padding”). These points are excluded from the computation domain and have little effect -on model performance. ``nghost`` is a hardcoded parameter in **ice_blocks.F90**. -While the halo code has been implemented to support arbitrary sized halos, -``nghost`` is set to 1 and has not been formally tested on larger halos. +on model performance. .. _fig-grid: @@ -215,34 +151,32 @@ four subdomains. The user sets the ``NTASKS`` and ``NTHRDS`` settings in **cice.settings** and chooses a block size ``block_size_x`` :math:`\times`\ ``block_size_y``, ``max_blocks``, and decomposition information ``distribution_type``, ``processor_shape``, -and ``distribution_type`` in **ice_in**. That information is used to +and ``distribution_type`` in **ice\_in**. That information is used to determine how the blocks are distributed across the processors, and how the processors are -distributed across the grid domain. The model is parallelized over blocks -for both MPI and OpenMP. Some suggested combinations for these +distributed across the grid domain. Recommended combinations of these parameters for best performance are given in Section :ref:`performance`. The script **cice.setup** computes some default decompositions and layouts but the user can overwrite the defaults by manually changing the values in -`ice_in`. At runtime, the model will print decomposition +`ice\_in`. At runtime, the model will print decomposition information to the log file, and if the block size or max blocks is inconsistent with the task and thread size, the model will abort. The code will also print a warning if the maximum number of blocks is too large. 
Although this is not fatal, it does use extra memory. If ``max_blocks`` is -set to -1, the code will compute a tentative ``max_blocks`` on the fly. +set to -1, the code will compute a ``max_blocks`` on the fly. -A loop at the end of routine *create_blocks* in module -**ice_blocks.F90** will print the locations for all of the blocks on -the global grid if the namelist variable ``debug_blocks`` is set to be true. Likewise, a similar loop at -the end of routine *create_local_block_ids* in module -**ice_distribution.F90** will print the processor and local block +A loop at the end of routine *create\_blocks* in module +**ice\_blocks.F90** will print the locations for all of the blocks on +the global grid if dbug is set to be true. Likewise, a similar loop at +the end of routine *create\_local\_block\_ids* in module +**ice\_distribution.F90** will print the processor and local block number for each block. With this information, the grid decomposition -into processors and blocks can be ascertained. This ``debug_blocks`` variable -should be used carefully as there may be hundreds or thousands of blocks to print -and this information should be needed only rarely. ``debug_blocks`` -can be set to true using the -``debugblocks`` option with **cice.setup**. This information is +into processors and blocks can be ascertained. The dbug flag must be +manually set in the code in each case (independently of the dbug flag in +**ice\_in**), as there may be hundreds or thousands of blocks to print +and this information should be needed only rarely. This information is much easier to look at using a debugger such as Totalview. There is also -an output field that can be activated in `icefields_nml`, ``f_blkmask``, +an output field that can be activated in `icefields\_nml`, ``f_blkmask``, that prints out the variable ``blkmask`` to the history file and which labels the blocks in the grid decomposition according to ``blkmask = my_task + iblk/100``. @@ -268,83 +202,41 @@ middle of the row. The grid is constructed by “folding” the top row, so that the left-hand half and the right-hand half of it coincide. Two choices for constructing the tripole grid are available. The one first introduced to CICE is called “U-fold”, which means that the poles and -the grid cells between them are U-cells on the grid. Alternatively the -poles and the cells between them can be grid T-cells, making a “T-fold.” +the grid cells between them are U cells on the grid. Alternatively the +poles and the cells between them can be grid T cells, making a “T-fold.” Both of these options are also supported by the OPA/NEMO ocean model, which calls the U-fold an “f-fold” (because it uses the Arakawa C-grid -in which U-cells are on T-rows). The choice of tripole grid is given by +in which U cells are on T-rows). The choice of tripole grid is given by the namelist variable ``ns_boundary_type``, ‘tripole’ for the U-fold and ‘tripoleT’ for the T-fold grid. In the U-fold tripole grid, the poles have U-index -:math:`nx\_global/2` and :math:`nx\_global` on the top U-row of the -physical grid, and points with U-index :math:`i` and :math:`nx\_global-i` +:math:`{\tt nx\_global}/2` and ``nx_global`` on the top U-row of the +physical grid, and points with U-index i and :math:`{\tt nx\_global-i}` are coincident. Let the fold have U-row index :math:`n` on the global grid; this will also be the T-row index of the T-row to the south of the fold. There are ghost (halo) T- and U-rows to the north, beyond the fold, on the logical grid. 
The point with index i along the ghost T-row of index :math:`n+1` physically coincides with point -:math:`nx\_global-i+1` on the T-row of index :math:`n`. The +:math:`{\tt nx\_global}-{\tt i}+1` on the T-row of index :math:`n`. The ghost U-row of index :math:`n+1` physically coincides with the U-row of -index :math:`n-1`. In the schematics below, symbols A-H represent -grid points from 1:nx_global at a given j index and the setup of the -tripole seam is depicted within a few rows of the seam. - -.. _tab-tripole: +index :math:`n-1`. -.. table:: Tripole (u-fold) Grid Schematic - :align: center - - +--------------+---------------------------------------+--------------+ - | global j | | global j | - | index | grid point IDs (i index) | index source | - +==============+====+====+====+====+====+====+====+====+==============+ - | ny_global+2 | H | G | F | E | D | C | B | A | ny_global-1 | - +--------------+----+----+----+----+----+----+----+----+--------------+ - | ny_global+1 | H | G | F | E | D | C | B | A | ny_global | - +--------------+----+----+----+----+----+----+----+----+--------------+ - | ny_global | A | B | C | D | E | F | G | H | | - +--------------+----+----+----+----+----+----+----+----+--------------+ - | ny_global-1 | A | B | C | D | E | F | G | H | | - +--------------+----+----+----+----+----+----+----+----+--------------+ - - -In the T-fold tripole grid, the poles have T-index :math:`1` and and -:math:`nx\_global/2+1` on the top T-row of the physical grid, and -points with T-index :math:`i` and :math:`nx\_global-i+2` are +In the T-fold tripole grid, the poles have T-index 1 and and +:math:`{\tt nx\_global}/2+1` on the top T-row of the physical grid, and +points with T-index i and :math:`{\tt nx\_global}-{\tt i}+2` are coincident. Let the fold have T-row index :math:`n` on the global grid. It is usual for the northernmost row of the physical domain to be a U-row, but in the case of the T-fold, the U-row of index :math:`n` is “beyond” the fold; although it is not a ghost row, it is not physically independent, because it coincides with U-row :math:`n-1`, and it therefore has to be treated like a ghost row. Points i on U-row -:math:`n` coincides with :math:`nx\_global-i+1` on U-row +:math:`n` coincides with :math:`{\tt nx\_global}-{\tt i}+1` on U-row :math:`n-1`. There are still ghost T- and U-rows :math:`n+1` to the north of U-row :math:`n`. Ghost T-row :math:`n+1` coincides with T-row :math:`n-1`, and ghost U-row :math:`n+1` coincides with U-row :math:`n-2`. -.. _tab-tripoleT: - -.. 
table:: TripoleT (t-fold) Grid Schematic - :align: center - - +--------------+--------------------------------------------+--------------+ - | global j | | global j | - | index | grid point IDs (i index) | index source | - +==============+====+====+====+====+====+====+====+====+====+==============+ - | ny_global+2 | | H | G | F | E | D | C | B | A | ny_global-2 | - +--------------+----+----+----+----+----+----+----+----+----+--------------+ - | ny_global+1 | | H | G | F | E | D | C | B | A | ny_global-1 | - +--------------+----+----+----+----+----+----+----+----+----+--------------+ - | ny_global | A | BH | CG | DF | E | FD | GC | HB | | | - +--------------+----+----+----+----+----+----+----+----+----+--------------+ - | ny_global-1 | A | B | C | D | E | F | G | H | | | - +--------------+----+----+----+----+----+----+----+----+----+--------------+ - | ny_global-2 | A | B | C | D | E | F | G | H | | | - +--------------+----+----+----+----+----+----+----+----+----+--------------+ - - The tripole grid thus requires two special kinds of treatment for certain rows, arranged by the halo-update routines. First, within rows along the fold, coincident points must always have the same value. This @@ -354,36 +246,7 @@ the coincident physical rows. Both operations involve the tripole buffer, which is used to assemble the data for the affected rows. Special treatment is also required in the scattering routine, and when computing global sums one of each pair of coincident points has to be -excluded. Halos of center, east, north, and northeast points are supported, -and each requires slightly different halo indexing across the tripole seam. - -***************** -Rectangular grids -***************** - -Rectangular test grids can be defined for CICE. They are generated -internally and defined by several namelist -settings including ``grid_type`` = ``rectangular``, ``nx_global``, ``ny_global``, -``dx_rect``, ``dy_rect``, ``lonrefrect``, and ``latrefrect``. Forcing and -initial condition can be set via namelists ``atm_data_type``, ``ocn_data_type``, -``ice_data_type``, ``ice_data_conc``, ``ice_data_dist``. Variable grid spacing -is also supported with the namelist settings ``scale_dxdy`` which turns on -the option, and ``dxscale`` and ``dyscale`` which sets the variable grid scaling -factor. Values of 1.0 will produced constant grid spacing. For rectangular grids, -``lonrefrect`` and ``latrefrect`` define the lower left longitude and latitude -value of the grid, ``dx_rect`` and ``dy_rect`` define the base grid spacing, and -``dxscale`` and ``dyscale`` provide the grid space scaling. The base spacing -is set in the center of the rectangular domain and the scaling is applied symetrically -outward as a multiplicative factor in the x and y directions. - -Several predefined rectangular grids are available in CICE with -**cice.setup --grid** including ``gbox12``, ``gbox80``, ``gbox128``, and ``gbox180`` -where 12, 80, 128, and 180 are the number of gridcells in each direction. -Several predefined options also exist, set with **cice.setup --set**, to -establish varied idealized configurations of box tests including ``box2001``, -``boxadv``, ``boxchan``, ``boxchan1e``, ``boxchan1n``, ``boxnodyn``, ``boxrestore``, ``boxslotcyl``, and -``boxopen``, ``boxclosed``, and ``boxforcee``. See **cice.setup --help** for a current -list of supported settings. +excluded. ************** Vertical Grids @@ -405,20 +268,11 @@ routines, is adopted from POP. 
The boundary routines perform boundary communications among processors when MPI is in use and among blocks whenever there is more than one block per processor. -Boundary conditions are defined by the ``ns_boundary_type`` and ``ew_boundary_type`` -namelist inputs. Valid values are ``open`` and ``cyclic``. In addition, -``tripole`` and ``tripoleT`` are options for the ``ns_boundary_type``. -Closed boundary conditions are not supported currently. -The domain can be physically closed with the ``close_boundaries`` -namelist which forces a land mask on the boundary with a two gridcell depth. -Where the boundary is land, the boundary_type settings play no role. -For example, in the displaced-pole grids, at least one row of grid cells along the north -and south boundaries is land. Along the east/west domain boundaries not -masked by land, periodic conditions wrap the domain around the globe. In -this example, -the appropriate namelist settings are ``nsboundary_type`` = ``open``, -``ew_boundary_type`` = ``cyclic``, and ``close_boundaries`` = ``.false.``. - +Open/cyclic boundary conditions are the default in CICE; the physical +domain can still be closed using the land mask. In our bipolar, +displaced-pole grids, one row of grid cells along the north and south +boundaries is located on land, and along east/west domain boundaries not +masked by land, periodic conditions wrap the domain around the globe. CICE can be run on regional grids with open boundary conditions; except for variables describing grid lengths, non-land halo cells along the grid edge must be filled by restoring them to specified values. The @@ -427,7 +281,7 @@ restoring timescale ``trestore`` may be used (it is also used for restoring ocean sea surface temperature in stand-alone ice runs). This implementation is only intended to provide the “hooks" for a more sophisticated treatment; the rectangular grid option can be used to test -this configuration. The ‘displaced_pole’ grid option should not be used +this configuration. The ‘displaced\_pole’ grid option should not be used unless the regional grid contains land all along the north and south boundaries. The current form of the boundary condition routines does not allow Neumann boundary conditions, which must be set explicitly. This @@ -451,36 +305,27 @@ testing. Masks ***** -A land mask hm (:math:`M_h`) is specified in the cell centers (on the -T-grid), with 0 -representing land and 1 representing ocean cells. Corresponding masks -for the U, N, and E grids are given by +A land mask hm (:math:`M_h`) is specified in the cell centers, with 0 +representing land and 1 representing ocean cells. A corresponding mask +uvm (:math:`M_u`) for velocity and other corner quantities is given by .. math:: M_u(i,j)=\min\{M_h(l),\,l=(i,j),\,(i+1,j),\,(i,j+1),\,(i+1,j+1)\}. -.. math:: - M_n(i,j)=\min\{M_h(l),\,l=(i,j),\,(i,j+1)\}. - -.. math:: - M_e(i,j)=\min\{M_h(l),\,l=(i,j),\,(i+1,j)\}. - -The logical masks ``tmask``, ``umask``, ``nmask``, and ``emask`` -(which correspond to the real masks ``hm``, ``uvm``, ``npm``, and ``epm`` -respectively) are useful in conditional statements. +The logical masks ``tmask`` and ``umask`` (which correspond to the real masks +``hm`` and ``uvm``, respectively) are useful in conditional statements. In addition to the land masks, two other masks are implemented in -*dyn_prep* in order to reduce the dynamics component’s work on a global -grid. 
At each time step the logical masks ``iceTmask`` and ``iceUmask`` are +*dyn\_prep* in order to reduce the dynamics component’s work on a global +grid. At each time step the logical masks ``ice_tmask`` and ``ice_umask`` are determined from the current ice extent, such that they have the value “true” wherever ice exists. They also include a border of cells around the ice pack for numerical purposes. These masks are used in the dynamics component to prevent unnecessary calculations on grid points where there is no ice. They are not used in the thermodynamics component, so that ice may form in previously ice-free cells. Like the -land masks ``hm`` and ``uvm``, the ice extent masks ``iceTmask`` and ``iceUmask`` -are for T-cells and U-cells, respectively. Note that the ice extent masks -``iceEmask`` and ``iceNmask`` are also defined when using the C or CD grid. +land masks ``hm`` and ``uvm``, the ice extent masks ``ice_tmask`` and ``ice_umask`` +are for T cells and U cells, respectively. Improved parallel performance may result from utilizing halo masks for boundary updates of the full ice state, incremental remapping transport, @@ -496,122 +341,6 @@ or southern hemispheres, respectively. Special constants (``spval`` and points in the history files and diagnostics. -.. _interpolation: - -**************************** -Interpolating between grids -**************************** - -Fields in CICE are generally defined at particular grid locations, such as T cell centers, -U corners, or N or E edges. These are assigned internally in CICE based on the ``grid_ice`` -namelist variable. Forcing/coupling fields are also associated with a -specific set of grid locations that may or may not be the same as on the internal CICE model grid. -The namelist variables ``grid_atm`` and ``grid_ocn`` define the forcing/coupling grids. -The ``grid_ice``, ``grid_atm``, and ``grid_ocn`` variables are independent and take -values like ``A``, ``B``, ``C``, or ``CD`` consistent with the Arakawa grid convention :cite:`Arakawa77`. -The relationship between the grid system and the internal grids is shown in :ref:`tab-gridsys`. - -.. _tab-gridsys: - -.. table:: Grid System and Type Definitions - :align: center - - +--------------+----------------+----------------+----------------+ - | grid system | thermo grid | u dynamic grid | v dynamic grid | - +==============+================+================+================+ - | A | T | T | T | - +--------------+----------------+----------------+----------------+ - | B | T | U | U | - +--------------+----------------+----------------+----------------+ - | C | T | E | N | - +--------------+----------------+----------------+----------------+ - | CD | T | N+E | N+E | - +--------------+----------------+----------------+----------------+ - -For all grid systems, thermodynamic variables are always defined on the ``T`` grid for the model and -model forcing/coupling fields. However, the dynamics u and v fields vary. -In the ``CD`` grid, there are twice as many u and v fields as on the other grids. Within the CICE model, -the variables ``grid_ice_thrm``, ``grid_ice_dynu``, ``grid_ice_dynv``, ``grid_atm_thrm``, -``grid_atm_dynu``, ``grid_atm_dynv``, ``grid_ocn_thrm``, ``grid_ocn_dynu``, and ``grid_ocn_dynv`` are -character strings (``T``, ``U``, ``N``, ``E`` , ``NE``) derived from the ``grid_ice``, ``grid_atm``, -and ``grid_ocn`` namelist values. - -The CICE model has several internal methods that will interpolate (a.k.a. 
map or average) fields on -(``T``, ``U``, ``N``, ``E``, ``NE``) grids to (``T``, ``U``, ``N``, ``E``). An interpolation -to an identical grid results in a field copy. The generic interface to this method is ``grid_average_X2Y``, -and there are several forms. - -.. code-block:: fortran - - subroutine grid_average_X2Y(type,work1,grid1,work2,grid2) - character(len=*) , intent(in) :: type ! mapping type (S, A, F) - real (kind=dbl_kind), intent(in) :: work1(:,:,:) ! input field(nx_block, ny_block, max_blocks) - character(len=*) , intent(in) :: grid1 ! work1 grid (T, U, N, E) - real (kind=dbl_kind), intent(out) :: work2(:,:,:) ! output field(nx_block, ny_block, max_blocks) - character(len=*) , intent(in) :: grid2 ! work2 grid (T, U, N, E) - -where type is an interpolation type with the following valid values, - -type = ``S`` is a normalized, masked, area-weighted interpolation - -.. math:: - work2 = \frac{\sum_{i=1}^{n} (M_{1i}A_{1i}work1_{i})} {\sum_{i=1}^{n} (M_{1i}A_{1i})} - -type = ``A`` is a normalized, unmasked, area-weighted interpolation - -.. math:: - work2 = \frac{\sum_{i=1}^{n} (A_{1i}work1_{i})} {\sum_{i=1}^{n} (A_{1i})} - -type = ``F`` is a normalized, unmasked, conservative flux interpolation - -.. math:: - work2 = \frac{\sum_{i=1}^{n} (A_{1i}work1_{i})} {n*A_{2}} - -with A defined as the appropriate gridcell area and M as the gridcell mask. -Another form of the ``grid_average_X2Y`` is - -.. code-block:: fortran - - subroutine grid_average_X2Y(type,work1,grid1,wght1,mask1,work2,grid2) - character(len=*) , intent(in) :: type ! mapping type (S, A, F) - real (kind=dbl_kind), intent(in) :: work1(:,:,:) ! input field(nx_block, ny_block, max_blocks) - real (kind=dbl_kind), intent(in) :: wght1(:,:,:) ! input weight(nx_block, ny_block, max_blocks) - real (kind=dbl_kind), intent(in) :: mask1(:,:,:) ! input mask(nx_block, ny_block, max_blocks) - character(len=*) , intent(in) :: grid1 ! work1 grid (T, U, N, E) - real (kind=dbl_kind), intent(out) :: work2(:,:,:) ! output field(nx_block, ny_block, max_blocks) - character(len=*) , intent(in) :: grid2 ! work2 grid (T, U, N, E) - -In this case, the input arrays `wght1` and `mask1` are used in the interpolation equations instead of gridcell -area and mask. This version allows the user to define the weights and mask -explicitly. This implementation is supported only for type = ``S`` or ``A`` interpolations. - -A final form of the ``grid_average_X2Y`` interface is - -.. code-block:: fortran - - subroutine grid_average_X2Y(type,work1a,grid1a,work1b,grid1b,work2,grid2) - character(len=*) , intent(in) :: type ! mapping type (S, A, F) - real (kind=dbl_kind), intent(in) :: work1a(:,:,:) ! input field(nx_block, ny_block, max_blocks) - character(len=*) , intent(in) :: grid1a ! work1 grid (N, E) - real (kind=dbl_kind), intent(in) :: work1b(:,:,:) ! input field(nx_block, ny_block, max_blocks) - character(len=*) , intent(in) :: grid1b ! work1 grid (N, E) - real (kind=dbl_kind), intent(out) :: work2(:,:,:) ! output field(nx_block, ny_block, max_blocks) - character(len=*) , intent(in) :: grid2 ! work2 grid (T, U) - -This version supports mapping from an ``NE`` grid to a ``T`` or ``U`` grid. In this case, the ``1a`` arguments -are for either the `N` or `E` field and the 1b arguments are for the complementary field (``E`` or ``N`` respectively). -At present, only ``S`` type mappings are supported with this interface. - -In all cases, the work1, wght1, and mask1 input arrays should have correct halo values when called. 
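As an illustrative sketch (not taken from the CICE sources) of the weighted form above, suppose ``worka`` is a hypothetical field on the T grid, ``workaU`` is its U-grid average, and ``wghta`` and ``maska`` are user-supplied weight and mask arrays dimensioned (``nx_block``, ``ny_block``, ``max_blocks``) with valid halo values:

.. code-block:: fortran

   ! masked, weighted average from the T grid to the U grid, using the
   ! user-supplied weight and mask arrays in place of the internal
   ! gridcell area and mask (all field names here are hypothetical)
   call grid_average_X2Y('S', worka, 'T', wghta, maska, workaU, 'U')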
Examples of usage -can be found in the source code, but the following example maps the uocn and vocn fields from their native -forcing/coupling grid to the ``U`` grid using a masked, area-weighted, average method. - -.. code-block:: fortran - - call grid_average_X2Y('S', uocn, grid_ocn_dynu, uocnU, 'U') - call grid_average_X2Y('S', vocn, grid_ocn_dynv, vocnU, 'U') - - .. _performance: *************** @@ -630,8 +359,7 @@ The user specifies the total number of tasks and threads in **cice.settings** and the block size and decompostion in the namelist file. The main trades offs are the relative efficiency of large square blocks versus model internal load balance -as CICE computation cost is very small for ice-free blocks. The code -is parallelized over blocks for both MPI and OpenMP. +as CICE computation cost is very small for ice-free blocks. Smaller, more numerous blocks provides an opportunity for better load balance by allocating each processor both ice-covered and ice-free blocks. But smaller, more numerous blocks becomes @@ -642,18 +370,6 @@ volume-to-surface ratio important for communication cost. Often 3 to 8 blocks per processor provide the decompositions flexiblity to create reasonable load balance configurations. -Like MPI, load balance -of blocks across threads is important for efficient performance. Most of the OpenMP -threading is implemented with ``SCHEDULE(runtime)``, so the OMP_SCHEDULE env -variable can be used to set the OpenMPI schedule. The default ``OMP_SCHEDULE`` -setting is defined by the -variable ``ICE_OMPSCHE`` in **cice.settings**. ``OMP_SCHEDULE`` values of "STATIC,1" -and "DYNAMIC,1" are worth testing. The OpenMP implementation in -CICE is constantly under review, but users should validate results and -performance on their machine. CICE should be bit-for-bit with different block sizes, -different decompositions, different MPI task counts, and different OpenMP threads. -Finally, we recommend the ``OMP_STACKSIZE`` env variable should be set to 32M or greater. - The ``distribution_type`` options allow standard cartesian distributions of blocks, redistribution via a ‘rake’ algorithm for improved load balancing across processors, and redistribution based on space-filling @@ -711,9 +427,7 @@ block equally. This is useful in POP which always has work in each block and is written with a lot of array syntax requiring calculations over entire blocks (whether or not land is present). This option is provided in CICE as well for -direct-communication compatibility with POP. Blocks that contain 100% -land grid cells are eliminated with 'block'. The 'blockall' option is identical -to 'block' but does not do land block elimination. The ‘latitude’ option +direct-communication compatibility with POP. The ‘latitude’ option weights the blocks based on latitude and the number of ocean grid cells they contain. Many of the non-cartesian decompositions support automatic land block elimination and provide alternative ways to @@ -747,7 +461,7 @@ characteristics. In the ‘sectcart’ case, the domain is divided into four (east-west,north-south) quarters and the loops are done over each, sequentially. The ``wghtfile`` decomposition drives the decomposition based on -weights provided in a weight file. That file should be a netCDF +weights provided in a weight file. That file should be a netcdf file with a double real field called ``wght`` containing the relative weight of each gridcell. :ref:`fig-distrbB` (b) and (c) show an example. 
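A weight file of this form could be generated ahead of time. The following is a minimal sketch, assuming the netCDF Fortran library is available; the grid size, dimension names, output filename, and weighting pattern are placeholders, and error checking is omitted for brevity:

.. code-block:: fortran

   program make_wghtfile
      ! write a decomposition weight file of the form described above:
      ! a netCDF file with a double-precision field 'wght' holding the
      ! relative work weight of each gridcell
      use netcdf
      implicit none
      integer, parameter :: nx = 100, ny = 116   ! placeholder global grid size
      integer :: ncid, dim_i, dim_j, varid, status
      double precision :: wght(nx,ny)

      wght = 1.0d0                               ! baseline weight everywhere
      wght(:, ny/3:2*ny/3) = 4.0d0               ! weight a band of rows more heavily

      status = nf90_create('cice_wghtfile.nc', nf90_clobber, ncid)
      status = nf90_def_dim(ncid, 'ni', nx, dim_i)
      status = nf90_def_dim(ncid, 'nj', ny, dim_j)
      status = nf90_def_var(ncid, 'wght', nf90_double, (/dim_i, dim_j/), varid)
      status = nf90_enddef(ncid)
      status = nf90_put_var(ncid, varid, wght)
      status = nf90_close(ncid)
   end program make_wghtfile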
The weights associated with each gridcell will be @@ -815,141 +529,20 @@ schemes and the aerosol tracers, and the level-ice pond parameterization additionally requires the level-ice tracers. -.. _timemanagerplus: - -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Time Manager and Initialization -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The time manager is an important piece of the CICE model. - -.. _timemanager: - -**************************** -Time Manager -**************************** - -The primary prognostic variables in the time manager are ``myear``, -``mmonth``, ``mday``, and ``msec``. These are integers and identify -the current model year, month, day, and second respectively. -The model timestep is ``dt`` with units of seconds. See :ref:`parameters` -for additional information about choosing an appropriate timestep. -The internal variables ``istep``, ``istep0``, and ``istep1`` keep -track of the number of timesteps. ``istep`` is the counter for -the current run and is set to 0 at the start of each run. ``istep0`` -is the step count at the start of a long multi-restart run, and -``istep1`` is the step count of a long multi-restart run and -is continuous across model restarts. - -In general, the time manager should be advanced by calling -*advance_timestep*. This subroutine in **ice_calendar.F90** -automatically advances the model time by ``dt``. It also advances -the istep numbers and calls subroutine *calendar* to update -additional calendar data. - -The namelist variable ``use_restart_time`` specifies whether to -use the time and step numbers saved on a restart file or whether -to set the initial model time to the namelist values defined by -``year_init``, ``month_init``, ``day_init``, and ``sec_init``. -Normally, ``use_restart_time`` is set to false on the initial run. -In continue mode, use_restart_time is ignored and the restart -date is always used to initialize the model run. -More information about the restart capability can be found in :ref:`restartfiles`. - -Several different calendars are supported including noleap (365 days -per year), 360-day (twelve 30 day months per year), and gregorian -(leap days every 4 years except every 100 years except every 400 -years). The gregorian calendar in CICE is formally a proleptic gregorian -calendar without any discontinuties over time. The calendar is set -by specifying ``days_per_year`` and ``use_leap_years`` in the -namelist, and the following combinations are supported, - -.. _tab-cal: - -.. table:: Supported Calendar Options - - +----------------------+----------------------+------------+ - | ``days_per_year`` | ``use_leap_years`` | calendar | - +======================+======================+============+ - | 365 | false | noleap | - +----------------------+----------------------+------------+ - | 365 | true | gregorian | - +----------------------+----------------------+------------+ - | 360 | false | 360-day | - +----------------------+----------------------+------------+ - - -The history (:ref:`history`) and restart (:ref:`restartfiles`) -outputs and frequencies are specified in namelist and -are computed relative to a reference date defined by the namelist -``histfreq_base`` and ``dumpfreq_base``. Valid values for each are -`zero` and `init`. If set to `zero`, all output will be relative -to the absolute reference year-month-day date, 0000-01-01. This is the default -value for ``histfreq_base``, so runs with different initial -dates will have identical output. 
If the ``histfreq_base`` or -``dumpfreq_base`` are set to `init`, all frequencies -will be relative to the model initial date specified by ``year_init``, -``month_init``, and ``day_init``. ``sec_init`` plays no role -in setting output frequencies. `init` is the default for -``dumpfreq_base`` and makes it easy to generate restarts -5 or 10 model days after startup as we often do in testing. -Both ``histfreq_base`` and ``dumpfreq_base`` are arrays -and can be set for each stream separately. - -In general, output is always -written at the start of the year, month, day, or hour without -any ability to shift the phase. For instance, monthly output -is always written on the first of the month. It is not possible, -for instance, to write monthly data once a month on the 10th of the month. -In the same way, quarterly data for Dec-Jan-Feb vs Jan-Feb-Mar -is not easily controlled. A better approach is to create monthly -data and then to aggregate to quarters as a post-processing step. -The history and restart (``histfreq``, ``dumpfreq``) setting `1` -indicates output at a frequency of timesteps. This is the character -`1` as opposed to the integer 1. This frequency output -is computed using ``istep1``, the model timestep. This -may vary with each run depending on several factors including the -model timestep, initial date, and value of ``istep0``. - -The model year is limited by some integer math. In particular, calculation -of elapsed hours in **ice_calendar.F90**, and the model year is -limited to the value of ``myear_max`` set in that file. Currently, that's -200,000 years. - -The time manager was updated in early 2021. The standalone model -was modified, and some tests were done in a coupled framework after -modifications to the high level coupling interface. For some coupled models, the -coupling interface may need to be updated when updating CICE with the new time manager. -In particular, the old prognostic variable ``time`` no longer exists in CICE, -``year_init`` only defines the model initial year, and -the calendar subroutine is called without any arguments. One can -set the namelist variables ``year_init``, ``month_init``, ``day_init``, -``sec_init``, and ``dt`` in conjuction with ``days_per_year`` and -``use_leap_years`` to initialize the model date, timestep, and calendar. -To overwrite the default/namelist settings in the coupling layer, -set the **ice_calendar.F90** variables ``myear``, ``mmonth``, ``mday``, -``msec`` and ``dt`` after the namelists have been read. Subroutine -*calendar* should then be called to update all the calendar data. -Finally, subroutine *advance_timestep* should be used to advance -the model time manager. It advances the step numbers, advances -time by ``dt``, and updates the calendar data. The older method -of manually advancing the steps and adding ``dt`` to ``time`` should -be deprecated. - .. _init: -**************************** -Initialization and Restarts -**************************** +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Initialization and coupling +~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ice model’s parameters and variables are initialized in several steps. Many constants and physical parameters are set in -**ice_constants.F90**. Namelist variables (:ref:`tabnamelist`), -whose values can be altered at run time, are handled in *input_data* +**ice\_constants.F90**. Namelist variables (:ref:`tabnamelist`), +whose values can be altered at run time, are handled in *input\_data* and other initialization routines. 
These variables are given default values in the code, which may then be changed when the input file -**ice_in** is read. Other physical constants, numerical parameters, and +**ice\_in** is read. Other physical constants, numerical parameters, and variables are first set in initialization routines for each ice model component or module. Then, if the ice model is being restarted from a previous run, core variables are read and reinitialized in @@ -965,70 +558,18 @@ layers and the ice thickness distribution defined by ``kcatbound`` = 0. Restart information for some tracers is also included in the netCDF restart files. -Three namelist variables generally control model initialization, ``runtype``, -``ice_ic``, and ``use_restart_time``. The valid values for ``runtype`` -are ``initial`` or ``continue``. When ``runtype`` = `continue`, the -restart filename is stored in a small text (pointer) file, ``use_restart_time`` -is forced to true and ``ice_ic`` plays no role. When ``runtype`` = -`initial`, ``ice_ic`` has three options, ``none``, ``internal``, -or *filename*. These initial states are no-ice, namelist driven initial -condition, and ice defined by a file respectively. If ``ice_ic`` is set -to ``internal``, the initial state is defined by the namelist values -``ice_data_type``, ``ice_data_dist``, and ``ice_data_conc``. In `initial` mode, -``use_restart_time`` should generally be set to false and the initial -time is then defined by ``year_init``, ``month_init``, ``day_init``, -and ``sec_init``. These combinations options are summarized in -:ref:`tab-ic`. - -Restart files and initial condition files are generally the same format and -can be the same files. -They contain the model state from a particular instance in time. In general, -that state includes the physical and dynamical state as well as the -state of optional tracers. Reading of various tracer groups can -be independently controlled by various restart flags. In other -words, a restart file can be used to initialize a new configuration -where new tracers are used (i.e. bgc). In that case, the physical -state of the model will be read, but if bgc tracers don't exist on the -restart file, they can be initialized from scratch. - -In ``continue`` mode, a pointer file is used to restart the model. -In this mode, the CICE model writes out a small text (pointer) file -to the run directory that names the most recent restart file. On -restart, the model reads the pointer file which defines the -name of the restart file. The model then reads that restart file. -By having this feature, the ice namelist does not need to be constantly -updated with the latest -restart filename, and the model can be automatically resubmitted. -Manually editing the pointer file in the middle of a run will reset -the restart filename and allow the run to continue. - -Table :ref:`tab-ic` shows ``runtype``, ``ice_ic``, and ``use_restart_time`` -namelist combinations for initializing -the model. If namelist defines the start date, it's done with -``year_init``, ``month_init``, ``day_init``, and ``sec_init``. - -.. _tab-ic: - -.. 
table:: Ice Initialization - - +----------------+--------------------------+--------------------------------------+----------------------------------------+ - | ``runtype`` | ``ice_ic`` | ``use_restart_time`` | Note | - +================+==========================+======================================+========================================+ - | `initial` | `none` | not used | no ice, | - | | | | namelist defines start date | - +----------------+--------------------------+--------------------------------------+----------------------------------------+ - | `initial` | `internal` or | not used | set by namelist ice_data_type, | - | | `default` | | ice_data_dist, ice_data_conc | - +----------------+--------------------------+--------------------------------------+----------------------------------------+ - | `initial` | *filename* | false | read ice state from filename, | - | | | | namelist defines start date | - +----------------+--------------------------+--------------------------------------+----------------------------------------+ - | `initial` | *filename* | true | read ice state from filename, | - | | | | restart file defines start date | - +----------------+--------------------------+--------------------------------------+----------------------------------------+ - | `continue` | not used | not used | pointer file defines restart file, | - | | | | restart file defines start date | - +----------------+--------------------------+--------------------------------------+----------------------------------------+ +Three namelist variables control model initialization, ``ice_ic``, ``runtype``, +and ``restart``, as described in :ref:`tab-ic`. It is possible to do an +initial run from a file **filename** in two ways: (1) set runtype = +‘initial’, restart = true and ice\_ic = **filename**, or (2) runtype = +‘continue’ and pointer\_file = **./restart/ice.restart\_file** where +**./restart/ice.restart\_file** contains the line +“./restart/[filename]". The first option is convenient when repeatedly +starting from a given file when subsequent restart files have been +written. With this arrangement, the tracer restart flags can be set to +true or false, depending on whether the tracer restart data exist. With +the second option, tracer restart flags are set to ‘continue’ for all +active tracers. An additional namelist option, ``restart_ext`` specifies whether halo cells are included in the restart files. This option is useful for tripole and @@ -1038,21 +579,42 @@ An additional namelist option, ``restart_coszen`` specifies whether the cosine of the zenith angle is included in the restart files. This is mainly used in coupled models. -MPI is initialized in *init_communicate* for both coupled and +MPI is initialized in *init\_communicate* for both coupled and stand-alone MPI runs. The ice component communicates with a flux coupler or other climate components via external routines that handle the -variables listed in the `Icepack documentation `_. +variables listed in the `Icepack documentation `_. For stand-alone runs, -routines in **ice_forcing.F90** read and interpolate data from files, +routines in **ice\_forcing.F90** read and interpolate data from files, and are intended merely to provide guidance for the user to write his or her own routines. Whether the code is to be run in stand-alone or coupled mode is determined at compile time, as described below. +Table :ref:`tab-ic` shows ice initial state resulting from combinations of +``ice_ic``, ``runtype`` and ``restart``. 
:math:`^a`\ If false, restart is reset to +true. :math:`^b`\ restart is reset to false. :math:`^c`\ ice_ic is +reset to ‘none.’ + +.. _tab-ic: + +.. table:: Ice Initial State + + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + | ice\_ic | | | | + +================+==========================+======================================+========================================+ + | | initial/false | initial/true | continue/true (or false\ :math:`^a`) | + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + | none | no ice | no ice\ :math:`^b` | restart using **pointer\_file** | + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + | default | SST/latitude dependent | SST/latitude dependent\ :math:`^b` | restart using **pointer\_file** | + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + | **filename** | no ice\ :math:`^c` | start from **filename** | restart using **pointer\_file** | + +----------------+--------------------------+--------------------------------------+----------------------------------------+ + .. _parameters: -********************************** +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Choosing an appropriate time step -********************************** +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The time step is chosen based on stability of the transport component (both horizontal and in thickness space) and on resolution of the @@ -1106,9 +668,9 @@ t_e`) is thus .. math:: dte = dt\_dyn/ndte. -A second parameter, :math:`E_\circ` (``elasticDamp``), defines the elastic wave +A second parameter, :math:`E_\circ` (``eyc``), defines the elastic wave damping timescale :math:`T`, described in Section :ref:`dynam`, as -``elasticDamp * dt_dyn``. The forcing terms are not updated during the subcycling. +``eyc * dt_dyn``. The forcing terms are not updated during the subcycling. Given the small step (``dte``) at which the EVP dynamics model is subcycled, the elastic parameter :math:`E` is also limited by stability constraints, as discussed in :cite:`Hunke97`. Linear stability @@ -1117,7 +679,10 @@ stable as long as the subcycling time step :math:`\Delta t_e` sufficiently resolves the damping timescale :math:`T`. For the stability analysis we had to make several simplifications of the problem; hence the location of the boundary between stable and unstable regions is -merely an estimate. The current default parameters for the EVP and EAP are :math:`ndte=240` and :math:`E_\circ=0.36`. For high resolution applications, it is however recommended to increase the value of :math:`ndte` :cite:`Koldunov19`, :cite:`Bouchat22`. +merely an estimate. In practice, the ratio +:math:`\Delta t_e ~:~ T ~:~ \Delta t`  = 1 : 40 : 120 provides both +stability and acceptable efficiency for time steps (:math:`\Delta t`) on +the order of 1 hour. Note that only :math:`T` and :math:`\Delta t_e` figure into the stability of the dynamics component; :math:`\Delta t` does not. Although @@ -1136,89 +701,9 @@ relaxation parameter ``arlx1i`` effectively sets the damping timescale in the problem, and ``brlx`` represents the effective subcycling :cite:`Bouillon13` (see Section :ref:`revp`). -.. _modelio: - -~~~~~~~~~~~~~~~~~~~~~~~~ -Model Input and Output -~~~~~~~~~~~~~~~~~~~~~~~~ - -.. 
_iooverview: - -************* -IO Overview -************* - -CICE provides the ability to read and write binary unformatted or netCDF -data via a number of different methods. The IO implementation is specified -both at build-time (via selection of specific source code) and run-time (via namelist). -Three different IO packages are available in CICE under the directory -**cicecore/cicedyn/infrastructure/io**. Those are io_binary, io_netcdf, and -io_pio2, and those support IO thru binary, netCDF (https://www.unidata.ucar.edu/software/netcdf), -and PIO (https://github.com/NCAR/ParallelIO) interfaces respectively. -The io_pio2 directory supports both PIO1 and PIO2 and can write data thru the -netCDF or parallel netCDF (pnetCDF) interface. The netCDF history files are CF-compliant, and -header information for data contained in the netCDF files is displayed with -the command ``ncdump -h filename.nc``. To select the io source code, set ``ICE_IOTYPE`` -in **cice.settings** to ``binary``, ``netcdf``, ``pio1``, or ``pio2``. - -At run-time, more detailed IO settings are available. ``restart_format`` and -``history_format`` namelist options specify the method and format further. Valid options -are listed in :ref:`formats`. These options specify the format of new files created -by CICE. Existing files can be read in any format as long as it's consistent -with ``ICE_IOTYPE`` defined. Note that with ``ICE_IOTYPE = binary``, the format name -is actually ignored. The CICE netCDF output contains a global metadata attribute, ``io_flavor``, -that indicates the format chosen for the file. ``ncdump -k filename.nc`` also -provides information about the specific netCDF file format. -In general, the detailed format is not enforced for input files, so any netCDF format -can be read in CICE regardless of CICE namelist settings. - -.. _formats: - -.. table:: CICE IO formats - - +--------------+----------------------+-------------+---------------------+ - | **Namelist** | **Format** | **Written** | **Valid With** | - | **Option** | | **Thru** | **ICE_IOTYPE** | - +--------------+----------------------+-------------+---------------------+ - | binary | Fortran binary | fortran | binary | - +--------------+----------------------+-------------+---------------------+ - | cdf1 | netCDF3-classic | netCDF | netcdf, pio1, pio2 | - +--------------+----------------------+-------------+---------------------+ - | cdf2 | netCDF3-64bit-offset | netCDF | netcdf, pio1, pio2 | - +--------------+----------------------+-------------+---------------------+ - | cdf5 | netCDF3-64bit-data | netCDF | netcdf, pio1, pio2 | - +--------------+----------------------+-------------+---------------------+ - | default | binary or cdf1, | varies | binary, netcdf, | - | | depends on ICE_IOTYPE| | pio1, pio2 | - +--------------+----------------------+-------------+---------------------+ - | hdf5 | netCDF4 hdf5 | netCDF | netcdf, pio1, pio2 | - +--------------+----------------------+-------------+---------------------+ - | pnetcdf1 | netCDF3-classic | pnetCDF | pio1, pio2 | - +--------------+----------------------+-------------+---------------------+ - | pnetcdf2 | netCDF3-64bit-offset | pnetCDF | pio1, pio2 | - +--------------+----------------------+-------------+---------------------+ - | pnetcdf5 | netCDF3-64bit-data | pnetCDF | pio1, pio2 | - +--------------+----------------------+-------------+---------------------+ - -There are additional namelist options that affect PIO performance for both -restart and history output. 
[``history_,restart_``] -[``iotasks,root,stride``] -namelist options control the PIO processor/task usage and specify the total number of -IO tasks, the root IO task, and the IO task stride respectively. -``history_rearranger`` and ``restart_rearranger`` -define the PIO rearranger strategy. Finally, [``history_,restart_``] -[``deflate,chunksize``] provide -controls for hdf5 compression and chunking for the ``hdf5`` options -in both netCDF and PIO output. ``hdf5`` is written serially thru the -netCDF library and in parallel thru the PIO library in CICE. Additional -details about the netCDF and PIO settings and implementations can -found in (https://www.unidata.ucar.edu/software/netcdf) -and (https://github.com/NCAR/ParallelIO). - -netCDF requires CICE compilation with a netCDF library built externally. -PIO requires CICE compilation with a PIO and netCDF library built externally. -Both netCDF and PIO can be built with many options which may require additional libraries -such as MPI, hdf5, or pnetCDF. +~~~~~~~~~~~~ +Model output +~~~~~~~~~~~~ .. _history: @@ -1226,85 +711,72 @@ such as MPI, hdf5, or pnetCDF. History files ************* -CICE provides history data output in binary unformatted or netCDF formats via -separate implementations of binary, netCDF, and PIO interfaces as described -above. In addition, ``history_format`` as well as other history namelist -options control the specific file format as well as features related to -IO performance, see :ref:`iooverview`. - -The data is written at the period(s) given by ``histfreq`` and -``histfreq_n`` relative to a reference date specified by ``histfreq_base``. -The files are written to binary or netCDF files prepended by the ``history_file`` -and ``history_suffix`` -namelist setting. The settings for history files are set in the +CICE provides history data in binary unformatted or netCDF formats via +separate implementations of binary, netcdf, and pio source code under the +directory **infrastructure/io**. ``ICE_IOTYPE`` defined in cice.settings +specifies the IO type and defines which source code directory is compiled. +At the present time, binary, netcdf, and PIO are exclusive formats +for history and restart files, and history and restart file must use the same +io package. The namelist variable ``history_format`` further refines the +format approach or style for some io packages. + +Model output data is averaged over the period(s) given by ``histfreq`` and +``histfreq_n``, and written to binary or netCDF files prepended by ``history_file`` +in **ice_in**. These settings for history files are set in the **setup_nml** section of **ice_in** (see :ref:`tabnamelist`). -The history filenames will have a form like -**[history_file][history_suffix][_freq].[timeID].[nc,da]** -depending on the namelist options chosen. With binary files, a separate header +If ``history_file`` = ‘iceh’ then the +filenames will have the form **iceh.[timeID].nc** or **iceh.[timeID].da**, +depending on the output file format chosen in **cice.settings** (set +``ICE_IOTYPE``). The netCDF history files are CF-compliant; header information for +data contained in the netCDF files is displayed with the command ``ncdump -h +filename.nc``. Parallel netCDF output is available using the PIO library; the +output file attribute ``io_flavor`` distinguishes output files written with PIO from +those written with standard netCDF. With binary files, a separate header file is written with equivalent information. 
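For netCDF output, a quick way to confirm how a particular history file was written
(the file name below is hypothetical) is to inspect it with the standard netCDF
utilities::

   ncdump -k iceh.1998-01.nc                    # low-level file kind (classic, 64-bit offset, netCDF-4, ...)
   ncdump -h iceh.1998-01.nc | grep io_flavor   # CICE attribute recording how the file was written
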
Standard fields are output -according to settings in the **icefields_nml** section of **ice_in** +according to settings in the **icefields\_nml** section of **ice\_in** (see :ref:`tabnamelist`). The user may add (or subtract) variables not already available in the namelist by following the instructions in section :ref:`addhist`. -The history implementation has been divided into several +The history module has been divided into several modules based on the desired formatting and on the variables themselves. Parameters, variables and routines needed by multiple -modules is in **ice_history_shared.F90**, while the primary routines +modules is in **ice\_history\_shared.F90**, while the primary routines for initializing and accumulating all of the history variables are in -**ice_history.F90**. These routines call format-specific code in the -**io_binary**, **io_netcdf** and **io_pio2** directories. History +**ice\_history.F90**. These routines call format-specific code in the +**io\_binary**, **io\_netcdf** and **io\_pio** directories. History variables specific to certain components or parameterizations are -collected in their own history modules (**ice_history_bgc.F90**, -**ice_history_drag.F90**, **ice_history_mechred.F90**, -**ice_history_pond.F90**). +collected in their own history modules (**ice\_history\_bgc.F90**, +**ice\_history\_drag.F90**, **ice\_history\_mechred.F90**, +**ice\_history\_pond.F90**). The history modules allow output at different frequencies. Five output -options (``1``, ``h``, ``d``, ``m``, ``y``) are available simultaneously for ``histfreq`` -during a run, and each stream must have a unique value for ``histfreq``. In other words, ``d`` -cannot be used by two different streams. Each stream has an associated frequency -set by ``histfreq_n``. The frequency is -relative to a reference date specified by the corresponding entry in ``histfreq_base``. -Each stream can be instantaneous or time averaged -data over the frequency internal. The ``hist_avg`` namelist turns on time averaging -for each stream individually. -The same model variable can be written to multiple history streams (ie. daily ``d`` and -monthly ``m``) via its namelist flag, `f_` :math:`\left<{var}\right>`, while ``x`` -turns that history variable off. For example, ``f_aice = 'md'`` will write aice to the -monthly and daily streams. -Grid variable history output flags are logicals and written to all stream files if -turned on. If there are no namelist flags +frequencies (``1``, ``h``, ``d``, ``m``, ``y``) are available simultaneously during a run. +The same variable can be output at different frequencies (say daily and +monthly) via its namelist flag, `f\_` :math:`\left<{var}\right>`, which +is now a character string corresponding to ``histfreq`` or ‘x’ for none. +(Grid variable flags are still logicals, since they are written to all +files, no matter what the frequency is.) If there are no namelist flags with a given ``histfreq`` value, or if an element of ``histfreq_n`` is 0, then -no file will be written at that frequency. The history filenames are set in -the subroutine **construct_filename** in **ice_history_shared.F90**. -In cases where two streams produce the same identical filename, the model will -abort. Use the namelist ``hist_suffix`` to make stream filenames unique. -More information about how the frequency is computed is found in :ref:`timemanager`. -Also, some -Earth Sytem Models require the history file time axis to be centered in the averaging -interval. 
The flag ``hist_time_axis`` will allow the user to chose ``begin``, ``middle``, -or ``end`` for the time stamp. +no file will be written at that frequency. The output period can be +discerned from the filenames. For example, in the namelist: :: - histfreq = '1', 'h', 'd', 'm', 'y' - histfreq_n = 1 , 6 , 0 , 1 , 1 - histfreq_base = 'zero','zero','zero','zero','zero' - hist_avg = .true.,.true.,.true.,.true.,.true. - f_hi = '1' - f_hs = 'h' - f_Tsfc = 'd' - f_aice = 'm' - f_meltb = 'mh' - f_iage = 'x' + ``histfreq`` = ’1’, ’h’, ’d’, ’m’, ’y’ + ``histfreq_n`` = 1, 6, 0, 1, 1 + ``f_hi`` = ’1’ + ``f_hs`` = ’h’ + ``f_Tsfc`` = ’d’ + ``f_aice`` = ’m’ + ``f_meltb`` = ’mh’ + ``f_iage`` = ’x’ Here, ``hi`` will be written to a file on every timestep, ``hs`` will be written once every 6 hours, ``aice`` once a month, ``meltb`` once a month AND -once every 6 hours, and ``Tsfc`` and ``iage`` will not be written. All streams -are time averaged over the interval although because one stream has ``histfreq=1`` and -``histfreq_n=1``, that is equivalent to instantaneous output each model timestep. +once every 6 hours, and ``Tsfc`` and ``iage`` will not be written. From an efficiency standpoint, it is best to set unused frequencies in ``histfreq`` to ‘x’. Having output at all 5 frequencies takes nearly 5 times @@ -1312,14 +784,6 @@ as long as for a single frequency. If you only want monthly output, the most efficient setting is ``histfreq`` = ’m’,’x’,’x’,’x’,’x’. The code counts the number of desired streams (``nstreams``) based on ``histfreq``. -There is no restart capability built into the history implementation. If the -model stops in the middle of a history accumulation period, that data is lost -on restart, and the accumulation is zeroed out at startup. That means the -dump frequency (see :ref:`restartfiles`) and history frequency need to be -somewhat coordinated. For -example, if monthly history files are requested, the dump frequency should be -set to an integer number of months. - The history variable names must be unique for netCDF, so in cases where a variable is written at more than one frequency, the variable name is appended with the frequency in files after the first one. In the example @@ -1327,14 +791,19 @@ above, ``meltb`` is called ``meltb`` in the monthly file (for backward compatibility with the default configuration) and ``meltb_h`` in the 6-hourly file. -If ``write_ic`` is set to true in **ice_in**, a snapshot of the same set +Using the same frequency twice in ``histfreq`` will have unexpected +consequences and currently will cause the code to abort. It is not +possible at the moment to output averages once a month and also once +every 3 months, for example. + +If ``write_ic`` is set to true in **ice\_in**, a snapshot of the same set of history fields at the start of the run will be written to the history -directory in **iceh_ic.[timeID].nc(da)**. Several history variables are -hard-coded for instantaneous output regardless of the ``hist_avg`` averaging flag, at +directory in **iceh\_ic.[timeID].nc(da)**. Several history variables are +hard-coded for instantaneous output regardless of the averaging flag, at the frequency given by their namelist flag. -The normalized principal components of internal ice stress (``sig1``, ``sig2``) are computed -in *principal_stress* and written to the history file. This calculation +The normalized principal components of internal ice stress are computed +in *principal\_stress* and written to the history file. 
This calculation is not necessary for the simulation; principal stresses are merely computed for diagnostic purposes and included here for the user’s convenience. @@ -1342,7 +811,7 @@ convenience. Several history variables are available in two forms, a value representing an average over the sea ice fraction of the grid cell, and another that is multiplied by :math:`a_i`, representing an average over -the grid cell area. Our naming convention attaches the suffix “_ai" to +the grid cell area. Our naming convention attaches the suffix “\_ai" to the grid-cell-mean variable names. Beginning with CICE v6, history variables requested by the Sea Ice Model Intercomparison @@ -1352,9 +821,9 @@ Project (SIMIP) :cite:`Notz16` have been added as possible history output variab `daily `_ requested SIMIP variables provide the names of possible history fields in CICE. However, each of the additional variables can be output at any temporal frequency -specified in the **icefields_nml** section of **ice_in** as detailed above. +specified in the **icefields\_nml** section of **ice\_in** as detailed above. Additionally, a new history output variable, ``f_CMIP``, has been added. When ``f_CMIP`` -is added to the **icefields_nml** section of **ice_in** then all SIMIP variables +is added to the **icefields\_nml** section of **ice\_in** then all SIMIP variables will be turned on for output at the frequency specified by ``f_CMIP``. It may also be helpful for debugging to increase the precision of the history file @@ -1367,56 +836,35 @@ Diagnostic files Like ``histfreq``, the parameter ``diagfreq`` can be used to regulate how often output is written to a log file. The log file unit to which diagnostic -output is written is set in **ice_fileunits.F90**. If ``diag_type`` = +output is written is set in **ice\_fileunits.F90**. If ``diag_type`` = ‘stdout’, then it is written to standard out (or to **ice.log.[ID]** if you redirect standard out as in **cice.run**); otherwise it is written -to the file given by ``diag_file``. - -In addition to the standard diagnostic +to the file given by ``diag_file``. In addition to the standard diagnostic output (maximum area-averaged thickness, velocity, average albedo, total ice area, and total ice and snow volumes), the namelist options ``print_points`` and ``print_global`` cause additional diagnostic information to be computed and written. ``print_global`` outputs global sums that are useful for checking global conservation of mass and energy. -``print_points`` writes data for two specific grid points defined by the -input namelist ``lonpnt`` and ``latpnt``. By default, one +``print_points`` writes data for two specific grid points. Currently, one point is near the North Pole and the other is in the Weddell Sea; these -may be changed in **ice_in**. - -The namelist ``debug_model`` prints detailed -debug diagnostics for a single point as the model advances. The point is defined -by the namelist ``debug_model_i``, ``debug_model_j``, ``debug_model_iblk``, -and ``debug_model_task``. These are the local i, j, block, and mpi task index values -of the point to be diagnosed. This point is defined in local index space -and can be values in the array halo. If the local point is not defined in -namelist, the point associated with ``lonpnt(1)`` and ``latpnt(1)`` is used. -``debug_model`` is normally used when the model aborts and needs to be debugged -in detail at a particular (usually failing) grid point. - -Memory use diagnostics are controlled by the logical namelist ``memory_stats``. 
-This feature uses an intrinsic query in C defined in **ice_memusage_gptl.c**. -Memory diagnostics will be written at the the frequency defined by -diagfreq. - -Timers are declared and initialized in **ice_timers.F90**, and the code -to be timed is wrapped with calls to *ice_timer_start* and -*ice_timer_stop*. Finally, *ice_timer_print* writes the results to +may be changed in **ice\_in**. + +Timers are declared and initialized in **ice\_timers.F90**, and the code +to be timed is wrapped with calls to *ice\_timer\_start* and +*ice\_timer\_stop*. Finally, *ice\_timer\_print* writes the results to the log file. The optional “stats" argument (true/false) prints -additional statistics. The "stats" argument can be set by the ``timer_stats`` -namelist. Calling *ice_timer_print_all* prints all of +additional statistics. Calling *ice\_timer\_print\_all* prints all of the timings at once, rather than having to call each individually. Currently, the timers are set up as in :ref:`timers`. Section :ref:`addtimer` contains instructions for adding timers. The timings provided by these timers are not mutually exclusive. For -example, the Column timer includes the timings from several other -timers, while timer Bound is called from many different places in -the code, including the dynamics and advection routines. The -Dynamics, Advection, and Column timers do not overlap and represent -most of the overall model work. +example, the column timer (5) includes the timings from 6–10, and +subroutine *bound* (timer 15) is called from many different places in +the code, including the dynamics and advection routines. -The timers use *MPI_WTIME* for parallel runs and the F90 intrinsic -*system_clock* for single-processor runs. +The timers use *MPI\_WTIME* for parallel runs and the F90 intrinsic +*system\_clock* for single-processor runs. .. 
_timers: @@ -1429,64 +877,54 @@ The timers use *MPI_WTIME* for parallel runs and the F90 intrinsic +--------------+-------------+----------------------------------------------------+ | 1 | Total | the entire run | +--------------+-------------+----------------------------------------------------+ - | 2 | Timeloop | total minus initialization and exit | + | 2 | Step | total minus initialization and exit | +--------------+-------------+----------------------------------------------------+ - | 3 | Dynamics | dynamics | + | 3 | Dynamics | EVP | +--------------+-------------+----------------------------------------------------+ | 4 | Advection | horizontal transport | +--------------+-------------+----------------------------------------------------+ | 5 | Column | all vertical (column) processes | +--------------+-------------+----------------------------------------------------+ - | 6 | Thermo | vertical thermodynamics, part of Column timer | + | 6 | Thermo | vertical thermodynamics | +--------------+-------------+----------------------------------------------------+ - | 7 | Shortwave | SW radiation and albedo, part of Thermo timer | + | 7 | Shortwave | SW radiation and albedo | +--------------+-------------+----------------------------------------------------+ - | 8 | Ridging | mechanical redistribution, part of Column timer | + | 8 | Meltponds | melt ponds | +--------------+-------------+----------------------------------------------------+ - | 9 | FloeSize | flow size, part of Column timer | + | 9 | Ridging | mechanical redistribution | +--------------+-------------+----------------------------------------------------+ - | 10 | Coupling | sending/receiving coupler messages | + | 10 | Cat Conv | transport in thickness space | +--------------+-------------+----------------------------------------------------+ - | 11 | ReadWrite | reading/writing files | + | 11 | Coupling | sending/receiving coupler messages | +--------------+-------------+----------------------------------------------------+ - | 12 | Diags | diagnostics (log file) | + | 12 | ReadWrite | reading/writing files | +--------------+-------------+----------------------------------------------------+ - | 13 | History | history output | + | 13 | Diags | diagnostics (log file) | +--------------+-------------+----------------------------------------------------+ - | 14 | Bound | boundary conditions and subdomain communications | + | 14 | History | history output | +--------------+-------------+----------------------------------------------------+ - | 15 | BundBound | halo update bundle copy | + | 15 | Bound | boundary conditions and subdomain communications | +--------------+-------------+----------------------------------------------------+ - | 16 | BGC | biogeochemistry, part of Thermo timer | + | 16 | BGC | biogeochemistry | +--------------+-------------+----------------------------------------------------+ - | 17 | Forcing | forcing | - +--------------+-------------+----------------------------------------------------+ - | 18 | 1d-evp | 1d evp, part of Dynamics timer | - +--------------+-------------+----------------------------------------------------+ - | 19 | 2d-evp | 2d evp, part of Dynamics timer | - +--------------+-------------+----------------------------------------------------+ - | 20 | UpdState | update state | - +--------------+-------------+----------------------------------------------------+ - -.. 
_restartfiles: ************* Restart files ************* -CICE reads and writes restart data in binary unformatted or netCDF formats via -separate implementations of binary, netCDF, and PIO interfaces as described -above. In addition, ``restart_format`` as well as other restart namelist -options control the specific file format as well as features related to -IO performance, see :ref:`iooverview`. +CICE provides restart data in binary unformatted or netCDF formats via +separate implementations of binary, netcdf, and pio source code under the +directory **infrastructure/io**. ``ICE_IOTYPE`` defined in cice.settings +specifies the IO type and defines which source code directory is compiled. +At the present time, binary, netcdf, and PIO are exclusive formats +for history and restart files, and history and restart file must use the same +io package. The namelist variable ``restart_format`` further refines the +format approach or style for some io packages. The restart files created by CICE contain all of the variables needed for a full, exact restart. The filename begins with the character string -defined by the ``restart_file`` namelist input, and the restart dump frequency -is given by the namelist -variables ``dumpfreq`` and ``dumpfreq_n`` relative to a reference date -specified by ``dumpfreq_base``. Multiple restart frequencies are supported -in the code with a similar mechanism to history streams. The pointer to the filename from +‘iced.’, and the restart dump frequency is given by the namelist +variables ``dumpfreq`` and ``dumpfreq_n``. The pointer to the filename from which the restart data is to be read for a continuation run is set in ``pointer_file``. The code assumes that auxiliary binary tracer restart files will be identified using the same pointer and file name prefix, @@ -1498,9 +936,8 @@ Additional namelist flags provide further control of restart behavior. ``dump_last`` = true causes a set of restart files to be written at the end of a run when it is otherwise not scheduled to occur. The flag ``use_restart_time`` enables the user to choose to use the model date -provided in the restart files for initial runs. If ``use_restart_time`` = false then the -initial model date stamp is determined from the namelist parameters, -``year_init``, ``month_init``, ``day_init``, and ``sec_init``. +provided in the restart files. If ``use_restart_time`` = false then the +initial model date stamp is determined from the namelist parameters. lcdf64 = true sets 64-bit netCDF output, allowing larger file sizes. Routines for gathering, scattering and (unformatted) reading and writing @@ -1512,7 +949,13 @@ restarts on the various tripole grids. They are accessed by setting available when using PIO; in this case extra halo update calls fill ghost cells for tripole grids (do not use PIO for regional grids). -Restart files are available for the CICE code distributions +Two netCDF restart files are available for the CICE v5 and v6 code distributions for the gx3 and gx1 grids (see :ref:`force` for information about obtaining these files). -They were created using the default model -configuration and run for multiple years using the JRA55 forcing. +They were created using the default v5 model +configuration, but +initialized with no ice. The gx3 case was run for 1 year using the 1997 +forcing data provided with the code. The gx1 case was run for 20 years, +so that the date of restart in the file is 1978-01-01. 
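For reference, a minimal sketch of the restart-related **setup_nml** entries discussed
above is shown below; the values are illustrative only, and the full list of options and
defaults is given in :ref:`tabnamelist`::

   &setup_nml
     runtype          = 'continue'
     restart          = .true.
     use_restart_time = .true.
     pointer_file     = './restart/ice.restart_file'
     dumpfreq         = 'm'
     dumpfreq_n       = 1
     dump_last        = .true.
   /
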
Note that the +restart dates provided in the restart files can be overridden using the +namelist variables ``use_restart_time``, ``year_init`` and ``istep0``. The +forcing time can also be overridden using ``fyear_init``. diff --git a/doc/source/user_guide/ug_running.rst b/doc/source/user_guide/ug_running.rst index 9337b3c47..957cfc4fc 100644 --- a/doc/source/user_guide/ug_running.rst +++ b/doc/source/user_guide/ug_running.rst @@ -16,125 +16,60 @@ To run stand-alone, CICE requires - bash and csh - gmake (GNU Make) -- Fortran and C compilers (Intel, PGI, GNU, Cray, NVHPC, AOCC, and NAG have been tested) -- NetCDF (optional, but required to test standard configurations that have netCDF grid, input, and forcing files) -- MPI (optional, but required for running on more than 1 processor) -- PIO (optional, but required for running with PIO I/O interfaces) +- Fortran and C compilers (Intel, PGI, GNU, Cray, and NAG have been tested) +- NetCDF (this is actually optional but required to test out of the box configurations) +- MPI (this is actually optional but without it you can only run on 1 processor) Below are lists of software versions that the Consortium has tested at some point. There is no guarantee that all compiler versions work with all CICE model versions. At any given point, the Consortium is regularly testing on several different compilers, but not -necessarily on all possible versions or combinations. CICE supports both PIO1 and PIO2. To -use PIO1, the ``USE_PIO1`` macro should also be set. A CICE goal is to be relatively portable +necessarily on all possible versions or combinations. A CICE goal is to be relatively portable across different hardware, compilers, and other software. As a result, the coding implementation tends to be on the conservative side at times. If there are problems porting to a particular system, please let the Consortium know. The Consortium has tested the following compilers at some point, -- AOCC 3.0.0 -- Intel ifort 15.0.3.187 -- Intel ifort 16.0.1.150 -- Intel ifort 17.0.1.132 -- Intel ifort 17.0.2.174 -- Intel ifort 17.0.5.239 -- Intel ifort 18.0.1.163 -- Intel ifort 18.0.5 -- Intel ifort 19.0.2 -- Intel ifort 19.0.3.199 -- Intel ifort 19.1.0.166 -- Intel ifort 19.1.1.217 -- Intel ifort 19.1.2.254 -- Intel ifort 2021.4.0 -- Intel ifort 2021.6.0 -- Intel ifort 2021.8.0 -- Intel ifort 2021.9.0 -- Intel ifort 2022.2.1 +- Intel 15.0.3.187 +- Intel 16.0.1.150 +- Intel 17.0.1.132 +- Intel 17.0.2.174 +- Intel 17.0.5.239 +- Intel 18.0.1.163 +- Intel 19.0.2 +- Intel 19.0.3.199 - PGI 16.10.0 -- PGI 19.9-0 -- PGI 20.1-0 -- PGI 20.4-0 - GNU 6.3.0 - GNU 7.2.0 - GNU 7.3.0 -- GNU 7.7.0 -- GNU 8.3.0 -- GNU 9.3.0 -- GNU 10.1.0 -- GNU 11.2.0 -- GNU 12.1.0 -- GNU 12.2.0 -- Cray CCE 8.5.8 -- Cray CCE 8.6.4 -- Cray CCE 13.0.2 -- Cray CCE 14.0.3 -- Cray CCE 15.0.1 +- Cray 8.5.8 +- Cray 8.6.4 - NAG 6.2 -- NVC 23.5-0 -The Consortium has tested the following MPI implementations and versions, +The Consortium has tested the following mpi versions, - MPICH 7.3.2 - MPICH 7.5.3 - MPICH 7.6.2 - MPICH 7.6.3 -- MPICH 7.7.0 - MPICH 7.7.6 -- MPICH 7.7.7 -- MPICH 7.7.19 -- MPICH 7.7.20 -- MPICH 8.1.14 -- MPICH 8.1.21 -- MPICH 8.1.25 - Intel MPI 18.0.1 -- Intel MPI 18.0.4 -- Intel MPI 2019 Update 6 -- Intel MPI 2019 Update 8 - MPT 2.14 - MPT 2.17 - MPT 2.18 - MPT 2.19 -- MPT 2.20 -- MPT 2.21 -- MPT 2.22 -- MPT 2.25 -- mvapich2-2.3.3 - OpenMPI 1.6.5 -- OpenMPI 4.0.2 The NetCDF implementation is relatively general and should work with any version of NetCDF 3 or 4. 
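If it is unclear which netCDF library is installed on a particular machine, the netCDF
configuration utilities (assuming they are available in the user's path) can report the
version, for example::

   nc-config --version     # netCDF C library version
   nf-config --version     # netCDF Fortran library version
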
The Consortium has tested - NetCDF 4.3.0 - NetCDF 4.3.2 - NetCDF 4.4.0 -- NetCDF 4.4.1.1.3 -- NetCDF 4.4.1.1.6 +- NetCDF 4.4.1.1.32 - NetCDF 4.4.1.1 - NetCDF 4.4.2 - NetCDF 4.5.0 -- NetCDF 4.5.2 - NetCDF 4.6.1.3 -- NetCDF 4.6.3 -- NetCDF 4.6.3.2 -- NetCDF 4.7.2 -- NetCDF 4.7.4 -- NetCDF 4.8.1 -- NetCDF 4.8.1.1 -- NetCDF 4.8.1.3 -- NetCDF 4.9.0.1 -- NetCDF 4.9.0.3 -- NetCDF 4.9.2 - -CICE has been tested with - -- PIO 1.10.1 -- PIO 2.5.4 -- PIO 2.5.9 -- PIO 2.6.0 -- PIO 2.6.1 -- PnetCDF 1.12.2 -- PnetCDF 1.12.3 -- PnetCDF 2.6.2 Please email the Consortium if this list can be extended. @@ -208,8 +143,11 @@ Some hints: - To change namelist, manually edit the **ice_in** file - To change batch settings, manually edit the top of the **cice.run** or **cice.test** (if running a test) file -- When the run scripts are submitted, the current **ice_in**, **cice.settings**, and **env.[machine]** files are copied from the case directory into the run directory. Users should generally not edit files in the run directory as these are overwritten when following the standard workflow. **cice.settings** can be sourced to establish the case values in the login shell. -- Some useful aliases can be found in the :ref:`aliases` section +- When the run scripts are submitted, the current **ice_in**, **cice.settings**, and **env.[machine]** files are copied from the case directory into the run directory. Users should generally not edit files in the run directory as these are overwritten when following the standard workflow. **cice.settings** can be sourced to establish the case values in the login shell. An alias like the following can be established to quickly switch between case and run directories:: + + alias cdrun 'cd `\grep "setenv ICE_RUNDIR" cice.settings | awk "{print "\$"NF}"`' + alias cdcase 'cd `\grep "setenv ICE_CASEDIR" cice.settings | awk "{print "\$"NF}"`' + - To turn on the debug compiler flags, set ``ICE_BLDDEBUG`` in **cice.setttings** to true. It is also possible to use the ``debug`` option (``-s debug``) when creating the case with **cice.setup** to set this option automatically. - To change compiler options, manually edit the Macros file. To add user defined preprocessor macros, modify ``ICE_CPPDEFS`` in **cice.settings** using the syntax ``-DCICE_MACRO``. - To clean the build before each compile, set ``ICE_CLEANBUILD`` in **cice.settings** to true (this is the default value), or use the ``buildclean`` option (``-s buildclean``) when creating the case with **cice.setup**. To not clean before the build, set ``ICE_CLEANBUILD`` in **cice.settings** to false, or use the ``buildincremental`` option (``-s buildincremental``) when creating the case with **cice.setup**. It is recommended that the ``ICE_CLEANBUILD`` be set to true if there are any questions about whether the build is proceeding properly. @@ -322,7 +260,7 @@ Some of the options are ``bgcISPOL`` and ``bgcNICE`` specify bgc options -``boxadv``, ``boxnodyn``, and ``boxrestore`` are simple box configurations +``boxadv``, ``boxdyn``, and ``boxrestore`` are simple box configurations ``alt*`` which turns on various combinations of dynamics and physics options for testing @@ -558,9 +496,8 @@ in the **env.[machine]** file. This can also be manually changed in the **cice. .. 
_laptops: -Porting to Laptops or Personal Computers +Porting to Laptop or Personal Computers ----------------------------------------- - To get the required software necessary to build and run CICE, and use the plotting and quality control scripts included in the repository, a `conda `_ environment file is available at : ``configuration/scripts/machines/environment.yml``. @@ -582,10 +519,10 @@ On macOS: .. code-block:: bash - # Download the Miniconda installer to ~/miniconda.sh - curl -L https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -o ~/miniconda.sh + # Download the Miniconda installer to ~/Downloads/miniconda.sh + curl -L https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -o ~/Downloads/miniconda.sh # Install Miniconda - bash ~/miniconda.sh + bash ~/Downloads/miniconda.sh # Follow the prompts @@ -778,14 +715,7 @@ Next, create the "cice" conda environment from the ``environment.yml`` file in t conda env create -f configuration/scripts/machines/environment.yml -This step needs to be done only once and will maintain a static conda environment. To update the conda environment later, use - -.. code-block:: bash - - conda env create -f configuration/scripts/machines/environment.yml --force - -This will update the conda environment to the latest software versions. - +This step needs to be done only once. .. _using_conda_env: @@ -844,7 +774,7 @@ A few notes about the conda configuration: - It is not recommeded to run other test suites than ``quick_suite`` or ``travis_suite`` on a personal computer. - The conda environment is automatically activated when compiling or running the model using the ``./cice.build`` and ``./cice.run`` scripts in the case directory. These scripts source the file ``env.conda_{linux.macos}``, which calls ``conda activate cice``. -- To use the "cice" conda environment with the Python plotting (see :ref:`timeseries`) and quality control (QC) scripts (see :ref:`CodeValidation`), you must manually activate the environment: +- To use the "cice" conda environment with the Python plotting (see :ref:`timeseries`) and quality control scripts (see :ref:`CodeCompliance`), you must manually activate the environment: .. code-block:: bash @@ -907,50 +837,6 @@ modify the scripts and input settings in the case directory, NOT the run directo In general, files in the run directory are overwritten by versions in the case directory when the model is built, submitted, and run. -.. _aliases: - -Use of Shell Aliases -------------------------- - -This section provides a list of some potentially useful shell aliases that leverage the CICE -scripts. These are not defined by CICE and are not required for using CICE. They -are provided as an example of what can be done by users. -The current **ice_in**, **cice.settings**, and **env.[machine]** files are copied from -the case directory into the run directory when the model is run. Users can create aliases -leveraging the variables in these files. Aliases like the following can be established -in shell startup files or otherwise at users discretion: - -.. 
code-block:: bash - - #!/bin/tcsh - # From a case or run directory, source the necessary environment files to run CICE - alias cice_env 'source env.*; source cice.settings' - # Go from case directory to run directory and back (see https://stackoverflow.com/a/34874698/) - alias cdrun 'set rundir=`\grep "setenv ICE_RUNDIR" cice.settings | awk "{print "\$"NF}"` && cd $rundir' - alias cdcase 'set casedir=`\grep "setenv ICE_CASEDIR" cice.settings | awk "{print "\$"NF}"` && cd $casedir' - - #!/bin/bash - # From case/test directory, go to run directory - alias cdrun='cd $(cice_var ICE_RUNDIR)' - # From run directory, go to case/test directory - alias cdcase='cd $(cice_var ICE_CASEDIR)' - # monitor current cice run (from ICE_RUNDIR directory) - alias cice_tail='tail -f $(ls -1t cice.runlog.* |head -1)' - # open log from last CICE run (from ICE_CASEDIR directory) - alias cice_lastrun='$EDITOR $(ls -1t logs/cice.runlog.* |head -1)' - # open log from last CICE build (from ICE_CASEDIR directory) - alias cice_lastbuild='$EDITOR $(ls -1t logs/cice.bldlog.* |head -1)' - # show CICE run directory when run in the case directory - alias cice_rundir='cice_var ICE_RUNDIR' - # open a tcsh shell and source env.* and cice.settings (useful for launching CICE in a debugger) - alias cice_shell='tcsh -c "cice_env; tcsh"' - - ## Functions - # Print the value of a CICE variable ($1) from cice.settings - cice_var() { - \grep "setenv $1" cice.settings | awk "{print "\$"3}" - } - .. _timeseries: Timeseries Plotting @@ -969,7 +855,7 @@ To use the ``timeseries.py`` script, the following requirements must be met: * matplotlib Python package * datetime Python package -See :ref:`CodeValidation` for additional information about how to setup the Python +See :ref:`CodeCompliance` for additional information about how to setup the Python environment, but we recommend using ``pip`` as follows: :: pip install --user numpy diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst index 6867214b5..bea67e6ae 100644 --- a/doc/source/user_guide/ug_testing.rst +++ b/doc/source/user_guide/ug_testing.rst @@ -8,7 +8,7 @@ Testing CICE This section documents primarily how to use the CICE scripts to carry out CICE testing. Exactly what to test is a separate question and depends on the kinds of code changes being made. Prior to merging -changes to the CICE Consortium main, changes will be reviewed and +changes to the CICE Consortium master, changes will be reviewed and developers will need to provide a summary of the tests carried out. There is a base suite of tests provided by default with CICE and this @@ -23,8 +23,7 @@ The testing scripts support several features - Ability to compare results to prior baselines to verify bit-for-bit (``--bcmp``) - Ability to define where baseline tests are stored (``--bdir``) - Ability to compare tests against each other (``--diff``) - - Ability to set or overide the batch account number (``--acct``) and queue name (``--queue``) - - Ability to control how test suites execute (``--setup-only``, ``--setup-build``, ``--setup-build-run``, ``--setup-build-submit``) + - Ability to set account number (``--acct``), which is otherwise not set and may result in tests not being submitted .. _indtests: @@ -230,9 +229,7 @@ boundary around the entire domain. 
It includes the following namelist modificat - ``dyrect``: ``16.e5`` cm - ``ktherm``: ``-1`` (disables thermodynamics) - ``coriolis``: ``constant`` (``f=1.46e-4`` s\ :math:`^{-1}`) -- ``ice_data_type`` : ``box2001`` (special initial ice mask) -- ``ice_data_conc`` : ``p5`` -- ``ice_data_dist`` : ``box2001`` (special ice concentration initialization) +- ``ice_data_type`` : ``box2001`` (special ice concentration initialization) - ``atm_data_type`` : ``box2001`` (special atmospheric and ocean forcing) Ocean stresses are computed as in :cite:`Hunke01` where they are circular and centered @@ -260,9 +257,7 @@ boundary around the entire domain. It includes the following namelist modificat - ``ktherm``: ``-1`` (disables thermodynamics) - ``kridge``: ``-1`` (disables ridging) - ``kdyn``: ``-1`` (disables dynamics) -- ``ice_data_type`` : ``boxslotcyl`` (special initial ice mask) -- ``ice_data_conc`` : ``c1`` -- ``ice_data_dist`` : ``uniform`` +- ``ice_data_type`` : ``boxslotcyl`` (special ice concentration and velocity initialization) Dynamics is disabled because we directly impose a constant ice velocity. The ice velocity field is circular and centered in the square domain, and such that the slotted cylinder makes a complete revolution with a period :math:`T=` 12 days : @@ -302,6 +297,15 @@ results.csh script in the testsuite.[testid]:: cd testsuite.[testid] ./results.csh +To report the test results, as is required for Pull Requests to be accepted into +the master the CICE Consortium code see :ref:`testreporting`. + +If using the ``--tdir`` option, that directory must not exist before the script is run. The tdir directory will be +created by the script and it will be populated by all tests as well as scripts that support the +test suite:: + + ./cice.setup --suite base_suite --mach wolf --env gnu --testid myid --tdir /scratch/$user/testsuite.myid + Multiple suites are supported on the command line as comma separated arguments:: ./cice.setup --suite base_suite,decomp_suite --mach wolf --env gnu --testid myid @@ -310,52 +314,13 @@ If a user adds ``--set`` to the suite, all tests in that suite will add that opt ./cice.setup --suite base_suite,decomp_suite --mach wolf --env gnu --testid myid -s debug -The option settings defined at the command line have precedence over the test suite +The option settings defined in the suite have precendence over the command line values if there are conflicts. The predefined test suites are defined under **configuration/scripts/tests** and -the files defining the suites have a suffix of .ts in that directory. Some of the -available tests suites are - -``quick_suite`` - consists of a handful of basic CICE tests - -``base_suite`` - consists of a much large suite of tests covering much of the CICE functionality - -``decomp_suite`` - checks that different decompositions and pe counts produce bit-for-bit results - -``omp_suite`` - checks that OpenMP single thread and multi-thread cases are bit-for-bit identical - -``io_suite`` - tests the various IO options including binary, netcdf, and pio. PIO should be installed locally and accessible to the CICE build system to make full use of this suite. 
- -``perf_suite`` - runs a series of tests to evaluate model scaling and performance - -``reprosum_suite`` - verifies that CICE log files are bit-for-bit with different decompositions and pe counts when the bfbflag is set to reprosum - -``gridsys_suite`` - tests B, C, and CD grid_ice configurations - -``prod_suite`` - consists of a handful of tests running 5 to 10 model years and includes some QC testing. These tests will be relatively expensive and take more time compared to other suites. - -``unittest_suite`` - runs unit tests in the CICE repository - -``travis_suite`` - consists of a small suite of tests suitable for running on low pe counts. This is the suite used with Github Actions for CI in the workflow. - -``first_suite`` - this small suite of tests is redundant with tests in other suites. It runs several of the critical baseline tests that other test compare to. It can improve testing turnaround if listed first in a series of test suites. - -When running multiple suites on the command line (i.e. ``--suite first_suite,base_suite,omp_suite``) the suites will be run in the order defined by the user and redundant tests across multiple suites will be created and executed only once. - -The format for the test suite file is relatively simple. +the files defining the suites +have a suffix of .ts in that directory. The format for the test suite file +is relatively simple. It is a text file with white space delimited columns that define a handful of values in a specific order. The first column is the test name, the second the grid, the third the pe count, @@ -371,6 +336,7 @@ Lines that begin with # or are blank are ignored. For example, smoke col 1x1 debug,run1year restart col 1x1 debug restart col 1x1 diag1 + restart col 1x1 pondcesm restart col 1x1 pondlvl restart col 1x1 pondtopo @@ -416,8 +382,8 @@ following options are valid for suites, ``--report`` This is only used by ``--suite`` and when set, invokes a script that sends the test results to the results page when all tests are complete. Please see :ref:`testreporting` for more information. -``--coverage`` - When invoked, code coverage diagnostics are generated. This will modify the build and reduce optimization and generate coverage reports using lcov or codecov tools. General use is not recommended, this is mainly used as a diagnostic to periodically assess test coverage. Please see :ref:`codecoverage` for more information. +``--codecov`` + When invoked, code coverage diagnostics are generated. This will modify the build and reduce optimization. The results will be uploaded to the **codecov.io** website via the **report_codecov.csh** script. General use is not recommended, this is mainly used as a diagnostic to periodically assess test coverage. Please see :ref:`codecoverage` for more information. ``--setup-only`` This is only used by ``--suite`` and when set, just creates the suite testcases. It does not build or submit them to run. By default, the suites do ``--setup-build-submit``. @@ -445,26 +411,8 @@ The *cice.setup** options ``--setup-only``, ``--setup-build``, and ``--setup-bui which means by default the test suite builds and submits the jobs. By defining other values for those environment variables, users can control the suite script. When using **suite.submit** manually, the string ``true`` (all lowercase) is the only string that will turn on a feature, and both SUITE_RUN and SUITE_SUBMIT cannot be true at the same time. 
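As a sketch of this manual workflow (csh/tcsh syntax; the suite directory name is
illustrative), an existing suite can be rebuilt and resubmitted without rerunning
**cice.setup**::

   cd testsuite.myid
   setenv SUITE_BUILD true
   setenv SUITE_RUN false
   setenv SUITE_SUBMIT true
   ./suite.submit
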
-By leveraging the **cice.setup** command line arguments ``--setup-only``, ``--setup-build``, and ``--setup-build-run`` as well as the environment variables SUITE_BUILD, SUITE_RUN, and SUITE_SUBMIT, users can run **cice.setup** and **suite.submit** in various combinations to quickly setup, setup and build, submit, resubmit, run interactively, or rebuild and resubmit full testsuites quickly and easily. See :ref:`examplesuites` for an example. - -The script **create_fails.csh** will process the output from results.csh and generate a new -test suite file, **fails.ts**, from the failed tests. -**fails.ts** can then be edited and passed into ``cice.setup --suite fails.ts ...`` to rerun -subsets of failed tests to more efficiently move thru the development, testing, and -validation process. However, a full test suite should be run on the final development -version of the code. - -To report the test results, as is required for Pull Requests to be accepted into -the main the CICE Consortium code see :ref:`testreporting`. - -If using the ``--tdir`` option, that directory must not exist before the script is run. The tdir directory will be -created by the script and it will be populated by all tests as well as scripts that support the -test suite:: - - ./cice.setup --suite base_suite --mach wolf --env gnu --testid myid --tdir /scratch/$user/testsuite.myid - +By leveraging the **cice.setup** command line arguments ``--setup-only``, ``--setup-build``, and ``--setup-build-run`` as well as the environment variables SUITE_BUILD, SUITE_RUN, and SUITE_SUBMIT, users can run **cice.setup** and **suite.submit** in various combinations to quickly setup, setup and build, submit, resubmit, run interactively, or rebuild and resubmit full testsuites quickly and easily. See below for an example. -.. _examplesuites: Test Suite Examples ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -513,7 +461,7 @@ Test Suite Examples ./results.csh If there are conflicts between the ``--set`` options in the suite and on the command line, - the command line options will take precedence. + the suite will take precedence. 5) **Multiple test suites from a single command line** @@ -577,8 +525,8 @@ Test Suite Examples This will compare to results saved in the baseline [bdir] directory under the subdirectory cice.v01a. With the ``--bcmp`` option, the results will be tested against prior baselines to verify bit-for-bit, which is an important step prior - to approval of many (not all, see :ref:`validation`) Pull Requests to incorporate code into - the CICE Consortium main branch. You can use other regression options as well. + to approval of many (not all, see :ref:`compliance`) Pull Requests to incorporate code into + the CICE Consortium master code. You can use other regression options as well. (``--bdir`` and ``--bgen``) 10) **Basic test suite, use of default string in regression testing** @@ -603,7 +551,7 @@ Test Suite Examples set mydate = `date -u "+%Y%m%d"` git clone https://github.com/myfork/cice cice.$mydate --recursive cd cice.$mydate - ./cice.setup --suite base_suite --mach conrad --env cray,gnu,intel,pgi --testid $mydate --bcmp default --bgen default --bdir /tmp/work/user/CICE_BASELINES_MAIN + ./cice.setup --suite base_suite --mach conrad --env cray,gnu,intel,pgi --testid $mydate --bcmp default --bgen default --bdir /tmp/work/user/CICE_BASELINES_MASTER When this is invoked, a new set of baselines will be generated and compared to the prior results each time without having to change the arguments. 
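One optional way to run such a dated-baseline script on a regular schedule is to call it
from cron; the entry below is purely hypothetical and the path and timing would need to be
adapted to the local system::

   # hypothetical crontab entry: regenerate and compare baselines nightly at 02:00
   0 2 * * * /home/user/cice_scripts/nightly_baselines.csh > /home/user/cice_scripts/nightly_baselines.log 2>&1
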
@@ -677,78 +625,6 @@ Test Suite Examples The setenv syntax is for csh/tcsh. In bash, the syntax would be SUITE_BUILD=true. -.. _unittesting: - -Unit Testing ---------------- - -Unit testing is supported in the CICE scripts. Unit tests are implemented -via a distinct top level driver that tests CICE model features explicitly. -These drivers can be found in **cicecore/drivers/unittest/**. In addition, -there are some script files that also support the unit testing. - -The unit tests build and run very much like the standard CICE model. -A case is created and model output is saved to the case logs directory. -Unit tests can be run as part of a test suite and the output is -compared against an earlier set of output using a simple diff of the -log files. - -For example, to run the existing calendar unit test as a case, - -.. code-block:: bash - - ./cice.setup -m onyx -e intel --case calchk01 -p 1x1 -s calchk - cd calchk01 - ./cice.build - ./cice.submit - -Or to run the existing calendar unit test as a test, - -.. code-block:: bash - - ./cice.setup -m onyx -e intel --test unittest -p 1x1 --testid cc01 -s calchk --bgen cice.cc01 - cd onyx_intel_unittest_gx3_1x1_calchk.cc01/ - ./cice.build - ./cice.submit - -To create a new unit test, add a new driver in **cicecore/driver/unittest**. -The directory name should be the name of the test. -Then create the appropriate set_nml or set_env files for the new unittest name -in **configuration/scripts/options**. In particular, **ICE_DRVOPT** and -**ICE_TARGET** need to be defined in a set_env file. Finally, edit -**configuration/scripts/Makefile** and create a target for the unit test. -The unit tests calchk or helloworld can be used as examples. - -The following strings should be written to the log file at the end of the unit test run. -The string "COMPLETED SUCCESSFULLY" will indicate the run ran to completion. The string -"TEST COMPLETED SUCCESSFULLY" will indicate all the unit testing passed during the run. -The unit test log file output is compared as part of regression testing. The string -"RunningUnitTest" indicates the start of the output to compare. -That string should be written to the log file at the start of the unit test model output. -These strings will be queried by the testing scripts and will impact the test reporting. -See other unit tests for examples about how these strings could be written. - -The following are brief descriptions of some of the current unit tests, - - - **bcstchk** is a unit test that exercises the methods in ice_broadcast.F90. This test does not - depend on the CICE grid to carry out the testing. By testing with a serial and mpi configuration, - both sets of software are tested independently and correctness is verified. - - **calchk** is a unit test that exercises the CICE calendar over 100,000 years and verifies correctness. - This test does not depend on the CICE initialization. - - **gridavgchk** is a unit test that exercises the CICE grid_average_X2Y methods and verifies results. - - **halochk** is a unit test that exercises the CICE haloUpdate methods and verifies results. - - **helloworld** is a simple test that writes out helloworld and uses no CICE infrastructure. - This tests exists to demonstrate how to build a unit test by specifying the object files directly - in the Makefile - - **optargs** is a unit test that tests passing optional arguments down a calling tree and verifying - that the optional attribute is preserved correctly. 
- - **opticep** is a cice test that turns off the icepack optional arguments passed into icepack. This - can only be run with a subset of CICE/Icepack cases to verify the optional arguments are working correctly. - - **sumchk** is a unit test that exercises the methods in ice_global_reductions.F90. This test requires - that a CICE grid and decomposition be initialized, so CICE_InitMod.F90 is leveraged to initialize - the model prior to running a suite of unit validation tests to verify correctness. - - .. _testreporting: Test Reporting @@ -759,7 +635,7 @@ to the official CICE Consortium Test-Results `wiki page `_. You may need write permission on the wiki. If you are interested in using the wiki, please contact the Consortium. Note that in order for code to be -accepted to the CICE main branch through a Pull Request it is necessary +accepted to the CICE master through a Pull Request it is necessary for the developer to provide proof that their code passes relevant tests. This can be accomplished by posting the full results to the wiki, or by copying the testing summary to the Pull Request comments. @@ -791,73 +667,54 @@ wait for all runs to be complete, and run the results and report_results scripts Code Coverage Testing ------------------------------ -The ``--coverage`` feature in **cice.setup** provides a method to diagnose code coverage. +The ``--codecov`` feature in **cice.setup** provides a method to diagnose code coverage. This argument turns on special compiler flags including reduced optimization and then -invokes the gcov tool. Once runs are complete, either lcov or codecov can be used -to analyze the results. -This option is currently only available with the gnu compiler and on a few systems -with modified Macros files. In the current implementation, when ``--coverage`` is -invoked, the sandbox is copied to a new sandbox called something like cice_lcov_yymmdd-hhmmss. -The source code in the new sandbox is modified slightly to improve coverage statistics -and the full coverage suite is run there. - -At the present time, the ``--coverage`` flag invokes the lcov analysis automatically -by running the **report_lcov.csh** script in the test suite directory. The output -will show up at the `CICE lcov website `__. To -use the tool, you should have write permission for that repository. The lcov tool -should be run on a full multi-suite test suite, and it can -take several hours to process the data once the test runs are complete. A typical -instantiation would be -:: - - ./cice.setup --suite first_suite,base_suite,travis_suite,decomp_suite,reprosum_suite,io_suite,quick_suite --mach cheyenne --env gnu --testid cc01 --coverage - -Alternatively, codecov analysis can be carried out by manually running the **report_codecov.csh** -script from the test suite directory, but there are several ongoing problems with this approach and -it is not generally recommended. A script that summarizes the end-to-end process for codecov -analysis can be found in ..**configuration/scripts/tests/cice_test_codecov.csh**. The codecov -analysis is largely identical to the analysis performed by lcov, codecov just provides a nicer -web experience to view the output. - -This is a special diagnostic test and is not part of the standard model testing. -General use is not recommended, this is mainly used as a diagnostic to periodically -assess test coverage. 
- -..Because codecov.io does not support git submodule analysis right now, a customized -..repository has to be created to test CICE with Icepack integrated directly. The repository -..https://github.com/apcraig/Test_CICE_Icepack serves as the current default test repository. -..In general, to setup the code coverage test in CICE, the current CICE main has -..to be copied into the Test_CICE_Icepack repository, then the full test suite -..can be run with the gnu compiler with the ``--coverage`` argument. - -..The test suite will run and then a report will be generated and uploaded to -..the `codecov.io site `_ by the -..**report_codecov.csh** script. The env variable CODECOV_TOKEN needs to be defined -..either in the environment or in a file named **~/.codecov_cice_token**. That -..token provides write permission to the Test_CICE_Icepack codecov.io site and is available -..by contacting the Consortium team directly. - -..A script that carries out the end-to-end testing can be found in -..**configuration/scripts/tests/cice_test_codecov.csh** - -..This is a special diagnostic test and does not constitute proper model testing. -..General use is not recommended, this is mainly used as a diagnostic to periodically -..assess test coverage. The interaction with codecov.io is not always robust and -..can be tricky to manage. Some constraints are that the output generated at runtime -..is copied into the directory where compilation took place. That means each -..test should be compiled separately. Tests that invoke multiple runs -..(such as exact restart and the decomp test) will only save coverage information -..for the last run, so some coverage information may be lost. The gcov tool can -..be a little slow to run on large test suites, and the codecov.io bash uploader -..(that runs gcov and uploads the data to codecov.io) is constantly evolving. -..Finally, gcov requires that the diagnostic output be copied into the git sandbox for -..analysis. These constraints are handled by the current scripts, but may change -..in the future. - - -.. _validation: - -Code Validation Test (non bit-for-bit validation) +invokes the gcov tool. +This option is currently only available with the gnu compiler and on a few systems. + +Because codecov.io does not support git submodule analysis right now, a customized +repository has to be created to test CICE with Icepack integrated directly. The repository +https://github.com/apcraig/Test_CICE_Icepack serves as the current default test repository. +In general, to setup the code coverage test in CICE, the current CICE master has +to be copied into the Test_CICE_Icepack repository, then the code coverage tool can +be run on that repository. A sample script to do that would be:: + + git clone https://github.com/cice-consortium/cice cice.master --recursive + + git clone https://github.com/apcraig/test_cice_icepack + cd test_cice_icepack + git rm -r * + cp -p -r ../cice.master/* . + git add . + git commit -m "update to current cice master" + git push origin master + + ./cice.setup --suite first_suite,base_suite,travis_suite,decomp_suite,reprosum_suite,quick_suite -m gordon -e gnu --codecov --testid cc01 + +To use, submit a full test suite using an updated Test_CICE_Icepack version +and the gnu compiler with the ``--codecov`` argument. +The test suite will run and then a report will be generated and uploaded to +the `codecov.io site `_ by the +**report_codecov.csh** script. + +This is a special diagnostic test and does not constitute proper model testing. 
+General use is not recommended, this is mainly used as a diagnostic to periodically +assess test coverage. The interaction with codecov.io is not always robust and +can be tricky to manage. Some constraints are that the output generated at runtime +is copied into the directory where compilation took place. That means each +test should be compiled separately. Tests that invoke multiple runs +(such as exact restart and the decomp test) will only save coverage information +for the last run, so some coverage information may be lost. The gcov tool can +be a little slow to run on large test suites, and the codecov.io bash uploader +(that runs gcov and uploads the data to codecov.io) is constantly evolving. +Finally, gcov requires that the diagnostic output be copied into the git sandbox for +analysis. These constraints are handled by the current scripts, but may change +in the future. + + +.. _compliance: + +Code Compliance Test (non bit-for-bit validation) ---------------------------------------------------- A core tenet of CICE dycore and CICE innovations is that they must not change @@ -982,7 +839,7 @@ autocorrelation :math:`r_1`. .. _quadratic: -Quadratic Skill Validation Test +Quadratic Skill Compliance Test ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In addition to the two-stage test of mean sea ice thickness, we also @@ -1066,12 +923,12 @@ hemispheres, and must exceed a critical value nominally set to test and the Two-Stage test described in the previous section are provided in :cite:`Hunke18`. -.. _CodeValidation: +.. _CodeCompliance: -Code Validation Testing Procedure +Code Compliance Testing Procedure ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The CICE code validation (QC) test is performed by running a python script +The CICE code compliance test is performed by running a python script (**configurations/scripts/tests/QC/cice.t-test.py**). In order to run the script, the following requirements must be met: @@ -1081,11 +938,7 @@ In order to run the script, the following requirements must be met: * matplotlib Python package (optional) * basemap Python package (optional) -QC testing should be carried out using configurations (ie. namelist settings) that -exercise the active code modifications. Multiple configurations may need to be tested -in some cases. Developers can contact the Consortium for guidance or if there are questions. - -In order to generate the files necessary for the validation test, test cases should be +In order to generate the files necessary for the compliance test, test cases should be created with the ``qc`` option (i.e., ``--set qc``) when running cice.setup. This option results in daily, non-averaged history files being written for a 5 year simulation. @@ -1096,16 +949,8 @@ To install the necessary Python packages, the ``pip`` Python utility can be used pip install --user netCDF4 pip install --user numpy pip install --user matplotlib - pip install --user cartopy - -You can also setup a conda env with the same utitities - -.. code-block:: bash - conda env create -f configuration/scripts/tests/qctest.yml - conda activate qctest - -To run the validation test, setup a baseline run with the original baseline model and then +To run the compliance test, setup a baseline run with the original baseline model and then a perturbation run based on recent model changes. Use ``--set qc`` in both runs in addition to other settings needed. 
Then use the QC script to compare history output,
@@ -1148,16 +993,10 @@ Below is an example of a step-by-step procedure for testing a code change that m
 # Run a full regression test to verify bit-for-bit
 
 # Create a baseline dataset (only necessary if no baseline exists on the system)
- # if you want to replace an existing baseline, you should first delete the directory cice.my.baseline in ${ICE_BASELINE}.
 # git clone the baseline code
 
 ./cice.setup -m onyx -e intel --suite base_suite --testid base0 --bgen cice.my.baseline
 
- # Check the results
-
- cd testsuite.base0
- ./results.csh
-
 # Run the test suite with the new code
 
 # git clone the new code
@@ -1180,7 +1019,6 @@ If the regression comparisons fail, then you may want to run the QC test,
 
 # Create a QC baseline
 # From the baseline sandbox
- # Generate the test case(s) using options or namelist changes to activate new code modifications
 
 ./cice.setup -m onyx -e intel --test smoke -g gx1 -p 44x1 --testid qc_base -s qc,medium
 cd onyx_intel_smoke_gx1_44x1_medium_qc.qc_base
@@ -1189,10 +1027,9 @@ If the regression comparisons fail, then you may want to run the QC test,
 ./cice.submit
 
 # Create the t-test testing data
- # From the updated sandbox
- # Generate the same test case(s) as the baseline using options or namelist changes to activate new code modifications
+ # From the updated sandbox
 
- ./cice.setup -m onyx -e intel --test smoke -g gx1 -p 44x1 --testid qc_test -s qc,medium
+ ./cice.setup -m onyx -e intel --test smoke -g gx1 -p 44x1 --testid qc_test -s qc,medium
 cd onyx_intel_smoke_gx1_44x1_medium_qc.qc_test
 # modify ice_in to activate the namelist options that were determined above
 ./cice.build
@@ -1201,8 +1038,7 @@ If the regression comparisons fail, then you may want to run the QC test,
 
 # Wait for runs to finish
 # Perform the QC test
- # From the updated sandbox
- cp configuration/scripts/tests/QC/cice.t-test.py .
+ cp configuration/scripts/tests/QC/cice.t-test.py .
 ./cice.t-test.py /p/work/turner/CICE_RUNS/onyx_intel_smoke_gx1_44x1_medium_qc.qc_base \
 /p/work/turner/CICE_RUNS/onyx_intel_smoke_gx1_44x1_medium_qc.qc_test
@@ -1214,3 +1050,4 @@
 
 INFO:__main__:
 INFO:__main__:Quality Control Test PASSED
+